// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ Addu(a0, a0, num_extra_args + 1);
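  // The three extra arguments are the smi-tagged argument count, the target,
  // and the new.target pushed below; the additional +1 accounts for the
  // receiver that is already on the stack.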

  // Insert extra arguments.
  __ SmiTag(a0);
  __ Push(a0, a1, a3);
  __ SmiUntag(a0);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), PROTECT,
                             exit_frame_type == BUILTIN_EXIT);
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in t2 and the double value in f0.
  __ LoadRoot(t2, root_index);
  __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));

  Label done_loop, loop;
  __ mov(a3, a0);
  __ bind(&loop);
  {
    // Check if all parameters are done.
    __ Subu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
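    // (Lsa(rd, base, index, sa) computes rd = base + (index << sa).)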
    __ Lsa(at, sp, a3, kPointerSizeLog2);
    __ lw(a2, MemOperand(at));

    // Load the double value of the parameter into f2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ EnterBuiltinFrame(cp, a1, a0);
      __ Push(t2, a3);
      __ mov(a0, a2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(a2, v0);
      __ Pop(t2, a3);
      __ LeaveBuiltinFrame(cp, a1, a0);
      __ SmiUntag(a3);
      __ SmiUntag(a0);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(t2, &restore_smi);
        __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(t2, f0, t0);
        __ bind(&done_restore);
      }
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, t0);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (f0) and the next parameter value on the right hand side (f2),
    // using the Min/Max macro instructions.
    // We need to work out which HeapNumber (or smi) the result came from.
    Label compare_nan, set_value;
    __ BranchF(nullptr, &compare_nan, eq, f0, f2);
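    // Capture the raw bit pattern of the accumulator before the Min/Max, so
    // that we can tell afterwards whether the result came from f0 or f2.
    // (Move(dst_low, dst_high, fs) copies the 64-bit FPU value into a GPR
    // pair.)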
    __ Move(t0, t1, f0);
    if (kind == MathMaxMinKind::kMin) {
      __ MinNaNCheck_d(f0, f0, f2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ MaxNaNCheck_d(f0, f0, f2);
    }
    __ Move(at, t8, f0);
    __ Branch(&set_value, ne, t0, Operand(at));
    __ Branch(&set_value, ne, t1, Operand(t8));
    __ jmp(&loop);
    __ bind(&set_value);
    __ mov(t2, a2);
    __ jmp(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(t2, Heap::kNanValueRootIndex);
    __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ Addu(a0, a0, Operand(1));
  __ Lsa(sp, sp, a0, kPointerSizeLog2);
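  // On MIPS the instruction placed in a branch delay slot executes before the
  // branch takes effect; USE_DELAY_SLOT asks the macro assembler to fill the
  // slot with the following instruction instead of a nop.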
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t2);  // In delay slot.
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0.
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));  // In delay slot.
    __ mov(t0, a0);  // Store argc in t0.
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
  }

  // 2a. Convert first argument to number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }

  {
    // Drop all arguments including the receiver.
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::FromInt(0));
  __ DropAndRet(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0.
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));  // In delay slot.
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::FromInt(0));
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);  // first argument
    __ CallStub(&stub);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0.
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));
    __ mov(t0, a0);  // Store argc in t0.
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, t1, t1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg));
    __ Branch(&to_string, gt, t1, Operand(zero_reg));
    __ mov(v0, a0);
    __ jmp(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    ToStringStub stub(masm->isolate());
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ CallStub(&stub);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ jmp(&drop_frame_and_ret);

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ Drop(1);
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0.
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(t1, a0, Operand(1));
    __ Lsa(at, sp, t1, kPointerSizeLog2);
    __ lw(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t1, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t1, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      ToStringStub stub(masm->isolate());
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);  // first argument
    __ CallStub(&stub);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Lsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);

    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

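  // The runtime call leaves the resulting Code object in v0; skip past the
  // Code header to reach its entry point and tail call it.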
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : new target
  //  -- cp     : context
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(cp, a2, a0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(a1, a3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(t4, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      //  -- a1: constructor function
      //  -- a3: new target
      //  -- t4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ lw(a0, MemOperand(sp));
    }

    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t4, t4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
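    // On 32-bit MIPS a smi is the value shifted left by kSmiTagSize (1), so
    // scaling a smi-tagged count to a byte offset needs only
    // kPointerSizeLog2 - kSmiTagSize extra shift bits, as in the Lsa below.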
    __ SmiTag(t4, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize);
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(t4, t4, Operand(-2));
    __ Branch(&loop, greater_equal, t4, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ lw(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ lw(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ lw(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ Subu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ sll(t3, argc, kPointerSizeLog2);
  }
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ lw(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(t1, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Lsa(t2, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, t1);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- a2 : the resume mode (tagged)
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ li(t1, Operand(last_step_action));
  __ lb(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_if_stepping, ge, t1, Operand(StepIn));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(t1, Operand(debug_suspended_generator));
  __ lw(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1));
  __ bind(&stepping_prepared);

  // Push receiver.
  __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(t1);

  // ----------- S t a t e -------------
  //  -- a1    : the JSGeneratorObject to resume
  //  -- a2    : the resume mode (tagged)
  //  -- t0    : generator function
  //  -- cp    : generator context
  //  -- ra    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ Subu(a3, a3, Operand(Smi::FromInt(1)));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(a3, a3, a3);
  __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));

  // New-style (ignition/turbofan) generator object.
  {
    __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(a0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, t0);
    __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  // Old-style (full-codegen) generator object.
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ra, fp);
    __ Move(fp, sp);
    __ Push(cp, t0);

    // Restore the operand stack.
    __ lw(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
    __ lw(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
    __ Addu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Lsa(a3, a0, a3, kPointerSizeLog2 - 1);
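    // The FixedArray length is a smi, so the scale is reduced by kSmiTagSize
    // (1) when converting it to a byte offset; a3 now points past the last
    // operand stack element.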
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ Branch(&done_loop, eq, a0, Operand(a3));
      __ lw(t1, MemOperand(a0));
      __ Push(t1);
      __ Branch(USE_DELAY_SLOT, &loop);
      __ addiu(a0, a0, kPointerSize);  // In delay slot.
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
    __ sw(t1, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
    __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Move(v0, a1);  // Continuation expects generator object in v0.
    __ Jump(a3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, t0);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ lw(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
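  // The parameter size is stored in bytes (it is added to sp below without
  // scaling).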

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Addu(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ JumpIfRoot(kInterpreterBytecodeArrayRegister,
                Heap::kUndefinedValueRootIndex, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(t0, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, t0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));
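    // The frame size is in bytes; the loop below pushes one undefined value
    // per pointer-sized slot.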

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ lw(at, MemOperand(at));
  __ Call(at);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ Branch(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset));
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, t0, t1);
  __ Jump(t0);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ lw(a1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ lw(kContextRegister,
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, t0);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(v0);

    // Push function as argument and compile for baseline.
    __ push(a1);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(v0);
  }
  __ Jump(ra);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Addu(a3, a0, Operand(1));  // Add one for receiver.
  __ sll(a3, a3, kPointerSizeLog2);
  __ Subu(a3, a2, Operand(a3));
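  // The loop below pushes argc + 1 values (receiver plus arguments), walking
  // from a2 down to the boundary computed in a3.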

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t0, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (not including receiver)
  //  -- a3 : new target
  //  -- a1 : constructor to call
  //  -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ sll(t0, a0, kPointerSizeLog2);
  __ Subu(t0, a2, Operand(t0));

  // Push a slot for the receiver.
  __ push(zero_reg);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t1, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
  __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));
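  // Bytecode handlers return through ra, so when the dispatched handler
  // finishes, control resumes inside InterpreterEntryTrampoline at the
  // recorded offset.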

  // Initialize the dispatch table register.
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ lw(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, at);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ lw(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Addu(a1, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));
  __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
  __ lw(a1, MemOperand(a1));
  __ Jump(a1);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register argument_count = a0;
  Register closure = a1;
  Register new_target = a3;
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = a0;
  Register index = a2;
  __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));

  // Find literals.
  // a3  : native context
  // a2  : length / index
  // a0  : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = a3;
  __ lw(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = a1;
  Register array_pointer = t1;
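
  // Each optimized code map entry groups, as the kOffsetToPrevious* names
  // used below suggest, a context WeakCell, an OSR ast id, a literals
  // WeakCell, and a code WeakCell; the offsets are taken relative to `index`,
  // which points one past the entry being examined.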

  // Does the native context match?
  __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(array_pointer, map, Operand(at));
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousContext));
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Branch(&loop_bottom, ne, temp, Operand(native_context));
  // OSR id set to none?
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousLiterals));
  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
  __ lw(t0, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(t0, &maybe_cleared_weakcell);
  // t0 is a pointer, therefore temp is a WeakCell pointing to a literals
  // array.
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ jmp(&got_literals);

  // t0 is a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
  __ bind(&maybe_cleared_weakcell);
  __ Branch(&gotta_call_runtime, eq, t0, Operand(Smi::FromInt(0)));

  // Save the literals in the closure.
  __ bind(&got_literals);
  __ lw(t0, MemOperand(sp, 0));
  __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
  __ push(index);
  __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = t0;
  __ lw(entry,
        FieldMemOperand(array_pointer,
                        SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, t1);

  // Link the closure into the optimized function list.
  // t0 : code entry
  // a3 : native context
  // a1 : closure
  __ lw(t1,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ sw(closure,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Save closure before the write barrier.
  __ mov(t1, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
                            kRAHasNotBeenSaved, kDontSaveFPRegs);
  __ mov(closure, t1);
  __ pop(new_target);
  __ pop(argument_count);
  __ Jump(entry);

  __ bind(&loop_bottom);
  __ Subu(index, index,
          Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  __ pop(closure);

  // Last possibility. Check the context free optimized code map entry.
  __ lw(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
                                        SharedFunctionInfo::kSharedCodeIndex));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset));
  __ And(t1, t1, Operand(Code::KindField::kMask));
  __ srl(t1, t1, Code::KindField::kShift);
1447 __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN)); | |
1448 // Yes, install the full code. | |
1449 __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1450 __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); | |
1451 __ RecordWriteCodeEntryField(closure, entry, t1); | |
1452 __ Jump(entry); | |
1453 | |
1454 __ bind(&gotta_call_runtime); | |
1455 __ pop(closure); | |
1456 __ pop(new_target); | |
1457 __ pop(argument_count); | |
1458 __ bind(&gotta_call_runtime_no_stack); | |
1459 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); | |
1460 } | |
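// Informal summary of the fast path above (not part of the source): the
// SharedFunctionInfo's optimized code map is scanned from the last entry
// down to index 1 for an entry whose native context matches. The builtin
// tries, in order: literals only, literals plus optimized code, the
// context-free kSharedCodeIndex slot, and finally the unoptimized code in
// the SharedFunctionInfo, before tail calling Runtime::kCompileLazy.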
1461 | |
1462 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) { | |
1463 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline); | |
1464 } | |
1465 | |
1466 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | |
1467 GenerateTailCallToReturnedCode(masm, | |
1468 Runtime::kCompileOptimized_NotConcurrent); | |
1469 } | |
1470 | |
1471 | |
1472 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | |
1473 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); | |
1474 } | |
1475 | |
1476 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { | |
1477 // ----------- S t a t e ------------- | |
1478 // -- a0 : argument count (preserved for callee) | |
1479 // -- a1 : new target (preserved for callee) | |
1480 // -- a3 : target function (preserved for callee) | |
1481 // ----------------------------------- | |
1482 Label failed; | |
1483 { | |
1484 FrameScope scope(masm, StackFrame::INTERNAL); | |
1485 // Push a copy of the target function and the new target. | |
1486 // Push function as parameter to the runtime call. | |
1487 __ SmiTag(a0); | |
1488 __ Push(a0, a1, a3, a1); | |
1489 | |
1490 // Copy arguments from caller (stdlib, foreign, heap). | |
1491 for (int i = 2; i >= 0; --i) { | |
1492 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + | |
1493 i * kPointerSize)); | |
1494 __ push(a3); | |
1495 } | |
1496 // Call the runtime; on success, unwind this frame and the parent frame. | |
1497 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); | |
1498 // A smi 0 is returned on failure, an object on success. | |
1499 __ JumpIfSmi(a0, &failed); | |
1500 scope.GenerateLeaveFrame(); | |
1501 __ Drop(4); | |
1502 __ Ret(); | |
1503 | |
1504 __ bind(&failed); | |
1505 // Restore target function and new target. | |
1506 __ Pop(a0, a1, a3); | |
1507 __ SmiUntag(a0); | |
1508 } | |
1509 // On failure, tail call back to regular js. | |
1510 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); | |
1511 } | |
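// Illustrative JS-level shape of what is being instantiated (the module
// name below is hypothetical):
//   function AsmModule(stdlib, foreign, heap) { "use asm"; ... }
//   var instance = AsmModule(stdlib, foreign, heap);
// The three caller arguments copied above are exactly (stdlib, foreign,
// heap); if instantiation fails, the function is compiled as ordinary
// JavaScript via Runtime::kCompileLazy.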
1512 | |
1513 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | |
1514 // For now, we are relying on the fact that make_code_young doesn't do any | |
1515 // garbage collection, which allows us to save/restore the registers without | |
1516 // worrying about which of them contain pointers. We also don't build an | |
1517 // internal frame to make the code faster, since we shouldn't have to do stack | |
1518 // crawls in MakeCodeYoung. This seems a bit fragile. | |
1519 | |
1520 // Set a0 to point to the head of the PlatformCodeAge sequence. | |
1521 __ Subu(a0, a0, | |
1522 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); | |
1523 | |
1524 // The following registers must be saved and restored when calling through to | |
1525 // the runtime: | |
1526 // a0 - contains return address (beginning of patch sequence) | |
1527 // a1 - isolate | |
1528 // a3 - new target | |
1529 RegList saved_regs = | |
1530 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit(); | |
1531 FrameScope scope(masm, StackFrame::MANUAL); | |
1532 __ MultiPush(saved_regs); | |
1533 __ PrepareCallCFunction(2, 0, a2); | |
1534 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
1535 __ CallCFunction( | |
1536 ExternalReference::get_make_code_young_function(masm->isolate()), 2); | |
1537 __ MultiPop(saved_regs); | |
1538 __ Jump(a0); | |
1539 } | |
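// Background note (informal): V8 "ages" rarely executed code by patching
// its prologue; making it young again undoes that patch. a0 points at the
// head of the code-age sequence so the C function can locate and repair
// the Code object, after which execution resumes at a0.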
1540 | |
1541 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ | |
1542 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ | |
1543 MacroAssembler* masm) { \ | |
1544 GenerateMakeCodeYoungAgainCommon(masm); \ | |
1545 } \ | |
1546 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ | |
1547 MacroAssembler* masm) { \ | |
1548 GenerateMakeCodeYoungAgainCommon(masm); \ | |
1549 } | |
1550 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) | |
1551 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR | |
1552 | |
1553 | |
1554 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { | |
1555 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact | |
1556 // that make_code_young doesn't do any garbage collection, which allows us to | |
1557 // save/restore the registers without worrying about which of them contain | |
1558 // pointers. | |
1559 | |
1560 // Set a0 to point to the head of the PlatformCodeAge sequence. | |
1561 __ Subu(a0, a0, | |
1562 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); | |
1563 | |
1564 // The following registers must be saved and restored when calling through to | |
1565 // the runtime: | |
1566 // a0 - contains return address (beginning of patch sequence) | |
1567 // a1 - isolate | |
1568 // a3 - new target | |
1569 RegList saved_regs = | |
1570 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit(); | |
1571 FrameScope scope(masm, StackFrame::MANUAL); | |
1572 __ MultiPush(saved_regs); | |
1573 __ PrepareCallCFunction(2, 0, a2); | |
1574 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
1575 __ CallCFunction( | |
1576 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), | |
1577 2); | |
1578 __ MultiPop(saved_regs); | |
1579 | |
1580 // Perform prologue operations usually performed by the young code stub. | |
1581 __ PushStandardFrame(a1); | |
1582 | |
1583 // Jump to point after the code-age stub. | |
1584 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength)); | |
1585 __ Jump(a0); | |
1586 } | |
1587 | |
1588 | |
1589 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | |
1590 GenerateMakeCodeYoungAgainCommon(masm); | |
1591 } | |
1592 | |
1593 | |
1594 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { | |
1595 Generate_MarkCodeAsExecutedOnce(masm); | |
1596 } | |
1597 | |
1598 | |
1599 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | |
1600 SaveFPRegsMode save_doubles) { | |
1601 { | |
1602 FrameScope scope(masm, StackFrame::INTERNAL); | |
1603 | |
1604 // Preserve registers across notification, this is important for compiled | |
1605 // stubs that tail call the runtime on deopts passing their parameters in | |
1606 // registers. | |
1607 __ MultiPush(kJSCallerSaved | kCalleeSaved); | |
1608 // Pass the function and deoptimization type to the runtime system. | |
1609 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); | |
1610 __ MultiPop(kJSCallerSaved | kCalleeSaved); | |
1611 } | |
1612 | |
1613 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore the state value. | |
1614 __ Jump(ra); // Jump to the miss handler. | |
1615 } | |
1616 | |
1617 | |
1618 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | |
1619 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | |
1620 } | |
1621 | |
1622 | |
1623 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | |
1624 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | |
1625 } | |
1626 | |
1627 | |
1628 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | |
1629 Deoptimizer::BailoutType type) { | |
1630 { | |
1631 FrameScope scope(masm, StackFrame::INTERNAL); | |
1632 // Pass the function and deoptimization type to the runtime system. | |
1633 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); | |
1634 __ push(a0); | |
1635 __ CallRuntime(Runtime::kNotifyDeoptimized); | |
1636 } | |
1637 | |
1638 // Get the full codegen state from the stack and untag it -> t2. | |
1639 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); | |
1640 __ SmiUntag(t2); | |
1641 // Switch on the state. | |
1642 Label with_tos_register, unknown_state; | |
1643 __ Branch(&with_tos_register, ne, t2, | |
1644 Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS))); | |
1645 __ Ret(USE_DELAY_SLOT); | |
1646 // Safe to fill the delay slot: Addu will emit exactly one instruction. | |
1647 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state. | |
1648 | |
1649 __ bind(&with_tos_register); | |
1650 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code()); | |
1651 __ lw(v0, MemOperand(sp, 1 * kPointerSize)); | |
1652 __ Branch(&unknown_state, ne, t2, | |
1653 Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER))); | |
1654 | |
1655 __ Ret(USE_DELAY_SLOT); | |
1656 // Safe to fill the delay slot: Addu will emit exactly one instruction. | |
1657 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state. | |
1658 | |
1659 __ bind(&unknown_state); | |
1660 __ stop("no cases left"); | |
1661 } | |
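// Informal summary: BailoutState::NO_REGISTERS means no value is live on
// top of the stack, so only the state word is dropped; TOS_REGISTER means
// the deoptimizer stored the top-of-stack value above the state word, so
// it is reloaded into v0 (the interpreter accumulator) before both slots
// are dropped.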
1662 | |
1663 | |
1664 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | |
1665 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | |
1666 } | |
1667 | |
1668 | |
1669 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { | |
1670 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | |
1671 } | |
1672 | |
1673 | |
1674 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | |
1675 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | |
1676 } | |
1677 | |
1678 | |
1679 // Clobbers {t2, t3, t4, t5}. | |
1680 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, | |
1681 Register function_template_info, | |
1682 Label* receiver_check_failed) { | |
1683 Register signature = t2; | |
1684 Register map = t3; | |
1685 Register constructor = t4; | |
1686 Register scratch = t5; | |
1687 | |
1688 // If there is no signature, return the holder. | |
1689 __ lw(signature, FieldMemOperand(function_template_info, | |
1690 FunctionTemplateInfo::kSignatureOffset)); | |
1691 Label receiver_check_passed; | |
1692 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, | |
1693 &receiver_check_passed); | |
1694 | |
1695 // Walk the prototype chain. | |
1696 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
1697 Label prototype_loop_start; | |
1698 __ bind(&prototype_loop_start); | |
1699 | |
1700 // Get the constructor, if any. | |
1701 __ GetMapConstructor(constructor, map, scratch, scratch); | |
1702 Label next_prototype; | |
1703 __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE)); | |
1704 Register type = constructor; | |
1705 __ lw(type, | |
1706 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset)); | |
1707 __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); | |
1708 | |
1709 // Loop through the chain of inheriting function templates. | |
1710 Label function_template_loop; | |
1711 __ bind(&function_template_loop); | |
1712 | |
1713 // If the signatures match, we have a compatible receiver. | |
1714 __ Branch(&receiver_check_passed, eq, signature, Operand(type), | |
1715 USE_DELAY_SLOT); | |
1716 | |
1717 // If the current type is not a FunctionTemplateInfo, load the next prototype | |
1718 // in the chain. | |
1719 __ JumpIfSmi(type, &next_prototype); | |
1720 __ GetObjectType(type, scratch, scratch); | |
1721 __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE)); | |
1722 | |
1723 // Otherwise load the parent function template and iterate. | |
1724 __ lw(type, | |
1725 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset)); | |
1726 __ Branch(&function_template_loop); | |
1727 | |
1728 // Load the next prototype and iterate. | |
1729 __ bind(&next_prototype); | |
1730 __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset)); | |
1731 __ DecodeField<Map::HasHiddenPrototype>(scratch); | |
1732 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg)); | |
1733 __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); | |
1734 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
1735 | |
1736 __ Branch(&prototype_loop_start); | |
1737 | |
1738 __ bind(&receiver_check_passed); | |
1739 } | |
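// Rough pseudo-code for the walk above (illustrative only):
//   for (map = receiver.map; ; receiver = map.prototype, map = receiver.map) {
//     type = map.constructor.shared.function_data;
//     while (type is FunctionTemplateInfo) {
//       if (type == signature) return;            // compatible receiver
//       type = type.parent_template;
//     }
//     if (!map.has_hidden_prototype) goto failed;  // end of hidden chain
//   }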
1740 | |
1741 | |
1742 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { | |
1743 // ----------- S t a t e ------------- | |
1744 // -- a0 : number of arguments excluding receiver | |
1745 // -- a1 : callee | |
1746 // -- ra : return address | |
1747 // -- sp[0] : last argument | |
1748 // -- ... | |
1749 // -- sp[4 * (argc - 1)] : first argument | |
1750 // -- sp[4 * argc] : receiver | |
1751 // ----------------------------------- | |
1752 | |
1753 // Load the FunctionTemplateInfo. | |
1754 __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1755 __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset)); | |
1756 | |
1757 // Do the compatible receiver check. | |
1758 Label receiver_check_failed; | |
1759 __ Lsa(t8, sp, a0, kPointerSizeLog2); | |
1760 __ lw(t0, MemOperand(t8)); | |
1761 CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed); | |
1762 | |
1763 // Get the callback offset from the FunctionTemplateInfo, and jump to the | |
1764 // beginning of the code. | |
1765 __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset)); | |
1766 __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset)); | |
1767 __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1768 __ Jump(t2); | |
1769 | |
1770 // Compatible receiver check failed: throw an Illegal Invocation exception. | |
1771 __ bind(&receiver_check_failed); | |
1772 // Drop the arguments (including the receiver). | |
1773 __ Addu(t8, t8, Operand(kPointerSize)); | |
1774 __ addu(sp, t8, zero_reg); | |
1775 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); | |
1776 } | |
1777 | |
1778 | |
1779 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | |
1780 // Lookup the function in the JavaScript frame. | |
1781 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1782 { | |
1783 FrameScope scope(masm, StackFrame::INTERNAL); | |
1784 // Pass function as argument. | |
1785 __ push(a0); | |
1786 __ CallRuntime(Runtime::kCompileForOnStackReplacement); | |
1787 } | |
1788 | |
1789 // If the code object is null, just return to the unoptimized code. | |
1790 __ Ret(eq, v0, Operand(Smi::FromInt(0))); | |
1791 | |
1792 // Load deoptimization data from the code object. | |
1793 // <deopt_data> = <code>[#deoptimization_data_offset] | |
1794 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); | |
1795 | |
1796 // Load the OSR entrypoint offset from the deoptimization data. | |
1797 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] | |
1798 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt( | |
1799 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag)); | |
1800 __ SmiUntag(a1); | |
1801 | |
1802 // Compute the target address = code_obj + header_size + osr_offset | |
1803 // <entry_addr> = <code_obj> + #header_size + <osr_offset> | |
1804 __ addu(v0, v0, a1); | |
1805 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); | |
1806 | |
1807 // And "return" to the OSR entry point of the function. | |
1808 __ Ret(); | |
1809 } | |
1810 | |
1811 | |
1812 // static | |
1813 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, | |
1814 int field_index) { | |
1815 // ----------- S t a t e ------------- | |
1816 // -- a0 : number of arguments | |
1817 // -- a1 : function | |
1818 // -- cp : context | |
1819 // -- sp[0] : receiver | |
1820 // ----------------------------------- | |
1821 | |
1822 // 1. Pop receiver into a0 and check that it's actually a JSDate object. | |
1823 Label receiver_not_date; | |
1824 { | |
1825 __ Pop(a0); | |
1826 __ JumpIfSmi(a0, &receiver_not_date); | |
1827 __ GetObjectType(a0, t0, t0); | |
1828 __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE)); | |
1829 } | |
1830 | |
1831 // 2. Load the specified date field, falling back to the runtime as necessary. | |
1832 if (field_index == JSDate::kDateValue) { | |
1833 __ Ret(USE_DELAY_SLOT); | |
1834 __ lw(v0, FieldMemOperand(a0, JSDate::kValueOffset)); // In delay slot. | |
1835 } else { | |
1836 if (field_index < JSDate::kFirstUncachedField) { | |
1837 Label stamp_mismatch; | |
1838 __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate()))); | |
1839 __ lw(a1, MemOperand(a1)); | |
1840 __ lw(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset)); | |
1841 __ Branch(&stamp_mismatch, ne, t0, Operand(a1)); | |
1842 __ Ret(USE_DELAY_SLOT); | |
1843 __ lw(v0, FieldMemOperand( | |
1844 a0, JSDate::kValueOffset + | |
1845 field_index * kPointerSize)); // In delay slot. | |
1846 __ bind(&stamp_mismatch); | |
1847 } | |
1848 FrameScope scope(masm, StackFrame::INTERNAL); | |
1849 __ PrepareCallCFunction(2, t0); | |
1850 __ li(a1, Operand(Smi::FromInt(field_index))); | |
1851 __ CallCFunction( | |
1852 ExternalReference::get_date_field_function(masm->isolate()), 2); | |
1853 } | |
1854 __ Ret(); | |
1855 | |
1856 // 3. Raise a TypeError if the receiver is not a date. | |
1857 __ bind(&receiver_not_date); | |
1858 { | |
1859 FrameScope scope(masm, StackFrame::MANUAL); | |
1860 __ Push(a0); | |
1861 __ Move(a0, Smi::FromInt(0)); | |
1862 __ EnterBuiltinFrame(cp, a1, a0); | |
1863 __ CallRuntime(Runtime::kThrowNotDateError); | |
1864 } | |
1865 } | |
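// Informal behaviour note: cached fields (those below kFirstUncachedField,
// e.g. year or month) are only read directly from the JSDate object while
// its cache stamp matches the isolate-wide date_cache_stamp; on a mismatch
// the value is recomputed through the C++ date cache.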
1866 | |
1867 // static | |
1868 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { | |
1869 // ----------- S t a t e ------------- | |
1870 // -- a0 : argc | |
1871 // -- sp[0] : argArray | |
1872 // -- sp[4] : thisArg | |
1873 // -- sp[8] : receiver | |
1874 // ----------------------------------- | |
1875 | |
1876 // 1. Load receiver into a1, argArray into a0 (if present), remove all | |
1877 // arguments from the stack (including the receiver), and push thisArg (if | |
1878 // present) instead. | |
1879 { | |
1880 Label no_arg; | |
1881 Register scratch = t0; | |
1882 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
1883 __ mov(a3, a2); | |
1884 // Lsa() cannot be used here because the scratch value is used later. | |
1885 __ sll(scratch, a0, kPointerSizeLog2); | |
1886 __ Addu(a0, sp, Operand(scratch)); | |
1887 __ lw(a1, MemOperand(a0)); // receiver | |
1888 __ Subu(a0, a0, Operand(kPointerSize)); | |
1889 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
1890 __ lw(a2, MemOperand(a0)); // thisArg | |
1891 __ Subu(a0, a0, Operand(kPointerSize)); | |
1892 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
1893 __ lw(a3, MemOperand(a0)); // argArray | |
1894 __ bind(&no_arg); | |
1895 __ Addu(sp, sp, Operand(scratch)); | |
1896 __ sw(a2, MemOperand(sp)); | |
1897 __ mov(a0, a3); | |
1898 } | |
1899 | |
1900 // ----------- S t a t e ------------- | |
1901 // -- a0 : argArray | |
1902 // -- a1 : receiver | |
1903 // -- sp[0] : thisArg | |
1904 // ----------------------------------- | |
1905 | |
1906 // 2. Make sure the receiver is actually callable. | |
1907 Label receiver_not_callable; | |
1908 __ JumpIfSmi(a1, &receiver_not_callable); | |
1909 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); | |
1910 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); | |
1911 __ And(t0, t0, Operand(1 << Map::kIsCallable)); | |
1912 __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg)); | |
1913 | |
1914 // 3. Tail call with no arguments if argArray is null or undefined. | |
1915 Label no_arguments; | |
1916 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments); | |
1917 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments); | |
1918 | |
1919 // 4a. Apply the receiver to the given argArray (passing undefined for | |
1920 // new.target). | |
1921 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); | |
1922 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
1923 | |
1924 // 4b. The argArray is either null or undefined, so we tail call without any | |
1925 // arguments to the receiver. | |
1926 __ bind(&no_arguments); | |
1927 { | |
1928 __ mov(a0, zero_reg); | |
1929 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
1930 } | |
1931 | |
1932 // 4c. The receiver is not callable, so throw an appropriate TypeError. | |
1933 __ bind(&receiver_not_callable); | |
1934 { | |
1935 __ sw(a1, MemOperand(sp)); | |
1936 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | |
1937 } | |
1938 } | |
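// JS-level sketch of the builtin above (illustrative, not the actual
// implementation):
//   Function.prototype.apply = function (thisArg, argArray) {
//     if (argArray === null || argArray === undefined)
//       return Call(this, thisArg);                       // case 4b
//     return Call(this, thisArg, ...argArray);            // case 4a
//   };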
1939 | |
1940 | |
1941 // static | |
1942 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { | |
1943 // 1. Make sure we have at least one argument. | |
1944 // a0: actual number of arguments | |
1945 { | |
1946 Label done; | |
1947 __ Branch(&done, ne, a0, Operand(zero_reg)); | |
1948 __ PushRoot(Heap::kUndefinedValueRootIndex); | |
1949 __ Addu(a0, a0, Operand(1)); | |
1950 __ bind(&done); | |
1951 } | |
1952 | |
1953 // 2. Get the function to call (passed as receiver) from the stack. | |
1954 // a0: actual number of arguments | |
1955 __ Lsa(at, sp, a0, kPointerSizeLog2); | |
1956 __ lw(a1, MemOperand(at)); | |
1957 | |
1958 // 3. Shift arguments and return address one slot down on the stack | |
1959 // (overwriting the original receiver). Adjust argument count to make | |
1960 // the original first argument the new receiver. | |
1961 // a0: actual number of arguments | |
1962 // a1: function | |
1963 { | |
1964 Label loop; | |
1965 // Calculate the copy start address (destination). Copy end address is sp. | |
1966 __ Lsa(a2, sp, a0, kPointerSizeLog2); | |
1967 | |
1968 __ bind(&loop); | |
1969 __ lw(at, MemOperand(a2, -kPointerSize)); | |
1970 __ sw(at, MemOperand(a2)); | |
1971 __ Subu(a2, a2, Operand(kPointerSize)); | |
1972 __ Branch(&loop, ne, a2, Operand(sp)); | |
1973 // Adjust the actual number of arguments and remove the top element | |
1974 // (which is a copy of the last argument). | |
1975 __ Subu(a0, a0, Operand(1)); | |
1976 __ Pop(); | |
1977 } | |
1978 | |
1979 // 4. Call the callable. | |
1980 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
1981 } | |
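// JS-level sketch (illustrative): the shift loop above turns
//   f.call(thisArg, a, b)
// into a call of f with receiver thisArg and arguments (a, b), roughly
//   Function.prototype.call = function (thisArg, ...args) {
//     return Call(this, thisArg, ...args);
//   };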
1982 | |
1983 | |
1984 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | |
1985 // ----------- S t a t e ------------- | |
1986 // -- a0 : argc | |
1987 // -- sp[0] : argumentsList | |
1988 // -- sp[4] : thisArgument | |
1989 // -- sp[8] : target | |
1990 // -- sp[12] : receiver | |
1991 // ----------------------------------- | |
1992 | |
1993 // 1. Load target into a1 (if present), argumentsList into a0 (if present), | |
1994 // remove all arguments from the stack (including the receiver), and push | |
1995 // thisArgument (if present) instead. | |
1996 { | |
1997 Label no_arg; | |
1998 Register scratch = t0; | |
1999 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | |
2000 __ mov(a2, a1); | |
2001 __ mov(a3, a1); | |
2002 __ sll(scratch, a0, kPointerSizeLog2); | |
2003 __ mov(a0, scratch); | |
2004 __ Subu(a0, a0, Operand(kPointerSize)); | |
2005 __ Branch(&no_arg, lt, a0, Operand(zero_reg)); | |
2006 __ Addu(a0, sp, Operand(a0)); | |
2007 __ lw(a1, MemOperand(a0)); // target | |
2008 __ Subu(a0, a0, Operand(kPointerSize)); | |
2009 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2010 __ lw(a2, MemOperand(a0)); // thisArgument | |
2011 __ Subu(a0, a0, Operand(kPointerSize)); | |
2012 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2013 __ lw(a3, MemOperand(a0)); // argumentsList | |
2014 __ bind(&no_arg); | |
2015 __ Addu(sp, sp, Operand(scratch)); | |
2016 __ sw(a2, MemOperand(sp)); | |
2017 __ mov(a0, a3); | |
2018 } | |
2019 | |
2020 // ----------- S t a t e ------------- | |
2021 // -- a0 : argumentsList | |
2022 // -- a1 : target | |
2023 // -- sp[0] : thisArgument | |
2024 // ----------------------------------- | |
2025 | |
2026 // 2. Make sure the target is actually callable. | |
2027 Label target_not_callable; | |
2028 __ JumpIfSmi(a1, &target_not_callable); | |
2029 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); | |
2030 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); | |
2031 __ And(t0, t0, Operand(1 << Map::kIsCallable)); | |
2032 __ Branch(&target_not_callable, eq, t0, Operand(zero_reg)); | |
2033 | |
2034 // 3a. Apply the target to the given argumentsList (passing undefined for | |
2035 // new.target). | |
2036 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); | |
2037 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
2038 | |
2039 // 3b. The target is not callable, so throw an appropriate TypeError. | |
2040 __ bind(&target_not_callable); | |
2041 { | |
2042 __ sw(a1, MemOperand(sp)); | |
2043 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | |
2044 } | |
2045 } | |
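// JS-level sketch (illustrative):
//   Reflect.apply = function (target, thisArgument, argumentsList) {
//     if (!IsCallable(target)) throw new TypeError();
//     return Call(target, thisArgument, ...argumentsList);
//   };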
2046 | |
2047 | |
2048 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | |
2049 // ----------- S t a t e ------------- | |
2050 // -- a0 : argc | |
2051 // -- sp[0] : new.target (optional) | |
2052 // -- sp[4] : argumentsList | |
2053 // -- sp[8] : target | |
2054 // -- sp[12] : receiver | |
2055 // ----------------------------------- | |
2056 | |
2057 // 1. Load target into a1 (if present), argumentsList into a0 (if present), | |
2058 // new.target into a3 (if present, otherwise use target), remove all | |
2059 // arguments from the stack (including the receiver), and push thisArgument | |
2060 // (if present) instead. | |
2061 { | |
2062 Label no_arg; | |
2063 Register scratch = t0; | |
2064 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | |
2065 __ mov(a2, a1); | |
2066 // Lsa() cannot be used here because the scratch value is used later. | |
2067 __ sll(scratch, a0, kPointerSizeLog2); | |
2068 __ Addu(a0, sp, Operand(scratch)); | |
2069 __ sw(a2, MemOperand(a0)); // receiver | |
2070 __ Subu(a0, a0, Operand(kPointerSize)); | |
2071 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2072 __ lw(a1, MemOperand(a0)); // target | |
2073 __ mov(a3, a1); // new.target defaults to target | |
2074 __ Subu(a0, a0, Operand(kPointerSize)); | |
2075 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2076 __ lw(a2, MemOperand(a0)); // argumentsList | |
2077 __ Subu(a0, a0, Operand(kPointerSize)); | |
2078 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2079 __ lw(a3, MemOperand(a0)); // new.target | |
2080 __ bind(&no_arg); | |
2081 __ Addu(sp, sp, Operand(scratch)); | |
2082 __ mov(a0, a2); | |
2083 } | |
2084 | |
2085 // ----------- S t a t e ------------- | |
2086 // -- a0 : argumentsList | |
2087 // -- a3 : new.target | |
2088 // -- a1 : target | |
2089 // -- sp[0] : receiver (undefined) | |
2090 // ----------------------------------- | |
2091 | |
2092 // 2. Make sure the target is actually a constructor. | |
2093 Label target_not_constructor; | |
2094 __ JumpIfSmi(a1, &target_not_constructor); | |
2095 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); | |
2096 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); | |
2097 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); | |
2098 __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg)); | |
2099 | |
2100 // 3. Make sure the new.target is actually a constructor. | |
2101 Label new_target_not_constructor; | |
2102 __ JumpIfSmi(a3, &new_target_not_constructor); | |
2103 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); | |
2104 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); | |
2105 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); | |
2106 __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg)); | |
2107 | |
2108 // 4a. Construct the target with the given new.target and argumentsList. | |
2109 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
2110 | |
2111 // 4b. The target is not a constructor, so throw an appropriate TypeError. | |
2112 __ bind(&target_not_constructor); | |
2113 { | |
2114 __ sw(a1, MemOperand(sp)); | |
2115 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | |
2116 } | |
2117 | |
2118 // 4c. The new.target is not a constructor, so throw an appropriate | |
TypeError. | |
2119 __ bind(&new_target_not_constructor); | |
2120 { | |
2121 __ sw(a3, MemOperand(sp)); | |
2122 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | |
2123 } | |
2124 } | |
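// JS-level sketch (illustrative):
//   Reflect.construct = function (target, argumentsList, newTarget = target) {
//     if (!IsConstructor(target) || !IsConstructor(newTarget))
//       throw new TypeError();
//     return Construct(target, argumentsList, newTarget);
//   };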
2125 | |
2126 | |
2127 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | |
2128 Label* stack_overflow) { | |
2129 // ----------- S t a t e ------------- | |
2130 // -- a0 : actual number of arguments | |
2131 // -- a1 : function (passed through to callee) | |
2132 // -- a2 : expected number of arguments | |
2133 // -- a3 : new target (passed through to callee) | |
2134 // ----------------------------------- | |
2135 // Check the stack for overflow. We are not trying to catch | |
2136 // interruptions (e.g. debug break and preemption) here, so the "real stack | |
2137 // limit" is checked. | |
2138 __ LoadRoot(t1, Heap::kRealStackLimitRootIndex); | |
2139 // Make t1 the space we have left. The stack might already be overflowed | |
2140 // here which will cause t1 to become negative. | |
2141 __ subu(t1, sp, t1); | |
2142 // Check if the arguments will overflow the stack. | |
2143 __ sll(at, a2, kPointerSizeLog2); | |
2144 // Signed comparison. | |
2145 __ Branch(stack_overflow, le, t1, Operand(at)); | |
2146 } | |
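// Informal arithmetic of the check above: the branch to stack_overflow is
// taken when (sp - real_stack_limit) <= expected_args * kPointerSize,
// i.e. when there is not enough headroom below sp for the arguments the
// adaptor is about to push.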
2147 | |
2148 | |
2149 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | |
2150 __ sll(a0, a0, kSmiTagSize); | |
2151 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
2152 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); | |
2153 __ Addu(fp, sp, | |
2154 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); | |
2155 } | |
2156 | |
2157 | |
2158 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | |
2159 // ----------- S t a t e ------------- | |
2160 // -- v0 : result being passed through | |
2161 // ----------------------------------- | |
2162 // Get the number of arguments passed (as a smi), tear down the frame and | |
2163 // then drop the parameters from the stack. | |
2164 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | |
2165 kPointerSize))); | |
2166 __ mov(sp, fp); | |
2167 __ MultiPop(fp.bit() | ra.bit()); | |
2168 __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize); | |
2169 // Adjust for the receiver. | |
2170 __ Addu(sp, sp, Operand(kPointerSize)); | |
2171 } | |
2172 | |
2173 | |
2174 // static | |
2175 void Builtins::Generate_Apply(MacroAssembler* masm) { | |
2176 // ----------- S t a t e ------------- | |
2177 // -- a0 : argumentsList | |
2178 // -- a1 : target | |
2179 // -- a3 : new.target (checked to be constructor or undefined) | |
2180 // -- sp[0] : thisArgument | |
2181 // ----------------------------------- | |
2182 | |
2183 // Create the list of arguments from the array-like argumentsList. | |
2184 { | |
2185 Label create_arguments, create_array, create_runtime, done_create; | |
2186 __ JumpIfSmi(a0, &create_runtime); | |
2187 | |
2188 // Load the map of argumentsList into a2. | |
2189 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); | |
2190 | |
2191 // Load native context into t0. | |
2192 __ lw(t0, NativeContextMemOperand()); | |
2193 | |
2194 // Check if argumentsList is an (unmodified) arguments object. | |
2195 __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); | |
2196 __ Branch(&create_arguments, eq, a2, Operand(at)); | |
2197 __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX)); | |
2198 __ Branch(&create_arguments, eq, a2, Operand(at)); | |
2199 | |
2200 // Check if argumentsList is a fast JSArray. | |
2201 // a2 already holds the map, so read the instance type directly from it. | |
2202 __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset)); | |
2203 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); | |
2204 | |
2205 // Ask the runtime to create the list (actually a FixedArray). | |
2206 __ bind(&create_runtime); | |
2207 { | |
2208 FrameScope scope(masm, StackFrame::INTERNAL); | |
2209 __ Push(a1, a3, a0); | |
2210 __ CallRuntime(Runtime::kCreateListFromArrayLike); | |
2211 __ mov(a0, v0); | |
2212 __ Pop(a1, a3); | |
2213 __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | |
2214 __ SmiUntag(a2); | |
2215 } | |
2216 __ Branch(&done_create); | |
2217 | |
2218 // Try to create the list from an arguments object. | |
2219 __ bind(&create_arguments); | |
2220 __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset)); | |
2221 __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset)); | |
2222 __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset)); | |
2223 __ Branch(&create_runtime, ne, a2, Operand(at)); | |
2224 __ SmiUntag(a2); | |
2225 __ mov(a0, t0); | |
2226 __ Branch(&done_create); | |
2227 | |
2228 // Try to create the list from a JSArray object. | |
2229 __ bind(&create_array); | |
2230 __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset)); | |
2231 __ DecodeField<Map::ElementsKindBits>(a2); | |
2232 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
2233 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
2234 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
2235 __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS)); | |
2236 __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS)); | |
2237 __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset)); | |
2238 __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); | |
2239 __ SmiUntag(a2); | |
2240 | |
2241 __ bind(&done_create); | |
2242 } | |
2243 | |
2244 // Check for stack overflow. | |
2245 { | |
2246 // Check the stack for overflow. We are not trying to catch interruptions | |
2247 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2248 Label done; | |
2249 __ LoadRoot(t0, Heap::kRealStackLimitRootIndex); | |
2250 // Make t0 the space we have left. The stack might already be overflowed | |
2251 // here, which will cause t0 to become negative. | |
2252 __ Subu(t0, sp, t0); | |
2253 // Check if the arguments will overflow the stack. | |
2254 __ sll(at, a2, kPointerSizeLog2); | |
2255 __ Branch(&done, gt, t0, Operand(at)); // Signed comparison. | |
2256 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
2257 __ bind(&done); | |
2258 } | |
2259 | |
2260 // ----------- S t a t e ------------- | |
2261 // -- a1 : target | |
2262 // -- a0 : args (a FixedArray built from argumentsList) | |
2263 // -- a2 : len (number of elements to push from args) | |
2264 // -- a3 : new.target (checked to be constructor or undefined) | |
2265 // -- sp[0] : thisArgument | |
2266 // ----------------------------------- | |
2267 | |
2268 // Push arguments onto the stack (thisArgument is already on the stack). | |
2269 { | |
2270 __ mov(t0, zero_reg); | |
2271 Label done, loop; | |
2272 __ bind(&loop); | |
2273 __ Branch(&done, eq, t0, Operand(a2)); | |
2274 __ Lsa(at, a0, t0, kPointerSizeLog2); | |
2275 __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize)); | |
2276 __ Push(at); | |
2277 __ Addu(t0, t0, Operand(1)); | |
2278 __ Branch(&loop); | |
2279 __ bind(&done); | |
2280 __ Move(a0, t0); | |
2281 } | |
2282 | |
2283 // Dispatch to Call or Construct depending on whether new.target is undefined. | |
2284 { | |
2285 Label construct; | |
2286 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
2287 __ Branch(&construct, ne, a3, Operand(at)); | |
2288 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
2289 __ bind(&construct); | |
2290 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | |
2291 } | |
2292 } | |
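// Informal summary: argumentsList is first flattened into a FixedArray
// (directly for unmodified arguments objects and fast JSArrays, otherwise
// via Runtime::kCreateListFromArrayLike), its elements are pushed onto the
// stack, and control is dispatched to Call or Construct depending on
// whether new.target is undefined.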
2293 | |
2294 namespace { | |
2295 | |
2296 // Drops the top JavaScript frame and an arguments adaptor frame below it | |
2297 // (if present), preserving all the arguments prepared for the current call. | |
2298 // Does nothing if the debugger is currently active. | |
2299 // ES6 14.6.3. PrepareForTailCall | |
2300 // | |
2301 // Stack structure for the function g() tail calling f(): | |
2302 // | |
2303 // ------- Caller frame: ------- | |
2304 // | ... | |
2305 // | g()'s arg M | |
2306 // | ... | |
2307 // | g()'s arg 1 | |
2308 // | g()'s receiver arg | |
2309 // | g()'s caller pc | |
2310 // ------- g()'s frame: ------- | |
2311 // | g()'s caller fp <- fp | |
2312 // | g()'s context | |
2313 // | function pointer: g | |
2314 // | ------------------------- | |
2315 // | ... | |
2316 // | ... | |
2317 // | f()'s arg N | |
2318 // | ... | |
2319 // | f()'s arg 1 | |
2320 // | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!) | |
2321 // ---------------------- | |
2322 // | |
2323 void PrepareForTailCall(MacroAssembler* masm, Register args_reg, | |
2324 Register scratch1, Register scratch2, | |
2325 Register scratch3) { | |
2326 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | |
2327 Comment cmnt(masm, "[ PrepareForTailCall"); | |
2328 | |
2329 // Prepare for tail call only if ES2015 tail call elimination is enabled. | |
2330 Label done; | |
2331 ExternalReference is_tail_call_elimination_enabled = | |
2332 ExternalReference::is_tail_call_elimination_enabled_address( | |
2333 masm->isolate()); | |
2334 __ li(at, Operand(is_tail_call_elimination_enabled)); | |
2335 __ lb(scratch1, MemOperand(at)); | |
2336 __ Branch(&done, eq, scratch1, Operand(zero_reg)); | |
2337 | |
2338 // Drop possible interpreter handler/stub frame. | |
2339 { | |
2340 Label no_interpreter_frame; | |
2341 __ lw(scratch3, | |
2342 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset)); | |
2343 __ Branch(&no_interpreter_frame, ne, scratch3, | |
2344 Operand(Smi::FromInt(StackFrame::STUB))); | |
2345 __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
2346 __ bind(&no_interpreter_frame); | |
2347 } | |
2348 | |
2349 // Check if next frame is an arguments adaptor frame. | |
2350 Register caller_args_count_reg = scratch1; | |
2351 Label no_arguments_adaptor, formal_parameter_count_loaded; | |
2352 __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
2353 __ lw(scratch3, | |
2354 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset)); | |
2355 __ Branch(&no_arguments_adaptor, ne, scratch3, | |
2356 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
2357 | |
2358 // Drop current frame and load arguments count from arguments adaptor frame. | |
2359 __ mov(fp, scratch2); | |
2360 __ lw(caller_args_count_reg, | |
2361 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
2362 __ SmiUntag(caller_args_count_reg); | |
2363 __ Branch(&formal_parameter_count_loaded); | |
2364 | |
2365 __ bind(&no_arguments_adaptor); | |
2366 // Load the caller's formal parameter count. | |
2367 __ lw(scratch1, | |
2368 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); | |
2369 __ lw(scratch1, | |
2370 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); | |
2371 __ lw(caller_args_count_reg, | |
2372 FieldMemOperand(scratch1, | |
2373 SharedFunctionInfo::kFormalParameterCountOffset)); | |
2374 __ SmiUntag(caller_args_count_reg); | |
2375 | |
2376 __ bind(&formal_parameter_count_loaded); | |
2377 | |
2378 ParameterCount callee_args_count(args_reg); | |
2379 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, | |
2380 scratch3); | |
2381 __ bind(&done); | |
2382 } | |
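// Informal summary: when ES2015 tail call elimination is enabled, the
// caller's frame (and any arguments adaptor frame beneath it) is torn down
// and the freshly prepared callee arguments are slid down over it, so the
// tail call reuses the caller's stack space instead of growing the stack.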
2383 } // namespace | |
2384 | |
2385 // static | |
2386 void Builtins::Generate_CallFunction(MacroAssembler* masm, | |
2387 ConvertReceiverMode mode, | |
2388 TailCallMode tail_call_mode) { | |
2389 // ----------- S t a t e ------------- | |
2390 // -- a0 : the number of arguments (not including the receiver) | |
2391 // -- a1 : the function to call (checked to be a JSFunction) | |
2392 // ----------------------------------- | |
2393 __ AssertFunction(a1); | |
2394 | |
2395 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | |
2396 // Check that the function is not a "classConstructor". | |
2397 Label class_constructor; | |
2398 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
2399 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset)); | |
2400 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); | |
2401 __ Branch(&class_constructor, ne, at, Operand(zero_reg)); | |
2402 | |
2403 // Enter the context of the function; ToObject has to run in the function | |
2404 // context, and we also need to take the global proxy from the function | |
2405 // context in case of conversion. | |
2406 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == | |
2407 SharedFunctionInfo::kStrictModeByteOffset); | |
2408 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
2409 // We need to convert the receiver for non-native sloppy mode functions. | |
2410 Label done_convert; | |
2411 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); | |
2412 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | | |
2413 (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); | |
2414 __ Branch(&done_convert, ne, at, Operand(zero_reg)); | |
2415 { | |
2416 // ----------- S t a t e ------------- | |
2417 // -- a0 : the number of arguments (not including the receiver) | |
2418 // -- a1 : the function to call (checked to be a JSFunction) | |
2419 // -- a2 : the shared function info. | |
2420 // -- cp : the function context. | |
2421 // ----------------------------------- | |
2422 | |
2423 if (mode == ConvertReceiverMode::kNullOrUndefined) { | |
2424 // Patch receiver to global proxy. | |
2425 __ LoadGlobalProxy(a3); | |
2426 } else { | |
2427 Label convert_to_object, convert_receiver; | |
2428 __ Lsa(at, sp, a0, kPointerSizeLog2); | |
2429 __ lw(a3, MemOperand(at)); | |
2430 __ JumpIfSmi(a3, &convert_to_object); | |
2431 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | |
2432 __ GetObjectType(a3, t0, t0); | |
2433 __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE)); | |
2434 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { | |
2435 Label convert_global_proxy; | |
2436 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, | |
2437 &convert_global_proxy); | |
2438 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); | |
2439 __ bind(&convert_global_proxy); | |
2440 { | |
2441 // Patch receiver to global proxy. | |
2442 __ LoadGlobalProxy(a3); | |
2443 } | |
2444 __ Branch(&convert_receiver); | |
2445 } | |
2446 __ bind(&convert_to_object); | |
2447 { | |
2448 // Convert receiver using ToObject. | |
2449 // TODO(bmeurer): Inline the allocation here to avoid building the frame | |
2450 // in the fast case? (fall back to AllocateInNewSpace?) | |
2451 FrameScope scope(masm, StackFrame::INTERNAL); | |
2452 __ sll(a0, a0, kSmiTagSize); // Smi tagged. | |
2453 __ Push(a0, a1); | |
2454 __ mov(a0, a3); | |
2455 ToObjectStub stub(masm->isolate()); | |
2456 __ CallStub(&stub); | |
2457 __ mov(a3, v0); | |
2458 __ Pop(a0, a1); | |
2459 __ sra(a0, a0, kSmiTagSize); // Un-tag. | |
2460 } | |
2461 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
2462 __ bind(&convert_receiver); | |
2463 } | |
2464 __ Lsa(at, sp, a0, kPointerSizeLog2); | |
2465 __ sw(a3, MemOperand(at)); | |
2466 } | |
2467 __ bind(&done_convert); | |
2468 | |
2469 // ----------- S t a t e ------------- | |
2470 // -- a0 : the number of arguments (not including the receiver) | |
2471 // -- a1 : the function to call (checked to be a JSFunction) | |
2472 // -- a2 : the shared function info. | |
2473 // -- cp : the function context. | |
2474 // ----------------------------------- | |
2475 | |
2476 if (tail_call_mode == TailCallMode::kAllow) { | |
2477 PrepareForTailCall(masm, a0, t0, t1, t2); | |
2478 } | |
2479 | |
2480 __ lw(a2, | |
2481 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); | |
2482 __ sra(a2, a2, kSmiTagSize); // Un-tag. | |
2483 ParameterCount actual(a0); | |
2484 ParameterCount expected(a2); | |
2485 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, | |
2486 CheckDebugStepCallWrapper()); | |
2487 | |
2488 // The function is a "classConstructor", so we need to raise an exception. | |
2489 __ bind(&class_constructor); | |
2490 { | |
2491 FrameScope frame(masm, StackFrame::INTERNAL); | |
2492 __ Push(a1); | |
2493 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); | |
2494 } | |
2495 } | |
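// Informal summary of the receiver conversion above (cf. ES6
// OrdinaryCallBindThis):
//   - strict mode or native functions: the receiver is passed unchanged;
//   - sloppy functions with a null/undefined receiver: the global proxy of
//     the function's native context is substituted;
//   - sloppy functions with a primitive receiver: ToObject(receiver) is
//     substituted.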
2496 | |
2497 | |
2498 // static | |
2499 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, | |
2500 TailCallMode tail_call_mode) { | |
2501 // ----------- S t a t e ------------- | |
2502 // -- a0 : the number of arguments (not including the receiver) | |
2503 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2504 // ----------------------------------- | |
2505 __ AssertBoundFunction(a1); | |
2506 | |
2507 if (tail_call_mode == TailCallMode::kAllow) { | |
2508 PrepareForTailCall(masm, a0, t0, t1, t2); | |
2509 } | |
2510 | |
2511 // Patch the receiver to [[BoundThis]]. | |
2512 { | |
2513 __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset)); | |
2514 __ Lsa(t0, sp, a0, kPointerSizeLog2); | |
2515 __ sw(at, MemOperand(t0)); | |
2516 } | |
2517 | |
2518 // Load [[BoundArguments]] into a2 and length of that into t0. | |
2519 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); | |
2520 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2521 __ SmiUntag(t0); | |
2522 | |
2523 // ----------- S t a t e ------------- | |
2524 // -- a0 : the number of arguments (not including the receiver) | |
2525 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2526 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) | |
2527 // -- t0 : the number of [[BoundArguments]] | |
2528 // ----------------------------------- | |
2529 | |
2530 // Reserve stack space for the [[BoundArguments]]. | |
2531 { | |
2532 Label done; | |
2533 __ sll(t1, t0, kPointerSizeLog2); | |
2534 __ Subu(sp, sp, Operand(t1)); | |
2535 // Check the stack for overflow. We are not trying to catch interruptions | |
2536 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2537 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | |
2538 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | |
2539 // Restore the stack pointer. | |
2540 __ Addu(sp, sp, Operand(t1)); | |
2541 { | |
2542 FrameScope scope(masm, StackFrame::MANUAL); | |
2543 __ EnterFrame(StackFrame::INTERNAL); | |
2544 __ CallRuntime(Runtime::kThrowStackOverflow); | |
2545 } | |
2546 __ bind(&done); | |
2547 } | |
2548 | |
2549 // Relocate arguments down the stack. | |
2550 { | |
2551 Label loop, done_loop; | |
2552 __ mov(t1, zero_reg); | |
2553 __ bind(&loop); | |
2554 __ Branch(&done_loop, gt, t1, Operand(a0)); | |
2555 __ Lsa(t2, sp, t0, kPointerSizeLog2); | |
2556 __ lw(at, MemOperand(t2)); | |
2557 __ Lsa(t2, sp, t1, kPointerSizeLog2); | |
2558 __ sw(at, MemOperand(t2)); | |
2559 __ Addu(t0, t0, Operand(1)); | |
2560 __ Addu(t1, t1, Operand(1)); | |
2561 __ Branch(&loop); | |
2562 __ bind(&done_loop); | |
2563 } | |
2564 | |
2565 // Copy [[BoundArguments]] to the stack (below the arguments). | |
2566 { | |
2567 Label loop, done_loop; | |
2568 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2569 __ SmiUntag(t0); | |
2570 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
2571 __ bind(&loop); | |
2572 __ Subu(t0, t0, Operand(1)); | |
2573 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); | |
2574 __ Lsa(t1, a2, t0, kPointerSizeLog2); | |
2575 __ lw(at, MemOperand(t1)); | |
2576 __ Lsa(t1, sp, a0, kPointerSizeLog2); | |
2577 __ sw(at, MemOperand(t1)); | |
2578 __ Addu(a0, a0, Operand(1)); | |
2579 __ Branch(&loop); | |
2580 __ bind(&done_loop); | |
2581 } | |
2582 | |
2583 // Call the [[BoundTargetFunction]] via the Call builtin. | |
2584 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); | |
2585 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, | |
2586 masm->isolate()))); | |
2587 __ lw(at, MemOperand(at)); | |
2588 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2589 __ Jump(at); | |
2590 } | |
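// Illustrative semantics (informal): for
//   var g = f.bind(thisArg, x, y);  g(z);
// the builtin patches the receiver slot to thisArg, inserts the bound
// arguments so the stack reads (x, y, z), and tail calls the generic Call
// builtin on the bound target f.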
2591 | |
2592 | |
2593 // static | |
2594 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, | |
2595 TailCallMode tail_call_mode) { | |
2596 // ----------- S t a t e ------------- | |
2597 // -- a0 : the number of arguments (not including the receiver) | |
2598 // -- a1 : the target to call (can be any Object). | |
2599 // ----------------------------------- | |
2600 | |
2601 Label non_callable, non_function, non_smi; | |
2602 __ JumpIfSmi(a1, &non_callable); | |
2603 __ bind(&non_smi); | |
2604 __ GetObjectType(a1, t1, t2); | |
2605 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), | |
2606 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE)); | |
2607 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), | |
2608 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE)); | |
2609 | |
2610 // Check if target has a [[Call]] internal method. | |
2611 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset)); | |
2612 __ And(t1, t1, Operand(1 << Map::kIsCallable)); | |
2613 __ Branch(&non_callable, eq, t1, Operand(zero_reg)); | |
2614 | |
2615 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE)); | |
2616 | |
2617 // 0. Prepare for tail call if necessary. | |
2618 if (tail_call_mode == TailCallMode::kAllow) { | |
2619 PrepareForTailCall(masm, a0, t0, t1, t2); | |
2620 } | |
2621 | |
2622 // 1. Runtime fallback for Proxy [[Call]]. | |
2623 __ Push(a1); | |
2624 // Increase the arguments size to include the pushed function and the | |
2625 // existing receiver on the stack. | |
2626 __ Addu(a0, a0, 2); | |
2627 // Tail-call to the runtime. | |
2628 __ JumpToExternalReference( | |
2629 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); | |
2630 | |
2631 // 2. Call to something else, which might have a [[Call]] internal method (if | |
2632 // not we raise an exception). | |
2633 __ bind(&non_function); | |
2634 // Overwrite the original receiver with the (original) target. | |
2635 __ Lsa(at, sp, a0, kPointerSizeLog2); | |
2636 __ sw(a1, MemOperand(at)); | |
2637 // Let the "call_as_function_delegate" take care of the rest. | |
2638 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); | |
2639 __ Jump(masm->isolate()->builtins()->CallFunction( | |
2640 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), | |
2641 RelocInfo::CODE_TARGET); | |
2642 | |
2643 // 3. Call to something that is not callable. | |
2644 __ bind(&non_callable); | |
2645 { | |
2646 FrameScope scope(masm, StackFrame::INTERNAL); | |
2647 __ Push(a1); | |
2648 __ CallRuntime(Runtime::kThrowCalledNonCallable); | |
2649 } | |
2650 } | |
2651 | |
2652 | |
2653 // static | |
2654 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | |
2655 // ----------- S t a t e ------------- | |
2656 // -- a0 : the number of arguments (not including the receiver) | |
2657 // -- a1 : the constructor to call (checked to be a JSFunction) | |
2658 // -- a3 : the new target (checked to be a constructor) | |
2659 // ----------------------------------- | |
2660 __ AssertFunction(a1); | |
2661 | |
2662 // The calling convention for function-specific ConstructStubs requires | |
2663 // a2 to contain either an AllocationSite or undefined. | |
2664 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
2665 | |
2666 // Tail call to the function-specific construct stub (still in the caller | |
2667 // context at this point). | |
2668 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
2669 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); | |
2670 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2671 __ Jump(at); | |
2672 } | |
2673 | |
2674 | |
2675 // static | |
2676 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { | |
2677 // ----------- S t a t e ------------- | |
2678 // -- a0 : the number of arguments (not including the receiver) | |
2679 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2680 // -- a3 : the new target (checked to be a constructor) | |
2681 // ----------------------------------- | |
2682 __ AssertBoundFunction(a1); | |
2683 | |
2684 // Load [[BoundArguments]] into a2 and length of that into t0. | |
2685 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); | |
2686 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2687 __ SmiUntag(t0); | |
2688 | |
2689 // ----------- S t a t e ------------- | |
2690 // -- a0 : the number of arguments (not including the receiver) | |
2691 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2692 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) | |
2693 // -- a3 : the new target (checked to be a constructor) | |
2694 // -- t0 : the number of [[BoundArguments]] | |
2695 // ----------------------------------- | |
2696 | |
2697 // Reserve stack space for the [[BoundArguments]]. | |
2698 { | |
2699 Label done; | |
2700 __ sll(t1, t0, kPointerSizeLog2); | |
2701 __ Subu(sp, sp, Operand(t1)); | |
2702 // Check the stack for overflow. We are not trying to catch interruptions | |
2703 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2704 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | |
2705 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | |
2706 // Restore the stack pointer. | |
2707 __ Addu(sp, sp, Operand(t1)); | |
2708 { | |
2709 FrameScope scope(masm, StackFrame::MANUAL); | |
2710 __ EnterFrame(StackFrame::INTERNAL); | |
2711 __ CallRuntime(Runtime::kThrowStackOverflow); | |
2712 } | |
2713 __ bind(&done); | |
2714 } | |
2715 | |
2716 // Relocate arguments down the stack. | |
2717 { | |
2718 Label loop, done_loop; | |
2719 __ mov(t1, zero_reg); | |
2720 __ bind(&loop); | |
2721 __ Branch(&done_loop, ge, t1, Operand(a0)); | |
2722 __ Lsa(t2, sp, t0, kPointerSizeLog2); | |
2723 __ lw(at, MemOperand(t2)); | |
2724 __ Lsa(t2, sp, t1, kPointerSizeLog2); | |
2725 __ sw(at, MemOperand(t2)); | |
2726 __ Addu(t0, t0, Operand(1)); | |
2727 __ Addu(t1, t1, Operand(1)); | |
2728 __ Branch(&loop); | |
2729 __ bind(&done_loop); | |
2730 } | |
2731 | |
2732 // Copy [[BoundArguments]] to the stack (below the arguments). | |
2733 { | |
2734 Label loop, done_loop; | |
2735 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2736 __ SmiUntag(t0); | |
2737 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
2738 __ bind(&loop); | |
2739 __ Subu(t0, t0, Operand(1)); | |
2740 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); | |
2741 __ Lsa(t1, a2, t0, kPointerSizeLog2); | |
2742 __ lw(at, MemOperand(t1)); | |
2743 __ Lsa(t1, sp, a0, kPointerSizeLog2); | |
2744 __ sw(at, MemOperand(t1)); | |
2745 __ Addu(a0, a0, Operand(1)); | |
2746 __ Branch(&loop); | |
2747 __ bind(&done_loop); | |
2748 } | |
2749 | |
2750 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. | |
2751 { | |
2752 Label skip_load; | |
2753 __ Branch(&skip_load, ne, a1, Operand(a3)); | |
2754 __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); | |
2755 __ bind(&skip_load); | |
2756 } | |
2757 | |
2758 // Construct the [[BoundTargetFunction]] via the Construct builtin. | |
2759 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); | |
2760 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); | |
2761 __ lw(at, MemOperand(at)); | |
2762 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2763 __ Jump(at); | |
2764 } | |
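// Illustrative semantics (informal): for
//   var g = f.bind(null, x);  new g(y);
// the bound arguments are inserted so the stack reads (x, y), new.target
// is patched from g to f when the two are equal, and the generic Construct
// builtin is tail called on the bound target f.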
2765 | |
2766 | |
2767 // static | |
2768 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { | |
2769 // ----------- S t a t e ------------- | |
2770 // -- a0 : the number of arguments (not including the receiver) | |
2771 // -- a1 : the constructor to call (checked to be a JSProxy) | |
2772 // -- a3 : the new target (either the same as the constructor or | |
2773 // the JSFunction on which new was invoked initially) | |
2774 // ----------------------------------- | |
2775 | |
2776 // Call into the Runtime for Proxy [[Construct]]. | |
2777 __ Push(a1, a3); | |
2778 // Include the pushed new_target, constructor and the receiver. | |
2779 __ Addu(a0, a0, Operand(3)); | |
2780 // Tail-call to the runtime. | |
2781 __ JumpToExternalReference( | |
2782 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); | |
2783 } | |
2784 | |
2785 | |
2786 // static | |
2787 void Builtins::Generate_Construct(MacroAssembler* masm) { | |
2788 // ----------- S t a t e ------------- | |
2789 // -- a0 : the number of arguments (not including the receiver) | |
2790 // -- a1 : the constructor to call (can be any Object) | |
2791 // -- a3 : the new target (either the same as the constructor or | |
2792 // the JSFunction on which new was invoked initially) | |
2793 // ----------------------------------- | |
2794 | |
2795 // Check if target is a Smi. | |
2796 Label non_constructor; | |
2797 __ JumpIfSmi(a1, &non_constructor); | |
2798 | |
2799 // Dispatch based on instance type. | |
2800 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); | |
2801 __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset)); | |
2802 __ Jump(masm->isolate()->builtins()->ConstructFunction(), | |
2803 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE)); | |
2804 | |
2805 // Check if target has a [[Construct]] internal method. | |
2806 __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset)); | |
2807 __ And(t3, t3, Operand(1 << Map::kIsConstructor)); | |
2808 __ Branch(&non_constructor, eq, t3, Operand(zero_reg)); | |
2809 | |
2810 // Only dispatch to bound functions after checking whether they are | |
2811 // constructors. | |
2812 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), | |
2813 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE)); | |
2814 | |
2815 // Only dispatch to proxies after checking whether they are constructors. | |
2816 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, | |
2817 eq, t2, Operand(JS_PROXY_TYPE)); | |
2818 | |
2819 // Called Construct on an exotic Object with a [[Construct]] internal method. | |
2820 { | |
2821 // Overwrite the original receiver with the (original) target. | |
2822 __ Lsa(at, sp, a0, kPointerSizeLog2); | |
2823 __ sw(a1, MemOperand(at)); | |
2824 // Let the "call_as_constructor_delegate" take care of the rest. | |
2825 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1); | |
2826 __ Jump(masm->isolate()->builtins()->CallFunction(), | |
2827 RelocInfo::CODE_TARGET); | |
2828 } | |
2829 | |
2830 // Called Construct on an Object that doesn't have a [[Construct]] internal | |
2831 // method. | |
2832 __ bind(&non_constructor); | |
2833 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | |
2834 RelocInfo::CODE_TARGET); | |
2835 } | |
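      | |
     // Illustrative example: `var o = {}; new o()` takes the | |
     // non_constructor path above, since a plain object's map lacks the | |
     // is_constructor bit, and throws a TypeError. | |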
2836 | |
2837 // static | |
2838 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { | |
2839 // ----------- S t a t e ------------- | |
2840 // -- a0 : requested object size (untagged) | |
2841 // -- ra : return address | |
2842 // ----------------------------------- | |
2843 __ SmiTag(a0); | |
2844 __ Push(a0); | |
2845 __ Move(cp, Smi::FromInt(0)); | |
2846 __ TailCallRuntime(Runtime::kAllocateInNewSpace); | |
2847 } | |
2848 | |
2849 // static | |
2850 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) { | |
2851 // ----------- S t a t e ------------- | |
2852 // -- a0 : requested object size (untagged) | |
2853 // -- ra : return address | |
2854 // ----------------------------------- | |
2855 __ SmiTag(a0); | |
2856 __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); | |
2857 __ Push(a0, a1); | |
2858 __ Move(cp, Smi::FromInt(0)); | |
2859 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); | |
2860 } | |
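      | |
     // Both allocation stubs smi-tag the requested size because the runtime | |
     // expects tagged arguments, and they load a smi zero into cp to mark | |
     // the runtime call as having no JavaScript context. | |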
2861 | |
2862 // static | |
2863 void Builtins::Generate_StringToNumber(MacroAssembler* masm) { | |
2864 // The StringToNumber stub takes one argument in a0. | |
2865 __ AssertString(a0); | |
2866 | |
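     // Strings that spell an array index cache that index in their hash | |
     // field, so the common case below avoids the runtime call entirely. | |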
2867 // Check if string has a cached array index. | |
2868 Label runtime; | |
2869 __ lw(a2, FieldMemOperand(a0, String::kHashFieldOffset)); | |
2870 __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask)); | |
2871 __ Branch(&runtime, ne, at, Operand(zero_reg)); | |
2872 __ IndexFromHash(a2, v0); | |
2873 __ Ret(); | |
2874 | |
2875 __ bind(&runtime); | |
2876 { | |
2877 FrameScope frame(masm, StackFrame::INTERNAL); | |
2878 // Push argument. | |
2879 __ Push(a0); | |
2880 // We cannot use a tail call here because this builtin can also be called | |
2881 // from wasm. | |
2882 __ CallRuntime(Runtime::kStringToNumber); | |
2883 } | |
2884 __ Ret(); | |
2885 } | |
2886 | |
2887 // static | |
2888 void Builtins::Generate_ToNumber(MacroAssembler* masm) { | |
2889 // The ToNumber stub takes one argument in a0. | |
2890 Label not_smi; | |
2891 __ JumpIfNotSmi(a0, ¬_smi); | |
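     // Ret(USE_DELAY_SLOT) emits the return jump with its branch delay slot | |
     // left open; the mov below fills it, returning smis unchanged in v0. | |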
2892 __ Ret(USE_DELAY_SLOT); | |
2893 __ mov(v0, a0); // In delay slot. | |
2894 __ bind(¬_smi); | |
2895 | |
2896 Label not_heap_number; | |
2897 __ GetObjectType(a0, a1, a1); | |
2898 // a0: receiver | |
2899 // a1: receiver instance type | |
2900 __ Branch(¬_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE)); | |
2901 __ Ret(USE_DELAY_SLOT); | |
2902 __ mov(v0, a0); // In delay slot. | |
2903 __ bind(¬_heap_number); | |
2904 | |
2905 __ Jump(masm->isolate()->builtins()->NonNumberToNumber(), | |
2906 RelocInfo::CODE_TARGET); | |
2907 } | |
2908 | |
2909 // static | |
2910 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) { | |
2911 // The NonNumberToNumber stub takes one argument in a0. | |
2912 __ AssertNotNumber(a0); | |
2913 | |
2914 Label not_string; | |
2915 __ GetObjectType(a0, a1, a1); | |
2916 // a0: receiver | |
2917 // a1: receiver instance type | |
2918 __ Branch(¬_string, hs, a1, Operand(FIRST_NONSTRING_TYPE)); | |
2919 __ Jump(masm->isolate()->builtins()->StringToNumber(), | |
2920 RelocInfo::CODE_TARGET); | |
2921 __ bind(¬_string); | |
2922 | |
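     // Oddballs (undefined, null, true, false) cache their ToNumber value, | |
     // making the conversion a single field load. | |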
2923 Label not_oddball; | |
2924 __ Branch(¬_oddball, ne, a1, Operand(ODDBALL_TYPE)); | |
2925 __ Ret(USE_DELAY_SLOT); | |
2926 __ lw(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); // In delay slot. | |
2927 __ bind(¬_oddball); | |
2928 { | |
2929 FrameScope frame(masm, StackFrame::INTERNAL); | |
2930 // Push argument. | |
2931 __ Push(a0); | |
2932 // We cannot use a tail call here because this builtin can also be called | |
2933 // from wasm. | |
2934 __ CallRuntime(Runtime::kToNumber); | |
2935 } | |
2936 __ Ret(); | |
2937 } | |
2938 | |
2939 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | |
2940 // State setup as expected by MacroAssembler::InvokePrologue. | |
2941 // ----------- S t a t e ------------- | |
2942 // -- a0: actual arguments count | |
2943 // -- a1: function (passed through to callee) | |
2944 // -- a2: expected arguments count | |
2945 // -- a3: new target (passed through to callee) | |
2946 // ----------------------------------- | |
2947 | |
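     // If a2 holds the kDontAdaptArgumentsSentinel the callee tolerates any | |
     // argument count and is invoked directly. Otherwise an adaptor frame | |
     // is built that either drops surplus actual arguments or pads missing | |
     // ones with undefined. | |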
2948 Label invoke, dont_adapt_arguments, stack_overflow; | |
2949 | |
2950 Label enough, too_few; | |
2951 __ Branch(&dont_adapt_arguments, eq, | |
2952 a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); | |
2953 // We use an unsigned comparison (Uless) because argument counts are never negative. | |
2954 __ Branch(&too_few, Uless, a0, Operand(a2)); | |
2955 | |
2956 { // Enough parameters: actual >= expected. | |
2957 // a0: actual number of arguments as a smi | |
2958 // a1: function | |
2959 // a2: expected number of arguments | |
2960 // a3: new target (passed through to callee) | |
2961 __ bind(&enough); | |
2962 EnterArgumentsAdaptorFrame(masm); | |
2963 ArgumentAdaptorStackCheck(masm, &stack_overflow); | |
2964 | |
2965 // Calculate copy start address into a0 and copy end address into t1. | |
2966 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize); | |
2967 // Adjust for return address and receiver. | |
2968 __ Addu(a0, a0, Operand(2 * kPointerSize)); | |
2969 // Compute copy end address. | |
2970 __ sll(t1, a2, kPointerSizeLog2); | |
2971 __ subu(t1, a0, t1); | |
2972 | |
2973 // Copy the arguments (including the receiver) to the new stack frame. | |
2974 // a0: copy start address | |
2975 // a1: function | |
2976 // a2: expected number of arguments | |
2977 // a3: new target (passed through to callee) | |
2978 // t1: copy end address | |
2979 | |
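     // The loop copies the receiver and the first a2 (expected) arguments | |
     // into the new frame; surplus actual arguments are left behind, still | |
     // reachable through the adaptor frame. | |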
2980 Label copy; | |
2981 __ bind(©); | |
2982 __ lw(t0, MemOperand(a0)); | |
2983 __ push(t0); | |
2984 __ Branch(USE_DELAY_SLOT, ©, ne, a0, Operand(t1)); | |
2985 __ addiu(a0, a0, -kPointerSize); // In delay slot. | |
2986 | |
2987 __ jmp(&invoke); | |
2988 } | |
2989 | |
2990 { // Too few parameters: actual < expected. | |
2991 __ bind(&too_few); | |
2992 EnterArgumentsAdaptorFrame(masm); | |
2993 ArgumentAdaptorStackCheck(masm, &stack_overflow); | |
2994 | |
2995 // Calculate copy start address into a0 and copy end address into t3. | |
2996 // a0: actual number of arguments as a smi | |
2997 // a1: function | |
2998 // a2: expected number of arguments | |
2999 // a3: new target (passed through to callee) | |
3000 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize); | |
3001 // Adjust for return address and receiver. | |
3002 __ Addu(a0, a0, Operand(2 * kPointerSize)); | |
3003 // Compute copy end address. Also adjust for return address. | |
3004 __ Addu(t3, fp, kPointerSize); | |
3005 | |
3006 // Copy the arguments (including the receiver) to the new stack frame. | |
3007 // a0: copy start address | |
3008 // a1: function | |
3009 // a2: expected number of arguments | |
3010 // a3: new target (passed through to callee) | |
3011 // t3: copy end address | |
3012 Label copy; | |
3013 __ bind(©); | |
3014 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver. | |
3015 __ Subu(sp, sp, kPointerSize); | |
3016 __ Subu(a0, a0, kPointerSize); | |
3017 __ Branch(USE_DELAY_SLOT, ©, ne, a0, Operand(t3)); | |
3018 __ sw(t0, MemOperand(sp)); // In the delay slot. | |
3019 | |
3020 // Fill the remaining expected arguments with undefined. | |
3021 // a1: function | |
3022 // a2: expected number of arguments | |
3023 // a3: new target (passed through to callee) | |
3024 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); | |
3025 __ sll(t2, a2, kPointerSizeLog2); | |
3026 __ Subu(t1, fp, Operand(t2)); | |
3027 // Adjust for frame. | |
3028 __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + | |
3029 2 * kPointerSize)); | |
3030 | |
3031 Label fill; | |
3032 __ bind(&fill); | |
3033 __ Subu(sp, sp, kPointerSize); | |
3034 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1)); | |
3035 __ sw(t0, MemOperand(sp)); | |
3036 } | |
3037 | |
3038 // Call the entry point. | |
3039 __ bind(&invoke); | |
3040 __ mov(a0, a2); | |
3041 // a0 : expected number of arguments | |
3042 // a1 : function (passed through to callee) | |
3043 // a3 : new target (passed through to callee) | |
3044 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | |
3045 __ Call(t0); | |
3046 | |
3047 // Store offset of return address for deoptimizer. | |
3048 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); | |
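     // The recorded offset gives the deoptimizer the return address to use | |
     // when it materializes an arguments adaptor frame. | |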
3049 | |
3050 // Exit frame and return. | |
3051 LeaveArgumentsAdaptorFrame(masm); | |
3052 __ Ret(); | |
3053 | |
3054 | |
3055 // ------------------------------------------- | |
3056 // Don't adapt arguments. | |
3057 // ------------------------------------------- | |
3058 __ bind(&dont_adapt_arguments); | |
3059 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | |
3060 __ Jump(t0); | |
3061 | |
3062 __ bind(&stack_overflow); | |
3063 { | |
3064 FrameScope frame(masm, StackFrame::MANUAL); | |
3065 __ CallRuntime(Runtime::kThrowStackOverflow); | |
3066 __ break_(0xCC); | |
3067 } | |
3068 } | |
3069 | |
3070 | |
3071 #undef __ | |
3072 | |
3073 } // namespace internal | |
3074 } // namespace v8 | |
3075 | |
3076 #endif // V8_TARGET_ARCH_MIPS | |