OLD | NEW |
| (Empty) |
1 // Copyright 2012 the V8 project authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #if V8_TARGET_ARCH_MIPS64 | |
6 | |
7 #include "src/codegen.h" | |
8 #include "src/debug/debug.h" | |
9 #include "src/deoptimizer.h" | |
10 #include "src/full-codegen/full-codegen.h" | |
11 #include "src/runtime/runtime.h" | |
12 | |
13 namespace v8 { | |
14 namespace internal { | |
15 | |
16 | |
17 #define __ ACCESS_MASM(masm) | |
18 | |
19 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id, | |
20 ExitFrameType exit_frame_type) { | |
21 // ----------- S t a t e ------------- | |
22 // -- a0 : number of arguments excluding receiver | |
23 // -- a1 : target | |
24 // -- a3 : new.target | |
25 // -- sp[0] : last argument | |
26 // -- ... | |
27 // -- sp[8 * (argc - 1)] : first argument | |
28 // -- sp[8 * argc] : receiver | |
29 // ----------------------------------- | |
30 __ AssertFunction(a1); | |
31 | |
32 // Make sure we operate in the context of the called function (for example | |
33 // ConstructStubs implemented in C++ will be run in the context of the caller | |
34 // instead of the callee, due to the way that [[Construct]] is defined for | |
35 // ordinary functions). | |
36 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
37 | |
38 // JumpToExternalReference expects a0 to contain the number of arguments | |
39 // including the receiver and the extra arguments. | |
40 const int num_extra_args = 3; | |
41 __ Daddu(a0, a0, num_extra_args + 1); | |
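// num_extra_args covers the three values pushed below (argc, target and | |
// new.target); the additional +1 accounts for the receiver already on the stack. | |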
42 | |
43 // Insert extra arguments. | |
44 __ SmiTag(a0); | |
45 __ Push(a0, a1, a3); | |
46 __ SmiUntag(a0); | |
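// The count is smi-tagged while it lives on the stack, so the GC only ever | |
// sees tagged values in stack slots. | |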
47 | |
48 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), PROTECT, | |
49 exit_frame_type == BUILTIN_EXIT); | |
50 } | |
51 | |
52 | |
53 // Load the built-in InternalArray function from the current context. | |
54 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, | |
55 Register result) { | |
56 // Load the InternalArray function from the native context. | |
57 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); | |
58 } | |
59 | |
60 | |
61 // Load the built-in Array function from the current context. | |
62 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { | |
63 // Load the Array function from the native context. | |
64 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); | |
65 } | |
66 | |
67 | |
68 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | |
69 // ----------- S t a t e ------------- | |
70 // -- a0 : number of arguments | |
71 // -- ra : return address | |
72 // -- sp[...]: constructor arguments | |
73 // ----------------------------------- | |
74 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | |
75 | |
76 // Get the InternalArray function. | |
77 GenerateLoadInternalArrayFunction(masm, a1); | |
78 | |
79 if (FLAG_debug_code) { | |
80 // The initial map for the builtin InternalArray function should be a map. | |
81 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | |
82 __ SmiTst(a2, a4); | |
83 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, | |
84 a4, Operand(zero_reg)); | |
85 __ GetObjectType(a2, a3, a4); | |
86 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, | |
87 a4, Operand(MAP_TYPE)); | |
88 } | |
89 | |
90 // Run the native code for the InternalArray function called as a normal | |
91 // function. | |
92 // Tail call a stub. | |
93 InternalArrayConstructorStub stub(masm->isolate()); | |
94 __ TailCallStub(&stub); | |
95 } | |
96 | |
97 | |
98 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | |
99 // ----------- S t a t e ------------- | |
100 // -- a0 : number of arguments | |
101 // -- ra : return address | |
102 // -- sp[...]: constructor arguments | |
103 // ----------------------------------- | |
104 Label generic_array_code; | |
105 | |
106 // Get the Array function. | |
107 GenerateLoadArrayFunction(masm, a1); | |
108 | |
109 if (FLAG_debug_code) { | |
110 // The initial map for the builtin Array function should be a map. | |
111 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | |
112 __ SmiTst(a2, a4); | |
113 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, | |
114 a4, Operand(zero_reg)); | |
115 __ GetObjectType(a2, a3, a4); | |
116 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, | |
117 a4, Operand(MAP_TYPE)); | |
118 } | |
119 | |
120 // Run the native code for the Array function called as a normal function. | |
121 // Tail call a stub. | |
122 __ mov(a3, a1); | |
123 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
124 ArrayConstructorStub stub(masm->isolate()); | |
125 __ TailCallStub(&stub); | |
126 } | |
127 | |
128 | |
129 // static | |
130 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) { | |
131 // ----------- S t a t e ------------- | |
132 // -- a0 : number of arguments | |
133 // -- a1 : function | |
134 // -- cp : context | |
135 // -- ra : return address | |
136 // -- sp[(argc - n - 1) * 8] : arg[n] (zero-based) | |
137 // -- sp[argc * 8] : receiver | |
138 // ----------------------------------- | |
139 Heap::RootListIndex const root_index = | |
140 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex | |
141 : Heap::kMinusInfinityValueRootIndex; | |
142 | |
143 // Load the accumulator with the default return value (either -Infinity or | |
144 // +Infinity), with the tagged value in t1 and the double value in f0. | |
145 __ LoadRoot(t1, root_index); | |
146 __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset)); | |
147 | |
148 Label done_loop, loop; | |
149 __ mov(a3, a0); | |
150 __ bind(&loop); | |
151 { | |
152 // Check if all parameters are done. | |
153 __ Dsubu(a3, a3, Operand(1)); | |
154 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); | |
155 | |
156 // Load the next parameter tagged value into a2. | |
157 __ Dlsa(at, sp, a3, kPointerSizeLog2); | |
158 __ ld(a2, MemOperand(at)); | |
159 | |
160 // Load the double value of the parameter into f2, maybe converting the | |
161 // parameter to a number first using the ToNumber builtin if necessary. | |
162 Label convert, convert_smi, convert_number, done_convert; | |
163 __ bind(&convert); | |
164 __ JumpIfSmi(a2, &convert_smi); | |
165 __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset)); | |
166 __ JumpIfRoot(a4, Heap::kHeapNumberMapRootIndex, &convert_number); | |
167 { | |
168 // Parameter is not a Number, use the ToNumber builtin to convert it. | |
169 FrameScope scope(masm, StackFrame::MANUAL); | |
170 __ SmiTag(a0); | |
171 __ SmiTag(a3); | |
172 __ EnterBuiltinFrame(cp, a1, a0); | |
173 __ Push(t1, a3); | |
174 __ mov(a0, a2); | |
175 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); | |
176 __ mov(a2, v0); | |
177 __ Pop(t1, a3); | |
178 __ LeaveBuiltinFrame(cp, a1, a0); | |
179 __ SmiUntag(a3); | |
180 __ SmiUntag(a0); | |
181 { | |
182 // Restore the double accumulator value (f0). | |
183 Label restore_smi, done_restore; | |
184 __ JumpIfSmi(t1, &restore_smi); | |
185 __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset)); | |
186 __ jmp(&done_restore); | |
187 __ bind(&restore_smi); | |
188 __ SmiToDoubleFPURegister(t1, f0, a4); | |
189 __ bind(&done_restore); | |
190 } | |
191 } | |
192 __ jmp(&convert); | |
193 __ bind(&convert_number); | |
194 __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset)); | |
195 __ jmp(&done_convert); | |
196 __ bind(&convert_smi); | |
197 __ SmiToDoubleFPURegister(a2, f2, a4); | |
198 __ bind(&done_convert); | |
199 | |
200 // Perform the actual comparison using the Min/Max macro instructions, with | |
201 // the accumulator value on the left hand side (f0) and the next parameter | |
202 // value on the right hand side (f2). | |
203 // We need to work out which HeapNumber (or smi) the result came from. | |
204 Label compare_nan; | |
205 __ BranchF(nullptr, &compare_nan, eq, f0, f2); | |
206 __ Move(a4, f0); | |
207 if (kind == MathMaxMinKind::kMin) { | |
208 __ MinNaNCheck_d(f0, f0, f2); | |
209 } else { | |
210 DCHECK(kind == MathMaxMinKind::kMax); | |
211 __ MaxNaNCheck_d(f0, f0, f2); | |
212 } | |
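// Determine which input produced the result by comparing raw bits: a4 holds | |
// the old accumulator bits and at will hold the result bits. If they match, | |
// the accumulator stays; otherwise the parameter in a2 becomes the new | |
// tagged accumulator (t1). | |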
213 __ Move(at, f0); | |
214 __ Branch(&loop, eq, a4, Operand(at)); | |
215 __ mov(t1, a2); | |
216 __ jmp(&loop); | |
217 | |
218 // At least one side is NaN, which means that the result will be NaN too. | |
219 __ bind(&compare_nan); | |
220 __ LoadRoot(t1, Heap::kNanValueRootIndex); | |
221 __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset)); | |
222 __ jmp(&loop); | |
223 } | |
224 | |
225 __ bind(&done_loop); | |
226 // Drop all slots, including the receiver. | |
227 __ Daddu(a0, a0, Operand(1)); | |
228 __ Dlsa(sp, sp, a0, kPointerSizeLog2); | |
229 __ Ret(USE_DELAY_SLOT); | |
230 __ mov(v0, t1); // In delay slot. | |
231 } | |
232 | |
233 // static | |
234 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) { | |
235 // ----------- S t a t e ------------- | |
236 // -- a0 : number of arguments | |
237 // -- a1 : constructor function | |
238 // -- cp : context | |
239 // -- ra : return address | |
240 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) | |
241 // -- sp[argc * 8] : receiver | |
242 // ----------------------------------- | |
243 | |
244 // 1. Load the first argument into a0 and get rid of the rest (including the | |
245 // receiver). | |
246 Label no_arguments; | |
247 { | |
248 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); | |
249 __ Dsubu(t1, a0, Operand(1)); // In delay slot. | |
250 __ mov(t0, a0); // Store argc in t0. | |
251 __ Dlsa(at, sp, t1, kPointerSizeLog2); | |
252 __ ld(a0, MemOperand(at)); | |
253 } | |
254 | |
255 // 2a. Convert first argument to number. | |
256 { | |
257 FrameScope scope(masm, StackFrame::MANUAL); | |
258 __ SmiTag(t0); | |
259 __ EnterBuiltinFrame(cp, a1, t0); | |
260 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); | |
261 __ LeaveBuiltinFrame(cp, a1, t0); | |
262 __ SmiUntag(t0); | |
263 } | |
264 | |
265 { | |
266 // Drop all arguments including the receiver. | |
267 __ Dlsa(sp, sp, t0, kPointerSizeLog2); | |
268 __ DropAndRet(1); | |
269 } | |
270 | |
271 // 2b. No arguments, return +0. | |
272 __ bind(&no_arguments); | |
273 __ Move(v0, Smi::FromInt(0)); | |
274 __ DropAndRet(1); | |
275 } | |
276 | |
277 | |
278 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) { | |
279 // ----------- S t a t e ------------- | |
280 // -- a0 : number of arguments | |
281 // -- a1 : constructor function | |
282 // -- a3 : new target | |
283 // -- cp : context | |
284 // -- ra : return address | |
285 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) | |
286 // -- sp[argc * 8] : receiver | |
287 // ----------------------------------- | |
288 | |
289 // 1. Make sure we operate in the context of the called function. | |
290 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
291 | |
292 // 2. Load the first argument into a0 and get rid of the rest (including the | |
293 // receiver). | |
294 { | |
295 Label no_arguments, done; | |
296 __ mov(t0, a0); // Store argc in t0. | |
297 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); | |
298 __ Dsubu(a0, a0, Operand(1)); // In delay slot. | |
299 __ Dlsa(at, sp, a0, kPointerSizeLog2); | |
300 __ ld(a0, MemOperand(at)); | |
301 __ jmp(&done); | |
302 __ bind(&no_arguments); | |
303 __ Move(a0, Smi::FromInt(0)); | |
304 __ bind(&done); | |
305 } | |
306 | |
307 // 3. Make sure a0 is a number. | |
308 { | |
309 Label done_convert; | |
310 __ JumpIfSmi(a0, &done_convert); | |
311 __ GetObjectType(a0, a2, a2); | |
312 __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE)); | |
313 { | |
314 FrameScope scope(masm, StackFrame::MANUAL); | |
315 __ SmiTag(t0); | |
316 __ EnterBuiltinFrame(cp, a1, t0); | |
317 __ Push(a3); | |
318 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); | |
319 __ Move(a0, v0); | |
320 __ Pop(a3); | |
321 __ LeaveBuiltinFrame(cp, a1, t0); | |
322 __ SmiUntag(t0); | |
323 } | |
324 __ bind(&done_convert); | |
325 } | |
326 | |
327 // 4. Check if new target and constructor differ. | |
328 Label drop_frame_and_ret, new_object; | |
329 __ Branch(&new_object, ne, a1, Operand(a3)); | |
330 | |
331 // 5. Allocate a JSValue wrapper for the number. | |
332 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); | |
333 __ jmp(&drop_frame_and_ret); | |
334 | |
335 // 6. Fall back to the runtime to create a new object. | |
336 __ bind(&new_object); | |
337 { | |
338 FrameScope scope(masm, StackFrame::MANUAL); | |
339 FastNewObjectStub stub(masm->isolate()); | |
340 __ SmiTag(t0); | |
341 __ EnterBuiltinFrame(cp, a1, t0); | |
342 __ Push(a0); | |
343 __ CallStub(&stub); | |
344 __ Pop(a0); | |
345 __ LeaveBuiltinFrame(cp, a1, t0); | |
346 __ SmiUntag(t0); | |
347 } | |
348 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); | |
349 | |
350 __ bind(&drop_frame_and_ret); | |
351 { | |
352 __ Dlsa(sp, sp, t0, kPointerSizeLog2); | |
353 __ DropAndRet(1); | |
354 } | |
355 } | |
356 | |
357 | |
358 // static | |
359 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { | |
360 // ----------- S t a t e ------------- | |
361 // -- a0 : number of arguments | |
362 // -- a1 : constructor function | |
363 // -- cp : context | |
364 // -- ra : return address | |
365 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) | |
366 // -- sp[argc * 8] : receiver | |
367 // ----------------------------------- | |
368 | |
369 // 1. Load the first argument into a0 and get rid of the rest (including the | |
370 // receiver). | |
371 Label no_arguments; | |
372 { | |
373 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); | |
374 __ Dsubu(t1, a0, Operand(1)); // In delay slot. | |
375 __ mov(t0, a0); // Store argc in t0. | |
376 __ Dlsa(at, sp, t1, kPointerSizeLog2); | |
377 __ ld(a0, MemOperand(at)); | |
378 } | |
379 | |
380 // 2a. At least one argument, return a0 if it's a string, otherwise | |
381 // dispatch to the appropriate conversion. | |
382 Label drop_frame_and_ret, to_string, symbol_descriptive_string; | |
383 { | |
384 __ JumpIfSmi(a0, &to_string); | |
385 __ GetObjectType(a0, t1, t1); | |
386 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE); | |
387 __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE)); | |
388 __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg)); | |
389 __ Branch(&to_string, gt, t1, Operand(zero_reg)); | |
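// Instance types below FIRST_NONSTRING_TYPE are strings, so fall through | |
// and return the argument unchanged. | |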
390 __ mov(v0, a0); | |
391 __ jmp(&drop_frame_and_ret); | |
392 } | |
393 | |
394 // 2b. No arguments, return the empty string (and pop the receiver). | |
395 __ bind(&no_arguments); | |
396 { | |
397 __ LoadRoot(v0, Heap::kempty_stringRootIndex); | |
398 __ DropAndRet(1); | |
399 } | |
400 | |
401 // 3a. Convert a0 to a string. | |
402 __ bind(&to_string); | |
403 { | |
404 FrameScope scope(masm, StackFrame::MANUAL); | |
405 ToStringStub stub(masm->isolate()); | |
406 __ SmiTag(t0); | |
407 __ EnterBuiltinFrame(cp, a1, t0); | |
408 __ CallStub(&stub); | |
409 __ LeaveBuiltinFrame(cp, a1, t0); | |
410 __ SmiUntag(t0); | |
411 } | |
412 __ jmp(&drop_frame_and_ret); | |
413 | |
414 // 3b. Convert symbol in a0 to a string. | |
415 __ bind(&symbol_descriptive_string); | |
416 { | |
417 __ Dlsa(sp, sp, t0, kPointerSizeLog2); | |
418 __ Drop(1); | |
419 __ Push(a0); | |
420 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); | |
421 } | |
422 | |
423 __ bind(&drop_frame_and_ret); | |
424 { | |
425 __ Dlsa(sp, sp, t0, kPointerSizeLog2); | |
426 __ DropAndRet(1); | |
427 } | |
428 } | |
429 | |
430 | |
431 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { | |
432 // ----------- S t a t e ------------- | |
433 // -- a0 : number of arguments | |
434 // -- a1 : constructor function | |
435 // -- a3 : new target | |
436 // -- cp : context | |
437 // -- ra : return address | |
438 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) | |
439 // -- sp[argc * 8] : receiver | |
440 // ----------------------------------- | |
441 | |
442 // 1. Make sure we operate in the context of the called function. | |
443 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
444 | |
445 // 2. Load the first argument into a0 and get rid of the rest (including the | |
446 // receiver). | |
447 { | |
448 Label no_arguments, done; | |
449 __ mov(t0, a0); // Store argc in t0. | |
450 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); | |
451 __ Dsubu(a0, a0, Operand(1)); // In delay slot. | |
452 __ Dlsa(at, sp, a0, kPointerSizeLog2); | |
453 __ ld(a0, MemOperand(at)); | |
454 __ jmp(&done); | |
455 __ bind(&no_arguments); | |
456 __ LoadRoot(a0, Heap::kempty_stringRootIndex); | |
457 __ bind(&done); | |
458 } | |
459 | |
460 // 3. Make sure a0 is a string. | |
461 { | |
462 Label convert, done_convert; | |
463 __ JumpIfSmi(a0, &convert); | |
464 __ GetObjectType(a0, a2, a2); | |
465 __ And(t1, a2, Operand(kIsNotStringMask)); | |
466 __ Branch(&done_convert, eq, t1, Operand(zero_reg)); | |
467 __ bind(&convert); | |
468 { | |
469 FrameScope scope(masm, StackFrame::MANUAL); | |
470 ToStringStub stub(masm->isolate()); | |
471 __ SmiTag(t0); | |
472 __ EnterBuiltinFrame(cp, a1, t0); | |
473 __ Push(a3); | |
474 __ CallStub(&stub); | |
475 __ Move(a0, v0); | |
476 __ Pop(a3); | |
477 __ LeaveBuiltinFrame(cp, a1, t0); | |
478 __ SmiUntag(t0); | |
479 } | |
480 __ bind(&done_convert); | |
481 } | |
482 | |
483 // 4. Check if new target and constructor differ. | |
484 Label drop_frame_and_ret, new_object; | |
485 __ Branch(&new_object, ne, a1, Operand(a3)); | |
486 | |
487 // 5. Allocate a JSValue wrapper for the string. | |
488 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); | |
489 __ jmp(&drop_frame_and_ret); | |
490 | |
491 // 6. Fall back to the runtime to create a new object. | |
492 __ bind(&new_object); | |
493 { | |
494 FrameScope scope(masm, StackFrame::MANUAL); | |
495 FastNewObjectStub stub(masm->isolate()); | |
496 __ SmiTag(t0); | |
497 __ EnterBuiltinFrame(cp, a1, t0); | |
498 __ Push(a0); | |
499 __ CallStub(&stub); | |
500 __ Pop(a0); | |
501 __ LeaveBuiltinFrame(cp, a1, t0); | |
502 __ SmiUntag(t0); | |
503 } | |
504 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); | |
505 | |
506 __ bind(&drop_frame_and_ret); | |
507 { | |
508 __ Dlsa(sp, sp, t0, kPointerSizeLog2); | |
509 __ DropAndRet(1); | |
510 } | |
511 } | |
512 | |
513 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | |
514 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
515 __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); | |
516 __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
517 __ Jump(at); | |
518 } | |
519 | |
520 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, | |
521 Runtime::FunctionId function_id) { | |
522 // ----------- S t a t e ------------- | |
523 // -- a0 : argument count (preserved for callee) | |
524 // -- a1 : target function (preserved for callee) | |
525 // -- a3 : new target (preserved for callee) | |
526 // ----------------------------------- | |
527 { | |
528 FrameScope scope(masm, StackFrame::INTERNAL); | |
529 // Push the smi-tagged argument count, then the target function and the new | |
530 // target (restored below), then the target function again as the runtime argument. | |
531 __ SmiTag(a0); | |
532 __ Push(a0, a1, a3, a1); | |
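// The second copy of a1 is the single argument consumed by the runtime call; | |
// the first three slots are restored by the Pop below. | |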
533 | |
534 __ CallRuntime(function_id, 1); | |
535 // Restore target function and new target. | |
536 __ Pop(a0, a1, a3); | |
537 __ SmiUntag(a0); | |
538 } | |
539 | |
540 __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
541 __ Jump(at); | |
542 } | |
543 | |
544 | |
545 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | |
546 // Checking whether the queued function is ready for install is optional, | |
547 // since we come across interrupts and stack checks elsewhere. However, | |
548 // not checking may delay installing ready functions, and always checking | |
549 // would be quite expensive. A good compromise is to first check against | |
550 // the stack limit as a cue for an interrupt signal. | |
551 Label ok; | |
552 __ LoadRoot(a4, Heap::kStackLimitRootIndex); | |
553 __ Branch(&ok, hs, sp, Operand(a4)); | |
554 | |
555 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); | |
556 | |
557 __ bind(&ok); | |
558 GenerateTailCallToSharedCode(masm); | |
559 } | |
560 | |
561 | |
562 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | |
563 bool is_api_function, | |
564 bool create_implicit_receiver, | |
565 bool check_derived_construct) { | |
566 // ----------- S t a t e ------------- | |
567 // -- a0 : number of arguments | |
568 // -- a1 : constructor function | |
569 // -- a2 : allocation site or undefined | |
570 // -- a3 : new target | |
571 // -- cp : context | |
572 // -- ra : return address | |
573 // -- sp[...]: constructor arguments | |
574 // ----------------------------------- | |
575 | |
576 Isolate* isolate = masm->isolate(); | |
577 | |
578 // Enter a construct frame. | |
579 { | |
580 FrameScope scope(masm, StackFrame::CONSTRUCT); | |
581 | |
582 // Preserve the incoming parameters on the stack. | |
583 __ AssertUndefinedOrAllocationSite(a2, t0); | |
584 __ SmiTag(a0); | |
585 __ Push(cp, a2, a0); | |
586 | |
587 if (create_implicit_receiver) { | |
588 __ Push(a1, a3); | |
589 FastNewObjectStub stub(masm->isolate()); | |
590 __ CallStub(&stub); | |
591 __ mov(t0, v0); | |
592 __ Pop(a1, a3); | |
593 | |
594 // ----------- S t a t e ------------- | |
595 // -- a1: constructor function | |
596 // -- a3: new target | |
597 // -- t0: newly allocated object | |
598 // ----------------------------------- | |
599 __ ld(a0, MemOperand(sp)); | |
600 } | |
601 __ SmiUntag(a0); | |
602 | |
603 if (create_implicit_receiver) { | |
604 // Push the allocated receiver to the stack. We need two copies | |
605 // because we may have to return the original one and the calling | |
606 // conventions dictate that the called function pops the receiver. | |
607 __ Push(t0, t0); | |
608 } else { | |
609 __ PushRoot(Heap::kTheHoleValueRootIndex); | |
610 } | |
611 | |
612 // Set up pointer to last argument. | |
613 __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); | |
614 | |
615 // Copy arguments and receiver to the expression stack. | |
616 // a0: number of arguments | |
617 // a1: constructor function | |
618 // a2: address of last argument (caller sp) | |
619 // a3: new target | |
620 // t0: number of arguments (smi-tagged) | |
621 // sp[0]: receiver | |
622 // sp[1]: receiver | |
623 // sp[2]: number of arguments (smi-tagged) | |
624 Label loop, entry; | |
625 __ mov(t0, a0); | |
626 __ jmp(&entry); | |
627 __ bind(&loop); | |
628 __ Dlsa(a4, a2, t0, kPointerSizeLog2); | |
629 __ ld(a5, MemOperand(a4)); | |
630 __ push(a5); | |
631 __ bind(&entry); | |
632 __ Daddu(t0, t0, Operand(-1)); | |
633 __ Branch(&loop, greater_equal, t0, Operand(zero_reg)); | |
634 | |
635 // Call the function. | |
636 // a0: number of arguments | |
637 // a1: constructor function | |
638 // a3: new target | |
639 ParameterCount actual(a0); | |
640 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION, | |
641 CheckDebugStepCallWrapper()); | |
642 | |
643 // Store offset of return address for deoptimizer. | |
644 if (create_implicit_receiver && !is_api_function) { | |
645 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); | |
646 } | |
647 | |
648 // Restore context from the frame. | |
649 __ ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); | |
650 | |
651 if (create_implicit_receiver) { | |
652 // If the result is an object (in the ECMA sense), we should get rid | |
653 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 | |
654 // on page 74. | |
655 Label use_receiver, exit; | |
656 | |
657 // If the result is a smi, it is *not* an object in the ECMA sense. | |
658 // v0: result | |
659 // sp[0]: receiver (newly allocated object) | |
660 // sp[1]: number of arguments (smi-tagged) | |
661 __ JumpIfSmi(v0, &use_receiver); | |
662 | |
663 // If the type of the result (stored in its map) is less than | |
664 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense. | |
665 __ GetObjectType(v0, a1, a3); | |
666 __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE)); | |
667 | |
668 // Throw away the result of the constructor invocation and use the | |
669 // on-stack receiver as the result. | |
670 __ bind(&use_receiver); | |
671 __ ld(v0, MemOperand(sp)); | |
672 | |
673 // Remove receiver from the stack, remove caller arguments, and | |
674 // return. | |
675 __ bind(&exit); | |
676 // v0: result | |
677 // sp[0]: receiver (newly allocated object) | |
678 // sp[1]: number of arguments (smi-tagged) | |
679 __ ld(a1, MemOperand(sp, 1 * kPointerSize)); | |
680 } else { | |
681 __ ld(a1, MemOperand(sp)); | |
682 } | |
683 | |
684 // Leave construct frame. | |
685 } | |
686 | |
687 // ES6 9.2.2. Step 13+ | |
688 // Check that the result is not a Smi; a Smi would indicate that the constructor | |
689 // result from a derived class is neither undefined nor an Object. | |
690 if (check_derived_construct) { | |
691 Label dont_throw; | |
692 __ JumpIfNotSmi(v0, &dont_throw); | |
693 { | |
694 FrameScope scope(masm, StackFrame::INTERNAL); | |
695 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); | |
696 } | |
697 __ bind(&dont_throw); | |
698 } | |
699 | |
700 __ SmiScale(a4, a1, kPointerSizeLog2); | |
701 __ Daddu(sp, sp, a4); | |
702 __ Daddu(sp, sp, kPointerSize); | |
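// a1 holds the smi-tagged argument count reloaded inside the frame above; | |
// SmiScale turns it into a byte offset, and the extra word drops the receiver. | |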
703 if (create_implicit_receiver) { | |
704 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); | |
705 } | |
706 __ Ret(); | |
707 } | |
708 | |
709 | |
710 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { | |
711 Generate_JSConstructStubHelper(masm, false, true, false); | |
712 } | |
713 | |
714 | |
715 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { | |
716 Generate_JSConstructStubHelper(masm, true, false, false); | |
717 } | |
718 | |
719 | |
720 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { | |
721 Generate_JSConstructStubHelper(masm, false, false, false); | |
722 } | |
723 | |
724 | |
725 void Builtins::Generate_JSBuiltinsConstructStubForDerived( | |
726 MacroAssembler* masm) { | |
727 Generate_JSConstructStubHelper(masm, false, false, true); | |
728 } | |
729 | |
730 // static | |
731 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { | |
732 // ----------- S t a t e ------------- | |
733 // -- v0 : the value to pass to the generator | |
734 // -- a1 : the JSGeneratorObject to resume | |
735 // -- a2 : the resume mode (tagged) | |
736 // -- ra : return address | |
737 // ----------------------------------- | |
738 __ AssertGeneratorObject(a1); | |
739 | |
740 // Store input value into generator object. | |
741 __ sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset)); | |
742 __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3, | |
743 kRAHasNotBeenSaved, kDontSaveFPRegs); | |
744 | |
745 // Store resume mode into generator object. | |
746 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset)); | |
747 | |
748 // Load suspended function and context. | |
749 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset)); | |
750 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); | |
751 | |
752 // Flood function if we are stepping. | |
753 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator; | |
754 Label stepping_prepared; | |
755 ExternalReference last_step_action = | |
756 ExternalReference::debug_last_step_action_address(masm->isolate()); | |
757 STATIC_ASSERT(StepFrame > StepIn); | |
758 __ li(a5, Operand(last_step_action)); | |
759 __ lb(a5, MemOperand(a5)); | |
760 __ Branch(&prepare_step_in_if_stepping, ge, a5, Operand(StepIn)); | |
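// Because StepFrame > StepIn (asserted above), a single ge comparison | |
// against StepIn covers both stepping modes. | |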
761 | |
762 // Flood function if we need to continue stepping in the suspended generator. | |
763 ExternalReference debug_suspended_generator = | |
764 ExternalReference::debug_suspended_generator_address(masm->isolate()); | |
765 __ li(a5, Operand(debug_suspended_generator)); | |
766 __ ld(a5, MemOperand(a5)); | |
767 __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5)); | |
768 __ bind(&stepping_prepared); | |
769 | |
770 // Push receiver. | |
771 __ ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); | |
772 __ Push(a5); | |
773 | |
774 // ----------- S t a t e ------------- | |
775 // -- a1 : the JSGeneratorObject to resume | |
776 // -- a2 : the resume mode (tagged) | |
777 // -- a4 : generator function | |
778 // -- cp : generator context | |
779 // -- ra : return address | |
780 // -- sp[0] : generator receiver | |
781 // ----------------------------------- | |
782 | |
783 // Push holes for arguments to generator function. Since the parser forced | |
784 // context allocation for any variables in generators, the actual argument | |
785 // values have already been copied into the context and these dummy values | |
786 // will never be used. | |
787 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); | |
788 __ lw(a3, | |
789 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); | |
790 { | |
791 Label done_loop, loop; | |
792 __ bind(&loop); | |
793 __ Dsubu(a3, a3, Operand(1)); | |
794 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); | |
795 __ PushRoot(Heap::kTheHoleValueRootIndex); | |
796 __ Branch(&loop); | |
797 __ bind(&done_loop); | |
798 } | |
799 | |
800 // Dispatch on the kind of generator object. | |
801 Label old_generator; | |
802 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); | |
803 __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset)); | |
804 __ GetObjectType(a3, a3, a3); | |
805 __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE)); | |
806 | |
807 // New-style (ignition/turbofan) generator object. | |
808 { | |
809 __ ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); | |
810 __ lw(a0, | |
811 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset)); | |
812 // We abuse new.target both to indicate that this is a resume call and to | |
813 // pass in the generator object. In ordinary calls, new.target is always | |
814 // undefined because generator functions are non-constructable. | |
815 __ Move(a3, a1); | |
816 __ Move(a1, a4); | |
817 __ ld(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | |
818 __ Jump(a2); | |
819 } | |
820 | |
821 // Old-style (full-codegen) generator object | |
822 __ bind(&old_generator); | |
823 { | |
824 // Enter a new JavaScript frame, and initialize its slots as they were when | |
825 // the generator was suspended. | |
826 FrameScope scope(masm, StackFrame::MANUAL); | |
827 __ Push(ra, fp); | |
828 __ Move(fp, sp); | |
829 __ Push(cp, a4); | |
830 | |
831 // Restore the operand stack. | |
832 __ ld(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); | |
833 __ ld(a3, FieldMemOperand(a0, FixedArray::kLengthOffset)); | |
834 __ SmiUntag(a3); | |
835 __ Daddu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
836 __ Dlsa(a3, a0, a3, kPointerSizeLog2); | |
837 { | |
838 Label done_loop, loop; | |
839 __ bind(&loop); | |
840 __ Branch(&done_loop, eq, a0, Operand(a3)); | |
841 __ ld(a5, MemOperand(a0)); | |
842 __ Push(a5); | |
843 __ Branch(USE_DELAY_SLOT, &loop); | |
844 __ daddiu(a0, a0, kPointerSize); // In delay slot. | |
845 __ bind(&done_loop); | |
846 } | |
847 | |
848 // Reset operand stack so we don't leak. | |
849 __ LoadRoot(a5, Heap::kEmptyFixedArrayRootIndex); | |
850 __ sd(a5, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); | |
851 | |
852 // Resume the generator function at the continuation. | |
853 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); | |
854 __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset)); | |
855 __ Daddu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
856 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); | |
857 __ SmiUntag(a2); | |
858 __ Daddu(a3, a3, Operand(a2)); | |
859 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); | |
860 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); | |
861 __ Move(v0, a1); // Continuation expects generator object in v0. | |
862 __ Jump(a3); | |
863 } | |
864 | |
865 __ bind(&prepare_step_in_if_stepping); | |
866 { | |
867 FrameScope scope(masm, StackFrame::INTERNAL); | |
868 __ Push(a1, a2, a4); | |
869 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping); | |
870 __ Pop(a1, a2); | |
871 } | |
872 __ Branch(USE_DELAY_SLOT, &stepping_prepared); | |
873 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); | |
874 | |
875 __ bind(&prepare_step_in_suspended_generator); | |
876 { | |
877 FrameScope scope(masm, StackFrame::INTERNAL); | |
878 __ Push(a1, a2); | |
879 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); | |
880 __ Pop(a1, a2); | |
881 } | |
882 __ Branch(USE_DELAY_SLOT, &stepping_prepared); | |
883 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); | |
884 } | |
885 | |
886 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { | |
887 FrameScope scope(masm, StackFrame::INTERNAL); | |
888 __ Push(a1); | |
889 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); | |
890 } | |
891 | |
892 | |
893 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; | |
894 | |
895 | |
896 // Clobbers a2; preserves all other registers. | |
897 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc, | |
898 IsTagged argc_is_tagged) { | |
899 // Check the stack for overflow. We are not trying to catch | |
900 // interruptions (e.g. debug break and preemption) here, so the "real stack | |
901 // limit" is checked. | |
902 Label okay; | |
903 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | |
904 // Make a2 the space we have left. The stack might already be overflowed | |
905 // here, which will cause a2 to become negative. | |
906 __ dsubu(a2, sp, a2); | |
907 // Check if the arguments will overflow the stack. | |
908 if (argc_is_tagged == kArgcIsSmiTagged) { | |
909 __ SmiScale(a7, argc, kPointerSizeLog2); | |
910 } else { | |
911 DCHECK(argc_is_tagged == kArgcIsUntaggedInt); | |
912 __ dsll(a7, argc, kPointerSizeLog2); | |
913 } | |
914 __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison. | |
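// Signed on purpose: if the stack has already overflowed, a2 is negative and | |
// the branch falls through to the throw below. | |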
915 | |
916 // Out of stack space. | |
917 __ CallRuntime(Runtime::kThrowStackOverflow); | |
918 | |
919 __ bind(&okay); | |
920 } | |
921 | |
922 | |
923 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | |
924 bool is_construct) { | |
925 // Called from JSEntryStub::GenerateBody | |
926 | |
927 // ----------- S t a t e ------------- | |
928 // -- a0: new.target | |
929 // -- a1: function | |
930 // -- a2: receiver_pointer | |
931 // -- a3: argc | |
932 // -- s0: argv | |
933 // ----------------------------------- | |
934 ProfileEntryHookStub::MaybeCallEntryHook(masm); | |
935 | |
936 // Enter an internal frame. | |
937 { | |
938 FrameScope scope(masm, StackFrame::INTERNAL); | |
939 | |
940 // Set up the context (we need to use the caller context from the isolate). | |
941 ExternalReference context_address(Isolate::kContextAddress, | |
942 masm->isolate()); | |
943 __ li(cp, Operand(context_address)); | |
944 __ ld(cp, MemOperand(cp)); | |
945 | |
946 // Push the function and the receiver onto the stack. | |
947 __ Push(a1, a2); | |
948 | |
949 // Check if we have enough stack space to push all arguments. | |
950 // Clobbers a2. | |
951 Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt); | |
952 | |
953 // Remember new.target. | |
954 __ mov(a5, a0); | |
955 | |
956 // Copy arguments to the stack in a loop. | |
957 // a3: argc | |
958 // s0: argv, i.e. points to first arg | |
959 Label loop, entry; | |
960 __ Dlsa(a6, s0, a3, kPointerSizeLog2); | |
961 __ b(&entry); | |
962 __ nop(); // Branch delay slot nop. | |
963 // a6 points past last arg. | |
964 __ bind(&loop); | |
965 __ ld(a4, MemOperand(s0)); // Read next parameter. | |
966 __ daddiu(s0, s0, kPointerSize); | |
967 __ ld(a4, MemOperand(a4)); // Dereference handle. | |
968 __ push(a4); // Push parameter. | |
969 __ bind(&entry); | |
970 __ Branch(&loop, ne, s0, Operand(a6)); | |
971 | |
972 // Set up new.target and argc. | |
973 __ mov(a0, a3); | |
974 __ mov(a3, a5); | |
975 | |
976 // Initialize all JavaScript callee-saved registers, since they will be seen | |
977 // by the garbage collector as part of handlers. | |
978 __ LoadRoot(a4, Heap::kUndefinedValueRootIndex); | |
979 __ mov(s1, a4); | |
980 __ mov(s2, a4); | |
981 __ mov(s3, a4); | |
982 __ mov(s4, a4); | |
983 __ mov(s5, a4); | |
984 // s6 holds the root address. Do not clobber. | |
985 // s7 is cp. Do not init. | |
986 | |
987 // Invoke the code. | |
988 Handle<Code> builtin = is_construct | |
989 ? masm->isolate()->builtins()->Construct() | |
990 : masm->isolate()->builtins()->Call(); | |
991 __ Call(builtin, RelocInfo::CODE_TARGET); | |
992 | |
993 // Leave internal frame. | |
994 } | |
995 __ Jump(ra); | |
996 } | |
997 | |
998 | |
999 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | |
1000 Generate_JSEntryTrampolineHelper(masm, false); | |
1001 } | |
1002 | |
1003 | |
1004 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | |
1005 Generate_JSEntryTrampolineHelper(masm, true); | |
1006 } | |
1007 | |
1008 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) { | |
1009 Register args_count = scratch; | |
1010 | |
1011 // Get the arguments + receiver count. | |
1012 __ ld(args_count, | |
1013 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); | |
1014 __ lw(args_count, FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset)); | |
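// The parameter size is a byte count that already includes the receiver | |
// slot, which is why it can be added directly to sp below. | |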
1015 | |
1016 // Leave the frame (also dropping the register file). | |
1017 __ LeaveFrame(StackFrame::JAVA_SCRIPT); | |
1018 | |
1019 // Drop receiver + arguments. | |
1020 __ Daddu(sp, sp, args_count); | |
1021 } | |
1022 | |
1023 // Generate code for entering a JS function with the interpreter. | |
1024 // On entry to the function the receiver and arguments have been pushed on the | |
1025 // stack left to right. The actual argument count matches the formal parameter | |
1026 // count expected by the function. | |
1027 // | |
1028 // The live registers are: | |
1029 // o a1: the JS function object being called. | |
1030 // o a3: the new target | |
1031 // o cp: our context | |
1032 // o fp: the caller's frame pointer | |
1033 // o sp: stack pointer | |
1034 // o ra: return address | |
1035 // | |
1036 // The function builds an interpreter frame. See InterpreterFrameConstants in | |
1037 // frames.h for its layout. | |
1038 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { | |
1039 ProfileEntryHookStub::MaybeCallEntryHook(masm); | |
1040 | |
1041 // Open a frame scope to indicate that there is a frame on the stack. The | |
1042 // MANUAL indicates that the scope shouldn't actually generate code to set up | |
1043 // the frame (that is done below). | |
1044 FrameScope frame_scope(masm, StackFrame::MANUAL); | |
1045 __ PushStandardFrame(a1); | |
1046 | |
1047 // Get the bytecode array from the function object (or from the DebugInfo if | |
1048 // it is present) and load it into kInterpreterBytecodeArrayRegister. | |
1049 __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1050 Label load_debug_bytecode_array, bytecode_array_loaded; | |
1051 Register debug_info = kInterpreterBytecodeArrayRegister; | |
1052 DCHECK(!debug_info.is(a0)); | |
1053 __ ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset)); | |
1054 __ Branch(&load_debug_bytecode_array, ne, debug_info, | |
1055 Operand(DebugInfo::uninitialized())); | |
1056 __ ld(kInterpreterBytecodeArrayRegister, | |
1057 FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset)); | |
1058 __ bind(&bytecode_array_loaded); | |
1059 | |
1060 // Check function data field is actually a BytecodeArray object. | |
1061 Label bytecode_array_not_present; | |
1062 __ JumpIfRoot(kInterpreterBytecodeArrayRegister, | |
1063 Heap::kUndefinedValueRootIndex, &bytecode_array_not_present); | |
1064 if (FLAG_debug_code) { | |
1065 __ SmiTst(kInterpreterBytecodeArrayRegister, a4); | |
1066 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4, | |
1067 Operand(zero_reg)); | |
1068 __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4); | |
1069 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4, | |
1070 Operand(BYTECODE_ARRAY_TYPE)); | |
1071 } | |
1072 | |
1073 // Load initial bytecode offset. | |
1074 __ li(kInterpreterBytecodeOffsetRegister, | |
1075 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); | |
1076 | |
1077 // Push new.target, bytecode array and Smi tagged bytecode array offset. | |
1078 __ SmiTag(a4, kInterpreterBytecodeOffsetRegister); | |
1079 __ Push(a3, kInterpreterBytecodeArrayRegister, a4); | |
1080 | |
1081 // Allocate the local and temporary register file on the stack. | |
1082 { | |
1083 // Load frame size (word) from the BytecodeArray object. | |
1084 __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister, | |
1085 BytecodeArray::kFrameSizeOffset)); | |
1086 | |
1087 // Do a stack check to ensure we don't go over the limit. | |
1088 Label ok; | |
1089 __ Dsubu(a5, sp, Operand(a4)); | |
1090 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | |
1091 __ Branch(&ok, hs, a5, Operand(a2)); | |
1092 __ CallRuntime(Runtime::kThrowStackOverflow); | |
1093 __ bind(&ok); | |
1094 | |
1095 // If ok, push undefined as the initial value for all register file entries. | |
1096 Label loop_header; | |
1097 Label loop_check; | |
1098 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); | |
1099 __ Branch(&loop_check); | |
1100 __ bind(&loop_header); | |
1101 // TODO(rmcilroy): Consider doing more than one push per loop iteration. | |
1102 __ push(a5); | |
1103 // Continue loop if not done. | |
1104 __ bind(&loop_check); | |
1105 __ Dsubu(a4, a4, Operand(kPointerSize)); | |
1106 __ Branch(&loop_header, ge, a4, Operand(zero_reg)); | |
1107 } | |
1108 | |
1109 // Load accumulator and dispatch table into registers. | |
1110 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); | |
1111 __ li(kInterpreterDispatchTableRegister, | |
1112 Operand(ExternalReference::interpreter_dispatch_table_address( | |
1113 masm->isolate()))); | |
1114 | |
1115 // Dispatch to the first bytecode handler for the function. | |
1116 __ Daddu(a0, kInterpreterBytecodeArrayRegister, | |
1117 kInterpreterBytecodeOffsetRegister); | |
1118 __ lbu(a0, MemOperand(a0)); | |
1119 __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2); | |
1120 __ ld(at, MemOperand(at)); | |
1121 __ Call(at); | |
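// Bytecode handlers dispatch to one another directly; control only returns | |
// here when the function returns. The pc recorded below is what | |
// InterpreterEnterBytecodeDispatch uses to return into this trampoline. | |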
1122 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset()); | |
1123 | |
1124 // The return value is in v0. | |
1125 LeaveInterpreterFrame(masm, t0); | |
1126 __ Jump(ra); | |
1127 | |
1128 // Load debug copy of the bytecode array. | |
1129 __ bind(&load_debug_bytecode_array); | |
1130 __ ld(kInterpreterBytecodeArrayRegister, | |
1131 FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex)); | |
1132 __ Branch(&bytecode_array_loaded); | |
1133 | |
1134 // If the bytecode array is no longer present, then the underlying function | |
1135 // has been switched to a different kind of code and we heal the closure by | |
1136 // switching the code entry field over to the new code object as well. | |
1137 __ bind(&bytecode_array_not_present); | |
1138 __ LeaveFrame(StackFrame::JAVA_SCRIPT); | |
1139 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1140 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kCodeOffset)); | |
1141 __ Daddu(a4, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1142 __ sd(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | |
1143 __ RecordWriteCodeEntryField(a1, a4, a5); | |
1144 __ Jump(a4); | |
1145 } | |
1146 | |
1147 void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) { | |
1148 // Save the function and context for call to CompileBaseline. | |
1149 __ ld(a1, MemOperand(fp, StandardFrameConstants::kFunctionOffset)); | |
1150 __ ld(kContextRegister, | |
1151 MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
1152 | |
1153 // Leave the frame before recompiling for baseline so that we don't count as | |
1154 // an activation on the stack. | |
1155 LeaveInterpreterFrame(masm, t0); | |
1156 | |
1157 { | |
1158 FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
1159 // Push return value. | |
1160 __ push(v0); | |
1161 | |
1162 // Push function as argument and compile for baseline. | |
1163 __ push(a1); | |
1164 __ CallRuntime(Runtime::kCompileBaseline); | |
1165 | |
1166 // Restore return value. | |
1167 __ pop(v0); | |
1168 } | |
1169 __ Jump(ra); | |
1170 } | |
1171 | |
1172 // static | |
1173 void Builtins::Generate_InterpreterPushArgsAndCallImpl( | |
1174 MacroAssembler* masm, TailCallMode tail_call_mode, | |
1175 CallableType function_type) { | |
1176 // ----------- S t a t e ------------- | |
1177 // -- a0 : the number of arguments (not including the receiver) | |
1178 // -- a2 : the address of the first argument to be pushed. Subsequent | |
1179 // arguments should be consecutive above this, in the same order as | |
1180 // they are to be pushed onto the stack. | |
1181 // -- a1 : the target to call (can be any Object). | |
1182 // ----------------------------------- | |
1183 | |
1184 // Find the address of the last argument. | |
1185 __ Daddu(a3, a0, Operand(1)); // Add one for receiver. | |
1186 __ dsll(a3, a3, kPointerSizeLog2); | |
1187 __ Dsubu(a3, a2, Operand(a3)); | |
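// a3 now points one word below the last value to push (argc arguments plus | |
// the receiver); the loop below walks a2 down to it. | |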
1188 | |
1189 // Push the arguments. | |
1190 Label loop_header, loop_check; | |
1191 __ Branch(&loop_check); | |
1192 __ bind(&loop_header); | |
1193 __ ld(t0, MemOperand(a2)); | |
1194 __ Daddu(a2, a2, Operand(-kPointerSize)); | |
1195 __ push(t0); | |
1196 __ bind(&loop_check); | |
1197 __ Branch(&loop_header, gt, a2, Operand(a3)); | |
1198 | |
1199 // Call the target. | |
1200 if (function_type == CallableType::kJSFunction) { | |
1201 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, | |
1202 tail_call_mode), | |
1203 RelocInfo::CODE_TARGET); | |
1204 } else { | |
1205 DCHECK_EQ(function_type, CallableType::kAny); | |
1206 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, | |
1207 tail_call_mode), | |
1208 RelocInfo::CODE_TARGET); | |
1209 } | |
1210 } | |
1211 | |
1212 // static | |
1213 void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) { | |
1214 // ----------- S t a t e ------------- | |
1215 // -- a0 : argument count (not including receiver) | |
1216 // -- a3 : new target | |
1217 // -- a1 : constructor to call | |
1218 // -- a2 : address of the first argument | |
1219 // ----------------------------------- | |
1220 | |
1221 // Find the address of the last argument. | |
1222 __ dsll(t0, a0, kPointerSizeLog2); | |
1223 __ Dsubu(t0, a2, Operand(t0)); | |
1224 | |
1225 // Push a slot for the receiver. | |
1226 __ push(zero_reg); | |
1227 | |
1228 // Push the arguments. | |
1229 Label loop_header, loop_check; | |
1230 __ Branch(&loop_check); | |
1231 __ bind(&loop_header); | |
1232 __ ld(t1, MemOperand(a2)); | |
1233 __ Daddu(a2, a2, Operand(-kPointerSize)); | |
1234 __ push(t1); | |
1235 __ bind(&loop_check); | |
1236 __ Branch(&loop_header, gt, a2, Operand(t0)); | |
1237 | |
1238 // Call the constructor with a0, a1, and a3 unmodified. | |
1239 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | |
1240 } | |
1241 | |
1242 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) { | |
1243 // Set the return address to the correct point in the interpreter entry | |
1244 // trampoline. | |
1245 Smi* interpreter_entry_return_pc_offset( | |
1246 masm->isolate()->heap()->interpreter_entry_return_pc_offset()); | |
1247 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0)); | |
1248 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline())); | |
1249 __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() + | |
1250 Code::kHeaderSize - kHeapObjectTag)); | |
1251 | |
1252 // Initialize the dispatch table register. | |
1253 __ li(kInterpreterDispatchTableRegister, | |
1254 Operand(ExternalReference::interpreter_dispatch_table_address( | |
1255 masm->isolate()))); | |
1256 | |
1257 // Get the bytecode array pointer from the frame. | |
1258 __ ld(kInterpreterBytecodeArrayRegister, | |
1259 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); | |
1260 | |
1261 if (FLAG_debug_code) { | |
1262 // Check function data field is actually a BytecodeArray object. | |
1263 __ SmiTst(kInterpreterBytecodeArrayRegister, at); | |
1264 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, | |
1265 Operand(zero_reg)); | |
1266 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); | |
1267 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, | |
1268 Operand(BYTECODE_ARRAY_TYPE)); | |
1269 } | |
1270 | |
1271 // Get the target bytecode offset from the frame. | |
1272 __ ld(kInterpreterBytecodeOffsetRegister, | |
1273 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); | |
1274 __ SmiUntag(kInterpreterBytecodeOffsetRegister); | |
1275 | |
1276 // Dispatch to the target bytecode. | |
1277 __ Daddu(a1, kInterpreterBytecodeArrayRegister, | |
1278 kInterpreterBytecodeOffsetRegister); | |
1279 __ lbu(a1, MemOperand(a1)); | |
1280 __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2); | |
1281 __ ld(a1, MemOperand(a1)); | |
1282 __ Jump(a1); | |
1283 } | |
1284 | |
1285 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | |
1286 // ----------- S t a t e ------------- | |
1287 // -- a0 : argument count (preserved for callee) | |
1288 // -- a3 : new target (preserved for callee) | |
1289 // -- a1 : target function (preserved for callee) | |
1290 // ----------------------------------- | |
1291 // First look up code; maybe we don't need to compile! | |
1292 Label gotta_call_runtime, gotta_call_runtime_no_stack; | |
1293 Label maybe_call_runtime; | |
1294 Label try_shared; | |
1295 Label loop_top, loop_bottom; | |
1296 | |
1297 Register argument_count = a0; | |
1298 Register closure = a1; | |
1299 Register new_target = a3; | |
1300 __ push(argument_count); | |
1301 __ push(new_target); | |
1302 __ push(closure); | |
1303 | |
1304 Register map = a0; | |
1305 Register index = a2; | |
1306 __ ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | |
1307 __ ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); | |
1308 __ ld(index, FieldMemOperand(map, FixedArray::kLengthOffset)); | |
1309 __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2))); | |
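// The search loop below steps kEntryLength entries at a time and stops at | |
// index 1, so a length below 2 means there are no context-dependent entries. | |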
1310 | |
1311 // Find literals. | |
1312 // a3 : native context | |
1313 // a2 : length / index | |
1314 // a0 : optimized code map | |
1315 // stack[0] : new target | |
1316 // stack[4] : closure | |
1317 Register native_context = a3; | |
1318 __ ld(native_context, NativeContextMemOperand()); | |
1319 | |
1320 __ bind(&loop_top); | |
1321 Register temp = a1; | |
1322 Register array_pointer = a5; | |
1323 | |
1324 // Does the native context match? | |
1325 __ SmiScale(at, index, kPointerSizeLog2); | |
1326 __ Daddu(array_pointer, map, Operand(at)); | |
1327 __ ld(temp, FieldMemOperand(array_pointer, | |
1328 SharedFunctionInfo::kOffsetToPreviousContext)); | |
1329 __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); | |
1330 __ Branch(&loop_bottom, ne, temp, Operand(native_context)); | |
1331 // OSR id set to none? | |
1332 __ ld(temp, FieldMemOperand(array_pointer, | |
1333 SharedFunctionInfo::kOffsetToPreviousOsrAstId)); | |
1334 const int bailout_id = BailoutId::None().ToInt(); | |
1335 __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id))); | |
1336 | |
1337 // Literals available? | |
1338 Label got_literals, maybe_cleared_weakcell; | |
1339 __ ld(temp, FieldMemOperand(array_pointer, | |
1340 SharedFunctionInfo::kOffsetToPreviousLiterals)); | |
1341 // temp contains either a WeakCell pointing to the literals array or the | |
1342 // literals array directly. | |
1343 __ ld(a4, FieldMemOperand(temp, WeakCell::kValueOffset)); | |
1344 __ JumpIfSmi(a4, &maybe_cleared_weakcell); | |
1345 // a4 is a pointer, therefore temp is a WeakCell pointing to a literals array. | |
1346 __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); | |
1347 __ jmp(&got_literals); | |
1348 | |
1349 // a4 is a smi. If it's 0, then we are looking at a cleared WeakCell | |
1350 // around the literals array, and we should visit the runtime. If it's > 0, | |
1351 // then temp already contains the literals array. | |
1352 __ bind(&maybe_cleared_weakcell); | |
1353 __ Branch(&gotta_call_runtime, eq, a4, Operand(Smi::FromInt(0))); | |
1354 | |
1355 // Save the literals in the closure. | |
1356 __ bind(&got_literals); | |
1357 __ ld(a4, MemOperand(sp, 0)); | |
1358 __ sd(temp, FieldMemOperand(a4, JSFunction::kLiteralsOffset)); | |
1359 __ push(index); | |
1360 __ RecordWriteField(a4, JSFunction::kLiteralsOffset, temp, index, | |
1361 kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
1362 OMIT_SMI_CHECK); | |
1363 __ pop(index); | |
1364 | |
1365 // Code available? | |
1366 Register entry = a4; | |
1367 __ ld(entry, | |
1368 FieldMemOperand(array_pointer, | |
1369 SharedFunctionInfo::kOffsetToPreviousCachedCode)); | |
1370 __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); | |
1371 __ JumpIfSmi(entry, &maybe_call_runtime); | |
1372 | |
1373 // Found literals and code. Get them into the closure and return. | |
1374 __ pop(closure); | |
1375 // Store code entry in the closure. | |
1376 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1377 | |
1378 Label install_optimized_code_and_tailcall; | |
1379 __ bind(&install_optimized_code_and_tailcall); | |
1380 __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); | |
1381 __ RecordWriteCodeEntryField(closure, entry, a5); | |
1382 | |
1383 // Link the closure into the optimized function list. | |
1384 // a4 : code entry | |
1385 // a3 : native context | |
1386 // a1 : closure | |
1387 __ ld(a5, | |
1388 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); | |
1389 __ sd(a5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset)); | |
1390 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, a5, a0, | |
1391 kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
1392 OMIT_SMI_CHECK); | |
1393 const int function_list_offset = | |
1394 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST); | |
1395 __ sd(closure, | |
1396 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); | |
1397 // Save closure before the write barrier. | |
1398 __ mov(a5, closure); | |
1399 __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0, | |
1400 kRAHasNotBeenSaved, kDontSaveFPRegs); | |
1401 __ mov(closure, a5); | |
1402 __ pop(new_target); | |
1403 __ pop(argument_count); | |
1404 __ Jump(entry); | |
1405 | |
1406 __ bind(&loop_bottom); | |
1407 __ Dsubu(index, index, | |
1408 Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength))); | |
1409 __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1))); | |
1410 | |
1411 // We found neither literals nor code. | |
1412 __ jmp(&gotta_call_runtime); | |
1413 | |
1414 __ bind(&maybe_call_runtime); | |
1415 __ pop(closure); | |
1416 | |
1417 // Last possibility. Check the context-free optimized code map entry. | |
1418 __ ld(entry, FieldMemOperand(map, FixedArray::kHeaderSize + | |
1419 SharedFunctionInfo::kSharedCodeIndex)); | |
1420 __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); | |
1421 __ JumpIfSmi(entry, &try_shared); | |
1422 | |
1423 // Store code entry in the closure. | |
1424 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1425 __ jmp(&install_optimized_code_and_tailcall); | |
1426 | |
1427 __ bind(&try_shared); | |
1428 __ pop(new_target); | |
1429 __ pop(argument_count); | |
1430 // Is the full code valid? | |
1431 __ ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | |
1432 __ ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); | |
1433 __ lw(a5, FieldMemOperand(entry, Code::kFlagsOffset)); | |
1434 __ And(a5, a5, Operand(Code::KindField::kMask)); | |
1435 __ dsrl(a5, a5, Code::KindField::kShift); | |
1436 __ Branch(&gotta_call_runtime_no_stack, eq, a5, Operand(Code::BUILTIN)); | |
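  // (Code of kind BUILTIN here means the shared function info still points at
  // the lazy-compile builtin, i.e. there is no real code to install yet.)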
1437 // Yes, install the full code. | |
1438 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1439 __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); | |
1440 __ RecordWriteCodeEntryField(closure, entry, a5); | |
1441 __ Jump(entry); | |
1442 | |
1443 __ bind(&gotta_call_runtime); | |
1444 __ pop(closure); | |
1445 __ pop(new_target); | |
1446 __ pop(argument_count); | |
1447 __ bind(&gotta_call_runtime_no_stack); | |
1448 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); | |
1449 } | |
1450 | |
1451 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) { | |
1452 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline); | |
1453 } | |
1454 | |
1455 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | |
1456 GenerateTailCallToReturnedCode(masm, | |
1457 Runtime::kCompileOptimized_NotConcurrent); | |
1458 } | |
1459 | |
1460 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | |
1461 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); | |
1462 } | |
1463 | |
1464 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { | |
1465 // ----------- S t a t e ------------- | |
1466 // -- a0 : argument count (preserved for callee) | |
1467 // -- a1 : new target (preserved for callee) | |
1468 // -- a3 : target function (preserved for callee) | |
1469 // ----------------------------------- | |
1470 Label failed; | |
1471 { | |
1472 FrameScope scope(masm, StackFrame::INTERNAL); | |
1473 // Push a copy of the target function and the new target. | |
1474 // Push function as parameter to the runtime call. | |
1475 __ SmiTag(a0); | |
1476 __ Push(a0, a1, a3, a1); | |
1477 | |
1478 // Copy arguments from caller (stdlib, foreign, heap). | |
1479 for (int i = 2; i >= 0; --i) { | |
1480 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + | |
1481 i * kPointerSize)); | |
1482 __ push(a3); | |
1483 } | |
    // Call the runtime; on success, unwind this frame and the parent frame.
1485 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); | |
1486 // A smi 0 is returned on failure, an object on success. | |
    __ JumpIfSmi(v0, &failed);
1488 scope.GenerateLeaveFrame(); | |
1489 __ Drop(4); | |
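    // Drop the receiver and the three module arguments (stdlib, foreign,
    // heap) of the original invocation.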
1490 __ Ret(); | |
1491 | |
1492 __ bind(&failed); | |
1493 // Restore target function and new target. | |
1494 __ Pop(a0, a1, a3); | |
1495 __ SmiUntag(a0); | |
1496 } | |
1497 // On failure, tail call back to regular js. | |
1498 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); | |
1499 } | |
1500 | |
1501 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | |
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection, which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.
1507 | |
1508 // Set a0 to point to the head of the PlatformCodeAge sequence. | |
1509 __ Dsubu(a0, a0, | |
1510 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); | |
1511 | |
1512 // The following registers must be saved and restored when calling through to | |
1513 // the runtime: | |
1514 // a0 - contains return address (beginning of patch sequence) | |
1515 // a1 - isolate | |
1516 // a3 - new target | |
1517 RegList saved_regs = | |
1518 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit(); | |
1519 FrameScope scope(masm, StackFrame::MANUAL); | |
1520 __ MultiPush(saved_regs); | |
1521 __ PrepareCallCFunction(2, 0, a2); | |
1522 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
1523 __ CallCFunction( | |
1524 ExternalReference::get_make_code_young_function(masm->isolate()), 2); | |
1525 __ MultiPop(saved_regs); | |
1526 __ Jump(a0); | |
1527 } | |
1528 | |
1529 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ | |
1530 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ | |
1531 MacroAssembler* masm) { \ | |
1532 GenerateMakeCodeYoungAgainCommon(masm); \ | |
1533 } \ | |
1534 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ | |
1535 MacroAssembler* masm) { \ | |
1536 GenerateMakeCodeYoungAgainCommon(masm); \ | |
1537 } | |
1538 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) | |
1539 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR | |
1540 | |
1541 | |
1542 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { | |
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection, which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.
1547 | |
1548 // Set a0 to point to the head of the PlatformCodeAge sequence. | |
1549 __ Dsubu(a0, a0, | |
1550 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); | |
1551 | |
1552 // The following registers must be saved and restored when calling through to | |
1553 // the runtime: | |
1554 // a0 - contains return address (beginning of patch sequence) | |
1555 // a1 - isolate | |
1556 // a3 - new target | |
1557 RegList saved_regs = | |
1558 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit(); | |
1559 FrameScope scope(masm, StackFrame::MANUAL); | |
1560 __ MultiPush(saved_regs); | |
1561 __ PrepareCallCFunction(2, 0, a2); | |
1562 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
1563 __ CallCFunction( | |
1564 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), | |
1565 2); | |
1566 __ MultiPop(saved_regs); | |
1567 | |
1568 // Perform prologue operations usually performed by the young code stub. | |
1569 __ PushStandardFrame(a1); | |
1570 | |
  // Jump to the point after the code-age stub.
  __ Daddu(a0, a0, Operand(kNoCodeAgeSequenceLength));
1573 __ Jump(a0); | |
1574 } | |
1575 | |
1576 | |
1577 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | |
1578 GenerateMakeCodeYoungAgainCommon(masm); | |
1579 } | |
1580 | |
1581 | |
1582 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { | |
1583 Generate_MarkCodeAsExecutedOnce(masm); | |
1584 } | |
1585 | |
1586 | |
1587 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | |
1588 SaveFPRegsMode save_doubles) { | |
1589 { | |
1590 FrameScope scope(masm, StackFrame::INTERNAL); | |
1591 | |
    // Preserve registers across the notification; this is important for
    // compiled stubs that tail-call the runtime on deopts, passing their
    // parameters in registers.
1595 __ MultiPush(kJSCallerSaved | kCalleeSaved); | |
1596 // Pass the function and deoptimization type to the runtime system. | |
1597 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); | |
1598 __ MultiPop(kJSCallerSaved | kCalleeSaved); | |
1599 } | |
1600 | |
  __ Daddu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);                              // Jump to the miss handler.
1603 } | |
1604 | |
1605 | |
1606 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | |
1607 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | |
1608 } | |
1609 | |
1610 | |
1611 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | |
1612 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | |
1613 } | |
1614 | |
1615 | |
1616 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | |
1617 Deoptimizer::BailoutType type) { | |
1618 { | |
1619 FrameScope scope(masm, StackFrame::INTERNAL); | |
1620 // Pass the function and deoptimization type to the runtime system. | |
1621 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); | |
1622 __ push(a0); | |
1623 __ CallRuntime(Runtime::kNotifyDeoptimized); | |
1624 } | |
1625 | |
1626 // Get the full codegen state from the stack and untag it -> a6. | |
1627 __ ld(a6, MemOperand(sp, 0 * kPointerSize)); | |
1628 __ SmiUntag(a6); | |
1629 // Switch on the state. | |
1630 Label with_tos_register, unknown_state; | |
1631 __ Branch( | |
1632 &with_tos_register, ne, a6, | |
1633 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS))); | |
1634 __ Ret(USE_DELAY_SLOT); | |
  // Safe to fill the delay slot: Daddu emits exactly one instruction.
1636 __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state. | |
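  // (The Daddu is emitted into the Ret's branch delay slot, so the stack is
  // adjusted before control actually returns; a standard MIPS idiom.)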
1637 | |
1638 __ bind(&with_tos_register); | |
1639 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code()); | |
1640 __ ld(v0, MemOperand(sp, 1 * kPointerSize)); | |
1641 __ Branch( | |
1642 &unknown_state, ne, a6, | |
1643 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::TOS_REGISTER))); | |
1644 | |
1645 __ Ret(USE_DELAY_SLOT); | |
  // Safe to fill the delay slot: Daddu emits exactly one instruction.
1647 __ Daddu(sp, sp, Operand(2 * kPointerSize)); // Remove state. | |
1648 | |
1649 __ bind(&unknown_state); | |
1650 __ stop("no cases left"); | |
1651 } | |
1652 | |
1653 | |
1654 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | |
1655 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | |
1656 } | |
1657 | |
1658 | |
1659 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { | |
1660 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | |
1661 } | |
1662 | |
1663 | |
1664 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | |
1665 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | |
1666 } | |
1667 | |
1668 | |
1669 // Clobbers {t2, t3, a4, a5}. | |
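// Roughly, in pseudocode:
//   if (info.signature == undefined) return;  // no signature, any receiver
//   for (map = receiver.map;;) {
//     type = map.constructor.shared.function_data;
//     while (type is FunctionTemplateInfo) {
//       if (type == info.signature) return;  // compatible receiver
//       type = type.parent_template;
//     }
//     if (!map.has_hidden_prototype) fail;
//     receiver = map.prototype; map = receiver.map;
//   }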
1670 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, | |
1671 Register function_template_info, | |
1672 Label* receiver_check_failed) { | |
1673 Register signature = t2; | |
1674 Register map = t3; | |
1675 Register constructor = a4; | |
1676 Register scratch = a5; | |
1677 | |
1678 // If there is no signature, return the holder. | |
1679 __ ld(signature, FieldMemOperand(function_template_info, | |
1680 FunctionTemplateInfo::kSignatureOffset)); | |
1681 Label receiver_check_passed; | |
1682 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, | |
1683 &receiver_check_passed); | |
1684 | |
1685 // Walk the prototype chain. | |
1686 __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
1687 Label prototype_loop_start; | |
1688 __ bind(&prototype_loop_start); | |
1689 | |
1690 // Get the constructor, if any. | |
1691 __ GetMapConstructor(constructor, map, scratch, scratch); | |
1692 Label next_prototype; | |
1693 __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE)); | |
1694 Register type = constructor; | |
1695 __ ld(type, | |
1696 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset)); | |
1697 __ ld(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); | |
1698 | |
1699 // Loop through the chain of inheriting function templates. | |
1700 Label function_template_loop; | |
1701 __ bind(&function_template_loop); | |
1702 | |
1703 // If the signatures match, we have a compatible receiver. | |
1704 __ Branch(&receiver_check_passed, eq, signature, Operand(type), | |
1705 USE_DELAY_SLOT); | |
1706 | |
1707 // If the current type is not a FunctionTemplateInfo, load the next prototype | |
1708 // in the chain. | |
1709 __ JumpIfSmi(type, &next_prototype); | |
1710 __ GetObjectType(type, scratch, scratch); | |
1711 __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE)); | |
1712 | |
1713 // Otherwise load the parent function template and iterate. | |
1714 __ ld(type, | |
1715 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset)); | |
1716 __ Branch(&function_template_loop); | |
1717 | |
1718 // Load the next prototype. | |
1719 __ bind(&next_prototype); | |
1720 __ lwu(scratch, FieldMemOperand(map, Map::kBitField3Offset)); | |
1721 __ DecodeField<Map::HasHiddenPrototype>(scratch); | |
1722 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg)); | |
1723 | |
1724 __ ld(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); | |
1725 __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
1726 // Iterate. | |
1727 __ Branch(&prototype_loop_start); | |
1728 | |
1729 __ bind(&receiver_check_passed); | |
1730 } | |
1731 | |
1732 | |
1733 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { | |
1734 // ----------- S t a t e ------------- | |
1735 // -- a0 : number of arguments excluding receiver | |
1736 // -- a1 : callee | |
1737 // -- ra : return address | |
1738 // -- sp[0] : last argument | |
1739 // -- ... | |
1740 // -- sp[8 * (argc - 1)] : first argument | |
1741 // -- sp[8 * argc] : receiver | |
1742 // ----------------------------------- | |
1743 | |
1744 // Load the FunctionTemplateInfo. | |
1745 __ ld(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
1746 __ ld(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset)); | |
1747 | |
  // Do the compatible receiver check.
1749 Label receiver_check_failed; | |
1750 __ Dlsa(t8, sp, a0, kPointerSizeLog2); | |
1751 __ ld(t0, MemOperand(t8)); | |
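  // t8 = sp + argc * kPointerSize is the address of the receiver slot, so t0
  // now holds the receiver.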
1752 CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed); | |
1753 | |
1754 // Get the callback offset from the FunctionTemplateInfo, and jump to the | |
1755 // beginning of the code. | |
1756 __ ld(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset)); | |
1757 __ ld(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset)); | |
1758 __ Daddu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
1759 __ Jump(t2); | |
1760 | |
1761 // Compatible receiver check failed: throw an Illegal Invocation exception. | |
1762 __ bind(&receiver_check_failed); | |
  // Drop the arguments (including the receiver).
1764 __ Daddu(t8, t8, Operand(kPointerSize)); | |
1765 __ daddu(sp, t8, zero_reg); | |
1766 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); | |
1767 } | |
1768 | |
1769 | |
1770 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | |
1771 // Lookup the function in the JavaScript frame. | |
1772 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1773 { | |
1774 FrameScope scope(masm, StackFrame::INTERNAL); | |
1775 // Pass function as argument. | |
1776 __ push(a0); | |
1777 __ CallRuntime(Runtime::kCompileForOnStackReplacement); | |
1778 } | |
1779 | |
1780 // If the code object is null, just return to the unoptimized code. | |
1781 __ Ret(eq, v0, Operand(Smi::FromInt(0))); | |
1782 | |
1783 // Load deoptimization data from the code object. | |
1784 // <deopt_data> = <code>[#deoptimization_data_offset] | |
1785 __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); | |
1786 | |
1787 // Load the OSR entrypoint offset from the deoptimization data. | |
1788 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] | |
1789 __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt( | |
1790 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag)); | |
1791 __ SmiUntag(a1); | |
1792 | |
1793 // Compute the target address = code_obj + header_size + osr_offset | |
1794 // <entry_addr> = <code_obj> + #header_size + <osr_offset> | |
1795 __ daddu(v0, v0, a1); | |
1796 __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); | |
1797 | |
1798 // And "return" to the OSR entry point of the function. | |
1799 __ Ret(); | |
1800 } | |
1801 | |
1802 | |
1803 // static | |
1804 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, | |
1805 int field_index) { | |
1806 // ----------- S t a t e ------------- | |
1807 // -- a0 : number of arguments | |
1808 // -- a1 : function | |
1809 // -- cp : context | |
1810 // -- sp[0] : receiver | |
1811 // ----------------------------------- | |
1812 | |
1813 // 1. Pop receiver into a0 and check that it's actually a JSDate object. | |
1814 Label receiver_not_date; | |
1815 { | |
1816 __ Pop(a0); | |
1817 __ JumpIfSmi(a0, &receiver_not_date); | |
1818 __ GetObjectType(a0, t0, t0); | |
1819 __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE)); | |
1820 } | |
1821 | |
1822 // 2. Load the specified date field, falling back to the runtime as necessary. | |
1823 if (field_index == JSDate::kDateValue) { | |
1824 __ Ret(USE_DELAY_SLOT); | |
1825 __ ld(v0, FieldMemOperand(a0, JSDate::kValueOffset)); // In delay slot. | |
1826 } else { | |
1827 if (field_index < JSDate::kFirstUncachedField) { | |
1828 Label stamp_mismatch; | |
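      // Cached date fields are only valid while the isolate-wide date cache
      // stamp matches the stamp stored in the JSDate; on a mismatch we fall
      // through to the C++ helper below.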
1829 __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate()))); | |
1830 __ ld(a1, MemOperand(a1)); | |
1831 __ ld(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset)); | |
1832 __ Branch(&stamp_mismatch, ne, t0, Operand(a1)); | |
1833 __ Ret(USE_DELAY_SLOT); | |
1834 __ ld(v0, FieldMemOperand( | |
1835 a0, JSDate::kValueOffset + | |
1836 field_index * kPointerSize)); // In delay slot. | |
1837 __ bind(&stamp_mismatch); | |
1838 } | |
1839 FrameScope scope(masm, StackFrame::INTERNAL); | |
1840 __ PrepareCallCFunction(2, t0); | |
1841 __ li(a1, Operand(Smi::FromInt(field_index))); | |
1842 __ CallCFunction( | |
1843 ExternalReference::get_date_field_function(masm->isolate()), 2); | |
1844 } | |
1845 __ Ret(); | |
1846 | |
1847 // 3. Raise a TypeError if the receiver is not a date. | |
1848 __ bind(&receiver_not_date); | |
1849 { | |
1850 FrameScope scope(masm, StackFrame::MANUAL); | |
1851 __ Push(a0); | |
1852 __ Move(a0, Smi::FromInt(0)); | |
1853 __ EnterBuiltinFrame(cp, a1, a0); | |
1854 __ CallRuntime(Runtime::kThrowNotDateError); | |
1855 } | |
1856 } | |
1857 | |
1858 // static | |
1859 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { | |
1860 // ----------- S t a t e ------------- | |
1861 // -- a0 : argc | |
1862 // -- sp[0] : argArray | |
1863 // -- sp[4] : thisArg | |
1864 // -- sp[8] : receiver | |
1865 // ----------------------------------- | |
1866 | |
1867 // 1. Load receiver into a1, argArray into a0 (if present), remove all | |
1868 // arguments from the stack (including the receiver), and push thisArg (if | |
1869 // present) instead. | |
1870 { | |
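    // Scan downwards from the receiver slot; each bounds check against sp
    // bails out once the actual arguments are exhausted, leaving the
    // undefined defaults loaded below in place.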
1871 Label no_arg; | |
1872 Register scratch = a4; | |
1873 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
1874 __ mov(a3, a2); | |
    // Dlsa() cannot be used here because the scratch value is needed later.
1876 __ dsll(scratch, a0, kPointerSizeLog2); | |
1877 __ Daddu(a0, sp, Operand(scratch)); | |
1878 __ ld(a1, MemOperand(a0)); // receiver | |
1879 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
1880 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
1881 __ ld(a2, MemOperand(a0)); // thisArg | |
1882 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
1883 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
1884 __ ld(a3, MemOperand(a0)); // argArray | |
1885 __ bind(&no_arg); | |
1886 __ Daddu(sp, sp, Operand(scratch)); | |
1887 __ sd(a2, MemOperand(sp)); | |
1888 __ mov(a0, a3); | |
1889 } | |
1890 | |
1891 // ----------- S t a t e ------------- | |
1892 // -- a0 : argArray | |
1893 // -- a1 : receiver | |
1894 // -- sp[0] : thisArg | |
1895 // ----------------------------------- | |
1896 | |
1897 // 2. Make sure the receiver is actually callable. | |
1898 Label receiver_not_callable; | |
1899 __ JumpIfSmi(a1, &receiver_not_callable); | |
1900 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); | |
1901 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); | |
1902 __ And(a4, a4, Operand(1 << Map::kIsCallable)); | |
1903 __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg)); | |
1904 | |
1905 // 3. Tail call with no arguments if argArray is null or undefined. | |
1906 Label no_arguments; | |
1907 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments); | |
1908 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments); | |
1909 | |
1910 // 4a. Apply the receiver to the given argArray (passing undefined for | |
1911 // new.target). | |
1912 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); | |
1913 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
1914 | |
1915 // 4b. The argArray is either null or undefined, so we tail call without any | |
1916 // arguments to the receiver. | |
1917 __ bind(&no_arguments); | |
1918 { | |
1919 __ mov(a0, zero_reg); | |
1920 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
1921 } | |
1922 | |
1923 // 4c. The receiver is not callable, throw an appropriate TypeError. | |
1924 __ bind(&receiver_not_callable); | |
1925 { | |
1926 __ sd(a1, MemOperand(sp)); | |
1927 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | |
1928 } | |
1929 } | |
1930 | |
1931 | |
1932 // static | |
1933 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { | |
1934 // 1. Make sure we have at least one argument. | |
1935 // a0: actual number of arguments | |
1936 { | |
1937 Label done; | |
1938 __ Branch(&done, ne, a0, Operand(zero_reg)); | |
1939 __ PushRoot(Heap::kUndefinedValueRootIndex); | |
1940 __ Daddu(a0, a0, Operand(1)); | |
1941 __ bind(&done); | |
1942 } | |
1943 | |
1944 // 2. Get the function to call (passed as receiver) from the stack. | |
1945 // a0: actual number of arguments | |
1946 __ Dlsa(at, sp, a0, kPointerSizeLog2); | |
1947 __ ld(a1, MemOperand(at)); | |
1948 | |
1949 // 3. Shift arguments and return address one slot down on the stack | |
1950 // (overwriting the original receiver). Adjust argument count to make | |
1951 // the original first argument the new receiver. | |
1952 // a0: actual number of arguments | |
1953 // a1: function | |
1954 { | |
1955 Label loop; | |
1956 // Calculate the copy start address (destination). Copy end address is sp. | |
1957 __ Dlsa(a2, sp, a0, kPointerSizeLog2); | |
1958 | |
1959 __ bind(&loop); | |
1960 __ ld(at, MemOperand(a2, -kPointerSize)); | |
1961 __ sd(at, MemOperand(a2)); | |
1962 __ Dsubu(a2, a2, Operand(kPointerSize)); | |
1963 __ Branch(&loop, ne, a2, Operand(sp)); | |
1964 // Adjust the actual number of arguments and remove the top element | |
1965 // (which is a copy of the last argument). | |
1966 __ Dsubu(a0, a0, Operand(1)); | |
1967 __ Pop(); | |
1968 } | |
1969 | |
1970 // 4. Call the callable. | |
1971 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
1972 } | |
1973 | |
1974 | |
1975 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | |
1976 // ----------- S t a t e ------------- | |
1977 // -- a0 : argc | |
1978 // -- sp[0] : argumentsList | |
1979 // -- sp[4] : thisArgument | |
1980 // -- sp[8] : target | |
1981 // -- sp[12] : receiver | |
1982 // ----------------------------------- | |
1983 | |
1984 // 1. Load target into a1 (if present), argumentsList into a0 (if present), | |
1985 // remove all arguments from the stack (including the receiver), and push | |
1986 // thisArgument (if present) instead. | |
1987 { | |
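    // Same downward argument scan as in Generate_FunctionPrototypeApply;
    // target, thisArgument and argumentsList all default to undefined.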
1988 Label no_arg; | |
1989 Register scratch = a4; | |
1990 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | |
1991 __ mov(a2, a1); | |
1992 __ mov(a3, a1); | |
1993 __ dsll(scratch, a0, kPointerSizeLog2); | |
1994 __ mov(a0, scratch); | |
1995 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
1996 __ Branch(&no_arg, lt, a0, Operand(zero_reg)); | |
1997 __ Daddu(a0, sp, Operand(a0)); | |
1998 __ ld(a1, MemOperand(a0)); // target | |
1999 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
2000 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2001 __ ld(a2, MemOperand(a0)); // thisArgument | |
2002 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
2003 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2004 __ ld(a3, MemOperand(a0)); // argumentsList | |
2005 __ bind(&no_arg); | |
2006 __ Daddu(sp, sp, Operand(scratch)); | |
2007 __ sd(a2, MemOperand(sp)); | |
2008 __ mov(a0, a3); | |
2009 } | |
2010 | |
2011 // ----------- S t a t e ------------- | |
2012 // -- a0 : argumentsList | |
2013 // -- a1 : target | |
2014 // -- sp[0] : thisArgument | |
2015 // ----------------------------------- | |
2016 | |
2017 // 2. Make sure the target is actually callable. | |
2018 Label target_not_callable; | |
2019 __ JumpIfSmi(a1, &target_not_callable); | |
2020 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); | |
2021 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); | |
2022 __ And(a4, a4, Operand(1 << Map::kIsCallable)); | |
2023 __ Branch(&target_not_callable, eq, a4, Operand(zero_reg)); | |
2024 | |
2025 // 3a. Apply the target to the given argumentsList (passing undefined for | |
2026 // new.target). | |
2027 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); | |
2028 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
2029 | |
2030 // 3b. The target is not callable, throw an appropriate TypeError. | |
2031 __ bind(&target_not_callable); | |
2032 { | |
2033 __ sd(a1, MemOperand(sp)); | |
2034 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | |
2035 } | |
2036 } | |
2037 | |
2038 | |
2039 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | |
2040 // ----------- S t a t e ------------- | |
2041 // -- a0 : argc | |
2042 // -- sp[0] : new.target (optional) | |
2043 // -- sp[4] : argumentsList | |
2044 // -- sp[8] : target | |
2045 // -- sp[12] : receiver | |
2046 // ----------------------------------- | |
2047 | |
  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and leave undefined as
  // the receiver on the stack.
2052 { | |
2053 Label no_arg; | |
2054 Register scratch = a4; | |
2055 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | |
2056 __ mov(a2, a1); | |
    // Dlsa() cannot be used here because the scratch value is needed later.
2058 __ dsll(scratch, a0, kPointerSizeLog2); | |
2059 __ Daddu(a0, sp, Operand(scratch)); | |
2060 __ sd(a2, MemOperand(a0)); // receiver | |
2061 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
2062 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2063 __ ld(a1, MemOperand(a0)); // target | |
2064 __ mov(a3, a1); // new.target defaults to target | |
2065 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
2066 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2067 __ ld(a2, MemOperand(a0)); // argumentsList | |
2068 __ Dsubu(a0, a0, Operand(kPointerSize)); | |
2069 __ Branch(&no_arg, lt, a0, Operand(sp)); | |
2070 __ ld(a3, MemOperand(a0)); // new.target | |
2071 __ bind(&no_arg); | |
2072 __ Daddu(sp, sp, Operand(scratch)); | |
2073 __ mov(a0, a2); | |
2074 } | |
2075 | |
2076 // ----------- S t a t e ------------- | |
2077 // -- a0 : argumentsList | |
2078 // -- a3 : new.target | |
2079 // -- a1 : target | |
2080 // -- sp[0] : receiver (undefined) | |
2081 // ----------------------------------- | |
2082 | |
2083 // 2. Make sure the target is actually a constructor. | |
2084 Label target_not_constructor; | |
2085 __ JumpIfSmi(a1, &target_not_constructor); | |
2086 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); | |
2087 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); | |
2088 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); | |
2089 __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg)); | |
2090 | |
  // 3. Make sure the new.target is actually a constructor.
2092 Label new_target_not_constructor; | |
2093 __ JumpIfSmi(a3, &new_target_not_constructor); | |
2094 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset)); | |
2095 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); | |
2096 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); | |
2097 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg)); | |
2098 | |
2099 // 4a. Construct the target with the given new.target and argumentsList. | |
2100 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
2101 | |
2102 // 4b. The target is not a constructor, throw an appropriate TypeError. | |
2103 __ bind(&target_not_constructor); | |
2104 { | |
2105 __ sd(a1, MemOperand(sp)); | |
2106 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | |
2107 } | |
2108 | |
2109 // 4c. The new.target is not a constructor, throw an appropriate TypeError. | |
2110 __ bind(&new_target_not_constructor); | |
2111 { | |
2112 __ sd(a3, MemOperand(sp)); | |
2113 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | |
2114 } | |
2115 } | |
2116 | |
2117 | |
2118 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | |
2119 Label* stack_overflow) { | |
2120 // ----------- S t a t e ------------- | |
2121 // -- a0 : actual number of arguments | |
2122 // -- a1 : function (passed through to callee) | |
2123 // -- a2 : expected number of arguments | |
2124 // -- a3 : new target (passed through to callee) | |
2125 // ----------------------------------- | |
2126 // Check the stack for overflow. We are not trying to catch | |
2127 // interruptions (e.g. debug break and preemption) here, so the "real stack | |
2128 // limit" is checked. | |
2129 __ LoadRoot(a5, Heap::kRealStackLimitRootIndex); | |
  // Make a5 the space we have left. The stack might already be overflowed
  // here, which will cause a5 to become negative.
2132 __ dsubu(a5, sp, a5); | |
2133 // Check if the arguments will overflow the stack. | |
2134 __ dsll(at, a2, kPointerSizeLog2); | |
2135 // Signed comparison. | |
2136 __ Branch(stack_overflow, le, a5, Operand(at)); | |
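  // I.e. report overflow unless (sp - real stack limit) exceeds
  // expected_argc * kPointerSize.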
2137 } | |
2138 | |
2139 | |
2140 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | |
  // Smi-tag the argument count; 64-bit smis carry their value in the upper
  // 32 bits, so this shifts by 32 rather than by kSmiTagSize.
  __ dsll32(a0, a0, 0);
2143 __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
2144 __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit()); | |
2145 __ Daddu(fp, sp, | |
2146 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); | |
2147 } | |
2148 | |
2149 | |
2150 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | |
2151 // ----------- S t a t e ------------- | |
2152 // -- v0 : result being passed through | |
2153 // ----------------------------------- | |
2154 // Get the number of arguments passed (as a smi), tear down the frame and | |
2155 // then tear down the parameters. | |
2156 __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | |
2157 kPointerSize))); | |
2158 __ mov(sp, fp); | |
2159 __ MultiPop(fp.bit() | ra.bit()); | |
2160 __ SmiScale(a4, a1, kPointerSizeLog2); | |
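  // SmiScale turns the smi-tagged count directly into a byte offset
  // (argc * kPointerSize) without a separate untag step.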
2161 __ Daddu(sp, sp, a4); | |
2162 // Adjust for the receiver. | |
2163 __ Daddu(sp, sp, Operand(kPointerSize)); | |
2164 } | |
2165 | |
2166 | |
2167 // static | |
2168 void Builtins::Generate_Apply(MacroAssembler* masm) { | |
2169 // ----------- S t a t e ------------- | |
2170 // -- a0 : argumentsList | |
2171 // -- a1 : target | |
2172 // -- a3 : new.target (checked to be constructor or undefined) | |
2173 // -- sp[0] : thisArgument | |
2174 // ----------------------------------- | |
2175 | |
2176 // Create the list of arguments from the array-like argumentsList. | |
2177 { | |
2178 Label create_arguments, create_array, create_runtime, done_create; | |
2179 __ JumpIfSmi(a0, &create_runtime); | |
2180 | |
2181 // Load the map of argumentsList into a2. | |
2182 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); | |
2183 | |
2184 // Load native context into a4. | |
2185 __ ld(a4, NativeContextMemOperand()); | |
2186 | |
2187 // Check if argumentsList is an (unmodified) arguments object. | |
2188 __ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); | |
2189 __ Branch(&create_arguments, eq, a2, Operand(at)); | |
2190 __ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX)); | |
2191 __ Branch(&create_arguments, eq, a2, Operand(at)); | |
2192 | |
2193 // Check if argumentsList is a fast JSArray. | |
    // a2 already holds the map of argumentsList, so read the instance type
    // straight from it.
    __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2196 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); | |
2197 | |
2198 // Ask the runtime to create the list (actually a FixedArray). | |
2199 __ bind(&create_runtime); | |
2200 { | |
2201 FrameScope scope(masm, StackFrame::INTERNAL); | |
2202 __ Push(a1, a3, a0); | |
2203 __ CallRuntime(Runtime::kCreateListFromArrayLike); | |
2204 __ mov(a0, v0); | |
2205 __ Pop(a1, a3); | |
2206 __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | |
2207 __ SmiUntag(a2); | |
2208 } | |
2209 __ Branch(&done_create); | |
2210 | |
2211 // Try to create the list from an arguments object. | |
2212 __ bind(&create_arguments); | |
2213 __ ld(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset)); | |
2214 __ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset)); | |
2215 __ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset)); | |
2216 __ Branch(&create_runtime, ne, a2, Operand(at)); | |
2217 __ SmiUntag(a2); | |
2218 __ mov(a0, a4); | |
2219 __ Branch(&done_create); | |
2220 | |
2221 // Try to create the list from a JSArray object. | |
2222 __ bind(&create_array); | |
    __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
2224 __ DecodeField<Map::ElementsKindBits>(a2); | |
2225 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
2226 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
2227 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
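    // Only packed SMI (0) and packed object (2) element kinds are handled
    // inline; holey arrays may contain the hole and are left to the runtime.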
2228 __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS)); | |
2229 __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS)); | |
2230 __ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset)); | |
2231 __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); | |
2232 __ SmiUntag(a2); | |
2233 | |
2234 __ bind(&done_create); | |
2235 } | |
2236 | |
2237 // Check for stack overflow. | |
2238 { | |
2239 // Check the stack for overflow. We are not trying to catch interruptions | |
2240 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2241 Label done; | |
2242 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex); | |
    // Make a4 the space we have left. The stack might already be overflowed
    // here, which will cause a4 to become negative.
2245 __ Dsubu(a4, sp, a4); | |
2246 // Check if the arguments will overflow the stack. | |
2247 __ dsll(at, a2, kPointerSizeLog2); | |
2248 __ Branch(&done, gt, a4, Operand(at)); // Signed comparison. | |
2249 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
2250 __ bind(&done); | |
2251 } | |
2252 | |
2253 // ----------- S t a t e ------------- | |
2254 // -- a1 : target | |
2255 // -- a0 : args (a FixedArray built from argumentsList) | |
2256 // -- a2 : len (number of elements to push from args) | |
2257 // -- a3 : new.target (checked to be constructor or undefined) | |
2258 // -- sp[0] : thisArgument | |
2259 // ----------------------------------- | |
2260 | |
2261 // Push arguments onto the stack (thisArgument is already on the stack). | |
2262 { | |
2263 __ mov(a4, zero_reg); | |
2264 Label done, loop; | |
2265 __ bind(&loop); | |
2266 __ Branch(&done, eq, a4, Operand(a2)); | |
2267 __ Dlsa(at, a0, a4, kPointerSizeLog2); | |
2268 __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize)); | |
2269 __ Push(at); | |
2270 __ Daddu(a4, a4, Operand(1)); | |
2271 __ Branch(&loop); | |
2272 __ bind(&done); | |
2273 __ Move(a0, a4); | |
2274 } | |
2275 | |
2276 // Dispatch to Call or Construct depending on whether new.target is undefined. | |
2277 { | |
2278 Label construct; | |
2279 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
2280 __ Branch(&construct, ne, a3, Operand(at)); | |
2281 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
2282 __ bind(&construct); | |
2283 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | |
2284 } | |
2285 } | |
2286 | |
2287 namespace { | |
2288 | |
// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
2292 // ES6 14.6.3. PrepareForTailCall | |
2293 // | |
2294 // Stack structure for the function g() tail calling f(): | |
2295 // | |
2296 // ------- Caller frame: ------- | |
2297 // | ... | |
2298 // | g()'s arg M | |
2299 // | ... | |
2300 // | g()'s arg 1 | |
2301 // | g()'s receiver arg | |
2302 // | g()'s caller pc | |
2303 // ------- g()'s frame: ------- | |
2304 // | g()'s caller fp <- fp | |
2305 // | g()'s context | |
2306 // | function pointer: g | |
2307 // | ------------------------- | |
2308 // | ... | |
2309 // | ... | |
2310 // | f()'s arg N | |
2311 // | ... | |
2312 // | f()'s arg 1 | |
2313 // | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!) | |
2314 // ---------------------- | |
2315 // | |
2316 void PrepareForTailCall(MacroAssembler* masm, Register args_reg, | |
2317 Register scratch1, Register scratch2, | |
2318 Register scratch3) { | |
2319 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | |
2320 Comment cmnt(masm, "[ PrepareForTailCall"); | |
2321 | |
2322 // Prepare for tail call only if ES2015 tail call elimination is enabled. | |
2323 Label done; | |
2324 ExternalReference is_tail_call_elimination_enabled = | |
2325 ExternalReference::is_tail_call_elimination_enabled_address( | |
2326 masm->isolate()); | |
2327 __ li(at, Operand(is_tail_call_elimination_enabled)); | |
2328 __ lb(scratch1, MemOperand(at)); | |
2329 __ Branch(&done, eq, scratch1, Operand(zero_reg)); | |
2330 | |
2331 // Drop possible interpreter handler/stub frame. | |
2332 { | |
2333 Label no_interpreter_frame; | |
2334 __ ld(scratch3, | |
2335 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset)); | |
2336 __ Branch(&no_interpreter_frame, ne, scratch3, | |
2337 Operand(Smi::FromInt(StackFrame::STUB))); | |
2338 __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
2339 __ bind(&no_interpreter_frame); | |
2340 } | |
2341 | |
2342 // Check if next frame is an arguments adaptor frame. | |
2343 Register caller_args_count_reg = scratch1; | |
2344 Label no_arguments_adaptor, formal_parameter_count_loaded; | |
2345 __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
2346 __ ld(scratch3, | |
2347 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset)); | |
2348 __ Branch(&no_arguments_adaptor, ne, scratch3, | |
2349 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
2350 | |
2351 // Drop current frame and load arguments count from arguments adaptor frame. | |
2352 __ mov(fp, scratch2); | |
2353 __ ld(caller_args_count_reg, | |
2354 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
2355 __ SmiUntag(caller_args_count_reg); | |
2356 __ Branch(&formal_parameter_count_loaded); | |
2357 | |
2358 __ bind(&no_arguments_adaptor); | |
  // Load the caller's formal parameter count.
2360 __ ld(scratch1, | |
2361 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); | |
2362 __ ld(scratch1, | |
2363 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); | |
2364 __ lw(caller_args_count_reg, | |
2365 FieldMemOperand(scratch1, | |
2366 SharedFunctionInfo::kFormalParameterCountOffset)); | |
2367 | |
2368 __ bind(&formal_parameter_count_loaded); | |
2369 | |
2370 ParameterCount callee_args_count(args_reg); | |
2371 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, | |
2372 scratch3); | |
2373 __ bind(&done); | |
2374 } | |
2375 } // namespace | |
2376 | |
2377 // static | |
2378 void Builtins::Generate_CallFunction(MacroAssembler* masm, | |
2379 ConvertReceiverMode mode, | |
2380 TailCallMode tail_call_mode) { | |
2381 // ----------- S t a t e ------------- | |
2382 // -- a0 : the number of arguments (not including the receiver) | |
2383 // -- a1 : the function to call (checked to be a JSFunction) | |
2384 // ----------------------------------- | |
2385 __ AssertFunction(a1); | |
2386 | |
2387 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | |
2388 // Check that function is not a "classConstructor". | |
2389 Label class_constructor; | |
2390 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
2391 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset)); | |
2392 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); | |
2393 __ Branch(&class_constructor, ne, at, Operand(zero_reg)); | |
2394 | |
2395 // Enter the context of the function; ToObject has to run in the function | |
2396 // context, and we also need to take the global proxy from the function | |
2397 // context in case of conversion. | |
2398 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == | |
2399 SharedFunctionInfo::kStrictModeByteOffset); | |
2400 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | |
2401 // We need to convert the receiver for non-native sloppy mode functions. | |
2402 Label done_convert; | |
2403 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); | |
2404 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | | |
2405 (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); | |
2406 __ Branch(&done_convert, ne, at, Operand(zero_reg)); | |
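  // The native and strict-mode bits live in the same byte (asserted above),
  // so a single load and mask tests both; either flag means the receiver is
  // used as-is, without conversion.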
2407 { | |
2408 // ----------- S t a t e ------------- | |
2409 // -- a0 : the number of arguments (not including the receiver) | |
2410 // -- a1 : the function to call (checked to be a JSFunction) | |
2411 // -- a2 : the shared function info. | |
2412 // -- cp : the function context. | |
2413 // ----------------------------------- | |
2414 | |
2415 if (mode == ConvertReceiverMode::kNullOrUndefined) { | |
2416 // Patch receiver to global proxy. | |
2417 __ LoadGlobalProxy(a3); | |
2418 } else { | |
2419 Label convert_to_object, convert_receiver; | |
2420 __ Dlsa(at, sp, a0, kPointerSizeLog2); | |
2421 __ ld(a3, MemOperand(at)); | |
2422 __ JumpIfSmi(a3, &convert_to_object); | |
2423 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | |
2424 __ GetObjectType(a3, a4, a4); | |
2425 __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE)); | |
2426 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { | |
2427 Label convert_global_proxy; | |
2428 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, | |
2429 &convert_global_proxy); | |
2430 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); | |
2431 __ bind(&convert_global_proxy); | |
2432 { | |
2433 // Patch receiver to global proxy. | |
2434 __ LoadGlobalProxy(a3); | |
2435 } | |
2436 __ Branch(&convert_receiver); | |
2437 } | |
2438 __ bind(&convert_to_object); | |
2439 { | |
2440 // Convert receiver using ToObject. | |
2441 // TODO(bmeurer): Inline the allocation here to avoid building the frame | |
2442 // in the fast case? (fall back to AllocateInNewSpace?) | |
2443 FrameScope scope(masm, StackFrame::INTERNAL); | |
2444 __ SmiTag(a0); | |
2445 __ Push(a0, a1); | |
2446 __ mov(a0, a3); | |
2447 ToObjectStub stub(masm->isolate()); | |
2448 __ CallStub(&stub); | |
2449 __ mov(a3, v0); | |
2450 __ Pop(a0, a1); | |
2451 __ SmiUntag(a0); | |
2452 } | |
2453 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
2454 __ bind(&convert_receiver); | |
2455 } | |
2456 __ Dlsa(at, sp, a0, kPointerSizeLog2); | |
2457 __ sd(a3, MemOperand(at)); | |
2458 } | |
2459 __ bind(&done_convert); | |
2460 | |
2461 // ----------- S t a t e ------------- | |
2462 // -- a0 : the number of arguments (not including the receiver) | |
2463 // -- a1 : the function to call (checked to be a JSFunction) | |
2464 // -- a2 : the shared function info. | |
2465 // -- cp : the function context. | |
2466 // ----------------------------------- | |
2467 | |
2468 if (tail_call_mode == TailCallMode::kAllow) { | |
2469 PrepareForTailCall(masm, a0, t0, t1, t2); | |
2470 } | |
2471 | |
2472 __ lw(a2, | |
2473 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); | |
2474 ParameterCount actual(a0); | |
2475 ParameterCount expected(a2); | |
2476 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, | |
2477 CheckDebugStepCallWrapper()); | |
2478 | |
2479 // The function is a "classConstructor", need to raise an exception. | |
2480 __ bind(&class_constructor); | |
2481 { | |
2482 FrameScope frame(masm, StackFrame::INTERNAL); | |
2483 __ Push(a1); | |
2484 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); | |
2485 } | |
2486 } | |
2487 | |
2488 | |
2489 // static | |
2490 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, | |
2491 TailCallMode tail_call_mode) { | |
2492 // ----------- S t a t e ------------- | |
2493 // -- a0 : the number of arguments (not including the receiver) | |
2494 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2495 // ----------------------------------- | |
2496 __ AssertBoundFunction(a1); | |
2497 | |
2498 if (tail_call_mode == TailCallMode::kAllow) { | |
2499 PrepareForTailCall(masm, a0, t0, t1, t2); | |
2500 } | |
2501 | |
2502 // Patch the receiver to [[BoundThis]]. | |
2503 { | |
2504 __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset)); | |
2505 __ Dlsa(a4, sp, a0, kPointerSizeLog2); | |
2506 __ sd(at, MemOperand(a4)); | |
2507 } | |
2508 | |
2509 // Load [[BoundArguments]] into a2 and length of that into a4. | |
2510 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); | |
2511 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2512 __ SmiUntag(a4); | |
2513 | |
2514 // ----------- S t a t e ------------- | |
2515 // -- a0 : the number of arguments (not including the receiver) | |
2516 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2517 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) | |
2518 // -- a4 : the number of [[BoundArguments]] | |
2519 // ----------------------------------- | |
2520 | |
2521 // Reserve stack space for the [[BoundArguments]]. | |
2522 { | |
2523 Label done; | |
2524 __ dsll(a5, a4, kPointerSizeLog2); | |
2525 __ Dsubu(sp, sp, Operand(a5)); | |
2526 // Check the stack for overflow. We are not trying to catch interruptions | |
2527 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2528 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | |
2529 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | |
2530 // Restore the stack pointer. | |
2531 __ Daddu(sp, sp, Operand(a5)); | |
2532 { | |
2533 FrameScope scope(masm, StackFrame::MANUAL); | |
2534 __ EnterFrame(StackFrame::INTERNAL); | |
2535 __ CallRuntime(Runtime::kThrowStackOverflow); | |
2536 } | |
2537 __ bind(&done); | |
2538 } | |
2539 | |
2540 // Relocate arguments down the stack. | |
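  // sp was moved down by one slot per bound argument above. Shift the a0
  // arguments plus the receiver into the gap ("gt" makes the copy loop
  // inclusive); the bound arguments then end up between the arguments and
  // the receiver.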
2541 { | |
2542 Label loop, done_loop; | |
2543 __ mov(a5, zero_reg); | |
2544 __ bind(&loop); | |
2545 __ Branch(&done_loop, gt, a5, Operand(a0)); | |
2546 __ Dlsa(a6, sp, a4, kPointerSizeLog2); | |
2547 __ ld(at, MemOperand(a6)); | |
2548 __ Dlsa(a6, sp, a5, kPointerSizeLog2); | |
2549 __ sd(at, MemOperand(a6)); | |
2550 __ Daddu(a4, a4, Operand(1)); | |
2551 __ Daddu(a5, a5, Operand(1)); | |
2552 __ Branch(&loop); | |
2553 __ bind(&done_loop); | |
2554 } | |
2555 | |
2556 // Copy [[BoundArguments]] to the stack (below the arguments). | |
2557 { | |
2558 Label loop, done_loop; | |
2559 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2560 __ SmiUntag(a4); | |
2561 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
2562 __ bind(&loop); | |
2563 __ Dsubu(a4, a4, Operand(1)); | |
2564 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); | |
2565 __ Dlsa(a5, a2, a4, kPointerSizeLog2); | |
2566 __ ld(at, MemOperand(a5)); | |
2567 __ Dlsa(a5, sp, a0, kPointerSizeLog2); | |
2568 __ sd(at, MemOperand(a5)); | |
2569 __ Daddu(a0, a0, Operand(1)); | |
2570 __ Branch(&loop); | |
2571 __ bind(&done_loop); | |
2572 } | |
2573 | |
2574 // Call the [[BoundTargetFunction]] via the Call builtin. | |
2575 __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); | |
2576 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, | |
2577 masm->isolate()))); | |
2578 __ ld(at, MemOperand(at)); | |
2579 __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2580 __ Jump(at); | |
2581 } | |
2582 | |
2583 | |
2584 // static | |
2585 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, | |
2586 TailCallMode tail_call_mode) { | |
2587 // ----------- S t a t e ------------- | |
2588 // -- a0 : the number of arguments (not including the receiver) | |
2589 // -- a1 : the target to call (can be any Object). | |
2590 // ----------------------------------- | |
2591 | |
2592 Label non_callable, non_function, non_smi; | |
2593 __ JumpIfSmi(a1, &non_callable); | |
2594 __ bind(&non_smi); | |
2595 __ GetObjectType(a1, t1, t2); | |
2596 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), | |
2597 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE)); | |
2598 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), | |
2599 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE)); | |
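  // (These Jumps are conditional tail calls, taken only when the instance
  // type in t2 matches the given operand.)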
2600 | |
2601 // Check if target has a [[Call]] internal method. | |
2602 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset)); | |
2603 __ And(t1, t1, Operand(1 << Map::kIsCallable)); | |
2604 __ Branch(&non_callable, eq, t1, Operand(zero_reg)); | |
2605 | |
2606 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE)); | |
2607 | |
2608 // 0. Prepare for tail call if necessary. | |
2609 if (tail_call_mode == TailCallMode::kAllow) { | |
2610 PrepareForTailCall(masm, a0, t0, t1, t2); | |
2611 } | |
2612 | |
2613 // 1. Runtime fallback for Proxy [[Call]]. | |
2614 __ Push(a1); | |
  // Increase the argument count to include the pushed function and the
  // existing receiver on the stack.
2617 __ Daddu(a0, a0, 2); | |
2618 // Tail-call to the runtime. | |
2619 __ JumpToExternalReference( | |
2620 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); | |
2621 | |
2622 // 2. Call to something else, which might have a [[Call]] internal method (if | |
2623 // not we raise an exception). | |
2624 __ bind(&non_function); | |
2625 // Overwrite the original receiver with the (original) target. | |
2626 __ Dlsa(at, sp, a0, kPointerSizeLog2); | |
2627 __ sd(a1, MemOperand(at)); | |
2628 // Let the "call_as_function_delegate" take care of the rest. | |
2629 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); | |
2630 __ Jump(masm->isolate()->builtins()->CallFunction( | |
2631 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), | |
2632 RelocInfo::CODE_TARGET); | |
2633 | |
2634 // 3. Call to something that is not callable. | |
2635 __ bind(&non_callable); | |
2636 { | |
2637 FrameScope scope(masm, StackFrame::INTERNAL); | |
2638 __ Push(a1); | |
2639 __ CallRuntime(Runtime::kThrowCalledNonCallable); | |
2640 } | |
2641 } | |
2642 | |
2643 | |
2644 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | |
2645 // ----------- S t a t e ------------- | |
2646 // -- a0 : the number of arguments (not including the receiver) | |
2647 // -- a1 : the constructor to call (checked to be a JSFunction) | |
2648 // -- a3 : the new target (checked to be a constructor) | |
2649 // ----------------------------------- | |
2650 __ AssertFunction(a1); | |
2651 | |
  // The calling convention for function-specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
2654 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
2655 | |
2656 // Tail call to the function-specific construct stub (still in the caller | |
2657 // context at this point). | |
2658 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
2659 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); | |
2660 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
2661 __ Jump(at); | |
2662 } | |
2663 | |
2664 | |
2665 // static | |
2666 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { | |
2667 // ----------- S t a t e ------------- | |
2668 // -- a0 : the number of arguments (not including the receiver) | |
2669 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2670 // -- a3 : the new target (checked to be a constructor) | |
2671 // ----------------------------------- | |
2672 __ AssertBoundFunction(a1); | |
2673 | |
2674 // Load [[BoundArguments]] into a2 and length of that into a4. | |
2675 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); | |
2676 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2677 __ SmiUntag(a4); | |
2678 | |
2679 // ----------- S t a t e ------------- | |
2680 // -- a0 : the number of arguments (not including the receiver) | |
2681 // -- a1 : the function to call (checked to be a JSBoundFunction) | |
2682 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) | |
2683 // -- a3 : the new target (checked to be a constructor) | |
2684 // -- a4 : the number of [[BoundArguments]] | |
2685 // ----------------------------------- | |
2686 | |
2687 // Reserve stack space for the [[BoundArguments]]. | |
2688 { | |
2689 Label done; | |
2690 __ dsll(a5, a4, kPointerSizeLog2); | |
2691 __ Dsubu(sp, sp, Operand(a5)); | |
2692 // Check the stack for overflow. We are not trying to catch interruptions | |
2693 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
2694 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); | |
2695 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. | |
2696 // Restore the stack pointer. | |
2697 __ Daddu(sp, sp, Operand(a5)); | |
2698 { | |
2699 FrameScope scope(masm, StackFrame::MANUAL); | |
2700 __ EnterFrame(StackFrame::INTERNAL); | |
2701 __ CallRuntime(Runtime::kThrowStackOverflow); | |
2702 } | |
2703 __ bind(&done); | |
2704 } | |
2705 | |
2706 // Relocate arguments down the stack. | |
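  // Same relocation as in Generate_CallBoundFunctionImpl, except the loop
  // uses "ge" and shifts only the a0 argument slots.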
2707 { | |
2708 Label loop, done_loop; | |
2709 __ mov(a5, zero_reg); | |
2710 __ bind(&loop); | |
2711 __ Branch(&done_loop, ge, a5, Operand(a0)); | |
2712 __ Dlsa(a6, sp, a4, kPointerSizeLog2); | |
2713 __ ld(at, MemOperand(a6)); | |
2714 __ Dlsa(a6, sp, a5, kPointerSizeLog2); | |
2715 __ sd(at, MemOperand(a6)); | |
2716 __ Daddu(a4, a4, Operand(1)); | |
2717 __ Daddu(a5, a5, Operand(1)); | |
2718 __ Branch(&loop); | |
2719 __ bind(&done_loop); | |
2720 } | |
2721 | |
2722 // Copy [[BoundArguments]] to the stack (below the arguments). | |
2723 { | |
2724 Label loop, done_loop; | |
2725 __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset)); | |
2726 __ SmiUntag(a4); | |
2727 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
2728 __ bind(&loop); | |
2729 __ Dsubu(a4, a4, Operand(1)); | |
2730 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); | |
2731 __ Dlsa(a5, a2, a4, kPointerSizeLog2); | |
2732 __ ld(at, MemOperand(a5)); | |
2733 __ Dlsa(a5, sp, a0, kPointerSizeLog2); | |
2734 __ sd(at, MemOperand(a5)); | |
2735 __ Daddu(a0, a0, Operand(1)); | |
2736 __ Branch(&loop); | |
2737 __ bind(&done_loop); | |
2738 } | |
2739 | |
2740 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. | |
2741 { | |
2742 Label skip_load; | |
2743 __ Branch(&skip_load, ne, a1, Operand(a3)); | |
2744 __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); | |
2745 __ bind(&skip_load); | |
2746 } | |
2747 | |
2748 // Construct the [[BoundTargetFunction]] via the Construct builtin. | |
2749 __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); | |
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Daddu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
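  // These allocation trampolines are called without a JS context; a Smi zero
  // serves as a dummy context for the runtime call.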
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in a0.
  __ AssertString(a0);

  // Check if string has a cached array index.
  Label runtime;
  __ lwu(a2, FieldMemOperand(a0, String::kHashFieldOffset));
  __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
  __ Branch(&runtime, ne, at, Operand(zero_reg));
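  // The string's hash field caches small array indices; extract the cached
  // index as a smi directly into the result register and avoid the runtime.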
  __ IndexFromHash(a2, v0);
  __ Ret();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}

// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in a0.
  Label not_smi;
  __ JumpIfNotSmi(a0, &not_smi);
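  // A smi is already a number, so return it unchanged; the mov below executes
  // in the branch delay slot of the Ret.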
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
  __ bind(&not_smi);

  Label not_heap_number;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
  __ bind(&not_heap_number);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in a0.
  __ AssertNotNumber(a0);

  Label not_string;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
  __ Jump(masm->isolate()->builtins()->StringToNumber(),
          RelocInfo::CODE_TARGET);
  __ bind(&not_string);

  Label not_oddball;
  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ ld(v0, FieldMemOperand(a0, Oddball::kToNumberOffset));  // In delay slot.
  __ bind(&not_oddball);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
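  // kDontAdaptArgumentsSentinel is the expected count stored for functions
  // that handle any actual argument count themselves, so no adaptor frame is
  // needed for them.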
  // Use an unsigned comparison (Uless); argument counts are never negative.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a4.
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);
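    // The loop below walks a0 down from the copy start, pushing one slot per
    // iteration until a0 reaches a4, so expected + 1 slots are copied in
    // total (the arguments plus the receiver).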

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a4: copy end address

    Label copy;
    __ bind(&copy);
    __ ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a7.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);
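    // The loop below copies actual + 1 slots (the arguments plus the
    // receiver); the missing expected arguments are filled in afterwards.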

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a7: copy end address
    Label copy;
    __ bind(&copy);
    __ ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Dsubu(sp, sp, kPointerSize);
    __ Dsubu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
    __ sd(a4, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ dsll(a6, a2, kPointerSizeLog2);
    __ Dsubu(a4, fp, Operand(a6));
    // Adjust for frame.
    __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                             2 * kPointerSize));
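    // a4 now holds the value sp must reach once every expected argument slot
    // exists; keep pushing undefined until sp gets there.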

    Label fill;
    __ bind(&fill);
    __ Dsubu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
    __ sd(a5, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0: expected number of arguments
  // a1: function (passed through to callee)
  // a3: new target (passed through to callee)
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(a4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(a4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64