Chromium Code Reviews

Side by Side Diff: src/x87/builtins-x87.cc

Issue 2145023002: [builtins] move builtin files to src/builtins/. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase Created 4 years, 5 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_X87
6
7 #include "src/code-factory.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/x87/frames-x87.h"
12
13 namespace v8 {
14 namespace internal {
15
16
17 #define __ ACCESS_MASM(masm)
18
19 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
20 ExitFrameType exit_frame_type) {
21 // ----------- S t a t e -------------
22 // -- eax : number of arguments excluding receiver
23 // -- edi : target
24 // -- edx : new.target
25 // -- esp[0] : return address
26 // -- esp[4] : last argument
27 // -- ...
28 // -- esp[4 * argc] : first argument
29 // -- esp[4 * (argc + 1)] : receiver
30 // -----------------------------------
31 __ AssertFunction(edi);
32
33 // Make sure we operate in the context of the called function (for example
34 // ConstructStubs implemented in C++ will be run in the context of the caller
35 // instead of the callee, due to the way that [[Construct]] is defined for
36 // ordinary functions).
37 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
38
39 // JumpToExternalReference expects eax to contain the number of arguments
40 // including the receiver and the extra arguments.
41 const int num_extra_args = 3;
42 __ add(eax, Immediate(num_extra_args + 1));
43
44 // Insert extra arguments.
45 __ PopReturnAddressTo(ecx);
46 __ SmiTag(eax);
47 __ Push(eax);
48 __ SmiUntag(eax);
49 __ Push(edi);
50 __ Push(edx);
51 __ PushReturnAddressFrom(ecx);
52
53 __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
54 exit_frame_type == BUILTIN_EXIT);
55 }
56
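For reference, the count handed to the C++ builtin therefore covers the JS arguments, the receiver, and the three extra values pushed above. A minimal standalone sketch of that arithmetic (plain C++, illustrative only, not the V8 implementation):

#include <cstddef>

// Mirrors "__ add(eax, Immediate(num_extra_args + 1))": the exit frame sees
// argc plus the receiver plus the three extra values (Smi-tagged argc, target,
// new.target) that the adaptor pushes below the return address.
size_t AdaptorArgumentCount(size_t js_argc) {
  const size_t kNumExtraArgs = 3;      // Smi-tagged argc, target, new.target
  return js_argc + kNumExtraArgs + 1;  // + 1 for the receiver
}
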
57 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
58 Runtime::FunctionId function_id) {
59 // ----------- S t a t e -------------
60 // -- eax : argument count (preserved for callee)
61 // -- edx : new target (preserved for callee)
62 // -- edi : target function (preserved for callee)
63 // -----------------------------------
64 {
65 FrameScope scope(masm, StackFrame::INTERNAL);
66 // Push the number of arguments to the callee.
67 __ SmiTag(eax);
68 __ push(eax);
69 // Push a copy of the target function and the new target.
70 __ push(edi);
71 __ push(edx);
72 // Function is also the parameter to the runtime call.
73 __ push(edi);
74
75 __ CallRuntime(function_id, 1);
76 __ mov(ebx, eax);
77
78 // Restore target function and new target.
79 __ pop(edx);
80 __ pop(edi);
81 __ pop(eax);
82 __ SmiUntag(eax);
83 }
84
85 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
86 __ jmp(ebx);
87 }
88
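The lea/jmp pair above lands on the first instruction of the returned Code object. A standalone sketch of that address arithmetic (plain integers, illustrative constants, not V8 code):

#include <cstdint>

// The Code object is a tagged heap pointer; its machine instructions start
// kHeaderSize bytes past the untagged object start, which is what
// FieldOperand(ebx, Code::kHeaderSize) expresses.
uintptr_t CodeEntryAddress(uintptr_t tagged_code_object) {
  const int kHeapObjectTag = 1;    // heap pointers carry a 1-bit tag
  const int kCodeHeaderSize = 96;  // placeholder for Code::kHeaderSize
  return tagged_code_object - kHeapObjectTag + kCodeHeaderSize;
}
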
89 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
90 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
91 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
92 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
93 __ jmp(ebx);
94 }
95
96 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
97 // Checking whether the queued function is ready for install is optional,
98 // since we come across interrupts and stack checks elsewhere. However,
99 // not checking may delay installing ready functions, and always checking
100 // would be quite expensive. A good compromise is to first check against
101 // stack limit as a cue for an interrupt signal.
102 Label ok;
103 ExternalReference stack_limit =
104 ExternalReference::address_of_stack_limit(masm->isolate());
105 __ cmp(esp, Operand::StaticVariable(stack_limit));
106 __ j(above_equal, &ok, Label::kNear);
107
108 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
109
110 __ bind(&ok);
111 GenerateTailCallToSharedCode(masm);
112 }
113
114 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
115 bool is_api_function,
116 bool create_implicit_receiver,
117 bool check_derived_construct) {
118 // ----------- S t a t e -------------
119 // -- eax: number of arguments
120 // -- esi: context
121 // -- edi: constructor function
122 // -- ebx: allocation site or undefined
123 // -- edx: new target
124 // -----------------------------------
125
126 // Enter a construct frame.
127 {
128 FrameScope scope(masm, StackFrame::CONSTRUCT);
129
130 // Preserve the incoming parameters on the stack.
131 __ AssertUndefinedOrAllocationSite(ebx);
132 __ push(esi);
133 __ push(ebx);
134 __ SmiTag(eax);
135 __ push(eax);
136
137 if (create_implicit_receiver) {
138 // Allocate the new receiver object.
139 __ Push(edi);
140 __ Push(edx);
141 FastNewObjectStub stub(masm->isolate());
142 __ CallStub(&stub);
143 __ mov(ebx, eax);
144 __ Pop(edx);
145 __ Pop(edi);
146
147 // ----------- S t a t e -------------
148 // -- edi: constructor function
149 // -- ebx: newly allocated object
150 // -- edx: new target
151 // -----------------------------------
152
153 // Retrieve smi-tagged arguments count from the stack.
154 __ mov(eax, Operand(esp, 0));
155 }
156
157 __ SmiUntag(eax);
158
159 if (create_implicit_receiver) {
160 // Push the allocated receiver to the stack. We need two copies
161 // because we may have to return the original one and the calling
162 // conventions dictate that the called function pops the receiver.
163 __ push(ebx);
164 __ push(ebx);
165 } else {
166 __ PushRoot(Heap::kTheHoleValueRootIndex);
167 }
168
169 // Set up pointer to last argument.
170 __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
171
172 // Copy arguments and receiver to the expression stack.
173 Label loop, entry;
174 __ mov(ecx, eax);
175 __ jmp(&entry);
176 __ bind(&loop);
177 __ push(Operand(ebx, ecx, times_4, 0));
178 __ bind(&entry);
179 __ dec(ecx);
180 __ j(greater_equal, &loop);
181
182 // Call the function.
183 ParameterCount actual(eax);
184 __ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
185 CheckDebugStepCallWrapper());
186
187 // Store offset of return address for deoptimizer.
188 if (create_implicit_receiver && !is_api_function) {
189 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
190 }
191
192 // Restore context from the frame.
193 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
194
195 if (create_implicit_receiver) {
196 // If the result is an object (in the ECMA sense), we should get rid
197 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
198 // on page 74.
199 Label use_receiver, exit;
200
201 // If the result is a smi, it is *not* an object in the ECMA sense.
202 __ JumpIfSmi(eax, &use_receiver);
203
204 // If the type of the result (stored in its map) is less than
205 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
206 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
207 __ j(above_equal, &exit);
208
209 // Throw away the result of the constructor invocation and use the
210 // on-stack receiver as the result.
211 __ bind(&use_receiver);
212 __ mov(eax, Operand(esp, 0));
213
214 // Restore the arguments count and leave the construct frame. The
215 // arguments count is stored below the receiver.
216 __ bind(&exit);
217 __ mov(ebx, Operand(esp, 1 * kPointerSize));
218 } else {
219 __ mov(ebx, Operand(esp, 0));
220 }
221
222 // Leave construct frame.
223 }
224
225 // ES6 9.2.2. Step 13+
226 // Check that the result is not a Smi, indicating that the constructor result
227 // from a derived class is neither undefined nor an Object.
228 if (check_derived_construct) {
229 Label dont_throw;
230 __ JumpIfNotSmi(eax, &dont_throw);
231 {
232 FrameScope scope(masm, StackFrame::INTERNAL);
233 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
234 }
235 __ bind(&dont_throw);
236 }
237
238 // Remove caller arguments from the stack and return.
239 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
240 __ pop(ecx);
241 __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
242 __ push(ecx);
243 if (create_implicit_receiver) {
244 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
245 }
246 __ ret(0);
247 }
248
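The receiver-vs-result choice made above can be summarized as follows; this is a standalone model (illustrative types, not V8 code) of the ECMA rule the comments cite:

// A constructor's return value replaces the implicit receiver only when it is
// an object in the ECMA sense (a non-Smi whose instance type is at least
// FIRST_JS_RECEIVER_TYPE); otherwise the receiver allocated before the call
// is what the construct stub returns.
struct Value {
  bool is_smi;
  bool is_js_receiver;  // instance type >= FIRST_JS_RECEIVER_TYPE
};

const Value* SelectConstructResult(const Value* call_result,
                                   const Value* implicit_receiver) {
  if (!call_result->is_smi && call_result->is_js_receiver) return call_result;
  return implicit_receiver;
}
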
249
250 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
251 Generate_JSConstructStubHelper(masm, false, true, false);
252 }
253
254
255 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
256 Generate_JSConstructStubHelper(masm, true, false, false);
257 }
258
259
260 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
261 Generate_JSConstructStubHelper(masm, false, false, false);
262 }
263
264
265 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
266 MacroAssembler* masm) {
267 Generate_JSConstructStubHelper(masm, false, false, true);
268 }
269
270
271 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
272 FrameScope scope(masm, StackFrame::INTERNAL);
273 __ push(edi);
274 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
275 }
276
277
278 enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt };
279
280
281 // Clobbers ecx, edx, edi; preserves all other registers.
282 static void Generate_CheckStackOverflow(MacroAssembler* masm,
283 IsTagged eax_is_tagged) {
284 // eax : the number of items to be pushed to the stack
285 //
286 // Check the stack for overflow. We are not trying to catch
287 // interruptions (e.g. debug break and preemption) here, so the "real stack
288 // limit" is checked.
289 Label okay;
290 ExternalReference real_stack_limit =
291 ExternalReference::address_of_real_stack_limit(masm->isolate());
292 __ mov(edi, Operand::StaticVariable(real_stack_limit));
293 // Make ecx the space we have left. The stack might already be overflowed
294 // here which will cause ecx to become negative.
295 __ mov(ecx, esp);
296 __ sub(ecx, edi);
297 // Make edx the space we need for the array when it is unrolled onto the
298 // stack.
299 __ mov(edx, eax);
300 int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0;
301 __ shl(edx, kPointerSizeLog2 - smi_tag);
302 // Check if the arguments will overflow the stack.
303 __ cmp(ecx, edx);
304 __ j(greater, &okay); // Signed comparison.
305
306 // Out of stack space.
307 __ CallRuntime(Runtime::kThrowStackOverflow);
308
309 __ bind(&okay);
310 }
311
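The check above reduces to one signed comparison; here is a standalone sketch of that arithmetic, including the Smi-tag adjustment (plain integers, illustrative only, not V8 code):

#include <cstdint>

// space_left may already be negative when the stack has overflowed, which is
// why the stub uses a signed "greater" jump. A Smi-tagged count is already
// shifted left by one bit, so it needs one bit less of scaling.
bool StackWouldOverflow(intptr_t esp, intptr_t real_stack_limit,
                        intptr_t item_count, bool count_is_smi_tagged) {
  const int kPointerSizeLog2 = 2;  // 4-byte pointers on ia32/x87
  const int kSmiTagSize = 1;
  intptr_t space_left = esp - real_stack_limit;
  int shift = kPointerSizeLog2 - (count_is_smi_tagged ? kSmiTagSize : 0);
  intptr_t space_needed = item_count << shift;
  return !(space_left > space_needed);  // stub continues only on "greater"
}
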
312
313 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
314 bool is_construct) {
315 ProfileEntryHookStub::MaybeCallEntryHook(masm);
316
317 {
318 FrameScope scope(masm, StackFrame::INTERNAL);
319
320 // Setup the context (we need to use the caller context from the isolate).
321 ExternalReference context_address(Isolate::kContextAddress,
322 masm->isolate());
323 __ mov(esi, Operand::StaticVariable(context_address));
324
325 // Load the previous frame pointer (ebx) to access C arguments
326 __ mov(ebx, Operand(ebp, 0));
327
328 // Push the function and the receiver onto the stack.
329 __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
330 __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
331
332 // Load the number of arguments and setup pointer to the arguments.
333 __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
334 __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
335
336 // Check if we have enough stack space to push all arguments.
337 // Expects argument count in eax. Clobbers ecx, edx, edi.
338 Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt);
339
340 // Copy arguments to the stack in a loop.
341 Label loop, entry;
342 __ Move(ecx, Immediate(0));
343 __ jmp(&entry, Label::kNear);
344 __ bind(&loop);
345 __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
346 __ push(Operand(edx, 0)); // dereference handle
347 __ inc(ecx);
348 __ bind(&entry);
349 __ cmp(ecx, eax);
350 __ j(not_equal, &loop);
351
352 // Load the previous frame pointer (ebx) to access C arguments
353 __ mov(ebx, Operand(ebp, 0));
354
355 // Get the new.target and function from the frame.
356 __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
357 __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
358
359 // Invoke the code.
360 Handle<Code> builtin = is_construct
361 ? masm->isolate()->builtins()->Construct()
362 : masm->isolate()->builtins()->Call();
363 __ Call(builtin, RelocInfo::CODE_TARGET);
364
365     // Exit the internal frame. Notice that this also removes the empty
366     // context and the function left on the stack by the code
367     // invocation.
368 }
369 __ ret(kPointerSize); // Remove receiver.
370 }
371
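Each entry in the C-level argv is a handle, so the copy loop above dereferences once per argument before pushing. A standalone sketch (plain C++, illustrative only, not V8 code):

#include <cstdint>
#include <vector>

// Models the loop between &loop and &entry: for i from 0 to argc - 1, load
// argv[i] (a pointer to the actual value) and push the value it refers to
// onto the JS stack.
std::vector<intptr_t> CopyArgumentsFromArgv(intptr_t* const* argv, int argc) {
  std::vector<intptr_t> js_stack;
  for (int i = 0; i < argc; ++i) {
    js_stack.push_back(*argv[i]);  // "dereference handle"
  }
  return js_stack;
}
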
372
373 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
374 Generate_JSEntryTrampolineHelper(masm, false);
375 }
376
377
378 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
379 Generate_JSEntryTrampolineHelper(masm, true);
380 }
381
382 // static
383 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
384 // ----------- S t a t e -------------
385 // -- eax : the value to pass to the generator
386 // -- ebx : the JSGeneratorObject to resume
387 // -- edx : the resume mode (tagged)
388 // -- esp[0] : return address
389 // -----------------------------------
390 __ AssertGeneratorObject(ebx);
391
392 // Store input value into generator object.
393 __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
394 __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
395 kDontSaveFPRegs);
396
397 // Store resume mode into generator object.
398 __ mov(FieldOperand(ebx, JSGeneratorObject::kResumeModeOffset), edx);
399
400 // Load suspended function and context.
401 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
402 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
403
404 // Flood function if we are stepping.
405 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
406 Label stepping_prepared;
407 ExternalReference last_step_action =
408 ExternalReference::debug_last_step_action_address(masm->isolate());
409 STATIC_ASSERT(StepFrame > StepIn);
410 __ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
411 __ j(greater_equal, &prepare_step_in_if_stepping);
412
413 // Flood function if we need to continue stepping in the suspended generator.
414 ExternalReference debug_suspended_generator =
415 ExternalReference::debug_suspended_generator_address(masm->isolate());
416 __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator));
417 __ j(equal, &prepare_step_in_suspended_generator);
418 __ bind(&stepping_prepared);
419
420 // Pop return address.
421 __ PopReturnAddressTo(eax);
422
423 // Push receiver.
424 __ Push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
425
426 // ----------- S t a t e -------------
427 // -- eax : return address
428 // -- ebx : the JSGeneratorObject to resume
429 // -- edx : the resume mode (tagged)
430 // -- edi : generator function
431 // -- esi : generator context
432 // -- esp[0] : generator receiver
433 // -----------------------------------
434
435 // Push holes for arguments to generator function. Since the parser forced
436 // context allocation for any variables in generators, the actual argument
437 // values have already been copied into the context and these dummy values
438 // will never be used.
439 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
440 __ mov(ecx,
441 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
442 {
443 Label done_loop, loop;
444 __ bind(&loop);
445 __ sub(ecx, Immediate(Smi::FromInt(1)));
446 __ j(carry, &done_loop, Label::kNear);
447 __ PushRoot(Heap::kTheHoleValueRootIndex);
448 __ jmp(&loop);
449 __ bind(&done_loop);
450 }
451
452 // Dispatch on the kind of generator object.
453 Label old_generator;
454 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
455 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
456 __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
457 __ j(not_equal, &old_generator);
458
459 // New-style (ignition/turbofan) generator object
460 {
461 __ PushReturnAddressFrom(eax);
462 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
463 __ mov(eax,
464 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
465 // We abuse new.target both to indicate that this is a resume call and to
466 // pass in the generator object. In ordinary calls, new.target is always
467 // undefined because generator functions are non-constructable.
468 __ mov(edx, ebx);
469 __ jmp(FieldOperand(edi, JSFunction::kCodeEntryOffset));
470 }
471
472 // Old-style (full-codegen) generator object
473 __ bind(&old_generator);
474 {
475 // Enter a new JavaScript frame, and initialize its slots as they were when
476 // the generator was suspended.
477 FrameScope scope(masm, StackFrame::MANUAL);
478 __ PushReturnAddressFrom(eax); // Return address.
479 __ Push(ebp); // Caller's frame pointer.
480 __ Move(ebp, esp);
481 __ Push(esi); // Callee's context.
482 __ Push(edi); // Callee's JS Function.
483
484 // Restore the operand stack.
485 __ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
486 {
487 Label done_loop, loop;
488 __ Move(ecx, Smi::FromInt(0));
489 __ bind(&loop);
490 __ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
491 __ j(equal, &done_loop, Label::kNear);
492 __ Push(FieldOperand(eax, ecx, times_half_pointer_size,
493 FixedArray::kHeaderSize));
494 __ add(ecx, Immediate(Smi::FromInt(1)));
495 __ jmp(&loop);
496 __ bind(&done_loop);
497 }
498
499 // Reset operand stack so we don't leak.
500 __ mov(FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset),
501 Immediate(masm->isolate()->factory()->empty_fixed_array()));
502
503 // Resume the generator function at the continuation.
504 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
505 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
506 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
507 __ SmiUntag(ecx);
508 __ lea(edx, FieldOperand(edx, ecx, times_1, Code::kHeaderSize));
509 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
510 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
511 __ mov(eax, ebx); // Continuation expects generator object in eax.
512 __ jmp(edx);
513 }
514
515 __ bind(&prepare_step_in_if_stepping);
516 {
517 FrameScope scope(masm, StackFrame::INTERNAL);
518 __ Push(ebx);
519 __ Push(edx);
520 __ Push(edi);
521 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
522 __ Pop(edx);
523 __ Pop(ebx);
524 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
525 }
526 __ jmp(&stepping_prepared);
527
528 __ bind(&prepare_step_in_suspended_generator);
529 {
530 FrameScope scope(masm, StackFrame::INTERNAL);
531 __ Push(ebx);
532 __ Push(edx);
533 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
534 __ Pop(edx);
535 __ Pop(ebx);
536 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
537 }
538 __ jmp(&stepping_prepared);
539 }
540
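The hole-pushing loop above pushes exactly one placeholder per formal parameter; a standalone sketch (plain C++, illustrative only, not V8 code):

#include <vector>

// The Smi-tagged formal parameter count is decremented until it underflows
// (the carry check), pushing the-hole once per parameter. The real argument
// values already live in the generator's context, so these slots stay unused.
void PushParameterHoles(int formal_parameter_count, int the_hole_marker,
                        std::vector<int>* stack) {
  for (int i = 0; i < formal_parameter_count; ++i) {
    stack->push_back(the_hole_marker);
  }
}
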
541 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
542 Register scratch2) {
543 Register args_count = scratch1;
544 Register return_pc = scratch2;
545
546 // Get the arguments + receiver count.
547 __ mov(args_count,
548 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
549 __ mov(args_count,
550 FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
551
552 // Leave the frame (also dropping the register file).
553 __ leave();
554
555 // Drop receiver + arguments.
556 __ pop(return_pc);
557 __ add(esp, args_count);
558 __ push(return_pc);
559 }
560
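The teardown above keeps the return address while dropping the receiver and arguments, whose combined byte size comes from the BytecodeArray. A standalone sketch (plain C++, illustrative only, not V8 code):

#include <cstddef>
#include <cstdint>

// After __ leave() has restored esp/ebp, the return address sits on top of
// the arguments; it is popped, the argument bytes (parameter size, which
// already includes the receiver) are dropped, and it is pushed back.
intptr_t* DropReceiverAndArguments(intptr_t* sp, size_t args_byte_count) {
  intptr_t return_pc = *sp++;                // __ pop(return_pc)
  sp += args_byte_count / sizeof(intptr_t);  // __ add(esp, args_count)
  *--sp = return_pc;                         // __ push(return_pc)
  return sp;
}
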
561 // Generate code for entering a JS function with the interpreter.
562 // On entry to the function the receiver and arguments have been pushed on the
563 // stack left to right. The actual argument count matches the formal parameter
564 // count expected by the function.
565 //
566 // The live registers are:
567 // o edi: the JS function object being called
568 // o edx: the new target
569 // o esi: our context
570 // o ebp: the caller's frame pointer
571 // o esp: stack pointer (pointing to return address)
572 //
573 // The function builds an interpreter frame. See InterpreterFrameConstants in
574 // frames.h for its layout.
575 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
576 ProfileEntryHookStub::MaybeCallEntryHook(masm);
577
578 // Open a frame scope to indicate that there is a frame on the stack. The
579 // MANUAL indicates that the scope shouldn't actually generate code to set up
580 // the frame (that is done below).
581 FrameScope frame_scope(masm, StackFrame::MANUAL);
582 __ push(ebp); // Caller's frame pointer.
583 __ mov(ebp, esp);
584 __ push(esi); // Callee's context.
585 __ push(edi); // Callee's JS function.
586 __ push(edx); // Callee's new target.
587
588 // Get the bytecode array from the function object (or from the DebugInfo if
589 // it is present) and load it into kInterpreterBytecodeArrayRegister.
590 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
591 Label load_debug_bytecode_array, bytecode_array_loaded;
592 __ cmp(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
593 Immediate(DebugInfo::uninitialized()));
594 __ j(not_equal, &load_debug_bytecode_array);
595 __ mov(kInterpreterBytecodeArrayRegister,
596 FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
597 __ bind(&bytecode_array_loaded);
598
599 // Check function data field is actually a BytecodeArray object.
600 Label bytecode_array_not_present;
601 __ CompareRoot(kInterpreterBytecodeArrayRegister,
602 Heap::kUndefinedValueRootIndex);
603 __ j(equal, &bytecode_array_not_present);
604 if (FLAG_debug_code) {
605 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
606 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
607 eax);
608 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
609 }
610
611 // Push bytecode array.
612 __ push(kInterpreterBytecodeArrayRegister);
613 // Push Smi tagged initial bytecode array offset.
614 __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
615
616 // Allocate the local and temporary register file on the stack.
617 {
618 // Load frame size from the BytecodeArray object.
619 __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
620 BytecodeArray::kFrameSizeOffset));
621
622 // Do a stack check to ensure we don't go over the limit.
623 Label ok;
624 __ mov(ecx, esp);
625 __ sub(ecx, ebx);
626 ExternalReference stack_limit =
627 ExternalReference::address_of_real_stack_limit(masm->isolate());
628 __ cmp(ecx, Operand::StaticVariable(stack_limit));
629 __ j(above_equal, &ok);
630 __ CallRuntime(Runtime::kThrowStackOverflow);
631 __ bind(&ok);
632
633 // If ok, push undefined as the initial value for all register file entries.
634 Label loop_header;
635 Label loop_check;
636 __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
637 __ jmp(&loop_check);
638 __ bind(&loop_header);
639 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
640 __ push(eax);
641 // Continue loop if not done.
642 __ bind(&loop_check);
643 __ sub(ebx, Immediate(kPointerSize));
644 __ j(greater_equal, &loop_header);
645 }
646
647 // Load accumulator, bytecode offset and dispatch table into registers.
648 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
649 __ mov(kInterpreterBytecodeOffsetRegister,
650 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
651 __ mov(kInterpreterDispatchTableRegister,
652 Immediate(ExternalReference::interpreter_dispatch_table_address(
653 masm->isolate())));
654
655 // Dispatch to the first bytecode handler for the function.
656 __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
657 kInterpreterBytecodeOffsetRegister, times_1, 0));
658 __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
659 times_pointer_size, 0));
660 __ call(ebx);
661 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
662
663 // The return value is in eax.
664 LeaveInterpreterFrame(masm, ebx, ecx);
665 __ ret(0);
666
667 // Load debug copy of the bytecode array.
668 __ bind(&load_debug_bytecode_array);
669 Register debug_info = kInterpreterBytecodeArrayRegister;
670 __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
671 __ mov(kInterpreterBytecodeArrayRegister,
672 FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
673 __ jmp(&bytecode_array_loaded);
674
675 // If the bytecode array is no longer present, then the underlying function
676 // has been switched to a different kind of code and we heal the closure by
677 // switching the code entry field over to the new code object as well.
678 __ bind(&bytecode_array_not_present);
679 __ pop(edx); // Callee's new target.
680 __ pop(edi); // Callee's JS function.
681 __ pop(esi); // Callee's context.
682 __ leave(); // Leave the frame so we can tail call.
683 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
684 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
685 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
686 __ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
687 __ RecordWriteCodeEntryField(edi, ecx, ebx);
688 __ jmp(ecx);
689 }
690
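The final movzx_b/mov/call sequence above is the interpreter's dispatch: the current bytecode selects a handler from the dispatch table. A standalone sketch (plain C++, illustrative only, not V8 code):

#include <cstddef>
#include <cstdint>

using BytecodeHandler = void (*)();

// The byte at the current offset of the BytecodeArray indexes a table of
// pointer-sized handler entry points; control then transfers to that handler
// (the __ call(ebx) above), which later returns to this trampoline.
void DispatchBytecode(const uint8_t* bytecode_array, size_t offset,
                      BytecodeHandler const* dispatch_table) {
  uint8_t bytecode = bytecode_array[offset];
  dispatch_table[bytecode]();
}
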
691 void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
692 // Save the function and context for call to CompileBaseline.
693 __ mov(edi, Operand(ebp, StandardFrameConstants::kFunctionOffset));
694 __ mov(kContextRegister,
695 Operand(ebp, StandardFrameConstants::kContextOffset));
696
697 // Leave the frame before recompiling for baseline so that we don't count as
698 // an activation on the stack.
699 LeaveInterpreterFrame(masm, ebx, ecx);
700
701 {
702 FrameScope frame_scope(masm, StackFrame::INTERNAL);
703 // Push return value.
704 __ push(eax);
705
706 // Push function as argument and compile for baseline.
707 __ push(edi);
708 __ CallRuntime(Runtime::kCompileBaseline);
709
710 // Restore return value.
711 __ pop(eax);
712 }
713 __ ret(0);
714 }
715
716 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
717 Register array_limit) {
718 // ----------- S t a t e -------------
719 // -- ebx : Pointer to the last argument in the args array.
720 // -- array_limit : Pointer to one before the first argument in the
721 // args array.
722 // -----------------------------------
723 Label loop_header, loop_check;
724 __ jmp(&loop_check);
725 __ bind(&loop_header);
726 __ Push(Operand(ebx, 0));
727 __ sub(ebx, Immediate(kPointerSize));
728 __ bind(&loop_check);
729 __ cmp(ebx, array_limit);
730 __ j(greater, &loop_header, Label::kNear);
731 }
732
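The loop above walks from the address in ebx down to (but not including) array_limit, pushing one pointer-sized value per step. A standalone sketch (plain C++, illustrative only, not V8 code):

#include <cstdint>
#include <vector>

// The condition is checked before the first push (the stub jumps straight to
// &loop_check), so nothing is pushed when the start pointer is not strictly
// above the limit.
void PushArgs(const intptr_t* start, const intptr_t* array_limit,
              std::vector<intptr_t>* stack) {
  for (const intptr_t* p = start; p > array_limit; --p) {
    stack->push_back(*p);  // __ Push(Operand(ebx, 0))
  }
}
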
733 // static
734 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
735 MacroAssembler* masm, TailCallMode tail_call_mode,
736 CallableType function_type) {
737 // ----------- S t a t e -------------
738 // -- eax : the number of arguments (not including the receiver)
739 // -- ebx : the address of the first argument to be pushed. Subsequent
740 // arguments should be consecutive above this, in the same order as
741 // they are to be pushed onto the stack.
742 // -- edi : the target to call (can be any Object).
743 // -----------------------------------
744
745 // Pop return address to allow tail-call after pushing arguments.
746 __ Pop(edx);
747
748 // Find the address of the last argument.
749 __ mov(ecx, eax);
750 __ add(ecx, Immediate(1)); // Add one for receiver.
751 __ shl(ecx, kPointerSizeLog2);
752 __ neg(ecx);
753 __ add(ecx, ebx);
754
755 Generate_InterpreterPushArgs(masm, ecx);
756
757 // Call the target.
758 __ Push(edx); // Re-push return address.
759
760 if (function_type == CallableType::kJSFunction) {
761 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
762 tail_call_mode),
763 RelocInfo::CODE_TARGET);
764 } else {
765 DCHECK_EQ(function_type, CallableType::kAny);
766 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
767 tail_call_mode),
768 RelocInfo::CODE_TARGET);
769 }
770 }
771
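The neg/add sequence above computes the limit pointer handed to Generate_InterpreterPushArgs; in effect it steps back argc + 1 slots (arguments plus receiver) from the first-argument address. A standalone sketch (plain C++, illustrative only, not V8 code):

#include <cstdint>

// ecx = ebx - (eax + 1) * kPointerSize, expressed as pointer arithmetic.
// The + 1 accounts for the receiver, which is pushed along with the arguments.
const intptr_t* ComputeArrayLimit(const intptr_t* first_arg, intptr_t argc) {
  return first_arg - (argc + 1);
}
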
772
773 // static
774 void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
775 // ----------- S t a t e -------------
776 // -- eax : the number of arguments (not including the receiver)
777 // -- edx : the new target
778 // -- edi : the constructor
779 // -- ebx : the address of the first argument to be pushed. Subsequent
780 // arguments should be consecutive above this, in the same order as
781 // they are to be pushed onto the stack.
782 // -----------------------------------
783
784 // Pop return address to allow tail-call after pushing arguments.
785 __ Pop(ecx);
786
787 // Push edi in the slot meant for the receiver. We need an extra register,
788 // so store edi temporarily on the stack.
789 __ Push(edi);
790
791 // Find the address of the last argument.
792 __ mov(edi, eax);
793 __ neg(edi);
794 __ shl(edi, kPointerSizeLog2);
795 __ add(edi, ebx);
796
797 Generate_InterpreterPushArgs(masm, edi);
798
799 // Restore the constructor from the slot on the stack. It was pushed at
800 // the slot meant for the receiver.
801 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
802
803 // Re-push return address.
804 __ Push(ecx);
805
806 // Call the constructor with unmodified eax, edi, edx values.
807 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
808 }
809
810 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
811 // Set the return address to the correct point in the interpreter entry
812 // trampoline.
813 Smi* interpreter_entry_return_pc_offset(
814 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
815 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
816 __ LoadHeapObject(ebx,
817 masm->isolate()->builtins()->InterpreterEntryTrampoline());
818 __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
819 Code::kHeaderSize - kHeapObjectTag));
820 __ push(ebx);
821
822 // Initialize the dispatch table register.
823 __ mov(kInterpreterDispatchTableRegister,
824 Immediate(ExternalReference::interpreter_dispatch_table_address(
825 masm->isolate())));
826
827 // Get the bytecode array pointer from the frame.
828 __ mov(kInterpreterBytecodeArrayRegister,
829 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
830
831 if (FLAG_debug_code) {
832 // Check function data field is actually a BytecodeArray object.
833 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
834 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
835 ebx);
836 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
837 }
838
839 // Get the target bytecode offset from the frame.
840 __ mov(kInterpreterBytecodeOffsetRegister,
841 Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
842 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
843
844 // Dispatch to the target bytecode.
845 __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
846 kInterpreterBytecodeOffsetRegister, times_1, 0));
847 __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
848 times_pointer_size, 0));
849 __ jmp(ebx);
850 }
851
852 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
853 // ----------- S t a t e -------------
854 // -- eax : argument count (preserved for callee)
855 // -- edx : new target (preserved for callee)
856 // -- edi : target function (preserved for callee)
857 // -----------------------------------
858 // First look up code; maybe we don't need to compile!
859 Label gotta_call_runtime, gotta_call_runtime_no_stack;
860 Label maybe_call_runtime;
861 Label try_shared;
862 Label loop_top, loop_bottom;
863
864 Register closure = edi;
865 Register new_target = edx;
866 Register argument_count = eax;
867
868 __ push(argument_count);
869 __ push(new_target);
870 __ push(closure);
871
872 Register map = argument_count;
873 Register index = ebx;
874 __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
875 __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
876 __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
877 __ cmp(index, Immediate(Smi::FromInt(2)));
878 __ j(less, &gotta_call_runtime);
879
880 // Find literals.
881 // edx : native context
882 // ebx : length / index
883 // eax : optimized code map
884 // stack[0] : new target
885 // stack[4] : closure
886 Register native_context = edx;
887 __ mov(native_context, NativeContextOperand());
888
889 __ bind(&loop_top);
890 Register temp = edi;
891
892 // Does the native context match?
893 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
894 SharedFunctionInfo::kOffsetToPreviousContext));
895 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
896 __ cmp(temp, native_context);
897 __ j(not_equal, &loop_bottom);
898 // OSR id set to none?
899 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
900 SharedFunctionInfo::kOffsetToPreviousOsrAstId));
901 const int bailout_id = BailoutId::None().ToInt();
902 __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
903 __ j(not_equal, &loop_bottom);
904
905 // Literals available?
906 Label got_literals, maybe_cleared_weakcell;
907 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
908 SharedFunctionInfo::kOffsetToPreviousLiterals));
909
910 // temp contains either a WeakCell pointing to the literals array or the
911 // literals array directly.
912 STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
913 __ JumpIfSmi(FieldOperand(temp, WeakCell::kValueOffset),
914 &maybe_cleared_weakcell);
915 // The WeakCell value is a pointer, therefore it's a valid literals array.
916 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
917 __ jmp(&got_literals);
918
919 // We have a smi. If it's 0, then we are looking at a cleared WeakCell
920 // around the literals array, and we should visit the runtime. If it's > 0,
921 // then temp already contains the literals array.
922 __ bind(&maybe_cleared_weakcell);
923 __ cmp(FieldOperand(temp, WeakCell::kValueOffset), Immediate(0));
924 __ j(equal, &gotta_call_runtime);
925
926 // Save the literals in the closure.
927 __ bind(&got_literals);
928 __ mov(ecx, Operand(esp, 0));
929 __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
930 __ push(index);
931 __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
932 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
933 __ pop(index);
934
935 // Code available?
936 Register entry = ecx;
937 __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
938 SharedFunctionInfo::kOffsetToPreviousCachedCode));
939 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
940 __ JumpIfSmi(entry, &maybe_call_runtime);
941
942 // Found literals and code. Get them into the closure and return.
943 __ pop(closure);
944 // Store code entry in the closure.
945 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
946
947 Label install_optimized_code_and_tailcall;
948 __ bind(&install_optimized_code_and_tailcall);
949 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
950 __ RecordWriteCodeEntryField(closure, entry, eax);
951
952 // Link the closure into the optimized function list.
953 // ecx : code entry
954 // edx : native context
955 // edi : closure
956 __ mov(ebx,
957 ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
958 __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
959 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
960 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
961 const int function_list_offset =
962 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
963 __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
964 closure);
965 // Save closure before the write barrier.
966 __ mov(ebx, closure);
967 __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
968 kDontSaveFPRegs);
969 __ mov(closure, ebx);
970 __ pop(new_target);
971 __ pop(argument_count);
972 __ jmp(entry);
973
974 __ bind(&loop_bottom);
975 __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
976 __ cmp(index, Immediate(Smi::FromInt(1)));
977 __ j(greater, &loop_top);
978
979 // We found neither literals nor code.
980 __ jmp(&gotta_call_runtime);
981
982 __ bind(&maybe_call_runtime);
983 __ pop(closure);
984
985 // Last possibility. Check the context free optimized code map entry.
986 __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize +
987 SharedFunctionInfo::kSharedCodeIndex));
988 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
989 __ JumpIfSmi(entry, &try_shared);
990
991 // Store code entry in the closure.
992 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
993 __ jmp(&install_optimized_code_and_tailcall);
994
995 __ bind(&try_shared);
996 __ pop(new_target);
997 __ pop(argument_count);
998 // Is the full code valid?
999 __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1000 __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
1001 __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
1002 __ and_(ebx, Code::KindField::kMask);
1003 __ shr(ebx, Code::KindField::kShift);
1004 __ cmp(ebx, Immediate(Code::BUILTIN));
1005 __ j(equal, &gotta_call_runtime_no_stack);
1006 // Yes, install the full code.
1007 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
1008 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
1009 __ RecordWriteCodeEntryField(closure, entry, ebx);
1010 __ jmp(entry);
1011
1012 __ bind(&gotta_call_runtime);
1013 __ pop(closure);
1014 __ pop(new_target);
1015 __ pop(argument_count);
1016 __ bind(&gotta_call_runtime_no_stack);
1017
1018 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1019 }
1020
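The loop between &loop_top and &loop_bottom scans the SharedFunctionInfo's optimized code map from the end, applying two conditions per entry. A standalone model (illustrative field and constant names, not V8 identifiers):

#include <vector>

// An entry is usable only when its native context matches the caller's and
// its OSR ast id is "none" (a regular, non-OSR compilation). The literals and
// code slots are weak, so either may have been cleared in a usable entry.
struct CodeMapEntry {
  const void* native_context;
  int osr_ast_id;        // kNoOsrAstId marks a non-OSR entry
  const void* literals;  // may be null if its weak cell was cleared
  const void* code;      // may be null if its weak cell was cleared
};

const CodeMapEntry* FindOptimizedEntry(const std::vector<CodeMapEntry>& map,
                                       const void* native_context,
                                       int kNoOsrAstId) {
  for (auto it = map.rbegin(); it != map.rend(); ++it) {
    if (it->native_context == native_context &&
        it->osr_ast_id == kNoOsrAstId) {
      return &*it;
    }
  }
  return nullptr;
}
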
1021 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1022 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1023 }
1024
1025 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1026 GenerateTailCallToReturnedCode(masm,
1027 Runtime::kCompileOptimized_NotConcurrent);
1028 }
1029
1030
1031 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1032 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1033 }
1034
1035 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1036 // ----------- S t a t e -------------
1037 // -- eax : argument count (preserved for callee)
1038 // -- edx : new target (preserved for callee)
1039 // -- edi : target function (preserved for callee)
1040 // -----------------------------------
1041 Label failed;
1042 {
1043 FrameScope scope(masm, StackFrame::INTERNAL);
1044 // Push the number of arguments to the callee.
1045 __ SmiTag(eax);
1046 __ push(eax);
1047 // Push a copy of the target function and the new target.
1048 __ push(edi);
1049 __ push(edx);
1050
1051 // The function.
1052 __ push(edi);
1053 // Copy arguments from caller (stdlib, foreign, heap).
1054 for (int i = 2; i >= 0; --i) {
1055 __ push(Operand(
1056 ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
1057 }
1058 // Call runtime, on success unwind frame, and parent frame.
1059 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1060 // A smi 0 is returned on failure, an object on success.
1061 __ JumpIfSmi(eax, &failed, Label::kNear);
1062 scope.GenerateLeaveFrame();
1063 __ ret(4 * kPointerSize);
1064
1065 __ bind(&failed);
1066 // Restore target function and new target.
1067 __ pop(edx);
1068 __ pop(edi);
1069 __ pop(eax);
1070 __ SmiUntag(eax);
1071 }
1072 // On failure, tail call back to regular js.
1073 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1074 }
1075
1076 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1077 // For now, we are relying on the fact that make_code_young doesn't do any
1078 // garbage collection which allows us to save/restore the registers without
1079 // worrying about which of them contain pointers. We also don't build an
1080 // internal frame to make the code faster, since we shouldn't have to do stack
1081 // crawls in MakeCodeYoung. This seems a bit fragile.
1082
1083 // Re-execute the code that was patched back to the young age when
1084 // the stub returns.
1085 __ sub(Operand(esp, 0), Immediate(5));
1086 __ pushad();
1087 __ mov(eax, Operand(esp, 8 * kPointerSize));
1088 {
1089 FrameScope scope(masm, StackFrame::MANUAL);
1090 __ PrepareCallCFunction(2, ebx);
1091 __ mov(Operand(esp, 1 * kPointerSize),
1092 Immediate(ExternalReference::isolate_address(masm->isolate())));
1093 __ mov(Operand(esp, 0), eax);
1094 __ CallCFunction(
1095 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1096 }
1097 __ popad();
1098 __ ret(0);
1099 }
1100
1101 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1102 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1103 MacroAssembler* masm) { \
1104 GenerateMakeCodeYoungAgainCommon(masm); \
1105 } \
1106 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1107 MacroAssembler* masm) { \
1108 GenerateMakeCodeYoungAgainCommon(masm); \
1109 }
1110 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1111 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1112
1113
1114 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1115 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1116 // that make_code_young doesn't do any garbage collection which allows us to
1117 // save/restore the registers without worrying about which of them contain
1118 // pointers.
1119 __ pushad();
1120 __ mov(eax, Operand(esp, 8 * kPointerSize));
1121 __ sub(eax, Immediate(Assembler::kCallInstructionLength));
1122 { // NOLINT
1123 FrameScope scope(masm, StackFrame::MANUAL);
1124 __ PrepareCallCFunction(2, ebx);
1125 __ mov(Operand(esp, 1 * kPointerSize),
1126 Immediate(ExternalReference::isolate_address(masm->isolate())));
1127 __ mov(Operand(esp, 0), eax);
1128 __ CallCFunction(
1129 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1130 2);
1131 }
1132 __ popad();
1133
1134 // Perform prologue operations usually performed by the young code stub.
1135 __ pop(eax); // Pop return address into scratch register.
1136 __ push(ebp); // Caller's frame pointer.
1137 __ mov(ebp, esp);
1138 __ push(esi); // Callee's context.
1139 __ push(edi); // Callee's JS Function.
1140 __ push(eax); // Push return address after frame prologue.
1141
1142 // Jump to point after the code-age stub.
1143 __ ret(0);
1144 }
1145
1146
1147 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1148 GenerateMakeCodeYoungAgainCommon(masm);
1149 }
1150
1151
1152 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1153 Generate_MarkCodeAsExecutedOnce(masm);
1154 }
1155
1156
1157 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1158 SaveFPRegsMode save_doubles) {
1159 // Enter an internal frame.
1160 {
1161 FrameScope scope(masm, StackFrame::INTERNAL);
1162
1163 // Preserve registers across notification, this is important for compiled
1164 // stubs that tail call the runtime on deopts passing their parameters in
1165 // registers.
1166 __ pushad();
1167 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1168 __ popad();
1169 // Tear down internal frame.
1170 }
1171
1172 __ pop(MemOperand(esp, 0)); // Ignore state offset
1173 __ ret(0); // Return to IC Miss stub, continuation still on stack.
1174 }
1175
1176
1177 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1178 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1179 }
1180
1181
1182 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1183 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1184 }
1185
1186
1187 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1188 Deoptimizer::BailoutType type) {
1189 {
1190 FrameScope scope(masm, StackFrame::INTERNAL);
1191
1192 // Pass deoptimization type to the runtime system.
1193 __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
1194 __ CallRuntime(Runtime::kNotifyDeoptimized);
1195
1196 // Tear down internal frame.
1197 }
1198
1199 // Get the full codegen state from the stack and untag it.
1200 __ mov(ecx, Operand(esp, 1 * kPointerSize));
1201 __ SmiUntag(ecx);
1202
1203 // Switch on the state.
1204 Label not_no_registers, not_tos_eax;
1205 __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
1206 __ j(not_equal, &not_no_registers, Label::kNear);
1207 __ ret(1 * kPointerSize); // Remove state.
1208
1209 __ bind(&not_no_registers);
1210 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
1211 __ mov(eax, Operand(esp, 2 * kPointerSize));
1212 __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
1213 __ j(not_equal, &not_tos_eax, Label::kNear);
1214 __ ret(2 * kPointerSize); // Remove state, eax.
1215
1216 __ bind(&not_tos_eax);
1217 __ Abort(kNoCasesLeft);
1218 }
1219
1220
1221 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1222 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1223 }
1224
1225
1226 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1227 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1228 }
1229
1230
1231 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1232 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1233 }
1234
1235
1236 // static
1237 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1238 int field_index) {
1239 // ----------- S t a t e -------------
1240 // -- eax : number of arguments
1241 // -- edi : function
1242 // -- esi : context
1243 // -- esp[0] : return address
1244 // -- esp[4] : receiver
1245 // -----------------------------------
1246
1247 // 1. Load receiver into eax and check that it's actually a JSDate object.
1248 Label receiver_not_date;
1249 {
1250 __ mov(eax, Operand(esp, kPointerSize));
1251 __ JumpIfSmi(eax, &receiver_not_date);
1252 __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
1253 __ j(not_equal, &receiver_not_date);
1254 }
1255
1256 // 2. Load the specified date field, falling back to the runtime as necessary.
1257 if (field_index == JSDate::kDateValue) {
1258 __ mov(eax, FieldOperand(eax, JSDate::kValueOffset));
1259 } else {
1260 if (field_index < JSDate::kFirstUncachedField) {
1261 Label stamp_mismatch;
1262 __ mov(edx, Operand::StaticVariable(
1263 ExternalReference::date_cache_stamp(masm->isolate())));
1264 __ cmp(edx, FieldOperand(eax, JSDate::kCacheStampOffset));
1265 __ j(not_equal, &stamp_mismatch, Label::kNear);
1266 __ mov(eax, FieldOperand(
1267 eax, JSDate::kValueOffset + field_index * kPointerSize));
1268 __ ret(1 * kPointerSize);
1269 __ bind(&stamp_mismatch);
1270 }
1271 FrameScope scope(masm, StackFrame::INTERNAL);
1272 __ PrepareCallCFunction(2, ebx);
1273 __ mov(Operand(esp, 0), eax);
1274 __ mov(Operand(esp, 1 * kPointerSize),
1275 Immediate(Smi::FromInt(field_index)));
1276 __ CallCFunction(
1277 ExternalReference::get_date_field_function(masm->isolate()), 2);
1278 }
1279 __ ret(1 * kPointerSize);
1280
1281 // 3. Raise a TypeError if the receiver is not a date.
1282 __ bind(&receiver_not_date);
1283 {
1284 FrameScope scope(masm, StackFrame::MANUAL);
1285 __ Move(ebx, Immediate(0));
1286 __ EnterBuiltinFrame(esi, edi, ebx);
1287 __ CallRuntime(Runtime::kThrowNotDateError);
1288 }
1289 }
1290
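The fast path above is valid only while the date object's cache stamp matches the isolate-wide stamp; otherwise the field is recomputed through the C function. A standalone model (made-up field names, illustrative only, not V8 code):

#include <cstdint>

struct DateObject {
  int64_t cache_stamp;  // compared against the isolate's current stamp
  int64_t fields[16];   // cached derived fields (year, month, day, ...)
};

// Cached fields become stale whenever the isolate bumps its date cache stamp
// (for example after a time zone change), which is what the cmp against
// JSDate::kCacheStampOffset guards against.
int64_t GetDateField(const DateObject& date, int field_index,
                     int64_t current_stamp,
                     int64_t (*recompute)(const DateObject&, int)) {
  if (date.cache_stamp == current_stamp) return date.fields[field_index];
  return recompute(date, field_index);
}
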
1291 // static
1292 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1293 // ----------- S t a t e -------------
1294 // -- eax : argc
1295 // -- esp[0] : return address
1296 // -- esp[4] : argArray
1297 // -- esp[8] : thisArg
1298 // -- esp[12] : receiver
1299 // -----------------------------------
1300
1301 // 1. Load receiver into edi, argArray into eax (if present), remove all
1302 // arguments from the stack (including the receiver), and push thisArg (if
1303 // present) instead.
1304 {
1305 Label no_arg_array, no_this_arg;
1306 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1307 __ mov(ebx, edx);
1308 __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1309 __ test(eax, eax);
1310 __ j(zero, &no_this_arg, Label::kNear);
1311 {
1312 __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
1313 __ cmp(eax, Immediate(1));
1314 __ j(equal, &no_arg_array, Label::kNear);
1315 __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
1316 __ bind(&no_arg_array);
1317 }
1318 __ bind(&no_this_arg);
1319 __ PopReturnAddressTo(ecx);
1320 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1321 __ Push(edx);
1322 __ PushReturnAddressFrom(ecx);
1323 __ Move(eax, ebx);
1324 }
1325
1326 // ----------- S t a t e -------------
1327 // -- eax : argArray
1328 // -- edi : receiver
1329 // -- esp[0] : return address
1330 // -- esp[4] : thisArg
1331 // -----------------------------------
1332
1333 // 2. Make sure the receiver is actually callable.
1334 Label receiver_not_callable;
1335 __ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
1336 __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1337 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1338 Immediate(1 << Map::kIsCallable));
1339 __ j(zero, &receiver_not_callable, Label::kNear);
1340
1341 // 3. Tail call with no arguments if argArray is null or undefined.
1342 Label no_arguments;
1343 __ JumpIfRoot(eax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
1344 __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &no_arguments,
1345 Label::kNear);
1346
1347 // 4a. Apply the receiver to the given argArray (passing undefined for
1348 // new.target).
1349 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1350 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1351
1352 // 4b. The argArray is either null or undefined, so we tail call without any
1353 // arguments to the receiver.
1354 __ bind(&no_arguments);
1355 {
1356 __ Set(eax, 0);
1357 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1358 }
1359
1360 // 4c. The receiver is not callable, throw an appropriate TypeError.
1361 __ bind(&receiver_not_callable);
1362 {
1363 __ mov(Operand(esp, kPointerSize), edi);
1364 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1365 }
1366 }
1367
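Step 1 above defaults both thisArg and argArray to undefined when they are absent; a standalone sketch of that extraction (plain C++, illustrative only, not V8 code):

#include <cstdint>

struct ApplyArgs {
  intptr_t receiver;
  intptr_t this_arg;
  intptr_t arg_array;
};

// args[0] is the receiver, args[1] thisArg, args[2] argArray; anything not
// supplied stays undefined, matching the argc checks in the stub.
ApplyArgs ExtractApplyArgs(const intptr_t* args, int argc, intptr_t undefined) {
  ApplyArgs out{args[0], undefined, undefined};
  if (argc >= 1) out.this_arg = args[1];
  if (argc >= 2) out.arg_array = args[2];
  return out;
}
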
1368
1369 // static
1370 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1371 // Stack Layout:
1372 // esp[0] : Return address
1373 // esp[4] : Argument n
1374 // esp[8] : Argument n-1
1375 // ...
1376 // esp[4 * n] : Argument 1
1377 // esp[4 * (n + 1)] : Receiver (callable to call)
1378 //
1379 // eax contains the number of arguments, n, not counting the receiver.
1380 //
1381 // 1. Make sure we have at least one argument.
1382 {
1383 Label done;
1384 __ test(eax, eax);
1385 __ j(not_zero, &done, Label::kNear);
1386 __ PopReturnAddressTo(ebx);
1387 __ PushRoot(Heap::kUndefinedValueRootIndex);
1388 __ PushReturnAddressFrom(ebx);
1389 __ inc(eax);
1390 __ bind(&done);
1391 }
1392
1393 // 2. Get the callable to call (passed as receiver) from the stack.
1394 __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1395
1396 // 3. Shift arguments and return address one slot down on the stack
1397 // (overwriting the original receiver). Adjust argument count to make
1398 // the original first argument the new receiver.
1399 {
1400 Label loop;
1401 __ mov(ecx, eax);
1402 __ bind(&loop);
1403 __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1404 __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
1405 __ dec(ecx);
1406 __ j(not_sign, &loop); // While non-negative (to copy return address).
1407 __ pop(ebx); // Discard copy of return address.
1408 __ dec(eax); // One fewer argument (first argument is new receiver).
1409 }
1410
1411 // 4. Call the callable.
1412 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1413 }
1414
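Step 3 above can be modeled as a simple shift of stack slots toward the receiver; a standalone sketch operating on a vector that mirrors the stack, top slot first (illustrative only, not V8 code):

#include <cstdint>
#include <vector>

// slots[0] is the return address, slots[1..argc] the arguments (last to
// first), slots[argc + 1] the receiver, i.e. the callable already saved in
// edi. Copying each slot one place toward the receiver overwrites the
// callable with the original first argument; the duplicated bottom slot is
// then popped and the argument count drops by one.
void MakeFirstArgumentTheReceiver(std::vector<intptr_t>& slots, int& argc) {
  for (int i = argc; i >= 0; --i) {
    slots[i + 1] = slots[i];   // mov loop, ecx runs from eax down to 0
  }
  slots.erase(slots.begin());  // __ pop(ebx): discard the duplicated slot
  --argc;                      // __ dec(eax)
}
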
1415
1416 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1417 // ----------- S t a t e -------------
1418 // -- eax : argc
1419 // -- esp[0] : return address
1420 // -- esp[4] : argumentsList
1421 // -- esp[8] : thisArgument
1422 // -- esp[12] : target
1423 // -- esp[16] : receiver
1424 // -----------------------------------
1425
1426 // 1. Load target into edi (if present), argumentsList into eax (if present),
1427 // remove all arguments from the stack (including the receiver), and push
1428 // thisArgument (if present) instead.
1429 {
1430 Label done;
1431 __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1432 __ mov(edx, edi);
1433 __ mov(ebx, edi);
1434 __ cmp(eax, Immediate(1));
1435 __ j(below, &done, Label::kNear);
1436 __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1437 __ j(equal, &done, Label::kNear);
1438 __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1439 __ cmp(eax, Immediate(3));
1440 __ j(below, &done, Label::kNear);
1441 __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1442 __ bind(&done);
1443 __ PopReturnAddressTo(ecx);
1444 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1445 __ Push(edx);
1446 __ PushReturnAddressFrom(ecx);
1447 __ Move(eax, ebx);
1448 }
1449
1450 // ----------- S t a t e -------------
1451 // -- eax : argumentsList
1452 // -- edi : target
1453 // -- esp[0] : return address
1454 // -- esp[4] : thisArgument
1455 // -----------------------------------
1456
1457 // 2. Make sure the target is actually callable.
1458 Label target_not_callable;
1459 __ JumpIfSmi(edi, &target_not_callable, Label::kNear);
1460 __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1461 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1462 Immediate(1 << Map::kIsCallable));
1463 __ j(zero, &target_not_callable, Label::kNear);
1464
1465 // 3a. Apply the target to the given argumentsList (passing undefined for
1466 // new.target).
1467 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1468 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1469
1470 // 3b. The target is not callable, throw an appropriate TypeError.
1471 __ bind(&target_not_callable);
1472 {
1473 __ mov(Operand(esp, kPointerSize), edi);
1474 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1475 }
1476 }
1477
1478 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1479 // ----------- S t a t e -------------
1480 // -- eax : argc
1481 // -- esp[0] : return address
1482 // -- esp[4] : new.target (optional)
1483 // -- esp[8] : argumentsList
1484 // -- esp[12] : target
1485 // -- esp[16] : receiver
1486 // -----------------------------------
1487
1488 // 1. Load target into edi (if present), argumentsList into eax (if present),
1489 // new.target into edx (if present, otherwise use target), remove all
1490 // arguments from the stack (including the receiver), and push thisArgument
1491 // (if present) instead.
1492 {
1493 Label done;
1494 __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1495 __ mov(edx, edi);
1496 __ mov(ebx, edi);
1497 __ cmp(eax, Immediate(1));
1498 __ j(below, &done, Label::kNear);
1499 __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1500 __ mov(edx, edi);
1501 __ j(equal, &done, Label::kNear);
1502 __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1503 __ cmp(eax, Immediate(3));
1504 __ j(below, &done, Label::kNear);
1505 __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1506 __ bind(&done);
1507 __ PopReturnAddressTo(ecx);
1508 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1509 __ PushRoot(Heap::kUndefinedValueRootIndex);
1510 __ PushReturnAddressFrom(ecx);
1511 __ Move(eax, ebx);
1512 }
1513
1514 // ----------- S t a t e -------------
1515 // -- eax : argumentsList
1516 // -- edx : new.target
1517 // -- edi : target
1518 // -- esp[0] : return address
1519 // -- esp[4] : receiver (undefined)
1520 // -----------------------------------
1521
1522 // 2. Make sure the target is actually a constructor.
1523 Label target_not_constructor;
1524 __ JumpIfSmi(edi, &target_not_constructor, Label::kNear);
1525 __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1526 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1527 Immediate(1 << Map::kIsConstructor));
1528 __ j(zero, &target_not_constructor, Label::kNear);
1529
1530 // 3. Make sure the new.target is actually a constructor.
1531 Label new_target_not_constructor;
1532 __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
1533 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
1534 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1535 Immediate(1 << Map::kIsConstructor));
1536 __ j(zero, &new_target_not_constructor, Label::kNear);
1537
1538 // 4a. Construct the target with the given new.target and argumentsList.
1539 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1540
1541 // 4b. The target is not a constructor, throw an appropriate TypeError.
1542 __ bind(&target_not_constructor);
1543 {
1544 __ mov(Operand(esp, kPointerSize), edi);
1545 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1546 }
1547
1548 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1549 __ bind(&new_target_not_constructor);
1550 {
1551 __ mov(Operand(esp, kPointerSize), edx);
1552 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1553 }
1554 }
1555
1556
1557 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1558 // ----------- S t a t e -------------
1559 // -- eax : argc
1560 // -- esp[0] : return address
1561 // -- esp[4] : last argument
1562 // -----------------------------------
1563 Label generic_array_code;
1564
1565 // Get the InternalArray function.
1566 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
1567
1568 if (FLAG_debug_code) {
1569 // Initial map for the builtin InternalArray function should be a map.
1570 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1571 // This test will catch both a NULL pointer and a Smi.
1572 __ test(ebx, Immediate(kSmiTagMask));
1573 __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
1574 __ CmpObjectType(ebx, MAP_TYPE, ecx);
1575 __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
1576 }
1577
1578 // Run the native code for the InternalArray function called as a normal
1579 // function.
1580 // Tail call a stub.
1581 InternalArrayConstructorStub stub(masm->isolate());
1582 __ TailCallStub(&stub);
1583 }
1584
1585
1586 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1587 // ----------- S t a t e -------------
1588 // -- eax : argc
1589 // -- esp[0] : return address
1590 // -- esp[4] : last argument
1591 // -----------------------------------
1592 Label generic_array_code;
1593
1594 // Get the Array function.
1595 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
1596 __ mov(edx, edi);
1597
1598 if (FLAG_debug_code) {
1599 // Initial map for the builtin Array function should be a map.
1600 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1601 // This test will catch both a NULL pointer and a Smi.
1602 __ test(ebx, Immediate(kSmiTagMask));
1603 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
1604 __ CmpObjectType(ebx, MAP_TYPE, ecx);
1605 __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
1606 }
1607
1608 // Run the native code for the Array function called as a normal function.
1609 // Tail call a stub.
1610 __ mov(ebx, masm->isolate()->factory()->undefined_value());
1611 ArrayConstructorStub stub(masm->isolate());
1612 __ TailCallStub(&stub);
1613 }
1614
1615
1616 // static
1617 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
1618 // ----------- S t a t e -------------
1619 // -- eax : number of arguments
1620 // -- edi : function
1621 // -- esi : context
1622 // -- esp[0] : return address
1623 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1624 // -- esp[(argc + 1) * 4] : receiver
1625 // -----------------------------------
1626 Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
1627 Heap::RootListIndex const root_index =
1628 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
1629 : Heap::kMinusInfinityValueRootIndex;
1630 const int reg_sel = (kind == MathMaxMinKind::kMin) ? 1 : 0;
1631
1632 // Load the accumulator with the default return value (either -Infinity or
1633 // +Infinity), with the tagged value in edx and the double value in stx_0.
1634 __ LoadRoot(edx, root_index);
1635 __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
1636 __ Move(ecx, eax);
1637
1638 Label done_loop, loop;
1639 __ bind(&loop);
1640 {
1641 // Check if all parameters have been processed.
1642 __ test(ecx, ecx);
1643 __ j(zero, &done_loop);
1644
1645 // Load the next parameter tagged value into ebx.
1646 __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1647
1648 // Load the double value of the parameter into stx_1, maybe converting the
1649 // parameter to a number first using the ToNumber builtin if necessary.
1650 Label convert, convert_smi, convert_number, done_convert;
1651 __ bind(&convert);
1652 __ JumpIfSmi(ebx, &convert_smi);
1653 __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
1654 Heap::kHeapNumberMapRootIndex, &convert_number);
1655 {
1656 // Parameter is not a Number, use the ToNumber builtin to convert it.
1657 FrameScope scope(masm, StackFrame::MANUAL);
1658 __ SmiTag(eax);
1659 __ SmiTag(ecx);
1660 __ EnterBuiltinFrame(esi, edi, eax);
1661 __ Push(ecx);
1662 __ Push(edx);
1663 __ mov(eax, ebx);
1664 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1665 __ mov(ebx, eax);
1666 __ Pop(edx);
1667 __ Pop(ecx);
1668 __ LeaveBuiltinFrame(esi, edi, eax);
1669 __ SmiUntag(ecx);
1670 __ SmiUntag(eax);
1671 {
1672 // Restore the double accumulator value (stx_0).
1673 Label restore_smi, done_restore;
1674 __ JumpIfSmi(edx, &restore_smi, Label::kNear);
1675 __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
1676 __ jmp(&done_restore, Label::kNear);
1677 __ bind(&restore_smi);
1678 __ SmiUntag(edx);
1679 __ push(edx);
1680 __ fild_s(Operand(esp, 0));
1681 __ pop(edx);
1682 __ SmiTag(edx);
1683 __ bind(&done_restore);
1684 }
1685 }
1686 __ jmp(&convert);
1687 __ bind(&convert_number);
1688 // Load the parameter's double value into stx_1 (via fld_d + fxch).
1689 __ fld_d(FieldOperand(ebx, HeapNumber::kValueOffset));
1690 __ fxch();
1691 __ jmp(&done_convert, Label::kNear);
1692 __ bind(&convert_smi);
1693 __ SmiUntag(ebx);
1694 __ push(ebx);
1695 __ fild_s(Operand(esp, 0));
1696 __ pop(ebx);
1697 __ fxch();
1698 __ SmiTag(ebx);
1699 __ bind(&done_convert);
1700
1701 // Perform the actual comparison with the accumulator value on the left hand
1702 // side (stx_0) and the next parameter value on the right hand side (stx_1).
1703 Label compare_equal, compare_nan, compare_swap, done_compare;
1704
1705 // Duplicate the two values (accumulator and parameter) for FCmp.
1706 __ fld(1);
1707 __ fld(1);
1708 __ FCmp();
1709 __ j(parity_even, &compare_nan, Label::kNear);
1710 __ j(cc, &done_compare, Label::kNear);
1711 __ j(equal, &compare_equal, Label::kNear);
1712
1713 // Result is on the right hand side (stx_0).
1714 __ bind(&compare_swap);
1715 __ fxch();
1716 __ mov(edx, ebx);
1717 __ jmp(&done_compare, Label::kNear);
1718
1719 // At least one side is NaN, which means that the result will be NaN too.
1720 __ bind(&compare_nan);
1721 // Set the result on the right hand side (stx_0) to NaN.
1722 __ fstp(0);
1723 __ LoadRoot(edx, Heap::kNanValueRootIndex);
1724 __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
1725 __ jmp(&done_compare, Label::kNear);
1726
1727 // Left and right hand side are equal, check for -0 vs. +0.
1728 __ bind(&compare_equal);
1729 // Check the sign of the st register selected by reg_sel.
1730 __ fld(reg_sel);
1731 __ FXamSign();
1732 __ j(not_zero, &compare_swap);
1733
1734 __ bind(&done_compare);
1735 // The correct result is on the right hand side (stx_0),
1736 // and the useless stx_1 can now be removed.
1737 __ fxch();
1738 __ fstp(0);
1739 __ dec(ecx);
1740 __ jmp(&loop);
1741 }
1742
1743 __ bind(&done_loop);
1744 __ PopReturnAddressTo(ecx);
1745 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1746 __ PushReturnAddressFrom(ecx);
1747 __ mov(eax, edx);
1748 __ Ret();
1749 }
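The loop above is easier to follow against a scalar model of the same semantics. A minimal sketch, assuming every argument has already gone through ToNumber (MathMaxMinSketch is a hypothetical helper, not V8 code); it shows the three cases the FCmp/FXamSign sequence distinguishes: NaN is sticky, equal values tie-break on the sign bit (-0 vs +0), and everything else is an ordinary comparison.

#include <cmath>
#include <limits>
#include <vector>

double MathMaxMinSketch(const std::vector<double>& args, bool is_min) {
  // Default accumulator: +Infinity for Math.min, -Infinity for Math.max.
  double acc = is_min ? std::numeric_limits<double>::infinity()
                      : -std::numeric_limits<double>::infinity();
  for (double x : args) {
    if (std::isnan(x) || std::isnan(acc)) {
      acc = std::numeric_limits<double>::quiet_NaN();  // NaN propagates
    } else if (x == acc) {
      // -0 vs +0 tie-break: min prefers -0, max prefers +0 (the sign check).
      if (is_min ? std::signbit(x) : std::signbit(acc)) acc = x;
    } else if (is_min ? (x < acc) : (x > acc)) {
      acc = x;
    }
  }
  return acc;
}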
1750
1751 // static
1752 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
1753 // ----------- S t a t e -------------
1754 // -- eax : number of arguments
1755 // -- edi : constructor function
1756 // -- esi : context
1757 // -- esp[0] : return address
1758 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1759 // -- esp[(argc + 1) * 4] : receiver
1760 // -----------------------------------
1761
1762 // 1. Load the first argument into ebx.
1763 Label no_arguments;
1764 {
1765 __ test(eax, eax);
1766 __ j(zero, &no_arguments, Label::kNear);
1767 __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1768 }
1769
1770 // 2a. Convert the first argument to a number.
1771 {
1772 FrameScope scope(masm, StackFrame::MANUAL);
1773 __ SmiTag(eax);
1774 __ EnterBuiltinFrame(esi, edi, eax);
1775 __ mov(eax, ebx);
1776 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1777 __ LeaveBuiltinFrame(esi, edi, ebx); // Argc popped to ebx.
1778 __ SmiUntag(ebx);
1779 }
1780
1781 {
1782 // Drop all arguments including the receiver.
1783 __ PopReturnAddressTo(ecx);
1784 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
1785 __ PushReturnAddressFrom(ecx);
1786 __ Ret();
1787 }
1788
1789 // 2b. No arguments, return +0 (already in eax).
1790 __ bind(&no_arguments);
1791 __ ret(1 * kPointerSize);
1792 }
1793
1794
1795 // static
1796 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
1797 // ----------- S t a t e -------------
1798 // -- eax : number of arguments
1799 // -- edi : constructor function
1800 // -- edx : new target
1801 // -- esi : context
1802 // -- esp[0] : return address
1803 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1804 // -- esp[(argc + 1) * 4] : receiver
1805 // -----------------------------------
1806
1807 // 1. Make sure we operate in the context of the called function.
1808 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1809
1810 // Store the Smi-tagged argc in ecx.
1811 __ mov(ecx, eax);
1812 __ SmiTag(ecx);
1813
1814 // 2. Load the first argument into ebx.
1815 {
1816 Label no_arguments, done;
1817 __ test(eax, eax);
1818 __ j(zero, &no_arguments, Label::kNear);
1819 __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1820 __ jmp(&done, Label::kNear);
1821 __ bind(&no_arguments);
1822 __ Move(ebx, Smi::FromInt(0));
1823 __ bind(&done);
1824 }
1825
1826 // 3. Make sure ebx is a number.
1827 {
1828 Label done_convert;
1829 __ JumpIfSmi(ebx, &done_convert);
1830 __ CompareRoot(FieldOperand(ebx, HeapObject::kMapOffset),
1831 Heap::kHeapNumberMapRootIndex);
1832 __ j(equal, &done_convert);
1833 {
1834 FrameScope scope(masm, StackFrame::MANUAL);
1835 __ EnterBuiltinFrame(esi, edi, ecx);
1836 __ Push(edx);
1837 __ Move(eax, ebx);
1838 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1839 __ Move(ebx, eax);
1840 __ Pop(edx);
1841 __ LeaveBuiltinFrame(esi, edi, ecx);
1842 }
1843 __ bind(&done_convert);
1844 }
1845
1846 // 4. Check if new target and constructor differ.
1847 Label drop_frame_and_ret, done_alloc, new_object;
1848 __ cmp(edx, edi);
1849 __ j(not_equal, &new_object);
1850
1851 // 5. Allocate a JSValue wrapper for the number.
1852 __ AllocateJSValue(eax, edi, ebx, esi, &done_alloc);
1853 __ jmp(&drop_frame_and_ret);
1854
1855 __ bind(&done_alloc);
1856 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); // Restore esi.
1857
1858 // 6. Fall back to the runtime to create a new object.
1859 __ bind(&new_object);
1860 {
1861 FrameScope scope(masm, StackFrame::MANUAL);
1862 __ EnterBuiltinFrame(esi, edi, ecx);
1863 __ Push(ebx); // the first argument
1864 FastNewObjectStub stub(masm->isolate());
1865 __ CallStub(&stub);
1866 __ Pop(FieldOperand(eax, JSValue::kValueOffset));
1867 __ LeaveBuiltinFrame(esi, edi, ecx);
1868 }
1869
1870 __ bind(&drop_frame_and_ret);
1871 {
1872 // Drop all arguments including the receiver.
1873 __ PopReturnAddressTo(esi);
1874 __ SmiUntag(ecx);
1875 __ lea(esp, Operand(esp, ecx, times_pointer_size, kPointerSize));
1876 __ PushReturnAddressFrom(esi);
1877 __ Ret();
1878 }
1879 }
1880
1881
1882 // static
1883 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
1884 // ----------- S t a t e -------------
1885 // -- eax : number of arguments
1886 // -- edi : constructor function
1887 // -- esi : context
1888 // -- esp[0] : return address
1889 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1890 // -- esp[(argc + 1) * 4] : receiver
1891 // -----------------------------------
1892
1893 // 1. Load the first argument into eax.
1894 Label no_arguments;
1895 {
1896 __ mov(ebx, eax); // Store argc in ebx.
1897 __ test(eax, eax);
1898 __ j(zero, &no_arguments, Label::kNear);
1899 __ mov(eax, Operand(esp, eax, times_pointer_size, 0));
1900 }
1901
1902 // 2a. At least one argument, return eax if it's a string, otherwise
1903 // dispatch to appropriate conversion.
1904 Label drop_frame_and_ret, to_string, symbol_descriptive_string;
1905 {
1906 __ JumpIfSmi(eax, &to_string, Label::kNear);
1907 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
1908 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
1909 __ j(above, &to_string, Label::kNear);
1910 __ j(equal, &symbol_descriptive_string, Label::kNear);
1911 __ jmp(&drop_frame_and_ret, Label::kNear);
1912 }
1913
1914 // 2b. No arguments, return the empty string (and pop the receiver).
1915 __ bind(&no_arguments);
1916 {
1917 __ LoadRoot(eax, Heap::kempty_stringRootIndex);
1918 __ ret(1 * kPointerSize);
1919 }
1920
1921 // 3a. Convert eax to a string.
1922 __ bind(&to_string);
1923 {
1924 FrameScope scope(masm, StackFrame::MANUAL);
1925 ToStringStub stub(masm->isolate());
1926 __ SmiTag(ebx);
1927 __ EnterBuiltinFrame(esi, edi, ebx);
1928 __ CallStub(&stub);
1929 __ LeaveBuiltinFrame(esi, edi, ebx);
1930 __ SmiUntag(ebx);
1931 }
1932 __ jmp(&drop_frame_and_ret, Label::kNear);
1933
1934 // 3b. Convert symbol in eax to a string.
1935 __ bind(&symbol_descriptive_string);
1936 {
1937 __ PopReturnAddressTo(ecx);
1938 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
1939 __ Push(eax);
1940 __ PushReturnAddressFrom(ecx);
1941 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
1942 }
1943
1944 __ bind(&drop_frame_and_ret);
1945 {
1946 // Drop all arguments including the receiver.
1947 __ PopReturnAddressTo(ecx);
1948 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
1949 __ PushReturnAddressFrom(ecx);
1950 __ Ret();
1951 }
1952 }
1953
1954
1955 // static
1956 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
1957 // ----------- S t a t e -------------
1958 // -- eax : number of arguments
1959 // -- edi : constructor function
1960 // -- edx : new target
1961 // -- esi : context
1962 // -- esp[0] : return address
1963 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1964 // -- esp[(argc + 1) * 4] : receiver
1965 // -----------------------------------
1966
1967 // 1. Make sure we operate in the context of the called function.
1968 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1969
1970 __ mov(ebx, eax);
1971
1972 // 2. Load the first argument into eax.
1973 {
1974 Label no_arguments, done;
1975 __ test(ebx, ebx);
1976 __ j(zero, &no_arguments, Label::kNear);
1977 __ mov(eax, Operand(esp, ebx, times_pointer_size, 0));
1978 __ jmp(&done, Label::kNear);
1979 __ bind(&no_arguments);
1980 __ LoadRoot(eax, Heap::kempty_stringRootIndex);
1981 __ bind(&done);
1982 }
1983
1984 // 3. Make sure eax is a string.
1985 {
1986 Label convert, done_convert;
1987 __ JumpIfSmi(eax, &convert, Label::kNear);
1988 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
1989 __ j(below, &done_convert);
1990 __ bind(&convert);
1991 {
1992 FrameScope scope(masm, StackFrame::MANUAL);
1993 ToStringStub stub(masm->isolate());
1994 __ SmiTag(ebx);
1995 __ EnterBuiltinFrame(esi, edi, ebx);
1996 __ Push(edx);
1997 __ CallStub(&stub);
1998 __ Pop(edx);
1999 __ LeaveBuiltinFrame(esi, edi, ebx);
2000 __ SmiUntag(ebx);
2001 }
2002 __ bind(&done_convert);
2003 }
2004
2005 // 4. Check if new target and constructor differ.
2006 Label drop_frame_and_ret, done_alloc, new_object;
2007 __ cmp(edx, edi);
2008 __ j(not_equal, &new_object);
2009
2010 // 5. Allocate a JSValue wrapper for the string.
2011 // AllocateJSValue can't handle src == dst register. Reuse esi and restore it
2012 // as needed after the call.
2013 __ mov(esi, eax);
2014 __ AllocateJSValue(eax, edi, esi, ecx, &done_alloc);
2015 __ jmp(&drop_frame_and_ret);
2016
2017 __ bind(&done_alloc);
2018 {
2019 // Restore eax to the first argument and esi to the context.
2020 __ mov(eax, esi);
2021 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2022 }
2023
2024 // 6. Fall back to the runtime to create a new object.
2025 __ bind(&new_object);
2026 {
2027 FrameScope scope(masm, StackFrame::MANUAL);
2028 __ SmiTag(ebx);
2029 __ EnterBuiltinFrame(esi, edi, ebx);
2030 __ Push(eax); // the first argument
2031 FastNewObjectStub stub(masm->isolate());
2032 __ CallStub(&stub);
2033 __ Pop(FieldOperand(eax, JSValue::kValueOffset));
2034 __ LeaveBuiltinFrame(esi, edi, ebx);
2035 __ SmiUntag(ebx);
2036 }
2037
2038 __ bind(&drop_frame_and_ret);
2039 {
2040 // Drop all arguments including the receiver.
2041 __ PopReturnAddressTo(ecx);
2042 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
2043 __ PushReturnAddressFrom(ecx);
2044 __ Ret();
2045 }
2046 }
2047
2048
2049 static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
2050 Label* stack_overflow) {
2051 // ----------- S t a t e -------------
2052 // -- eax : actual number of arguments
2053 // -- ebx : expected number of arguments
2054 // -- edx : new target (passed through to callee)
2055 // -----------------------------------
2056 // Check the stack for overflow. We are not trying to catch
2057 // interruptions (e.g. debug break and preemption) here, so the "real stack
2058 // limit" is checked.
2059 ExternalReference real_stack_limit =
2060 ExternalReference::address_of_real_stack_limit(masm->isolate());
2061 __ mov(edi, Operand::StaticVariable(real_stack_limit));
2062 // Make ecx the space we have left. The stack might already be overflowed
2063 // here which will cause ecx to become negative.
2064 __ mov(ecx, esp);
2065 __ sub(ecx, edi);
2066 // Make edi the space we need for the array when it is unrolled onto the
2067 // stack.
2068 __ mov(edi, ebx);
2069 __ shl(edi, kPointerSizeLog2);
2070 // Check if the arguments will overflow the stack.
2071 __ cmp(ecx, edi);
2072 __ j(less_equal, stack_overflow); // Signed comparison.
2073 }
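The same check in plain arithmetic, for reference; AdaptorWouldOverflow is a hypothetical helper assuming 4-byte stack slots on this architecture, and it mirrors the signed less-or-equal comparison above.

#include <cstdint>

bool AdaptorWouldOverflow(uintptr_t esp, uintptr_t real_stack_limit,
                          int expected_args) {
  const intptr_t kPointerSize = 4;  // ia32/x87 stack slot size
  // Space left above the "real" limit; an already-overflowed stack makes this
  // negative, which is why the builtin insists on a signed comparison.
  intptr_t remaining = static_cast<intptr_t>(esp) -
                       static_cast<intptr_t>(real_stack_limit);
  intptr_t needed = expected_args * kPointerSize;
  return remaining <= needed;  // matches j(less_equal, stack_overflow)
}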
2074
2075
2076 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2077 __ push(ebp);
2078 __ mov(ebp, esp);
2079
2080 // Store the arguments adaptor context sentinel.
2081 __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2082
2083 // Push the function on the stack.
2084 __ push(edi);
2085
2086 // Preserve the number of arguments on the stack. Must preserve eax,
2087 // ebx and ecx because these registers are used when copying the
2088 // arguments and the receiver.
2089 STATIC_ASSERT(kSmiTagSize == 1);
2090 __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
2091 __ push(edi);
2092 }
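The lea above is the usual 32-bit Smi-tagging idiom: eax + eax*1 + kSmiTag is argc * 2 with a clear tag bit, so the count can be pushed as a tagged value. A sketch of the encoding for non-negative values such as an argument count:

#include <cstdint>

constexpr int32_t kSmiTagSizeSketch = 1;
constexpr int32_t kSmiTagSketchValue = 0;

constexpr int32_t SmiTagSketch(int32_t value) {  // value * 2 | 0, like the lea
  return (value << kSmiTagSizeSketch) | kSmiTagSketchValue;
}
constexpr int32_t SmiUntagSketch(int32_t smi) { return smi >> kSmiTagSizeSketch; }

static_assert(SmiTagSketch(3) == 6, "argc 3 is stored as the bit pattern 6");
static_assert(SmiUntagSketch(SmiTagSketch(3)) == 3, "round trip");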
2093
2094
2095 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2096 // Retrieve the number of arguments from the stack.
2097 __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2098
2099 // Leave the frame.
2100 __ leave();
2101
2102 // Remove caller arguments from the stack.
2103 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
2104 __ pop(ecx);
2105 __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
2106 __ push(ecx);
2107 }
2108
2109
2110 // static
2111 void Builtins::Generate_Apply(MacroAssembler* masm) {
2112 // ----------- S t a t e -------------
2113 // -- eax : argumentsList
2114 // -- edi : target
2115 // -- edx : new.target (checked to be constructor or undefined)
2116 // -- esp[0] : return address.
2117 // -- esp[4] : thisArgument
2118 // -----------------------------------
2119
2120 // Create the list of arguments from the array-like argumentsList.
2121 {
2122 Label create_arguments, create_array, create_runtime, done_create;
2123 __ JumpIfSmi(eax, &create_runtime);
2124
2125 // Load the map of argumentsList into ecx.
2126 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
2127
2128 // Load native context into ebx.
2129 __ mov(ebx, NativeContextOperand());
2130
2131 // Check if argumentsList is an (unmodified) arguments object.
2132 __ cmp(ecx, ContextOperand(ebx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2133 __ j(equal, &create_arguments);
2134 __ cmp(ecx, ContextOperand(ebx, Context::STRICT_ARGUMENTS_MAP_INDEX));
2135 __ j(equal, &create_arguments);
2136
2137 // Check if argumentsList is a fast JSArray.
2138 __ CmpInstanceType(ecx, JS_ARRAY_TYPE);
2139 __ j(equal, &create_array);
2140
2141 // Ask the runtime to create the list (actually a FixedArray).
2142 __ bind(&create_runtime);
2143 {
2144 FrameScope scope(masm, StackFrame::INTERNAL);
2145 __ Push(edi);
2146 __ Push(edx);
2147 __ Push(eax);
2148 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2149 __ Pop(edx);
2150 __ Pop(edi);
2151 __ mov(ebx, FieldOperand(eax, FixedArray::kLengthOffset));
2152 __ SmiUntag(ebx);
2153 }
2154 __ jmp(&done_create);
2155
2156 // Try to create the list from an arguments object.
2157 __ bind(&create_arguments);
2158 __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
2159 __ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
2160 __ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2161 __ j(not_equal, &create_runtime);
2162 __ SmiUntag(ebx);
2163 __ mov(eax, ecx);
2164 __ jmp(&done_create);
2165
2166 // Try to create the list from a JSArray object.
2167 __ bind(&create_array);
2168 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
2169 __ DecodeField<Map::ElementsKindBits>(ecx);
2170 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2171 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2172 STATIC_ASSERT(FAST_ELEMENTS == 2);
2173 __ cmp(ecx, Immediate(FAST_ELEMENTS));
2174 __ j(above, &create_runtime);
2175 __ cmp(ecx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
2176 __ j(equal, &create_runtime);
2177 __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
2178 __ SmiUntag(ebx);
2179 __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));
2180
2181 __ bind(&done_create);
2182 }
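The three STATIC_ASSERTs pin down the only numeric ordering the fast path relies on. For reference, the resulting decision expressed directly (CanUseFastJSArrayPath and the trimmed enum are hypothetical sketches):

enum ElementsKindSketch {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2
  // Holey, double and dictionary kinds are numerically above FAST_ELEMENTS.
};

bool CanUseFastJSArrayPath(int kind) {
  if (kind > FAST_ELEMENTS) return false;             // j(above, &create_runtime)
  if (kind == FAST_HOLEY_SMI_ELEMENTS) return false;  // holes need the runtime
  return true;  // FAST_SMI_ELEMENTS or FAST_ELEMENTS: reuse the elements array
}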
2183
2184 // Check for stack overflow.
2185 {
2186 // Check the stack for overflow. We are not trying to catch interruptions
2187 // (i.e. debug break and preemption) here, so check the "real stack limit".
2188 Label done;
2189 ExternalReference real_stack_limit =
2190 ExternalReference::address_of_real_stack_limit(masm->isolate());
2191 __ mov(ecx, Operand::StaticVariable(real_stack_limit));
2192 // Make ecx the space we have left. The stack might already be overflowed
2193 // here which will cause ecx to become negative.
2194 __ neg(ecx);
2195 __ add(ecx, esp);
2196 __ sar(ecx, kPointerSizeLog2);
2197 // Check if the arguments will overflow the stack.
2198 __ cmp(ecx, ebx);
2199 __ j(greater, &done, Label::kNear); // Signed comparison.
2200 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2201 __ bind(&done);
2202 }
2203
2204 // ----------- S t a t e -------------
2205 // -- edi : target
2206 // -- eax : args (a FixedArray built from argumentsList)
2207 // -- ebx : len (number of elements to push from args)
2208 // -- edx : new.target (checked to be constructor or undefined)
2209 // -- esp[0] : return address.
2210 // -- esp[4] : thisArgument
2211 // -----------------------------------
2212
2213 // Push arguments onto the stack (thisArgument is already on the stack).
2214 {
2215 __ push(edx);
2216 __ fld_s(MemOperand(esp, 0));
2217 __ lea(esp, Operand(esp, kFloatSize));
2218
2219 __ PopReturnAddressTo(edx);
2220 __ Move(ecx, Immediate(0));
2221 Label done, loop;
2222 __ bind(&loop);
2223 __ cmp(ecx, ebx);
2224 __ j(equal, &done, Label::kNear);
2225 __ Push(
2226 FieldOperand(eax, ecx, times_pointer_size, FixedArray::kHeaderSize));
2227 __ inc(ecx);
2228 __ jmp(&loop);
2229 __ bind(&done);
2230 __ PushReturnAddressFrom(edx);
2231
2232 __ lea(esp, Operand(esp, -kFloatSize));
2233 __ fstp_s(MemOperand(esp, 0));
2234 __ pop(edx);
2235
2236 __ Move(eax, ebx);
2237 }
2238
2239 // Dispatch to Call or Construct depending on whether new.target is undefined.
2240 {
2241 __ CompareRoot(edx, Heap::kUndefinedValueRootIndex);
2242 __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2243 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2244 }
2245 }
2246
2247 namespace {
2248
2249 // Drops top JavaScript frame and an arguments adaptor frame below it (if
2250 // present) preserving all the arguments prepared for current call.
2251 // Does nothing if tail call elimination is disabled (e.g. while the debugger is active).
2252 // ES6 14.6.3. PrepareForTailCall
2253 //
2254 // Stack structure for the function g() tail calling f():
2255 //
2256 // ------- Caller frame: -------
2257 // | ...
2258 // | g()'s arg M
2259 // | ...
2260 // | g()'s arg 1
2261 // | g()'s receiver arg
2262 // | g()'s caller pc
2263 // ------- g()'s frame: -------
2264 // | g()'s caller fp <- fp
2265 // | g()'s context
2266 // | function pointer: g
2267 // | -------------------------
2268 // | ...
2269 // | ...
2270 // | f()'s arg N
2271 // | ...
2272 // | f()'s arg 1
2273 // | f()'s receiver arg
2274 // | f()'s caller pc <- sp
2275 // ----------------------
2276 //
2277 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2278 Register scratch1, Register scratch2,
2279 Register scratch3) {
2280 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2281 Comment cmnt(masm, "[ PrepareForTailCall");
2282
2283 // Prepare for tail call only if ES2015 tail call elimination is enabled.
2284 Label done;
2285 ExternalReference is_tail_call_elimination_enabled =
2286 ExternalReference::is_tail_call_elimination_enabled_address(
2287 masm->isolate());
2288 __ movzx_b(scratch1,
2289 Operand::StaticVariable(is_tail_call_elimination_enabled));
2290 __ cmp(scratch1, Immediate(0));
2291 __ j(equal, &done, Label::kNear);
2292
2293 // Drop possible interpreter handler/stub frame.
2294 {
2295 Label no_interpreter_frame;
2296 __ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
2297 Immediate(Smi::FromInt(StackFrame::STUB)));
2298 __ j(not_equal, &no_interpreter_frame, Label::kNear);
2299 __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2300 __ bind(&no_interpreter_frame);
2301 }
2302
2303 // Check if next frame is an arguments adaptor frame.
2304 Register caller_args_count_reg = scratch1;
2305 Label no_arguments_adaptor, formal_parameter_count_loaded;
2306 __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2307 __ cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
2308 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2309 __ j(not_equal, &no_arguments_adaptor, Label::kNear);
2310
2311 // Drop current frame and load arguments count from arguments adaptor frame.
2312 __ mov(ebp, scratch2);
2313 __ mov(caller_args_count_reg,
2314 Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2315 __ SmiUntag(caller_args_count_reg);
2316 __ jmp(&formal_parameter_count_loaded, Label::kNear);
2317
2318 __ bind(&no_arguments_adaptor);
2319 // Load caller's formal parameter count
2320 __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2321 __ mov(scratch1,
2322 FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2323 __ mov(
2324 caller_args_count_reg,
2325 FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
2326 __ SmiUntag(caller_args_count_reg);
2327
2328 __ bind(&formal_parameter_count_loaded);
2329
2330 ParameterCount callee_args_count(args_reg);
2331 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2332 scratch3, ReturnAddressState::kOnStack, 0);
2333 __ bind(&done);
2334 }
2335 } // namespace
2336
2337 // static
2338 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2339 ConvertReceiverMode mode,
2340 TailCallMode tail_call_mode) {
2341 // ----------- S t a t e -------------
2342 // -- eax : the number of arguments (not including the receiver)
2343 // -- edi : the function to call (checked to be a JSFunction)
2344 // -----------------------------------
2345 __ AssertFunction(edi);
2346
2347 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2348 // Check that the function is not a "classConstructor".
2349 Label class_constructor;
2350 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2351 __ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
2352 Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2353 __ j(not_zero, &class_constructor);
2354
2355 // Enter the context of the function; ToObject has to run in the function
2356 // context, and we also need to take the global proxy from the function
2357 // context in case of conversion.
2358 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2359 SharedFunctionInfo::kStrictModeByteOffset);
2360 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2361 // We need to convert the receiver for non-native sloppy mode functions.
2362 Label done_convert;
2363 __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
2364 Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2365 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2366 __ j(not_zero, &done_convert);
2367 {
2368 // ----------- S t a t e -------------
2369 // -- eax : the number of arguments (not including the receiver)
2370 // -- edx : the shared function info.
2371 // -- edi : the function to call (checked to be a JSFunction)
2372 // -- esi : the function context.
2373 // -----------------------------------
2374
2375 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2376 // Patch receiver to global proxy.
2377 __ LoadGlobalProxy(ecx);
2378 } else {
2379 Label convert_to_object, convert_receiver;
2380 __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
2381 __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
2382 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2383 __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
2384 __ j(above_equal, &done_convert);
2385 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2386 Label convert_global_proxy;
2387 __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
2388 &convert_global_proxy, Label::kNear);
2389 __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
2390 Label::kNear);
2391 __ bind(&convert_global_proxy);
2392 {
2393 // Patch receiver to global proxy.
2394 __ LoadGlobalProxy(ecx);
2395 }
2396 __ jmp(&convert_receiver);
2397 }
2398 __ bind(&convert_to_object);
2399 {
2400 // Convert receiver using ToObject.
2401 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2402 // in the fast case? (fall back to AllocateInNewSpace?)
2403 FrameScope scope(masm, StackFrame::INTERNAL);
2404 __ SmiTag(eax);
2405 __ Push(eax);
2406 __ Push(edi);
2407 __ mov(eax, ecx);
2408 ToObjectStub stub(masm->isolate());
2409 __ CallStub(&stub);
2410 __ mov(ecx, eax);
2411 __ Pop(edi);
2412 __ Pop(eax);
2413 __ SmiUntag(eax);
2414 }
2415 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2416 __ bind(&convert_receiver);
2417 }
2418 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
2419 }
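For a non-native, sloppy-mode callee the branch structure above reduces to a small decision table (strict and native functions skip the conversion entirely, as the kNativeByteOffset/kStrictModeByteOffset test shows). A sketch with hypothetical enum names:

enum class ReceiverKind { kJSReceiver, kNullOrUndefined, kOtherPrimitive };
enum class ReceiverAction { kKeep, kUseGlobalProxy, kWrapWithToObject };

ReceiverAction ConvertReceiverSketch(ReceiverKind receiver) {
  switch (receiver) {
    case ReceiverKind::kJSReceiver:      return ReceiverAction::kKeep;
    case ReceiverKind::kNullOrUndefined: return ReceiverAction::kUseGlobalProxy;
    case ReceiverKind::kOtherPrimitive:  return ReceiverAction::kWrapWithToObject;
  }
  return ReceiverAction::kKeep;  // not reached
}

When the call site already knows the receiver is null or undefined (ConvertReceiverMode::kNullOrUndefined), the global proxy is patched in without any of these checks.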
2420 __ bind(&done_convert);
2421
2422 // ----------- S t a t e -------------
2423 // -- eax : the number of arguments (not including the receiver)
2424 // -- edx : the shared function info.
2425 // -- edi : the function to call (checked to be a JSFunction)
2426 // -- esi : the function context.
2427 // -----------------------------------
2428
2429 if (tail_call_mode == TailCallMode::kAllow) {
2430 PrepareForTailCall(masm, eax, ebx, ecx, edx);
2431 // Reload shared function info.
2432 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2433 }
2434
2435 __ mov(ebx,
2436 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2437 __ SmiUntag(ebx);
2438 ParameterCount actual(eax);
2439 ParameterCount expected(ebx);
2440 __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION,
2441 CheckDebugStepCallWrapper());
2442 // The function is a "classConstructor", need to raise an exception.
2443 __ bind(&class_constructor);
2444 {
2445 FrameScope frame(masm, StackFrame::INTERNAL);
2446 __ push(edi);
2447 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2448 }
2449 }
2450
2451
2452 namespace {
2453
2454 void Generate_PushBoundArguments(MacroAssembler* masm) {
2455 // ----------- S t a t e -------------
2456 // -- eax : the number of arguments (not including the receiver)
2457 // -- edx : new.target (only in case of [[Construct]])
2458 // -- edi : target (checked to be a JSBoundFunction)
2459 // -----------------------------------
2460
2461 // Load [[BoundArguments]] into ecx and length of that into ebx.
2462 Label no_bound_arguments;
2463 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2464 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2465 __ SmiUntag(ebx);
2466 __ test(ebx, ebx);
2467 __ j(zero, &no_bound_arguments);
2468 {
2469 // ----------- S t a t e -------------
2470 // -- eax : the number of arguments (not including the receiver)
2471 // -- edx : new.target (only in case of [[Construct]])
2472 // -- edi : target (checked to be a JSBoundFunction)
2473 // -- ecx : the [[BoundArguments]] (implemented as FixedArray)
2474 // -- ebx : the number of [[BoundArguments]]
2475 // -----------------------------------
2476
2477 // Reserve stack space for the [[BoundArguments]].
2478 {
2479 Label done;
2480 __ lea(ecx, Operand(ebx, times_pointer_size, 0));
2481 __ sub(esp, ecx);
2482 // Check the stack for overflow. We are not trying to catch interruptions
2483 // (i.e. debug break and preemption) here, so check the "real stack
2484 // limit".
2485 __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
2486 __ j(greater, &done, Label::kNear); // Signed comparison.
2487 // Restore the stack pointer.
2488 __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
2489 {
2490 FrameScope scope(masm, StackFrame::MANUAL);
2491 __ EnterFrame(StackFrame::INTERNAL);
2492 __ CallRuntime(Runtime::kThrowStackOverflow);
2493 }
2494 __ bind(&done);
2495 }
2496
2497 // Adjust effective number of arguments to include return address.
2498 __ inc(eax);
2499
2500 // Relocate arguments and return address down the stack.
2501 {
2502 Label loop;
2503 __ Set(ecx, 0);
2504 __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
2505 __ bind(&loop);
2506 __ fld_s(Operand(ebx, ecx, times_pointer_size, 0));
2507 __ fstp_s(Operand(esp, ecx, times_pointer_size, 0));
2508 __ inc(ecx);
2509 __ cmp(ecx, eax);
2510 __ j(less, &loop);
2511 }
2512
2513 // Copy [[BoundArguments]] to the stack (below the arguments).
2514 {
2515 Label loop;
2516 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2517 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2518 __ SmiUntag(ebx);
2519 __ bind(&loop);
2520 __ dec(ebx);
2521 __ fld_s(
2522 FieldOperand(ecx, ebx, times_pointer_size, FixedArray::kHeaderSize));
2523 __ fstp_s(Operand(esp, eax, times_pointer_size, 0));
2524 __ lea(eax, Operand(eax, 1));
2525 __ j(greater, &loop);
2526 }
2527
2528 // Adjust effective number of arguments (eax contains the number of
2529 // arguments from the call plus return address plus the number of
2530 // [[BoundArguments]]), so we need to subtract one for the return address.
2531 __ dec(eax);
2532 }
2533 __ bind(&no_bound_arguments);
2534 }
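Net effect of the relocation and copy loops above, expressed over a simple vector: the [[BoundArguments]] end up in front of the call-site arguments, while the receiver is handled separately via the [[BoundThis]] patch in the callers of this helper. Value and SpliceBoundArguments are hypothetical stand-ins.

#include <string>
#include <vector>

using Value = std::string;  // stand-in for a tagged value

std::vector<Value> SpliceBoundArguments(const std::vector<Value>& bound_args,
                                        const std::vector<Value>& call_args) {
  std::vector<Value> out;
  out.reserve(bound_args.size() + call_args.size());
  out.insert(out.end(), bound_args.begin(), bound_args.end());  // bound first
  out.insert(out.end(), call_args.begin(), call_args.end());    // then actuals
  return out;
}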
2535
2536 } // namespace
2537
2538
2539 // static
2540 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2541 TailCallMode tail_call_mode) {
2542 // ----------- S t a t e -------------
2543 // -- eax : the number of arguments (not including the receiver)
2544 // -- edi : the function to call (checked to be a JSBoundFunction)
2545 // -----------------------------------
2546 __ AssertBoundFunction(edi);
2547
2548 if (tail_call_mode == TailCallMode::kAllow) {
2549 PrepareForTailCall(masm, eax, ebx, ecx, edx);
2550 }
2551
2552 // Patch the receiver to [[BoundThis]].
2553 __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2554 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);
2555
2556 // Push the [[BoundArguments]] onto the stack.
2557 Generate_PushBoundArguments(masm);
2558
2559 // Call the [[BoundTargetFunction]] via the Call builtin.
2560 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2561 __ mov(ecx, Operand::StaticVariable(ExternalReference(
2562 Builtins::kCall_ReceiverIsAny, masm->isolate())));
2563 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2564 __ jmp(ecx);
2565 }
2566
2567
2568 // static
2569 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2570 TailCallMode tail_call_mode) {
2571 // ----------- S t a t e -------------
2572 // -- eax : the number of arguments (not including the receiver)
2573 // -- edi : the target to call (can be any Object).
2574 // -----------------------------------
2575
2576 Label non_callable, non_function, non_smi;
2577 __ JumpIfSmi(edi, &non_callable);
2578 __ bind(&non_smi);
2579 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2580 __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2581 RelocInfo::CODE_TARGET);
2582 __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2583 __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2584 RelocInfo::CODE_TARGET);
2585
2586 // Check if target has a [[Call]] internal method.
2587 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2588 Immediate(1 << Map::kIsCallable));
2589 __ j(zero, &non_callable);
2590
2591 __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2592 __ j(not_equal, &non_function);
2593
2594 // 0. Prepare for tail call if necessary.
2595 if (tail_call_mode == TailCallMode::kAllow) {
2596 PrepareForTailCall(masm, eax, ebx, ecx, edx);
2597 }
2598
2599 // 1. Runtime fallback for Proxy [[Call]].
2600 __ PopReturnAddressTo(ecx);
2601 __ Push(edi);
2602 __ PushReturnAddressFrom(ecx);
2603 // Increase the arguments size to include the pushed function and the
2604 // existing receiver on the stack.
2605 __ add(eax, Immediate(2));
2606 // Tail-call to the runtime.
2607 __ JumpToExternalReference(
2608 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2609
2610 // 2. Call to something else, which might have a [[Call]] internal method (if
2611 // not we raise an exception).
2612 __ bind(&non_function);
2613 // Overwrite the original receiver with the (original) target.
2614 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2615 // Let the "call_as_function_delegate" take care of the rest.
2616 __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
2617 __ Jump(masm->isolate()->builtins()->CallFunction(
2618 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2619 RelocInfo::CODE_TARGET);
2620
2621 // 3. Call to something that is not callable.
2622 __ bind(&non_callable);
2623 {
2624 FrameScope scope(masm, StackFrame::INTERNAL);
2625 __ Push(edi);
2626 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2627 }
2628 }
2629
2630
2631 // static
2632 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2633 // ----------- S t a t e -------------
2634 // -- eax : the number of arguments (not including the receiver)
2635 // -- edx : the new target (checked to be a constructor)
2636 // -- edi : the constructor to call (checked to be a JSFunction)
2637 // -----------------------------------
2638 __ AssertFunction(edi);
2639
2640 // Calling convention for function-specific ConstructStubs requires
2641 // ebx to contain either an AllocationSite or undefined.
2642 __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);
2643
2644 // Tail call to the function-specific construct stub (still in the caller
2645 // context at this point).
2646 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2647 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
2648 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2649 __ jmp(ecx);
2650 }
2651
2652
2653 // static
2654 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2655 // ----------- S t a t e -------------
2656 // -- eax : the number of arguments (not including the receiver)
2657 // -- edx : the new target (checked to be a constructor)
2658 // -- edi : the constructor to call (checked to be a JSBoundFunction)
2659 // -----------------------------------
2660 __ AssertBoundFunction(edi);
2661
2662 // Push the [[BoundArguments]] onto the stack.
2663 Generate_PushBoundArguments(masm);
2664
2665 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2666 {
2667 Label done;
2668 __ cmp(edi, edx);
2669 __ j(not_equal, &done, Label::kNear);
2670 __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2671 __ bind(&done);
2672 }
2673
2674 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2675 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2676 __ mov(ecx, Operand::StaticVariable(
2677 ExternalReference(Builtins::kConstruct, masm->isolate())));
2678 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2679 __ jmp(ecx);
2680 }
2681
2682
2683 // static
2684 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2685 // ----------- S t a t e -------------
2686 // -- eax : the number of arguments (not including the receiver)
2687 // -- edi : the constructor to call (checked to be a JSProxy)
2688 // -- edx : the new target (either the same as the constructor or
2689 // the JSFunction on which new was invoked initially)
2690 // -----------------------------------
2691
2692 // Call into the Runtime for Proxy [[Construct]].
2693 __ PopReturnAddressTo(ecx);
2694 __ Push(edi);
2695 __ Push(edx);
2696 __ PushReturnAddressFrom(ecx);
2697 // Include the pushed new_target, constructor and the receiver.
2698 __ add(eax, Immediate(3));
2699 // Tail-call to the runtime.
2700 __ JumpToExternalReference(
2701 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2702 }
2703
2704
2705 // static
2706 void Builtins::Generate_Construct(MacroAssembler* masm) {
2707 // ----------- S t a t e -------------
2708 // -- eax : the number of arguments (not including the receiver)
2709 // -- edx : the new target (either the same as the constructor or
2710 // the JSFunction on which new was invoked initially)
2711 // -- edi : the constructor to call (can be any Object)
2712 // -----------------------------------
2713
2714 // Check if target is a Smi.
2715 Label non_constructor;
2716 __ JumpIfSmi(edi, &non_constructor, Label::kNear);
2717
2718 // Dispatch based on instance type.
2719 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2720 __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
2721 RelocInfo::CODE_TARGET);
2722
2723 // Check if target has a [[Construct]] internal method.
2724 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2725 Immediate(1 << Map::kIsConstructor));
2726 __ j(zero, &non_constructor, Label::kNear);
2727
2728 // Only dispatch to bound functions after checking whether they are
2729 // constructors.
2730 __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2731 __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
2732 RelocInfo::CODE_TARGET);
2733
2734 // Only dispatch to proxies after checking whether they are constructors.
2735 __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2736 __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
2737 RelocInfo::CODE_TARGET);
2738
2739 // Called Construct on an exotic Object with a [[Construct]] internal method.
2740 {
2741 // Overwrite the original receiver with the (original) target.
2742 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2743 // Let the "call_as_constructor_delegate" take care of the rest.
2744 __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
2745 __ Jump(masm->isolate()->builtins()->CallFunction(),
2746 RelocInfo::CODE_TARGET);
2747 }
2748
2749 // Called Construct on an Object that doesn't have a [[Construct]] internal
2750 // method.
2751 __ bind(&non_constructor);
2752 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2753 RelocInfo::CODE_TARGET);
2754 }
2755
2756 // static
2757 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2758 // ----------- S t a t e -------------
2759 // -- edx : requested object size (untagged)
2760 // -- esp[0] : return address
2761 // -----------------------------------
2762 __ SmiTag(edx);
2763 __ PopReturnAddressTo(ecx);
2764 __ Push(edx);
2765 __ PushReturnAddressFrom(ecx);
2766 __ Move(esi, Smi::FromInt(0));
2767 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2768 }
2769
2770 // static
2771 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2772 // ----------- S t a t e -------------
2773 // -- edx : requested object size (untagged)
2774 // -- esp[0] : return address
2775 // -----------------------------------
2776 __ SmiTag(edx);
2777 __ PopReturnAddressTo(ecx);
2778 __ Push(edx);
2779 __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2780 __ PushReturnAddressFrom(ecx);
2781 __ Move(esi, Smi::FromInt(0));
2782 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2783 }
2784
2785 // static
2786 void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
2787 // The StringToNumber stub takes one argument in eax.
2788 __ AssertString(eax);
2789
2790 // Check if string has a cached array index.
2791 Label runtime;
2792 __ test(FieldOperand(eax, String::kHashFieldOffset),
2793 Immediate(String::kContainsCachedArrayIndexMask));
2794 __ j(not_zero, &runtime, Label::kNear);
2795 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
2796 __ IndexFromHash(eax, eax);
2797 __ Ret();
2798
2799 __ bind(&runtime);
2800 {
2801 FrameScope frame(masm, StackFrame::INTERNAL);
2802 // Push argument.
2803 __ push(eax);
2804 // We cannot use a tail call here because this builtin can also be called
2805 // from wasm.
2806 __ CallRuntime(Runtime::kStringToNumber);
2807 }
2808 __ Ret();
2809 }
2810
2811 // static
2812 void Builtins::Generate_ToNumber(MacroAssembler* masm) {
2813 // The ToNumber stub takes one argument in eax.
2814 Label not_smi;
2815 __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
2816 __ Ret();
2817 __ bind(&not_smi);
2818
2819 Label not_heap_number;
2820 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
2821 __ j(not_equal, &not_heap_number, Label::kNear);
2822 __ Ret();
2823 __ bind(&not_heap_number);
2824
2825 __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
2826 RelocInfo::CODE_TARGET);
2827 }
2828
2829 // static
2830 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
2831 // The NonNumberToNumber stub takes one argument in eax.
2832 __ AssertNotNumber(eax);
2833
2834 Label not_string;
2835 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
2836 // eax: object
2837 // edi: object map
2838 __ j(above_equal, &not_string, Label::kNear);
2839 __ Jump(masm->isolate()->builtins()->StringToNumber(),
2840 RelocInfo::CODE_TARGET);
2841 __ bind(&not_string);
2842
2843 Label not_oddball;
2844 __ CmpInstanceType(edi, ODDBALL_TYPE);
2845 __ j(not_equal, &not_oddball, Label::kNear);
2846 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
2847 __ Ret();
2848 __ bind(&not_oddball);
2849 {
2850 FrameScope frame(masm, StackFrame::INTERNAL);
2851 // Push argument.
2852 __ push(eax);
2853 // We cannot use a tail call here because this builtin can also be called
2854 // from wasm.
2855 __ CallRuntime(Runtime::kToNumber);
2856 }
2857 __ Ret();
2858 }
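The three builtins above (ToNumber, NonNumberToNumber, StringToNumber) form a dispatch chain. A sketch of which handler each input type ends up in, using hypothetical enums rather than V8 types:

enum class InputType { kSmi, kHeapNumber, kString, kOddball, kOther };
enum class Handler {
  kAlreadyNumber,        // Smis and HeapNumbers are returned unchanged
  kStringToNumber,       // cached array index fast path or the runtime
  kOddballToNumberSlot,  // Oddball::kToNumberOffset
  kRuntimeToNumber       // Runtime::kToNumber
};

Handler DispatchToNumberSketch(InputType t) {
  switch (t) {
    case InputType::kSmi:
    case InputType::kHeapNumber: return Handler::kAlreadyNumber;
    case InputType::kString:     return Handler::kStringToNumber;
    case InputType::kOddball:    return Handler::kOddballToNumberSlot;
    default:                     return Handler::kRuntimeToNumber;
  }
}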
2859
2860 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2861 // ----------- S t a t e -------------
2862 // -- eax : actual number of arguments
2863 // -- ebx : expected number of arguments
2864 // -- edx : new target (passed through to callee)
2865 // -- edi : function (passed through to callee)
2866 // -----------------------------------
2867
2868 Label invoke, dont_adapt_arguments, stack_overflow;
2869 __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
2870
2871 Label enough, too_few;
2872 __ cmp(eax, ebx);
2873 __ j(less, &too_few);
2874 __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2875 __ j(equal, &dont_adapt_arguments);
2876
2877 { // Enough parameters: Actual >= expected.
2878 __ bind(&enough);
2879 EnterArgumentsAdaptorFrame(masm);
2880 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
2881
2882 // Copy receiver and all expected arguments.
2883 const int offset = StandardFrameConstants::kCallerSPOffset;
2884 __ lea(edi, Operand(ebp, eax, times_4, offset));
2885 __ mov(eax, -1); // account for receiver
2886
2887 Label copy;
2888 __ bind(&copy);
2889 __ inc(eax);
2890 __ push(Operand(edi, 0));
2891 __ sub(edi, Immediate(kPointerSize));
2892 __ cmp(eax, ebx);
2893 __ j(less, &copy);
2894 // eax now contains the expected number of arguments.
2895 __ jmp(&invoke);
2896 }
2897
2898 { // Too few parameters: Actual < expected.
2899 __ bind(&too_few);
2900
2901 EnterArgumentsAdaptorFrame(masm);
2902 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
2903
2904 // Remember expected arguments in ecx.
2905 __ mov(ecx, ebx);
2906
2907 // Copy receiver and all actual arguments.
2908 const int offset = StandardFrameConstants::kCallerSPOffset;
2909 __ lea(edi, Operand(ebp, eax, times_4, offset));
2910 // ebx = expected - actual.
2911 __ sub(ebx, eax);
2912 // eax = -actual - 1
2913 __ neg(eax);
2914 __ sub(eax, Immediate(1));
2915
2916 Label copy;
2917 __ bind(&copy);
2918 __ inc(eax);
2919 __ push(Operand(edi, 0));
2920 __ sub(edi, Immediate(kPointerSize));
2921 __ test(eax, eax);
2922 __ j(not_zero, &copy);
2923
2924 // Fill remaining expected arguments with undefined values.
2925 Label fill;
2926 __ bind(&fill);
2927 __ inc(eax);
2928 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
2929 __ cmp(eax, ebx);
2930 __ j(less, &fill);
2931
2932 // Restore expected arguments.
2933 __ mov(eax, ecx);
2934 }
2935
2936 // Call the entry point.
2937 __ bind(&invoke);
2938 // Restore function pointer.
2939 __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2940 // eax : expected number of arguments
2941 // edx : new target (passed through to callee)
2942 // edi : function (passed through to callee)
2943 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2944 __ call(ecx);
2945
2946 // Store offset of return address for deoptimizer.
2947 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2948
2949 // Leave frame and return.
2950 LeaveArgumentsAdaptorFrame(masm);
2951 __ ret(0);
2952
2953 // -------------------------------------------
2954 // Don't adapt arguments.
2955 // -------------------------------------------
2956 __ bind(&dont_adapt_arguments);
2957 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2958 __ jmp(ecx);
2959
2960 __ bind(&stack_overflow);
2961 {
2962 FrameScope frame(masm, StackFrame::MANUAL);
2963 __ CallRuntime(Runtime::kThrowStackOverflow);
2964 __ int3();
2965 }
2966 }
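What the copy/fill loops above hand to the callee, in vector form: exactly the expected number of arguments, either a prefix of the actual ones or the actual ones padded with undefined. Value, kUndefined and AdaptArguments are hypothetical stand-ins.

#include <string>
#include <vector>

using Value = std::string;
static const Value kUndefined = "undefined";

std::vector<Value> AdaptArguments(const std::vector<Value>& actual,
                                  size_t expected) {
  std::vector<Value> adapted;
  adapted.reserve(expected);
  for (size_t i = 0; i < expected; ++i) {
    adapted.push_back(i < actual.size() ? actual[i] : kUndefined);
  }
  return adapted;
}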
2967
2968
2969 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
2970 Register function_template_info,
2971 Register scratch0, Register scratch1,
2972 Label* receiver_check_failed) {
2973 // If there is no signature, return the holder.
2974 __ CompareRoot(FieldOperand(function_template_info,
2975 FunctionTemplateInfo::kSignatureOffset),
2976 Heap::kUndefinedValueRootIndex);
2977 Label receiver_check_passed;
2978 __ j(equal, &receiver_check_passed, Label::kNear);
2979
2980 // Walk the prototype chain.
2981 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
2982 Label prototype_loop_start;
2983 __ bind(&prototype_loop_start);
2984
2985 // Get the constructor, if any.
2986 __ GetMapConstructor(scratch0, scratch0, scratch1);
2987 __ CmpInstanceType(scratch1, JS_FUNCTION_TYPE);
2988 Label next_prototype;
2989 __ j(not_equal, &next_prototype, Label::kNear);
2990
2991 // Get the constructor's signature.
2992 __ mov(scratch0,
2993 FieldOperand(scratch0, JSFunction::kSharedFunctionInfoOffset));
2994 __ mov(scratch0,
2995 FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
2996
2997 // Loop through the chain of inheriting function templates.
2998 Label function_template_loop;
2999 __ bind(&function_template_loop);
3000
3001 // If the signatures match, we have a compatible receiver.
3002 __ cmp(scratch0, FieldOperand(function_template_info,
3003 FunctionTemplateInfo::kSignatureOffset));
3004 __ j(equal, &receiver_check_passed, Label::kNear);
3005
3006 // If the current type is not a FunctionTemplateInfo, load the next prototype
3007 // in the chain.
3008 __ JumpIfSmi(scratch0, &next_prototype, Label::kNear);
3009 __ CmpObjectType(scratch0, FUNCTION_TEMPLATE_INFO_TYPE, scratch1);
3010 __ j(not_equal, &next_prototype, Label::kNear);
3011
3012 // Otherwise load the parent function template and iterate.
3013 __ mov(scratch0,
3014 FieldOperand(scratch0, FunctionTemplateInfo::kParentTemplateOffset));
3015 __ jmp(&function_template_loop, Label::kNear);
3016
3017 // Load the next prototype.
3018 __ bind(&next_prototype);
3019 __ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
3020 __ test(FieldOperand(receiver, Map::kBitField3Offset),
3021 Immediate(Map::HasHiddenPrototype::kMask));
3022 __ j(zero, receiver_check_failed);
3023
3024 __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
3025 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
3026 // Iterate.
3027 __ jmp(&prototype_loop_start, Label::kNear);
3028
3029 __ bind(&receiver_check_passed);
3030 }
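The nested loops above can be read as: walk the receiver's (hidden-)prototype chain, and at each step walk the constructor's chain of inheriting function templates looking for the required signature. A pared-down sketch with hypothetical types (signature matching is by object identity, as in the cmp above):

struct TemplateInfoSketch {
  const TemplateInfoSketch* parent = nullptr;  // kParentTemplateOffset
};
struct ReceiverSketch {
  const TemplateInfoSketch* constructor_template = nullptr;  // via the map
  bool has_hidden_prototype = false;
  const ReceiverSketch* prototype = nullptr;
};

bool CompatibleReceiverSketch(const ReceiverSketch* receiver,
                              const TemplateInfoSketch* required_signature) {
  if (required_signature == nullptr) return true;  // no signature: accept
  while (receiver != nullptr) {
    for (const TemplateInfoSketch* t = receiver->constructor_template;
         t != nullptr; t = t->parent) {
      if (t == required_signature) return true;    // signatures match
    }
    if (!receiver->has_hidden_prototype) return false;  // check failed
    receiver = receiver->prototype;
  }
  return false;
}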
3031
3032
3033 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
3034 // ----------- S t a t e -------------
3035 // -- eax : number of arguments (not including the receiver)
3036 // -- edi : callee
3037 // -- esi : context
3038 // -- esp[0] : return address
3039 // -- esp[4] : last argument
3040 // -- ...
3041 // -- esp[eax * 4] : first argument
3042 // -- esp[(eax + 1) * 4] : receiver
3043 // -----------------------------------
3044
3045 // Load the FunctionTemplateInfo.
3046 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
3047 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
3048
3049 // Do the compatible receiver check.
3050 Label receiver_check_failed;
3051 __ mov(ecx, Operand(esp, eax, times_pointer_size, kPCOnStackSize));
3052 __ Push(eax);
3053 CompatibleReceiverCheck(masm, ecx, ebx, edx, eax, &receiver_check_failed);
3054 __ Pop(eax);
3055 // Load the fast handler code object from the FunctionTemplateInfo's call
3056 // handler info and jump to its first instruction.
3057 __ mov(edx, FieldOperand(ebx, FunctionTemplateInfo::kCallCodeOffset));
3058 __ mov(edx, FieldOperand(edx, CallHandlerInfo::kFastHandlerOffset));
3059 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
3060 __ jmp(edx);
3061
3062 // Compatible receiver check failed: pop return address, arguments and
3063 // receiver and throw an Illegal Invocation exception.
3064 __ bind(&receiver_check_failed);
3065 __ Pop(eax);
3066 __ PopReturnAddressTo(ebx);
3067 __ lea(eax, Operand(eax, times_pointer_size, 1 * kPointerSize));
3068 __ add(esp, eax);
3069 __ PushReturnAddressFrom(ebx);
3070 {
3071 FrameScope scope(masm, StackFrame::INTERNAL);
3072 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
3073 }
3074 }
3075
3076
3077 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
3078 // Lookup the function in the JavaScript frame.
3079 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3080 {
3081 FrameScope scope(masm, StackFrame::INTERNAL);
3082 // Pass function as argument.
3083 __ push(eax);
3084 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
3085 }
3086
3087 Label skip;
3088 // If the code object is null, just return to the unoptimized code.
3089 __ cmp(eax, Immediate(0));
3090 __ j(not_equal, &skip, Label::kNear);
3091 __ ret(0);
3092
3093 __ bind(&skip);
3094
3095 // Load deoptimization data from the code object.
3096 __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
3097
3098 // Load the OSR entrypoint offset from the deoptimization data.
3099 __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
3100 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
3101 __ SmiUntag(ebx);
3102
3103 // Compute the target address = code_obj + header_size + osr_offset
3104 __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));
3105
3106 // Overwrite the return address on the stack.
3107 __ mov(Operand(esp, 0), eax);
3108
3109 // And "return" to the OSR entry point of the function.
3110 __ ret(0);
3111 }
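The address formed by the final lea, spelled out; kCodeHeaderSizeSketch is a hypothetical stand-in for Code::kHeaderSize and the tag constant matches 32-bit heap-object tagging.

#include <cstdint>

constexpr uintptr_t kHeapObjectTagSketch = 1;    // tagged pointers have bit 0 set
constexpr uintptr_t kCodeHeaderSizeSketch = 64;  // stand-in for Code::kHeaderSize

// entry = start of the code object's instructions + the OSR pc offset taken
// from the deoptimization data; the builtin stores this over the return
// address and "returns" into the optimized code.
uintptr_t OsrEntryAddress(uintptr_t tagged_code_object, uint32_t osr_pc_offset) {
  uintptr_t code_start = tagged_code_object - kHeapObjectTagSketch;
  return code_start + kCodeHeaderSizeSketch + osr_pc_offset;
}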
3112
3113
3114 #undef __
3115 } // namespace internal
3116 } // namespace v8
3117
3118 #endif // V8_TARGET_ARCH_X87