Chromium Code Reviews

Side by Side Diff: src/ia32/builtins-ia32.cc

Issue 2145023002: [builtins] move builtin files to src/builtins/. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebase Created 4 years, 5 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_IA32
6
7 #include "src/code-factory.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/ia32/frames-ia32.h"
12
13 namespace v8 {
14 namespace internal {
15
16
17 #define __ ACCESS_MASM(masm)
18
19 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
20 ExitFrameType exit_frame_type) {
21 // ----------- S t a t e -------------
22 // -- eax : number of arguments excluding receiver
23 // -- edi : target
24 // -- edx : new.target
25 // -- esp[0] : return address
26 // -- esp[4] : last argument
27 // -- ...
28 // -- esp[4 * argc] : first argument
29 // -- esp[4 * (argc + 1)] : receiver
30 // -----------------------------------
31 __ AssertFunction(edi);
32
33 // Make sure we operate in the context of the called function (for example
34 // ConstructStubs implemented in C++ will be run in the context of the caller
35 // instead of the callee, due to the way that [[Construct]] is defined for
36 // ordinary functions).
37 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
38
39 // JumpToExternalReference expects eax to contain the number of arguments
40 // including the receiver and the extra arguments.
41 const int num_extra_args = 3;
42 __ add(eax, Immediate(num_extra_args + 1));
43
44 // Insert extra arguments.
45 __ PopReturnAddressTo(ecx);
46 __ SmiTag(eax);
47 __ Push(eax);
48 __ SmiUntag(eax);
49 __ Push(edi);
50 __ Push(edx);
51 __ PushReturnAddressFrom(ecx);
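  // The stack now holds, from the top: the return address, new.target (edx),
  // the target (edi), the Smi-tagged argument count, and then the original
  // arguments with the receiver below them.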
52
53 __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
54 exit_frame_type == BUILTIN_EXIT);
55 }
56
57 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
58 Runtime::FunctionId function_id) {
59 // ----------- S t a t e -------------
60 // -- eax : argument count (preserved for callee)
61 // -- edx : new target (preserved for callee)
62 // -- edi : target function (preserved for callee)
63 // -----------------------------------
64 {
65 FrameScope scope(masm, StackFrame::INTERNAL);
66 // Push the number of arguments to the callee.
67 __ SmiTag(eax);
68 __ push(eax);
69 // Push a copy of the target function and the new target.
70 __ push(edi);
71 __ push(edx);
72 // Function is also the parameter to the runtime call.
73 __ push(edi);
74
75 __ CallRuntime(function_id, 1);
76 __ mov(ebx, eax);
77
78 // Restore target function and new target.
79 __ pop(edx);
80 __ pop(edi);
81 __ pop(eax);
82 __ SmiUntag(eax);
83 }
84
85 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
86 __ jmp(ebx);
87 }
88
89 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
90 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
91 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
92 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
93 __ jmp(ebx);
94 }
95
96 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
97 // Checking whether the queued function is ready for install is optional,
98 // since we come across interrupts and stack checks elsewhere. However,
99 // not checking may delay installing ready functions, and always checking
100 // would be quite expensive. A good compromise is to first check against
101 // stack limit as a cue for an interrupt signal.
102 Label ok;
103 ExternalReference stack_limit =
104 ExternalReference::address_of_stack_limit(masm->isolate());
105 __ cmp(esp, Operand::StaticVariable(stack_limit));
106 __ j(above_equal, &ok, Label::kNear);
107
108 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
109
110 __ bind(&ok);
111 GenerateTailCallToSharedCode(masm);
112 }
113
114 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
115 bool is_api_function,
116 bool create_implicit_receiver,
117 bool check_derived_construct) {
118 // ----------- S t a t e -------------
119 // -- eax: number of arguments
120 // -- esi: context
121 // -- edi: constructor function
122 // -- ebx: allocation site or undefined
123 // -- edx: new target
124 // -----------------------------------
125
126 // Enter a construct frame.
127 {
128 FrameScope scope(masm, StackFrame::CONSTRUCT);
129
130 // Preserve the incoming parameters on the stack.
131 __ AssertUndefinedOrAllocationSite(ebx);
132 __ push(esi);
133 __ push(ebx);
134 __ SmiTag(eax);
135 __ push(eax);
136
137 if (create_implicit_receiver) {
138 // Allocate the new receiver object.
139 __ Push(edi);
140 __ Push(edx);
141 FastNewObjectStub stub(masm->isolate());
142 __ CallStub(&stub);
143 __ mov(ebx, eax);
144 __ Pop(edx);
145 __ Pop(edi);
146
147 // ----------- S t a t e -------------
148 // -- edi: constructor function
149 // -- ebx: newly allocated object
150 // -- edx: new target
151 // -----------------------------------
152
153 // Retrieve smi-tagged arguments count from the stack.
154 __ mov(eax, Operand(esp, 0));
155 }
156
157 __ SmiUntag(eax);
158
159 if (create_implicit_receiver) {
160 // Push the allocated receiver to the stack. We need two copies
161 // because we may have to return the original one and the calling
162 // conventions dictate that the called function pops the receiver.
163 __ push(ebx);
164 __ push(ebx);
165 } else {
166 __ PushRoot(Heap::kTheHoleValueRootIndex);
167 }
168
169 // Set up pointer to last argument.
170 __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
171
172 // Copy arguments and receiver to the expression stack.
173 Label loop, entry;
174 __ mov(ecx, eax);
175 __ jmp(&entry);
176 __ bind(&loop);
177 __ push(Operand(ebx, ecx, times_4, 0));
178 __ bind(&entry);
179 __ dec(ecx);
180 __ j(greater_equal, &loop);
181
182 // Call the function.
183 ParameterCount actual(eax);
184 __ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
185 CheckDebugStepCallWrapper());
186
187 // Store offset of return address for deoptimizer.
188 if (create_implicit_receiver && !is_api_function) {
189 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
190 }
191
192 // Restore context from the frame.
193 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
194
195 if (create_implicit_receiver) {
196 // If the result is an object (in the ECMA sense), we should get rid
197 // of the receiver and use the result.
198 Label use_receiver, exit;
199
200 // If the result is a smi, it is *not* an object in the ECMA sense.
201 __ JumpIfSmi(eax, &use_receiver);
202
203 // If the type of the result (stored in its map) is less than
204 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
205 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
206 __ j(above_equal, &exit);
207
208 // Throw away the result of the constructor invocation and use the
209 // on-stack receiver as the result.
210 __ bind(&use_receiver);
211 __ mov(eax, Operand(esp, 0));
212
213 // Restore the arguments count and leave the construct frame. The
214 // arguments count is stored below the receiver.
215 __ bind(&exit);
216 __ mov(ebx, Operand(esp, 1 * kPointerSize));
217 } else {
218 __ mov(ebx, Operand(esp, 0));
219 }
220
221 // Leave construct frame.
222 }
223
224 // ES6 9.2.2. Step 13+
225 // If the result is a Smi, the constructor of the derived class returned
226 // something that is neither undefined nor an Object, so throw a TypeError.
227 if (check_derived_construct) {
228 Label dont_throw;
229 __ JumpIfNotSmi(eax, &dont_throw);
230 {
231 FrameScope scope(masm, StackFrame::INTERNAL);
232 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
233 }
234 __ bind(&dont_throw);
235 }
236
237 // Remove caller arguments from the stack and return.
238 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
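  // ebx holds the argument count as a Smi (i.e. already shifted left by one),
  // so scaling it by 2 below yields argc * kPointerSize; the extra
  // kPointerSize skips the receiver.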
239 __ pop(ecx);
240 __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
241 __ push(ecx);
242 if (create_implicit_receiver) {
243 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
244 }
245 __ ret(0);
246 }
247
248
249 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
250 Generate_JSConstructStubHelper(masm, false, true, false);
251 }
252
253
254 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
255 Generate_JSConstructStubHelper(masm, true, false, false);
256 }
257
258
259 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
260 Generate_JSConstructStubHelper(masm, false, false, false);
261 }
262
263
264 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
265 MacroAssembler* masm) {
266 Generate_JSConstructStubHelper(masm, false, false, true);
267 }
268
269
270 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
271 FrameScope scope(masm, StackFrame::INTERNAL);
272 __ push(edi);
273 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
274 }
275
276
277 enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt };
278
279
280 // Clobbers ecx, edx, edi; preserves all other registers.
281 static void Generate_CheckStackOverflow(MacroAssembler* masm,
282 IsTagged eax_is_tagged) {
283 // eax : the number of items to be pushed to the stack
284 //
285 // Check the stack for overflow. We are not trying to catch
286 // interruptions (e.g. debug break and preemption) here, so the "real stack
287 // limit" is checked.
288 Label okay;
289 ExternalReference real_stack_limit =
290 ExternalReference::address_of_real_stack_limit(masm->isolate());
291 __ mov(edi, Operand::StaticVariable(real_stack_limit));
292 // Make ecx the space we have left. The stack might already be overflowed
293 // here which will cause ecx to become negative.
294 __ mov(ecx, esp);
295 __ sub(ecx, edi);
296 // Make edx the space we need for the array when it is unrolled onto the
297 // stack.
298 __ mov(edx, eax);
299 int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0;
300 __ shl(edx, kPointerSizeLog2 - smi_tag);
301 // Check if the arguments will overflow the stack.
302 __ cmp(ecx, edx);
303 __ j(greater, &okay); // Signed comparison.
304
305 // Out of stack space.
306 __ CallRuntime(Runtime::kThrowStackOverflow);
307
308 __ bind(&okay);
309 }
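The check above reduces to comparing the remaining stack space against the
space the arguments will occupy. A minimal, self-contained C++ sketch of that
arithmetic (illustrative only; WouldOverflowStack and its parameters are
made-up names, not V8 API, and the constants are the ia32 values):

  #include <cstdint>

  bool WouldOverflowStack(uintptr_t esp, uintptr_t real_stack_limit,
                          uintptr_t item_count, bool count_is_smi_tagged) {
    // Space left below the stack pointer; may already be negative if the
    // stack has overflowed, which is why the stub uses a signed comparison.
    intptr_t space_left = static_cast<intptr_t>(esp) -
                          static_cast<intptr_t>(real_stack_limit);
    int shift = 2 /* kPointerSizeLog2 */ - (count_is_smi_tagged ? 1 : 0);
    intptr_t bytes_needed = static_cast<intptr_t>(item_count) << shift;
    return space_left <= bytes_needed;  // stub only takes "okay" if greater
  }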
310
311
312 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
313 bool is_construct) {
314 ProfileEntryHookStub::MaybeCallEntryHook(masm);
315
316 {
317 FrameScope scope(masm, StackFrame::INTERNAL);
318
319 // Setup the context (we need to use the caller context from the isolate).
320 ExternalReference context_address(Isolate::kContextAddress,
321 masm->isolate());
322 __ mov(esi, Operand::StaticVariable(context_address));
323
324 // Load the previous frame pointer (ebx) to access C arguments
325 __ mov(ebx, Operand(ebp, 0));
326
327 // Push the function and the receiver onto the stack.
328 __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
329 __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
330
331 // Load the number of arguments and setup pointer to the arguments.
332 __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
333 __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
334
335 // Check if we have enough stack space to push all arguments.
336 // Expects argument count in eax. Clobbers ecx, edx, edi.
337 Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt);
338
339 // Copy arguments to the stack in a loop.
340 Label loop, entry;
341 __ Move(ecx, Immediate(0));
342 __ jmp(&entry, Label::kNear);
343 __ bind(&loop);
344 __ mov(edx, Operand(ebx, ecx, times_4, 0)); // get parameter from argv
345 __ push(Operand(edx, 0)); // dereference handle
346 __ inc(ecx);
347 __ bind(&entry);
348 __ cmp(ecx, eax);
349 __ j(not_equal, &loop);
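  // In C terms the loop above is roughly: for (i = 0; i < argc; i++)
  // Push(*argv[i]); each argv slot holds a handle location, hence the extra
  // dereference before the push.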
350
351 // Load the previous frame pointer (ebx) to access C arguments
352 __ mov(ebx, Operand(ebp, 0));
353
354 // Get the new.target and function from the frame.
355 __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
356 __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
357
358 // Invoke the code.
359 Handle<Code> builtin = is_construct
360 ? masm->isolate()->builtins()->Construct()
361 : masm->isolate()->builtins()->Call();
362 __ Call(builtin, RelocInfo::CODE_TARGET);
363
364 // Exit the internal frame. Notice that this also removes the empty
365 // context and the function left on the stack by the code
366 // invocation.
367 }
368 __ ret(kPointerSize); // Remove receiver.
369 }
370
371
372 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
373 Generate_JSEntryTrampolineHelper(masm, false);
374 }
375
376
377 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
378 Generate_JSEntryTrampolineHelper(masm, true);
379 }
380
381 // static
382 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
383 // ----------- S t a t e -------------
384 // -- eax : the value to pass to the generator
385 // -- ebx : the JSGeneratorObject to resume
386 // -- edx : the resume mode (tagged)
387 // -- esp[0] : return address
388 // -----------------------------------
389 __ AssertGeneratorObject(ebx);
390
391 // Store input value into generator object.
392 __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
393 __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
394 kDontSaveFPRegs);
395
396 // Store resume mode into generator object.
397 __ mov(FieldOperand(ebx, JSGeneratorObject::kResumeModeOffset), edx);
398
399 // Load suspended function and context.
400 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
401 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
402
403 // Flood function if we are stepping.
404 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
405 Label stepping_prepared;
406 ExternalReference last_step_action =
407 ExternalReference::debug_last_step_action_address(masm->isolate());
408 STATIC_ASSERT(StepFrame > StepIn);
409 __ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
410 __ j(greater_equal, &prepare_step_in_if_stepping);
411
412 // Flood function if we need to continue stepping in the suspended generator.
413 ExternalReference debug_suspended_generator =
414 ExternalReference::debug_suspended_generator_address(masm->isolate());
415 __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator));
416 __ j(equal, &prepare_step_in_suspended_generator);
417 __ bind(&stepping_prepared);
418
419 // Pop return address.
420 __ PopReturnAddressTo(eax);
421
422 // Push receiver.
423 __ Push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
424
425 // ----------- S t a t e -------------
426 // -- eax : return address
427 // -- ebx : the JSGeneratorObject to resume
428 // -- edx : the resume mode (tagged)
429 // -- edi : generator function
430 // -- esi : generator context
431 // -- esp[0] : generator receiver
432 // -----------------------------------
433
434 // Push holes for arguments to generator function. Since the parser forced
435 // context allocation for any variables in generators, the actual argument
436 // values have already been copied into the context and these dummy values
437 // will never be used.
438 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
439 __ mov(ecx,
440 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
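  // ecx now holds the formal parameter count as a Smi; the loop below pushes
  // that many holes, using the carry from the Smi subtraction to terminate.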
441 {
442 Label done_loop, loop;
443 __ bind(&loop);
444 __ sub(ecx, Immediate(Smi::FromInt(1)));
445 __ j(carry, &done_loop, Label::kNear);
446 __ PushRoot(Heap::kTheHoleValueRootIndex);
447 __ jmp(&loop);
448 __ bind(&done_loop);
449 }
450
451 // Dispatch on the kind of generator object.
452 Label old_generator;
453 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
454 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
455 __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
456 __ j(not_equal, &old_generator);
457
458 // New-style (ignition/turbofan) generator object
459 {
460 __ PushReturnAddressFrom(eax);
461 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
462 __ mov(eax,
463 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
464 // We abuse new.target both to indicate that this is a resume call and to
465 // pass in the generator object. In ordinary calls, new.target is always
466 // undefined because generator functions are non-constructable.
467 __ mov(edx, ebx);
468 __ jmp(FieldOperand(edi, JSFunction::kCodeEntryOffset));
469 }
470
471 // Old-style (full-codegen) generator object
472 __ bind(&old_generator);
473 {
474 // Enter a new JavaScript frame, and initialize its slots as they were when
475 // the generator was suspended.
476 FrameScope scope(masm, StackFrame::MANUAL);
477 __ PushReturnAddressFrom(eax); // Return address.
478 __ Push(ebp); // Caller's frame pointer.
479 __ Move(ebp, esp);
480 __ Push(esi); // Callee's context.
481 __ Push(edi); // Callee's JS Function.
482
483 // Restore the operand stack.
484 __ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
485 {
486 Label done_loop, loop;
487 __ Move(ecx, Smi::FromInt(0));
488 __ bind(&loop);
489 __ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
490 __ j(equal, &done_loop, Label::kNear);
491 __ Push(FieldOperand(eax, ecx, times_half_pointer_size,
492 FixedArray::kHeaderSize));
493 __ add(ecx, Immediate(Smi::FromInt(1)));
494 __ jmp(&loop);
495 __ bind(&done_loop);
496 }
497
498 // Reset operand stack so we don't leak.
499 __ mov(FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset),
500 Immediate(masm->isolate()->factory()->empty_fixed_array()));
501
502 // Resume the generator function at the continuation.
503 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
504 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
505 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
506 __ SmiUntag(ecx);
507 __ lea(edx, FieldOperand(edx, ecx, times_1, Code::kHeaderSize));
508 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
509 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
510 __ mov(eax, ebx); // Continuation expects generator object in eax.
511 __ jmp(edx);
512 }
513
514 __ bind(&prepare_step_in_if_stepping);
515 {
516 FrameScope scope(masm, StackFrame::INTERNAL);
517 __ Push(ebx);
518 __ Push(edx);
519 __ Push(edi);
520 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
521 __ Pop(edx);
522 __ Pop(ebx);
523 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
524 }
525 __ jmp(&stepping_prepared);
526
527 __ bind(&prepare_step_in_suspended_generator);
528 {
529 FrameScope scope(masm, StackFrame::INTERNAL);
530 __ Push(ebx);
531 __ Push(edx);
532 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
533 __ Pop(edx);
534 __ Pop(ebx);
535 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
536 }
537 __ jmp(&stepping_prepared);
538 }
539
540 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
541 Register scratch2) {
542 Register args_count = scratch1;
543 Register return_pc = scratch2;
544
545 // Get the arguments + receiver count.
546 __ mov(args_count,
547 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
548 __ mov(args_count,
549 FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
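  // The parameter size loaded here is a byte count covering the receiver and
  // all arguments; it is added directly to esp below.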
550
551 // Leave the frame (also dropping the register file).
552 __ leave();
553
554 // Drop receiver + arguments.
555 __ pop(return_pc);
556 __ add(esp, args_count);
557 __ push(return_pc);
558 }
559
560 // Generate code for entering a JS function with the interpreter.
561 // On entry to the function the receiver and arguments have been pushed on the
562 // stack left to right. The actual argument count matches the formal parameter
563 // count expected by the function.
564 //
565 // The live registers are:
566 // o edi: the JS function object being called
567 // o edx: the new target
568 // o esi: our context
569 // o ebp: the caller's frame pointer
570 // o esp: stack pointer (pointing to return address)
571 //
572 // The function builds an interpreter frame. See InterpreterFrameConstants in
573 // frames.h for its layout.
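// Roughly, after the prologue below the frame looks like (higher addresses
// first):
//   ebp + 4  : return address
//   ebp + 0  : caller's ebp
//   ebp - 4  : context (esi)
//   ebp - 8  : JS function (edi)
//   ebp - 12 : new target (edx)
//   ebp - 16 : bytecode array
//   ebp - 20 : bytecode offset (Smi)
//   below    : register file, filled with undefined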
574 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
575 ProfileEntryHookStub::MaybeCallEntryHook(masm);
576
577 // Open a frame scope to indicate that there is a frame on the stack. The
578 // MANUAL indicates that the scope shouldn't actually generate code to set up
579 // the frame (that is done below).
580 FrameScope frame_scope(masm, StackFrame::MANUAL);
581 __ push(ebp); // Caller's frame pointer.
582 __ mov(ebp, esp);
583 __ push(esi); // Callee's context.
584 __ push(edi); // Callee's JS function.
585 __ push(edx); // Callee's new target.
586
587 // Get the bytecode array from the function object (or from the DebugInfo if
588 // it is present) and load it into kInterpreterBytecodeArrayRegister.
589 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
590 Label load_debug_bytecode_array, bytecode_array_loaded;
591 __ cmp(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
592 Immediate(DebugInfo::uninitialized()));
593 __ j(not_equal, &load_debug_bytecode_array);
594 __ mov(kInterpreterBytecodeArrayRegister,
595 FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
596 __ bind(&bytecode_array_loaded);
597
598 // Check function data field is actually a BytecodeArray object.
599 Label bytecode_array_not_present;
600 __ CompareRoot(kInterpreterBytecodeArrayRegister,
601 Heap::kUndefinedValueRootIndex);
602 __ j(equal, &bytecode_array_not_present);
603 if (FLAG_debug_code) {
604 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
605 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
606 eax);
607 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
608 }
609
610 // Push bytecode array.
611 __ push(kInterpreterBytecodeArrayRegister);
612 // Push Smi tagged initial bytecode array offset.
613 __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
614
615 // Allocate the local and temporary register file on the stack.
616 {
617 // Load frame size from the BytecodeArray object.
618 __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
619 BytecodeArray::kFrameSizeOffset));
620
621 // Do a stack check to ensure we don't go over the limit.
622 Label ok;
623 __ mov(ecx, esp);
624 __ sub(ecx, ebx);
625 ExternalReference stack_limit =
626 ExternalReference::address_of_real_stack_limit(masm->isolate());
627 __ cmp(ecx, Operand::StaticVariable(stack_limit));
628 __ j(above_equal, &ok);
629 __ CallRuntime(Runtime::kThrowStackOverflow);
630 __ bind(&ok);
631
632 // If ok, push undefined as the initial value for all register file entries.
633 Label loop_header;
634 Label loop_check;
635 __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
636 __ jmp(&loop_check);
637 __ bind(&loop_header);
638 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
639 __ push(eax);
640 // Continue loop if not done.
641 __ bind(&loop_check);
642 __ sub(ebx, Immediate(kPointerSize));
643 __ j(greater_equal, &loop_header);
644 }
645
646 // Load accumulator, bytecode offset and dispatch table into registers.
647 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
648 __ mov(kInterpreterBytecodeOffsetRegister,
649 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
650 __ mov(kInterpreterDispatchTableRegister,
651 Immediate(ExternalReference::interpreter_dispatch_table_address(
652 masm->isolate())));
653
654 // Dispatch to the first bytecode handler for the function.
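  // (Roughly: handler = dispatch_table[bytecode_array[bytecode_offset]];
  // call handler;)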
655 __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
656 kInterpreterBytecodeOffsetRegister, times_1, 0));
657 __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
658 times_pointer_size, 0));
659 __ call(ebx);
660 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
661
662 // The return value is in eax.
663 LeaveInterpreterFrame(masm, ebx, ecx);
664 __ ret(0);
665
666 // Load debug copy of the bytecode array.
667 __ bind(&load_debug_bytecode_array);
668 Register debug_info = kInterpreterBytecodeArrayRegister;
669 __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
670 __ mov(kInterpreterBytecodeArrayRegister,
671 FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
672 __ jmp(&bytecode_array_loaded);
673
674 // If the bytecode array is no longer present, then the underlying function
675 // has been switched to a different kind of code and we heal the closure by
676 // switching the code entry field over to the new code object as well.
677 __ bind(&bytecode_array_not_present);
678 __ pop(edx); // Callee's new target.
679 __ pop(edi); // Callee's JS function.
680 __ pop(esi); // Callee's context.
681 __ leave(); // Leave the frame so we can tail call.
682 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
683 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
684 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
685 __ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
686 __ RecordWriteCodeEntryField(edi, ecx, ebx);
687 __ jmp(ecx);
688 }
689
690 void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
691 // Save the function and context for call to CompileBaseline.
692 __ mov(edi, Operand(ebp, StandardFrameConstants::kFunctionOffset));
693 __ mov(kContextRegister,
694 Operand(ebp, StandardFrameConstants::kContextOffset));
695
696 // Leave the frame before recompiling for baseline so that we don't count as
697 // an activation on the stack.
698 LeaveInterpreterFrame(masm, ebx, ecx);
699
700 {
701 FrameScope frame_scope(masm, StackFrame::INTERNAL);
702 // Push return value.
703 __ push(eax);
704
705 // Push function as argument and compile for baseline.
706 __ push(edi);
707 __ CallRuntime(Runtime::kCompileBaseline);
708
709 // Restore return value.
710 __ pop(eax);
711 }
712 __ ret(0);
713 }
714
715 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
716 Register array_limit) {
717 // ----------- S t a t e -------------
718 // -- ebx : Pointer to the last argument in the args array.
719 // -- array_limit : Pointer to one before the first argument in the
720 // args array.
721 // -----------------------------------
722 Label loop_header, loop_check;
723 __ jmp(&loop_check);
724 __ bind(&loop_header);
725 __ Push(Operand(ebx, 0));
726 __ sub(ebx, Immediate(kPointerSize));
727 __ bind(&loop_check);
728 __ cmp(ebx, array_limit);
729 __ j(greater, &loop_header, Label::kNear);
730 }
731
732 // static
733 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
734 MacroAssembler* masm, TailCallMode tail_call_mode,
735 CallableType function_type) {
736 // ----------- S t a t e -------------
737 // -- eax : the number of arguments (not including the receiver)
738 // -- ebx : the address of the first argument to be pushed. Subsequent
739 // arguments should be consecutive above this, in the same order as
740 // they are to be pushed onto the stack.
741 // -- edi : the target to call (can be any Object).
742 // -----------------------------------
743
744 // Pop return address to allow tail-call after pushing arguments.
745 __ Pop(edx);
746
747 // Find the address of the last argument.
748 __ mov(ecx, eax);
749 __ add(ecx, Immediate(1)); // Add one for receiver.
750 __ shl(ecx, kPointerSizeLog2);
751 __ neg(ecx);
752 __ add(ecx, ebx);
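  // ecx now equals ebx - (argc + 1) * kPointerSize (the extra slot accounts
  // for the receiver); the helper below pushes everything between ebx and ecx.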
753
754 Generate_InterpreterPushArgs(masm, ecx);
755
756 // Call the target.
757 __ Push(edx); // Re-push return address.
758
759 if (function_type == CallableType::kJSFunction) {
760 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
761 tail_call_mode),
762 RelocInfo::CODE_TARGET);
763 } else {
764 DCHECK_EQ(function_type, CallableType::kAny);
765 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
766 tail_call_mode),
767 RelocInfo::CODE_TARGET);
768 }
769 }
770
771
772 // static
773 void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
774 // ----------- S t a t e -------------
775 // -- eax : the number of arguments (not including the receiver)
776 // -- edx : the new target
777 // -- edi : the constructor
778 // -- ebx : the address of the first argument to be pushed. Subsequent
779 // arguments should be consecutive above this, in the same order as
780 // they are to be pushed onto the stack.
781 // -----------------------------------
782
783 // Pop return address to allow tail-call after pushing arguments.
784 __ Pop(ecx);
785
786 // Push edi into the slot meant for the receiver. We need an extra register,
787 // so edi is stored temporarily on the stack.
788 __ Push(edi);
789
790 // Find the address of the last argument.
791 __ mov(edi, eax);
792 __ neg(edi);
793 __ shl(edi, kPointerSizeLog2);
794 __ add(edi, ebx);
795
796 Generate_InterpreterPushArgs(masm, edi);
797
798 // Restore the constructor from slot on stack. It was pushed at the slot
799 // meant for receiver.
800 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
801
802 // Re-push return address.
803 __ Push(ecx);
804
805 // Call the constructor with unmodified eax, edx, and edi values.
806 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
807 }
808
809 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
810 // Set the return address to the correct point in the interpreter entry
811 // trampoline.
812 Smi* interpreter_entry_return_pc_offset(
813 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
814 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
815 __ LoadHeapObject(ebx,
816 masm->isolate()->builtins()->InterpreterEntryTrampoline());
817 __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
818 Code::kHeaderSize - kHeapObjectTag));
819 __ push(ebx);
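  // ebx now holds the address inside InterpreterEntryTrampoline just past its
  // dispatch call; pushing it makes the bytecode handler return there.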
820
821 // Initialize the dispatch table register.
822 __ mov(kInterpreterDispatchTableRegister,
823 Immediate(ExternalReference::interpreter_dispatch_table_address(
824 masm->isolate())));
825
826 // Get the bytecode array pointer from the frame.
827 __ mov(kInterpreterBytecodeArrayRegister,
828 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
829
830 if (FLAG_debug_code) {
831 // Check function data field is actually a BytecodeArray object.
832 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
833 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
834 ebx);
835 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
836 }
837
838 // Get the target bytecode offset from the frame.
839 __ mov(kInterpreterBytecodeOffsetRegister,
840 Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
841 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
842
843 // Dispatch to the target bytecode.
844 __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
845 kInterpreterBytecodeOffsetRegister, times_1, 0));
846 __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
847 times_pointer_size, 0));
848 __ jmp(ebx);
849 }
850
851 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
852 // ----------- S t a t e -------------
853 // -- eax : argument count (preserved for callee)
854 // -- edx : new target (preserved for callee)
855 // -- edi : target function (preserved for callee)
856 // -----------------------------------
857 // First lookup code, maybe we don't need to compile!
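  // The lookup below walks the SharedFunctionInfo's optimized code map
  // backwards from the end: for an entry whose native context matches and
  // whose OSR id is BailoutId::None(), it installs the cached literals into
  // the closure and, if the cached code WeakCell is still alive, installs the
  // code entry, links the closure into the context's optimized function list
  // and tail-calls the code. Failing that, it tries the context-independent
  // map entry, then the SharedFunctionInfo's own code (unless that is still a
  // builtin), and finally falls back to Runtime::kCompileLazy.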
858 Label gotta_call_runtime, gotta_call_runtime_no_stack;
859 Label maybe_call_runtime;
860 Label try_shared;
861 Label loop_top, loop_bottom;
862
863 Register closure = edi;
864 Register new_target = edx;
865 Register argument_count = eax;
866
867 __ push(argument_count);
868 __ push(new_target);
869 __ push(closure);
870
871 Register map = argument_count;
872 Register index = ebx;
873 __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
874 __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
875 __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
876 __ cmp(index, Immediate(Smi::FromInt(2)));
877 __ j(less, &gotta_call_runtime);
878
879 // Find literals.
880 // edx : native context
881 // ebx : length / index
882 // eax : optimized code map
883 // stack[0] : new target
884 // stack[4] : closure
885 Register native_context = edx;
886 __ mov(native_context, NativeContextOperand());
887
888 __ bind(&loop_top);
889 Register temp = edi;
890
891 // Does the native context match?
892 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
893 SharedFunctionInfo::kOffsetToPreviousContext));
894 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
895 __ cmp(temp, native_context);
896 __ j(not_equal, &loop_bottom);
897 // OSR id set to none?
898 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
899 SharedFunctionInfo::kOffsetToPreviousOsrAstId));
900 const int bailout_id = BailoutId::None().ToInt();
901 __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
902 __ j(not_equal, &loop_bottom);
903
904 // Literals available?
905 Label got_literals, maybe_cleared_weakcell;
906 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
907 SharedFunctionInfo::kOffsetToPreviousLiterals));
908
909 // temp contains either a WeakCell pointing to the literals array or the
910 // literals array directly.
911 STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
912 __ JumpIfSmi(FieldOperand(temp, WeakCell::kValueOffset),
913 &maybe_cleared_weakcell);
914 // The WeakCell value is a pointer, therefore it's a valid literals array.
915 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
916 __ jmp(&got_literals);
917
918 // We have a smi. If it's 0, then we are looking at a cleared WeakCell
919 // around the literals array, and we should visit the runtime. If it's > 0,
920 // then temp already contains the literals array.
921 __ bind(&maybe_cleared_weakcell);
922 __ cmp(FieldOperand(temp, WeakCell::kValueOffset), Immediate(0));
923 __ j(equal, &gotta_call_runtime);
924
925 // Save the literals in the closure.
926 __ bind(&got_literals);
927 __ mov(ecx, Operand(esp, 0));
928 __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
929 __ push(index);
930 __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
931 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
932 __ pop(index);
933
934 // Code available?
935 Register entry = ecx;
936 __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
937 SharedFunctionInfo::kOffsetToPreviousCachedCode));
938 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
939 __ JumpIfSmi(entry, &maybe_call_runtime);
940
941 // Found literals and code. Get them into the closure and return.
942 __ pop(closure);
943 // Store code entry in the closure.
944 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
945
946 Label install_optimized_code_and_tailcall;
947 __ bind(&install_optimized_code_and_tailcall);
948 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
949 __ RecordWriteCodeEntryField(closure, entry, eax);
950
951 // Link the closure into the optimized function list.
952 // ecx : code entry
953 // edx : native context
954 // edi : closure
955 __ mov(ebx,
956 ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
957 __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
958 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
959 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
960 const int function_list_offset =
961 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
962 __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
963 closure);
964 // Save closure before the write barrier.
965 __ mov(ebx, closure);
966 __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
967 kDontSaveFPRegs);
968 __ mov(closure, ebx);
969 __ pop(new_target);
970 __ pop(argument_count);
971 __ jmp(entry);
972
973 __ bind(&loop_bottom);
974 __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
975 __ cmp(index, Immediate(Smi::FromInt(1)));
976 __ j(greater, &loop_top);
977
978 // We found neither literals nor code.
979 __ jmp(&gotta_call_runtime);
980
981 __ bind(&maybe_call_runtime);
982 __ pop(closure);
983
984 // Last possibility. Check the context free optimized code map entry.
985 __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize +
986 SharedFunctionInfo::kSharedCodeIndex));
987 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
988 __ JumpIfSmi(entry, &try_shared);
989
990 // Store code entry in the closure.
991 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
992 __ jmp(&install_optimized_code_and_tailcall);
993
994 __ bind(&try_shared);
995 __ pop(new_target);
996 __ pop(argument_count);
997 // Is the full code valid?
998 __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
999 __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
1000 __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
1001 __ and_(ebx, Code::KindField::kMask);
1002 __ shr(ebx, Code::KindField::kShift);
1003 __ cmp(ebx, Immediate(Code::BUILTIN));
1004 __ j(equal, &gotta_call_runtime_no_stack);
1005 // Yes, install the full code.
1006 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
1007 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
1008 __ RecordWriteCodeEntryField(closure, entry, ebx);
1009 __ jmp(entry);
1010
1011 __ bind(&gotta_call_runtime);
1012 __ pop(closure);
1013 __ pop(new_target);
1014 __ pop(argument_count);
1015 __ bind(&gotta_call_runtime_no_stack);
1016
1017 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1018 }
1019
1020 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1021 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1022 }
1023
1024 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1025 GenerateTailCallToReturnedCode(masm,
1026 Runtime::kCompileOptimized_NotConcurrent);
1027 }
1028
1029
1030 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1031 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1032 }
1033
1034 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1035 // ----------- S t a t e -------------
1036 // -- eax : argument count (preserved for callee)
1037 // -- edx : new target (preserved for callee)
1038 // -- edi : target function (preserved for callee)
1039 // -----------------------------------
1040 Label failed;
1041 {
1042 FrameScope scope(masm, StackFrame::INTERNAL);
1043 // Push the number of arguments to the callee.
1044 __ SmiTag(eax);
1045 __ push(eax);
1046 // Push a copy of the target function and the new target.
1047 __ push(edi);
1048 __ push(edx);
1049
1050 // The function.
1051 __ push(edi);
1052 // Copy arguments from caller (stdlib, foreign, heap).
1053 for (int i = 2; i >= 0; --i) {
1054 __ push(Operand(
1055 ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
1056 }
1057 // Call runtime, on success unwind frame, and parent frame.
1058 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1059 // A smi 0 is returned on failure, an object on success.
1060 __ JumpIfSmi(eax, &failed, Label::kNear);
1061 scope.GenerateLeaveFrame();
1062 __ ret(4 * kPointerSize);
1063
1064 __ bind(&failed);
1065 // Restore target function and new target.
1066 __ pop(edx);
1067 __ pop(edi);
1068 __ pop(eax);
1069 __ SmiUntag(eax);
1070 }
1071 // On failure, tail call back to regular js.
1072 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1073 }
1074
1075 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1076 // For now, we are relying on the fact that make_code_young doesn't do any
1077 // garbage collection which allows us to save/restore the registers without
1078 // worrying about which of them contain pointers. We also don't build an
1079 // internal frame to make the code faster, since we shouldn't have to do stack
1080 // crawls in MakeCodeYoung. This seems a bit fragile.
1081
1082 // Re-execute the code that was patched back to the young age when
1083 // the stub returns.
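  // The 5 subtracted below matches the length of the call instruction that
  // invoked this stub (1-byte opcode + 4-byte rel32), so the return address
  // points back at the start of the re-patched sequence.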
1084 __ sub(Operand(esp, 0), Immediate(5));
1085 __ pushad();
1086 __ mov(eax, Operand(esp, 8 * kPointerSize));
1087 {
1088 FrameScope scope(masm, StackFrame::MANUAL);
1089 __ PrepareCallCFunction(2, ebx);
1090 __ mov(Operand(esp, 1 * kPointerSize),
1091 Immediate(ExternalReference::isolate_address(masm->isolate())));
1092 __ mov(Operand(esp, 0), eax);
1093 __ CallCFunction(
1094 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1095 }
1096 __ popad();
1097 __ ret(0);
1098 }
1099
1100 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1101 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1102 MacroAssembler* masm) { \
1103 GenerateMakeCodeYoungAgainCommon(masm); \
1104 } \
1105 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1106 MacroAssembler* masm) { \
1107 GenerateMakeCodeYoungAgainCommon(masm); \
1108 }
1109 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1110 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1111
1112
1113 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1114 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1115 // that make_code_young doesn't do any garbage collection which allows us to
1116 // save/restore the registers without worrying about which of them contain
1117 // pointers.
1118 __ pushad();
1119 __ mov(eax, Operand(esp, 8 * kPointerSize));
1120 __ sub(eax, Immediate(Assembler::kCallInstructionLength));
1121 { // NOLINT
1122 FrameScope scope(masm, StackFrame::MANUAL);
1123 __ PrepareCallCFunction(2, ebx);
1124 __ mov(Operand(esp, 1 * kPointerSize),
1125 Immediate(ExternalReference::isolate_address(masm->isolate())));
1126 __ mov(Operand(esp, 0), eax);
1127 __ CallCFunction(
1128 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1129 2);
1130 }
1131 __ popad();
1132
1133 // Perform prologue operations usually performed by the young code stub.
1134 __ pop(eax); // Pop return address into scratch register.
1135 __ push(ebp); // Caller's frame pointer.
1136 __ mov(ebp, esp);
1137 __ push(esi); // Callee's context.
1138 __ push(edi); // Callee's JS Function.
1139 __ push(eax); // Push return address after frame prologue.
1140
1141 // Jump to point after the code-age stub.
1142 __ ret(0);
1143 }
1144
1145
1146 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1147 GenerateMakeCodeYoungAgainCommon(masm);
1148 }
1149
1150
1151 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1152 Generate_MarkCodeAsExecutedOnce(masm);
1153 }
1154
1155
1156 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1157 SaveFPRegsMode save_doubles) {
1158 // Enter an internal frame.
1159 {
1160 FrameScope scope(masm, StackFrame::INTERNAL);
1161
1162 // Preserve registers across notification, this is important for compiled
1163 // stubs that tail call the runtime on deopts passing their parameters in
1164 // registers.
1165 __ pushad();
1166 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1167 __ popad();
1168 // Tear down internal frame.
1169 }
1170
1171 __ pop(MemOperand(esp, 0)); // Ignore state offset
1172 __ ret(0); // Return to IC Miss stub, continuation still on stack.
1173 }
1174
1175
1176 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1177 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1178 }
1179
1180
1181 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1182 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1183 }
1184
1185
1186 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1187 Deoptimizer::BailoutType type) {
1188 {
1189 FrameScope scope(masm, StackFrame::INTERNAL);
1190
1191 // Pass deoptimization type to the runtime system.
1192 __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
1193 __ CallRuntime(Runtime::kNotifyDeoptimized);
1194
1195 // Tear down internal frame.
1196 }
1197
1198 // Get the full codegen state from the stack and untag it.
1199 __ mov(ecx, Operand(esp, 1 * kPointerSize));
1200 __ SmiUntag(ecx);
1201
1202 // Switch on the state.
1203 Label not_no_registers, not_tos_eax;
1204 __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
1205 __ j(not_equal, &not_no_registers, Label::kNear);
1206 __ ret(1 * kPointerSize); // Remove state.
1207
1208 __ bind(&not_no_registers);
1209 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
1210 __ mov(eax, Operand(esp, 2 * kPointerSize));
1211 __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
1212 __ j(not_equal, &not_tos_eax, Label::kNear);
1213 __ ret(2 * kPointerSize); // Remove state, eax.
1214
1215 __ bind(&not_tos_eax);
1216 __ Abort(kNoCasesLeft);
1217 }
1218
1219
1220 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1221 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1222 }
1223
1224
1225 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1226 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1227 }
1228
1229
1230 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1231 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1232 }
1233
1234
1235 // static
1236 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1237 int field_index) {
1238 // ----------- S t a t e -------------
1239 // -- eax : number of arguments
1240 // -- edi : function
1241 // -- esi : context
1242 // -- esp[0] : return address
1243 // -- esp[4] : receiver
1244 // -----------------------------------
1245
1246 // 1. Load receiver into eax and check that it's actually a JSDate object.
1247 Label receiver_not_date;
1248 {
1249 __ mov(eax, Operand(esp, kPointerSize));
1250 __ JumpIfSmi(eax, &receiver_not_date);
1251 __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
1252 __ j(not_equal, &receiver_not_date);
1253 }
1254
1255 // 2. Load the specified date field, falling back to the runtime as necessary.
1256 if (field_index == JSDate::kDateValue) {
1257 __ mov(eax, FieldOperand(eax, JSDate::kValueOffset));
1258 } else {
1259 if (field_index < JSDate::kFirstUncachedField) {
1260 Label stamp_mismatch;
1261 __ mov(edx, Operand::StaticVariable(
1262 ExternalReference::date_cache_stamp(masm->isolate())));
1263 __ cmp(edx, FieldOperand(eax, JSDate::kCacheStampOffset));
1264 __ j(not_equal, &stamp_mismatch, Label::kNear);
1265 __ mov(eax, FieldOperand(
1266 eax, JSDate::kValueOffset + field_index * kPointerSize));
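  // The cached fields live right after the date value, one pointer-sized slot
  // per field index, so the offset above selects the requested field.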
1267 __ ret(1 * kPointerSize);
1268 __ bind(&stamp_mismatch);
1269 }
1270 FrameScope scope(masm, StackFrame::INTERNAL);
1271 __ PrepareCallCFunction(2, ebx);
1272 __ mov(Operand(esp, 0), eax);
1273 __ mov(Operand(esp, 1 * kPointerSize),
1274 Immediate(Smi::FromInt(field_index)));
1275 __ CallCFunction(
1276 ExternalReference::get_date_field_function(masm->isolate()), 2);
1277 }
1278 __ ret(1 * kPointerSize);
1279
1280 // 3. Raise a TypeError if the receiver is not a date.
1281 __ bind(&receiver_not_date);
1282 {
1283 FrameScope scope(masm, StackFrame::MANUAL);
1284 __ Move(ebx, Immediate(0));
1285 __ EnterBuiltinFrame(esi, edi, ebx);
1286 __ CallRuntime(Runtime::kThrowNotDateError);
1287 }
1288 }
1289
1290 // static
1291 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1292 // ----------- S t a t e -------------
1293 // -- eax : argc
1294 // -- esp[0] : return address
1295 // -- esp[4] : argArray
1296 // -- esp[8] : thisArg
1297 // -- esp[12] : receiver
1298 // -----------------------------------
1299
1300 // 1. Load receiver into edi, argArray into eax (if present), remove all
1301 // arguments from the stack (including the receiver), and push thisArg (if
1302 // present) instead.
1303 {
1304 Label no_arg_array, no_this_arg;
1305 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1306 __ mov(ebx, edx);
1307 __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1308 __ test(eax, eax);
1309 __ j(zero, &no_this_arg, Label::kNear);
1310 {
1311 __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
1312 __ cmp(eax, Immediate(1));
1313 __ j(equal, &no_arg_array, Label::kNear);
1314 __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
1315 __ bind(&no_arg_array);
1316 }
1317 __ bind(&no_this_arg);
1318 __ PopReturnAddressTo(ecx);
1319 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1320 __ Push(edx);
1321 __ PushReturnAddressFrom(ecx);
1322 __ Move(eax, ebx);
1323 }
1324
1325 // ----------- S t a t e -------------
1326 // -- eax : argArray
1327 // -- edi : receiver
1328 // -- esp[0] : return address
1329 // -- esp[4] : thisArg
1330 // -----------------------------------
1331
1332 // 2. Make sure the receiver is actually callable.
1333 Label receiver_not_callable;
1334 __ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
1335 __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1336 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1337 Immediate(1 << Map::kIsCallable));
1338 __ j(zero, &receiver_not_callable, Label::kNear);
1339
1340 // 3. Tail call with no arguments if argArray is null or undefined.
1341 Label no_arguments;
1342 __ JumpIfRoot(eax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
1343 __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &no_arguments,
1344 Label::kNear);
1345
1346 // 4a. Apply the receiver to the given argArray (passing undefined for
1347 // new.target).
1348 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1349 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1350
1351 // 4b. The argArray is either null or undefined, so we tail call without any
1352 // arguments to the receiver.
1353 __ bind(&no_arguments);
1354 {
1355 __ Set(eax, 0);
1356 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1357 }
1358
1359 // 4c. The receiver is not callable, throw an appropriate TypeError.
1360 __ bind(&receiver_not_callable);
1361 {
1362 __ mov(Operand(esp, kPointerSize), edi);
1363 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1364 }
1365 }
1366
1367
1368 // static
1369 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1370 // Stack Layout:
1371 // esp[0] : Return address
1372 // esp[4] : Argument n
1373 // esp[8] : Argument n-1
1374 // ...
1375 // esp[4 * n] : Argument 1
1376 // esp[4 * (n + 1)] : Receiver (callable to call)
1377 //
1378 // eax contains the number of arguments, n, not counting the receiver.
1379 //
1380 // 1. Make sure we have at least one argument.
1381 {
1382 Label done;
1383 __ test(eax, eax);
1384 __ j(not_zero, &done, Label::kNear);
1385 __ PopReturnAddressTo(ebx);
1386 __ PushRoot(Heap::kUndefinedValueRootIndex);
1387 __ PushReturnAddressFrom(ebx);
1388 __ inc(eax);
1389 __ bind(&done);
1390 }
1391
1392 // 2. Get the callable to call (passed as receiver) from the stack.
1393 __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1394
1395 // 3. Shift arguments and return address one slot down on the stack
1396 // (overwriting the original receiver). Adjust argument count to make
1397 // the original first argument the new receiver.
1398 {
1399 Label loop;
1400 __ mov(ecx, eax);
1401 __ bind(&loop);
1402 __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1403 __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
1404 __ dec(ecx);
1405 __ j(not_sign, &loop); // While non-negative (to copy return address).
1406 __ pop(ebx); // Discard copy of return address.
1407 __ dec(eax); // One fewer argument (first argument is new receiver).
1408 }
1409
1410 // 4. Call the callable.
1411 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1412 }
1413
1414
1415 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1416 // ----------- S t a t e -------------
1417 // -- eax : argc
1418 // -- esp[0] : return address
1419 // -- esp[4] : argumentsList
1420 // -- esp[8] : thisArgument
1421 // -- esp[12] : target
1422 // -- esp[16] : receiver
1423 // -----------------------------------
1424
1425 // 1. Load target into edi (if present), argumentsList into eax (if present),
1426 // remove all arguments from the stack (including the receiver), and push
1427 // thisArgument (if present) instead.
1428 {
1429 Label done;
1430 __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1431 __ mov(edx, edi);
1432 __ mov(ebx, edi);
1433 __ cmp(eax, Immediate(1));
1434 __ j(below, &done, Label::kNear);
1435 __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1436 __ j(equal, &done, Label::kNear);
1437 __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1438 __ cmp(eax, Immediate(3));
1439 __ j(below, &done, Label::kNear);
1440 __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1441 __ bind(&done);
1442 __ PopReturnAddressTo(ecx);
1443 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1444 __ Push(edx);
1445 __ PushReturnAddressFrom(ecx);
1446 __ Move(eax, ebx);
1447 }
1448
1449 // ----------- S t a t e -------------
1450 // -- eax : argumentsList
1451 // -- edi : target
1452 // -- esp[0] : return address
1453 // -- esp[4] : thisArgument
1454 // -----------------------------------
1455
1456 // 2. Make sure the target is actually callable.
1457 Label target_not_callable;
1458 __ JumpIfSmi(edi, &target_not_callable, Label::kNear);
1459 __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1460 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1461 Immediate(1 << Map::kIsCallable));
1462 __ j(zero, &target_not_callable, Label::kNear);
1463
1464 // 3a. Apply the target to the given argumentsList (passing undefined for
1465 // new.target).
1466 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1467 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1468
1469 // 3b. The target is not callable, throw an appropriate TypeError.
1470 __ bind(&target_not_callable);
1471 {
1472 __ mov(Operand(esp, kPointerSize), edi);
1473 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1474 }
1475 }
1476
1477 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1478 // ----------- S t a t e -------------
1479 // -- eax : argc
1480 // -- esp[0] : return address
1481 // -- esp[4] : new.target (optional)
1482 // -- esp[8] : argumentsList
1483 // -- esp[12] : target
1484 // -- esp[16] : receiver
1485 // -----------------------------------
1486
1487 // 1. Load target into edi (if present), argumentsList into eax (if present),
1488 // new.target into edx (if present, otherwise use target), remove all
1489 // arguments from the stack (including the receiver), and push thisArgument
1490 // (if present) instead.
1491 {
1492 Label done;
1493 __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1494 __ mov(edx, edi);
1495 __ mov(ebx, edi);
1496 __ cmp(eax, Immediate(1));
1497 __ j(below, &done, Label::kNear);
1498 __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1499 __ mov(edx, edi);
1500 __ j(equal, &done, Label::kNear);
1501 __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1502 __ cmp(eax, Immediate(3));
1503 __ j(below, &done, Label::kNear);
1504 __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1505 __ bind(&done);
1506 __ PopReturnAddressTo(ecx);
1507 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1508 __ PushRoot(Heap::kUndefinedValueRootIndex);
1509 __ PushReturnAddressFrom(ecx);
1510 __ Move(eax, ebx);
1511 }
1512
1513 // ----------- S t a t e -------------
1514 // -- eax : argumentsList
1515 // -- edx : new.target
1516 // -- edi : target
1517 // -- esp[0] : return address
1518 // -- esp[4] : receiver (undefined)
1519 // -----------------------------------
1520
1521 // 2. Make sure the target is actually a constructor.
1522 Label target_not_constructor;
1523 __ JumpIfSmi(edi, &target_not_constructor, Label::kNear);
1524 __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1525 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1526 Immediate(1 << Map::kIsConstructor));
1527 __ j(zero, &target_not_constructor, Label::kNear);
1528
1529 // 3. Make sure the new.target is actually a constructor.
1530 Label new_target_not_constructor;
1531 __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
1532 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
1533 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1534 Immediate(1 << Map::kIsConstructor));
1535 __ j(zero, &new_target_not_constructor, Label::kNear);
1536
1537 // 4a. Construct the target with the given new.target and argumentsList.
1538 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1539
1540 // 4b. The target is not a constructor, throw an appropriate TypeError.
1541 __ bind(&target_not_constructor);
1542 {
1543 __ mov(Operand(esp, kPointerSize), edi);
1544 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1545 }
1546
1547 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1548 __ bind(&new_target_not_constructor);
1549 {
1550 __ mov(Operand(esp, kPointerSize), edx);
1551 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1552 }
1553 }
1554
1555
1556 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1557 // ----------- S t a t e -------------
1558 // -- eax : argc
1559 // -- esp[0] : return address
1560 // -- esp[4] : last argument
1561 // -----------------------------------
1562 Label generic_array_code;
1563
1564 // Get the InternalArray function.
1565 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
1566
1567 if (FLAG_debug_code) {
1568 // Initial map for the builtin InternalArray function should be a map.
1569 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1570 // A zero result indicates either a NULL pointer or a Smi.
1571 __ test(ebx, Immediate(kSmiTagMask));
1572 __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
1573 __ CmpObjectType(ebx, MAP_TYPE, ecx);
1574 __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
1575 }
1576
1577 // Run the native code for the InternalArray function called as a normal
1578 // function.
1579 // Tail call a stub.
1580 InternalArrayConstructorStub stub(masm->isolate());
1581 __ TailCallStub(&stub);
1582 }
1583
1584
1585 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1586 // ----------- S t a t e -------------
1587 // -- eax : argc
1588 // -- esp[0] : return address
1589 // -- esp[4] : last argument
1590 // -----------------------------------
1591 Label generic_array_code;
1592
1593 // Get the Array function.
1594 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
1595 __ mov(edx, edi);
1596
1597 if (FLAG_debug_code) {
1598 // Initial map for the builtin Array function should be a map.
1599 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1600 // A zero result indicates either a NULL pointer or a Smi.
1601 __ test(ebx, Immediate(kSmiTagMask));
1602 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
1603 __ CmpObjectType(ebx, MAP_TYPE, ecx);
1604 __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
1605 }
1606
1607 // Run the native code for the Array function called as a normal function.
1608 // Tail call a stub.
1609 __ mov(ebx, masm->isolate()->factory()->undefined_value());
1610 ArrayConstructorStub stub(masm->isolate());
1611 __ TailCallStub(&stub);
1612 }
1613
1614
1615 // static
1616 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
1617 // ----------- S t a t e -------------
1618 // -- eax : number of arguments
1619 // -- edi : function
1620 // -- esi : context
1621 // -- esp[0] : return address
1622 // -- esp[(argc - n) * 8] : arg[n] (zero-based)
1623 // -- esp[(argc + 1) * 8] : receiver
1624 // -----------------------------------
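// For illustration: Math.max() with no arguments is -Infinity and Math.min()
// is +Infinity, which is why the accumulator starts at the matching infinity
// root. NaN arguments poison the result (Math.max(1, NaN) is NaN), and a tie
// between -0 and +0 is broken via the sign bit below (Math.max(-0, +0) is +0,
// Math.min(-0, +0) is -0).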
1625 Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
1626 Heap::RootListIndex const root_index =
1627 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
1628 : Heap::kMinusInfinityValueRootIndex;
1629 XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;
1630
1631 // Load the accumulator with the default return value (either -Infinity or
1632 // +Infinity), with the tagged value in edx and the double value in xmm0.
1633 __ LoadRoot(edx, root_index);
1634 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
1635 __ Move(ecx, eax);
1636
1637 Label done_loop, loop;
1638 __ bind(&loop);
1639 {
1640 // Check if all parameters done.
1641 __ test(ecx, ecx);
1642 __ j(zero, &done_loop);
1643
1644 // Load the next parameter tagged value into ebx.
1645 __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1646
1647 // Load the double value of the parameter into xmm1, maybe converting the
1648 // parameter to a number first using the ToNumber builtin if necessary.
1649 Label convert, convert_smi, convert_number, done_convert;
1650 __ bind(&convert);
1651 __ JumpIfSmi(ebx, &convert_smi);
1652 __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
1653 Heap::kHeapNumberMapRootIndex, &convert_number);
1654 {
1655 // Parameter is not a Number, use the ToNumber builtin to convert it.
1656 FrameScope scope(masm, StackFrame::MANUAL);
1657 __ SmiTag(eax);
1658 __ SmiTag(ecx);
1659 __ EnterBuiltinFrame(esi, edi, eax);
1660 __ Push(ecx);
1661 __ Push(edx);
1662 __ mov(eax, ebx);
1663 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1664 __ mov(ebx, eax);
1665 __ Pop(edx);
1666 __ Pop(ecx);
1667 __ LeaveBuiltinFrame(esi, edi, eax);
1668 __ SmiUntag(ecx);
1669 __ SmiUntag(eax);
1670 {
1671 // Restore the double accumulator value (xmm0).
1672 Label restore_smi, done_restore;
1673 __ JumpIfSmi(edx, &restore_smi, Label::kNear);
1674 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
1675 __ jmp(&done_restore, Label::kNear);
1676 __ bind(&restore_smi);
1677 __ SmiUntag(edx);
1678 __ Cvtsi2sd(xmm0, edx);
1679 __ SmiTag(edx);
1680 __ bind(&done_restore);
1681 }
1682 }
1683 __ jmp(&convert);
1684 __ bind(&convert_number);
1685 __ movsd(xmm1, FieldOperand(ebx, HeapNumber::kValueOffset));
1686 __ jmp(&done_convert, Label::kNear);
1687 __ bind(&convert_smi);
1688 __ SmiUntag(ebx);
1689 __ Cvtsi2sd(xmm1, ebx);
1690 __ SmiTag(ebx);
1691 __ bind(&done_convert);
1692
1693 // Perform the actual comparison with the accumulator value on the left hand
1694 // side (xmm0) and the next parameter value on the right hand side (xmm1).
1695 Label compare_equal, compare_nan, compare_swap, done_compare;
1696 __ ucomisd(xmm0, xmm1);
1697 __ j(parity_even, &compare_nan, Label::kNear);
1698 __ j(cc, &done_compare, Label::kNear);
1699 __ j(equal, &compare_equal, Label::kNear);
1700
1701 // Result is on the right hand side.
1702 __ bind(&compare_swap);
1703 __ movaps(xmm0, xmm1);
1704 __ mov(edx, ebx);
1705 __ jmp(&done_compare, Label::kNear);
1706
1707 // At least one side is NaN, which means that the result will be NaN too.
1708 __ bind(&compare_nan);
1709 __ LoadRoot(edx, Heap::kNanValueRootIndex);
1710 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
1711 __ jmp(&done_compare, Label::kNear);
1712
1713 // Left and right hand side are equal, check for -0 vs. +0.
1714 __ bind(&compare_equal);
1715 __ Push(edi); // Preserve function in edi.
1716 __ movmskpd(edi, reg);
1717 __ test(edi, Immediate(1));
1718 __ Pop(edi);
1719 __ j(not_zero, &compare_swap);
1720
1721 __ bind(&done_compare);
1722 __ dec(ecx);
1723 __ jmp(&loop);
1724 }
1725
1726 __ bind(&done_loop);
1727 __ PopReturnAddressTo(ecx);
1728 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1729 __ PushReturnAddressFrom(ecx);
1730 __ mov(eax, edx);
1731 __ Ret();
1732 }
1733
1734 // static
1735 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
1736 // ----------- S t a t e -------------
1737 // -- eax : number of arguments
1738 // -- edi : constructor function
1739 // -- esi : context
1740 // -- esp[0] : return address
1741 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1742 // -- esp[(argc + 1) * 4] : receiver
1743 // -----------------------------------
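// For illustration: this is Number invoked as a plain function, which simply
// converts its first argument, e.g. Number("42.5") yields 42.5 and Number()
// with no arguments yields +0.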
1744
1745 // 1. Load the first argument into ebx.
1746 Label no_arguments;
1747 {
1748 __ test(eax, eax);
1749 __ j(zero, &no_arguments, Label::kNear);
1750 __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1751 }
1752
1753 // 2a. Convert the first argument to a number.
1754 {
1755 FrameScope scope(masm, StackFrame::MANUAL);
1756 __ SmiTag(eax);
1757 __ EnterBuiltinFrame(esi, edi, eax);
1758 __ mov(eax, ebx);
1759 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1760 __ LeaveBuiltinFrame(esi, edi, ebx); // Argc popped to ebx.
1761 __ SmiUntag(ebx);
1762 }
1763
1764 {
1765 // Drop all arguments including the receiver.
1766 __ PopReturnAddressTo(ecx);
1767 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
1768 __ PushReturnAddressFrom(ecx);
1769 __ Ret();
1770 }
1771
1772 // 2b. No arguments, return +0 (already in eax).
1773 __ bind(&no_arguments);
1774 __ ret(1 * kPointerSize);
1775 }
1776
1777
1778 // static
1779 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
1780 // ----------- S t a t e -------------
1781 // -- eax : number of arguments
1782 // -- edi : constructor function
1783 // -- edx : new target
1784 // -- esi : context
1785 // -- esp[0] : return address
1786 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1787 // -- esp[(argc + 1) * 4] : receiver
1788 // -----------------------------------
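// For illustration: this is Number invoked as a constructor, e.g.
// new Number(5), which wraps the converted value in a JSValue object
// (typeof new Number(5) is "object" and (new Number(5)).valueOf() is 5).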
1789
1790 // 1. Make sure we operate in the context of the called function.
1791 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1792
1793 // Store argc (smi-tagged) in ecx.
1794 __ mov(ecx, eax);
1795 __ SmiTag(ecx);
1796
1797 // 2. Load the first argument into ebx.
1798 {
1799 Label no_arguments, done;
1800 __ test(eax, eax);
1801 __ j(zero, &no_arguments, Label::kNear);
1802 __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1803 __ jmp(&done, Label::kNear);
1804 __ bind(&no_arguments);
1805 __ Move(ebx, Smi::FromInt(0));
1806 __ bind(&done);
1807 }
1808
1809 // 3. Make sure ebx is a number.
1810 {
1811 Label done_convert;
1812 __ JumpIfSmi(ebx, &done_convert);
1813 __ CompareRoot(FieldOperand(ebx, HeapObject::kMapOffset),
1814 Heap::kHeapNumberMapRootIndex);
1815 __ j(equal, &done_convert);
1816 {
1817 FrameScope scope(masm, StackFrame::MANUAL);
1818 __ EnterBuiltinFrame(esi, edi, ecx);
1819 __ Push(edx);
1820 __ Move(eax, ebx);
1821 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1822 __ Move(ebx, eax);
1823 __ Pop(edx);
1824 __ LeaveBuiltinFrame(esi, edi, ecx);
1825 }
1826 __ bind(&done_convert);
1827 }
1828
1829 // 4. Check if new target and constructor differ.
1830 Label drop_frame_and_ret, done_alloc, new_object;
1831 __ cmp(edx, edi);
1832 __ j(not_equal, &new_object);
1833
1834 // 5. Allocate a JSValue wrapper for the number.
1835 __ AllocateJSValue(eax, edi, ebx, esi, &done_alloc);
1836 __ jmp(&drop_frame_and_ret);
1837
1838 __ bind(&done_alloc);
1839 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); // Restore esi.
1840
1841 // 6. Fallback to the runtime to create new object.
1842 __ bind(&new_object);
1843 {
1844 FrameScope scope(masm, StackFrame::MANUAL);
1845 __ EnterBuiltinFrame(esi, edi, ecx);
1846 __ Push(ebx); // the first argument
1847 FastNewObjectStub stub(masm->isolate());
1848 __ CallStub(&stub);
1849 __ Pop(FieldOperand(eax, JSValue::kValueOffset));
1850 __ LeaveBuiltinFrame(esi, edi, ecx);
1851 }
1852
1853 __ bind(&drop_frame_and_ret);
1854 {
1855 // Drop all arguments including the receiver.
1856 __ PopReturnAddressTo(esi);
1857 __ SmiUntag(ecx);
1858 __ lea(esp, Operand(esp, ecx, times_pointer_size, kPointerSize));
1859 __ PushReturnAddressFrom(esi);
1860 __ Ret();
1861 }
1862 }
1863
1864
1865 // static
1866 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
1867 // ----------- S t a t e -------------
1868 // -- eax : number of arguments
1869 // -- edi : constructor function
1870 // -- esi : context
1871 // -- esp[0] : return address
1872 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1873 // -- esp[(argc + 1) * 4] : receiver
1874 // -----------------------------------
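// For illustration: this is String invoked as a plain function, e.g.
// String(123) yields "123". Symbols take a separate path because
// String(Symbol("x")) yields the descriptive string "Symbol(x)", whereas an
// implicit conversion such as "" + Symbol("x") throws a TypeError.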
1875
1876 // 1. Load the first argument into eax.
1877 Label no_arguments;
1878 {
1879 __ mov(ebx, eax); // Store argc in ebx.
1880 __ test(eax, eax);
1881 __ j(zero, &no_arguments, Label::kNear);
1882 __ mov(eax, Operand(esp, eax, times_pointer_size, 0));
1883 }
1884
1885 // 2a. At least one argument, return eax if it's a string, otherwise
1886 // dispatch to appropriate conversion.
1887 Label drop_frame_and_ret, to_string, symbol_descriptive_string;
1888 {
1889 __ JumpIfSmi(eax, &to_string, Label::kNear);
1890 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
1891 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
1892 __ j(above, &to_string, Label::kNear);
1893 __ j(equal, &symbol_descriptive_string, Label::kNear);
1894 __ jmp(&drop_frame_and_ret, Label::kNear);
1895 }
1896
1897 // 2b. No arguments, return the empty string (and pop the receiver).
1898 __ bind(&no_arguments);
1899 {
1900 __ LoadRoot(eax, Heap::kempty_stringRootIndex);
1901 __ ret(1 * kPointerSize);
1902 }
1903
1904 // 3a. Convert eax to a string.
1905 __ bind(&to_string);
1906 {
1907 FrameScope scope(masm, StackFrame::MANUAL);
1908 ToStringStub stub(masm->isolate());
1909 __ SmiTag(ebx);
1910 __ EnterBuiltinFrame(esi, edi, ebx);
1911 __ CallStub(&stub);
1912 __ LeaveBuiltinFrame(esi, edi, ebx);
1913 __ SmiUntag(ebx);
1914 }
1915 __ jmp(&drop_frame_and_ret, Label::kNear);
1916
1917 // 3b. Convert symbol in eax to a string.
1918 __ bind(&symbol_descriptive_string);
1919 {
1920 __ PopReturnAddressTo(ecx);
1921 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
1922 __ Push(eax);
1923 __ PushReturnAddressFrom(ecx);
1924 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
1925 }
1926
1927 __ bind(&drop_frame_and_ret);
1928 {
1929 // Drop all arguments including the receiver.
1930 __ PopReturnAddressTo(ecx);
1931 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
1932 __ PushReturnAddressFrom(ecx);
1933 __ Ret();
1934 }
1935 }
1936
1937
1938 // static
1939 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
1940 // ----------- S t a t e -------------
1941 // -- eax : number of arguments
1942 // -- edi : constructor function
1943 // -- edx : new target
1944 // -- esi : context
1945 // -- esp[0] : return address
1946 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1947 // -- esp[(argc + 1) * 4] : receiver
1948 // -----------------------------------
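// For illustration: this is String invoked as a constructor, e.g.
// new String("ab"), which wraps the converted string in a JSValue object
// (typeof is "object" and .valueOf() returns "ab").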
1949
1950 // 1. Make sure we operate in the context of the called function.
1951 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1952
1953 __ mov(ebx, eax);
1954
1955 // 2. Load the first argument into eax.
1956 {
1957 Label no_arguments, done;
1958 __ test(ebx, ebx);
1959 __ j(zero, &no_arguments, Label::kNear);
1960 __ mov(eax, Operand(esp, ebx, times_pointer_size, 0));
1961 __ jmp(&done, Label::kNear);
1962 __ bind(&no_arguments);
1963 __ LoadRoot(eax, Heap::kempty_stringRootIndex);
1964 __ bind(&done);
1965 }
1966
1967 // 3. Make sure eax is a string.
1968 {
1969 Label convert, done_convert;
1970 __ JumpIfSmi(eax, &convert, Label::kNear);
1971 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
1972 __ j(below, &done_convert);
1973 __ bind(&convert);
1974 {
1975 FrameScope scope(masm, StackFrame::MANUAL);
1976 ToStringStub stub(masm->isolate());
1977 __ SmiTag(ebx);
1978 __ EnterBuiltinFrame(esi, edi, ebx);
1979 __ Push(edx);
1980 __ CallStub(&stub);
1981 __ Pop(edx);
1982 __ LeaveBuiltinFrame(esi, edi, ebx);
1983 __ SmiUntag(ebx);
1984 }
1985 __ bind(&done_convert);
1986 }
1987
1988 // 4. Check if new target and constructor differ.
1989 Label drop_frame_and_ret, done_alloc, new_object;
1990 __ cmp(edx, edi);
1991 __ j(not_equal, &new_object);
1992
1993 // 5. Allocate a JSValue wrapper for the string.
1994 // AllocateJSValue can't handle src == dst register. Reuse esi and restore it
1995 // as needed after the call.
1996 __ mov(esi, eax);
1997 __ AllocateJSValue(eax, edi, esi, ecx, &done_alloc);
1998 __ jmp(&drop_frame_and_ret);
1999
2000 __ bind(&done_alloc);
2001 {
2002 // Restore eax to the first argument and esi to the context.
2003 __ mov(eax, esi);
2004 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2005 }
2006
2007 // 6. Fallback to the runtime to create new object.
2008 __ bind(&new_object);
2009 {
2010 FrameScope scope(masm, StackFrame::MANUAL);
2011 __ SmiTag(ebx);
2012 __ EnterBuiltinFrame(esi, edi, ebx);
2013 __ Push(eax); // the first argument
2014 FastNewObjectStub stub(masm->isolate());
2015 __ CallStub(&stub);
2016 __ Pop(FieldOperand(eax, JSValue::kValueOffset));
2017 __ LeaveBuiltinFrame(esi, edi, ebx);
2018 __ SmiUntag(ebx);
2019 }
2020
2021 __ bind(&drop_frame_and_ret);
2022 {
2023 // Drop all arguments including the receiver.
2024 __ PopReturnAddressTo(ecx);
2025 __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
2026 __ PushReturnAddressFrom(ecx);
2027 __ Ret();
2028 }
2029 }
2030
2031
2032 static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
2033 Label* stack_overflow) {
2034 // ----------- S t a t e -------------
2035 // -- eax : actual number of arguments
2036 // -- ebx : expected number of arguments
2037 // -- edx : new target (passed through to callee)
2038 // -----------------------------------
2039 // Check the stack for overflow. We are not trying to catch
2040 // interruptions (e.g. debug break and preemption) here, so the "real stack
2041 // limit" is checked.
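// For example, with 4-byte pointers and ebx == 3 expected arguments, edi below
// becomes 12 bytes; if no more than that remains between esp and the real
// stack limit, we branch to the stack_overflow label.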
2042 ExternalReference real_stack_limit =
2043 ExternalReference::address_of_real_stack_limit(masm->isolate());
2044 __ mov(edi, Operand::StaticVariable(real_stack_limit));
2045 // Make ecx the space we have left. The stack might already be overflowed
2046 // here which will cause ecx to become negative.
2047 __ mov(ecx, esp);
2048 __ sub(ecx, edi);
2049 // Make edi the space we need for the array when it is unrolled onto the
2050 // stack.
2051 __ mov(edi, ebx);
2052 __ shl(edi, kPointerSizeLog2);
2053 // Check if the arguments will overflow the stack.
2054 __ cmp(ecx, edi);
2055 __ j(less_equal, stack_overflow); // Signed comparison.
2056 }
2057
2058
2059 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2060 __ push(ebp);
2061 __ mov(ebp, esp);
2062
2063 // Store the arguments adaptor context sentinel.
2064 __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2065
2066 // Push the function on the stack.
2067 __ push(edi);
2068
2069 // Preserve the number of arguments on the stack. Must preserve eax,
2070 // ebx and ecx because these registers are used when copying the
2071 // arguments and the receiver.
2072 STATIC_ASSERT(kSmiTagSize == 1);
2073 __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
2074 __ push(edi);
2075 }
2076
2077
2078 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2079 // Retrieve the number of arguments from the stack.
2080 __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2081
2082 // Leave the frame.
2083 __ leave();
2084
2085 // Remove caller arguments from the stack.
2086 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
2087 __ pop(ecx);
2088 __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
2089 __ push(ecx);
2090 }
2091
2092
2093 // static
2094 void Builtins::Generate_Apply(MacroAssembler* masm) {
2095 // ----------- S t a t e -------------
2096 // -- eax : argumentsList
2097 // -- edi : target
2098 // -- edx : new.target (checked to be constructor or undefined)
2099 // -- esp[0] : return address.
2100 // -- esp[4] : thisArgument
2101 // -----------------------------------
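// For illustration: argumentsList arrives here from callers such as
// Function.prototype.apply, Reflect.apply and Reflect.construct. Unmodified
// arguments objects and packed Smi/object JSArrays are unpacked directly
// below; anything else, including a holey array like [1, , 3], is first
// converted to a FixedArray by the runtime (CreateListFromArrayLike).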
2102
2103 // Create the list of arguments from the array-like argumentsList.
2104 {
2105 Label create_arguments, create_array, create_runtime, done_create;
2106 __ JumpIfSmi(eax, &create_runtime);
2107
2108 // Load the map of argumentsList into ecx.
2109 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
2110
2111 // Load native context into ebx.
2112 __ mov(ebx, NativeContextOperand());
2113
2114 // Check if argumentsList is an (unmodified) arguments object.
2115 __ cmp(ecx, ContextOperand(ebx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2116 __ j(equal, &create_arguments);
2117 __ cmp(ecx, ContextOperand(ebx, Context::STRICT_ARGUMENTS_MAP_INDEX));
2118 __ j(equal, &create_arguments);
2119
2120 // Check if argumentsList is a fast JSArray.
2121 __ CmpInstanceType(ecx, JS_ARRAY_TYPE);
2122 __ j(equal, &create_array);
2123
2124 // Ask the runtime to create the list (actually a FixedArray).
2125 __ bind(&create_runtime);
2126 {
2127 FrameScope scope(masm, StackFrame::INTERNAL);
2128 __ Push(edi);
2129 __ Push(edx);
2130 __ Push(eax);
2131 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2132 __ Pop(edx);
2133 __ Pop(edi);
2134 __ mov(ebx, FieldOperand(eax, FixedArray::kLengthOffset));
2135 __ SmiUntag(ebx);
2136 }
2137 __ jmp(&done_create);
2138
2139 // Try to create the list from an arguments object.
2140 __ bind(&create_arguments);
2141 __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
2142 __ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
2143 __ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2144 __ j(not_equal, &create_runtime);
2145 __ SmiUntag(ebx);
2146 __ mov(eax, ecx);
2147 __ jmp(&done_create);
2148
2149 // Try to create the list from a JSArray object.
2150 __ bind(&create_array);
2151 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
2152 __ DecodeField<Map::ElementsKindBits>(ecx);
2153 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2154 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2155 STATIC_ASSERT(FAST_ELEMENTS == 2);
2156 __ cmp(ecx, Immediate(FAST_ELEMENTS));
2157 __ j(above, &create_runtime);
2158 __ cmp(ecx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
2159 __ j(equal, &create_runtime);
2160 __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
2161 __ SmiUntag(ebx);
2162 __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));
2163
2164 __ bind(&done_create);
2165 }
2166
2167 // Check for stack overflow.
2168 {
2169 // Check the stack for overflow. We are not trying to catch interruptions
2170 // (i.e. debug break and preemption) here, so check the "real stack limit".
2171 Label done;
2172 ExternalReference real_stack_limit =
2173 ExternalReference::address_of_real_stack_limit(masm->isolate());
2174 __ mov(ecx, Operand::StaticVariable(real_stack_limit));
2175 // Make ecx the space we have left. The stack might already be overflowed
2176 // here which will cause ecx to become negative.
2177 __ neg(ecx);
2178 __ add(ecx, esp);
2179 __ sar(ecx, kPointerSizeLog2);
2180 // Check if the arguments will overflow the stack.
2181 __ cmp(ecx, ebx);
2182 __ j(greater, &done, Label::kNear); // Signed comparison.
2183 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2184 __ bind(&done);
2185 }
2186
2187 // ----------- S t a t e -------------
2188 // -- edi : target
2189 // -- eax : args (a FixedArray built from argumentsList)
2190 // -- ebx : len (number of elements to push from args)
2191 // -- edx : new.target (checked to be constructor or undefined)
2192 // -- esp[0] : return address.
2193 // -- esp[4] : thisArgument
2194 // -----------------------------------
2195
2196 // Push arguments onto the stack (thisArgument is already on the stack).
2197 {
2198 __ movd(xmm0, edx);
2199 __ PopReturnAddressTo(edx);
2200 __ Move(ecx, Immediate(0));
2201 Label done, loop;
2202 __ bind(&loop);
2203 __ cmp(ecx, ebx);
2204 __ j(equal, &done, Label::kNear);
2205 __ Push(
2206 FieldOperand(eax, ecx, times_pointer_size, FixedArray::kHeaderSize));
2207 __ inc(ecx);
2208 __ jmp(&loop);
2209 __ bind(&done);
2210 __ PushReturnAddressFrom(edx);
2211 __ movd(edx, xmm0);
2212 __ Move(eax, ebx);
2213 }
2214
2215 // Dispatch to Call or Construct depending on whether new.target is undefined.
2216 {
2217 __ CompareRoot(edx, Heap::kUndefinedValueRootIndex);
2218 __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2219 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2220 }
2221 }
2222
2223 namespace {
2224
2225 // Drops the top JavaScript frame and an arguments adaptor frame below it (if
2226 // present), preserving all the arguments prepared for the current call.
2227 // Does nothing when ES2015 tail call elimination is disabled (e.g. debugger on).
2228 // ES6 14.6.3. PrepareForTailCall
2229 //
2230 // Stack structure for the function g() tail calling f():
2231 //
2232 // ------- Caller frame: -------
2233 // | ...
2234 // | g()'s arg M
2235 // | ...
2236 // | g()'s arg 1
2237 // | g()'s receiver arg
2238 // | g()'s caller pc
2239 // ------- g()'s frame: -------
2240 // | g()'s caller fp <- fp
2241 // | g()'s context
2242 // | function pointer: g
2243 // | -------------------------
2244 // | ...
2245 // | ...
2246 // | f()'s arg N
2247 // | ...
2248 // | f()'s arg 1
2249 // | f()'s receiver arg
2250 // | f()'s caller pc <- sp
2251 // ----------------------
2252 //
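// For illustration: with ES2015 tail call elimination enabled, a strict-mode
// tail call such as
//   "use strict"; function g(a) { return f(a, 1); }
// can reuse g()'s frame (and an arguments adaptor frame below it, if present)
// for the call to f() instead of growing the stack.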
2253 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2254 Register scratch1, Register scratch2,
2255 Register scratch3) {
2256 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2257 Comment cmnt(masm, "[ PrepareForTailCall");
2258
2259 // Prepare for tail call only if ES2015 tail call elimination is enabled.
2260 Label done;
2261 ExternalReference is_tail_call_elimination_enabled =
2262 ExternalReference::is_tail_call_elimination_enabled_address(
2263 masm->isolate());
2264 __ movzx_b(scratch1,
2265 Operand::StaticVariable(is_tail_call_elimination_enabled));
2266 __ cmp(scratch1, Immediate(0));
2267 __ j(equal, &done, Label::kNear);
2268
2269 // Drop possible interpreter handler/stub frame.
2270 {
2271 Label no_interpreter_frame;
2272 __ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
2273 Immediate(Smi::FromInt(StackFrame::STUB)));
2274 __ j(not_equal, &no_interpreter_frame, Label::kNear);
2275 __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2276 __ bind(&no_interpreter_frame);
2277 }
2278
2279 // Check if next frame is an arguments adaptor frame.
2280 Register caller_args_count_reg = scratch1;
2281 Label no_arguments_adaptor, formal_parameter_count_loaded;
2282 __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2283 __ cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
2284 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2285 __ j(not_equal, &no_arguments_adaptor, Label::kNear);
2286
2287 // Drop current frame and load arguments count from arguments adaptor frame.
2288 __ mov(ebp, scratch2);
2289 __ mov(caller_args_count_reg,
2290 Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2291 __ SmiUntag(caller_args_count_reg);
2292 __ jmp(&formal_parameter_count_loaded, Label::kNear);
2293
2294 __ bind(&no_arguments_adaptor);
2295 // Load caller's formal parameter count
2296 __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2297 __ mov(scratch1,
2298 FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2299 __ mov(
2300 caller_args_count_reg,
2301 FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
2302 __ SmiUntag(caller_args_count_reg);
2303
2304 __ bind(&formal_parameter_count_loaded);
2305
2306 ParameterCount callee_args_count(args_reg);
2307 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2308 scratch3, ReturnAddressState::kOnStack, 0);
2309 __ bind(&done);
2310 }
2311 } // namespace
2312
2313 // static
2314 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2315 ConvertReceiverMode mode,
2316 TailCallMode tail_call_mode) {
2317 // ----------- S t a t e -------------
2318 // -- eax : the number of arguments (not including the receiver)
2319 // -- edi : the function to call (checked to be a JSFunction)
2320 // -----------------------------------
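// For illustration: only non-native sloppy-mode functions get their receiver
// converted below: null or undefined becomes the global proxy and primitives
// are boxed via ToObject, so for function f() { return this; } a sloppy-mode
// f.call(5) sees a Number wrapper while a strict-mode f sees the Smi 5.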
2321 __ AssertFunction(edi);
2322
2323 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2324 // Check that the function is not a "classConstructor".
2325 Label class_constructor;
2326 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2327 __ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
2328 Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2329 __ j(not_zero, &class_constructor);
2330
2331 // Enter the context of the function; ToObject has to run in the function
2332 // context, and we also need to take the global proxy from the function
2333 // context in case of conversion.
2334 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2335 SharedFunctionInfo::kStrictModeByteOffset);
2336 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2337 // We need to convert the receiver for non-native sloppy mode functions.
2338 Label done_convert;
2339 __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
2340 Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2341 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2342 __ j(not_zero, &done_convert);
2343 {
2344 // ----------- S t a t e -------------
2345 // -- eax : the number of arguments (not including the receiver)
2346 // -- edx : the shared function info.
2347 // -- edi : the function to call (checked to be a JSFunction)
2348 // -- esi : the function context.
2349 // -----------------------------------
2350
2351 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2352 // Patch receiver to global proxy.
2353 __ LoadGlobalProxy(ecx);
2354 } else {
2355 Label convert_to_object, convert_receiver;
2356 __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
2357 __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
2358 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2359 __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
2360 __ j(above_equal, &done_convert);
2361 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2362 Label convert_global_proxy;
2363 __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
2364 &convert_global_proxy, Label::kNear);
2365 __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
2366 Label::kNear);
2367 __ bind(&convert_global_proxy);
2368 {
2369 // Patch receiver to global proxy.
2370 __ LoadGlobalProxy(ecx);
2371 }
2372 __ jmp(&convert_receiver);
2373 }
2374 __ bind(&convert_to_object);
2375 {
2376 // Convert receiver using ToObject.
2377 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2378 // in the fast case? (fall back to AllocateInNewSpace?)
2379 FrameScope scope(masm, StackFrame::INTERNAL);
2380 __ SmiTag(eax);
2381 __ Push(eax);
2382 __ Push(edi);
2383 __ mov(eax, ecx);
2384 ToObjectStub stub(masm->isolate());
2385 __ CallStub(&stub);
2386 __ mov(ecx, eax);
2387 __ Pop(edi);
2388 __ Pop(eax);
2389 __ SmiUntag(eax);
2390 }
2391 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2392 __ bind(&convert_receiver);
2393 }
2394 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
2395 }
2396 __ bind(&done_convert);
2397
2398 // ----------- S t a t e -------------
2399 // -- eax : the number of arguments (not including the receiver)
2400 // -- edx : the shared function info.
2401 // -- edi : the function to call (checked to be a JSFunction)
2402 // -- esi : the function context.
2403 // -----------------------------------
2404
2405 if (tail_call_mode == TailCallMode::kAllow) {
2406 PrepareForTailCall(masm, eax, ebx, ecx, edx);
2407 // Reload shared function info.
2408 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2409 }
2410
2411 __ mov(ebx,
2412 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2413 __ SmiUntag(ebx);
2414 ParameterCount actual(eax);
2415 ParameterCount expected(ebx);
2416 __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION,
2417 CheckDebugStepCallWrapper());
2418 // The function is a "classConstructor", need to raise an exception.
2419 __ bind(&class_constructor);
2420 {
2421 FrameScope frame(masm, StackFrame::INTERNAL);
2422 __ push(edi);
2423 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2424 }
2425 }
2426
2427
2428 namespace {
2429
2430 void Generate_PushBoundArguments(MacroAssembler* masm) {
2431 // ----------- S t a t e -------------
2432 // -- eax : the number of arguments (not including the receiver)
2433 // -- edx : new.target (only in case of [[Construct]])
2434 // -- edi : target (checked to be a JSBoundFunction)
2435 // -----------------------------------
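// For illustration: for var b = f.bind(thisArg, 1, 2), a call b(3) must invoke
// f with (1, 2, 3), so the code below shifts the existing stack arguments (and
// the return address) down and copies the [[BoundArguments]] into the gap.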
2436
2437 // Load [[BoundArguments]] into ecx and length of that into ebx.
2438 Label no_bound_arguments;
2439 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2440 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2441 __ SmiUntag(ebx);
2442 __ test(ebx, ebx);
2443 __ j(zero, &no_bound_arguments);
2444 {
2445 // ----------- S t a t e -------------
2446 // -- eax : the number of arguments (not including the receiver)
2447 // -- edx : new.target (only in case of [[Construct]])
2448 // -- edi : target (checked to be a JSBoundFunction)
2449 // -- ecx : the [[BoundArguments]] (implemented as FixedArray)
2450 // -- ebx : the number of [[BoundArguments]]
2451 // -----------------------------------
2452
2453 // Reserve stack space for the [[BoundArguments]].
2454 {
2455 Label done;
2456 __ lea(ecx, Operand(ebx, times_pointer_size, 0));
2457 __ sub(esp, ecx);
2458 // Check the stack for overflow. We are not trying to catch interruptions
2459 // (i.e. debug break and preemption) here, so check the "real stack
2460 // limit".
2461 __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
2462 __ j(greater, &done, Label::kNear); // Signed comparison.
2463 // Restore the stack pointer.
2464 __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
2465 {
2466 FrameScope scope(masm, StackFrame::MANUAL);
2467 __ EnterFrame(StackFrame::INTERNAL);
2468 __ CallRuntime(Runtime::kThrowStackOverflow);
2469 }
2470 __ bind(&done);
2471 }
2472
2473 // Adjust effective number of arguments to include return address.
2474 __ inc(eax);
2475
2476 // Relocate arguments and return address down the stack.
2477 {
2478 Label loop;
2479 __ Set(ecx, 0);
2480 __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
2481 __ bind(&loop);
2482 __ movd(xmm0, Operand(ebx, ecx, times_pointer_size, 0));
2483 __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm0);
2484 __ inc(ecx);
2485 __ cmp(ecx, eax);
2486 __ j(less, &loop);
2487 }
2488
2489 // Copy [[BoundArguments]] to the stack (below the arguments).
2490 {
2491 Label loop;
2492 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2493 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2494 __ SmiUntag(ebx);
2495 __ bind(&loop);
2496 __ dec(ebx);
2497 __ movd(xmm0, FieldOperand(ecx, ebx, times_pointer_size,
2498 FixedArray::kHeaderSize));
2499 __ movd(Operand(esp, eax, times_pointer_size, 0), xmm0);
2500 __ lea(eax, Operand(eax, 1));
2501 __ j(greater, &loop);
2502 }
2503
2504 // Adjust effective number of arguments (eax contains the number of
2505 // arguments from the call plus return address plus the number of
2506 // [[BoundArguments]]), so we need to subtract one for the return address.
2507 __ dec(eax);
2508 }
2509 __ bind(&no_bound_arguments);
2510 }
2511
2512 } // namespace
2513
2514
2515 // static
2516 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2517 TailCallMode tail_call_mode) {
2518 // ----------- S t a t e -------------
2519 // -- eax : the number of arguments (not including the receiver)
2520 // -- edi : the function to call (checked to be a JSBoundFunction)
2521 // -----------------------------------
2522 __ AssertBoundFunction(edi);
2523
2524 if (tail_call_mode == TailCallMode::kAllow) {
2525 PrepareForTailCall(masm, eax, ebx, ecx, edx);
2526 }
2527
2528 // Patch the receiver to [[BoundThis]].
2529 __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2530 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);
2531
2532 // Push the [[BoundArguments]] onto the stack.
2533 Generate_PushBoundArguments(masm);
2534
2535 // Call the [[BoundTargetFunction]] via the Call builtin.
2536 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2537 __ mov(ecx, Operand::StaticVariable(ExternalReference(
2538 Builtins::kCall_ReceiverIsAny, masm->isolate())));
2539 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2540 __ jmp(ecx);
2541 }
2542
2543
2544 // static
2545 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2546 TailCallMode tail_call_mode) {
2547 // ----------- S t a t e -------------
2548 // -- eax : the number of arguments (not including the receiver)
2549 // -- edi : the target to call (can be any Object).
2550 // -----------------------------------
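// For illustration: this is the generic call path. JSFunctions and bound
// functions dispatch to their specialized builtins, proxies fall back to the
// runtime, any other object with a [[Call]] internal method is routed through
// the "call_as_function_delegate", and everything else throws a TypeError.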
2551
2552 Label non_callable, non_function, non_smi;
2553 __ JumpIfSmi(edi, &non_callable);
2554 __ bind(&non_smi);
2555 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2556 __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2557 RelocInfo::CODE_TARGET);
2558 __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2559 __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2560 RelocInfo::CODE_TARGET);
2561
2562 // Check if target has a [[Call]] internal method.
2563 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2564 Immediate(1 << Map::kIsCallable));
2565 __ j(zero, &non_callable);
2566
2567 __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2568 __ j(not_equal, &non_function);
2569
2570 // 0. Prepare for tail call if necessary.
2571 if (tail_call_mode == TailCallMode::kAllow) {
2572 PrepareForTailCall(masm, eax, ebx, ecx, edx);
2573 }
2574
2575 // 1. Runtime fallback for Proxy [[Call]].
2576 __ PopReturnAddressTo(ecx);
2577 __ Push(edi);
2578 __ PushReturnAddressFrom(ecx);
2579 // Increase the arguments size to include the pushed function and the
2580 // existing receiver on the stack.
2581 __ add(eax, Immediate(2));
2582 // Tail-call to the runtime.
2583 __ JumpToExternalReference(
2584 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2585
2586 // 2. Call to something else that is callable, but is neither a JSFunction,
2587 // a bound function nor a proxy; let the call-as-function delegate handle it.
2588 __ bind(&non_function);
2589 // Overwrite the original receiver with the (original) target.
2590 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2591 // Let the "call_as_function_delegate" take care of the rest.
2592 __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
2593 __ Jump(masm->isolate()->builtins()->CallFunction(
2594 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2595 RelocInfo::CODE_TARGET);
2596
2597 // 3. Call to something that is not callable.
2598 __ bind(&non_callable);
2599 {
2600 FrameScope scope(masm, StackFrame::INTERNAL);
2601 __ Push(edi);
2602 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2603 }
2604 }
2605
2606
2607 // static
2608 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2609 // ----------- S t a t e -------------
2610 // -- eax : the number of arguments (not including the receiver)
2611 // -- edx : the new target (checked to be a constructor)
2612 // -- edi : the constructor to call (checked to be a JSFunction)
2613 // -----------------------------------
2614 __ AssertFunction(edi);
2615
2616 // Calling convention for function specific ConstructStubs require
2617 // ebx to contain either an AllocationSite or undefined.
2618 __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);
2619
2620 // Tail call to the function-specific construct stub (still in the caller
2621 // context at this point).
2622 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2623 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
2624 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2625 __ jmp(ecx);
2626 }
2627
2628
2629 // static
2630 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2631 // ----------- S t a t e -------------
2632 // -- eax : the number of arguments (not including the receiver)
2633 // -- edx : the new target (checked to be a constructor)
2634 // -- edi : the constructor to call (checked to be a JSBoundFunction)
2635 // -----------------------------------
2636 __ AssertBoundFunction(edi);
2637
2638 // Push the [[BoundArguments]] onto the stack.
2639 Generate_PushBoundArguments(masm);
2640
2641 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2642 {
2643 Label done;
2644 __ cmp(edi, edx);
2645 __ j(not_equal, &done, Label::kNear);
2646 __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2647 __ bind(&done);
2648 }
2649
2650 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2651 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2652 __ mov(ecx, Operand::StaticVariable(
2653 ExternalReference(Builtins::kConstruct, masm->isolate())));
2654 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2655 __ jmp(ecx);
2656 }
2657
2658
2659 // static
2660 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2661 // ----------- S t a t e -------------
2662 // -- eax : the number of arguments (not including the receiver)
2663 // -- edi : the constructor to call (checked to be a JSProxy)
2664 // -- edx : the new target (either the same as the constructor or
2665 // the JSFunction on which new was invoked initially)
2666 // -----------------------------------
2667
2668 // Call into the Runtime for Proxy [[Construct]].
2669 __ PopReturnAddressTo(ecx);
2670 __ Push(edi);
2671 __ Push(edx);
2672 __ PushReturnAddressFrom(ecx);
2673 // Include the pushed new_target, constructor and the receiver.
2674 __ add(eax, Immediate(3));
2675 // Tail-call to the runtime.
2676 __ JumpToExternalReference(
2677 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2678 }
2679
2680
2681 // static
2682 void Builtins::Generate_Construct(MacroAssembler* masm) {
2683 // ----------- S t a t e -------------
2684 // -- eax : the number of arguments (not including the receiver)
2685 // -- edx : the new target (either the same as the constructor or
2686 // the JSFunction on which new was invoked initially)
2687 // -- edi : the constructor to call (can be any Object)
2688 // -----------------------------------
2689
2690 // Check if target is a Smi.
2691 Label non_constructor;
2692 __ JumpIfSmi(edi, &non_constructor, Label::kNear);
2693
2694 // Dispatch based on instance type.
2695 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2696 __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
2697 RelocInfo::CODE_TARGET);
2698
2699 // Check if target has a [[Construct]] internal method.
2700 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2701 Immediate(1 << Map::kIsConstructor));
2702 __ j(zero, &non_constructor, Label::kNear);
2703
2704 // Only dispatch to bound functions after checking whether they are
2705 // constructors.
2706 __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2707 __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
2708 RelocInfo::CODE_TARGET);
2709
2710 // Only dispatch to proxies after checking whether they are constructors.
2711 __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2712 __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
2713 RelocInfo::CODE_TARGET);
2714
2715 // Called Construct on an exotic Object with a [[Construct]] internal method.
2716 {
2717 // Overwrite the original receiver with the (original) target.
2718 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2719 // Let the "call_as_constructor_delegate" take care of the rest.
2720 __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
2721 __ Jump(masm->isolate()->builtins()->CallFunction(),
2722 RelocInfo::CODE_TARGET);
2723 }
2724
2725 // Called Construct on an Object that doesn't have a [[Construct]] internal
2726 // method.
2727 __ bind(&non_constructor);
2728 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2729 RelocInfo::CODE_TARGET);
2730 }
2731
2732 // static
2733 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2734 // ----------- S t a t e -------------
2735 // -- edx : requested object size (untagged)
2736 // -- esp[0] : return address
2737 // -----------------------------------
2738 __ SmiTag(edx);
2739 __ PopReturnAddressTo(ecx);
2740 __ Push(edx);
2741 __ PushReturnAddressFrom(ecx);
2742 __ Move(esi, Smi::FromInt(0));
2743 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2744 }
2745
2746 // static
2747 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2748 // ----------- S t a t e -------------
2749 // -- edx : requested object size (untagged)
2750 // -- esp[0] : return address
2751 // -----------------------------------
2752 __ SmiTag(edx);
2753 __ PopReturnAddressTo(ecx);
2754 __ Push(edx);
2755 __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2756 __ PushReturnAddressFrom(ecx);
2757 __ Move(esi, Smi::FromInt(0));
2758 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2759 }
2760
2761 // static
2762 void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
2763 // The StringToNumber stub takes one argument in eax.
2764 __ AssertString(eax);
2765
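// For illustration: short array-index strings such as "5" cache their numeric
// value in the string's hash field, so ToNumber("5") can be answered without
// a runtime call; anything else (e.g. "3.14") takes the runtime path below.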
2766 // Check if string has a cached array index.
2767 Label runtime;
2768 __ test(FieldOperand(eax, String::kHashFieldOffset),
2769 Immediate(String::kContainsCachedArrayIndexMask));
2770 __ j(not_zero, &runtime, Label::kNear);
2771 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
2772 __ IndexFromHash(eax, eax);
2773 __ Ret();
2774
2775 __ bind(&runtime);
2776 {
2777 FrameScope frame(masm, StackFrame::INTERNAL);
2778 // Push argument.
2779 __ push(eax);
2780 // We cannot use a tail call here because this builtin can also be called
2781 // from wasm.
2782 __ CallRuntime(Runtime::kStringToNumber);
2783 }
2784 __ Ret();
2785 }
2786
2787 // static
2788 void Builtins::Generate_ToNumber(MacroAssembler* masm) {
2789 // The ToNumber stub takes one argument in eax.
2790 Label not_smi;
2791 __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
2792 __ Ret();
2793 __ bind(&not_smi);
2794
2795 Label not_heap_number;
2796 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
2797 __ j(not_equal, &not_heap_number, Label::kNear);
2798 __ Ret();
2799 __ bind(&not_heap_number);
2800
2801 __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
2802 RelocInfo::CODE_TARGET);
2803 }
2804
2805 // static
2806 void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
2807 // The NonNumberToNumber stub takes one argument in eax.
2808 __ AssertNotNumber(eax);
2809
2810 Label not_string;
2811 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
2812 // eax: object
2813 // edi: object map
2814 __ j(above_equal, &not_string, Label::kNear);
2815 __ Jump(masm->isolate()->builtins()->StringToNumber(),
2816 RelocInfo::CODE_TARGET);
2817 __ bind(&not_string);
2818
2819 Label not_oddball;
2820 __ CmpInstanceType(edi, ODDBALL_TYPE);
2821 __ j(not_equal, &not_oddball, Label::kNear);
2822 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
2823 __ Ret();
2824 __ bind(&not_oddball);
2825 {
2826 FrameScope frame(masm, StackFrame::INTERNAL);
2827 // Push argument.
2828 __ push(eax);
2829 // We cannot use a tail call here because this builtin can also be called
2830 // from wasm.
2831 __ CallRuntime(Runtime::kToNumber);
2832 }
2833 __ Ret();
2834 }
2835
2836 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2837 // ----------- S t a t e -------------
2838 // -- eax : actual number of arguments
2839 // -- ebx : expected number of arguments
2840 // -- edx : new target (passed through to callee)
2841 // -- edi : function (passed through to callee)
2842 // -----------------------------------
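// For illustration: the adaptor reconciles actual and expected argument
// counts. For function f(a, b) { return b; }, the call f(1) pushes an extra
// undefined so that b reads as undefined, while f(1, 2, 3) copies only the two
// expected arguments into the callee frame (the extra argument stays reachable
// through the adaptor frame, e.g. via the arguments object).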
2843
2844 Label invoke, dont_adapt_arguments, stack_overflow;
2845 __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
2846
2847 Label enough, too_few;
2848 __ cmp(eax, ebx);
2849 __ j(less, &too_few);
2850 __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2851 __ j(equal, &dont_adapt_arguments);
2852
2853 { // Enough parameters: Actual >= expected.
2854 __ bind(&enough);
2855 EnterArgumentsAdaptorFrame(masm);
2856 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
2857
2858 // Copy receiver and all expected arguments.
2859 const int offset = StandardFrameConstants::kCallerSPOffset;
2860 __ lea(edi, Operand(ebp, eax, times_4, offset));
2861 __ mov(eax, -1); // account for receiver
2862
2863 Label copy;
2864 __ bind(&copy);
2865 __ inc(eax);
2866 __ push(Operand(edi, 0));
2867 __ sub(edi, Immediate(kPointerSize));
2868 __ cmp(eax, ebx);
2869 __ j(less, &copy);
2870 // eax now contains the expected number of arguments.
2871 __ jmp(&invoke);
2872 }
2873
2874 { // Too few parameters: Actual < expected.
2875 __ bind(&too_few);
2876 EnterArgumentsAdaptorFrame(masm);
2877 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
2878
2879 // Remember expected arguments in ecx.
2880 __ mov(ecx, ebx);
2881
2882 // Copy receiver and all actual arguments.
2883 const int offset = StandardFrameConstants::kCallerSPOffset;
2884 __ lea(edi, Operand(ebp, eax, times_4, offset));
2885 // ebx = expected - actual.
2886 __ sub(ebx, eax);
2887 // eax = -actual - 1
2888 __ neg(eax);
2889 __ sub(eax, Immediate(1));
2890
2891 Label copy;
2892 __ bind(&copy);
2893 __ inc(eax);
2894 __ push(Operand(edi, 0));
2895 __ sub(edi, Immediate(kPointerSize));
2896 __ test(eax, eax);
2897 __ j(not_zero, &copy);
2898
2899 // Fill remaining expected arguments with undefined values.
2900 Label fill;
2901 __ bind(&fill);
2902 __ inc(eax);
2903 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
2904 __ cmp(eax, ebx);
2905 __ j(less, &fill);
2906
2907 // Restore expected arguments.
2908 __ mov(eax, ecx);
2909 }
2910
2911 // Call the entry point.
2912 __ bind(&invoke);
2913 // Restore function pointer.
2914 __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2915 // eax : expected number of arguments
2916 // edx : new target (passed through to callee)
2917 // edi : function (passed through to callee)
2918 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2919 __ call(ecx);
2920
2921 // Store offset of return address for deoptimizer.
2922 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2923
2924 // Leave frame and return.
2925 LeaveArgumentsAdaptorFrame(masm);
2926 __ ret(0);
2927
2928 // -------------------------------------------
2929 // Don't adapt arguments.
2930 // -------------------------------------------
2931 __ bind(&dont_adapt_arguments);
2932 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2933 __ jmp(ecx);
2934
2935 __ bind(&stack_overflow);
2936 {
2937 FrameScope frame(masm, StackFrame::MANUAL);
2938 __ CallRuntime(Runtime::kThrowStackOverflow);
2939 __ int3();
2940 }
2941 }
2942
2943
2944 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
2945 Register function_template_info,
2946 Register scratch0, Register scratch1,
2947 Label* receiver_check_failed) {
2948 // If there is no signature, return the holder.
2949 __ CompareRoot(FieldOperand(function_template_info,
2950 FunctionTemplateInfo::kSignatureOffset),
2951 Heap::kUndefinedValueRootIndex);
2952 Label receiver_check_passed;
2953 __ j(equal, &receiver_check_passed, Label::kNear);
2954
2955 // Walk the prototype chain.
2956 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
2957 Label prototype_loop_start;
2958 __ bind(&prototype_loop_start);
2959
2960 // Get the constructor, if any.
2961 __ GetMapConstructor(scratch0, scratch0, scratch1);
2962 __ CmpInstanceType(scratch1, JS_FUNCTION_TYPE);
2963 Label next_prototype;
2964 __ j(not_equal, &next_prototype, Label::kNear);
2965
2966 // Get the constructor's signature.
2967 __ mov(scratch0,
2968 FieldOperand(scratch0, JSFunction::kSharedFunctionInfoOffset));
2969 __ mov(scratch0,
2970 FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
2971
2972 // Loop through the chain of inheriting function templates.
2973 Label function_template_loop;
2974 __ bind(&function_template_loop);
2975
2976 // If the signatures match, we have a compatible receiver.
2977 __ cmp(scratch0, FieldOperand(function_template_info,
2978 FunctionTemplateInfo::kSignatureOffset));
2979 __ j(equal, &receiver_check_passed, Label::kNear);
2980
2981 // If the current type is not a FunctionTemplateInfo, load the next prototype
2982 // in the chain.
2983 __ JumpIfSmi(scratch0, &next_prototype, Label::kNear);
2984 __ CmpObjectType(scratch0, FUNCTION_TEMPLATE_INFO_TYPE, scratch1);
2985 __ j(not_equal, &next_prototype, Label::kNear);
2986
2987 // Otherwise load the parent function template and iterate.
2988 __ mov(scratch0,
2989 FieldOperand(scratch0, FunctionTemplateInfo::kParentTemplateOffset));
2990 __ jmp(&function_template_loop, Label::kNear);
2991
2992 // Load the next prototype.
2993 __ bind(&next_prototype);
2994 __ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
2995 __ test(FieldOperand(receiver, Map::kBitField3Offset),
2996 Immediate(Map::HasHiddenPrototype::kMask));
2997 __ j(zero, receiver_check_failed);
2998
2999 __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
3000 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
3001 // Iterate.
3002 __ jmp(&prototype_loop_start, Label::kNear);
3003
3004 __ bind(&receiver_check_passed);
3005 }
3006
3007
3008 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
3009 // ----------- S t a t e -------------
3010 // -- eax : number of arguments (not including the receiver)
3011 // -- edi : callee
3012 // -- esi : context
3013 // -- esp[0] : return address
3014 // -- esp[4] : last argument
3015 // -- ...
3016 // -- esp[eax * 4] : first argument
3017 // -- esp[(eax + 1) * 4] : receiver
3018 // -----------------------------------
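// For illustration: API functions created from a FunctionTemplate may carry a
// signature; the receiver (or one of its hidden prototypes) must have been
// instantiated from a template compatible with that signature, otherwise an
// "Illegal invocation" TypeError is thrown.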
3019
3020 // Load the FunctionTemplateInfo.
3021 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
3022 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
3023
3024 // Do the compatible receiver check.
3025 Label receiver_check_failed;
3026 __ mov(ecx, Operand(esp, eax, times_pointer_size, kPCOnStackSize));
3027 __ Push(eax);
3028 CompatibleReceiverCheck(masm, ecx, ebx, edx, eax, &receiver_check_failed);
3029 __ Pop(eax);
3030 // Get the callback offset from the FunctionTemplateInfo, and jump to the
3031 // beginning of the code.
3032 __ mov(edx, FieldOperand(ebx, FunctionTemplateInfo::kCallCodeOffset));
3033 __ mov(edx, FieldOperand(edx, CallHandlerInfo::kFastHandlerOffset));
3034 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
3035 __ jmp(edx);
3036
3037 // Compatible receiver check failed: pop return address, arguments and
3038 // receiver and throw an Illegal Invocation exception.
3039 __ bind(&receiver_check_failed);
3040 __ Pop(eax);
3041 __ PopReturnAddressTo(ebx);
3042 __ lea(eax, Operand(eax, times_pointer_size, 1 * kPointerSize));
3043 __ add(esp, eax);
3044 __ PushReturnAddressFrom(ebx);
3045 {
3046 FrameScope scope(masm, StackFrame::INTERNAL);
3047 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
3048 }
3049 }
3050
3051
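// For illustration: on-stack replacement (OSR) lets a hot loop that is still
// running in unoptimized code jump into newly optimized code mid-execution.
// The runtime compiles the optimized code, its deoptimization data records the
// pc offset at which the loop continues, and the return address is rewritten
// below so that returning "resumes" at that entry point.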
3052 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
3053 // Lookup the function in the JavaScript frame.
3054 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3055 {
3056 FrameScope scope(masm, StackFrame::INTERNAL);
3057 // Pass function as argument.
3058 __ push(eax);
3059 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
3060 }
3061
3062 Label skip;
3063 // If the code object is null, just return to the unoptimized code.
3064 __ cmp(eax, Immediate(0));
3065 __ j(not_equal, &skip, Label::kNear);
3066 __ ret(0);
3067
3068 __ bind(&skip);
3069
3070 // Load deoptimization data from the code object.
3071 __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
3072
3073 // Load the OSR entrypoint offset from the deoptimization data.
3074 __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
3075 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
3076 __ SmiUntag(ebx);
3077
3078 // Compute the target address = code_obj + header_size + osr_offset
3079 __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));
3080
3081 // Overwrite the return address on the stack.
3082 __ mov(Operand(esp, 0), eax);
3083
3084 // And "return" to the OSR entry point of the function.
3085 __ ret(0);
3086 }
3087
3088
3089 #undef __
3090 } // namespace internal
3091 } // namespace v8
3092
3093 #endif // V8_TARGET_ARCH_IA32