| OLD | NEW |
| (Empty) |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #if V8_TARGET_ARCH_X64 | |
| 6 | |
| 7 #include "src/code-factory.h" | |
| 8 #include "src/codegen.h" | |
| 9 #include "src/deoptimizer.h" | |
| 10 #include "src/full-codegen/full-codegen.h" | |
| 11 | |
| 12 namespace v8 { | |
| 13 namespace internal { | |
| 14 | |
| 15 | |
| 16 #define __ ACCESS_MASM(masm) | |
| 17 | |
// Adapts a JavaScript-style call (argc in rax, args on the stack) into a
// call to the C++ builtin identified by |id|. argc, target and new.target
// are pushed as extra arguments so stack-frame iterators can reconstruct
// the call for stack traces.
void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ addp(rax, Immediate(num_extra_args + 1));

  // Unconditionally insert argc, target and new target as extra arguments. They
  // will be used by stack frame iterators when constructing the stack trace.
  __ PopReturnAddressTo(kScratchRegister);
  __ Integer32ToSmi(rax, rax);
  __ Push(rax);
  // Only the on-stack copy of argc is Smi-tagged; restore the raw integer
  // in rax for JumpToExternalReference.
  __ SmiToInteger32(rax, rax);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}
| 56 | |
| 57 | |
// Tail-calls the Code object stored in the target function's
// SharedFunctionInfo. Expects the JSFunction in rdi; clobbers
// kScratchRegister only.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  // Skip past the Code object header to the first instruction.
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}
| 66 | |
// Calls the runtime function |function_id| with the target function as its
// single argument, then tail-calls the Code object the runtime returned.
// rax (argument count), rdx (new target) and rdi (target function) are
// saved around the runtime call so the callee sees them unchanged.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    // Stash the runtime result (rax) in rbx so rax can be restored below.
    __ movp(rbx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // Skip past the Code object header and jump to the first instruction.
  __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);
}
| 97 | |
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  // Stack limit was hit: use the interrupt cue to try installing the
  // optimized code (tail-calls whatever code the runtime returns).
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  // Otherwise keep running the shared (unoptimized) code.
  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
| 113 | |
| 114 | |
// Shared body for all JSConstructStub variants.
//  - is_api_function: callee is an API function; suppresses recording the
//    construct-stub deopt pc (see below).
//  - create_implicit_receiver: allocate a fresh receiver object via
//    FastNewObjectStub before invoking the constructor.
//  - check_derived_construct: after the call, throw if a derived-class
//    constructor returned a Smi (ES6 9.2.2 step 13+).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rsi: context
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rsi);
    __ Push(rbx);
    // Argument count is stored Smi-tagged below the receiver slot(s).
    __ Integer32ToSmi(rcx, rax);
    __ Push(rcx);

    if (create_implicit_receiver) {
      // Allocate the new receiver object. rdi/rdx are caller-saved around
      // the stub call.
      __ Push(rdi);
      __ Push(rdx);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rdi);

      // ----------- S t a t e -------------
      //  -- rdi: constructor function
      //  -- rbx: newly allocated object
      //  -- rdx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize));
    }

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(rbx);
      __ Push(rbx);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // rcx counts down from argc to 0; arguments are pushed in reverse.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;
      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(rax, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ movp(rax, Operand(rsp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ movp(rbx, Operand(rsp, 1 * kPointerSize));
    } else {
      __ movp(rbx, Operand(rsp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(rax, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return.
  // rbx still holds the Smi-tagged argument count restored above.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  if (create_implicit_receiver) {
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->constructed_objects(), 1);
  }
  __ ret(0);
}
| 248 | |
| 249 | |
| 250 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { | |
| 251 Generate_JSConstructStubHelper(masm, false, true, false); | |
| 252 } | |
| 253 | |
| 254 | |
| 255 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { | |
| 256 Generate_JSConstructStubHelper(masm, true, false, false); | |
| 257 } | |
| 258 | |
| 259 | |
| 260 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { | |
| 261 Generate_JSConstructStubHelper(masm, false, false, false); | |
| 262 } | |
| 263 | |
| 264 | |
| 265 void Builtins::Generate_JSBuiltinsConstructStubForDerived( | |
| 266 MacroAssembler* masm) { | |
| 267 Generate_JSConstructStubHelper(masm, false, false, true); | |
| 268 } | |
| 269 | |
| 270 | |
// Reports an attempt to [[Construct]] a non-constructable target by calling
// into the runtime (which raises the error); control does not continue past
// the runtime call on the normal path.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // The target (rdi) is the runtime function's argument.
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
| 276 | |
| 277 | |
// Tells Generate_CheckStackOverflow whether rax holds a Smi-tagged count
// (kRaxIsSmiTagged) or a raw machine integer (kRaxIsUntaggedInt).
enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };
| 279 | |
| 280 | |
// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged rax_is_tagged) {
  // rax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make r11 the space we need for the array when it is unrolled onto the
  // stack, i.e. rax * kPointerSize in both branches.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(r11, rax);
    __ shlq(r11, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r11);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
| 313 | |
| 314 | |
// Shared body of the JS entry trampolines: moves the C++-supplied call
// parameters into the JS calling convention (rax/rbx/rdi/rsi/rdx), copies
// the arguments onto the stack (dereferencing each handle), and calls the
// Call or Construct builtin depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // register rax and rbx holds the argument count and argument array,
    // while rdi holds the function pointer, rsi the context, and rdx the
    // new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else   // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    // new_target must be parked in r11 because rdi is about to be
    // overwritten with the function.
    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
    Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}
| 443 | |
| 444 | |
| 445 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | |
| 446 Generate_JSEntryTrampolineHelper(masm, false); | |
| 447 } | |
| 448 | |
| 449 | |
| 450 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | |
| 451 Generate_JSEntryTrampolineHelper(masm, true); | |
| 452 } | |
| 453 | |
// static
// Resumes a suspended JSGeneratorObject: records the input value and resume
// mode on the generator, handles debugger stepping, then dispatches either
// to the new-style (Ignition/TurboFan) resume path or rebuilds a
// full-codegen frame for old-style generators.
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(rbx);

  // Store input value into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  __ RecordWriteField(rbx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx);

  // Load suspended function and context.
  __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  Operand last_step_action_operand = masm->ExternalOperand(last_step_action);
  STATIC_ASSERT(StepFrame > StepIn);
  __ cmpb(last_step_action_operand, Immediate(StepIn));
  __ j(greater_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalOperand(debug_suspended_generator);
  __ cmpp(rbx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  {
    Label done_loop, loop;
    __ bind(&loop);
    // Decrement-first loop: pushes exactly rcx holes; carry fires when the
    // count drops below zero, covering the zero-parameter case.
    __ subl(rcx, Immediate(1));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object: bytecode function data means
  // a new-style generator.
  Label old_generator;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
  __ j(not_equal, &old_generator);

  // New-style (ignition/turbofan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ movp(rdx, rbx);
    __ jmp(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  }

  // Old-style (full-codegen) generator object.
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushReturnAddressFrom(rax);  // Return address.
    __ Push(rbp);                   // Caller's frame pointer.
    __ Move(rbp, rsp);
    __ Push(rsi);  // Callee's context.
    __ Push(rdi);  // Callee's JS Function.

    // Restore the operand stack.
    __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
    __ SmiToInteger32(rax, FieldOperand(rsi, FixedArray::kLengthOffset));
    {
      Label done_loop, loop;
      __ Set(rcx, 0);
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ Push(
          FieldOperand(rsi, rcx, times_pointer_size, FixedArray::kHeaderSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset),
                Heap::kEmptyFixedArrayRootIndex);

    // Restore context.
    __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));

    // Resume the generator function at the continuation.
    __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
    __ SmiToInteger64(
        rcx, FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
    __ leap(rdx, FieldOperand(rdx, rcx, times_1, Code::kHeaderSize));
    // Mark the generator as currently executing.
    __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
            Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ movp(rax, rbx);  // Continuation expects generator object in rax.
    __ jmp(rdx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve the generator object and resume mode across the runtime call;
    // the function is reloaded from the generator afterwards.
    __ Push(rbx);
    __ Push(rdx);
    __ Push(rdi);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ Pop(rbx);
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);
}
| 619 | |
// Tears down the current interpreter frame and removes the receiver plus
// arguments (the parameter size recorded in the BytecodeArray) from the
// caller's stack. Clobbers scratch1 and scratch2.
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments, keeping the return address on top.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}
| 639 | |
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the new target
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.
  __ Push(rdx);  // Callee's new target.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  // A zero (uninitialized) DebugInfo slot means no debug copy exists.
  DCHECK_EQ(Smi::FromInt(0), DebugInfo::uninitialized());
  __ cmpp(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
          Immediate(0));
  __ j(not_equal, &load_debug_bytecode_array);
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ j(equal, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Dispatch to the first bytecode handler for the function: load the first
  // bytecode and index the dispatch table with it.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ call(rbx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ jmp(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ leave();  // Leave the frame so we can tail call.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx);
  __ RecordWriteCodeEntryField(rdi, rcx, r15);
  __ jmp(rcx);
}
| 767 | |
// Emitted at the return point of an interpreted function that has been
// selected for baseline compilation: preserves the return value in rax,
// calls Runtime::kCompileBaseline on the function, then returns to the
// caller of the interpreted frame.
void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for the call to CompileBaseline below.
  __ movp(rdi, Operand(rbp, StandardFrameConstants::kFunctionOffset));
  __ movp(kContextRegister,
          Operand(rbp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, rbx, rcx);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value so the runtime call cannot clobber it.
    __ Push(rax);

    // Push function as argument and compile for baseline.
    __ Push(rdi);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ Pop(rax);
  }
  __ ret(0);
}
| 792 | |
// Pushes the arguments described by rax (count) and rbx (address of the
// first argument) onto the stack, highest address first, optionally
// including one extra slot for the receiver. Decrements rbx and uses rcx
// as the loop limit, so both are clobbered.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         bool push_receiver) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rbx : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  // -----------------------------------

  // Find the address of the last argument:
  // rcx = rbx - (count [+ 1 for receiver]) * kPointerSize.
  __ movp(rcx, rax);
  if (push_receiver) {
    __ addp(rcx, Immediate(1));  // Add one for receiver.
  }

  __ shlp(rcx, Immediate(kPointerSizeLog2));
  __ negp(rcx);
  __ addp(rcx, rbx);

  // Push the arguments, walking rbx downwards until it reaches the limit in
  // rcx. Jumping to the check first makes the zero-argument case fall
  // straight through.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(rbx, 0));
  __ subp(rbx, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(rbx, rcx);
  __ j(greater, &loop_header, Label::kNear);
}
| 822 | |
// static
// Pushes the interpreter's argument block onto the machine stack (including
// the receiver) and tail-calls the appropriate Call builtin. Used by the
// interpreter's call bytecodes.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rbx : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  // -- rdi : the target to call (can be any Object).
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // true: also push the receiver slot.
  Generate_InterpreterPushArgs(masm, true);

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  // When the bytecode has already proven the target is a JSFunction we can
  // skip the generic Call builtin and go straight to CallFunction.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }
}
| 854 | |
// static
// Pushes the interpreter's argument block plus an empty receiver slot and
// tail-calls the generic Construct builtin. Used by the interpreter's
// construct bytecodes.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdx : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -- rdi : the constructor to call (can be any Object)
  // -- rbx : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed (filled in by Construct).
  __ Push(Immediate(0));

  // false: the receiver slot was pushed explicitly above.
  Generate_InterpreterPushArgs(masm, false);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  // Call the constructor (rax, rdx, rdi passed on).
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
| 881 | |
// Re-enters bytecode dispatch for an already set-up interpreter frame:
// pushes a return address pointing into the interpreter entry trampoline,
// reloads the dispatch table, bytecode array and offset from the frame,
// and jumps to the handler for the current bytecode.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  // The offset is recorded when the trampoline is generated; zero means it
  // has not been generated yet.
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  // Convert the code-object-relative offset into an absolute instruction
  // address inside the trampoline.
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a Smi).
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the byte at the current offset and
  // use it to index the dispatch table.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ jmp(rbx);
}
| 923 | |
// Entry point for lazily-compiled functions. Before falling back to the
// Runtime::kCompileLazy runtime call, this searches the function's optimized
// code map for an entry matching the current native context (with no OSR),
// installing cached literals and code into the closure when found, and
// otherwise tries the context-independent cached code and the shared
// function's full code.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : argument count (preserved for callee)
  // -- rdx : new target (preserved for callee)
  // -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = rdi;
  Register map = r8;
  Register index = r9;
  __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
  // A map with fewer than two elements has no context-dependent entries.
  __ cmpl(index, Immediate(2));
  __ j(less, &gotta_call_runtime);

  // Find literals.
  // r14 : native context
  // r9  : length / index
  // r8  : optimized code map
  // rdx : new target
  // rdi : closure
  Register native_context = r14;
  __ movp(native_context, NativeContextOperand());

  // Walk the optimized code map from the end towards the start, looking for
  // an entry for the current native context.
  __ bind(&loop_top);
  // Native context match?
  Register temp = r11;
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousContext));
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmpp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // OSR id set to none? (OSR entries are not usable for a regular call.)
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  __ SmiToInteger32(temp, temp);
  const int bailout_id = BailoutId::None().ToInt();
  __ cmpl(temp, Immediate(bailout_id));
  __ j(not_equal, &loop_bottom);

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousLiterals));
  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  // The assert guarantees the load below reads a Smi (the array length) when
  // temp is the literals array itself.
  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
  __ movp(r15, FieldOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(r15, &maybe_cleared_weakcell);
  // r15 is a pointer, therefore temp is a WeakCell pointing to a literals
  // array.
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ jmp(&got_literals);

  // r15 is a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
  __ bind(&maybe_cleared_weakcell);
  __ cmpp(r15, Immediate(0));
  __ j(equal, &gotta_call_runtime);

  // Save the literals in the closure.
  __ bind(&got_literals);
  __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp);
  // index is dead from here on, so reuse r15 as the write-barrier scratch.
  __ movp(r15, index);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  // Code available?
  Register entry = rcx;
  __ movp(entry, FieldOperand(map, index, times_pointer_size,
                              SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  // A Smi here means the WeakCell was cleared; check the shared entry next.
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  // Skip past the Code object header to the first instruction.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);

  // Link the closure into the optimized function list.
  // rcx : code entry (entry)
  // r14 : native context
  // rdx : new target
  // rdi : closure
  __ movp(rbx,
          ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
          closure);
  // Save closure before the write barrier (RecordWriteContextSlot may clobber
  // the value register).
  __ movp(rbx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, r15,
                            kDontSaveFPRegs);
  __ movp(closure, rbx);
  // Tail-call the cached code; rax/rdx/rdi are still intact for the callee.
  __ jmp(entry);

  __ bind(&loop_bottom);
  __ subl(index, Immediate(SharedFunctionInfo::kEntryLength));
  __ cmpl(index, Immediate(1));
  __ j(greater, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility. Check the context free optimized code map entry.
  __ movp(entry, FieldOperand(map, FixedArray::kHeaderSize +
                                       SharedFunctionInfo::kSharedCodeIndex));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Is the full code valid?
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  // Extract the code kind from the flags word; a BUILTIN here means the
  // shared code is just the lazy-compile stub itself, so compile for real.
  __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset));
  __ andl(rbx, Immediate(Code::KindField::kMask));
  __ shrl(rbx, Immediate(Code::KindField::kShift));
  __ cmpl(rbx, Immediate(Code::BUILTIN));
  __ j(equal, &gotta_call_runtime);
  // Yes, install the full code.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
| 1072 | |
// Tail-calls the runtime to compile the target function with the baseline
// compiler, then jumps to the returned code.
void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}
| 1076 | |
// Tail-calls the runtime to optimize the target function on the main thread
// (non-concurrently), then jumps to the returned code.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
| 1081 | |
| 1082 | |
// Tail-calls the runtime to kick off concurrent optimization of the target
// function, then jumps to the returned code.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
| 1086 | |
// Tries to instantiate the target as a validated asm.js module via
// Runtime::kInstantiateAsmJs; on success returns its result directly, on
// failure falls back to regular lazy compilation of the function.
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : argument count (preserved for callee)
  // -- rdx : new target (preserved for callee)
  // -- rdi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee (as a Smi, so the GC can
    // walk the frame while the runtime call is active).
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target, to be restored
    // on the failure path below.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
    for (int i = 2; i >= 0; --i) {
      __ Push(Operand(
          rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
    }
    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);
    scope.GenerateLeaveFrame();
    // Drop four caller slots — presumably the receiver plus the three
    // stdlib/foreign/heap arguments; assumes the caller passed exactly
    // three arguments. TODO(review): confirm against the call sites.
    __ ret(4 * kPointerSize);

    __ bind(&failed);
    // Restore target function and new target (pushed in reverse order above).
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // On failure, tail call back to regular js.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
| 1127 | |
// Shared body of the code-age builtins: calls the C function that resets a
// code object's age to "young", then re-executes the patched prologue.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns: rewind the on-stack return address by 5 bytes —
  // presumably the length of the call that got us here (cf. the named
  // Assembler::kShortCallInstructionLength used in
  // Generate_MarkCodeAsExecutedOnce). TODO(review): confirm.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  // The (rewound) return address identifies the code object to make young;
  // it sits above the registers saved by Pushad.
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}
| 1150 | |
| 1151 | |
// Stamp out one pair of builtins (even/odd marking) per code age; all of
// them share GenerateMakeCodeYoungAgainCommon above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
| 1163 | |
| 1164 | |
// Marks the calling code object as executed via a C call, then performs the
// standard JS frame prologue on its behalf and returns past the code-age
// stub sequence.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  // Recover the address of the call that got us here (return address minus
  // the call instruction length) to identify the code object.
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}
| 1194 | |
| 1195 | |
// "Executed twice" shares the make-young path rather than the
// mark-as-executed path.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
| 1199 | |
| 1200 | |
// "To be executed once" is generated identically to "executed once".
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
| 1204 | |
| 1205 | |
// Shared body for the NotifyStubFailure builtins: notifies the runtime of a
// stub failure while preserving all registers, then returns to the IC miss
// stub continuation already on the stack.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}
| 1224 | |
| 1225 | |
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
| 1229 | |
| 1230 | |
// Stub-failure notification that also saves/restores FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
| 1234 | |
| 1235 | |
// Shared body for the NotifyDeoptimized builtins: informs the runtime of a
// deoptimization of the given bailout type, then resumes the full-codegen
// continuation found on the stack, dropping the state (and, if present, the
// saved TOS value, which is restored into rax).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it. It sits just
  // below the return address.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister,
          Immediate(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  // TOS_REGISTER state: the accumulator value was spilled below the state
  // slot; reload it into rax before returning.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister,
          Immediate(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}
| 1270 | |
| 1271 | |
// Eager-deopt notification.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
| 1275 | |
| 1276 | |
// Soft-deopt notification.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
| 1280 | |
| 1281 | |
// Lazy-deopt notification.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
| 1285 | |
| 1286 | |
// static
// Implements the Date.prototype getters: loads the requested date field
// from the receiver, using the per-isolate date cache when the cache stamp
// is current and falling back to the C date-field function otherwise.
// Throws a TypeError when the receiver is not a JSDate.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  // -- rax    : number of arguments
  // -- rdi    : function
  // -- rsi    : context
  // -- rsp[0] : return address
  // -- rsp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into rax and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(rax, args.GetReceiverOperand());
    __ JumpIfSmi(rax, &receiver_not_date);
    __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
    __ j(not_equal, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The raw date value is always stored directly on the object.
    __ movp(rax, FieldOperand(rax, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Cached fields are valid only while the date cache stamp matches.
      Label stamp_mismatch;
      __ Load(rdx, ExternalReference::date_cache_stamp(masm->isolate()));
      __ cmpp(rdx, FieldOperand(rax, JSDate::kCacheStampOffset));
      __ j(not_equal, &stamp_mismatch, Label::kNear);
      __ movp(rax, FieldOperand(
                       rax, JSDate::kValueOffset + field_index * kPointerSize));
      __ ret(1 * kPointerSize);
      __ bind(&stamp_mismatch);
    }
    // Slow path: compute the field in C.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2);
    __ Move(arg_reg_1, rax);
    __ Move(arg_reg_2, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ ret(1 * kPointerSize);

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Move(rbx, Smi::FromInt(0));
    __ EnterBuiltinFrame(rsi, rdi, rbx);
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}
| 1340 | |
// static
// Implements Function.prototype.apply: reshuffles the stack so thisArg
// becomes the receiver, then tail-calls the Apply builtin with argArray (or
// the Call builtin with zero arguments when argArray is null/undefined).
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax     : argc
  // -- rsp[0]  : return address
  // -- rsp[8]  : argArray
  // -- rsp[16] : thisArg
  // -- rsp[24] : receiver
  // -----------------------------------

  // 1. Load receiver into rdi, argArray into rax (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    // Default both thisArg (rdx) and argArray (rbx) to undefined; they are
    // only overwritten when the corresponding argument was passed.
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    // Drop all arguments plus the receiver, then push thisArg as the new
    // receiver under the return address.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  // -- rax    : argArray
  // -- rdi    : receiver
  // -- rsp[0] : return address
  // -- rsp[8] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &receiver_not_callable, Label::kNear);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
| 1420 | |
| 1421 | |
// static
// Implements Function.prototype.call: makes the first argument the new
// receiver by shifting the remaining arguments down one slot, then
// tail-calls the generic Call builtin on the original receiver.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //  ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument (push undefined if not), so
  // the shift loop below always has something to move into the receiver slot.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver. The loop walks from the
  //    receiver end towards the top of the stack.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);              // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
| 1473 | |
| 1474 | |
// Implements Reflect.apply(target, thisArgument, argumentsList): replaces
// the caller's arguments with thisArgument as receiver and tail-calls the
// Apply builtin on target with argumentsList.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax     : argc
  // -- rsp[0]  : return address
  // -- rsp[8]  : argumentsList
  // -- rsp[16] : thisArgument
  // -- rsp[24] : target
  // -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    // Default target (rdi), thisArgument (rdx) and argumentsList (rbx) to
    // undefined; each is overwritten only when actually passed.
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    // Drop all arguments plus the receiver, push thisArgument as the new
    // receiver, and move argumentsList into rax for the Apply builtin.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  // -- rax    : argumentsList
  // -- rdi    : target
  // -- rsp[0] : return address
  // -- rsp[8] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
| 1538 | |
| 1539 | |
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // Implements Reflect.construct(target, argumentsList [, newTarget]).
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    // Any argument not actually passed defaults to undefined.
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);   // argc == 0.
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);                  // new.target defaults to target
    __ j(equal, &done, Label::kNear);   // argc == 1: only target present.
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);   // argc == 2: no explicit new.target.
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    // Pop all arguments plus the receiver and push undefined as the new
    // receiver ([[Construct]] creates its own receiver).
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax     : argumentsList
  //  -- rdx     : new.target
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor (check the
  // IsConstructor bit in the map's bit field; Smis never are).
  Label target_not_constructor;
  __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    // Put the offending object in the receiver slot for the error message.
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdx);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
| 1620 | |
| 1621 | |
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // Entry point for calls to the InternalArray constructor function; loads
  // the function from the native context and tail-calls the specialized
  // construct stub.
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;  // NOTE(review): appears unused here.

  // Get the InternalArray function.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // A Smi check catches both an uninitialized (NULL) slot and a Smi,
    // since kSmiTag == 0.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
| 1650 | |
| 1651 | |
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // Entry point for calls to the Array constructor function; loads the
  // function from the native context and tail-calls the Array construct stub.
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;  // NOTE(review): appears unused here.

  // Get the Array function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // A Smi check catches both an uninitialized (NULL) slot and a Smi,
    // since kSmiTag == 0.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // The stub expects the constructor in rdi and new.target in rdx; here the
  // Array function is called as a plain function, so they are the same.
  __ movp(rdx, rdi);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);  // No AllocationSite.
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
| 1681 | |
// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // Implements Math.max / Math.min over the actual arguments, including the
  // spec-mandated NaN propagation and -0/+0 distinction.
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : function
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  // For kMin we keep the accumulator when it is below the parameter; for
  // kMax when it is above.
  Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  // Register whose sign bit decides the -0/+0 tie-break below: the parameter
  // (xmm1) for kMin, the accumulator (xmm0) for kMax.
  XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in rdx and the double value in xmm0.
  __ LoadRoot(rdx, root_index);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ Move(rcx, rax);  // rcx counts the remaining parameters, argc down to 0.

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ testp(rcx, rcx);
    __ j(zero, &done_loop);

    // Load the next parameter tagged value into rbx.
    __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));

    // Load the double value of the parameter into xmm1, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(rbx, &convert_smi);
    __ JumpIfRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                  Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      // Smi-tag rax/rcx so the GC sees valid tagged values while they are
      // spilled across the call.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ EnterBuiltinFrame(rsi, rdi, rax);
      __ Push(rcx);
      __ Push(rdx);
      __ movp(rax, rbx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rcx);
      __ LeaveBuiltinFrame(rsi, rdi, rax);
      __ SmiToInteger32(rcx, rcx);
      __ SmiToInteger32(rax, rax);
      {
        // Restore the double accumulator value (xmm0), which the call may
        // have clobbered; rdx still holds its tagged counterpart.
        Label restore_smi, done_restore;
        __ JumpIfSmi(rdx, &restore_smi, Label::kNear);
        __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
        __ jmp(&done_restore, Label::kNear);
        __ bind(&restore_smi);
        __ SmiToDouble(xmm0, rdx);
        __ bind(&done_restore);
      }
    }
    // Re-dispatch on the conversion result (Smi or HeapNumber).
    __ jmp(&convert);
    __ bind(&convert_number);
    __ Movsd(xmm1, FieldOperand(rbx, HeapNumber::kValueOffset));
    __ jmp(&done_convert, Label::kNear);
    __ bind(&convert_smi);
    __ SmiToDouble(xmm1, rbx);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (xmm0) and the next parameter value on the right hand side (xmm1).
    Label compare_equal, compare_nan, compare_swap, done_compare;
    __ Ucomisd(xmm0, xmm1);
    __ j(parity_even, &compare_nan, Label::kNear);  // Unordered => NaN.
    __ j(cc, &done_compare, Label::kNear);          // Accumulator wins.
    __ j(equal, &compare_equal, Label::kNear);      // Possible -0 vs +0.

    // Result is on the right hand side: the parameter becomes the new
    // accumulator (both its double and tagged forms).
    __ bind(&compare_swap);
    __ Movaps(xmm0, xmm1);
    __ Move(rdx, rbx);
    __ jmp(&done_compare, Label::kNear);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(rdx, Heap::kNanValueRootIndex);
    __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
    __ jmp(&done_compare, Label::kNear);

    // Left and right hand side are equal, check for -0 vs. +0: if the sign
    // bit of `reg` is set, swapping yields the spec result (min prefers -0,
    // max prefers +0).
    __ bind(&compare_equal);
    __ Movmskpd(kScratchRegister, reg);
    __ testl(kScratchRegister, Immediate(1));
    __ j(not_zero, &compare_swap);

    __ bind(&done_compare);
    __ decp(rcx);
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all arguments plus the receiver and return the tagged accumulator.
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rdx);
  __ Ret();
}
| 1794 | |
// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // Number(value) called as a function (not as a constructor): converts the
  // first argument to a number, or returns +0 with no arguments.
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rbx.
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
  }

  // 2a. Convert the first argument to a number. Argc is Smi-tagged and kept
  // alive in the builtin frame across the call.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Integer32ToSmi(rax, rax);
    __ EnterBuiltinFrame(rsi, rdi, rax);
    __ movp(rax, rbx);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(rsi, rdi, rbx);  // Argc popped to rbx.
    __ SmiToInteger32(rbx, rbx);
  }

  {
    // Drop all arguments including the receiver; rax holds the result.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rbx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ Ret();
  }

  // 2b. No arguments, return +0 (already in rax, since argc == 0 is the
  // Smi zero). Pop only the receiver slot.
  __ bind(&no_arguments);
  __ ret(1 * kPointerSize);
}
| 1838 | |
| 1839 | |
// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // new Number(value): converts the first argument to a number and wraps it
  // in a JSValue, going to the runtime when new.target differs from the
  // Number function (subclassing) or inline allocation fails.
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rdx                 : new target
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Store argc in r8 (Smi-tagged so it survives GC across the calls below).
  __ Integer32ToSmi(r8, rax);

  // 2. Load the first argument into rbx; defaults to +0 when absent.
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ Move(rbx, Smi::FromInt(0));
    __ bind(&done);
  }

  // 3. Make sure rbx is a number; call ToNumber if it is neither a Smi nor
  // a HeapNumber, preserving new.target (rdx) across the call.
  {
    Label done_convert;
    __ JumpIfSmi(rbx, &done_convert);
    __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(equal, &done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterBuiltinFrame(rsi, rdi, r8);
      __ Push(rdx);
      __ Move(rax, rbx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(rbx, rax);
      __ Pop(rdx);
      __ LeaveBuiltinFrame(rsi, rdi, r8);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (i.e. Number is being
  // subclassed); if so, take the generic object-creation path.
  Label drop_frame_and_ret, new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the number; falls through to the
  // runtime path on allocation failure.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ jmp(&drop_frame_and_ret, Label::kNear);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterBuiltinFrame(rsi, rdi, r8);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the converted number directly into the fresh wrapper's value
    // slot (pops the pushed argument).
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
    __ LeaveBuiltinFrame(rsi, rdi, r8);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(rcx);
    __ SmiToInteger32(r8, r8);
    __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ Ret();
  }
}
| 1922 | |
| 1923 | |
// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // String(value) called as a function: returns the argument if it already
  // is a string, the descriptive string for symbols, the result of ToString
  // otherwise, and the empty string with no arguments.
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rax.
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ Integer32ToSmi(r8, rax);  // Store argc in r8 (Smi, GC-safe).
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rax, args.GetArgumentOperand(1));
  }

  // 2a. At least one argument, return rax if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(rax, &to_string, Label::kNear);
    // Instance types below FIRST_NONSTRING_TYPE are strings; equal means
    // symbol; above means any other object.
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above, &to_string, Label::kNear);
    __ j(equal, &symbol_descriptive_string, Label::kNear);
    __ jmp(&drop_frame_and_ret, Label::kNear);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(rax, Heap::kempty_stringRootIndex);
    __ ret(1 * kPointerSize);
  }

  // 3a. Convert rax to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    ToStringStub stub(masm->isolate());
    __ EnterBuiltinFrame(rsi, rdi, r8);
    __ CallStub(&stub);
    __ LeaveBuiltinFrame(rsi, rdi, r8);
  }
  __ jmp(&drop_frame_and_ret, Label::kNear);

  // 3b. Convert symbol in rax to a string: drop all arguments plus the
  // receiver, push the symbol as the sole argument and tail-call the runtime.
  __ bind(&symbol_descriptive_string);
  {
    __ PopReturnAddressTo(rcx);
    __ SmiToInteger32(r8, r8);
    __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
    __ Push(rax);
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(rcx);
    __ SmiToInteger32(r8, r8);
    __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ Ret();
  }
}
| 1996 | |
| 1997 | |
// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // new String(value): converts the first argument to a string and wraps it
  // in a JSValue, going to the runtime when new.target differs from the
  // String function (subclassing) or inline allocation fails.
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rdx                 : new target
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Store argc in r8 (Smi-tagged so it survives GC across the calls below).
  __ Integer32ToSmi(r8, rax);

  // 2. Load the first argument into rbx; defaults to the empty string.
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure rbx is a string; call ToString otherwise, preserving
  // new.target (rdx) across the call.
  {
    Label convert, done_convert;
    __ JumpIfSmi(rbx, &convert, Label::kNear);
    __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
    __ j(below, &done_convert);  // Instance types below are strings.
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      ToStringStub stub(masm->isolate());
      __ EnterBuiltinFrame(rsi, rdi, r8);
      __ Push(rdx);
      __ Move(rax, rbx);
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdx);
      __ LeaveBuiltinFrame(rsi, rdi, r8);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (i.e. String is being
  // subclassed); if so, take the generic object-creation path.
  Label drop_frame_and_ret, new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the string; falls through to the
  // runtime path on allocation failure.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ jmp(&drop_frame_and_ret, Label::kNear);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterBuiltinFrame(rsi, rdi, r8);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the converted string directly into the fresh wrapper's value
    // slot (pops the pushed argument).
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
    __ LeaveBuiltinFrame(rsi, rdi, r8);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(rcx);
    __ SmiToInteger32(r8, r8);
    __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ Ret();
  }
}
| 2081 | |
| 2082 | |
// Jumps to |stack_overflow| if pushing |rbx| (expected argument count) more
// pointers would run past the real stack limit. Clobbers rcx and r8.
static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;  // NOTE(review): appears unused here.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, r8);
  // Make r8 the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(r8, rbx);
  __ shlp(r8, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r8);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}
| 2108 | |
| 2109 | |
// Builds an arguments-adaptor frame: caller frame pointer, the
// ARGUMENTS_ADAPTOR sentinel, the function, and the Smi-tagged argument
// count. Clobbers r8; preserves rax, rbx, rcx, rdx, rdi.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}
| 2126 | |
| 2127 | |
// Tears down an arguments-adaptor frame and removes the caller-pushed
// arguments (actual argument count plus receiver) from the stack.
// Clobbers rbx and rcx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack: argc slots plus one for the
  // receiver, keeping the return address on top.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
| 2142 | |
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // Slow-path allocation: tail-calls the runtime to allocate |rdx| bytes in
  // new space.
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  // The runtime expects the size as a Smi argument on the stack.
  __ Integer32ToSmi(rdx, rdx);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  // NOTE(review): rsi (context) is set to Smi zero — presumably a dummy
  // "no context" value for callers without one; confirm against the
  // runtime's expectations.
  __ Move(rsi, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
| 2156 | |
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // Slow-path allocation: tail-calls the runtime to allocate |rdx| bytes in
  // old space.
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  // The runtime expects the Smi-tagged size plus the encoded target space.
  __ Integer32ToSmi(rdx, rdx);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ PushReturnAddressFrom(rcx);
  // NOTE(review): rsi (context) is set to Smi zero — presumably a dummy
  // "no context" value for callers without one; confirm against the
  // runtime's expectations.
  __ Move(rsi, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
| 2171 | |
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in rax.
  __ AssertString(rax);

  // Fast path: if the string's hash field holds a cached array index, the
  // numeric value can be extracted directly without calling the runtime.
  Label runtime;
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  __ j(not_zero, &runtime, Label::kNear);
  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  __ IndexFromHash(rax, rax);  // Decode the index into a Smi in rax.
  __ Ret();

  // Slow path: full string-to-number conversion in the runtime.
  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(rax);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}
| 2196 | |
// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  // Smis and HeapNumbers are already numbers and are returned unchanged;
  // everything else is delegated to NonNumberToNumber.
  Label not_smi;
  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
  __ Ret();
  __ bind(&not_smi);

  Label not_heap_number;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ Ret();
  __ bind(&not_heap_number);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}
| 2215 | |
// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in rax.
  // Handles the non-number cases of ToNumber: strings via StringToNumber,
  // oddballs (undefined/null/true/false) via their cached to-number value,
  // and everything else via the runtime.
  __ AssertNotNumber(rax);

  Label not_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: object
  // rdi: object map
  __ j(above_equal, &not_string, Label::kNear);
  __ Jump(masm->isolate()->builtins()->StringToNumber(),
          RelocInfo::CODE_TARGET);
  __ bind(&not_string);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  // Oddballs carry their precomputed numeric value.
  __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(rax);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}
| 2246 | |
| 2247 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | |
| 2248 // ----------- S t a t e ------------- | |
| 2249 // -- rax : actual number of arguments | |
| 2250 // -- rbx : expected number of arguments | |
| 2251 // -- rdx : new target (passed through to callee) | |
| 2252 // -- rdi : function (passed through to callee) | |
| 2253 // ----------------------------------- | |
| 2254 | |
| 2255 Label invoke, dont_adapt_arguments, stack_overflow; | |
| 2256 Counters* counters = masm->isolate()->counters(); | |
| 2257 __ IncrementCounter(counters->arguments_adaptors(), 1); | |
| 2258 | |
| 2259 Label enough, too_few; | |
| 2260 __ cmpp(rax, rbx); | |
| 2261 __ j(less, &too_few); | |
| 2262 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); | |
| 2263 __ j(equal, &dont_adapt_arguments); | |
| 2264 | |
| 2265 { // Enough parameters: Actual >= expected. | |
| 2266 __ bind(&enough); | |
| 2267 EnterArgumentsAdaptorFrame(masm); | |
| 2268 ArgumentsAdaptorStackCheck(masm, &stack_overflow); | |
| 2269 | |
| 2270 // Copy receiver and all expected arguments. | |
| 2271 const int offset = StandardFrameConstants::kCallerSPOffset; | |
| 2272 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset)); | |
| 2273 __ Set(r8, -1); // account for receiver | |
| 2274 | |
| 2275 Label copy; | |
| 2276 __ bind(©); | |
| 2277 __ incp(r8); | |
| 2278 __ Push(Operand(rax, 0)); | |
| 2279 __ subp(rax, Immediate(kPointerSize)); | |
| 2280 __ cmpp(r8, rbx); | |
| 2281 __ j(less, ©); | |
| 2282 __ jmp(&invoke); | |
| 2283 } | |
| 2284 | |
| 2285 { // Too few parameters: Actual < expected. | |
| 2286 __ bind(&too_few); | |
| 2287 | |
| 2288 EnterArgumentsAdaptorFrame(masm); | |
| 2289 ArgumentsAdaptorStackCheck(masm, &stack_overflow); | |
| 2290 | |
| 2291 // Copy receiver and all actual arguments. | |
| 2292 const int offset = StandardFrameConstants::kCallerSPOffset; | |
| 2293 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset)); | |
| 2294 __ Set(r8, -1); // account for receiver | |
| 2295 | |
| 2296 Label copy; | |
| 2297 __ bind(©); | |
| 2298 __ incp(r8); | |
| 2299 __ Push(Operand(rdi, 0)); | |
| 2300 __ subp(rdi, Immediate(kPointerSize)); | |
| 2301 __ cmpp(r8, rax); | |
| 2302 __ j(less, ©); | |
| 2303 | |
| 2304 // Fill remaining expected arguments with undefined values. | |
| 2305 Label fill; | |
| 2306 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); | |
| 2307 __ bind(&fill); | |
| 2308 __ incp(r8); | |
| 2309 __ Push(kScratchRegister); | |
| 2310 __ cmpp(r8, rbx); | |
| 2311 __ j(less, &fill); | |
| 2312 | |
| 2313 // Restore function pointer. | |
| 2314 __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); | |
| 2315 } | |
| 2316 | |
| 2317 // Call the entry point. | |
| 2318 __ bind(&invoke); | |
| 2319 __ movp(rax, rbx); | |
| 2320 // rax : expected number of arguments | |
| 2321 // rdx : new target (passed through to callee) | |
| 2322 // rdi : function (passed through to callee) | |
| 2323 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | |
| 2324 __ call(rcx); | |
| 2325 | |
| 2326 // Store offset of return address for deoptimizer. | |
| 2327 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); | |
| 2328 | |
| 2329 // Leave frame and return. | |
| 2330 LeaveArgumentsAdaptorFrame(masm); | |
| 2331 __ ret(0); | |
| 2332 | |
| 2333 // ------------------------------------------- | |
| 2334 // Dont adapt arguments. | |
| 2335 // ------------------------------------------- | |
| 2336 __ bind(&dont_adapt_arguments); | |
| 2337 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | |
| 2338 __ jmp(rcx); | |
| 2339 | |
| 2340 __ bind(&stack_overflow); | |
| 2341 { | |
| 2342 FrameScope frame(masm, StackFrame::MANUAL); | |
| 2343 __ CallRuntime(Runtime::kThrowStackOverflow); | |
| 2344 __ int3(); | |
| 2345 } | |
| 2346 } | |
| 2347 | |
| 2348 | |
| 2349 // static | |
| 2350 void Builtins::Generate_Apply(MacroAssembler* masm) { | |
| 2351 // ----------- S t a t e ------------- | |
| 2352 // -- rax : argumentsList | |
| 2353 // -- rdi : target | |
| 2354 // -- rdx : new.target (checked to be constructor or undefined) | |
| 2355 // -- rsp[0] : return address. | |
| 2356 // -- rsp[8] : thisArgument | |
| 2357 // ----------------------------------- | |
| 2358 | |
| 2359 // Create the list of arguments from the array-like argumentsList. | |
| 2360 { | |
| 2361 Label create_arguments, create_array, create_runtime, done_create; | |
| 2362 __ JumpIfSmi(rax, &create_runtime); | |
| 2363 | |
| 2364 // Load the map of argumentsList into rcx. | |
| 2365 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset)); | |
| 2366 | |
| 2367 // Load native context into rbx. | |
| 2368 __ movp(rbx, NativeContextOperand()); | |
| 2369 | |
| 2370 // Check if argumentsList is an (unmodified) arguments object. | |
| 2371 __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); | |
| 2372 __ j(equal, &create_arguments); | |
| 2373 __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX)); | |
| 2374 __ j(equal, &create_arguments); | |
| 2375 | |
| 2376 // Check if argumentsList is a fast JSArray. | |
| 2377 __ CmpInstanceType(rcx, JS_ARRAY_TYPE); | |
| 2378 __ j(equal, &create_array); | |
| 2379 | |
| 2380 // Ask the runtime to create the list (actually a FixedArray). | |
| 2381 __ bind(&create_runtime); | |
| 2382 { | |
| 2383 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2384 __ Push(rdi); | |
| 2385 __ Push(rdx); | |
| 2386 __ Push(rax); | |
| 2387 __ CallRuntime(Runtime::kCreateListFromArrayLike); | |
| 2388 __ Pop(rdx); | |
| 2389 __ Pop(rdi); | |
| 2390 __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset)); | |
| 2391 } | |
| 2392 __ jmp(&done_create); | |
| 2393 | |
| 2394 // Try to create the list from an arguments object. | |
| 2395 __ bind(&create_arguments); | |
| 2396 __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset)); | |
| 2397 __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset)); | |
| 2398 __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); | |
| 2399 __ j(not_equal, &create_runtime); | |
| 2400 __ SmiToInteger32(rbx, rbx); | |
| 2401 __ movp(rax, rcx); | |
| 2402 __ jmp(&done_create); | |
| 2403 | |
| 2404 // Try to create the list from a JSArray object. | |
| 2405 __ bind(&create_array); | |
| 2406 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset)); | |
| 2407 __ DecodeField<Map::ElementsKindBits>(rcx); | |
| 2408 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
| 2409 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
| 2410 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
| 2411 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); | |
| 2412 __ j(above, &create_runtime); | |
| 2413 __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS)); | |
| 2414 __ j(equal, &create_runtime); | |
| 2415 __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset)); | |
| 2416 __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset)); | |
| 2417 | |
| 2418 __ bind(&done_create); | |
| 2419 } | |
| 2420 | |
| 2421 // Check for stack overflow. | |
| 2422 { | |
| 2423 // Check the stack for overflow. We are not trying to catch interruptions | |
| 2424 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
| 2425 Label done; | |
| 2426 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); | |
| 2427 __ movp(rcx, rsp); | |
| 2428 // Make rcx the space we have left. The stack might already be overflowed | |
| 2429 // here which will cause rcx to become negative. | |
| 2430 __ subp(rcx, kScratchRegister); | |
| 2431 __ sarp(rcx, Immediate(kPointerSizeLog2)); | |
| 2432 // Check if the arguments will overflow the stack. | |
| 2433 __ cmpp(rcx, rbx); | |
| 2434 __ j(greater, &done, Label::kNear); // Signed comparison. | |
| 2435 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
| 2436 __ bind(&done); | |
| 2437 } | |
| 2438 | |
| 2439 // ----------- S t a t e ------------- | |
| 2440 // -- rdi : target | |
| 2441 // -- rax : args (a FixedArray built from argumentsList) | |
| 2442 // -- rbx : len (number of elements to push from args) | |
| 2443 // -- rdx : new.target (checked to be constructor or undefined) | |
| 2444 // -- rsp[0] : return address. | |
| 2445 // -- rsp[8] : thisArgument | |
| 2446 // ----------------------------------- | |
| 2447 | |
| 2448 // Push arguments onto the stack (thisArgument is already on the stack). | |
| 2449 { | |
| 2450 __ PopReturnAddressTo(r8); | |
| 2451 __ Set(rcx, 0); | |
| 2452 Label done, loop; | |
| 2453 __ bind(&loop); | |
| 2454 __ cmpl(rcx, rbx); | |
| 2455 __ j(equal, &done, Label::kNear); | |
| 2456 __ Push( | |
| 2457 FieldOperand(rax, rcx, times_pointer_size, FixedArray::kHeaderSize)); | |
| 2458 __ incl(rcx); | |
| 2459 __ jmp(&loop); | |
| 2460 __ bind(&done); | |
| 2461 __ PushReturnAddressFrom(r8); | |
| 2462 __ Move(rax, rcx); | |
| 2463 } | |
| 2464 | |
| 2465 // Dispatch to Call or Construct depending on whether new.target is undefined. | |
| 2466 { | |
| 2467 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); | |
| 2468 __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
| 2469 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | |
| 2470 } | |
| 2471 } | |
| 2472 | |
| 2473 namespace { | |
| 2474 | |
| 2475 // Drops top JavaScript frame and an arguments adaptor frame below it (if | |
| 2476 // present) preserving all the arguments prepared for current call. | |
| 2477 // Does nothing if debugger is currently active. | |
| 2478 // ES6 14.6.3. PrepareForTailCall | |
| 2479 // | |
| 2480 // Stack structure for the function g() tail calling f(): | |
| 2481 // | |
| 2482 // ------- Caller frame: ------- | |
| 2483 // | ... | |
| 2484 // | g()'s arg M | |
| 2485 // | ... | |
| 2486 // | g()'s arg 1 | |
| 2487 // | g()'s receiver arg | |
| 2488 // | g()'s caller pc | |
| 2489 // ------- g()'s frame: ------- | |
| 2490 // | g()'s caller fp <- fp | |
| 2491 // | g()'s context | |
| 2492 // | function pointer: g | |
| 2493 // | ------------------------- | |
| 2494 // | ... | |
| 2495 // | ... | |
| 2496 // | f()'s arg N | |
| 2497 // | ... | |
| 2498 // | f()'s arg 1 | |
| 2499 // | f()'s receiver arg | |
| 2500 // | f()'s caller pc <- sp | |
| 2501 // ---------------------- | |
| 2502 // | |
| 2503 void PrepareForTailCall(MacroAssembler* masm, Register args_reg, | |
| 2504 Register scratch1, Register scratch2, | |
| 2505 Register scratch3) { | |
| 2506 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | |
| 2507 Comment cmnt(masm, "[ PrepareForTailCall"); | |
| 2508 | |
| 2509 // Prepare for tail call only if ES2015 tail call elimination is active. | |
| 2510 Label done; | |
| 2511 ExternalReference is_tail_call_elimination_enabled = | |
| 2512 ExternalReference::is_tail_call_elimination_enabled_address( | |
| 2513 masm->isolate()); | |
| 2514 __ Move(kScratchRegister, is_tail_call_elimination_enabled); | |
| 2515 __ cmpb(Operand(kScratchRegister, 0), Immediate(0)); | |
| 2516 __ j(equal, &done); | |
| 2517 | |
| 2518 // Drop possible interpreter handler/stub frame. | |
| 2519 { | |
| 2520 Label no_interpreter_frame; | |
| 2521 __ Cmp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset), | |
| 2522 Smi::FromInt(StackFrame::STUB)); | |
| 2523 __ j(not_equal, &no_interpreter_frame, Label::kNear); | |
| 2524 __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | |
| 2525 __ bind(&no_interpreter_frame); | |
| 2526 } | |
| 2527 | |
| 2528 // Check if next frame is an arguments adaptor frame. | |
| 2529 Register caller_args_count_reg = scratch1; | |
| 2530 Label no_arguments_adaptor, formal_parameter_count_loaded; | |
| 2531 __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | |
| 2532 __ Cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset), | |
| 2533 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | |
| 2534 __ j(not_equal, &no_arguments_adaptor, Label::kNear); | |
| 2535 | |
| 2536 // Drop current frame and load arguments count from arguments adaptor frame. | |
| 2537 __ movp(rbp, scratch2); | |
| 2538 __ SmiToInteger32( | |
| 2539 caller_args_count_reg, | |
| 2540 Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
| 2541 __ jmp(&formal_parameter_count_loaded, Label::kNear); | |
| 2542 | |
| 2543 __ bind(&no_arguments_adaptor); | |
| 2544 // Load caller's formal parameter count | |
| 2545 __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 2546 __ movp(scratch1, | |
| 2547 FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); | |
| 2548 __ LoadSharedFunctionInfoSpecialField( | |
| 2549 caller_args_count_reg, scratch1, | |
| 2550 SharedFunctionInfo::kFormalParameterCountOffset); | |
| 2551 | |
| 2552 __ bind(&formal_parameter_count_loaded); | |
| 2553 | |
| 2554 ParameterCount callee_args_count(args_reg); | |
| 2555 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, | |
| 2556 scratch3, ReturnAddressState::kOnStack); | |
| 2557 __ bind(&done); | |
| 2558 } | |
| 2559 } // namespace | |
| 2560 | |
| 2561 // static | |
| 2562 void Builtins::Generate_CallFunction(MacroAssembler* masm, | |
| 2563 ConvertReceiverMode mode, | |
| 2564 TailCallMode tail_call_mode) { | |
| 2565 // ----------- S t a t e ------------- | |
| 2566 // -- rax : the number of arguments (not including the receiver) | |
| 2567 // -- rdi : the function to call (checked to be a JSFunction) | |
| 2568 // ----------------------------------- | |
| 2569 StackArgumentsAccessor args(rsp, rax); | |
| 2570 __ AssertFunction(rdi); | |
| 2571 | |
| 2572 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | |
| 2573 // Check that the function is not a "classConstructor". | |
| 2574 Label class_constructor; | |
| 2575 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | |
| 2576 __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset), | |
| 2577 Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte)); | |
| 2578 __ j(not_zero, &class_constructor); | |
| 2579 | |
| 2580 // ----------- S t a t e ------------- | |
| 2581 // -- rax : the number of arguments (not including the receiver) | |
| 2582 // -- rdx : the shared function info. | |
| 2583 // -- rdi : the function to call (checked to be a JSFunction) | |
| 2584 // ----------------------------------- | |
| 2585 | |
| 2586 // Enter the context of the function; ToObject has to run in the function | |
| 2587 // context, and we also need to take the global proxy from the function | |
| 2588 // context in case of conversion. | |
| 2589 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == | |
| 2590 SharedFunctionInfo::kStrictModeByteOffset); | |
| 2591 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | |
| 2592 // We need to convert the receiver for non-native sloppy mode functions. | |
| 2593 Label done_convert; | |
| 2594 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), | |
| 2595 Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) | | |
| 2596 (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); | |
| 2597 __ j(not_zero, &done_convert); | |
| 2598 { | |
| 2599 // ----------- S t a t e ------------- | |
| 2600 // -- rax : the number of arguments (not including the receiver) | |
| 2601 // -- rdx : the shared function info. | |
| 2602 // -- rdi : the function to call (checked to be a JSFunction) | |
| 2603 // -- rsi : the function context. | |
| 2604 // ----------------------------------- | |
| 2605 | |
| 2606 if (mode == ConvertReceiverMode::kNullOrUndefined) { | |
| 2607 // Patch receiver to global proxy. | |
| 2608 __ LoadGlobalProxy(rcx); | |
| 2609 } else { | |
| 2610 Label convert_to_object, convert_receiver; | |
| 2611 __ movp(rcx, args.GetReceiverOperand()); | |
| 2612 __ JumpIfSmi(rcx, &convert_to_object, Label::kNear); | |
| 2613 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | |
| 2614 __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx); | |
| 2615 __ j(above_equal, &done_convert); | |
| 2616 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { | |
| 2617 Label convert_global_proxy; | |
| 2618 __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex, | |
| 2619 &convert_global_proxy, Label::kNear); | |
| 2620 __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object, | |
| 2621 Label::kNear); | |
| 2622 __ bind(&convert_global_proxy); | |
| 2623 { | |
| 2624 // Patch receiver to global proxy. | |
| 2625 __ LoadGlobalProxy(rcx); | |
| 2626 } | |
| 2627 __ jmp(&convert_receiver); | |
| 2628 } | |
| 2629 __ bind(&convert_to_object); | |
| 2630 { | |
| 2631 // Convert receiver using ToObject. | |
| 2632 // TODO(bmeurer): Inline the allocation here to avoid building the frame | |
| 2633 // in the fast case? (fall back to AllocateInNewSpace?) | |
| 2634 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2635 __ Integer32ToSmi(rax, rax); | |
| 2636 __ Push(rax); | |
| 2637 __ Push(rdi); | |
| 2638 __ movp(rax, rcx); | |
| 2639 ToObjectStub stub(masm->isolate()); | |
| 2640 __ CallStub(&stub); | |
| 2641 __ movp(rcx, rax); | |
| 2642 __ Pop(rdi); | |
| 2643 __ Pop(rax); | |
| 2644 __ SmiToInteger32(rax, rax); | |
| 2645 } | |
| 2646 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | |
| 2647 __ bind(&convert_receiver); | |
| 2648 } | |
| 2649 __ movp(args.GetReceiverOperand(), rcx); | |
| 2650 } | |
| 2651 __ bind(&done_convert); | |
| 2652 | |
| 2653 // ----------- S t a t e ------------- | |
| 2654 // -- rax : the number of arguments (not including the receiver) | |
| 2655 // -- rdx : the shared function info. | |
| 2656 // -- rdi : the function to call (checked to be a JSFunction) | |
| 2657 // -- rsi : the function context. | |
| 2658 // ----------------------------------- | |
| 2659 | |
| 2660 if (tail_call_mode == TailCallMode::kAllow) { | |
| 2661 PrepareForTailCall(masm, rax, rbx, rcx, r8); | |
| 2662 } | |
| 2663 | |
| 2664 __ LoadSharedFunctionInfoSpecialField( | |
| 2665 rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset); | |
| 2666 ParameterCount actual(rax); | |
| 2667 ParameterCount expected(rbx); | |
| 2668 | |
| 2669 __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION, | |
| 2670 CheckDebugStepCallWrapper()); | |
| 2671 | |
| 2672 // The function is a "classConstructor", need to raise an exception. | |
| 2673 __ bind(&class_constructor); | |
| 2674 { | |
| 2675 FrameScope frame(masm, StackFrame::INTERNAL); | |
| 2676 __ Push(rdi); | |
| 2677 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); | |
| 2678 } | |
| 2679 } | |
| 2680 | |
| 2681 | |
| 2682 namespace { | |
| 2683 | |
| 2684 void Generate_PushBoundArguments(MacroAssembler* masm) { | |
| 2685 // ----------- S t a t e ------------- | |
| 2686 // -- rax : the number of arguments (not including the receiver) | |
| 2687 // -- rdx : new.target (only in case of [[Construct]]) | |
| 2688 // -- rdi : target (checked to be a JSBoundFunction) | |
| 2689 // ----------------------------------- | |
| 2690 | |
| 2691 // Load [[BoundArguments]] into rcx and length of that into rbx. | |
| 2692 Label no_bound_arguments; | |
| 2693 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset)); | |
| 2694 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); | |
| 2695 __ testl(rbx, rbx); | |
| 2696 __ j(zero, &no_bound_arguments); | |
| 2697 { | |
| 2698 // ----------- S t a t e ------------- | |
| 2699 // -- rax : the number of arguments (not including the receiver) | |
| 2700 // -- rdx : new.target (only in case of [[Construct]]) | |
| 2701 // -- rdi : target (checked to be a JSBoundFunction) | |
| 2702 // -- rcx : the [[BoundArguments]] (implemented as FixedArray) | |
| 2703 // -- rbx : the number of [[BoundArguments]] (checked to be non-zero) | |
| 2704 // ----------------------------------- | |
| 2705 | |
| 2706 // Reserve stack space for the [[BoundArguments]]. | |
| 2707 { | |
| 2708 Label done; | |
| 2709 __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0)); | |
| 2710 __ subp(rsp, kScratchRegister); | |
| 2711 // Check the stack for overflow. We are not trying to catch interruptions | |
| 2712 // (i.e. debug break and preemption) here, so check the "real stack | |
| 2713 // limit". | |
| 2714 __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex); | |
| 2715 __ j(greater, &done, Label::kNear); // Signed comparison. | |
| 2716 // Restore the stack pointer. | |
| 2717 __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0)); | |
| 2718 { | |
| 2719 FrameScope scope(masm, StackFrame::MANUAL); | |
| 2720 __ EnterFrame(StackFrame::INTERNAL); | |
| 2721 __ CallRuntime(Runtime::kThrowStackOverflow); | |
| 2722 } | |
| 2723 __ bind(&done); | |
| 2724 } | |
| 2725 | |
| 2726 // Adjust effective number of arguments to include return address. | |
| 2727 __ incl(rax); | |
| 2728 | |
| 2729 // Relocate arguments and return address down the stack. | |
| 2730 { | |
| 2731 Label loop; | |
| 2732 __ Set(rcx, 0); | |
| 2733 __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0)); | |
| 2734 __ bind(&loop); | |
| 2735 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); | |
| 2736 __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister); | |
| 2737 __ incl(rcx); | |
| 2738 __ cmpl(rcx, rax); | |
| 2739 __ j(less, &loop); | |
| 2740 } | |
| 2741 | |
| 2742 // Copy [[BoundArguments]] to the stack (below the arguments). | |
| 2743 { | |
| 2744 Label loop; | |
| 2745 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset)); | |
| 2746 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); | |
| 2747 __ bind(&loop); | |
| 2748 __ decl(rbx); | |
| 2749 __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size, | |
| 2750 FixedArray::kHeaderSize)); | |
| 2751 __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister); | |
| 2752 __ leal(rax, Operand(rax, 1)); | |
| 2753 __ j(greater, &loop); | |
| 2754 } | |
| 2755 | |
| 2756 // Adjust effective number of arguments (rax contains the number of | |
| 2757 // arguments from the call plus return address plus the number of | |
| 2758 // [[BoundArguments]]), so we need to subtract one for the return address. | |
| 2759 __ decl(rax); | |
| 2760 } | |
| 2761 __ bind(&no_bound_arguments); | |
| 2762 } | |
| 2763 | |
| 2764 } // namespace | |
| 2765 | |
| 2766 | |
| 2767 // static | |
| 2768 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, | |
| 2769 TailCallMode tail_call_mode) { | |
| 2770 // ----------- S t a t e ------------- | |
| 2771 // -- rax : the number of arguments (not including the receiver) | |
| 2772 // -- rdi : the function to call (checked to be a JSBoundFunction) | |
| 2773 // ----------------------------------- | |
| 2774 __ AssertBoundFunction(rdi); | |
| 2775 | |
| 2776 if (tail_call_mode == TailCallMode::kAllow) { | |
| 2777 PrepareForTailCall(masm, rax, rbx, rcx, r8); | |
| 2778 } | |
| 2779 | |
| 2780 // Patch the receiver to [[BoundThis]]. | |
| 2781 StackArgumentsAccessor args(rsp, rax); | |
| 2782 __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset)); | |
| 2783 __ movp(args.GetReceiverOperand(), rbx); | |
| 2784 | |
| 2785 // Push the [[BoundArguments]] onto the stack. | |
| 2786 Generate_PushBoundArguments(masm); | |
| 2787 | |
| 2788 // Call the [[BoundTargetFunction]] via the Call builtin. | |
| 2789 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset)); | |
| 2790 __ Load(rcx, | |
| 2791 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate())); | |
| 2792 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); | |
| 2793 __ jmp(rcx); | |
| 2794 } | |
| 2795 | |
| 2796 | |
| 2797 // static | |
| 2798 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, | |
| 2799 TailCallMode tail_call_mode) { | |
| 2800 // ----------- S t a t e ------------- | |
| 2801 // -- rax : the number of arguments (not including the receiver) | |
| 2802 // -- rdi : the target to call (can be any Object) | |
| 2803 // ----------------------------------- | |
| 2804 StackArgumentsAccessor args(rsp, rax); | |
| 2805 | |
| 2806 Label non_callable, non_function, non_smi; | |
| 2807 __ JumpIfSmi(rdi, &non_callable); | |
| 2808 __ bind(&non_smi); | |
| 2809 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | |
| 2810 __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), | |
| 2811 RelocInfo::CODE_TARGET); | |
| 2812 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE); | |
| 2813 __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), | |
| 2814 RelocInfo::CODE_TARGET); | |
| 2815 | |
| 2816 // Check if target has a [[Call]] internal method. | |
| 2817 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), | |
| 2818 Immediate(1 << Map::kIsCallable)); | |
| 2819 __ j(zero, &non_callable); | |
| 2820 | |
| 2821 __ CmpInstanceType(rcx, JS_PROXY_TYPE); | |
| 2822 __ j(not_equal, &non_function); | |
| 2823 | |
| 2824 // 0. Prepare for tail call if necessary. | |
| 2825 if (tail_call_mode == TailCallMode::kAllow) { | |
| 2826 PrepareForTailCall(masm, rax, rbx, rcx, r8); | |
| 2827 } | |
| 2828 | |
| 2829 // 1. Runtime fallback for Proxy [[Call]]. | |
| 2830 __ PopReturnAddressTo(kScratchRegister); | |
| 2831 __ Push(rdi); | |
| 2832 __ PushReturnAddressFrom(kScratchRegister); | |
| 2833 // Increase the arguments size to include the pushed function and the | |
| 2834 // existing receiver on the stack. | |
| 2835 __ addp(rax, Immediate(2)); | |
| 2836 // Tail-call to the runtime. | |
| 2837 __ JumpToExternalReference( | |
| 2838 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); | |
| 2839 | |
| 2840 // 2. Call to something else, which might have a [[Call]] internal method (if | |
| 2841 // not we raise an exception). | |
| 2842 __ bind(&non_function); | |
| 2843 // Overwrite the original receiver with the (original) target. | |
| 2844 __ movp(args.GetReceiverOperand(), rdi); | |
| 2845 // Let the "call_as_function_delegate" take care of the rest. | |
| 2846 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi); | |
| 2847 __ Jump(masm->isolate()->builtins()->CallFunction( | |
| 2848 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), | |
| 2849 RelocInfo::CODE_TARGET); | |
| 2850 | |
| 2851 // 3. Call to something that is not callable. | |
| 2852 __ bind(&non_callable); | |
| 2853 { | |
| 2854 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 2855 __ Push(rdi); | |
| 2856 __ CallRuntime(Runtime::kThrowCalledNonCallable); | |
| 2857 } | |
| 2858 } | |
| 2859 | |
| 2860 | |
| 2861 // static | |
| 2862 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | |
| 2863 // ----------- S t a t e ------------- | |
| 2864 // -- rax : the number of arguments (not including the receiver) | |
| 2865 // -- rdx : the new target (checked to be a constructor) | |
| 2866 // -- rdi : the constructor to call (checked to be a JSFunction) | |
| 2867 // ----------------------------------- | |
| 2868 __ AssertFunction(rdi); | |
| 2869 | |
| 2870 // Calling convention for function specific ConstructStubs require | |
| 2871 // rbx to contain either an AllocationSite or undefined. | |
| 2872 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); | |
| 2873 | |
| 2874 // Tail call to the function-specific construct stub (still in the caller | |
| 2875 // context at this point). | |
| 2876 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | |
| 2877 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); | |
| 2878 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); | |
| 2879 __ jmp(rcx); | |
| 2880 } | |
| 2881 | |
| 2882 | |
| 2883 // static | |
| 2884 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { | |
| 2885 // ----------- S t a t e ------------- | |
| 2886 // -- rax : the number of arguments (not including the receiver) | |
| 2887 // -- rdx : the new target (checked to be a constructor) | |
| 2888 // -- rdi : the constructor to call (checked to be a JSBoundFunction) | |
| 2889 // ----------------------------------- | |
| 2890 __ AssertBoundFunction(rdi); | |
| 2891 | |
| 2892 // Push the [[BoundArguments]] onto the stack. | |
| 2893 Generate_PushBoundArguments(masm); | |
| 2894 | |
| 2895 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. | |
| 2896 { | |
| 2897 Label done; | |
| 2898 __ cmpp(rdi, rdx); | |
| 2899 __ j(not_equal, &done, Label::kNear); | |
| 2900 __ movp(rdx, | |
| 2901 FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset)); | |
| 2902 __ bind(&done); | |
| 2903 } | |
| 2904 | |
| 2905 // Construct the [[BoundTargetFunction]] via the Construct builtin. | |
| 2906 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset)); | |
| 2907 __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate())); | |
| 2908 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); | |
| 2909 __ jmp(rcx); | |
| 2910 } | |
| 2911 | |
| 2912 | |
| 2913 // static | |
| 2914 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { | |
| 2915 // ----------- S t a t e ------------- | |
| 2916 // -- rax : the number of arguments (not including the receiver) | |
| 2917 // -- rdi : the constructor to call (checked to be a JSProxy) | |
| 2918 // -- rdx : the new target (either the same as the constructor or | |
| 2919 // the JSFunction on which new was invoked initially) | |
| 2920 // ----------------------------------- | |
| 2921 | |
| 2922 // Call into the Runtime for Proxy [[Construct]]. | |
| 2923 __ PopReturnAddressTo(kScratchRegister); | |
| 2924 __ Push(rdi); | |
| 2925 __ Push(rdx); | |
| 2926 __ PushReturnAddressFrom(kScratchRegister); | |
| 2927 // Include the pushed new_target, constructor and the receiver. | |
| 2928 __ addp(rax, Immediate(3)); | |
| 2929 __ JumpToExternalReference( | |
| 2930 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); | |
| 2931 } | |
| 2932 | |
| 2933 | |
| 2934 // static | |
| 2935 void Builtins::Generate_Construct(MacroAssembler* masm) { | |
| 2936 // ----------- S t a t e ------------- | |
| 2937 // -- rax : the number of arguments (not including the receiver) | |
| 2938 // -- rdx : the new target (either the same as the constructor or | |
| 2939 // the JSFunction on which new was invoked initially) | |
| 2940 // -- rdi : the constructor to call (can be any Object) | |
| 2941 // ----------------------------------- | |
| 2942 StackArgumentsAccessor args(rsp, rax); | |
| 2943 | |
| 2944 // Check if target is a Smi. | |
| 2945 Label non_constructor; | |
| 2946 __ JumpIfSmi(rdi, &non_constructor, Label::kNear); | |
| 2947 | |
| 2948 // Dispatch based on instance type. | |
| 2949 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | |
| 2950 __ j(equal, masm->isolate()->builtins()->ConstructFunction(), | |
| 2951 RelocInfo::CODE_TARGET); | |
| 2952 | |
| 2953 // Check if target has a [[Construct]] internal method. | |
| 2954 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), | |
| 2955 Immediate(1 << Map::kIsConstructor)); | |
| 2956 __ j(zero, &non_constructor, Label::kNear); | |
| 2957 | |
| 2958 // Only dispatch to bound functions after checking whether they are | |
| 2959 // constructors. | |
| 2960 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE); | |
| 2961 __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(), | |
| 2962 RelocInfo::CODE_TARGET); | |
| 2963 | |
| 2964 // Only dispatch to proxies after checking whether they are constructors. | |
| 2965 __ CmpInstanceType(rcx, JS_PROXY_TYPE); | |
| 2966 __ j(equal, masm->isolate()->builtins()->ConstructProxy(), | |
| 2967 RelocInfo::CODE_TARGET); | |
| 2968 | |
| 2969 // Called Construct on an exotic Object with a [[Construct]] internal method. | |
| 2970 { | |
| 2971 // Overwrite the original receiver with the (original) target. | |
| 2972 __ movp(args.GetReceiverOperand(), rdi); | |
| 2973 // Let the "call_as_constructor_delegate" take care of the rest. | |
| 2974 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi); | |
| 2975 __ Jump(masm->isolate()->builtins()->CallFunction(), | |
| 2976 RelocInfo::CODE_TARGET); | |
| 2977 } | |
| 2978 | |
| 2979 // Called Construct on an Object that doesn't have a [[Construct]] internal | |
| 2980 // method. | |
| 2981 __ bind(&non_constructor); | |
| 2982 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | |
| 2983 RelocInfo::CODE_TARGET); | |
| 2984 } | |
| 2985 | |
| 2986 | |
| 2987 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, | |
| 2988 Register function_template_info, | |
| 2989 Register scratch0, Register scratch1, | |
| 2990 Register scratch2, | |
| 2991 Label* receiver_check_failed) { | |
| 2992 Register signature = scratch0; | |
| 2993 Register map = scratch1; | |
| 2994 Register constructor = scratch2; | |
| 2995 | |
| 2996 // If there is no signature, return the holder. | |
| 2997 __ movp(signature, FieldOperand(function_template_info, | |
| 2998 FunctionTemplateInfo::kSignatureOffset)); | |
| 2999 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex); | |
| 3000 Label receiver_check_passed; | |
| 3001 __ j(equal, &receiver_check_passed, Label::kNear); | |
| 3002 | |
| 3003 // Walk the prototype chain. | |
| 3004 __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 3005 Label prototype_loop_start; | |
| 3006 __ bind(&prototype_loop_start); | |
| 3007 | |
| 3008 // Get the constructor, if any. | |
| 3009 __ GetMapConstructor(constructor, map, kScratchRegister); | |
| 3010 __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE); | |
| 3011 Label next_prototype; | |
| 3012 __ j(not_equal, &next_prototype, Label::kNear); | |
| 3013 | |
| 3014 // Get the constructor's signature. | |
| 3015 Register type = constructor; | |
| 3016 __ movp(type, | |
| 3017 FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset)); | |
| 3018 __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset)); | |
| 3019 | |
| 3020 // Loop through the chain of inheriting function templates. | |
| 3021 Label function_template_loop; | |
| 3022 __ bind(&function_template_loop); | |
| 3023 | |
| 3024 // If the signatures match, we have a compatible receiver. | |
| 3025 __ cmpp(signature, type); | |
| 3026 __ j(equal, &receiver_check_passed, Label::kNear); | |
| 3027 | |
| 3028 // If the current type is not a FunctionTemplateInfo, load the next prototype | |
| 3029 // in the chain. | |
| 3030 __ JumpIfSmi(type, &next_prototype, Label::kNear); | |
| 3031 __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister); | |
| 3032 __ j(not_equal, &next_prototype, Label::kNear); | |
| 3033 | |
| 3034 // Otherwise load the parent function template and iterate. | |
| 3035 __ movp(type, | |
| 3036 FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset)); | |
| 3037 __ jmp(&function_template_loop, Label::kNear); | |
| 3038 | |
| 3039 // Load the next prototype. | |
| 3040 __ bind(&next_prototype); | |
| 3041 __ testq(FieldOperand(map, Map::kBitField3Offset), | |
| 3042 Immediate(Map::HasHiddenPrototype::kMask)); | |
| 3043 __ j(zero, receiver_check_failed); | |
| 3044 __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset)); | |
| 3045 __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 3046 // Iterate. | |
| 3047 __ jmp(&prototype_loop_start, Label::kNear); | |
| 3048 | |
| 3049 __ bind(&receiver_check_passed); | |
| 3050 } | |
| 3051 | |
| 3052 | |
// Generates the HandleFastApiCall builtin: verifies that the receiver is
// compatible with the callee's template signature, then tail-jumps straight
// into the template's fast API call handler.  On an incompatible receiver,
// the arguments are dropped and an Illegal Invocation exception is thrown.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments (not including the receiver)
  //  -- rdi                 : callee
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[rax * 8]        : first argument
  //  -- rsp[(rax + 1) * 8]  : receiver
  // -----------------------------------

  StackArgumentsAccessor args(rsp, rax);

  // Load the FunctionTemplateInfo.
  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.  Clobbers rcx (receiver may be advanced
  // along its hidden-prototype chain) plus the rdx/r8/r9 scratch registers.
  Label receiver_check_failed;
  __ movp(rcx, args.GetReceiverOperand());
  CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.  The untagged Code entry point is formed by
  // skipping the header and removing the heap-object tag.
  __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset));
  __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset));
  __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rdx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ PopReturnAddressTo(rbx);
  // rax = (argc + 1) * kPointerSize, i.e. the total byte size of the
  // arguments plus the receiver, which are then popped off the stack.
  __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize));
  __ addp(rsp, rax);
  __ PushReturnAddressFrom(rbx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}
| 3095 | |
| 3096 | |
// Generates the OnStackReplacement builtin: asks the runtime to compile an
// OSR-optimized version of the function in the current JavaScript frame and,
// if one is produced, "returns" into it at the recorded OSR entry point by
// rewriting the return address on the stack.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object (rax is a tagged Code
  // pointer, hence the -kHeapObjectTag adjustment).
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
| 3131 | |
| 3132 | |
| 3133 #undef __ | |
| 3134 | |
| 3135 } // namespace internal | |
| 3136 } // namespace v8 | |
| 3137 | |
| 3138 #endif // V8_TARGET_ARCH_X64 | |
| OLD | NEW |