// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments excluding receiver
  //  -- r3 : target
  //  -- r5 : new.target
  //  -- sp[0] : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc] : receiver
  // -----------------------------------
  __ AssertFunction(r3);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // JumpToExternalReference expects r2 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ AddP(r2, r2, Operand(num_extra_args + 1));

  // Insert extra arguments.
  __ SmiTag(r2);
  __ Push(r2, r3, r5);
  __ SmiUntag(r2);
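  // Note: r2 is Smi-tagged just around the pushes above so that the argument
  // count stored on the GC-scanned stack always looks like a valid tagged
  // value; the raw count is restored before the external-reference jump below.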

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ LoadRR(r5, r3);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : function
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r7 and the double value in d1.
  __ LoadRoot(r7, root_index);
  __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
  // Set up state for the loop: r6 serves as the loop index over the
  // arguments (it is initialized to argc + 1 here, then immediately reset to
  // argc below before the loop is entered).
  __ AddP(r6, r2, Operand(1));

  Label done_loop, loop;
  __ LoadRR(r6, r2);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ SubP(r6, Operand(1));
    __ blt(&done_loop);

    // Load the next parameter tagged value into r2.
    __ ShiftLeftP(r1, r6, Operand(kPointerSizeLog2));
    __ LoadP(r4, MemOperand(sp, r1));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r4, &convert_smi);
    __ LoadP(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
    __ JumpIfRoot(r5, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      DCHECK(!FLAG_enable_embedded_constant_pool);
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r2);
      __ SmiTag(r6);
      __ EnterBuiltinFrame(cp, r3, r2);
      __ Push(r6, r7);
      __ LoadRR(r2, r4);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ LoadRR(r4, r2);
      __ Pop(r6, r7);
      __ LeaveBuiltinFrame(cp, r3, r2);
      __ SmiUntag(r6);
      __ SmiUntag(r2);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r7);
        __ JumpIfSmi(r7, &done_restore);
        __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ LoadDouble(d2, FieldMemOperand(r4, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r4);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ cdbr(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r1, r0);
    __ bne(&loop);
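    // (When the comparison above finds the values equal, the only case left
    // to distinguish is -0 vs. +0: IEEE comparison treats them as equal, yet
    // Math.min(-0, +0) must be -0 and Math.max(-0, +0) must be +0, hence the
    // explicit sign-bit test on the relevant operand.)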

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ ldr(d1, d2);
    __ LoadRR(r7, r4);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r7, Heap::kNanValueRootIndex);
    __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ AddP(r2, Operand(1));
  __ Drop(r2);
  __ LoadRR(r2, r7);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into r2.
  Label no_arguments;
  {
    __ LoadRR(r4, r2);  // Store argc in r4.
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ LoadP(r2, MemOperand(sp, r2));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r4);
    __ EnterBuiltinFrame(cp, r3, r4);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r3, r4);
    __ SmiUntag(r4);
  }

  {
    // Drop all arguments including the receiver.
    __ Drop(r4);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r2, Smi::FromInt(0));
  __ Ret(1);
}
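
// Both return paths above use Ret(1) to pop the receiver slot; when there
// were arguments, the argument slots themselves are removed by the preceding
// Drop(r4).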

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- r5 : new target
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4.
  {
    Label no_arguments, done;
    __ LoadRR(r8, r2);  // Store argc in r8.
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ LoadP(r4, MemOperand(sp, r4));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r4, Smi::FromInt(0));
    __ bind(&done);
  }

  // 3. Make sure r4 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r4, &done_convert);
    __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r8);
      __ EnterBuiltinFrame(cp, r3, r8);
      __ Push(r5);
      __ LoadRR(r2, r4);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ LoadRR(r4, r2);
      __ Pop(r5);
      __ LeaveBuiltinFrame(cp, r3, r8);
      __ SmiUntag(r8);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r8);
    __ EnterBuiltinFrame(cp, r3, r8);
    __ Push(r4);  // first argument
    __ CallStub(&stub);
    __ Pop(r4);
    __ LeaveBuiltinFrame(cp, r3, r8);
    __ SmiUntag(r8);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r8);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into r2.
  Label no_arguments;
  {
    __ LoadRR(r4, r2);  // Store argc in r4
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ LoadP(r2, MemOperand(sp, r2));
  }

  // 2a. At least one argument, return r2 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r2, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r2, r5, r5, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r2 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    ToStringStub stub(masm->isolate());
    __ SmiTag(r4);
    __ EnterBuiltinFrame(cp, r3, r4);
    __ CallStub(&stub);
    __ LeaveBuiltinFrame(cp, r3, r4);
    __ SmiUntag(r4);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in r2 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Drop(r4);
    __ Drop(1);
    __ Push(r2);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r4);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- r5 : new target
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4.
  {
    Label no_arguments, done;
    __ LoadRR(r8, r2);  // Store argc in r8.
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ LoadP(r4, MemOperand(sp, r4));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r4, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure r4 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r4, &convert);
    __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      ToStringStub stub(masm->isolate());
      __ SmiTag(r8);
      __ EnterBuiltinFrame(cp, r3, r8);
      __ Push(r5);
      __ LoadRR(r2, r4);
      __ CallStub(&stub);
      __ LoadRR(r4, r2);
      __ Pop(r5);
      __ LeaveBuiltinFrame(cp, r3, r8);
      __ SmiUntag(r8);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r8);
    __ EnterBuiltinFrame(cp, r3, r8);
    __ Push(r4);  // first argument
    __ CallStub(&stub);
    __ Pop(r4);
    __ LeaveBuiltinFrame(cp, r3, r8);
    __ SmiUntag(r8);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r8);
    __ Ret(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
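
// Note: a Code object's first instruction starts Code::kHeaderSize bytes past
// the object's tagged pointer, which is why entry addresses throughout this
// file are formed as code + Code::kHeaderSize - kHeapObjectTag.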

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r2);
    __ Push(r2, r3, r5, r3);

    __ CallRuntime(function_id, 1);
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
  __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
  __ bge(&ok, Label::kNear);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- r4 : allocation site or undefined
  //  -- r5 : new target
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r4, r6);

    if (!create_implicit_receiver) {
      __ SmiTag(r6, r2);
      __ LoadAndTestP(r6, r6);
      __ Push(cp, r4, r6);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r2);
      __ Push(cp, r4, r2);

      // Allocate the new receiver object.
      __ Push(r3, r5);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r6, r2);
      __ Pop(r3, r5);

      // ----------- S t a t e -------------
      //  -- r3: constructor function
      //  -- r5: new target
      //  -- r6: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r2, MemOperand(sp));
      __ SmiUntag(r2);
      __ LoadAndTestP(r2, r2);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r6, r6);
    }

    // Set up pointer to last argument.
    __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r2: number of arguments
    // r3: constructor function
    // r4: address of last argument (caller sp)
    // r5: new target
    // cr0: condition indicating whether r2 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r4));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target

    ParameterCount actual(r2);
    __ InvokeFunction(r3, r5, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r2: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r2: result
      // sp[0]: receiver
      // sp[1]: new.target
      // sp[2]: number of arguments (smi-tagged)
      __ JumpIfSmi(r2, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r2, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r2: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r3, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r2, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4);
  }
  __ Ret();
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the value to pass to the generator
  //  -- r3 : the JSGeneratorObject to resume
  //  -- r4 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r3);

  // Store input value into generator object.
  __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset));
  __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ mov(ip, Operand(last_step_action));
  __ LoadB(ip, MemOperand(ip));
  __ CmpP(ip, Operand(StepIn));
  __ bge(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ mov(ip, Operand(debug_suspended_generator));
  __ LoadP(ip, MemOperand(ip));
  __ CmpP(ip, r3);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r3 : the JSGeneratorObject to resume
  //  -- r4 : the resume mode (tagged)
  //  -- r6 : generator function
  //  -- cp : generator context
  //  -- lr : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
  __ LoadW(
      r2, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_S390X
    __ CmpP(r2, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r2);
    __ LoadAndTestP(r2, r2);
    __ beq(&done_loop);
#endif
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ push(ip);
    __ BranchOnCount(r1, &loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator, Label::kNear);

  // New-style (ignition/turbofan) generator object
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ LoadRR(r5, r3);
    __ LoadRR(r3, r6);
    __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r6);

    // Restore the operand stack.
    __ LoadP(r2, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r5, FieldMemOperand(r2, FixedArray::kLengthOffset));
    __ AddP(r2, r2,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r5);
      __ LoadAndTestP(r5, r5);
      __ beq(&done_loop);
      __ LoadRR(r1, r5);
      __ bind(&loop);
      __ LoadP(ip, MemOperand(r2, kPointerSize));
      __ la(r2, MemOperand(r2, kPointerSize));
      __ Push(ip);
      __ BranchOnCount(r1, &loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset),
              r0);

    // Resume the generator function at the continuation.
    __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset));
    __ AddP(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r4);
      __ AddP(r5, r5, r4);
      __ LoadSmiLiteral(r4,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
                r0);
      __ LoadRR(r2, r3);  // Continuation expects generator object in r2.
      __ Jump(r5);
    }
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r4, r6);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r3, r4);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r3, r4);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r4; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
  // Make r4 the space we have left. The stack might already be overflowed
  // here which will cause r4 to become negative.
  __ SubP(r4, sp, r4);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
  }
  __ CmpP(r4, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
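
// In effect: free = sp - real_stack_limit must exceed argc * kPointerSize.
// Because the comparison is signed, an already-overflowed stack (negative
// free space) also takes the runtime path.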

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r2: new.target
  // r3: function
  // r4: receiver
  // r5: argc
  // r6: argv
  // r0,r7-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    // FrameScope ends up calling MacroAssembler::EnterFrame here
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r3, r4);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r4.
    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop from argv to sp.
    // The arguments are actually placed in reverse order on sp
    // compared to argv (i.e. arg1 is highest memory in sp).
    // r3: function
    // r5: argc
    // r6: argv, i.e. points to first arg
    // r7: scratch reg to hold scaled argc
    // r8: scratch reg to hold arg handle
    // r9: scratch reg to hold index into argv
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
    __ SubRR(sp, r7);  // Buy the stack frame to fit args
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
    __ bind(&argLoop);
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));  // read next parameter
    __ la(r9, MemOperand(r9, kPointerSize));  // r9++;
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ StoreP(r0, MemOperand(r7, sp));  // push parameter
    __ b(&argLoop);
    __ bind(&argExit);
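
    // Each argv slot holds a handle (a pointer to a tagged value), which is
    // why the loop above loads the slot first and then dereferences it once
    // more before storing the actual value to the stack.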

    // Setup new.target and argc.
    __ LoadRR(r6, r2);
    __ LoadRR(r2, r5);
    __ LoadRR(r5, r6);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ b(r14);

  // r2: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadlW(args_count,
            FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ AddP(sp, sp, args_count);
}
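
// Note: the BytecodeArray parameter-size field is measured in bytes and
// already includes the receiver slot, which is why it can be added to sp
// directly, without scaling, in the AddP above.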

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//  o r3: the JS function object being called.
//  o r5: the new target
//  o cp: our context
//  o pp: the caller's constant pool pointer (if enabled)
//  o fp: the caller's frame pointer
//  o sp: stack pointer
//  o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r3);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r4;
  DCHECK(!debug_info.is(r2));
  __ LoadP(debug_info,
           FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ beq(&bytecode_array_not_present);

  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
  __ Push(r5, kInterpreterBytecodeArrayRegister, r4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ SubP(r5, sp, r4);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ CmpLogicalP(r5, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
    __ LoadAndTestP(r4, r4);
    __ beq(&no_args);
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ push(r5);
    __ SubP(r1, Operand(1));
    __ bne(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);
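
  // Dispatch above works by using the current bytecode (a single byte) as an
  // index into the table of handler code entry addresses, i.e.
  // handler = dispatch_table[bytecode * kPointerSize].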

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r2.
  LeaveInterpreterFrame(masm, r4);
  __ Ret();

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
  __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r3, r6, r7);
  __ JumpToJSEntry(r6);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ LoadP(kContextRegister,
           MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, r4);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(r2);

    // Push function as argument and compile for baseline.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(r2);
  }
  __ Ret();
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ AddP(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ LoadRR(r0, count);
  __ bind(&loop);
  __ LoadP(scratch, MemOperand(index, -kPointerSize));
  __ lay(index, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ SubP(r0, Operand(1));
  __ bne(&loop);
}
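
// The AddP above biases |index| up by one slot so that each iteration can
// pair a load at (index - kPointerSize) with a matching decrement of |index|,
// pushing |count| consecutive argument slots onto the stack.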

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r4 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for receiver).
  __ AddP(r5, r2, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r4, r5, r6);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (not including receiver)
  //  -- r5 : new target
  //  -- r3 : constructor to call
  //  -- r4 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ LoadImmP(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ CmpP(r2, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r4, r2, r6);
  __ bind(&skip);

  // Call the constructor with r2, r3, and r5 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() +
                           Code::kHeaderSize - kHeapObjectTag));
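
  // On s390, r14 is the link register, so the AddP above makes the dispatched
  // handler's eventual return land at the resume point inside
  // InterpreterEntryTrampoline.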

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r3;
  Register map = r8;
  Register index = r4;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);
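
  // Layout (as read by the loop below): the optimized code map is a
  // FixedArray scanned from its last entry down to index 1; each entry spans
  // SharedFunctionInfo::kEntryLength slots holding the native context (weak
  // cell), the OSR AST id (Smi), the literals (weak cell) and the cached code
  // (weak cell), with a context-independent entry kept separately at
  // kSharedCodeIndex.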
| 1324 | |
| 1325 // Find literals. | |
| 1326 // r9 : native context | |
| 1327 // r4 : length / index | |
| 1328 // r8 : optimized code map | |
| 1329 // r5 : new target | |
| 1330 // r3 : closure | |
| 1331 Register native_context = r9; | |
| 1332 __ LoadP(native_context, NativeContextMemOperand()); | |
| 1333 | |
| 1334 __ bind(&loop_top); | |
| 1335 Register temp = r1; | |
| 1336 Register array_pointer = r7; | |
| 1337 | |
| 1338 // Does the native context match? | |
| 1339 __ SmiToPtrArrayOffset(array_pointer, index); | |
| 1340 __ AddP(array_pointer, map, array_pointer); | |
| 1341 __ LoadP(temp, FieldMemOperand(array_pointer, | |
| 1342 SharedFunctionInfo::kOffsetToPreviousContext)); | |
| 1343 __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); | |
| 1344 __ CmpP(temp, native_context); | |
| 1345 __ bne(&loop_bottom, Label::kNear); | |
| 1346 // OSR id set to none? | |
| 1347 __ LoadP(temp, | |
| 1348 FieldMemOperand(array_pointer, | |
| 1349 SharedFunctionInfo::kOffsetToPreviousOsrAstId)); | |
| 1350 const int bailout_id = BailoutId::None().ToInt(); | |
| 1351 __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0); | |
| 1352 __ bne(&loop_bottom, Label::kNear); | |
| 1353 // Literals available? | |
| 1354 __ LoadP(temp, | |
| 1355 FieldMemOperand(array_pointer, | |
| 1356 SharedFunctionInfo::kOffsetToPreviousLiterals)); | |
| 1357 __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); | |
| 1358 __ JumpIfSmi(temp, &gotta_call_runtime); | |
| 1359 | |
| 1360 // Save the literals in the closure. | |
| 1361 __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0); | |
| 1362 __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r6, | |
| 1363 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
| 1364 OMIT_SMI_CHECK); | |
| 1365 | |
| 1366 // Code available? | |
| 1367 Register entry = r6; | |
| 1368 __ LoadP(entry, | |
| 1369 FieldMemOperand(array_pointer, | |
| 1370 SharedFunctionInfo::kOffsetToPreviousCachedCode)); | |
| 1371 __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); | |
| 1372 __ JumpIfSmi(entry, &maybe_call_runtime); | |
| 1373 | |
| 1374 // Found literals and code. Get them into the closure and return. | |
| 1375 // Store code entry in the closure. | |
| 1376 __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 1377 | |
| 1378 Label install_optimized_code_and_tailcall; | |
| 1379 __ bind(&install_optimized_code_and_tailcall); | |
| 1380 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0); | |
| 1381 __ RecordWriteCodeEntryField(closure, entry, r7); | |
| 1382 | |
| 1383 // Link the closure into the optimized function list. | |
| 1384 // r6 : code entry | |
| 1385 // r9: native context | |
| 1386 // r3 : closure | |
| 1387 __ LoadP( | |
| 1388 r7, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); | |
| 1389 __ StoreP(r7, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset), | |
| 1390 r0); | |
| 1391 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r7, temp, | |
| 1392 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
| 1393 OMIT_SMI_CHECK); | |
| 1394 const int function_list_offset = | |
| 1395 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST); | |
| 1396 __ StoreP( | |
| 1397 closure, | |
| 1398 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0); | |
| 1399 // Save closure before the write barrier. | |
| 1400 __ LoadRR(r7, closure); | |
| 1401 __ RecordWriteContextSlot(native_context, function_list_offset, r7, temp, | |
| 1402 kLRHasNotBeenSaved, kDontSaveFPRegs); | |
| 1403 __ JumpToJSEntry(entry); | |
| 1404 | |
| 1405 __ bind(&loop_bottom); | |
| 1406 __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength), | |
| 1407 r0); | |
| 1408 __ CmpSmiLiteral(index, Smi::FromInt(1), r0); | |
| 1409 __ bgt(&loop_top); | |
| 1410 | |
| 1411 // We found neither literals nor code. | |
| 1412 __ b(&gotta_call_runtime); | |
| 1413 | |
| 1414 __ bind(&maybe_call_runtime); | |
| 1415 | |
| 1416 // Last possibility. Check the context-free optimized code map entry. | |
| 1417 __ LoadP(entry, | |
| 1418 FieldMemOperand(map, FixedArray::kHeaderSize + | |
| 1419 SharedFunctionInfo::kSharedCodeIndex)); | |
| 1420 __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); | |
| 1421 __ JumpIfSmi(entry, &try_shared); | |
| 1422 | |
| 1423 // Store code entry in the closure. | |
| 1424 __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 1425 __ b(&install_optimized_code_and_tailcall); | |
| 1426 | |
| 1427 __ bind(&try_shared); | |
| 1428 // Is the full code valid? | |
| 1429 __ LoadP(entry, | |
| 1430 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | |
| 1431 __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); | |
| 1432 __ LoadlW(r7, FieldMemOperand(entry, Code::kFlagsOffset)); | |
| 1433 __ DecodeField<Code::KindField>(r7); | |
| 1434 __ CmpP(r7, Operand(Code::BUILTIN)); | |
| 1435 __ beq(&gotta_call_runtime); | |
| 1436 // Yes, install the full code. | |
| 1437 __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 1438 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0); | |
| 1439 __ RecordWriteCodeEntryField(closure, entry, r7); | |
| 1440 __ JumpToJSEntry(entry); | |
| 1441 | |
| 1442 __ bind(&gotta_call_runtime); | |
| 1443 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); | |
| 1444 } | |
| 1445 | |
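| // Editor's note: a minimal standalone C++ sketch (not V8 code) of the | |
| // optimized-code-map search that Generate_CompileLazy emits above. The | |
| // Entry layout and kNoneOsrId are illustrative assumptions; the shared- | |
| // code fallback (maybe_call_runtime/try_shared) is omitted here. | |
| #include <vector> | |
| struct Entry { | |
|   const void* context;   // weak: null once the native context dies | |
|   int osr_ast_id;        // kNoneOsrId for a regular (non-OSR) entry | |
|   const void* literals;  // weak: null once collected | |
|   const void* code;      // weak: null once collected | |
| }; | |
| constexpr int kNoneOsrId = -1;  // stands in for BailoutId::None() | |
| // Walk the map backwards, as the generated loop does. | |
| const Entry* FindCachedCode(const std::vector<Entry>& map, | |
|                             const void* native_context) { | |
|   for (auto it = map.rbegin(); it != map.rend(); ++it) { | |
|     if (it->context != native_context) continue;  // wrong context | |
|     if (it->osr_ast_id != kNoneOsrId) continue;   // OSR-specific entry | |
|     if (it->literals == nullptr) return nullptr;  // bail to the runtime | |
|     return &*it;  // caller still checks it->code before installing | |
|   } | |
|   return nullptr;  // found neither literals nor code | |
| } | |
| | |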
| 1446 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) { | |
| 1447 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline); | |
| 1448 } | |
| 1449 | |
| 1450 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | |
| 1451 GenerateTailCallToReturnedCode(masm, | |
| 1452 Runtime::kCompileOptimized_NotConcurrent); | |
| 1453 } | |
| 1454 | |
| 1455 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | |
| 1456 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); | |
| 1457 } | |
| 1458 | |
| 1459 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { | |
| 1460 // ----------- S t a t e ------------- | |
| 1461 // -- r2 : argument count (preserved for callee) | |
| 1462 // -- r3 : target function (preserved for callee) | |
| 1463 // -- r5 : new target (preserved for callee) | |
| 1464 // ----------------------------------- | |
| 1465 Label failed; | |
| 1466 { | |
| 1467 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1468 // Push a copy of the target function and the new target. | |
| 1469 __ SmiTag(r2); | |
| 1470 // Push another copy as a parameter to the runtime call. | |
| 1471 __ Push(r2, r3, r5, r3); | |
| 1472 | |
| 1473 // Copy arguments from caller (stdlib, foreign, heap). | |
| 1474 for (int i = 2; i >= 0; --i) { | |
| 1475 __ LoadP(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + | |
| 1476 i * kPointerSize)); | |
| 1477 __ push(r4); | |
| 1478 } | |
| 1479 // Call the runtime; on success, unwind this frame and the parent frame. | |
| 1480 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); | |
| 1481 // A smi 0 is returned on failure, an object on success. | |
| 1482 __ JumpIfSmi(r2, &failed); | |
| 1483 scope.GenerateLeaveFrame(); | |
| 1484 __ Drop(4); | |
| 1485 __ Ret(); | |
| 1486 | |
| 1487 __ bind(&failed); | |
| 1488 // Restore target function and new target. | |
| 1489 __ Pop(r2, r3, r5); | |
| 1490 __ SmiUntag(r2); | |
| 1491 } | |
| 1492 // On failure, tail call back to regular JS. | |
| 1493 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); | |
| 1494 } | |
| 1495 | |
| 1496 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | |
| 1497 // For now, we are relying on the fact that make_code_young doesn't do any | |
| 1498 // garbage collection which allows us to save/restore the registers without | |
| 1499 // worrying about which of them contain pointers. We also don't build an | |
| 1500 // internal frame to make the code faster, since we shouldn't have to do stack | |
| 1501 // crawls in MakeCodeYoung. This seems a bit fragile. | |
| 1502 | |
| 1503 // Point r2 at the start of the PlatformCodeAge sequence. | |
| 1504 __ CleanseP(r14); | |
| 1505 __ SubP(r14, Operand(kCodeAgingSequenceLength)); | |
| 1506 __ LoadRR(r2, r14); | |
| 1507 | |
| 1508 __ pop(r14); | |
| 1509 | |
| 1510 // The following registers must be saved and restored when calling through to | |
| 1511 // the runtime: | |
| 1512 // r2 - contains return address (beginning of patch sequence) | |
| 1513 // r3 - isolate | |
| 1514 // r5 - new target | |
| 1515 // lr - return address | |
| 1516 FrameScope scope(masm, StackFrame::MANUAL); | |
| 1517 __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); | |
| 1518 __ PrepareCallCFunction(2, 0, r4); | |
| 1519 __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
| 1520 __ CallCFunction( | |
| 1521 ExternalReference::get_make_code_young_function(masm->isolate()), 2); | |
| 1522 __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); | |
| 1523 __ LoadRR(ip, r2); | |
| 1524 __ Jump(ip); | |
| 1525 } | |
| 1526 | |
| 1527 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ | |
| 1528 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ | |
| 1529 MacroAssembler* masm) { \ | |
| 1530 GenerateMakeCodeYoungAgainCommon(masm); \ | |
| 1531 } \ | |
| 1532 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ | |
| 1533 MacroAssembler* masm) { \ | |
| 1534 GenerateMakeCodeYoungAgainCommon(masm); \ | |
| 1535 } | |
| 1536 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) | |
| 1537 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR | |
| 1538 | |
| 1539 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { | |
| 1540 // For now, we are relying on the fact that make_code_young doesn't do any | |
| 1541 // garbage collection which allows us to save/restore the registers without | |
| 1542 // worrying about which of them contain pointers. We also don't build an | |
| 1543 // internal frame to make the code faster, since we shouldn't have to do stack | |
| 1544 // crawls in MakeCodeYoung. This seems a bit fragile. | |
| 1545 | |
| 1546 // Point r2 at the start of the PlatformCodeAge sequence. | |
| 1547 __ CleanseP(r14); | |
| 1548 __ SubP(r14, Operand(kCodeAgingSequenceLength)); | |
| 1549 __ LoadRR(r2, r14); | |
| 1550 | |
| 1551 __ pop(r14); | |
| 1552 | |
| 1553 // The following registers must be saved and restored when calling through to | |
| 1554 // the runtime: | |
| 1555 // r2 - contains return address (beginning of patch sequence) | |
| 1556 // r3 - isolate | |
| 1557 // r5 - new target | |
| 1558 // lr - return address | |
| 1559 FrameScope scope(masm, StackFrame::MANUAL); | |
| 1560 __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); | |
| 1561 __ PrepareCallCFunction(2, 0, r4); | |
| 1562 __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
| 1563 __ CallCFunction( | |
| 1564 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), | |
| 1565 2); | |
| 1566 __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit()); | |
| 1567 __ LoadRR(ip, r2); | |
| 1568 | |
| 1569 // Perform prologue operations usually performed by the young code stub. | |
| 1570 __ PushStandardFrame(r3); | |
| 1571 | |
| 1572 // Jump to point after the code-age stub. | |
| 1573 __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength)); | |
| 1574 __ Jump(r2); | |
| 1575 } | |
| 1576 | |
| 1577 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | |
| 1578 GenerateMakeCodeYoungAgainCommon(masm); | |
| 1579 } | |
| 1580 | |
| 1581 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { | |
| 1582 Generate_MarkCodeAsExecutedOnce(masm); | |
| 1583 } | |
| 1584 | |
| 1585 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | |
| 1586 SaveFPRegsMode save_doubles) { | |
| 1587 { | |
| 1588 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1589 | |
| 1590 // Preserve registers across notification, this is important for compiled | |
| 1591 // stubs that tail call the runtime on deopts passing their parameters in | |
| 1592 // registers. | |
| 1593 __ MultiPush(kJSCallerSaved | kCalleeSaved); | |
| 1594 // Pass the function and deoptimization type to the runtime system. | |
| 1595 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); | |
| 1596 __ MultiPop(kJSCallerSaved | kCalleeSaved); | |
| 1597 } | |
| 1598 | |
| 1599 __ la(sp, MemOperand(sp, kPointerSize)); // Ignore state | |
| 1600 __ Ret(); // Jump to miss handler | |
| 1601 } | |
| 1602 | |
| 1603 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | |
| 1604 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | |
| 1605 } | |
| 1606 | |
| 1607 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | |
| 1608 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | |
| 1609 } | |
| 1610 | |
| 1611 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | |
| 1612 Deoptimizer::BailoutType type) { | |
| 1613 { | |
| 1614 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1615 // Pass the function and deoptimization type to the runtime system. | |
| 1616 __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type))); | |
| 1617 __ push(r2); | |
| 1618 __ CallRuntime(Runtime::kNotifyDeoptimized); | |
| 1619 } | |
| 1620 | |
| 1621 // Get the full codegen state from the stack and untag it -> r8. | |
| 1622 __ LoadP(r8, MemOperand(sp, 0 * kPointerSize)); | |
| 1623 __ SmiUntag(r8); | |
| 1624 // Switch on the state. | |
| 1625 Label with_tos_register, unknown_state; | |
| 1626 __ CmpP( | |
| 1627 r8, | |
| 1628 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS))); | |
| 1629 __ bne(&with_tos_register); | |
| 1630 __ la(sp, MemOperand(sp, 1 * kPointerSize)); // Remove state. | |
| 1631 __ Ret(); | |
| 1632 | |
| 1633 __ bind(&with_tos_register); | |
| 1634 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code()); | |
| 1635 __ LoadP(r2, MemOperand(sp, 1 * kPointerSize)); | |
| 1636 __ CmpP( | |
| 1637 r8, | |
| 1638 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER))); | |
| 1639 __ bne(&unknown_state); | |
| 1640 __ la(sp, MemOperand(sp, 2 * kPointerSize)); // Remove state. | |
| 1641 __ Ret(); | |
| 1642 | |
| 1643 __ bind(&unknown_state); | |
| 1644 __ stop("no cases left"); | |
| 1645 } | |
| 1646 | |
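| // Editor's note: a standalone sketch (not V8 code) of the dispatch that | |
| // Generate_NotifyDeoptimizedHelper performs after the runtime call: pop | |
| // one word of bailout state; for TOS_REGISTER an extra slot holds the | |
| // value that must be reloaded into the accumulator (r2). The enum values | |
| // are illustrative. | |
| #include <cassert> | |
| #include <cstdint> | |
| enum class BailoutState : intptr_t { NO_REGISTERS = 0, TOS_REGISTER = 1 }; | |
| intptr_t* PopDeoptState(intptr_t* sp, intptr_t* accumulator) { | |
|   switch (static_cast<BailoutState>(sp[0])) { | |
|     case BailoutState::NO_REGISTERS: | |
|       return sp + 1;         // drop the state word only | |
|     case BailoutState::TOS_REGISTER: | |
|       *accumulator = sp[1];  // reload the top-of-stack value | |
|       return sp + 2;         // drop the state word and the TOS slot | |
|   } | |
|   assert(false && "no cases left"); | |
|   return sp; | |
| } | |
| | |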
| 1647 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | |
| 1648 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | |
| 1649 } | |
| 1650 | |
| 1651 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { | |
| 1652 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | |
| 1653 } | |
| 1654 | |
| 1655 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | |
| 1656 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | |
| 1657 } | |
| 1658 | |
| 1659 // Clobbers registers {r6, r7, r8, r9}. | |
| 1660 void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, | |
| 1661 Register function_template_info, | |
| 1662 Label* receiver_check_failed) { | |
| 1663 Register signature = r6; | |
| 1664 Register map = r7; | |
| 1665 Register constructor = r8; | |
| 1666 Register scratch = r9; | |
| 1667 | |
| 1668 // If there is no signature, return the holder. | |
| 1669 __ LoadP(signature, FieldMemOperand(function_template_info, | |
| 1670 FunctionTemplateInfo::kSignatureOffset)); | |
| 1671 Label receiver_check_passed; | |
| 1672 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, | |
| 1673 &receiver_check_passed); | |
| 1674 | |
| 1675 // Walk the prototype chain. | |
| 1676 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
| 1677 Label prototype_loop_start; | |
| 1678 __ bind(&prototype_loop_start); | |
| 1679 | |
| 1680 // Get the constructor, if any. | |
| 1681 __ GetMapConstructor(constructor, map, scratch, scratch); | |
| 1682 __ CmpP(scratch, Operand(JS_FUNCTION_TYPE)); | |
| 1683 Label next_prototype; | |
| 1684 __ bne(&next_prototype); | |
| 1685 Register type = constructor; | |
| 1686 __ LoadP(type, | |
| 1687 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset)); | |
| 1688 __ LoadP(type, | |
| 1689 FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); | |
| 1690 | |
| 1691 // Loop through the chain of inheriting function templates. | |
| 1692 Label function_template_loop; | |
| 1693 __ bind(&function_template_loop); | |
| 1694 | |
| 1695 // If the signatures match, we have a compatible receiver. | |
| 1696 __ CmpP(signature, type); | |
| 1697 __ beq(&receiver_check_passed); | |
| 1698 | |
| 1699 // If the current type is not a FunctionTemplateInfo, load the next prototype | |
| 1700 // in the chain. | |
| 1701 __ JumpIfSmi(type, &next_prototype); | |
| 1702 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE); | |
| 1703 __ bne(&next_prototype); | |
| 1704 | |
| 1705 // Otherwise load the parent function template and iterate. | |
| 1706 __ LoadP(type, | |
| 1707 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset)); | |
| 1708 __ b(&function_template_loop); | |
| 1709 | |
| 1710 // Load the next prototype. | |
| 1711 __ bind(&next_prototype); | |
| 1712 __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset)); | |
| 1713 __ DecodeField<Map::HasHiddenPrototype>(scratch); | |
| 1714 __ beq(receiver_check_failed); | |
| 1715 | |
| 1716 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); | |
| 1717 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
| 1718 // Iterate. | |
| 1719 __ b(&prototype_loop_start); | |
| 1720 | |
| 1721 __ bind(&receiver_check_passed); | |
| 1722 } | |
| 1723 | |
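| // Editor's note: a standalone sketch (not V8 code) of the check above. | |
| // The outer loop walks the receiver's hidden-prototype chain; the inner | |
| // loop walks a constructor's FunctionTemplateInfo parent chain, looking | |
| // for the expected signature. All types and fields are illustrative. | |
| struct TemplateInfo { const TemplateInfo* parent; }; | |
| struct MapSketch { | |
|   const TemplateInfo* ctor_template;  // null if the ctor has no template | |
|   bool has_hidden_prototype; | |
|   const MapSketch* prototype_map; | |
| }; | |
| bool IsCompatibleReceiver(const MapSketch* map, const TemplateInfo* sig) { | |
|   if (sig == nullptr) return true;  // no signature: any receiver is fine | |
|   while (map != nullptr) { | |
|     for (const TemplateInfo* t = map->ctor_template; t != nullptr; | |
|          t = t->parent) { | |
|       if (t == sig) return true;  // signatures match | |
|     } | |
|     if (!map->has_hidden_prototype) return false;  // chain ends here | |
|     map = map->prototype_map;  // iterate to the next prototype | |
|   } | |
|   return false; | |
| } | |
| | |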
| 1724 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { | |
| 1725 // ----------- S t a t e ------------- | |
| 1726 // -- r2 : number of arguments excluding receiver | |
| 1727 // -- r3 : callee | |
| 1728 // -- lr : return address | |
| 1729 // -- sp[0] : last argument | |
| 1730 // -- ... | |
| 1731 // -- sp[4 * (argc - 1)] : first argument | |
| 1732 // -- sp[4 * argc] : receiver | |
| 1733 // ----------------------------------- | |
| 1734 | |
| 1735 // Load the FunctionTemplateInfo. | |
| 1736 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); | |
| 1737 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset)); | |
| 1738 | |
| 1739 // Do the compatible receiver check. | |
| 1740 Label receiver_check_failed; | |
| 1741 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); | |
| 1742 __ LoadP(r4, MemOperand(sp, r1)); | |
| 1743 CompatibleReceiverCheck(masm, r4, r5, &receiver_check_failed); | |
| 1744 | |
| 1745 // Get the callback offset from the FunctionTemplateInfo, and jump to the | |
| 1746 // beginning of the code. | |
| 1747 __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset)); | |
| 1748 __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset)); | |
| 1749 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 1750 __ JumpToJSEntry(ip); | |
| 1751 | |
| 1752 // Compatible receiver check failed: throw an Illegal Invocation exception. | |
| 1753 __ bind(&receiver_check_failed); | |
| 1754 // Drop the arguments (including the receiver). | |
| 1755 __ AddP(r1, r1, Operand(kPointerSize)); | |
| 1756 __ AddP(sp, sp, r1); | |
| 1757 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); | |
| 1758 } | |
| 1759 | |
| 1760 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | |
| 1761 // Lookup the function in the JavaScript frame. | |
| 1762 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 1763 { | |
| 1764 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1765 // Pass function as argument. | |
| 1766 __ push(r2); | |
| 1767 __ CallRuntime(Runtime::kCompileForOnStackReplacement); | |
| 1768 } | |
| 1769 | |
| 1770 // If the code object is null, just return to the unoptimized code. | |
| 1771 Label skip; | |
| 1772 __ CmpSmiLiteral(r2, Smi::FromInt(0), r0); | |
| 1773 __ bne(&skip); | |
| 1774 __ Ret(); | |
| 1775 | |
| 1776 __ bind(&skip); | |
| 1777 | |
| 1778 // Load deoptimization data from the code object. | |
| 1779 // <deopt_data> = <code>[#deoptimization_data_offset] | |
| 1780 __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset)); | |
| 1781 | |
| 1782 // Load the OSR entrypoint offset from the deoptimization data. | |
| 1783 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] | |
| 1784 __ LoadP( | |
| 1785 r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt( | |
| 1786 DeoptimizationInputData::kOsrPcOffsetIndex))); | |
| 1787 __ SmiUntag(r3); | |
| 1788 | |
| 1789 // Compute the target address = code_obj + header_size + osr_offset | |
| 1790 // <entry_addr> = <code_obj> + #header_size + <osr_offset> | |
| 1791 __ AddP(r2, r3); | |
| 1792 __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 1793 __ LoadRR(r14, r0); | |
| 1794 | |
| 1795 // And "return" to the OSR entry point of the function. | |
| 1796 __ Ret(); | |
| 1797 } | |
| 1798 | |
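| // Editor's note: the entry-point arithmetic above as a plain sketch (not | |
| // V8 code); the builtin then loads this address into r14 and "returns" | |
| // into the optimized code. The two constants are illustrative examples. | |
| #include <cstdint> | |
| constexpr intptr_t kCodeHeaderSizeExample = 96;  // Code::kHeaderSize | |
| constexpr intptr_t kHeapObjectTagExample = 1; | |
| intptr_t OsrEntryAddress(intptr_t tagged_code_obj, intptr_t osr_pc_offset) { | |
|   // <entry_addr> = <code_obj> + #header_size + <osr_offset>, untagging | |
|   // the heap-object pointer on the way. | |
|   return tagged_code_obj + osr_pc_offset + kCodeHeaderSizeExample - | |
|          kHeapObjectTagExample; | |
| } | |
| | |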
| 1799 // static | |
| 1800 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm, | |
| 1801 int field_index) { | |
| 1802 // ----------- S t a t e ------------- | |
| 1803 // -- r2 : number of arguments | |
| 1804 // -- r3 : function | |
| 1805 // -- cp : context | |
| 1807 // -- lr : return address | |
| 1808 // -- sp[0] : receiver | |
| 1809 // ----------------------------------- | |
| 1810 | |
| 1811 // 1. Pop receiver into r2 and check that it's actually a JSDate object. | |
| 1812 Label receiver_not_date; | |
| 1813 { | |
| 1814 __ Pop(r2); | |
| 1815 __ JumpIfSmi(r2, &receiver_not_date); | |
| 1816 __ CompareObjectType(r2, r4, r5, JS_DATE_TYPE); | |
| 1817 __ bne(&receiver_not_date); | |
| 1818 } | |
| 1819 | |
| 1820 // 2. Load the specified date field, falling back to the runtime as necessary. | |
| 1821 if (field_index == JSDate::kDateValue) { | |
| 1822 __ LoadP(r2, FieldMemOperand(r2, JSDate::kValueOffset)); | |
| 1823 } else { | |
| 1824 if (field_index < JSDate::kFirstUncachedField) { | |
| 1825 Label stamp_mismatch; | |
| 1826 __ mov(r3, Operand(ExternalReference::date_cache_stamp(masm->isolate()))); | |
| 1827 __ LoadP(r3, MemOperand(r3)); | |
| 1828 __ LoadP(ip, FieldMemOperand(r2, JSDate::kCacheStampOffset)); | |
| 1829 __ CmpP(r3, ip); | |
| 1830 __ bne(&stamp_mismatch); | |
| 1831 __ LoadP(r2, FieldMemOperand( | |
| 1832 r2, JSDate::kValueOffset + field_index * kPointerSize)); | |
| 1833 __ Ret(); | |
| 1834 __ bind(&stamp_mismatch); | |
| 1835 } | |
| 1836 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
| 1837 __ PrepareCallCFunction(2, r3); | |
| 1838 __ LoadSmiLiteral(r3, Smi::FromInt(field_index)); | |
| 1839 __ CallCFunction( | |
| 1840 ExternalReference::get_date_field_function(masm->isolate()), 2); | |
| 1841 } | |
| 1842 __ Ret(); | |
| 1843 | |
| 1844 // 3. Raise a TypeError if the receiver is not a date. | |
| 1845 __ bind(&receiver_not_date); | |
| 1846 { | |
| 1847 FrameScope scope(masm, StackFrame::MANUAL); | |
| 1848 __ push(r2); | |
| 1849 __ LoadSmiLiteral(r2, Smi::FromInt(0)); | |
| 1850 __ EnterBuiltinFrame(cp, r3, r2); | |
| 1851 __ CallRuntime(Runtime::kThrowNotDateError); | |
| 1852 } | |
| 1853 } | |
| 1854 | |
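| // Editor's note: a standalone sketch (not V8 code) of the fast path | |
| // above: cached date fields are valid only while the JSDate's stamp | |
| // equals the isolate-wide date cache stamp; otherwise (and for fields at | |
| // or beyond kFirstUncachedField) the C function recomputes. The field | |
| // indexing and the constant below are illustrative. | |
| #include <cstdint> | |
| constexpr int kFirstUncachedFieldExample = 7; | |
| struct JSDateSketch { | |
|   int64_t cache_stamp; | |
|   int64_t fields[8];  // fields[0] is the primitive date value itself | |
| }; | |
| int64_t GetDateField(const JSDateSketch& date, int field_index, | |
|                      int64_t isolate_stamp, | |
|                      int64_t (*fallback)(const JSDateSketch&, int)) { | |
|   if (field_index == 0) return date.fields[0];  // kDateValue: no cache | |
|   if (field_index < kFirstUncachedFieldExample && | |
|       date.cache_stamp == isolate_stamp) { | |
|     return date.fields[field_index];  // serve from the cache | |
|   } | |
|   return fallback(date, field_index);  // stamp mismatch: recompute | |
| } | |
| | |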
| 1855 // static | |
| 1856 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { | |
| 1857 // ----------- S t a t e ------------- | |
| 1858 // -- r2 : argc | |
| 1859 // -- sp[0] : argArray | |
| 1860 // -- sp[4] : thisArg | |
| 1861 // -- sp[8] : receiver | |
| 1862 // ----------------------------------- | |
| 1863 | |
| 1864 // 1. Load receiver into r3, argArray into r2 (if present), remove all | |
| 1865 // arguments from the stack (including the receiver), and push thisArg (if | |
| 1866 // present) instead. | |
| 1867 { | |
| 1868 Label skip; | |
| 1869 Register arg_size = r4; | |
| 1870 Register new_sp = r5; | |
| 1871 Register scratch = r6; | |
| 1872 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); | |
| 1873 __ AddP(new_sp, sp, arg_size); | |
| 1874 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | |
| 1875 __ LoadRR(scratch, r2); | |
| 1876 __ LoadP(r3, MemOperand(new_sp, 0)); // receiver | |
| 1877 __ CmpP(arg_size, Operand(kPointerSize)); | |
| 1878 __ blt(&skip); | |
| 1879 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg | |
| 1880 __ beq(&skip); | |
| 1881 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argArray | |
| 1882 __ bind(&skip); | |
| 1883 __ LoadRR(sp, new_sp); | |
| 1884 __ StoreP(scratch, MemOperand(sp, 0)); | |
| 1885 } | |
| 1886 | |
| 1887 // ----------- S t a t e ------------- | |
| 1888 // -- r2 : argArray | |
| 1889 // -- r3 : receiver | |
| 1890 // -- sp[0] : thisArg | |
| 1891 // ----------------------------------- | |
| 1892 | |
| 1893 // 2. Make sure the receiver is actually callable. | |
| 1894 Label receiver_not_callable; | |
| 1895 __ JumpIfSmi(r3, &receiver_not_callable); | |
| 1896 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); | |
| 1897 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); | |
| 1898 __ TestBit(r6, Map::kIsCallable); | |
| 1899 __ beq(&receiver_not_callable); | |
| 1900 | |
| 1901 // 3. Tail call with no arguments if argArray is null or undefined. | |
| 1902 Label no_arguments; | |
| 1903 __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments); | |
| 1904 __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments); | |
| 1905 | |
| 1906 // 4a. Apply the receiver to the given argArray (passing undefined for | |
| 1907 // new.target). | |
| 1908 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | |
| 1909 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
| 1910 | |
| 1911 // 4b. The argArray is either null or undefined, so we tail call without any | |
| 1912 // arguments to the receiver. | |
| 1913 __ bind(&no_arguments); | |
| 1914 { | |
| 1915 __ LoadImmP(r2, Operand::Zero()); | |
| 1916 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
| 1917 } | |
| 1918 | |
| 1919 // 4c. The receiver is not callable, throw an appropriate TypeError. | |
| 1920 __ bind(&receiver_not_callable); | |
| 1921 { | |
| 1922 __ StoreP(r3, MemOperand(sp, 0)); | |
| 1923 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | |
| 1924 } | |
| 1925 } | |
| 1926 | |
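| // Editor's note: a standalone sketch (not V8 code) of step 1 above, in | |
| // logical (JS) order rather than machine stack order: thisArg and | |
| // argArray default to undefined when absent, and the frame is collapsed | |
| // to a single slot holding thisArg. | |
| #include <cstddef> | |
| struct ApplyOperands { void* receiver; void* this_arg; void* arg_array; }; | |
| // args[0] = receiver, args[1] = thisArg, args[2] = argArray (if present). | |
| ApplyOperands SelectApplyOperands(void** args, std::size_t argc, | |
|                                   void* undefined) { | |
|   ApplyOperands out{args[0], undefined, undefined}; | |
|   if (argc >= 1) out.this_arg = args[1]; | |
|   if (argc >= 2) out.arg_array = args[2]; | |
|   return out; | |
| } | |
| | |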
| 1927 // static | |
| 1928 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { | |
| 1929 // 1. Make sure we have at least one argument. | |
| 1930 // r2: actual number of arguments | |
| 1931 { | |
| 1932 Label done; | |
| 1933 __ CmpP(r2, Operand::Zero()); | |
| 1934 __ bne(&done, Label::kNear); | |
| 1935 __ PushRoot(Heap::kUndefinedValueRootIndex); | |
| 1936 __ AddP(r2, Operand(1)); | |
| 1937 __ bind(&done); | |
| 1938 } | |
| 1939 | |
| 1940 // r2: actual number of arguments | |
| 1941 // 2. Get the callable to call (passed as receiver) from the stack. | |
| 1942 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2)); | |
| 1943 __ LoadP(r3, MemOperand(sp, r4)); | |
| 1944 | |
| 1945 // 3. Shift arguments and return address one slot down on the stack | |
| 1946 // (overwriting the original receiver). Adjust argument count to make | |
| 1947 // the original first argument the new receiver. | |
| 1948 // r2: actual number of arguments | |
| 1949 // r3: callable | |
| 1950 { | |
| 1951 Label loop; | |
| 1952 // Calculate the copy start address (destination). Copy end address is sp. | |
| 1953 __ AddP(r4, sp, r4); | |
| 1954 | |
| 1955 __ bind(&loop); | |
| 1956 __ LoadP(ip, MemOperand(r4, -kPointerSize)); | |
| 1957 __ StoreP(ip, MemOperand(r4)); | |
| 1958 __ SubP(r4, Operand(kPointerSize)); | |
| 1959 __ CmpP(r4, sp); | |
| 1960 __ bne(&loop); | |
| 1961 // Adjust the actual number of arguments and remove the top element | |
| 1962 // (which is a copy of the last argument). | |
| 1963 __ SubP(r2, Operand(1)); | |
| 1964 __ pop(); | |
| 1965 } | |
| 1966 | |
| 1967 // 4. Call the callable. | |
| 1968 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
| 1969 } | |
| 1970 | |
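| // Editor's note: a standalone sketch (not V8 code) of step 3 above: | |
| // shift every argument down one slot so the first argument becomes the | |
| // receiver, then drop the duplicated top element and decrement argc. | |
| #include <cstddef> | |
| // slots[0] = receiver, slots[1..argc] = arguments (logical order). | |
| std::size_t ShiftOutReceiver(void** slots, std::size_t argc) { | |
|   for (std::size_t i = 0; i < argc; ++i) { | |
|     slots[i] = slots[i + 1];  // overwrite the original receiver et seq. | |
|   } | |
|   return argc - 1;  // the callable was consumed; one fewer argument | |
| } | |
| | |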
| 1971 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { | |
| 1972 // ----------- S t a t e ------------- | |
| 1973 // -- r2 : argc | |
| 1974 // -- sp[0] : argumentsList | |
| 1975 // -- sp[4] : thisArgument | |
| 1976 // -- sp[8] : target | |
| 1977 // -- sp[12] : receiver | |
| 1978 // ----------------------------------- | |
| 1979 | |
| 1980 // 1. Load target into r3 (if present), argumentsList into r2 (if present), | |
| 1981 // remove all arguments from the stack (including the receiver), and push | |
| 1982 // thisArgument (if present) instead. | |
| 1983 { | |
| 1984 Label skip; | |
| 1985 Register arg_size = r4; | |
| 1986 Register new_sp = r5; | |
| 1987 Register scratch = r6; | |
| 1988 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); | |
| 1989 __ AddP(new_sp, sp, arg_size); | |
| 1990 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | |
| 1991 __ LoadRR(scratch, r3); | |
| 1992 __ LoadRR(r2, r3); | |
| 1993 __ CmpP(arg_size, Operand(kPointerSize)); | |
| 1994 __ blt(&skip); | |
| 1995 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target | |
| 1996 __ beq(&skip); | |
| 1997 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument | |
| 1998 __ CmpP(arg_size, Operand(2 * kPointerSize)); | |
| 1999 __ beq(&skip); | |
| 2000 __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList | |
| 2001 __ bind(&skip); | |
| 2002 __ LoadRR(sp, new_sp); | |
| 2003 __ StoreP(scratch, MemOperand(sp, 0)); | |
| 2004 } | |
| 2005 | |
| 2006 // ----------- S t a t e ------------- | |
| 2007 // -- r2 : argumentsList | |
| 2008 // -- r3 : target | |
| 2009 // -- sp[0] : thisArgument | |
| 2010 // ----------------------------------- | |
| 2011 | |
| 2012 // 2. Make sure the target is actually callable. | |
| 2013 Label target_not_callable; | |
| 2014 __ JumpIfSmi(r3, &target_not_callable); | |
| 2015 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); | |
| 2016 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); | |
| 2017 __ TestBit(r6, Map::kIsCallable); | |
| 2018 __ beq(&target_not_callable); | |
| 2019 | |
| 2020 // 3a. Apply the target to the given argumentsList (passing undefined for | |
| 2021 // new.target). | |
| 2022 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | |
| 2023 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
| 2024 | |
| 2025 // 3b. The target is not callable, throw an appropriate TypeError. | |
| 2026 __ bind(&target_not_callable); | |
| 2027 { | |
| 2028 __ StoreP(r3, MemOperand(sp, 0)); | |
| 2029 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); | |
| 2030 } | |
| 2031 } | |
| 2032 | |
| 2033 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { | |
| 2034 // ----------- S t a t e ------------- | |
| 2035 // -- r2 : argc | |
| 2036 // -- sp[0] : new.target (optional) | |
| 2037 // -- sp[4] : argumentsList | |
| 2038 // -- sp[8] : target | |
| 2039 // -- sp[12] : receiver | |
| 2040 // ----------------------------------- | |
| 2041 | |
| 2042 // 1. Load target into r3 (if present), argumentsList into r2 (if present), | |
| 2043 // new.target into r5 (if present, otherwise use target), remove all | |
| 2044 // arguments from the stack (including the receiver), and push undefined | |
| 2045 // as the receiver instead. | |
| 2046 { | |
| 2047 Label skip; | |
| 2048 Register arg_size = r4; | |
| 2049 Register new_sp = r6; | |
| 2050 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2)); | |
| 2051 __ AddP(new_sp, sp, arg_size); | |
| 2052 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | |
| 2053 __ LoadRR(r2, r3); | |
| 2054 __ LoadRR(r5, r3); | |
| 2055 __ StoreP(r3, MemOperand(new_sp, 0)); // receiver (undefined) | |
| 2056 __ CmpP(arg_size, Operand(kPointerSize)); | |
| 2057 __ blt(&skip); | |
| 2058 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target | |
| 2059 __ LoadRR(r5, r3); // new.target defaults to target | |
| 2060 __ beq(&skip); | |
| 2061 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList | |
| 2062 __ CmpP(arg_size, Operand(2 * kPointerSize)); | |
| 2063 __ beq(&skip); | |
| 2064 __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize)); // new.target | |
| 2065 __ bind(&skip); | |
| 2066 __ LoadRR(sp, new_sp); | |
| 2067 } | |
| 2068 | |
| 2069 // ----------- S t a t e ------------- | |
| 2070 // -- r2 : argumentsList | |
| 2071 // -- r5 : new.target | |
| 2072 // -- r3 : target | |
| 2073 // -- sp[0] : receiver (undefined) | |
| 2074 // ----------------------------------- | |
| 2075 | |
| 2076 // 2. Make sure the target is actually a constructor. | |
| 2077 Label target_not_constructor; | |
| 2078 __ JumpIfSmi(r3, &target_not_constructor); | |
| 2079 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); | |
| 2080 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); | |
| 2081 __ TestBit(r6, Map::kIsConstructor); | |
| 2082 __ beq(&target_not_constructor); | |
| 2083 | |
| 2084 // 3. Make sure the new.target is actually a constructor. | |
| 2085 Label new_target_not_constructor; | |
| 2086 __ JumpIfSmi(r5, &new_target_not_constructor); | |
| 2087 __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset)); | |
| 2088 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); | |
| 2089 __ TestBit(r6, Map::kIsConstructor); | |
| 2090 __ beq(&new_target_not_constructor); | |
| 2091 | |
| 2092 // 4a. Construct the target with the given new.target and argumentsList. | |
| 2093 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); | |
| 2094 | |
| 2095 // 4b. The target is not a constructor, throw an appropriate TypeError. | |
| 2096 __ bind(&target_not_constructor); | |
| 2097 { | |
| 2098 __ StoreP(r3, MemOperand(sp, 0)); | |
| 2099 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | |
| 2100 } | |
| 2101 | |
| 2102 // 4c. The new.target is not a constructor, throw an appropriate TypeError. | |
| 2103 __ bind(&new_target_not_constructor); | |
| 2104 { | |
| 2105 __ StoreP(r5, MemOperand(sp, 0)); | |
| 2106 __ TailCallRuntime(Runtime::kThrowCalledNonCallable); | |
| 2107 } | |
| 2108 } | |
| 2109 | |
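| // Editor's note: a standalone sketch (not V8 code) of step 1 of | |
| // Reflect.construct above, in logical order: target and argumentsList | |
| // default to undefined, new.target defaults to target, and the receiver | |
| // slot left on the stack is always undefined. | |
| struct ConstructOperands { | |
|   void* target; | |
|   void* arguments_list; | |
|   void* new_target; | |
| }; | |
| ConstructOperands SelectConstructOperands(void** args, unsigned argc, | |
|                                           void* undefined) { | |
|   ConstructOperands out{undefined, undefined, undefined}; | |
|   if (argc >= 1) { out.target = args[0]; out.new_target = args[0]; } | |
|   if (argc >= 2) out.arguments_list = args[1]; | |
|   if (argc >= 3) out.new_target = args[2]; | |
|   return out; | |
| } | |
| | |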
| 2110 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, | |
| 2111 Label* stack_overflow) { | |
| 2112 // ----------- S t a t e ------------- | |
| 2113 // -- r2 : actual number of arguments | |
| 2114 // -- r3 : function (passed through to callee) | |
| 2115 // -- r4 : expected number of arguments | |
| 2116 // -- r5 : new target (passed through to callee) | |
| 2117 // ----------------------------------- | |
| 2118 // Check the stack for overflow. We are not trying to catch | |
| 2119 // interruptions (e.g. debug break and preemption) here, so the "real stack | |
| 2120 // limit" is checked. | |
| 2121 __ LoadRoot(r7, Heap::kRealStackLimitRootIndex); | |
| 2122 // Make r7 the space we have left. The stack might already be overflowed | |
| 2123 // here which will cause r7 to become negative. | |
| 2124 __ SubP(r7, sp, r7); | |
| 2125 // Check if the arguments will overflow the stack. | |
| 2126 __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2)); | |
| 2127 __ CmpP(r7, r0); | |
| 2128 __ ble(stack_overflow); // Signed comparison. | |
| 2129 } | |
| 2130 | |
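| // Editor's note: the overflow test above as a plain sketch (not V8 | |
| // code): compare the room between sp and the "real" stack limit against | |
| // what the expected arguments will need; the comparison is signed so an | |
| // already-blown stack (sp below the limit) also reports overflow. | |
| #include <cstdint> | |
| bool AdaptorWouldOverflow(uintptr_t sp, uintptr_t real_stack_limit, | |
|                           uintptr_t expected_args, uintptr_t ptr_size) { | |
|   intptr_t room = static_cast<intptr_t>(sp - real_stack_limit); | |
|   intptr_t need = static_cast<intptr_t>(expected_args * ptr_size); | |
|   return room <= need;  // the generated code branches on "ble" | |
| } | |
| | |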
| 2131 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | |
| 2132 __ SmiTag(r2); | |
| 2133 __ LoadSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | |
| 2134 // The stack is updated as follows: | |
| 2135 // old SP ---> | |
| 2136 // R14 Return Addr | |
| 2137 // Old FP <--- New FP | |
| 2138 // Arguments Adaptor SMI | |
| 2139 // Function | |
| 2140 // ArgC as SMI <--- New SP | |
| 2141 __ lay(sp, MemOperand(sp, -5 * kPointerSize)); | |
| 2142 | |
| 2143 // Cleanse the top nibble of 31-bit pointers. | |
| 2144 __ CleanseP(r14); | |
| 2145 __ StoreP(r14, MemOperand(sp, 4 * kPointerSize)); | |
| 2146 __ StoreP(fp, MemOperand(sp, 3 * kPointerSize)); | |
| 2147 __ StoreP(r6, MemOperand(sp, 2 * kPointerSize)); | |
| 2148 __ StoreP(r3, MemOperand(sp, 1 * kPointerSize)); | |
| 2149 __ StoreP(r2, MemOperand(sp, 0 * kPointerSize)); | |
| 2150 __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp + | |
| 2151 kPointerSize)); | |
| 2152 } | |
| 2153 | |
| 2154 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | |
| 2155 // ----------- S t a t e ------------- | |
| 2156 // -- r2 : result being passed through | |
| 2157 // ----------------------------------- | |
| 2158 // Get the number of arguments passed (as a smi), tear down the frame and | |
| 2159 // then tear down the parameters. | |
| 2160 __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | |
| 2161 kPointerSize))); | |
| 2162 int stack_adjustment = kPointerSize; // adjust for receiver | |
| 2163 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); | |
| 2164 __ SmiToPtrArrayOffset(r3, r3); | |
| 2165 __ lay(sp, MemOperand(sp, r3)); | |
| 2166 } | |
| 2167 | |
| 2168 // static | |
| 2169 void Builtins::Generate_Apply(MacroAssembler* masm) { | |
| 2170 // ----------- S t a t e ------------- | |
| 2171 // -- r2 : argumentsList | |
| 2172 // -- r3 : target | |
| 2173 // -- r5 : new.target (checked to be constructor or undefined) | |
| 2174 // -- sp[0] : thisArgument | |
| 2175 // ----------------------------------- | |
| 2176 | |
| 2177 // Create the list of arguments from the array-like argumentsList. | |
| 2178 { | |
| 2179 Label create_arguments, create_array, create_runtime, done_create; | |
| 2180 __ JumpIfSmi(r2, &create_runtime); | |
| 2181 | |
| 2182 // Load the map of argumentsList into r4. | |
| 2183 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset)); | |
| 2184 | |
| 2185 // Load native context into r6. | |
| 2186 __ LoadP(r6, NativeContextMemOperand()); | |
| 2187 | |
| 2188 // Check if argumentsList is an (unmodified) arguments object. | |
| 2189 __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); | |
| 2190 __ CmpP(ip, r4); | |
| 2191 __ beq(&create_arguments); | |
| 2192 __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX)); | |
| 2193 __ CmpP(ip, r4); | |
| 2194 __ beq(&create_arguments); | |
| 2195 | |
| 2196 // Check if argumentsList is a fast JSArray. | |
| 2197 __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE); | |
| 2198 __ beq(&create_array); | |
| 2199 | |
| 2200 // Ask the runtime to create the list (actually a FixedArray). | |
| 2201 __ bind(&create_runtime); | |
| 2202 { | |
| 2203 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
| 2204 __ Push(r3, r5, r2); | |
| 2205 __ CallRuntime(Runtime::kCreateListFromArrayLike); | |
| 2206 __ Pop(r3, r5); | |
| 2207 __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset)); | |
| 2208 __ SmiUntag(r4); | |
| 2209 } | |
| 2210 __ b(&done_create); | |
| 2211 | |
| 2212 // Try to create the list from an arguments object. | |
| 2213 __ bind(&create_arguments); | |
| 2214 __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset)); | |
| 2215 __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset)); | |
| 2216 __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset)); | |
| 2217 __ CmpP(r4, ip); | |
| 2218 __ bne(&create_runtime); | |
| 2219 __ SmiUntag(r4); | |
| 2220 __ LoadRR(r2, r6); | |
| 2221 __ b(&done_create); | |
| 2222 | |
| 2223 // Try to create the list from a JSArray object. | |
| 2224 __ bind(&create_array); | |
| 2225 __ LoadlB(r4, FieldMemOperand(r4, Map::kBitField2Offset)); | |
| 2226 __ DecodeField<Map::ElementsKindBits>(r4); | |
| 2227 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
| 2228 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
| 2229 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
| 2230 __ CmpP(r4, Operand(FAST_ELEMENTS)); | |
| 2231 __ bgt(&create_runtime); | |
| 2232 __ CmpP(r4, Operand(FAST_HOLEY_SMI_ELEMENTS)); | |
| 2233 __ beq(&create_runtime); | |
| 2234 __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset)); | |
| 2235 __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset)); | |
| 2236 __ SmiUntag(r4); | |
| 2237 | |
| 2238 __ bind(&done_create); | |
| 2239 } | |
| 2240 | |
| 2241 // Check for stack overflow. | |
| 2242 { | |
| 2243 // Check the stack for overflow. We are not trying to catch interruptions | |
| 2244 // (i.e. debug break and preemption) here, so check the "real stack limit". | |
| 2245 Label done; | |
| 2246 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex); | |
| 2247 // Make ip the space we have left. The stack might already be overflowed | |
| 2248 // here which will cause ip to become negative. | |
| 2249 __ SubP(ip, sp, ip); | |
| 2250 // Check if the arguments will overflow the stack. | |
| 2251 __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2)); | |
| 2252 __ CmpP(ip, r0); // Signed comparison. | |
| 2253 __ bgt(&done); | |
| 2254 __ TailCallRuntime(Runtime::kThrowStackOverflow); | |
| 2255 __ bind(&done); | |
| 2256 } | |
| 2257 | |
| 2258 // ----------- S t a t e ------------- | |
| 2259 // -- r3 : target | |
| 2260 // -- r2 : args (a FixedArray built from argumentsList) | |
| 2261 // -- r4 : len (number of elements to push from args) | |
| 2262 // -- r5 : new.target (checked to be constructor or undefined) | |
| 2263 // -- sp[0] : thisArgument | |
| 2264 // ----------------------------------- | |
| 2265 | |
| 2266 // Push arguments onto the stack (thisArgument is already on the stack). | |
| 2267 { | |
| 2268 Label loop, no_args; | |
| 2269 __ CmpP(r4, Operand::Zero()); | |
| 2270 __ beq(&no_args); | |
| 2271 __ AddP(r2, r2, | |
| 2272 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize)); | |
| 2273 __ LoadRR(r1, r4); | |
| 2274 __ bind(&loop); | |
| 2275 __ LoadP(r0, MemOperand(r2, kPointerSize)); | |
| 2276 __ la(r2, MemOperand(r2, kPointerSize)); | |
| 2277 __ push(r0); | |
| 2278 __ BranchOnCount(r1, &loop); | |
| 2279 __ bind(&no_args); | |
| 2280 __ LoadRR(r2, r4); | |
| 2281 } | |
| 2282 | |
| 2283 // Dispatch to Call or Construct depending on whether new.target is undefined. | |
| 2284 { | |
| 2285 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex); | |
| 2286 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq); | |
| 2287 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | |
| 2288 } | |
| 2289 } | |
| 2290 | |
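| // Editor's note: a standalone sketch (not V8 code) of how Generate_Apply | |
| // above picks the source of the argument list: unmodified sloppy/strict | |
| // arguments objects and fast (non-holey-smi) JSArrays are unpacked | |
| // directly; everything else goes through kCreateListFromArrayLike. | |
| enum class ListSource { kArgumentsObject, kFastJSArray, kRuntime }; | |
| ListSource ClassifyArgumentsList(bool is_smi, bool is_unmodified_arguments, | |
|                                  bool is_js_array, bool has_fast_elements) { | |
|   if (is_smi) return ListSource::kRuntime; | |
|   if (is_unmodified_arguments) return ListSource::kArgumentsObject; | |
|   if (is_js_array && has_fast_elements) return ListSource::kFastJSArray; | |
|   return ListSource::kRuntime;  // let the runtime build a FixedArray | |
| } | |
| | |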
| 2291 namespace { | |
| 2292 | |
| 2293 // Drops the top JavaScript frame and an arguments adaptor frame below it | |
| 2294 // (if present), preserving all the arguments prepared for the current call. | |
| 2295 // Does nothing if the debugger is currently active. | |
| 2296 // ES6 14.6.3. PrepareForTailCall | |
| 2297 // | |
| 2298 // Stack structure for the function g() tail calling f(): | |
| 2299 // | |
| 2300 // ------- Caller frame: ------- | |
| 2301 // | ... | |
| 2302 // | g()'s arg M | |
| 2303 // | ... | |
| 2304 // | g()'s arg 1 | |
| 2305 // | g()'s receiver arg | |
| 2306 // | g()'s caller pc | |
| 2307 // ------- g()'s frame: ------- | |
| 2308 // | g()'s caller fp <- fp | |
| 2309 // | g()'s context | |
| 2310 // | function pointer: g | |
| 2311 // | ------------------------- | |
| 2312 // | ... | |
| 2313 // | ... | |
| 2314 // | f()'s arg N | |
| 2315 // | ... | |
| 2316 // | f()'s arg 1 | |
| 2317 // | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!) | |
| 2318 // ---------------------- | |
| 2319 // | |
| 2320 void PrepareForTailCall(MacroAssembler* masm, Register args_reg, | |
| 2321 Register scratch1, Register scratch2, | |
| 2322 Register scratch3) { | |
| 2323 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); | |
| 2324 Comment cmnt(masm, "[ PrepareForTailCall"); | |
| 2325 | |
| 2326 // Prepare for tail call only if ES2015 tail call elimination is active. | |
| 2327 Label done; | |
| 2328 ExternalReference is_tail_call_elimination_enabled = | |
| 2329 ExternalReference::is_tail_call_elimination_enabled_address( | |
| 2330 masm->isolate()); | |
| 2331 __ mov(scratch1, Operand(is_tail_call_elimination_enabled)); | |
| 2332 __ LoadlB(scratch1, MemOperand(scratch1)); | |
| 2333 __ CmpP(scratch1, Operand::Zero()); | |
| 2334 __ beq(&done); | |
| 2335 | |
| 2336 // Drop possible interpreter handler/stub frame. | |
| 2337 { | |
| 2338 Label no_interpreter_frame; | |
| 2339 __ LoadP(scratch3, | |
| 2340 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset)); | |
| 2341 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0); | |
| 2342 __ bne(&no_interpreter_frame); | |
| 2343 __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
| 2344 __ bind(&no_interpreter_frame); | |
| 2345 } | |
| 2346 | |
| 2347 // Check if next frame is an arguments adaptor frame. | |
| 2348 Register caller_args_count_reg = scratch1; | |
| 2349 Label no_arguments_adaptor, formal_parameter_count_loaded; | |
| 2350 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
| 2351 __ LoadP( | |
| 2352 scratch3, | |
| 2353 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset)); | |
| 2354 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); | |
| 2355 __ bne(&no_arguments_adaptor); | |
| 2356 | |
| 2357 // Drop current frame and load arguments count from arguments adaptor frame. | |
| 2358 __ LoadRR(fp, scratch2); | |
| 2359 __ LoadP(caller_args_count_reg, | |
| 2360 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
| 2361 __ SmiUntag(caller_args_count_reg); | |
| 2362 __ b(&formal_parameter_count_loaded); | |
| 2363 | |
| 2364 __ bind(&no_arguments_adaptor); | |
| 2365 // Load caller's formal parameter count | |
| 2366 __ LoadP(scratch1, | |
| 2367 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); | |
| 2368 __ LoadP(scratch1, | |
| 2369 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); | |
| 2370 __ LoadW(caller_args_count_reg, | |
| 2371 FieldMemOperand(scratch1, | |
| 2372 SharedFunctionInfo::kFormalParameterCountOffset)); | |
| 2373 #if !V8_TARGET_ARCH_S390X | |
| 2374 __ SmiUntag(caller_args_count_reg); | |
| 2375 #endif | |
| 2376 | |
| 2377 __ bind(&formal_parameter_count_loaded); | |
| 2378 | |
| 2379 ParameterCount callee_args_count(args_reg); | |
| 2380 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, | |
| 2381 scratch3); | |
| 2382 __ bind(&done); | |
| 2383 } | |
| 2384 } // namespace | |
| 2385 | |
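| // Editor's note: a standalone sketch (not V8 code) of the caller | |
| // argument count PrepareForTailCall resolves above (after optionally | |
| // dropping a STUB frame): an arguments adaptor frame supplies its | |
| // dynamic length; otherwise the caller's formal parameter count from | |
| // the SharedFunctionInfo is used. | |
| struct FrameBelowSketch { | |
|   bool is_arguments_adaptor; | |
|   int adaptor_arg_count;       // valid when is_arguments_adaptor | |
|   int formal_parameter_count;  // from the caller's SharedFunctionInfo | |
| }; | |
| int CallerArgCountForTailCall(const FrameBelowSketch& below) { | |
|   return below.is_arguments_adaptor ? below.adaptor_arg_count | |
|                                     : below.formal_parameter_count; | |
| } | |
| | |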
| 2386 // static | |
| 2387 void Builtins::Generate_CallFunction(MacroAssembler* masm, | |
| 2388 ConvertReceiverMode mode, | |
| 2389 TailCallMode tail_call_mode) { | |
| 2390 // ----------- S t a t e ------------- | |
| 2391 // -- r2 : the number of arguments (not including the receiver) | |
| 2392 // -- r3 : the function to call (checked to be a JSFunction) | |
| 2393 // ----------------------------------- | |
| 2394 __ AssertFunction(r3); | |
| 2395 | |
| 2396 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) | |
| 2397 // Check that the function is not a "classConstructor". | |
| 2398 Label class_constructor; | |
| 2399 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); | |
| 2400 __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset)); | |
| 2401 __ TestBitMask(r5, SharedFunctionInfo::kClassConstructorBits, r0); | |
| 2402 __ bne(&class_constructor); | |
| 2403 | |
| 2404 // Enter the context of the function; ToObject has to run in the function | |
| 2405 // context, and we also need to take the global proxy from the function | |
| 2406 // context in case of conversion. | |
| 2407 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); | |
| 2408 // We need to convert the receiver for non-native sloppy mode functions. | |
| 2409 Label done_convert; | |
| 2410 __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) | | |
| 2411 (1 << SharedFunctionInfo::kNativeBit))); | |
| 2412 __ bne(&done_convert); | |
| 2413 { | |
| 2414 // ----------- S t a t e ------------- | |
| 2415 // -- r2 : the number of arguments (not including the receiver) | |
| 2416 // -- r3 : the function to call (checked to be a JSFunction) | |
| 2417 // -- r4 : the shared function info. | |
| 2418 // -- cp : the function context. | |
| 2419 // ----------------------------------- | |
| 2420 | |
| 2421 if (mode == ConvertReceiverMode::kNullOrUndefined) { | |
| 2422 // Patch receiver to global proxy. | |
| 2423 __ LoadGlobalProxy(r5); | |
| 2424 } else { | |
| 2425 Label convert_to_object, convert_receiver; | |
| 2426 __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2)); | |
| 2427 __ LoadP(r5, MemOperand(sp, r5)); | |
| 2428 __ JumpIfSmi(r5, &convert_to_object); | |
| 2429 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | |
| 2430 __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE); | |
| 2431 __ bge(&done_convert); | |
| 2432 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { | |
| 2433 Label convert_global_proxy; | |
| 2434 __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex, | |
| 2435 &convert_global_proxy); | |
| 2436 __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object); | |
| 2437 __ bind(&convert_global_proxy); | |
| 2438 { | |
| 2439 // Patch receiver to global proxy. | |
| 2440 __ LoadGlobalProxy(r5); | |
| 2441 } | |
| 2442 __ b(&convert_receiver); | |
| 2443 } | |
| 2444 __ bind(&convert_to_object); | |
| 2445 { | |
| 2446 // Convert receiver using ToObject. | |
| 2447 // TODO(bmeurer): Inline the allocation here to avoid building the frame | |
| 2448 // in the fast case? (fall back to AllocateInNewSpace?) | |
| 2449 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
| 2450 __ SmiTag(r2); | |
| 2451 __ Push(r2, r3); | |
| 2452 __ LoadRR(r2, r5); | |
| 2453 ToObjectStub stub(masm->isolate()); | |
| 2454 __ CallStub(&stub); | |
| 2455 __ LoadRR(r5, r2); | |
| 2456 __ Pop(r2, r3); | |
| 2457 __ SmiUntag(r2); | |
| 2458 } | |
| 2459 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); | |
| 2460 __ bind(&convert_receiver); | |
| 2461 } | |
| 2462 __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2)); | |
| 2463 __ StoreP(r5, MemOperand(sp, r6)); | |
| 2464 } | |
| 2465 __ bind(&done_convert); | |
| 2466 | |
| 2467 // ----------- S t a t e ------------- | |
| 2468 // -- r2 : the number of arguments (not including the receiver) | |
| 2469 // -- r3 : the function to call (checked to be a JSFunction) | |
| 2470 // -- r4 : the shared function info. | |
| 2471 // -- cp : the function context. | |
| 2472 // ----------------------------------- | |
| 2473 | |
| 2474 if (tail_call_mode == TailCallMode::kAllow) { | |
| 2475 PrepareForTailCall(masm, r2, r5, r6, r7); | |
| 2476 } | |
| 2477 | |
| 2478 __ LoadW( | |
| 2479 r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset)); | |
| 2480 #if !V8_TARGET_ARCH_S390X | |
| 2481 __ SmiUntag(r4); | |
| 2482 #endif | |
| 2483 ParameterCount actual(r2); | |
| 2484 ParameterCount expected(r4); | |
| 2485 __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION, | |
| 2486 CheckDebugStepCallWrapper()); | |
| 2487 | |
| 2488 // The function is a "classConstructor", need to raise an exception. | |
| 2489 __ bind(&class_constructor); | |
| 2490 { | |
| 2491 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL); | |
| 2492 __ push(r3); | |
| 2493 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); | |
| 2494 } | |
| 2495 } | |
| 2496 | |
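| // Editor's note: a standalone sketch (not V8 code) of the receiver | |
| // conversion policy in Generate_CallFunction above (ES6 9.2.1): strict | |
| // and native functions take the receiver as-is; for sloppy functions, | |
| // null/undefined become the global proxy and primitives go through | |
| // ToObject. | |
| enum class Receiver { kKeep, kGlobalProxy, kToObject }; | |
| Receiver ConvertReceiver(bool is_strict_or_native, | |
|                          bool is_null_or_undefined, bool is_js_receiver) { | |
|   if (is_strict_or_native) return Receiver::kKeep; | |
|   if (is_null_or_undefined) return Receiver::kGlobalProxy; | |
|   if (is_js_receiver) return Receiver::kKeep; | |
|   return Receiver::kToObject;  // primitive receiver: wrap it | |
| } | |
| | |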
| 2497 namespace { | |
| 2498 | |
| 2499 void Generate_PushBoundArguments(MacroAssembler* masm) { | |
| 2500 // ----------- S t a t e ------------- | |
| 2501 // -- r2 : the number of arguments (not including the receiver) | |
| 2502 // -- r3 : target (checked to be a JSBoundFunction) | |
| 2503 // -- r5 : new.target (only in case of [[Construct]]) | |
| 2504 // ----------------------------------- | |
| 2505 | |
| 2506 // Load [[BoundArguments]] into r4 and length of that into r6. | |
| 2507 Label no_bound_arguments; | |
| 2508 __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset)); | |
| 2509 __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset)); | |
| 2510 __ SmiUntag(r6); | |
| 2511 __ LoadAndTestP(r6, r6); | |
| 2512 __ beq(&no_bound_arguments); | |
| 2513 { | |
| 2514 // ----------- S t a t e ------------- | |
| 2515 // -- r2 : the number of arguments (not including the receiver) | |
| 2516 // -- r3 : target (checked to be a JSBoundFunction) | |
| 2517 // -- r4 : the [[BoundArguments]] (implemented as FixedArray) | |
| 2518 // -- r5 : new.target (only in case of [[Construct]]) | |
| 2519 // -- r6 : the number of [[BoundArguments]] | |
| 2520 // ----------------------------------- | |
| 2521 | |
| 2522 // Reserve stack space for the [[BoundArguments]]. | |
| 2523 { | |
| 2524 Label done; | |
| 2525 __ LoadRR(r8, sp); // preserve previous stack pointer | |
| 2526 __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2)); | |
| 2527 __ SubP(sp, sp, r9); | |
| 2528 // Check the stack for overflow. We are not trying to catch interruptions | |
| 2529 // (i.e. debug break and preemption) here, so check the "real stack | |
| 2530 // limit". | |
| 2531 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex); | |
| 2532 __ bgt(&done); // Signed comparison. | |
| 2533 // Restore the stack pointer. | |
| 2534 __ LoadRR(sp, r8); | |
| 2535 { | |
| 2536 FrameScope scope(masm, StackFrame::MANUAL); | |
| 2537 __ EnterFrame(StackFrame::INTERNAL); | |
| 2538 __ CallRuntime(Runtime::kThrowStackOverflow); | |
| 2539 } | |
| 2540 __ bind(&done); | |
| 2541 } | |
| 2542 | |
| 2543 // Relocate arguments down the stack. | |
| 2544 // -- r2 : the number of arguments (not including the receiver) | |
| 2545 // -- r8 : the previous stack pointer | |
| 2546 // -- r9: the size of the [[BoundArguments]] | |
| 2547 { | |
| 2548 Label skip, loop; | |
| 2549 __ LoadImmP(r7, Operand::Zero()); | |
| 2550 __ CmpP(r2, Operand::Zero()); | |
| 2551 __ beq(&skip); | |
| 2552 __ LoadRR(r1, r2); | |
| 2553 __ bind(&loop); | |
| 2554 __ LoadP(r0, MemOperand(r8, r7)); | |
| 2555 __ StoreP(r0, MemOperand(sp, r7)); | |
| 2556 __ AddP(r7, r7, Operand(kPointerSize)); | |
| 2557 __ BranchOnCount(r1, &loop); | |
| 2558 __ bind(&skip); | |
| 2559 } | |
| 2560 | |
| 2561 // Copy [[BoundArguments]] to the stack (below the arguments). | |
| 2562 { | |
| 2563 Label loop; | |
| 2564 __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 2565 __ AddP(r4, r4, r9); | |
| 2566 __ LoadRR(r1, r6); | |
| 2567 __ bind(&loop); | |
| 2568 __ LoadP(r0, MemOperand(r4, -kPointerSize)); | |
| 2569 __ lay(r4, MemOperand(r4, -kPointerSize)); | |
| 2570 __ StoreP(r0, MemOperand(sp, r7)); | |
| 2571 __ AddP(r7, r7, Operand(kPointerSize)); | |
| 2572 __ BranchOnCount(r1, &loop); | |
| 2573 __ AddP(r2, r2, r6); | |
| 2574 } | |
| 2575 } | |
| 2576 __ bind(&no_bound_arguments); | |
| 2577 } | |
| 2578 | |
| 2579 } // namespace | |
| 2580 | |
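| // Editor's note: a standalone sketch (not V8 code) of | |
| // Generate_PushBoundArguments above, on a vector "stack" whose index 0 | |
| // is the stack top: make room below the existing arguments, slide them | |
| // down, and copy [[BoundArguments]] into the gap so bound arguments | |
| // precede call-site arguments. The caller's argc grows by bound.size(). | |
| #include <cstddef> | |
| #include <vector> | |
| void PushBoundArgs(std::vector<void*>& stack, std::size_t argc, | |
|                    const std::vector<void*>& bound) { | |
|   // stack[0..argc-1]: pushed arguments; stack[argc]: the receiver. | |
|   // Reversed so the bound args sit in order just below the receiver. | |
|   stack.insert(stack.begin() + argc, bound.rbegin(), bound.rend()); | |
| } | |
| | |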
| 2581 // static | |
| 2582 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, | |
| 2583 TailCallMode tail_call_mode) { | |
| 2584 // ----------- S t a t e ------------- | |
| 2585 // -- r2 : the number of arguments (not including the receiver) | |
| 2586 // -- r3 : the function to call (checked to be a JSBoundFunction) | |
| 2587 // ----------------------------------- | |
| 2588 __ AssertBoundFunction(r3); | |
| 2589 | |
| 2590 if (tail_call_mode == TailCallMode::kAllow) { | |
| 2591 PrepareForTailCall(masm, r2, r5, r6, r7); | |
| 2592 } | |
| 2593 | |
| 2594 // Patch the receiver to [[BoundThis]]. | |
| 2595 __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset)); | |
| 2596 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); | |
| 2597 __ StoreP(ip, MemOperand(sp, r1)); | |
| 2598 | |
| 2599 // Push the [[BoundArguments]] onto the stack. | |
| 2600 Generate_PushBoundArguments(masm); | |
| 2601 | |
| 2602 // Call the [[BoundTargetFunction]] via the Call builtin. | |
| 2603 __ LoadP(r3, | |
| 2604 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset)); | |
| 2605 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, | |
| 2606 masm->isolate()))); | |
| 2607 __ LoadP(ip, MemOperand(ip)); | |
| 2608 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 2609 __ JumpToJSEntry(ip); | |
| 2610 } | |
| 2611 | |
| 2612 // static | |
| 2613 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, | |
| 2614 TailCallMode tail_call_mode) { | |
| 2615 // ----------- S t a t e ------------- | |
| 2616 // -- r2 : the number of arguments (not including the receiver) | |
| 2617 // -- r3 : the target to call (can be any Object). | |
| 2618 // ----------------------------------- | |
| 2619 | |
| 2620 Label non_callable, non_function, non_smi; | |
| 2621 __ JumpIfSmi(r3, &non_callable); | |
| 2622 __ bind(&non_smi); | |
| 2623 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE); | |
| 2624 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), | |
| 2625 RelocInfo::CODE_TARGET, eq); | |
| 2626 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE)); | |
| 2627 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), | |
| 2628 RelocInfo::CODE_TARGET, eq); | |
| 2629 | |
| 2630 // Check if target has a [[Call]] internal method. | |
| 2631 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset)); | |
| 2632 __ TestBit(r6, Map::kIsCallable); | |
| 2633 __ beq(&non_callable); | |
| 2634 | |
| 2635 __ CmpP(r7, Operand(JS_PROXY_TYPE)); | |
| 2636 __ bne(&non_function); | |
| 2637 | |
| 2638 // 0. Prepare for tail call if necessary. | |
| 2639 if (tail_call_mode == TailCallMode::kAllow) { | |
| 2640 PrepareForTailCall(masm, r2, r5, r6, r7); | |
| 2641 } | |
| 2642 | |
| 2643 // 1. Runtime fallback for Proxy [[Call]]. | |
| 2644 __ Push(r3); | |
| 2645 // Increase the arguments size to include the pushed function and the | |
| 2646 // existing receiver on the stack. | |
| 2647 __ AddP(r2, r2, Operand(2)); | |
| 2648 // Tail-call to the runtime. | |
| 2649 __ JumpToExternalReference( | |
| 2650 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); | |
| 2651 | |
| 2652 // 2. Call to something else, which might have a [[Call]] internal method (if | |
| 2653 // not, we raise an exception). | |
| 2654 __ bind(&non_function); | |
| 2655 // Overwrite the original receiver with the (original) target. | |
| 2656 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2)); | |
| 2657 __ StoreP(r3, MemOperand(sp, r7)); | |
| 2658 // Let the "call_as_function_delegate" take care of the rest. | |
| 2659 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3); | |
| 2660 __ Jump(masm->isolate()->builtins()->CallFunction( | |
| 2661 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), | |
| 2662 RelocInfo::CODE_TARGET); | |
| 2663 | |
| 2664 // 3. Call to something that is not callable. | |
| 2665 __ bind(&non_callable); | |
| 2666 { | |
| 2667 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | |
| 2668 __ Push(r3); | |
| 2669 __ CallRuntime(Runtime::kThrowCalledNonCallable); | |
| 2670 } | |
| 2671 } | |
| 2672 | |
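| // Editor's note: a standalone sketch (not V8 code) of the dispatch in | |
| // Generate_Call above. | |
| enum class CallTarget { kSmi, kJSFunction, kBoundFunction, kProxy, kOther }; | |
| enum class CallAction { kCallFunction, kCallBoundFunction, kProxyRuntime, | |
|                         kCallAsFunctionDelegate, kThrowNotCallable }; | |
| CallAction DispatchCall(CallTarget target, bool is_callable) { | |
|   switch (target) { | |
|     case CallTarget::kJSFunction: return CallAction::kCallFunction; | |
|     case CallTarget::kBoundFunction: return CallAction::kCallBoundFunction; | |
|     case CallTarget::kSmi: return CallAction::kThrowNotCallable; | |
|     case CallTarget::kProxy: | |
|       return is_callable ? CallAction::kProxyRuntime | |
|                          : CallAction::kThrowNotCallable; | |
|     case CallTarget::kOther:  // e.g. a callable exotic object | |
|       return is_callable ? CallAction::kCallAsFunctionDelegate | |
|                          : CallAction::kThrowNotCallable; | |
|   } | |
|   return CallAction::kThrowNotCallable;  // unreachable | |
| } | |
| | |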
| 2673 // static | |
| 2674 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { | |
| 2675 // ----------- S t a t e ------------- | |
| 2676 // -- r2 : the number of arguments (not including the receiver) | |
| 2677 // -- r3 : the constructor to call (checked to be a JSFunction) | |
| 2678 // -- r5 : the new target (checked to be a constructor) | |
| 2679 // ----------------------------------- | |
| 2680 __ AssertFunction(r3); | |
| 2681 | |
  // The calling convention for function-specific ConstructStubs requires
  // r4 to contain either an AllocationSite or undefined.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r2 : the number of arguments (not including the receiver)
  // -- r3 : the function to call (checked to be a JSBoundFunction)
  // -- r5 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r3);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
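  // (The spec requires [[Construct]] on a bound function to pass the bound
  // target on as new.target when new.target is the bound function itself.)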
  Label skip;
  __ CmpP(r3, r5);
  __ bne(&skip);
  __ LoadP(r5,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r2 : the number of arguments (not including the receiver)
  // -- r3 : the constructor to call (checked to be a JSProxy)
  // -- r5 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r3, r5);
  // Include the pushed new_target, constructor and the receiver.
  __ AddP(r2, r2, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r2 : the number of arguments (not including the receiver)
  // -- r3 : the constructor to call (can be any Object)
  // -- r5 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r3, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);
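  // The map of the target is now in r6 and its instance type in r7; both
  // are reused by the checks below.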

  // Check if target has a [[Construct]] internal method.
  __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r4, Map::kIsConstructor);
  __ beq(&non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
    __ StoreP(r3, MemOperand(sp, r7));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : requested object size (untagged)
  // -- lr : return address
  // -----------------------------------
  __ SmiTag(r3);
  __ Push(r3);
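  // The allocation builtins are reached without a JavaScript context; a Smi
  // zero in cp marks the frame as having no context.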
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : requested object size (untagged)
  // -- lr : return address
  // -----------------------------------
  __ SmiTag(r3);
  __ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r3, r4);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in r2.
  __ AssertString(r2);

  // Check if string has a cached array index.
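  // A string that is a valid array index caches that index in its hash
  // field; IndexFromHash extracts it into r2 as a Smi.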
  Label runtime;
  __ LoadlW(r4, FieldMemOperand(r2, String::kHashFieldOffset));
  __ And(r0, r4, Operand(String::kContainsCachedArrayIndexMask));
  __ bne(&runtime);
  __ IndexFromHash(r4, r2);
  __ Ret();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r2);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}

// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in r2.
  STATIC_ASSERT(kSmiTag == 0);
  __ TestIfSmi(r2);
  __ Ret(eq);

  __ CompareObjectType(r2, r3, r3, HEAP_NUMBER_TYPE);
  // r2: receiver
  // r3: receiver instance type
  __ Ret(eq);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in r2.
  __ AssertNotNumber(r2);

  __ CompareObjectType(r2, r3, r3, FIRST_NONSTRING_TYPE);
  // r2: receiver
  // r3: receiver instance type
  __ Jump(masm->isolate()->builtins()->StringToNumber(), RelocInfo::CODE_TARGET,
          lt);
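  // Instance types below FIRST_NONSTRING_TYPE are strings, so the branch
  // above is taken exactly when the receiver is a String.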

  Label not_oddball;
  __ CmpP(r3, Operand(ODDBALL_TYPE));
  __ bne(&not_oddball);
  __ LoadP(r2, FieldMemOperand(r2, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);

  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r2);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r2 : actual number of arguments
  // -- r3 : function (passed through to callee)
  // -- r4 : expected number of arguments
  // -- r5 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
  __ CmpP(r2, r4);
  __ blt(&too_few);
  __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);
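  // The sentinel marks functions that handle any actual argument count;
  // such functions are entered directly, without an adaptor frame.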

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r2 and copy end address into r6.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ AddP(r2, fp);
    // adjust for return address and receiver
    __ AddP(r2, r2, Operand(2 * kPointerSize));
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, r2, r6);
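    // r2 now points at the receiver slot in the caller's frame and r6 at the
    // last slot to copy, so only the receiver and the first r4 (expected)
    // arguments are copied; surplus actual arguments are not copied.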

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // r6: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r2, 0));
    __ push(r0);
    __ CmpP(r2, r6);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate the copy start address in r2; the copy end address is fp.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ lay(r2, MemOperand(r2, fp));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
    __ push(r0);
    __ CmpP(r2, fp);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r3: function
    // r4: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, fp, r6);
    // Adjust for frame.
    __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));
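    // r6 is now the target stack pointer once every expected argument slot
    // is filled; the loop below pushes undefined until sp reaches it.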

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ CmpP(sp, r6);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ LoadRR(r2, r4);
  // r2 : expected number of arguments
  // r3 : function (passed through to callee)
  // r5 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_S390