OLD | NEW |
(Empty) | |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // |
| 3 // Copyright IBM Corp. 2012, 2013. All rights reserved. |
| 4 // |
| 5 // Use of this source code is governed by a BSD-style license that can be |
| 6 // found in the LICENSE file. |
| 7 |
| 8 #include "src/v8.h" |
| 9 |
| 10 #if V8_TARGET_ARCH_PPC |
| 11 |
| 12 #include "src/codegen.h" |
| 13 #include "src/debug.h" |
| 14 #include "src/deoptimizer.h" |
| 15 #include "src/full-codegen.h" |
| 16 #include "src/runtime.h" |
| 17 |
| 18 namespace v8 { |
| 19 namespace internal { |
| 20 |
| 21 |
| 22 #define __ ACCESS_MASM(masm) |
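 | // Throughout this file `__` abbreviates ACCESS_MASM(masm), which in the
 | // default build expands to `masm->`, so e.g. `__ push(r4)` emits a push
 | // of r4 into the code buffer being assembled.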
| 23 |
| 24 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id, |
| 25 BuiltinExtraArguments extra_args) { |
| 26 // ----------- S t a t e ------------- |
| 27 // -- r3 : number of arguments excluding receiver |
| 28 // -- r4 : called function (only guaranteed when |
| 29 // extra_args requires it) |
| 30 // -- cp : context |
| 31 // -- sp[0] : last argument |
| 32 // -- ... |
 | 33 // -- sp[kPointerSize * (argc - 1)] : first argument (argc == r3)
 | 34 // -- sp[kPointerSize * argc] : receiver
| 35 // ----------------------------------- |
| 36 |
| 37 // Insert extra arguments. |
| 38 int num_extra_args = 0; |
| 39 if (extra_args == NEEDS_CALLED_FUNCTION) { |
| 40 num_extra_args = 1; |
| 41 __ push(r4); |
| 42 } else { |
| 43 DCHECK(extra_args == NO_EXTRA_ARGUMENTS); |
| 44 } |
| 45 |
 | 46 // JumpToExternalReference expects r3 to contain the number of arguments
| 47 // including the receiver and the extra arguments. |
| 48 __ addi(r3, r3, Operand(num_extra_args + 1)); |
| 49 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); |
| 50 } |
| 51 |
| 52 |
| 53 // Load the built-in InternalArray function from the current context. |
| 54 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, |
| 55 Register result) { |
| 56 // Load the native context. |
| 57 |
| 58 __ LoadP(result, |
| 59 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
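 | // Context::SlotOffset(i) folds the FixedArray header size and the -1
 | // heap-object tag into the displacement, so a single LoadP reads slot i
 | // straight off the tagged context pointer in cp.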
| 60 __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset)); |
| 61 // Load the InternalArray function from the native context. |
| 62 __ LoadP(result, |
| 63 MemOperand(result, Context::SlotOffset( |
| 64 Context::INTERNAL_ARRAY_FUNCTION_INDEX))); |
| 65 } |
| 66 |
| 67 |
| 68 // Load the built-in Array function from the current context. |
| 69 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { |
| 70 // Load the native context. |
| 71 |
| 72 __ LoadP(result, |
| 73 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 74 __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset)); |
| 75 // Load the Array function from the native context. |
| 76 __ LoadP( |
| 77 result, |
| 78 MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); |
| 79 } |
| 80 |
| 81 |
| 82 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { |
| 83 // ----------- S t a t e ------------- |
| 84 // -- r3 : number of arguments |
| 85 // -- lr : return address |
| 86 // -- sp[...]: constructor arguments |
| 87 // ----------------------------------- |
| 88 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
| 89 |
| 90 // Get the InternalArray function. |
| 91 GenerateLoadInternalArrayFunction(masm, r4); |
| 92 |
| 93 if (FLAG_debug_code) { |
| 94 // Initial map for the builtin InternalArray functions should be maps. |
| 95 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); |
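 | // Smis have a clear low tag bit, so the TestIfSmi AND below leaves eq in
 | // cr0 for a Smi; asserting ne checks that the initial-map slot holds a
 | // heap object rather than a Smi.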
| 96 __ TestIfSmi(r5, r0); |
| 97 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); |
| 98 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
| 99 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); |
| 100 } |
| 101 |
| 102 // Run the native code for the InternalArray function called as a normal |
| 103 // function. |
 | 104 // Tail call a stub.
| 105 InternalArrayConstructorStub stub(masm->isolate()); |
| 106 __ TailCallStub(&stub); |
| 107 } |
| 108 |
| 109 |
| 110 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { |
| 111 // ----------- S t a t e ------------- |
| 112 // -- r3 : number of arguments |
| 113 // -- lr : return address |
| 114 // -- sp[...]: constructor arguments |
| 115 // ----------------------------------- |
| 116 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
| 117 |
| 118 // Get the Array function. |
| 119 GenerateLoadArrayFunction(masm, r4); |
| 120 |
| 121 if (FLAG_debug_code) { |
| 122 // Initial map for the builtin Array functions should be maps. |
| 123 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); |
| 124 __ TestIfSmi(r5, r0); |
| 125 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); |
| 126 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
| 127 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
| 128 } |
| 129 |
| 130 // Run the native code for the Array function called as a normal function. |
 | 131 // Tail call a stub.
| 132 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
| 133 ArrayConstructorStub stub(masm->isolate()); |
| 134 __ TailCallStub(&stub); |
| 135 } |
| 136 |
| 137 |
| 138 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { |
| 139 // ----------- S t a t e ------------- |
| 140 // -- r3 : number of arguments |
| 141 // -- r4 : constructor function |
| 142 // -- lr : return address |
 | 143 // -- sp[(argc - n - 1) * kPointerSize] : arg[n] (zero based)
 | 144 // -- sp[argc * kPointerSize] : receiver
| 145 // ----------------------------------- |
| 146 Counters* counters = masm->isolate()->counters(); |
| 147 __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6); |
| 148 |
| 149 Register function = r4; |
| 150 if (FLAG_debug_code) { |
| 151 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5); |
| 152 __ cmp(function, r5); |
| 153 __ Assert(eq, kUnexpectedStringFunction); |
| 154 } |
| 155 |
 | 156 // Load the first argument into r3 and get rid of the rest.
| 157 Label no_arguments; |
| 158 __ cmpi(r3, Operand::Zero()); |
| 159 __ beq(&no_arguments); |
 | 160 // First arg = sp[(argc - 1) * kPointerSize].
| 161 __ subi(r3, r3, Operand(1)); |
| 162 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2)); |
| 163 __ add(sp, sp, r3); |
| 164 __ LoadP(r3, MemOperand(sp)); |
 | 165 // sp now points to args[0]; drop args[0] and the receiver.
| 166 __ Drop(2); |
| 167 |
| 168 Register argument = r5; |
| 169 Label not_cached, argument_is_string; |
| 170 __ LookupNumberStringCache(r3, // Input. |
| 171 argument, // Result. |
| 172 r6, // Scratch. |
| 173 r7, // Scratch. |
| 174 r8, // Scratch. |
 | 175 &not_cached);
| 176 __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7); |
| 177 __ bind(&argument_is_string); |
| 178 |
| 179 // ----------- S t a t e ------------- |
| 180 // -- r5 : argument converted to string |
| 181 // -- r4 : constructor function |
| 182 // -- lr : return address |
| 183 // ----------------------------------- |
| 184 |
| 185 Label gc_required; |
| 186 __ Allocate(JSValue::kSize, |
| 187 r3, // Result. |
| 188 r6, // Scratch. |
| 189 r7, // Scratch. |
| 190 &gc_required, TAG_OBJECT); |
| 191 |
| 192 // Initialising the String Object. |
| 193 Register map = r6; |
| 194 __ LoadGlobalFunctionInitialMap(function, map, r7); |
| 195 if (FLAG_debug_code) { |
| 196 __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset)); |
| 197 __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2)); |
| 198 __ Assert(eq, kUnexpectedStringWrapperInstanceSize); |
| 199 __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset)); |
| 200 __ cmpi(r7, Operand::Zero()); |
| 201 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper); |
| 202 } |
| 203 __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0); |
| 204 |
| 205 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
| 206 __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); |
| 207 __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0); |
| 208 |
| 209 __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0); |
| 210 |
| 211 // Ensure the object is fully initialized. |
| 212 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize); |
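 | // Map, properties, elements and value are one word each, so the four
 | // stores above cover every field of the JSValue; the assert keeps this
 | // code in sync should JSValue ever grow.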
| 213 |
| 214 __ Ret(); |
| 215 |
| 216 // The argument was not found in the number to string cache. Check |
| 217 // if it's a string already before calling the conversion builtin. |
| 218 Label convert_argument; |
 | 219 __ bind(&not_cached);
| 220 __ JumpIfSmi(r3, &convert_argument); |
| 221 |
| 222 // Is it a String? |
| 223 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 224 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset)); |
| 225 STATIC_ASSERT(kNotStringTag != 0); |
| 226 __ andi(r0, r6, Operand(kIsNotStringMask)); |
| 227 __ bne(&convert_argument, cr0); |
| 228 __ mr(argument, r3); |
| 229 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); |
| 230 __ b(&argument_is_string); |
| 231 |
| 232 // Invoke the conversion builtin and put the result into r5. |
| 233 __ bind(&convert_argument); |
| 234 __ push(function); // Preserve the function. |
| 235 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7); |
| 236 { |
| 237 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 238 __ push(r3); |
| 239 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
| 240 } |
| 241 __ pop(function); |
| 242 __ mr(argument, r3); |
| 243 __ b(&argument_is_string); |
| 244 |
| 245 // Load the empty string into r5, remove the receiver from the |
| 246 // stack, and jump back to the case where the argument is a string. |
| 247 __ bind(&no_arguments); |
| 248 __ LoadRoot(argument, Heap::kempty_stringRootIndex); |
| 249 __ Drop(1); |
| 250 __ b(&argument_is_string); |
| 251 |
| 252 // At this point the argument is already a string. Call runtime to |
| 253 // create a string wrapper. |
| 254 __ bind(&gc_required); |
| 255 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7); |
| 256 { |
| 257 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 258 __ push(argument); |
| 259 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
| 260 } |
| 261 __ Ret(); |
| 262 } |
| 263 |
| 264 |
| 265 static void CallRuntimePassFunction(MacroAssembler* masm, |
| 266 Runtime::FunctionId function_id) { |
| 267 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 268 // Push a copy of the function onto the stack. |
| 269 // Push function as parameter to the runtime call. |
| 270 __ Push(r4, r4); |
| 271 |
| 272 __ CallRuntime(function_id, 1); |
 | 273 // Restore the function.
| 274 __ Pop(r4); |
| 275 } |
| 276 |
| 277 |
| 278 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { |
| 279 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
| 280 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset)); |
| 281 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 282 __ Jump(r5); |
| 283 } |
| 284 |
| 285 |
| 286 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { |
| 287 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 288 __ Jump(r3); |
| 289 } |
| 290 |
| 291 |
| 292 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { |
| 293 // Checking whether the queued function is ready for install is optional, |
| 294 // since we come across interrupts and stack checks elsewhere. However, |
| 295 // not checking may delay installing ready functions, and always checking |
| 296 // would be quite expensive. A good compromise is to first check against |
| 297 // stack limit as a cue for an interrupt signal. |
| 298 Label ok; |
| 299 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 300 __ cmpl(sp, ip); |
| 301 __ bge(&ok); |
| 302 |
| 303 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); |
| 304 GenerateTailCallToReturnedCode(masm); |
| 305 |
| 306 __ bind(&ok); |
| 307 GenerateTailCallToSharedCode(masm); |
| 308 } |
| 309 |
| 310 |
| 311 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 312 bool is_api_function, |
| 313 bool create_memento) { |
| 314 // ----------- S t a t e ------------- |
| 315 // -- r3 : number of arguments |
| 316 // -- r4 : constructor function |
| 317 // -- r5 : allocation site or undefined |
| 318 // -- lr : return address |
| 319 // -- sp[...]: constructor arguments |
| 320 // ----------------------------------- |
| 321 |
| 322 // Should never create mementos for api functions. |
| 323 DCHECK(!is_api_function || !create_memento); |
| 324 |
| 325 Isolate* isolate = masm->isolate(); |
| 326 |
| 327 // Enter a construct frame. |
| 328 { |
| 329 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT); |
| 330 |
| 331 if (create_memento) { |
| 332 __ AssertUndefinedOrAllocationSite(r5, r6); |
| 333 __ push(r5); |
| 334 } |
| 335 |
| 336 // Preserve the two incoming parameters on the stack. |
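 | // SmiTag shifts the raw count into Smi form (by 1 bit on 32-bit targets,
 | // 32 bits on PPC64), so the GC never mistakes the count for a heap
 | // pointer while it sits on the stack.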
| 337 __ SmiTag(r3); |
| 338 __ push(r3); // Smi-tagged arguments count. |
| 339 __ push(r4); // Constructor function. |
| 340 |
| 341 // Try to allocate the object without transitioning into C code. If any of |
| 342 // the preconditions is not met, the code bails out to the runtime call. |
| 343 Label rt_call, allocated; |
| 344 if (FLAG_inline_new) { |
| 345 Label undo_allocation; |
| 346 ExternalReference debug_step_in_fp = |
| 347 ExternalReference::debug_step_in_fp_address(isolate); |
| 348 __ mov(r5, Operand(debug_step_in_fp)); |
| 349 __ LoadP(r5, MemOperand(r5)); |
| 350 __ cmpi(r5, Operand::Zero()); |
| 351 __ bne(&rt_call); |
| 352 |
| 353 // Load the initial map and verify that it is in fact a map. |
| 354 // r4: constructor function |
| 355 __ LoadP(r5, |
| 356 FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); |
| 357 __ JumpIfSmi(r5, &rt_call); |
| 358 __ CompareObjectType(r5, r6, r7, MAP_TYPE); |
| 359 __ bne(&rt_call); |
| 360 |
| 361 // Check that the constructor is not constructing a JSFunction (see |
| 362 // comments in Runtime_NewObject in runtime.cc). In which case the |
| 363 // initial map's instance type would be JS_FUNCTION_TYPE. |
| 364 // r4: constructor function |
| 365 // r5: initial map |
| 366 __ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE); |
| 367 __ beq(&rt_call); |
| 368 |
| 369 if (!is_api_function) { |
| 370 Label allocate; |
| 371 MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset); |
| 372 // Check if slack tracking is enabled. |
| 373 __ lwz(r7, bit_field3); |
| 374 __ DecodeField<Map::ConstructionCount>(r11, r7); |
| 375 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0); |
| 376 __ cmpi(r11, Operand::Zero()); // JSFunction::kNoSlackTracking |
| 377 __ beq(&allocate); |
| 378 // Decrease generous allocation count. |
| 379 __ Add(r7, r7, -(1 << Map::ConstructionCount::kShift), r0); |
| 380 __ stw(r7, bit_field3); |
| 381 __ cmpi(r11, Operand(JSFunction::kFinishSlackTracking)); |
| 382 __ bne(&allocate); |
| 383 |
| 384 __ push(r4); |
| 385 |
| 386 __ Push(r5, r4); // r4 = constructor |
| 387 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); |
| 388 |
| 389 __ Pop(r4, r5); |
| 390 |
| 391 __ bind(&allocate); |
| 392 } |
| 393 |
| 394 // Now allocate the JSObject on the heap. |
| 395 // r4: constructor function |
| 396 // r5: initial map |
| 397 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset)); |
| 398 if (create_memento) { |
| 399 __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize)); |
| 400 } |
| 401 |
| 402 __ Allocate(r6, r7, r8, r9, &rt_call, SIZE_IN_WORDS); |
| 403 |
| 404 // Allocated the JSObject, now initialize the fields. Map is set to |
| 405 // initial map and properties and elements are set to empty fixed array. |
| 406 // r4: constructor function |
| 407 // r5: initial map |
 | 408 // r6: object size in words (including memento if create_memento)
| 409 // r7: JSObject (not tagged) |
| 410 __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex); |
| 411 __ mr(r8, r7); |
| 412 __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset)); |
| 413 __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset)); |
| 414 __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset)); |
| 415 __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize)); |
| 416 |
| 417 __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2)); |
| 418 __ add(r9, r7, r9); // End of object. |
| 419 |
| 420 // Fill all the in-object properties with the appropriate filler. |
| 421 // r4: constructor function |
| 422 // r5: initial map |
| 423 // r6: object size (in words, including memento if create_memento) |
| 424 // r7: JSObject (not tagged) |
| 425 // r8: First in-object property of JSObject (not tagged) |
| 426 // r9: End of object |
| 427 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize); |
| 428 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex); |
| 429 |
| 430 if (!is_api_function) { |
| 431 Label no_inobject_slack_tracking; |
| 432 |
| 433 // Check if slack tracking is enabled. |
| 434 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0); |
| 435 __ cmpi(r11, Operand::Zero()); // JSFunction::kNoSlackTracking |
| 436 __ beq(&no_inobject_slack_tracking); |
| 437 |
| 438 // Allocate object with a slack. |
| 439 __ lbz(r3, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset)); |
| 440 if (FLAG_debug_code) { |
| 441 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); |
| 442 __ add(r0, r8, r0); |
 | 443 // r0: address of first field after pre-allocated fields
| 444 __ cmp(r0, r9); |
| 445 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields); |
| 446 } |
| 447 { |
| 448 Label done; |
| 449 __ cmpi(r3, Operand::Zero()); |
| 450 __ beq(&done); |
| 451 __ InitializeNFieldsWithFiller(r8, r3, r10); |
| 452 __ bind(&done); |
| 453 } |
 | 454 // Fill the remaining (slack) fields below with the one-pointer
 | 455 // filler map, to allow for truncation of the object later.
 | 456 __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
| 457 |
| 458 __ bind(&no_inobject_slack_tracking); |
| 459 } |
| 460 |
| 461 if (create_memento) { |
| 462 __ subi(r3, r9, Operand(AllocationMemento::kSize)); |
| 463 __ InitializeFieldsWithFiller(r8, r3, r10); |
| 464 |
| 465 // Fill in memento fields. |
| 466 // r8: points to the allocated but uninitialized memento. |
| 467 __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex); |
| 468 __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset)); |
| 469 // Load the AllocationSite |
| 470 __ LoadP(r10, MemOperand(sp, 2 * kPointerSize)); |
| 471 __ StoreP(r10, |
| 472 MemOperand(r8, AllocationMemento::kAllocationSiteOffset)); |
| 473 __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset + |
| 474 kPointerSize)); |
| 475 } else { |
| 476 __ InitializeFieldsWithFiller(r8, r9, r10); |
| 477 } |
| 478 |
| 479 // Add the object tag to make the JSObject real, so that we can continue |
| 480 // and jump into the continuation code at any time from now on. Any |
| 481 // failures need to undo the allocation, so that the heap is in a |
| 482 // consistent state and verifiable. |
| 483 __ addi(r7, r7, Operand(kHeapObjectTag)); |
| 484 |
 | 485 // Check if a non-empty properties array is needed. Continue with the
 | 486 // allocated object if not; fall through to the runtime call if it is.
| 487 // r4: constructor function |
| 488 // r7: JSObject |
| 489 // r8: start of next object (not tagged) |
| 490 __ lbz(r6, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset)); |
 | 491 // The map's instance-sizes field contains both the pre-allocated
 | 492 // property field count and the in-object property count.
| 493 __ lbz(r0, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset)); |
| 494 __ add(r6, r6, r0); |
| 495 __ lbz(r0, FieldMemOperand(r5, Map::kInObjectPropertiesOffset)); |
| 496 __ sub(r6, r6, r0, LeaveOE, SetRC); |
| 497 |
| 498 // Done if no extra properties are to be allocated. |
| 499 __ beq(&allocated, cr0); |
| 500 __ Assert(ge, kPropertyAllocationCountFailed, cr0); |
| 501 |
| 502 // Scale the number of elements by pointer size and add the header for |
| 503 // FixedArrays to the start of the next object calculation from above. |
| 504 // r4: constructor |
| 505 // r6: number of elements in properties array |
| 506 // r7: JSObject |
| 507 // r8: start of next object |
| 508 __ addi(r3, r6, Operand(FixedArray::kHeaderSize / kPointerSize)); |
| 509 __ Allocate( |
| 510 r3, r8, r9, r5, &undo_allocation, |
| 511 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS)); |
| 512 |
| 513 // Initialize the FixedArray. |
| 514 // r4: constructor |
| 515 // r6: number of elements in properties array |
| 516 // r7: JSObject |
| 517 // r8: FixedArray (not tagged) |
| 518 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex); |
| 519 __ mr(r5, r8); |
| 520 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset); |
| 521 __ StoreP(r9, MemOperand(r5)); |
| 522 DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset); |
| 523 __ SmiTag(r3, r6); |
| 524 __ StorePU(r3, MemOperand(r5, kPointerSize)); |
| 525 __ addi(r5, r5, Operand(kPointerSize)); |
| 526 |
| 527 // Initialize the fields to undefined. |
| 528 // r4: constructor function |
| 529 // r5: First element of FixedArray (not tagged) |
| 530 // r6: number of elements in properties array |
| 531 // r7: JSObject |
| 532 // r8: FixedArray (not tagged) |
| 533 DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize); |
| 534 { |
| 535 Label done; |
| 536 __ cmpi(r6, Operand::Zero()); |
| 537 __ beq(&done); |
| 538 if (!is_api_function || create_memento) { |
| 539 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex); |
| 540 } else if (FLAG_debug_code) { |
| 541 __ LoadRoot(r11, Heap::kUndefinedValueRootIndex); |
| 542 __ cmp(r10, r11); |
| 543 __ Assert(eq, kUndefinedValueNotLoaded); |
| 544 } |
| 545 __ InitializeNFieldsWithFiller(r5, r6, r10); |
| 546 __ bind(&done); |
| 547 } |
| 548 |
| 549 // Store the initialized FixedArray into the properties field of |
| 550 // the JSObject |
| 551 // r4: constructor function |
| 552 // r7: JSObject |
| 553 // r8: FixedArray (not tagged) |
| 554 __ addi(r8, r8, Operand(kHeapObjectTag)); // Add the heap tag. |
| 555 __ StoreP(r8, FieldMemOperand(r7, JSObject::kPropertiesOffset), r0); |
| 556 |
| 557 // Continue with JSObject being successfully allocated |
| 558 // r4: constructor function |
| 559 // r7: JSObject |
| 560 __ b(&allocated); |
| 561 |
| 562 // Undo the setting of the new top so that the heap is verifiable. For |
| 563 // example, the map's unused properties potentially do not match the |
 | 564 // allocated object's unused properties.
| 565 // r7: JSObject (previous new top) |
| 566 __ bind(&undo_allocation); |
| 567 __ UndoAllocationInNewSpace(r7, r8); |
| 568 } |
| 569 |
| 570 // Allocate the new receiver object using the runtime call. |
| 571 // r4: constructor function |
| 572 __ bind(&rt_call); |
| 573 if (create_memento) { |
| 574 // Get the cell or allocation site. |
| 575 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize)); |
| 576 __ push(r5); |
| 577 } |
| 578 |
| 579 __ push(r4); // argument for Runtime_NewObject |
| 580 if (create_memento) { |
| 581 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); |
| 582 } else { |
| 583 __ CallRuntime(Runtime::kNewObject, 1); |
| 584 } |
| 585 __ mr(r7, r3); |
| 586 |
| 587 // If we ended up using the runtime, and we want a memento, then the |
 | 588 // runtime call made it for us, and we shouldn't increment the create
 | 589 // count ourselves.
| 590 Label count_incremented; |
| 591 if (create_memento) { |
| 592 __ b(&count_incremented); |
| 593 } |
| 594 |
| 595 // Receiver for constructor call allocated. |
| 596 // r7: JSObject |
| 597 __ bind(&allocated); |
| 598 |
| 599 if (create_memento) { |
| 600 __ LoadP(r5, MemOperand(sp, kPointerSize * 2)); |
| 601 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); |
| 602 __ cmp(r5, r8); |
| 603 __ beq(&count_incremented); |
| 604 // r5 is an AllocationSite. We are creating a memento from it, so we |
| 605 // need to increment the memento create count. |
| 606 __ LoadP( |
| 607 r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset)); |
| 608 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0); |
| 609 __ StoreP( |
| 610 r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset), |
| 611 r0); |
| 612 __ bind(&count_incremented); |
| 613 } |
| 614 |
| 615 __ Push(r7, r7); |
| 616 |
| 617 // Reload the number of arguments and the constructor from the stack. |
| 618 // sp[0]: receiver |
| 619 // sp[1]: receiver |
| 620 // sp[2]: constructor function |
| 621 // sp[3]: number of arguments (smi-tagged) |
| 622 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize)); |
| 623 __ LoadP(r6, MemOperand(sp, 3 * kPointerSize)); |
| 624 |
| 625 // Set up pointer to last argument. |
| 626 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 627 |
| 628 // Set up number of arguments for function call below |
| 629 __ SmiUntag(r3, r6); |
| 630 |
| 631 // Copy arguments and receiver to the expression stack. |
| 632 // r3: number of arguments |
| 633 // r4: constructor function |
| 634 // r5: address of last argument (caller sp) |
| 635 // r6: number of arguments (smi-tagged) |
| 636 // sp[0]: receiver |
| 637 // sp[1]: receiver |
| 638 // sp[2]: constructor function |
| 639 // sp[3]: number of arguments (smi-tagged) |
| 640 Label loop, no_args; |
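 | // This is a counted loop on the PPC count register: mtctr seeds CTR with
 | // argc and bdnz decrements it, branching while it is non-zero, so no
 | // explicit induction register is needed; ip holds the byte offset of the
 | // next argument to push, counting down.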
| 641 __ cmpi(r3, Operand::Zero()); |
| 642 __ beq(&no_args); |
| 643 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
| 644 __ mtctr(r3); |
| 645 __ bind(&loop); |
| 646 __ subi(ip, ip, Operand(kPointerSize)); |
| 647 __ LoadPX(r0, MemOperand(r5, ip)); |
| 648 __ push(r0); |
| 649 __ bdnz(&loop); |
| 650 __ bind(&no_args); |
| 651 |
| 652 // Call the function. |
| 653 // r3: number of arguments |
| 654 // r4: constructor function |
| 655 if (is_api_function) { |
| 656 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
| 657 Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct(); |
| 658 __ Call(code, RelocInfo::CODE_TARGET); |
| 659 } else { |
| 660 ParameterCount actual(r3); |
| 661 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper()); |
| 662 } |
| 663 |
| 664 // Store offset of return address for deoptimizer. |
| 665 if (!is_api_function) { |
| 666 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); |
| 667 } |
| 668 |
| 669 // Restore context from the frame. |
| 670 // r3: result |
| 671 // sp[0]: receiver |
| 672 // sp[1]: constructor function |
| 673 // sp[2]: number of arguments (smi-tagged) |
| 674 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 675 |
| 676 // If the result is an object (in the ECMA sense), we should get rid |
| 677 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 |
| 678 // on page 74. |
| 679 Label use_receiver, exit; |
| 680 |
| 681 // If the result is a smi, it is *not* an object in the ECMA sense. |
| 682 // r3: result |
| 683 // sp[0]: receiver (newly allocated object) |
| 684 // sp[1]: constructor function |
| 685 // sp[2]: number of arguments (smi-tagged) |
| 686 __ JumpIfSmi(r3, &use_receiver); |
| 687 |
| 688 // If the type of the result (stored in its map) is less than |
| 689 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. |
| 690 __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE); |
| 691 __ bge(&exit); |
| 692 |
| 693 // Throw away the result of the constructor invocation and use the |
| 694 // on-stack receiver as the result. |
| 695 __ bind(&use_receiver); |
| 696 __ LoadP(r3, MemOperand(sp)); |
| 697 |
| 698 // Remove receiver from the stack, remove caller arguments, and |
| 699 // return. |
| 700 __ bind(&exit); |
| 701 // r3: result |
| 702 // sp[0]: receiver (newly allocated object) |
| 703 // sp[1]: constructor function |
| 704 // sp[2]: number of arguments (smi-tagged) |
| 705 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize)); |
| 706 |
| 707 // Leave construct frame. |
| 708 } |
| 709 |
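 | // SmiToPtrArrayOffset turns the Smi-tagged argument count directly into
 | // a byte offset (argc * kPointerSize) with a single shift, since a Smi
 | // is just the integer value shifted left by the Smi shift.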
| 710 __ SmiToPtrArrayOffset(r4, r4); |
| 711 __ add(sp, sp, r4); |
| 712 __ addi(sp, sp, Operand(kPointerSize)); |
| 713 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5); |
| 714 __ blr(); |
| 715 } |
| 716 |
| 717 |
| 718 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { |
| 719 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new); |
| 720 } |
| 721 |
| 722 |
| 723 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { |
| 724 Generate_JSConstructStubHelper(masm, true, false); |
| 725 } |
| 726 |
| 727 |
| 728 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
| 729 bool is_construct) { |
| 730 // Called from Generate_JS_Entry |
| 731 // r3: code entry |
| 732 // r4: function |
| 733 // r5: receiver |
| 734 // r6: argc |
| 735 // r7: argv |
 | 736 // r0, r8-r9, cp may be clobbered
| 737 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
| 738 |
| 739 // Clear the context before we push it when entering the internal frame. |
| 740 __ li(cp, Operand::Zero()); |
| 741 |
| 742 // Enter an internal frame. |
| 743 { |
| 744 FrameScope scope(masm, StackFrame::INTERNAL); |
| 745 |
| 746 // Set up the context from the function argument. |
| 747 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
| 748 |
| 749 __ InitializeRootRegister(); |
| 750 |
| 751 // Push the function and the receiver onto the stack. |
| 752 __ push(r4); |
| 753 __ push(r5); |
| 754 |
| 755 // Copy arguments to the stack in a loop. |
| 756 // r4: function |
| 757 // r6: argc |
| 758 // r7: argv, i.e. points to first arg |
| 759 Label loop, entry; |
| 760 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2)); |
| 761 __ add(r5, r7, r0); |
| 762 // r5 points past last arg. |
| 763 __ b(&entry); |
| 764 __ bind(&loop); |
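 | // argv holds handle locations (Object**): the first LoadP fetches the
 | // next handle and the second dereferences it to obtain the argument that
 | // is actually pushed.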
| 765 __ LoadP(r8, MemOperand(r7)); // read next parameter |
| 766 __ addi(r7, r7, Operand(kPointerSize)); |
| 767 __ LoadP(r0, MemOperand(r8)); // dereference handle |
| 768 __ push(r0); // push parameter |
| 769 __ bind(&entry); |
| 770 __ cmp(r7, r5); |
| 771 __ bne(&loop); |
| 772 |
| 773 // Initialize all JavaScript callee-saved registers, since they will be seen |
| 774 // by the garbage collector as part of handlers. |
| 775 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); |
| 776 __ mr(r14, r7); |
| 777 __ mr(r15, r7); |
| 778 __ mr(r16, r7); |
| 779 __ mr(r17, r7); |
| 780 |
| 781 // Invoke the code and pass argc as r3. |
| 782 __ mr(r3, r6); |
| 783 if (is_construct) { |
| 784 // No type feedback cell is available |
| 785 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
| 786 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); |
| 787 __ CallStub(&stub); |
| 788 } else { |
| 789 ParameterCount actual(r3); |
| 790 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper()); |
| 791 } |
 | 792 // Exit the JS frame, remove the parameters (except the function), and
 | 793 // return.
| 794 } |
| 795 __ blr(); |
| 796 |
| 797 // r3: result |
| 798 } |
| 799 |
| 800 |
| 801 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
| 802 Generate_JSEntryTrampolineHelper(masm, false); |
| 803 } |
| 804 |
| 805 |
| 806 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 807 Generate_JSEntryTrampolineHelper(masm, true); |
| 808 } |
| 809 |
| 810 |
| 811 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
| 812 CallRuntimePassFunction(masm, Runtime::kCompileLazy); |
| 813 GenerateTailCallToReturnedCode(masm); |
| 814 } |
| 815 |
| 816 |
| 817 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { |
| 818 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 819 // Push a copy of the function onto the stack. |
| 820 // Push function as parameter to the runtime call. |
| 821 __ Push(r4, r4); |
| 822 // Whether to compile in a background thread. |
| 823 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); |
| 824 |
| 825 __ CallRuntime(Runtime::kCompileOptimized, 2); |
 | 826 // Restore the function.
| 827 __ pop(r4); |
| 828 } |
| 829 |
| 830 |
| 831 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 832 CallCompileOptimized(masm, false); |
| 833 GenerateTailCallToReturnedCode(masm); |
| 834 } |
| 835 |
| 836 |
| 837 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { |
| 838 CallCompileOptimized(masm, true); |
| 839 GenerateTailCallToReturnedCode(masm); |
| 840 } |
| 841 |
| 842 |
| 843 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { |
| 844 // For now, we are relying on the fact that make_code_young doesn't do any |
| 845 // garbage collection which allows us to save/restore the registers without |
| 846 // worrying about which of them contain pointers. We also don't build an |
| 847 // internal frame to make the code faster, since we shouldn't have to do stack |
| 848 // crawls in MakeCodeYoung. This seems a bit fragile. |
| 849 |
| 850 __ mflr(r3); |
| 851 // Adjust r3 to point to the start of the PlatformCodeAge sequence |
| 852 __ subi(r3, r3, Operand(kCodeAgingPatchDelta)); |
| 853 |
| 854 // The following registers must be saved and restored when calling through to |
| 855 // the runtime: |
| 856 // r3 - contains return address (beginning of patch sequence) |
| 857 // r4 - isolate |
| 858 // ip - return address |
| 859 FrameScope scope(masm, StackFrame::MANUAL); |
| 860 __ MultiPush(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
| 861 __ PrepareCallCFunction(2, 0, r5); |
| 862 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate()))); |
| 863 __ CallCFunction( |
| 864 ExternalReference::get_make_code_young_function(masm->isolate()), 2); |
| 865 __ MultiPop(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
| 866 __ mtlr(ip); |
| 867 __ Jump(r3); |
| 868 } |
| 869 |
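 | // CODE_AGE_LIST expands the generator below once per code age, emitting
 | // the even- and odd-marking MakeCodeYoungAgain builtins for each age.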
| 870 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ |
| 871 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ |
| 872 MacroAssembler* masm) { \ |
| 873 GenerateMakeCodeYoungAgainCommon(masm); \ |
| 874 } \ |
| 875 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ |
| 876 MacroAssembler* masm) { \ |
| 877 GenerateMakeCodeYoungAgainCommon(masm); \ |
| 878 } |
| 879 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) |
| 880 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR |
| 881 |
| 882 |
| 883 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { |
 | 884 // For now, we are relying on the fact that mark_code_as_executed doesn't do any
| 885 // garbage collection which allows us to save/restore the registers without |
| 886 // worrying about which of them contain pointers. We also don't build an |
| 887 // internal frame to make the code faster, since we shouldn't have to do stack |
| 888 // crawls in MakeCodeYoung. This seems a bit fragile. |
| 889 |
| 890 __ mflr(r3); |
| 891 // Adjust r3 to point to the start of the PlatformCodeAge sequence |
| 892 __ subi(r3, r3, Operand(kCodeAgingPatchDelta)); |
| 893 |
| 894 // The following registers must be saved and restored when calling through to |
| 895 // the runtime: |
| 896 // r3 - contains return address (beginning of patch sequence) |
| 897 // r4 - isolate |
| 898 // ip - return address |
| 899 FrameScope scope(masm, StackFrame::MANUAL); |
| 900 __ MultiPush(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
| 901 __ PrepareCallCFunction(2, 0, r5); |
| 902 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate()))); |
| 903 __ CallCFunction( |
| 904 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), |
| 905 2); |
| 906 __ MultiPop(ip.bit() | r3.bit() | r4.bit() | fp.bit()); |
| 907 __ mtlr(ip); |
| 908 |
| 909 // Perform prologue operations usually performed by the young code stub. |
| 910 __ PushFixedFrame(r4); |
| 911 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 912 |
| 913 // Jump to point after the code-age stub. |
| 914 __ addi(r3, r3, Operand(kNoCodeAgeSequenceLength)); |
| 915 __ Jump(r3); |
| 916 } |
| 917 |
| 918 |
| 919 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { |
| 920 GenerateMakeCodeYoungAgainCommon(masm); |
| 921 } |
| 922 |
| 923 |
| 924 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, |
| 925 SaveFPRegsMode save_doubles) { |
| 926 { |
| 927 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 928 |
| 929 // Preserve registers across notification, this is important for compiled |
| 930 // stubs that tail call the runtime on deopts passing their parameters in |
| 931 // registers. |
| 932 __ MultiPush(kJSCallerSaved | kCalleeSaved); |
| 933 // Pass the function and deoptimization type to the runtime system. |
| 934 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); |
| 935 __ MultiPop(kJSCallerSaved | kCalleeSaved); |
| 936 } |
| 937 |
| 938 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state |
| 939 __ blr(); // Jump to miss handler |
| 940 } |
| 941 |
| 942 |
| 943 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { |
| 944 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); |
| 945 } |
| 946 |
| 947 |
| 948 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { |
| 949 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); |
| 950 } |
| 951 |
| 952 |
| 953 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
| 954 Deoptimizer::BailoutType type) { |
| 955 { |
| 956 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 957 // Pass the function and deoptimization type to the runtime system. |
| 958 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); |
| 959 __ push(r3); |
| 960 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
| 961 } |
| 962 |
| 963 // Get the full codegen state from the stack and untag it -> r9. |
| 964 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize)); |
| 965 __ SmiUntag(r9); |
| 966 // Switch on the state. |
| 967 Label with_tos_register, unknown_state; |
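 | // NO_REGISTERS means no value was live at the deopt point; TOS_REG means
 | // the top-of-stack value is live and must be reloaded into r3 below
 | // before returning.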
| 968 __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS)); |
| 969 __ bne(&with_tos_register); |
| 970 __ addi(sp, sp, Operand(1 * kPointerSize)); // Remove state. |
| 971 __ Ret(); |
| 972 |
| 973 __ bind(&with_tos_register); |
| 974 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize)); |
| 975 __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG)); |
| 976 __ bne(&unknown_state); |
| 977 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state. |
| 978 __ Ret(); |
| 979 |
| 980 __ bind(&unknown_state); |
| 981 __ stop("no cases left"); |
| 982 } |
| 983 |
| 984 |
| 985 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
| 986 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
| 987 } |
| 988 |
| 989 |
| 990 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { |
| 991 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); |
| 992 } |
| 993 |
| 994 |
| 995 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
| 996 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
| 997 } |
| 998 |
| 999 |
| 1000 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { |
| 1001 // Lookup the function in the JavaScript frame. |
| 1002 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1003 { |
| 1004 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1005 // Pass function as argument. |
| 1006 __ push(r3); |
| 1007 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 1008 } |
| 1009 |
| 1010 // If the code object is null, just return to the unoptimized code. |
| 1011 Label skip; |
| 1012 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0); |
| 1013 __ bne(&skip); |
| 1014 __ Ret(); |
| 1015 |
| 1016 __ bind(&skip); |
| 1017 |
| 1018 // Load deoptimization data from the code object. |
| 1019 // <deopt_data> = <code>[#deoptimization_data_offset] |
| 1020 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset)); |
| 1021 |
| 1022 #if V8_OOL_CONSTANT_POOL |
| 1023 { |
| 1024 ConstantPoolUnavailableScope constant_pool_unavailable(masm); |
| 1025 __ LoadP(kConstantPoolRegister, |
| 1026 FieldMemOperand(r3, Code::kConstantPoolOffset)); |
| 1027 #endif |
| 1028 |
| 1029 // Load the OSR entrypoint offset from the deoptimization data. |
| 1030 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] |
| 1031 __ LoadP(r4, FieldMemOperand( |
| 1032 r4, FixedArray::OffsetOfElementAt( |
| 1033 DeoptimizationInputData::kOsrPcOffsetIndex))); |
| 1034 __ SmiUntag(r4); |
| 1035 |
| 1036 // Compute the target address = code_obj + header_size + osr_offset |
| 1037 // <entry_addr> = <code_obj> + #header_size + <osr_offset> |
| 1038 __ add(r3, r3, r4); |
| 1039 __ addi(r0, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 1040 __ mtlr(r0); |
| 1041 |
| 1042 // And "return" to the OSR entry point of the function. |
| 1043 __ Ret(); |
| 1044 #if V8_OOL_CONSTANT_POOL |
| 1045 } |
| 1046 #endif |
| 1047 } |
| 1048 |
| 1049 |
| 1050 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { |
| 1051 // We check the stack limit as indicator that recompilation might be done. |
| 1052 Label ok; |
| 1053 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 1054 __ cmpl(sp, ip); |
| 1055 __ bge(&ok); |
| 1056 { |
| 1057 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1058 __ CallRuntime(Runtime::kStackGuard, 0); |
| 1059 } |
| 1060 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), |
| 1061 RelocInfo::CODE_TARGET); |
| 1062 |
| 1063 __ bind(&ok); |
| 1064 __ Ret(); |
| 1065 } |
| 1066 |
| 1067 |
| 1068 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| 1069 // 1. Make sure we have at least one argument. |
| 1070 // r3: actual number of arguments |
| 1071 { |
| 1072 Label done; |
| 1073 __ cmpi(r3, Operand::Zero()); |
| 1074 __ bne(&done); |
| 1075 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
| 1076 __ push(r5); |
| 1077 __ addi(r3, r3, Operand(1)); |
| 1078 __ bind(&done); |
| 1079 } |
| 1080 |
| 1081 // 2. Get the function to call (passed as receiver) from the stack, check |
| 1082 // if it is a function. |
| 1083 // r3: actual number of arguments |
| 1084 Label slow, non_function; |
| 1085 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2)); |
| 1086 __ add(r4, sp, r4); |
| 1087 __ LoadP(r4, MemOperand(r4)); |
| 1088 __ JumpIfSmi(r4, &non_function); |
| 1089 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
| 1090 __ bne(&slow); |
| 1091 |
| 1092 // 3a. Patch the first argument if necessary when calling a function. |
| 1093 // r3: actual number of arguments |
| 1094 // r4: function |
| 1095 Label shift_arguments; |
| 1096 __ li(r7, Operand::Zero()); // indicate regular JS_FUNCTION |
| 1097 { |
| 1098 Label convert_to_object, use_global_proxy, patch_receiver; |
| 1099 // Change context eagerly in case we need the global receiver. |
| 1100 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
| 1101 |
| 1102 // Do not transform the receiver for strict mode functions. |
| 1103 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
| 1104 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset)); |
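 | // On 32-bit targets the compiler-hints word is Smi-tagged, so each flag
 | // sits kSmiTagSize bits higher than on PPC64; the #if below picks the
 | // matching bit position.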
| 1105 __ TestBit(r6, |
| 1106 #if V8_TARGET_ARCH_PPC64 |
| 1107 SharedFunctionInfo::kStrictModeFunction, |
| 1108 #else |
| 1109 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize, |
| 1110 #endif |
| 1111 r0); |
| 1112 __ bne(&shift_arguments, cr0); |
| 1113 |
 | 1114 // Do not transform the receiver for natives (compiler hints already in r6).
| 1115 __ TestBit(r6, |
| 1116 #if V8_TARGET_ARCH_PPC64 |
| 1117 SharedFunctionInfo::kNative, |
| 1118 #else |
| 1119 SharedFunctionInfo::kNative + kSmiTagSize, |
| 1120 #endif |
| 1121 r0); |
| 1122 __ bne(&shift_arguments, cr0); |
| 1123 |
| 1124 // Compute the receiver in sloppy mode. |
| 1125 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
| 1126 __ add(r5, sp, ip); |
| 1127 __ LoadP(r5, MemOperand(r5, -kPointerSize)); |
| 1128 // r3: actual number of arguments |
| 1129 // r4: function |
| 1130 // r5: first argument |
| 1131 __ JumpIfSmi(r5, &convert_to_object); |
| 1132 |
| 1133 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
| 1134 __ cmp(r5, r6); |
| 1135 __ beq(&use_global_proxy); |
| 1136 __ LoadRoot(r6, Heap::kNullValueRootIndex); |
| 1137 __ cmp(r5, r6); |
| 1138 __ beq(&use_global_proxy); |
| 1139 |
| 1140 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1141 __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE); |
| 1142 __ bge(&shift_arguments); |
| 1143 |
| 1144 __ bind(&convert_to_object); |
| 1145 |
| 1146 { |
| 1147 // Enter an internal frame in order to preserve argument count. |
| 1148 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 1149 __ SmiTag(r3); |
| 1150 __ Push(r3, r5); |
| 1151 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 1152 __ mr(r5, r3); |
| 1153 |
| 1154 __ pop(r3); |
| 1155 __ SmiUntag(r3); |
| 1156 |
| 1157 // Exit the internal frame. |
| 1158 } |
| 1159 |
| 1160 // Restore the function to r4, and the flag to r7. |
| 1161 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2)); |
| 1162 __ add(r7, sp, r7); |
| 1163 __ LoadP(r4, MemOperand(r7)); |
| 1164 __ li(r7, Operand::Zero()); |
| 1165 __ b(&patch_receiver); |
| 1166 |
| 1167 __ bind(&use_global_proxy); |
| 1168 __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 1169 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset)); |
| 1170 |
| 1171 __ bind(&patch_receiver); |
| 1172 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
| 1173 __ add(r6, sp, ip); |
| 1174 __ StoreP(r5, MemOperand(r6, -kPointerSize)); |
| 1175 |
| 1176 __ b(&shift_arguments); |
| 1177 } |
| 1178 |
| 1179 // 3b. Check for function proxy. |
| 1180 __ bind(&slow); |
| 1181 __ li(r7, Operand(1, RelocInfo::NONE32)); // indicate function proxy |
| 1182 __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 1183 __ beq(&shift_arguments); |
| 1184 __ bind(&non_function); |
| 1185 __ li(r7, Operand(2, RelocInfo::NONE32)); // indicate non-function |
| 1186 |
| 1187 // 3c. Patch the first argument when calling a non-function. The |
| 1188 // CALL_NON_FUNCTION builtin expects the non-function callee as |
| 1189 // receiver, so overwrite the first argument which will ultimately |
| 1190 // become the receiver. |
| 1191 // r3: actual number of arguments |
| 1192 // r4: function |
| 1193 // r7: call type (0: JS function, 1: function proxy, 2: non-function) |
| 1194 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
| 1195 __ add(r5, sp, ip); |
| 1196 __ StoreP(r4, MemOperand(r5, -kPointerSize)); |
| 1197 |
| 1198 // 4. Shift arguments and return address one slot down on the stack |
| 1199 // (overwriting the original receiver). Adjust argument count to make |
| 1200 // the original first argument the new receiver. |
| 1201 // r3: actual number of arguments |
| 1202 // r4: function |
| 1203 // r7: call type (0: JS function, 1: function proxy, 2: non-function) |
| 1204 __ bind(&shift_arguments); |
| 1205 { |
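 | // Each slot is overwritten by the one just below it, sliding all
 | // arguments one slot toward the receiver; e.g. with argc == 2 the stack
 | // [last, first, receiver] becomes [last, last, first], the stale top
 | // copy is popped, and the original first argument is the new receiver.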
| 1206 Label loop; |
| 1207 // Calculate the copy start address (destination). Copy end address is sp. |
| 1208 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); |
| 1209 __ add(r5, sp, ip); |
| 1210 |
| 1211 __ bind(&loop); |
| 1212 __ LoadP(ip, MemOperand(r5, -kPointerSize)); |
| 1213 __ StoreP(ip, MemOperand(r5)); |
| 1214 __ subi(r5, r5, Operand(kPointerSize)); |
| 1215 __ cmp(r5, sp); |
| 1216 __ bne(&loop); |
| 1217 // Adjust the actual number of arguments and remove the top element |
| 1218 // (which is a copy of the last argument). |
| 1219 __ subi(r3, r3, Operand(1)); |
| 1220 __ pop(); |
| 1221 } |
| 1222 |
| 1223 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, |
| 1224 // or a function proxy via CALL_FUNCTION_PROXY. |
| 1225 // r3: actual number of arguments |
| 1226 // r4: function |
| 1227 // r7: call type (0: JS function, 1: function proxy, 2: non-function) |
| 1228 { |
| 1229 Label function, non_proxy; |
| 1230 __ cmpi(r7, Operand::Zero()); |
| 1231 __ beq(&function); |
| 1232 // Expected number of arguments is 0 for CALL_NON_FUNCTION. |
| 1233 __ li(r5, Operand::Zero()); |
| 1234 __ cmpi(r7, Operand(1)); |
| 1235 __ bne(&non_proxy); |
| 1236 |
| 1237 __ push(r4); // re-add proxy object as additional argument |
| 1238 __ addi(r3, r3, Operand(1)); |
| 1239 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY); |
| 1240 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1241 RelocInfo::CODE_TARGET); |
| 1242 |
| 1243 __ bind(&non_proxy); |
| 1244 __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION); |
| 1245 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1246 RelocInfo::CODE_TARGET); |
| 1247 __ bind(&function); |
| 1248 } |
| 1249 |
| 1250 // 5b. Get the code to call from the function and check that the number of |
| 1251 // expected arguments matches what we're providing. If so, jump |
 | 1252 // (tail-call) to the code in register r6 without checking arguments.
| 1253 // r3: actual number of arguments |
| 1254 // r4: function |
| 1255 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
| 1256 __ LoadWordArith( |
| 1257 r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 1258 #if !V8_TARGET_ARCH_PPC64 |
| 1259 __ SmiUntag(r5); |
| 1260 #endif |
| 1261 __ cmp(r5, r3); // Check formal and actual parameter counts. |
| 1262 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1263 RelocInfo::CODE_TARGET, ne); |
| 1264 |
| 1265 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); |
| 1266 ParameterCount expected(0); |
| 1267 __ InvokeCode(r6, expected, expected, JUMP_FUNCTION, NullCallWrapper()); |
| 1268 } |
| 1269 |
| 1270 |
| 1271 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
| 1272 const int kIndexOffset = |
| 1273 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); |
| 1274 const int kLimitOffset = |
| 1275 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); |
| 1276 const int kArgsOffset = 2 * kPointerSize; |
| 1277 const int kRecvOffset = 3 * kPointerSize; |
| 1278 const int kFunctionOffset = 4 * kPointerSize; |
| 1279 |
| 1280 { |
| 1281 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); |
| 1282 |
| 1283 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function |
| 1284 __ push(r3); |
| 1285 __ LoadP(r3, MemOperand(fp, kArgsOffset)); // get the args array |
| 1286 __ push(r3); |
| 1287 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
| 1288 |
| 1289 // Check the stack for overflow. We are not trying to catch |
| 1290 // interruptions (e.g. debug break and preemption) here, so the "real stack |
| 1291 // limit" is checked. |
| 1292 Label okay; |
| 1293 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); |
| 1294 // Make r5 the space we have left. The stack might already be overflowed |
| 1295 // here which will cause r5 to become negative. |
| 1296 __ sub(r5, sp, r5); |
| 1297 // Check if the arguments will overflow the stack. |
| 1298 __ SmiToPtrArrayOffset(r0, r3); |
| 1299 __ cmp(r5, r0); |
| 1300 __ bgt(&okay); // Signed comparison. |
| 1301 |
| 1302 // Out of stack space. |
| 1303 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
| 1304 __ Push(r4, r3); |
| 1305 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); |
| 1306 // End of stack check. |
| 1307 |
| 1308 // Push current limit and index. |
| 1309 __ bind(&okay); |
| 1310 __ li(r4, Operand::Zero()); |
| 1311 __ Push(r3, r4); // limit and initial index. |
| 1312 |
| 1313 // Get the receiver. |
| 1314 __ LoadP(r3, MemOperand(fp, kRecvOffset)); |
| 1315 |
| 1316 // Check that the function is a JS function (otherwise it must be a proxy). |
| 1317 Label push_receiver; |
| 1318 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
| 1319 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
| 1320 __ bne(&push_receiver); |
| 1321 |
| 1322 // Change context eagerly to get the right global object if necessary. |
| 1323 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); |
| 1324 // Load the shared function info while the function is still in r4. |
| 1325 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); |
| 1326 |
| 1327 // Compute the receiver. |
| 1328 // Do not transform the receiver for strict mode functions. |
| 1329 Label call_to_object, use_global_proxy; |
| 1330 __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset)); |
| 1331 __ TestBit(r5, |
| 1332 #if V8_TARGET_ARCH_PPC64 |
| 1333 SharedFunctionInfo::kStrictModeFunction, |
| 1334 #else |
| 1335 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize, |
| 1336 #endif |
| 1337 r0); |
| 1338 __ bne(&push_receiver, cr0); |
| 1339 |
 | 1340 // Do not transform the receiver for natives (compiler hints already in r5).
| 1341 __ TestBit(r5, |
| 1342 #if V8_TARGET_ARCH_PPC64 |
| 1343 SharedFunctionInfo::kNative, |
| 1344 #else |
| 1345 SharedFunctionInfo::kNative + kSmiTagSize, |
| 1346 #endif |
| 1347 r0); |
| 1348 __ bne(&push_receiver, cr0); |
| 1349 |
| 1350 // Compute the receiver in sloppy mode. |
| 1351 __ JumpIfSmi(r3, &call_to_object); |
| 1352 __ LoadRoot(r4, Heap::kNullValueRootIndex); |
| 1353 __ cmp(r3, r4); |
| 1354 __ beq(&use_global_proxy); |
| 1355 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 1356 __ cmp(r3, r4); |
| 1357 __ beq(&use_global_proxy); |
| 1358 |
| 1359 // Check if the receiver is already a JavaScript object. |
| 1360 // r3: receiver |
| 1361 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1362 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE); |
| 1363 __ bge(&push_receiver); |
| 1364 |
| 1365 // Convert the receiver to a regular object. |
| 1366 // r3: receiver |
| 1367 __ bind(&call_to_object); |
| 1368 __ push(r3); |
| 1369 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 1370 __ b(&push_receiver); |
| 1371 |
| 1372 __ bind(&use_global_proxy); |
| 1373 __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 1374 __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset)); |
| 1375 |
| 1376 // Push the receiver. |
| 1377 // r3: receiver |
| 1378 __ bind(&push_receiver); |
| 1379 __ push(r3); |
| 1380 |
| 1381 // Copy all arguments from the array to the stack. |
| 1382 Label entry, loop; |
| 1383 __ LoadP(r3, MemOperand(fp, kIndexOffset)); |
| 1384 __ b(&entry); |
| 1385 |
| 1386 // Load the current argument from the arguments array and push it to the |
| 1387 // stack. |
| 1388 // r3: current argument index |
| 1389 __ bind(&loop); |
| 1390 __ LoadP(r4, MemOperand(fp, kArgsOffset)); |
| 1391 __ Push(r4, r3); |
| 1392 |
| 1393 // Call the runtime to access the property in the arguments array. |
| 1394 __ CallRuntime(Runtime::kGetProperty, 2); |
| 1395 __ push(r3); |
| 1396 |
 | 1397 // Advance the index stored on the stack.
| 1398 __ LoadP(r3, MemOperand(fp, kIndexOffset)); |
| 1399 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0); |
| 1400 __ StoreP(r3, MemOperand(fp, kIndexOffset)); |
| 1401 |
| 1402 // Test if the copy loop has finished copying all the elements from the |
| 1403 // arguments object. |
| 1404 __ bind(&entry); |
| 1405 __ LoadP(r4, MemOperand(fp, kLimitOffset)); |
| 1406 __ cmp(r3, r4); |
| 1407 __ bne(&loop); |
| 1408 |
| 1409 // Call the function. |
| 1410 Label call_proxy; |
| 1411 ParameterCount actual(r3); |
| 1412 __ SmiUntag(r3); |
| 1413 __ LoadP(r4, MemOperand(fp, kFunctionOffset)); |
| 1414 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
| 1415 __ bne(&call_proxy); |
| 1416 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper()); |
| 1417 |
| 1418 frame_scope.GenerateLeaveFrame(); |
| 1419 __ addi(sp, sp, Operand(3 * kPointerSize)); |
| 1420 __ blr(); |
| 1421 |
| 1422 // Call the function proxy. |
| 1423 __ bind(&call_proxy); |
| 1424 __ push(r4); // add function proxy as last argument |
| 1425 __ addi(r3, r3, Operand(1)); |
| 1426 __ li(r5, Operand::Zero()); |
| 1427 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY); |
| 1428 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 1429 RelocInfo::CODE_TARGET); |
| 1430 |
| 1431 // Tear down the internal frame and remove function, receiver and args. |
| 1432 } |
| 1433 __ addi(sp, sp, Operand(3 * kPointerSize)); |
| 1434 __ blr(); |
| 1435 } |
| 1436 |
| 1437 |
| 1438 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, |
| 1439 Label* stack_overflow) { |
| 1440 // ----------- S t a t e ------------- |
| 1441 // -- r3 : actual number of arguments |
| 1442 // -- r4 : function (passed through to callee) |
| 1443 // -- r5 : expected number of arguments |
| 1444 // ----------------------------------- |
| 1445 // Check the stack for overflow. We are not trying to catch |
| 1446 // interruptions (e.g. debug break and preemption) here, so the "real stack |
| 1447 // limit" is checked. |
| 1448 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex); |
| 1449 // Make r8 the space we have left. The stack might already be overflowed |
| 1450 // here which will cause r8 to become negative. |
| 1451 __ sub(r8, sp, r8); |
| 1452 // Check if the arguments will overflow the stack. |
| 1453 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); |
| 1454 __ cmp(r8, r0); |
| 1455 __ ble(stack_overflow); // Signed comparison. |
| 1456 } |
| 1457 |
| 1458 |
| 1459 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1460 __ SmiTag(r3); |
| 1461 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 1462 __ mflr(r0); |
| 1463 __ push(r0); |
| 1464 #if V8_OOL_CONSTANT_POOL |
| 1465 __ Push(fp, kConstantPoolRegister, r7, r4, r3); |
| 1466 #else |
| 1467 __ Push(fp, r7, r4, r3); |
| 1468 #endif |
| 1469 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1470 kPointerSize)); |
| 1471 } |
| 1472 |
| 1473 |
| 1474 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1475 // ----------- S t a t e ------------- |
| 1476 // -- r3 : result being passed through |
| 1477 // ----------------------------------- |
| 1478 // Get the number of arguments passed (as a smi), tear down the frame and |
| 1479 // then tear down the parameters. |
| 1480 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1481 kPointerSize))); |
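 | // This reloads the Smi-tagged argument count, which was the last fixed
 | // slot pushed by EnterArgumentsAdaptorFrame and so sits just below fp's
 | // fixed-frame area.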
 | 1482 // We could use LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR) here;
 | 1483 // however, the sequence below is slightly more optimal.
| 1484 #if V8_OOL_CONSTANT_POOL |
| 1485 __ addi(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset)); |
| 1486 __ LoadP(kConstantPoolRegister, MemOperand(sp)); |
| 1487 __ LoadP(fp, MemOperand(sp, kPointerSize)); |
| 1488 __ LoadP(r0, MemOperand(sp, 2 * kPointerSize)); |
| 1489 int slots = 3; // adjust for kConstantPoolRegister + fp + lr below |
| 1490 #else |
| 1491 __ mr(sp, fp); |
| 1492 __ LoadP(fp, MemOperand(sp)); |
| 1493 __ LoadP(r0, MemOperand(sp, kPointerSize)); |
| 1494 int slots = 2; // adjust for fp + lr below |
| 1495 #endif |
| 1496 __ mtlr(r0); |
| 1497 __ SmiToPtrArrayOffset(r0, r4); |
| 1498 __ add(sp, sp, r0); |
| 1499 __ addi(sp, sp, |
| 1500 Operand((1 + slots) * kPointerSize)); // adjust for receiver + others |
| 1501 } |
| 1502 |
| 1503 |
| 1504 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
| 1505 // ----------- S t a t e ------------- |
| 1506 // -- r3 : actual number of arguments |
| 1507 // -- r4 : function (passed through to callee) |
| 1508 // -- r5 : expected number of arguments |
| 1509 // ----------------------------------- |
| 1510 |
| 1511 Label stack_overflow; |
| 1512 ArgumentAdaptorStackCheck(masm, &stack_overflow); |
| 1513 Label invoke, dont_adapt_arguments; |
| 1514 |
| 1515 Label enough, too_few; |
| 1516 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); |
| 1517 __ cmp(r3, r5); |
| 1518 __ blt(&too_few); |
| 1519 __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); |
| 1520 __ beq(&dont_adapt_arguments); |
| 1521 |
| 1522 { // Enough parameters: actual >= expected |
| 1523 __ bind(&enough); |
| 1524 EnterArgumentsAdaptorFrame(masm); |
| 1525 |
| 1526 // Calculate copy start address into r3 and copy end address into r5. |
| 1527 // r3: actual number of arguments as a smi |
| 1528 // r4: function |
| 1529 // r5: expected number of arguments |
| 1530 // r6: code entry to call |
| 1531 __ SmiToPtrArrayOffset(r3, r3); |
| 1532 __ add(r3, r3, fp); |
| 1533 // adjust for return address and receiver |
| 1534 __ addi(r3, r3, Operand(2 * kPointerSize)); |
| 1535 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2)); |
| 1536 __ sub(r5, r3, r5); |
| 1537 |
| 1538 // Copy the arguments (including the receiver) to the new stack frame. |
| 1539 // r3: copy start address |
| 1540 // r4: function |
| 1541 // r5: copy end address |
| 1542 // r6: code entry to call |
| 1543 |
| 1544 Label copy; |
 | 1545 __ bind(&copy);
| 1546 __ LoadP(ip, MemOperand(r3, 0)); |
| 1547 __ push(ip); |
| 1548 __ cmp(r3, r5); // Compare before moving to next argument. |
| 1549 __ subi(r3, r3, Operand(kPointerSize)); |
 | 1550 __ bne(&copy);
| 1551 |
| 1552 __ b(&invoke); |
| 1553 } |
| 1554 |
| 1555 { // Too few parameters: Actual < expected |
| 1556 __ bind(&too_few); |
| 1557 EnterArgumentsAdaptorFrame(masm); |
| 1558 |
 | 1559 // Calculate the copy start address into r3; the copy end address is fp.
| 1560 // r3: actual number of arguments as a smi |
| 1561 // r4: function |
| 1562 // r5: expected number of arguments |
| 1563 // r6: code entry to call |
| 1564 __ SmiToPtrArrayOffset(r3, r3); |
| 1565 __ add(r3, r3, fp); |
| 1566 |
| 1567 // Copy the arguments (including the receiver) to the new stack frame. |
| 1568 // r3: copy start address |
| 1569 // r4: function |
| 1570 // r5: expected number of arguments |
| 1571 // r6: code entry to call |
| 1572 Label copy; |
 | 1573 __ bind(&copy);
| 1574 // Adjust load for return address and receiver. |
| 1575 __ LoadP(ip, MemOperand(r3, 2 * kPointerSize)); |
| 1576 __ push(ip); |
| 1577 __ cmp(r3, fp); // Compare before moving to next argument. |
| 1578 __ subi(r3, r3, Operand(kPointerSize)); |
 | 1579 __ bne(&copy);
| 1580 |
| 1581 // Fill the remaining expected arguments with undefined. |
| 1582 // r4: function |
| 1583 // r5: expected number of arguments |
| 1584 // r6: code entry to call |
| 1585 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 1586 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2)); |
| 1587 __ sub(r5, fp, r5); |
| 1588 // Adjust for frame. |
| 1589 __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1590 2 * kPointerSize)); |
| 1591 |
| 1592 Label fill; |
| 1593 __ bind(&fill); |
| 1594 __ push(ip); |
| 1595 __ cmp(sp, r5); |
| 1596 __ bne(&fill); |
| 1597 } |
| 1598 |
| 1599 // Call the entry point. |
| 1600 __ bind(&invoke); |
| 1601 __ Call(r6); |
| 1602 |
| 1603 // Store offset of return address for deoptimizer. |
| 1604 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); |
| 1605 |
| 1606 // Exit frame and return. |
| 1607 LeaveArgumentsAdaptorFrame(masm); |
| 1608 __ blr(); |
| 1609 |
| 1610 |
| 1611 // ------------------------------------------- |
 | 1612 // Don't adapt arguments.
| 1613 // ------------------------------------------- |
| 1614 __ bind(&dont_adapt_arguments); |
| 1615 __ Jump(r6); |
| 1616 |
| 1617 __ bind(&stack_overflow); |
| 1618 { |
| 1619 FrameScope frame(masm, StackFrame::MANUAL); |
| 1620 EnterArgumentsAdaptorFrame(masm); |
| 1621 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); |
| 1622 __ bkpt(0); |
| 1623 } |
| 1624 } |
| 1625 |
| 1626 |
| 1627 #undef __ |
| 1628 } |
| 1629 } // namespace v8::internal |
| 1630 |
| 1631 #endif // V8_TARGET_ARCH_PPC |