| 1 // Copyright 2013 the V8 project authors. All rights reserved. | |
| 2 // Redistribution and use in source and binary forms, with or without | |
| 3 // modification, are permitted provided that the following conditions are | |
| 4 // met: | |
| 5 // | |
| 6 // * Redistributions of source code must retain the above copyright | |
| 7 // notice, this list of conditions and the following disclaimer. | |
| 8 // * Redistributions in binary form must reproduce the above | |
| 9 // copyright notice, this list of conditions and the following | |
| 10 // disclaimer in the documentation and/or other materials provided | |
| 11 // with the distribution. | |
| 12 // * Neither the name of Google Inc. nor the names of its | |
| 13 // contributors may be used to endorse or promote products derived | |
| 14 // from this software without specific prior written permission. | |
| 15 // | |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
| 27 | |
| 28 #include "v8.h" | |
| 29 | |
| 30 #if V8_TARGET_ARCH_A64 | |
| 31 | |
| 32 #include "codegen.h" | |
| 33 #include "debug.h" | |
| 34 #include "deoptimizer.h" | |
| 35 #include "full-codegen.h" | |
| 36 #include "runtime.h" | |
| 37 #include "stub-cache.h" | |
| 38 | |
| 39 namespace v8 { | |
| 40 namespace internal { | |
| 41 | |
| 42 | |
| 43 #define __ ACCESS_MASM(masm) | |
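| // The '__' shorthand routes each assembler call through ACCESS_MASM, so | |
| // '__ Ldr(x0, ...)' below expands to 'masm->Ldr(x0, ...)'. | |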
| 44 | |
| 45 | |
| 46 // Load the built-in Array function from the current context. | |
| 47 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { | |
| 48 // Load the native context. | |
| 49 __ Ldr(result, GlobalObjectMemOperand()); | |
| 50 __ Ldr(result, | |
| 51 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); | |
| 52 // Load the Array function from the native context. | |
| 53 __ Ldr(result, | |
| 54 MemOperand(result, | |
| 55 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); | |
| 56 } | |
| 57 | |
| 58 | |
| 59 // Load the built-in InternalArray function from the current context. | |
| 60 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm, | |
| 61 Register result) { | |
| 62 // Load the native context. | |
| 63 __ Ldr(result, GlobalObjectMemOperand()); | |
| 64 __ Ldr(result, | |
| 65 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); | |
| 66 // Load the InternalArray function from the native context. | |
| 67 __ Ldr(result, ContextMemOperand(result, | |
| 68 Context::INTERNAL_ARRAY_FUNCTION_INDEX)); | |
| 69 } | |
| 70 | |
| 71 | |
| 72 void Builtins::Generate_Adaptor(MacroAssembler* masm, | |
| 73 CFunctionId id, | |
| 74 BuiltinExtraArguments extra_args) { | |
| 75 // ----------- S t a t e ------------- | |
| 76 // -- x0 : number of arguments excluding receiver | |
| 77 // -- x1 : called function (only guaranteed when | |
| 78 // extra_args requires it) | |
| 79 // -- cp : context | |
| 80 // -- sp[0] : last argument | |
| 81 // -- ... | |
| 82 // -- sp[8 * (argc - 1)] : first argument (argc == x0) | |
| 83 // -- sp[8 * argc] : receiver | |
| 84 // ----------------------------------- | |
| 85 | |
| 86 // Insert extra arguments. | |
| 87 int num_extra_args = 0; | |
| 88 if (extra_args == NEEDS_CALLED_FUNCTION) { | |
| 89 num_extra_args = 1; | |
| 90 __ Push(x1); | |
| 91 } else { | |
| 92 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); | |
| 93 } | |
| 94 | |
| 95 // JumpToExternalReference expects x0 to contain the number of arguments | |
| 96 // including the receiver and the extra arguments. | |
| 97 __ Add(x0, x0, num_extra_args + 1); | |
| 98 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); | |
| 99 } | |
| 100 | |
| 101 | |
| 102 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | |
| 103 // ----------- S t a t e ------------- | |
| 104 // -- x0 : number of arguments | |
| 105 // -- lr : return address | |
| 106 // -- sp[...]: constructor arguments | |
| 107 // ----------------------------------- | |
| 108 ASM_LOCATION("Builtins::Generate_InternalArrayCode"); | |
| 109 Label generic_array_code; | |
| 110 | |
| 111 // Get the InternalArray function. | |
| 112 GenerateLoadInternalArrayFunction(masm, x1); | |
| 113 | |
| 114 if (FLAG_debug_code) { | |
| 115 // The initial map for the builtin InternalArray function should be a map. | |
| 116 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); | |
| 117 __ Tst(x10, kSmiTagMask); | |
| 118 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction); | |
| 119 __ CompareObjectType(x10, x11, x12, MAP_TYPE); | |
| 120 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); | |
| 121 } | |
| 122 | |
| 123 // Run the native code for the InternalArray function called as a normal | |
| 124 // function. | |
| 125 InternalArrayConstructorStub stub(masm->isolate()); | |
| 126 __ TailCallStub(&stub); | |
| 127 } | |
| 128 | |
| 129 | |
| 130 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | |
| 131 // ----------- S t a t e ------------- | |
| 132 // -- x0 : number of arguments | |
| 133 // -- lr : return address | |
| 134 // -- sp[...]: constructor arguments | |
| 135 // ----------------------------------- | |
| 136 ASM_LOCATION("Builtins::Generate_ArrayCode"); | |
| 137 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | |
| 138 | |
| 139 // Get the Array function. | |
| 140 GenerateLoadArrayFunction(masm, x1); | |
| 141 | |
| 142 if (FLAG_debug_code) { | |
| 143 // The initial map for the builtin Array function should be a map. | |
| 144 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); | |
| 145 __ Tst(x10, kSmiTagMask); | |
| 146 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); | |
| 147 __ CompareObjectType(x10, x11, x12, MAP_TYPE); | |
| 148 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | |
| 149 } | |
| 150 | |
| 151 // Run the native code for the Array function called as a normal function. | |
| 152 Handle<Object> undefined_sentinel( | |
| 153 masm->isolate()->heap()->undefined_value(), | |
| 154 masm->isolate()); | |
| 155 __ Mov(x2, Operand(undefined_sentinel)); | |
| 156 ArrayConstructorStub stub(masm->isolate()); | |
| 157 __ TailCallStub(&stub); | |
| 158 } | |
| 159 | |
| 160 | |
| 161 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { | |
| 162 // ----------- S t a t e ------------- | |
| 163 // -- x0 : number of arguments | |
| 164 // -- x1 : constructor function | |
| 165 // -- lr : return address | |
| 166 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) | |
| 167 // -- sp[argc * 8] : receiver | |
| 168 // ----------------------------------- | |
| 169 ASM_LOCATION("Builtins::Generate_StringConstructCode"); | |
| 170 Counters* counters = masm->isolate()->counters(); | |
| 171 __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11); | |
| 172 | |
| 173 Register argc = x0; | |
| 174 Register function = x1; | |
| 175 if (FLAG_debug_code) { | |
| 176 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10); | |
| 177 __ Cmp(function, x10); | |
| 178 __ Assert(eq, kUnexpectedStringFunction); | |
| 179 } | |
| 180 | |
| 181 // Load the first argument into x0 and get rid of the rest. | |
| 182 Label no_arguments; | |
| 183 __ Cbz(argc, &no_arguments); | |
| 184 // The first argument is at sp[(argc - 1) * 8]. | |
| 185 __ Sub(argc, argc, 1); | |
| 186 __ Claim(argc, kXRegSizeInBytes); | |
| 187 // jssp now points to args[0]; load and drop args[0] + receiver. | |
| 188 // TODO(jbramley): Consider adding ClaimAndPoke. | |
| 189 __ Ldr(argc, MemOperand(jssp, 2 * kPointerSize, PostIndex)); | |
| 190 | |
| 191 Register argument = x2; | |
| 192 Label not_cached, argument_is_string; | |
| 193 __ LookupNumberStringCache(argc, // Input. | |
| 194 argument, // Result. | |
| 195 x10, // Scratch. | |
| 196 x11, // Scratch. | |
| 197 x12, // Scratch. | |
| 198 &not_cached); | |
| 199 __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11); | |
| 200 __ Bind(&argument_is_string); | |
| 201 | |
| 202 // ----------- S t a t e ------------- | |
| 203 // -- x2 : argument converted to string | |
| 204 // -- x1 : constructor function | |
| 205 // -- lr : return address | |
| 206 // ----------------------------------- | |
| 207 | |
| 208 Label gc_required; | |
| 209 Register new_obj = x0; | |
| 210 __ Allocate(JSValue::kSize, new_obj, x10, x11, &gc_required, TAG_OBJECT); | |
| 211 | |
| 212 // Initialize the String object. | |
| 213 Register map = x3; | |
| 214 __ LoadGlobalFunctionInitialMap(function, map, x10); | |
| 215 if (FLAG_debug_code) { | |
| 216 __ Ldrb(x4, FieldMemOperand(map, Map::kInstanceSizeOffset)); | |
| 217 __ Cmp(x4, JSValue::kSize >> kPointerSizeLog2); | |
| 218 __ Assert(eq, kUnexpectedStringWrapperInstanceSize); | |
| 219 __ Ldrb(x4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset)); | |
| 220 __ Cmp(x4, 0); | |
| 221 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper); | |
| 222 } | |
| 223 __ Str(map, FieldMemOperand(new_obj, HeapObject::kMapOffset)); | |
| 224 | |
| 225 Register empty = x3; | |
| 226 __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex); | |
| 227 __ Str(empty, FieldMemOperand(new_obj, JSObject::kPropertiesOffset)); | |
| 228 __ Str(empty, FieldMemOperand(new_obj, JSObject::kElementsOffset)); | |
| 229 | |
| 230 __ Str(argument, FieldMemOperand(new_obj, JSValue::kValueOffset)); | |
| 231 | |
| 232 // Ensure the object is fully initialized. | |
| 233 STATIC_ASSERT(JSValue::kSize == (4 * kPointerSize)); | |
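| // The four stores above (map, properties, elements and value) cover | |
| // exactly JSValue::kSize bytes, so every field of the new object is | |
| // initialized before the GC can observe it. | |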
| 234 | |
| 235 __ Ret(); | |
| 236 | |
| 237 // The argument was not found in the number to string cache. Check | |
| 238 // if it's a string already before calling the conversion builtin. | |
| 239 Label convert_argument; | |
| 240 __ Bind(&not_cached); | |
| 241 __ JumpIfSmi(argc, &convert_argument); | |
| 242 | |
| 243 // Is it a String? | |
| 244 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset)); | |
| 245 __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset)); | |
| 246 __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument); | |
| 247 __ Mov(argument, argc); | |
| 248 __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11); | |
| 249 __ B(&argument_is_string); | |
| 250 | |
| 251 // Invoke the conversion builtin and put the result into x2. | |
| 252 __ Bind(&convert_argument); | |
| 253 __ Push(function); // Preserve the function. | |
| 254 __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11); | |
| 255 { | |
| 256 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 257 __ Push(argc); | |
| 258 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | |
| 259 } | |
| 260 __ Pop(function); | |
| 261 __ Mov(argument, x0); | |
| 262 __ B(&argument_is_string); | |
| 263 | |
| 264 // Load the empty string into x2, remove the receiver from the | |
| 265 // stack, and jump back to the case where the argument is a string. | |
| 266 __ Bind(&no_arguments); | |
| 267 __ LoadRoot(argument, Heap::kempty_stringRootIndex); | |
| 268 __ Drop(1); | |
| 269 __ B(&argument_is_string); | |
| 270 | |
| 271 // At this point the argument is already a string. Call runtime to create a | |
| 272 // string wrapper. | |
| 273 __ Bind(&gc_required); | |
| 274 __ IncrementCounter(counters->string_ctor_gc_required(), 1, x10, x11); | |
| 275 { | |
| 276 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 277 __ Push(argument); | |
| 278 __ CallRuntime(Runtime::kNewStringWrapper, 1); | |
| 279 } | |
| 280 __ Ret(); | |
| 281 } | |
| 282 | |
| 283 | |
| 284 static void CallRuntimePassFunction(MacroAssembler* masm, | |
| 285 Runtime::FunctionId function_id) { | |
| 286 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 287 // - Push a copy of the function onto the stack. | |
| 288 // - Push another copy as a parameter to the runtime call. | |
| 289 __ Push(x1, x1); | |
| 290 | |
| 291 __ CallRuntime(function_id, 1); | |
| 292 | |
| 293 // - Restore the function. | |
| 294 __ Pop(x1); | |
| 295 } | |
| 296 | |
| 297 | |
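| // Tail-call the code attached to the function's SharedFunctionInfo. | |
| // Expects the JSFunction in x1. | |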
| 298 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { | |
| 299 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | |
| 300 __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset)); | |
| 301 __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag); | |
| 302 __ Br(x2); | |
| 303 } | |
| 304 | |
| 305 | |
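| // Tail-call a Code object that a preceding runtime call returned in x0. | |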
| 306 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) { | |
| 307 __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag); | |
| 308 __ Br(x0); | |
| 309 } | |
| 310 | |
| 311 | |
| 312 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { | |
| 313 // Checking whether the queued function is ready for install is optional, | |
| 314 // since we come across interrupts and stack checks elsewhere. However, not | |
| 315 // checking may delay installing ready functions, and always checking would be | |
| 316 // quite expensive. A good compromise is to first check against stack limit as | |
| 317 // a cue for an interrupt signal. | |
| 318 Label ok; | |
| 319 __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex); | |
| 320 __ B(hs, &ok); | |
| 321 | |
| 322 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | |
| 323 GenerateTailCallToReturnedCode(masm); | |
| 324 | |
| 325 __ Bind(&ok); | |
| 326 GenerateTailCallToSharedCode(masm); | |
| 327 } | |
| 328 | |
| 329 | |
| 330 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | |
| 331 bool is_api_function, | |
| 332 bool count_constructions) { | |
| 333 // ----------- S t a t e ------------- | |
| 334 // -- x0 : number of arguments | |
| 335 // -- x1 : constructor function | |
| 336 // -- lr : return address | |
| 337 // -- sp[...]: constructor arguments | |
| 338 // ----------------------------------- | |
| 339 | |
| 340 ASM_LOCATION("Builtins::Generate_JSConstructStubHelper"); | |
| 341 // Should never count constructions for api objects. | |
| 342 ASSERT(!is_api_function || !count_constructions); | |
| 343 | |
| 344 Isolate* isolate = masm->isolate(); | |
| 345 | |
| 346 // Enter a construct frame. | |
| 347 { | |
| 348 FrameScope scope(masm, StackFrame::CONSTRUCT); | |
| 349 | |
| 350 // Preserve the two incoming parameters on the stack. | |
| 351 Register argc = x0; | |
| 352 Register constructor = x1; | |
| 353 // x1: constructor function | |
| 354 __ SmiTag(argc); | |
| 355 __ Push(argc, constructor); | |
| 356 // sp[0]: constructor function | |
| 357 // sp[1]: number of arguments (smi-tagged) | |
| 358 | |
| 359 // Try to allocate the object without transitioning into C code. If any of | |
| 360 // the preconditions is not met, the code bails out to the runtime call. | |
| 361 Label rt_call, allocated; | |
| 362 if (FLAG_inline_new) { | |
| 363 Label undo_allocation; | |
| 364 #if ENABLE_DEBUGGER_SUPPORT | |
| 365 ExternalReference debug_step_in_fp = | |
| 366 ExternalReference::debug_step_in_fp_address(isolate); | |
| 367 __ Mov(x2, Operand(debug_step_in_fp)); | |
| 368 __ Ldr(x2, MemOperand(x2)); | |
| 369 __ Cbnz(x2, &rt_call); | |
| 370 #endif | |
| 371 // Load the initial map and verify that it is in fact a map. | |
| 372 Register init_map = x2; | |
| 373 __ Ldr(init_map, | |
| 374 FieldMemOperand(constructor, | |
| 375 JSFunction::kPrototypeOrInitialMapOffset)); | |
| 376 __ JumpIfSmi(init_map, &rt_call); | |
| 377 __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call); | |
| 378 | |
| 379 // Check that the constructor is not constructing a JSFunction (see | |
| 380 // comments in Runtime_NewObject in runtime.cc), in which case the | |
| 381 // initial map's instance type would be JS_FUNCTION_TYPE. | |
| 382 __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE); | |
| 383 __ B(eq, &rt_call); | |
| 384 | |
| 385 if (count_constructions) { | |
| 386 Label allocate; | |
| 387 // Decrease generous allocation count. | |
| 388 __ Ldr(x3, FieldMemOperand(constructor, | |
| 389 JSFunction::kSharedFunctionInfoOffset)); | |
| 390 MemOperand constructor_count = | |
| 391 FieldMemOperand(x3, SharedFunctionInfo::kConstructionCountOffset); | |
| 392 __ Ldrb(x4, constructor_count); | |
| 393 __ Subs(x4, x4, 1); | |
| 394 __ Strb(x4, constructor_count); | |
| 395 __ B(ne, &allocate); | |
| 396 | |
| 397 // Push the constructor and map to the stack, and the constructor again | |
| 398 // as argument to the runtime call. | |
| 399 __ Push(constructor, init_map, constructor); | |
| 400 // The call will replace the stub, so the countdown is only done once. | |
| 401 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | |
| 402 __ Pop(init_map, constructor); | |
| 403 __ Bind(&allocate); | |
| 404 } | |
| 405 | |
| 406 // Now allocate the JSObject on the heap. | |
| 407 Register obj_size = x3; | |
| 408 Register new_obj = x4; | |
| 409 __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset)); | |
| 410 __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS); | |
| 411 | |
| 412 // Allocated the JSObject, now initialize the fields. Map is set to | |
| 413 // initial map and properties and elements are set to empty fixed array. | |
| 414 // NB. the object pointer is not tagged, so MemOperand is used. | |
| 415 Register empty = x5; | |
| 416 __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex); | |
| 417 __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset)); | |
| 418 __ Str(empty, MemOperand(new_obj, JSObject::kPropertiesOffset)); | |
| 419 __ Str(empty, MemOperand(new_obj, JSObject::kElementsOffset)); | |
| 420 | |
| 421 Register first_prop = x5; | |
| 422 __ Add(first_prop, new_obj, JSObject::kHeaderSize); | |
| 423 | |
| 424 // Fill all of the in-object properties with the appropriate filler. | |
| 425 Register obj_end = x6; | |
| 426 __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2)); | |
| 427 Register undef = x7; | |
| 428 __ LoadRoot(undef, Heap::kUndefinedValueRootIndex); | |
| 429 | |
| 430 // Obtain number of pre-allocated property fields and in-object | |
| 431 // properties. | |
| 432 Register prealloc_fields = x10; | |
| 433 Register inobject_props = x11; | |
| 434 Register inst_sizes = x11; | |
| 435 __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset)); | |
| 436 __ Ubfx(prealloc_fields, inst_sizes, | |
| 437 Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, | |
| 438 kBitsPerByte); | |
| 439 __ Ubfx(inobject_props, inst_sizes, | |
| 440 Map::kInObjectPropertiesByte * kBitsPerByte, kBitsPerByte); | |
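| // Each Ubfx above extracts one unsigned byte-wide bitfield from the | |
| // packed instance sizes word: the pre-allocated property field count | |
| // and the in-object property count. | |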
| 441 | |
| 442 if (count_constructions) { | |
| 443 // Register first_non_prealloc is the offset of the first field after | |
| 444 // pre-allocated fields. | |
| 445 Register first_non_prealloc = x12; | |
| 446 __ Add(first_non_prealloc, first_prop, | |
| 447 Operand(prealloc_fields, LSL, kPointerSizeLog2)); | |
| 448 | |
| 449 if (FLAG_debug_code) { | |
| 450 __ Cmp(first_non_prealloc, obj_end); | |
| 451 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields); | |
| 452 } | |
| 453 __ InitializeFieldsWithFiller(first_prop, first_non_prealloc, undef); | |
| 454 // To allow for truncation. | |
| 455 __ LoadRoot(x12, Heap::kOnePointerFillerMapRootIndex); | |
| 456 __ InitializeFieldsWithFiller(first_prop, obj_end, x12); | |
| 457 } else { | |
| 458 __ InitializeFieldsWithFiller(first_prop, obj_end, undef); | |
| 459 } | |
| 460 | |
| 461 // Add the object tag to make the JSObject real, so that we can continue | |
| 462 // and jump into the continuation code at any time from now on. Any | |
| 463 // failures need to undo the allocation, so that the heap is in a | |
| 464 // consistent state and verifiable. | |
| 465 __ Add(new_obj, new_obj, kHeapObjectTag); | |
| 466 | |
| 467 // Check if a non-empty properties array is needed. Continue with | |
| 468 // allocated object if not, or fall through to runtime call if it is. | |
| 469 Register element_count = x3; | |
| 470 __ Ldrb(x3, FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset)); | |
| 471 // The instance sizes field contains both the pre-allocated property | |
| 472 // field count and the in-object property count. | |
| 473 __ Add(x3, x3, prealloc_fields); | |
| 474 __ Subs(element_count, x3, inobject_props); | |
| 475 | |
| 476 // Done if no extra properties are to be allocated. | |
| 477 __ B(eq, &allocated); | |
| 478 __ Assert(pl, kPropertyAllocationCountFailed); | |
| 479 | |
| 480 // Scale the number of elements by pointer size and add the header for | |
| 481 // FixedArrays to the start of the next object calculation from above. | |
| 482 Register new_array = x5; | |
| 483 Register array_size = x6; | |
| 484 __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize); | |
| 485 __ Allocate(array_size, new_array, x11, x12, &undo_allocation, | |
| 486 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | | |
| 487 SIZE_IN_WORDS)); | |
| 488 | |
| 489 Register array_map = x10; | |
| 490 __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex); | |
| 491 __ Str(array_map, MemOperand(new_array, FixedArray::kMapOffset)); | |
| 492 __ SmiTag(x0, element_count); | |
| 493 __ Str(x0, MemOperand(new_array, FixedArray::kLengthOffset)); | |
| 494 | |
| 495 // Initialize the fields to undefined. | |
| 496 Register elements = x10; | |
| 497 Register elements_end = x11; | |
| 498 __ Add(elements, new_array, FixedArray::kHeaderSize); | |
| 499 __ Add(elements_end, elements, | |
| 500 Operand(element_count, LSL, kPointerSizeLog2)); | |
| 501 __ InitializeFieldsWithFiller(elements, elements_end, undef); | |
| 502 | |
| 503 // Store the initialized FixedArray into the properties field of the | |
| 504 // JSObject. | |
| 505 __ Add(new_array, new_array, kHeapObjectTag); | |
| 506 __ Str(new_array, FieldMemOperand(new_obj, JSObject::kPropertiesOffset)); | |
| 507 | |
| 508 // Continue with JSObject being successfully allocated. | |
| 509 __ B(&allocated); | |
| 510 | |
| 511 // Undo the setting of the new top so that the heap is verifiable. For | |
| 512 // example, the map's unused properties potentially do not match the | |
| 513 // allocated object's unused properties. | |
| 514 __ Bind(&undo_allocation); | |
| 515 __ UndoAllocationInNewSpace(new_obj, x14); | |
| 516 } | |
| 517 | |
| 518 // Allocate the new receiver object using the runtime call. | |
| 519 __ Bind(&rt_call); | |
| 520 __ Push(constructor); // Argument for Runtime_NewObject. | |
| 521 __ CallRuntime(Runtime::kNewObject, 1); | |
| 522 __ Mov(x4, x0); | |
| 523 | |
| 524 // Receiver for constructor call allocated. | |
| 525 // x4: JSObject | |
| 526 __ Bind(&allocated); | |
| 527 __ Push(x4, x4); | |
| 528 | |
| 529 // Reload the number of arguments from the stack. | |
| 530 // Set it up in x0 for the function call below. | |
| 531 // jssp[0]: receiver | |
| 532 // jssp[1]: receiver | |
| 533 // jssp[2]: constructor function | |
| 534 // jssp[3]: number of arguments (smi-tagged) | |
| 535 __ Peek(constructor, 2 * kXRegSizeInBytes); // Load constructor. | |
| 536 __ Peek(argc, 3 * kXRegSizeInBytes); // Load number of arguments. | |
| 537 __ SmiUntag(argc); | |
| 538 | |
| 539 // Set up pointer to last argument. | |
| 540 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset); | |
| 541 | |
| 542 // Copy arguments and receiver to the expression stack. | |
| 543 // Copy 2 values every loop to use ldp/stp. | |
| 544 // x0: number of arguments | |
| 545 // x1: constructor function | |
| 546 // x2: address of last argument (caller sp) | |
| 547 // jssp[0]: receiver | |
| 548 // jssp[1]: receiver | |
| 549 // jssp[2]: constructor function | |
| 550 // jssp[3]: number of arguments (smi-tagged) | |
| 551 // Compute the start address of the copy in x3. | |
| 552 __ Add(x3, x2, Operand(argc, LSL, kPointerSizeLog2)); | |
| 553 Label loop, entry, done_copying_arguments; | |
| 554 __ B(&entry); | |
| 555 __ Bind(&loop); | |
| 556 __ Ldp(x10, x11, MemOperand(x3, -2 * kPointerSize, PreIndex)); | |
| 557 __ Push(x11, x10); | |
| 558 __ Bind(&entry); | |
| 559 __ Cmp(x3, x2); | |
| 560 __ B(gt, &loop); | |
| 561 // Because we copied values 2 by 2, we may have copied one extra value. | |
| 562 // Drop it if that is the case. | |
| 563 __ B(eq, &done_copying_arguments); | |
| 564 __ Drop(1); | |
| 565 __ Bind(&done_copying_arguments); | |
| 566 | |
| 567 // Call the function. | |
| 568 // x0: number of arguments | |
| 569 // x1: constructor function | |
| 570 if (is_api_function) { | |
| 571 __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset)); | |
| 572 Handle<Code> code = | |
| 573 masm->isolate()->builtins()->HandleApiCallConstruct(); | |
| 574 __ Call(code, RelocInfo::CODE_TARGET); | |
| 575 } else { | |
| 576 ParameterCount actual(argc); | |
| 577 __ InvokeFunction(constructor, actual, CALL_FUNCTION, NullCallWrapper()); | |
| 578 } | |
| 579 | |
| 580 // Store offset of return address for deoptimizer. | |
| 581 if (!is_api_function && !count_constructions) { | |
| 582 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); | |
| 583 } | |
| 584 | |
| 585 // Restore the context from the frame. | |
| 586 // x0: result | |
| 587 // jssp[0]: receiver | |
| 588 // jssp[1]: constructor function | |
| 589 // jssp[2]: number of arguments (smi-tagged) | |
| 590 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 591 | |
| 592 // If the result is an object (in the ECMA sense), we should get rid | |
| 593 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 | |
| 594 // on page 74. | |
| 595 Label use_receiver, exit; | |
| 596 | |
| 597 // If the result is a smi, it is *not* an object in the ECMA sense. | |
| 598 // x0: result | |
| 599 // jssp[0]: receiver (newly allocated object) | |
| 600 // jssp[1]: constructor function | |
| 601 // jssp[2]: number of arguments (smi-tagged) | |
| 602 __ JumpIfSmi(x0, &use_receiver); | |
| 603 | |
| 604 // If the type of the result (stored in its map) is less than | |
| 605 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. | |
| 606 __ JumpIfObjectType(x0, x1, x3, FIRST_SPEC_OBJECT_TYPE, &exit, ge); | |
| 607 | |
| 608 // Throw away the result of the constructor invocation and use the | |
| 609 // on-stack receiver as the result. | |
| 610 __ Bind(&use_receiver); | |
| 611 __ Peek(x0, 0); | |
| 612 | |
| 613 // Remove the receiver from the stack, remove caller arguments, and | |
| 614 // return. | |
| 615 __ Bind(&exit); | |
| 616 // x0: result | |
| 617 // jssp[0]: receiver (newly allocated object) | |
| 618 // jssp[1]: constructor function | |
| 619 // jssp[2]: number of arguments (smi-tagged) | |
| 620 __ Peek(x1, 2 * kXRegSizeInBytes); | |
| 621 | |
| 622 // Leave construct frame. | |
| 623 } | |
| 624 | |
| 625 __ DropBySMI(x1); | |
| 626 __ Drop(1); | |
| 627 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2); | |
| 628 __ Ret(); | |
| 629 } | |
| 630 | |
| 631 | |
| 632 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { | |
| 633 Generate_JSConstructStubHelper(masm, false, true); | |
| 634 } | |
| 635 | |
| 636 | |
| 637 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { | |
| 638 Generate_JSConstructStubHelper(masm, false, false); | |
| 639 } | |
| 640 | |
| 641 | |
| 642 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { | |
| 643 Generate_JSConstructStubHelper(masm, true, false); | |
| 644 } | |
| 645 | |
| 646 | |
| 647 // Input: | |
| 648 // x0: code entry. | |
| 649 // x1: function. | |
| 650 // x2: receiver. | |
| 651 // x3: argc. | |
| 652 // x4: argv. | |
| 653 // Output: | |
| 654 // x0: result. | |
| 655 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | |
| 656 bool is_construct) { | |
| 657 // Called from JSEntryStub::GenerateBody(). | |
| 658 Register function = x1; | |
| 659 Register receiver = x2; | |
| 660 Register argc = x3; | |
| 661 Register argv = x4; | |
| 662 | |
| 663 ProfileEntryHookStub::MaybeCallEntryHook(masm); | |
| 664 | |
| 665 // Clear the context before we push it when entering the internal frame. | |
| 666 __ Mov(cp, 0); | |
| 667 | |
| 668 { | |
| 669 // Enter an internal frame. | |
| 670 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 671 | |
| 672 // Set up the context from the function argument. | |
| 673 __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); | |
| 674 | |
| 675 __ InitializeRootRegister(); | |
| 676 | |
| 677 // Push the function and the receiver onto the stack. | |
| 678 __ Push(function, receiver); | |
| 679 | |
| 680 // Copy arguments to the stack in a loop, in reverse order. | |
| 681 // x3: argc. | |
| 682 // x4: argv. | |
| 683 Label loop, entry; | |
| 684 // Compute the copy end address. | |
| 685 __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2)); | |
| 686 | |
| 687 __ B(&entry); | |
| 688 __ Bind(&loop); | |
| 689 __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex)); | |
| 690 __ Ldr(x12, MemOperand(x11)); // Dereference the handle. | |
| 691 __ Push(x12); // Push the argument. | |
| 692 __ Bind(&entry); | |
| 693 __ Cmp(x10, argv); | |
| 694 __ B(ne, &loop); | |
| 695 | |
| 696 // Initialize all JavaScript callee-saved registers, since they will be seen | |
| 697 // by the garbage collector as part of handlers. | |
| 698 // The original values have been saved in JSEntryStub::GenerateBody(). | |
| 699 __ LoadRoot(x19, Heap::kUndefinedValueRootIndex); | |
| 700 __ Mov(x20, x19); | |
| 701 __ Mov(x21, x19); | |
| 702 __ Mov(x22, x19); | |
| 703 __ Mov(x23, x19); | |
| 704 __ Mov(x24, x19); | |
| 705 __ Mov(x25, x19); | |
| 706 // Don't initialize the reserved registers. | |
| 707 // x26 : root register (root). | |
| 708 // x27 : context pointer (cp). | |
| 709 // x28 : JS stack pointer (jssp). | |
| 710 // x29 : frame pointer (fp). | |
| 711 | |
| 712 // TODO(alexandre): Revisit the MAsm function invocation mechanisms. | |
| 713 // Currently there is a mix of statically and dynamically allocated | |
| 714 // registers. | |
| 715 __ Mov(x0, argc); | |
| 716 if (is_construct) { | |
| 717 // No type feedback cell is available. | |
| 718 Handle<Object> undefined_sentinel( | |
| 719 masm->isolate()->heap()->undefined_value(), masm->isolate()); | |
| 720 __ Mov(x2, Operand(undefined_sentinel)); | |
| 721 | |
| 722 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | |
| 723 __ CallStub(&stub); | |
| 724 } else { | |
| 725 ParameterCount actual(x0); | |
| 726 __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper()); | |
| 727 } | |
| 728 // Exit the JS internal frame and remove the parameters (except function), | |
| 729 // and return. | |
| 730 } | |
| 731 | |
| 732 // Result is in x0. Return. | |
| 733 __ Ret(); | |
| 734 } | |
| 735 | |
| 736 | |
| 737 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | |
| 738 Generate_JSEntryTrampolineHelper(masm, false); | |
| 739 } | |
| 740 | |
| 741 | |
| 742 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | |
| 743 Generate_JSEntryTrampolineHelper(masm, true); | |
| 744 } | |
| 745 | |
| 746 | |
| 747 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) { | |
| 748 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized); | |
| 749 GenerateTailCallToReturnedCode(masm); | |
| 750 } | |
| 751 | |
| 752 | |
| 753 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { | |
| 754 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 755 Register function = x1; | |
| 756 | |
| 757 // Preserve function. At the same time, push arguments for | |
| 758 // kCompileOptimized. | |
| 759 __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent)); | |
| 760 __ Push(function, function, x10); | |
| 761 | |
| 762 __ CallRuntime(Runtime::kCompileOptimized, 2); | |
| 763 | |
| 764 // Restore the function. | |
| 765 __ Pop(function); | |
| 766 } | |
| 767 | |
| 768 | |
| 769 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | |
| 770 CallCompileOptimized(masm, false); | |
| 771 GenerateTailCallToReturnedCode(masm); | |
| 772 } | |
| 773 | |
| 774 | |
| 775 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { | |
| 776 CallCompileOptimized(masm, true); | |
| 777 GenerateTailCallToReturnedCode(masm); | |
| 778 } | |
| 779 | |
| 780 | |
| 781 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { | |
| 782 // For now, we are relying on the fact that make_code_young doesn't do any | |
| 783 // garbage collection which allows us to save/restore the registers without | |
| 784 // worrying about which of them contain pointers. We also don't build an | |
| 785 // internal frame to make the code fast, since we shouldn't have to do stack | |
| 786 // crawls in MakeCodeYoung. This seems a bit fragile. | |
| 787 | |
| 788 // The following caller-saved registers must be saved and restored when | |
| 789 // calling through to the runtime: | |
| 790 // x0 - The address from which to resume execution. | |
| 791 // x1 - isolate | |
| 792 // lr - The return address for the JSFunction itself. It has not yet been | |
| 793 // preserved on the stack because the frame setup code was replaced | |
| 794 // with a call to this stub, to handle code ageing. | |
| 795 { | |
| 796 FrameScope scope(masm, StackFrame::MANUAL); | |
| 797 __ Push(x0, x1, fp, lr); | |
| 798 __ Mov(x1, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
| 799 __ CallCFunction( | |
| 800 ExternalReference::get_make_code_young_function(masm->isolate()), 2); | |
| 801 __ Pop(lr, fp, x1, x0); | |
| 802 } | |
| 803 | |
| 804 // The calling function has been made young again, so return to execute the | |
| 805 // real frame set-up code. | |
| 806 __ Br(x0); | |
| 807 } | |
| 808 | |
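| // For each code age C in CODE_AGE_LIST, define the | |
| // Generate_Make<C>CodeYoungAgainEvenMarking and ...OddMarking builtins; | |
| // both simply defer to GenerateMakeCodeYoungAgainCommon above. | |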
| 809 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ | |
| 810 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ | |
| 811 MacroAssembler* masm) { \ | |
| 812 GenerateMakeCodeYoungAgainCommon(masm); \ | |
| 813 } \ | |
| 814 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ | |
| 815 MacroAssembler* masm) { \ | |
| 816 GenerateMakeCodeYoungAgainCommon(masm); \ | |
| 817 } | |
| 818 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) | |
| 819 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR | |
| 820 | |
| 821 | |
| 822 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { | |
| 823 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact | |
| 824 // that make_code_young doesn't do any garbage collection which allows us to | |
| 825 // save/restore the registers without worrying about which of them contain | |
| 826 // pointers. | |
| 827 | |
| 828 // The following caller-saved registers must be saved and restored when | |
| 829 // calling through to the runtime: | |
| 830 // x0 - The address from which to resume execution. | |
| 831 // x1 - isolate | |
| 832 // lr - The return address for the JSFunction itself. It has not yet been | |
| 833 // preserved on the stack because the frame setup code was replaced | |
| 834 // with a call to this stub, to handle code ageing. | |
| 835 { | |
| 836 FrameScope scope(masm, StackFrame::MANUAL); | |
| 837 __ Push(x0, x1, fp, lr); | |
| 838 __ Mov(x1, Operand(ExternalReference::isolate_address(masm->isolate()))); | |
| 839 __ CallCFunction( | |
| 840 ExternalReference::get_mark_code_as_executed_function( | |
| 841 masm->isolate()), 2); | |
| 842 __ Pop(lr, fp, x1, x0); | |
| 843 | |
| 844 // Perform prologue operations usually performed by the young code stub. | |
| 845 __ EmitFrameSetupForCodeAgePatching(masm); | |
| 846 } | |
| 847 | |
| 848 // Jump to point after the code-age stub. | |
| 849 __ Add(x0, x0, kCodeAgeSequenceSize); | |
| 850 __ Br(x0); | |
| 851 } | |
| 852 | |
| 853 | |
| 854 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { | |
| 855 GenerateMakeCodeYoungAgainCommon(masm); | |
| 856 } | |
| 857 | |
| 858 | |
| 859 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, | |
| 860 SaveFPRegsMode save_doubles) { | |
| 861 { | |
| 862 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 863 | |
| 864 // Preserve registers across notification, this is important for compiled | |
| 865 // stubs that tail call the runtime on deopts passing their parameters in | |
| 866 // registers. | |
| 867 // TODO(jbramley): Is it correct (and appropriate) to use safepoint | |
| 868 // registers here? According to the comment above, we should only need to | |
| 869 // preserve the registers with parameters. | |
| 870 __ PushXRegList(kSafepointSavedRegisters); | |
| 871 // Pass the function and deoptimization type to the runtime system. | |
| 872 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); | |
| 873 __ PopXRegList(kSafepointSavedRegisters); | |
| 874 } | |
| 875 | |
| 876 // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate). | |
| 877 __ Drop(1); | |
| 878 | |
| 879 // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this | |
| 880 // into lr before it jumps here. | |
| 881 __ Br(lr); | |
| 882 } | |
| 883 | |
| 884 | |
| 885 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { | |
| 886 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); | |
| 887 } | |
| 888 | |
| 889 | |
| 890 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { | |
| 891 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); | |
| 892 } | |
| 893 | |
| 894 | |
| 895 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | |
| 896 Deoptimizer::BailoutType type) { | |
| 897 { | |
| 898 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 899 // Pass the deoptimization type to the runtime system. | |
| 900 __ Mov(x0, Operand(Smi::FromInt(static_cast<int>(type)))); | |
| 901 __ Push(x0); | |
| 902 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | |
| 903 } | |
| 904 | |
| 905 // Get the full codegen state from the stack and untag it. | |
| 906 Register state = x6; | |
| 907 __ Peek(state, 0); | |
| 908 __ SmiUntag(state); | |
| 909 | |
| 910 // Switch on the state. | |
| 911 Label with_tos_register, unknown_state; | |
| 912 __ CompareAndBranch( | |
| 913 state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register); | |
| 914 __ Drop(1); // Remove state. | |
| 915 __ Ret(); | |
| 916 | |
| 917 __ Bind(&with_tos_register); | |
| 918 // Reload TOS register. | |
| 919 __ Peek(x0, kPointerSize); | |
| 920 __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state); | |
| 921 __ Drop(2); // Remove state and TOS. | |
| 922 __ Ret(); | |
| 923 | |
| 924 __ Bind(&unknown_state); | |
| 925 __ Abort(kInvalidFullCodegenState); | |
| 926 } | |
| 927 | |
| 928 | |
| 929 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | |
| 930 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | |
| 931 } | |
| 932 | |
| 933 | |
| 934 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | |
| 935 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | |
| 936 } | |
| 937 | |
| 938 | |
| 939 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { | |
| 940 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); | |
| 941 } | |
| 942 | |
| 943 | |
| 944 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { | |
| 945 // Look up the function in the JavaScript frame. | |
| 946 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 947 { | |
| 948 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 949 // Pass function as argument. | |
| 950 __ Push(x0); | |
| 951 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | |
| 952 } | |
| 953 | |
| 954 // If the code object is null, just return to the unoptimized code. | |
| 955 Label skip; | |
| 956 __ CompareAndBranch(x0, Operand(Smi::FromInt(0)), ne, &skip); | |
| 957 __ Ret(); | |
| 958 | |
| 959 __ Bind(&skip); | |
| 960 | |
| 961 // Load deoptimization data from the code object. | |
| 962 // <deopt_data> = <code>[#deoptimization_data_offset] | |
| 963 __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); | |
| 964 | |
| 965 // Load the OSR entrypoint offset from the deoptimization data. | |
| 966 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] | |
| 967 __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt( | |
| 968 DeoptimizationInputData::kOsrPcOffsetIndex))); | |
| 969 | |
| 970 // Compute the target address = code_obj + header_size + osr_offset | |
| 971 // <entry_addr> = <code_obj> + #header_size + <osr_offset> | |
| 972 __ Add(x0, x0, x1); | |
| 973 __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag); | |
| 974 | |
| 975 // And "return" to the OSR entry point of the function. | |
| 976 __ Ret(); | |
| 977 } | |
| 978 | |
| 979 | |
| 980 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { | |
| 981 // We check the stack limit as an indicator that recompilation might be done. | |
| 982 Label ok; | |
| 983 __ CompareRoot(jssp, Heap::kStackLimitRootIndex); | |
| 984 __ B(hs, &ok); | |
| 985 { | |
| 986 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 987 __ CallRuntime(Runtime::kStackGuard, 0); | |
| 988 } | |
| 989 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), | |
| 990 RelocInfo::CODE_TARGET); | |
| 991 | |
| 992 __ Bind(&ok); | |
| 993 __ Ret(); | |
| 994 } | |
| 995 | |
| 996 | |
| 997 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | |
| 998 enum { | |
| 999 call_type_JS_func = 0, | |
| 1000 call_type_func_proxy = 1, | |
| 1001 call_type_non_func = 2 | |
| 1002 }; | |
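| // The call type is tracked in x4 (call_type below) and selects between | |
| // the CALL_NON_FUNCTION and CALL_FUNCTION_PROXY fallbacks further down. | |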
| 1003 Register argc = x0; | |
| 1004 Register function = x1; | |
| 1005 Register call_type = x4; | |
| 1006 Register scratch1 = x10; | |
| 1007 Register scratch2 = x11; | |
| 1008 Register receiver_type = x13; | |
| 1009 | |
| 1010 ASM_LOCATION("Builtins::Generate_FunctionCall"); | |
| 1011 // 1. Make sure we have at least one argument. | |
| 1012 { Label done; | |
| 1013 __ Cbnz(argc, &done); | |
| 1014 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); | |
| 1015 __ Push(scratch1); | |
| 1016 __ Mov(argc, 1); | |
| 1017 __ Bind(&done); | |
| 1018 } | |
| 1019 | |
| 1020 // 2. Get the function to call (passed as receiver) from the stack, check | |
| 1021 // if it is a function. | |
| 1022 Label slow, non_function; | |
| 1023 __ Peek(function, Operand(argc, LSL, kXRegSizeInBytesLog2)); | |
| 1024 __ JumpIfSmi(function, &non_function); | |
| 1025 __ JumpIfNotObjectType(function, scratch1, receiver_type, | |
| 1026 JS_FUNCTION_TYPE, &slow); | |
| 1027 | |
| 1028 // 3a. Patch the first argument if necessary when calling a function. | |
| 1029 Label shift_arguments; | |
| 1030 __ Mov(call_type, static_cast<int>(call_type_JS_func)); | |
| 1031 { Label convert_to_object, use_global_receiver, patch_receiver; | |
| 1032 // Change context eagerly in case we need the global receiver. | |
| 1033 __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); | |
| 1034 | |
| 1035 // Do not transform the receiver for strict mode functions. | |
| 1036 // Also do not transform the receiver for native functions; the compiler | |
| 1037 // hints are checked below. | |
| 1038 __ Ldr(scratch1, | |
| 1039 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | |
| 1040 __ Ldr(scratch2.W(), | |
| 1041 FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset)); | |
| 1042 __ TestAndBranchIfAnySet( | |
| 1043 scratch2.W(), | |
| 1044 (1 << SharedFunctionInfo::kStrictModeFunction) | | |
| 1045 (1 << SharedFunctionInfo::kNative), | |
| 1046 &shift_arguments); | |
| 1047 | |
| 1048 // Compute the receiver in non-strict mode. | |
| 1049 Register receiver = x2; | |
| 1050 __ Sub(scratch1, argc, 1); | |
| 1051 __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeInBytesLog2)); | |
| 1052 __ JumpIfSmi(receiver, &convert_to_object); | |
| 1053 | |
| 1054 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, | |
| 1055 &use_global_receiver); | |
| 1056 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver); | |
| 1057 | |
| 1058 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | |
| 1059 __ JumpIfObjectType(receiver, scratch1, scratch2, | |
| 1060 FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge); | |
| 1061 | |
| 1062 __ Bind(&convert_to_object); | |
| 1063 | |
| 1064 { | |
| 1065 // Enter an internal frame in order to preserve argument count. | |
| 1066 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1067 __ SmiTag(argc); | |
| 1068 | |
| 1069 __ Push(argc, receiver); | |
| 1070 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | |
| 1071 __ Mov(receiver, x0); | |
| 1072 | |
| 1073 __ Pop(argc); | |
| 1074 __ SmiUntag(argc); | |
| 1075 | |
| 1076 // Exit the internal frame. | |
| 1077 } | |
| 1078 | |
| 1079 // Restore the function and flag in the registers. | |
| 1080 __ Peek(function, Operand(argc, LSL, kXRegSizeInBytesLog2)); | |
| 1081 __ Mov(call_type, static_cast<int>(call_type_JS_func)); | |
| 1082 __ B(&patch_receiver); | |
| 1083 | |
| 1084 __ Bind(&use_global_receiver); | |
| 1085 __ Ldr(receiver, GlobalObjectMemOperand()); | |
| 1086 __ Ldr(receiver, | |
| 1087 FieldMemOperand(receiver, GlobalObject::kGlobalReceiverOffset)); | |
| 1088 | |
| 1089 | |
| 1090 __ Bind(&patch_receiver); | |
| 1091 __ Sub(scratch1, argc, 1); | |
| 1092 __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeInBytesLog2)); | |
| 1093 | |
| 1094 __ B(&shift_arguments); | |
| 1095 } | |
| 1096 | |
| 1097 // 3b. Check for function proxy. | |
| 1098 __ Bind(&slow); | |
| 1099 __ Mov(call_type, static_cast<int>(call_type_func_proxy)); | |
| 1100 __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE); | |
| 1101 __ B(eq, &shift_arguments); | |
| 1102 __ Bind(&non_function); | |
| 1103 __ Mov(call_type, static_cast<int>(call_type_non_func)); | |
| 1104 | |
| 1105 // 3c. Patch the first argument when calling a non-function. The | |
| 1106 // CALL_NON_FUNCTION builtin expects the non-function callee as | |
| 1107 // receiver, so overwrite the first argument which will ultimately | |
| 1108 // become the receiver. | |
| 1109 // call type (0: JS function, 1: function proxy, 2: non-function) | |
| 1110 __ Sub(scratch1, argc, 1); | |
| 1111 __ Poke(function, Operand(scratch1, LSL, kXRegSizeInBytesLog2)); | |
| 1112 | |
| 1113 // 4. Shift arguments and return address one slot down on the stack | |
| 1114 // (overwriting the original receiver). Adjust argument count to make | |
| 1115 // the original first argument the new receiver. | |
| 1116 // call type (0: JS function, 1: function proxy, 2: non-function) | |
| 1117 __ Bind(&shift_arguments); | |
| 1118 { Label loop; | |
| 1119 // Calculate the copy start address (destination). Copy end address is jssp. | |
| 1120 __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2)); | |
| 1121 __ Sub(scratch1, scratch2, kPointerSize); | |
| 1122 | |
| 1123 __ Bind(&loop); | |
| 1124 __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex)); | |
| 1125 __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex)); | |
| 1126 __ Cmp(scratch1, jssp); | |
| 1127 __ B(ge, &loop); | |
| 1128 // Adjust the actual number of arguments and remove the top element | |
| 1129 // (which is a copy of the last argument). | |
| 1130 __ Sub(argc, argc, 1); | |
| 1131 __ Drop(1); | |
| 1132 } | |
| 1133 | |
| 1134 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, | |
| 1135 // or a function proxy via CALL_FUNCTION_PROXY. | |
| 1136 // call type (0: JS function, 1: function proxy, 2: non-function) | |
| 1137 { Label js_function, non_proxy; | |
| 1138 __ Cbz(call_type, &js_function); | |
| 1139 // Expected number of arguments is 0 for CALL_NON_FUNCTION. | |
| 1140 __ Mov(x2, 0); | |
| 1141 __ Cmp(call_type, static_cast<int>(call_type_func_proxy)); | |
| 1142 __ B(ne, &non_proxy); | |
| 1143 | |
| 1144 __ Push(function); // Re-add proxy object as additional argument. | |
| 1145 __ Add(argc, argc, 1); | |
| 1146 __ GetBuiltinFunction(function, Builtins::CALL_FUNCTION_PROXY); | |
| 1147 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | |
| 1148 RelocInfo::CODE_TARGET); | |
| 1149 | |
| 1150 __ Bind(&non_proxy); | |
| 1151 __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION); | |
| 1152 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | |
| 1153 RelocInfo::CODE_TARGET); | |
| 1154 __ Bind(&js_function); | |
| 1155 } | |
| 1156 | |
| 1157 // 5b. Get the code to call from the function and check that the number of | |
| 1158 // expected arguments matches what we're providing. If so, jump | |
| 1159 // (tail-call) to the code in register x3 without checking arguments. | |
| 1160 __ Ldr(x3, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | |
| 1161 __ Ldrsw(x2, | |
| 1162 FieldMemOperand(x3, | |
| 1163 SharedFunctionInfo::kFormalParameterCountOffset)); | |
| 1164 Label dont_adapt_args; | |
| 1165 __ Cmp(x2, argc); // Check formal and actual parameter counts. | |
| 1166 __ B(eq, &dont_adapt_args); | |
| 1167 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | |
| 1168 RelocInfo::CODE_TARGET); | |
| 1169 __ Bind(&dont_adapt_args); | |
| 1170 | |
| 1171 __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); | |
| 1172 ParameterCount expected(0); | |
| 1173 __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); | |
| 1174 } | |
| 1175 | |
| 1176 | |
| 1177 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | |
| 1178 ASM_LOCATION("Builtins::Generate_FunctionApply"); | |
| 1179 const int kIndexOffset = | |
| 1180 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); | |
| 1181 const int kLimitOffset = | |
| 1182 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); | |
| 1183 const int kArgsOffset = 2 * kPointerSize; | |
| 1184 const int kReceiverOffset = 3 * kPointerSize; | |
| 1185 const int kFunctionOffset = 4 * kPointerSize; | |
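| // Relative to fp: the caller pushed the function, the receiver and the | |
| // arguments object; kIndexOffset and kLimitOffset address the two | |
| // expression-stack slots (limit and index) pushed further down. | |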
| 1186 | |
| 1187 { | |
| 1188 FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
| 1189 | |
| 1190 Register args = x12; | |
| 1191 Register receiver = x14; | |
| 1192 Register function = x15; | |
| 1193 | |
| 1194 // Get the length of the arguments via a builtin call. | |
| 1195 __ Ldr(function, MemOperand(fp, kFunctionOffset)); | |
| 1196 __ Ldr(args, MemOperand(fp, kArgsOffset)); | |
| 1197 __ Push(function, args); | |
| 1198 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | |
| 1199 Register argc = x0; | |
| 1200 | |
| 1201 // Check the stack for overflow. | |
| 1202 // We are not trying to catch interruptions (e.g. debug break and | |
| 1203 // preemption) here, so the "real stack limit" is checked. | |
| 1204 Label enough_stack_space; | |
| 1205 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); | |
| 1206 __ Ldr(function, MemOperand(fp, kFunctionOffset)); | |
| 1207 // Make x10 the space we have left. The stack might already be overflowed | |
| 1208 // here which will cause x10 to become negative. | |
| 1209 // TODO(jbramley): Check that the stack usage here is safe. | |
| 1210 __ Sub(x10, jssp, x10); | |
| 1211 // Check if the arguments will overflow the stack. | |
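| // argc is smi-tagged, so shifting it right by | |
| // kSmiShift - kPointerSizeLog2 yields argc * kPointerSize, the amount | |
| // of stack space the arguments will need. | |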
| 1212 __ Cmp(x10, Operand(argc, LSR, kSmiShift - kPointerSizeLog2)); | |
| 1213 __ B(gt, &enough_stack_space); | |
| 1214 // There is not enough stack space, so use a builtin to throw an appropriate | |
| 1215 // error. | |
| 1216 __ Push(function, argc); | |
| 1217 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); | |
| 1218 // We should never return from the APPLY_OVERFLOW builtin. | |
| 1219 if (__ emit_debug_code()) { | |
| 1220 __ Unreachable(); | |
| 1221 } | |
| 1222 | |
| 1223 __ Bind(&enough_stack_space); | |
| 1224 // Push current limit and index. | |
| 1225 __ Mov(x1, 0); // Initial index. | |
| 1226 __ Push(argc, x1); | |
| 1227 | |
| 1228 Label push_receiver; | |
| 1229 __ Ldr(receiver, MemOperand(fp, kReceiverOffset)); | |
| 1230 | |
| 1231 // Check that the function is a JS function. Otherwise it must be a proxy. | |
| 1232 // When it is not the function proxy will be invoked later. | |
| 1233 __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, | |
| 1234 &push_receiver); | |
| 1235 | |
| 1236 // Change context eagerly to get the right global object if necessary. | |
| 1237 __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); | |
| 1238 // Load the shared function info. | |
| 1239 __ Ldr(x2, FieldMemOperand(function, | |
| 1240 JSFunction::kSharedFunctionInfoOffset)); | |
| 1241 | |
| 1242 // Compute and push the receiver. | |
| 1243 // Do not transform the receiver for strict mode functions. | |
| 1244 Label convert_receiver_to_object, use_global_receiver; | |
| 1245 __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset)); | |
| 1246 __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver); | |
| 1247 // Do not transform the receiver for native functions. | |
| 1248 __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver); | |
| 1249 | |
| 1250 // Compute the receiver in non-strict mode. | |
| 1251 __ JumpIfSmi(receiver, &convert_receiver_to_object); | |
| 1252 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver); | |
| 1253 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, | |
| 1254 &use_global_receiver); | |
| 1255 | |
| 1256 // Check if the receiver is already a JavaScript object. | |
| 1257 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | |
| 1258 __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE, | |
| 1259 &push_receiver, ge); | |
| 1260 | |
| 1261 // Call a builtin to convert the receiver to a regular object. | |
| 1262 __ Bind(&convert_receiver_to_object); | |
| 1263 __ Push(receiver); | |
| 1264 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | |
| 1265 __ Mov(receiver, x0); | |
| 1266 __ B(&push_receiver); | |
| 1267 | |
| 1268 __ Bind(&use_global_receiver); | |
| 1269 __ Ldr(x10, GlobalObjectMemOperand()); | |
| 1270 __ Ldr(receiver, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset)); | |
| 1271 | |
| 1272 // Push the receiver. | |
| 1273 __ Bind(&push_receiver); | |
| 1274 __ Push(receiver); | |
| 1275 | |
| 1276 // Copy all arguments from the array to the stack. | |
| 1277 Label entry, loop; | |
| 1278 Register current = x0; | |
| 1279 __ Ldr(current, MemOperand(fp, kIndexOffset)); | |
| 1280 __ B(&entry); | |
| 1281 | |
| 1282 __ Bind(&loop); | |
| 1283 // Load the current argument from the arguments array and push it. | |
| 1284 // TODO(all): Couldn't we optimize this for JS arrays? | |
| 1285 | |
| 1286 __ Ldr(x1, MemOperand(fp, kArgsOffset)); | |
| 1287 __ Push(x1, current); | |
| 1288 | |
| 1289 // Call the runtime to access the property in the arguments array. | |
| 1290 __ CallRuntime(Runtime::kGetProperty, 2); | |
| 1291 __ Push(x0); | |
| 1292 | |
| 1293 // Use inline caching to access the arguments. | |
| 1294 __ Ldr(current, MemOperand(fp, kIndexOffset)); | |
| 1295 __ Add(current, current, Operand(Smi::FromInt(1))); | |
| 1296 __ Str(current, MemOperand(fp, kIndexOffset)); | |
| 1297 | |
| 1298 // Test if the copy loop has finished copying all the elements from the | |
| 1299 // arguments object. | |
| 1300 __ Bind(&entry); | |
| 1301 __ Ldr(x1, MemOperand(fp, kLimitOffset)); | |
| 1302 __ Cmp(current, x1); | |
| 1303 __ B(ne, &loop); | |
| 1304 | |
| 1305 // At the end of the loop, the number of arguments is stored in 'current', | |
| 1306 // represented as a smi. | |
| 1307 | |
| 1308 function = x1; // From now on we want the function to be kept in x1. | |
| 1309 __ Ldr(function, MemOperand(fp, kFunctionOffset)); | |
| 1310 | |
| 1311 // Call the function. | |
| 1312 Label call_proxy; | |
| 1313 ParameterCount actual(current); | |
| 1314 __ SmiUntag(current); | |
| 1315 __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, &call_proxy); | |
| 1316 __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper()); | |
| 1317 frame_scope.GenerateLeaveFrame(); | |
| 1318 __ Drop(3); | |
| 1319 __ Ret(); | |
| 1320 | |
| 1321 // Call the function proxy. | |
| 1322 __ Bind(&call_proxy); | |
| 1323 // x0 : argc | |
| 1324 // x1 : function | |
| 1325 __ Push(function); // Add function proxy as last argument. | |
| 1326 __ Add(x0, x0, 1); | |
| 1327 __ Mov(x2, 0); | |
| 1328 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); | |
| 1329 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | |
| 1330 RelocInfo::CODE_TARGET); | |
| 1331 } | |
| 1332 __ Drop(3); | |
| 1333 __ Ret(); | |
| 1334 } | |
| 1335 | |
| 1336 | |
| 1337 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { | |
| 1338 __ SmiTag(x10, x0); | |
| 1339 __ Mov(x11, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
| 1340 __ Push(lr, fp); | |
| 1341 __ Push(x11, x1, x10); | |
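| // The adaptor frame now holds, from jssp upwards: the smi-tagged | |
| // argument count (x10), the function (x1), the ARGUMENTS_ADAPTOR frame | |
| // type marker (x11), and the caller's fp and lr. | |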
| 1342 __ Add(fp, jssp, | |
| 1343 StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize); | |
| 1344 } | |
| 1345 | |
| 1346 | |
| 1347 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | |
| 1348 // ----------- S t a t e ------------- | |
| 1349 // -- x0 : result being passed through | |
| 1350 // ----------------------------------- | |
| 1351 // Get the number of arguments passed (as a smi), tear down the frame and | |
| 1352 // then drop the parameters and the receiver. | |
| 1353 __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | |
| 1354 kPointerSize))); | |
| 1355 __ Mov(jssp, fp); | |
| 1356 __ Pop(fp, lr); | |
| 1357 __ DropBySMI(x10, kXRegSizeInBytes); | |
| 1358 __ Drop(1); | |
| 1359 } | |
| 1360 | |
| 1361 | |
| 1362 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | |
| 1363 ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline"); | |
| 1364 // ----------- S t a t e ------------- | |
| 1365 // -- x0 : actual number of arguments | |
| 1366 // -- x1 : function (passed through to callee) | |
| 1367 // -- x2 : expected number of arguments | |
| 1368 // ----------------------------------- | |
| 1369 | |
| 1370 Label invoke, dont_adapt_arguments; | |
| 1371 | |
| 1372 Label enough, too_few; | |
| 1373 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kCodeEntryOffset)); | |
| 1374 __ Cmp(x0, x2); | |
| 1375 __ B(lt, &too_few); | |
| 1376 __ Cmp(x2, SharedFunctionInfo::kDontAdaptArgumentsSentinel); | |
| 1377 __ B(eq, &dont_adapt_arguments); | |
| 1378 | |
| 1379 { // Enough parameters: actual >= expected | |
| 1380 EnterArgumentsAdaptorFrame(masm); | |
| 1381 | |
| 1382 // Calculate copy start address into x10 and end address into x11. | |
| 1383 // x0: actual number of arguments | |
| 1384 // x1: function | |
| 1385 // x2: expected number of arguments | |
| 1386 // x3: code entry to call | |
| 1387 __ Add(x10, fp, Operand(x0, LSL, kPointerSizeLog2)); | |
| 1388 // Adjust for return address and receiver | |
| 1389 __ Add(x10, x10, 2 * kPointerSize); | |
| 1390 __ Sub(x11, x10, Operand(x2, LSL, kPointerSizeLog2)); | |
| 1391 | |
| 1392 // Copy the arguments (including the receiver) to the new stack frame. | |
| 1393 // x0: actual number of arguments | |
| 1394 // x1: function | |
| 1395 // x2: expected number of arguments | |
| 1396 // x3: code entry to call | |
| 1397 // x10: copy start address | |
| 1398 // x11: copy end address | |
| 1399 | |
| 1400 // TODO(all): Should we push values 2 by 2? | |
| 1401 Label copy; | |
| 1402 __ Bind(&copy); | |
| 1403 __ Cmp(x10, x11); | |
| 1404 __ Ldr(x12, MemOperand(x10, -kPointerSize, PostIndex)); | |
| 1405 __ Push(x12); | |
| 1406 __ B(gt, &copy); | |
| 1407 | |
| 1408 __ B(&invoke); | |
| 1409 } | |
| 1410 | |
| 1411 { // Too few parameters: Actual < expected | |
| 1412 __ Bind(&too_few); | |
| 1413 EnterArgumentsAdaptorFrame(masm); | |
| 1414 | |
| 1415 // Calculate copy start address into x10 and copy end address into x11. | |
| 1416 // x0: actual number of arguments | |
| 1417 // x1: function | |
| 1418 // x2: expected number of arguments | |
| 1419 // x3: code entry to call | |
| 1420 // Adjust for return address. | |
| 1421 __ Add(x11, fp, 1 * kPointerSize); | |
| 1422 __ Add(x10, x11, Operand(x0, LSL, kPointerSizeLog2)); | |
| 1423 __ Add(x10, x10, 1 * kPointerSize); | |
| 1424 | |
| 1425 // Copy the arguments (including the receiver) to the new stack frame. | |
| 1426 // x0: actual number of arguments | |
| 1427 // x1: function | |
| 1428 // x2: expected number of arguments | |
| 1429 // x3: code entry to call | |
| 1430 // x10: copy start address | |
| 1431 // x11: copy end address | |
| 1432 Label copy; | |
| 1433 __ Bind(&copy); | |
| 1434 __ Ldr(x12, MemOperand(x10, -kPointerSize, PostIndex)); | |
| 1435 __ Push(x12); | |
| 1436 __ Cmp(x10, x11); // Compare before moving to next argument. | |
| 1437 __ B(ne, &copy); | |
| 1438 | |
| 1439 // Fill the remaining expected arguments with undefined. | |
| 1440 // x0: actual number of arguments | |
| 1441 // x1: function | |
| 1442 // x2: expected number of arguments | |
| 1443 // x3: code entry to call | |
| 1444 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | |
| 1445 __ Sub(x11, fp, Operand(x2, LSL, kPointerSizeLog2)); | |
| 1446 // Adjust for the arguments adaptor frame and already pushed receiver. | |
| 1447 __ Sub(x11, x11, | |
| 1448 StandardFrameConstants::kFixedFrameSizeFromFp + (2 * kPointerSize)); | |
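| // x11 is now the value jssp will have once all missing arguments have | |
| // been pushed; the fill loop below pushes undefined until then. | |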
| 1449 | |
| 1450 // TODO(all): Optimize this to use ldp? | |
| 1451 Label fill; | |
| 1452 __ Bind(&fill); | |
| 1453 __ Push(x10); | |
| 1454 __ Cmp(jssp, x11); | |
| 1455 __ B(ne, &fill); | |
| 1456 } | |
| 1457 | |
| 1458 // Arguments have been adapted. Now call the entry point. | |
| 1459 __ Bind(&invoke); | |
| 1460 __ Call(x3); | |
| 1461 | |
| 1462 // Store offset of return address for deoptimizer. | |
| 1463 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); | |
| 1464 | |
| 1465 // Exit frame and return. | |
| 1466 LeaveArgumentsAdaptorFrame(masm); | |
| 1467 __ Ret(); | |
| 1468 | |
| 1469 // Call the entry point without adapting the arguments. | |
| 1470 __ Bind(&dont_adapt_arguments); | |
| 1471 __ Jump(x3); | |
| 1472 } | |
| 1473 | |
| 1474 | |
| 1475 #undef __ | |
| 1476 | |
| 1477 } } // namespace v8::internal | |
| 1478 | |
| 1479 #endif // V8_TARGET_ARCH_A64 | |