| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 131 matching lines...) |
| 142 if (FLAG_debug_code) { | 142 if (FLAG_debug_code) { |
| 143 // Initial map for the builtin Array functions should be maps. | 143 // Initial map for the builtin Array functions should be maps. |
| 144 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); | 144 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 145 __ Tst(x10, kSmiTagMask); | 145 __ Tst(x10, kSmiTagMask); |
| 146 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); | 146 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); |
| 147 __ CompareObjectType(x10, x11, x12, MAP_TYPE); | 147 __ CompareObjectType(x10, x11, x12, MAP_TYPE); |
| 148 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 148 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
| 149 } | 149 } |
| 150 | 150 |
| 151 // Run the native code for the Array function called as a normal function. | 151 // Run the native code for the Array function called as a normal function. |
| 152 Handle<Object> undefined_sentinel( | 152 Handle<Object> megamorphic_sentinel = |
| 153 masm->isolate()->heap()->undefined_value(), | 153 TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()); |
| 154 masm->isolate()); | 154 __ Mov(x2, Operand(megamorphic_sentinel)); |
| 155 __ Mov(x2, Operand(undefined_sentinel)); | |
| 156 ArrayConstructorStub stub(masm->isolate()); | 155 ArrayConstructorStub stub(masm->isolate()); |
| 157 __ TailCallStub(&stub); | 156 __ TailCallStub(&stub); |
| 158 } | 157 } |
| 159 | 158 |
| 160 | 159 |
| 161 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { | 160 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { |
| 162 // ----------- S t a t e ------------- | 161 // ----------- S t a t e ------------- |
| 163 // -- x0 : number of arguments | 162 // -- x0 : number of arguments |
| 164 // -- x1 : constructor function | 163 // -- x1 : constructor function |
| 165 // -- lr : return address | 164 // -- lr : return address |
| (...skipping 10 matching lines...) |
| 176 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10); | 175 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10); |
| 177 __ Cmp(function, x10); | 176 __ Cmp(function, x10); |
| 178 __ Assert(eq, kUnexpectedStringFunction); | 177 __ Assert(eq, kUnexpectedStringFunction); |
| 179 } | 178 } |
| 180 | 179 |
| 181 // Load the first argument in x0 and get rid of the rest. | 180 // Load the first argument in x0 and get rid of the rest. |
| 182 Label no_arguments; | 181 Label no_arguments; |
| 183 __ Cbz(argc, &no_arguments); | 182 __ Cbz(argc, &no_arguments); |
| 184 // First args = sp[(argc - 1) * 8]. | 183 // First args = sp[(argc - 1) * 8]. |
| 185 __ Sub(argc, argc, 1); | 184 __ Sub(argc, argc, 1); |
| 186 __ Claim(argc, kXRegSizeInBytes); | 185 __ Claim(argc, kXRegSize); |
| 188 // jssp now points to args[0], load and drop args[0] + receiver. | 186 // jssp now points to args[0], load and drop args[0] + receiver. |
| 188 // TODO(jbramley): Consider adding ClaimAndPoke. | 187 Register arg = argc; |
| 189 __ Ldr(argc, MemOperand(jssp, 2 * kPointerSize, PostIndex)); | 188 __ Ldr(arg, MemOperand(jssp, 2 * kPointerSize, PostIndex)); |
| 189 argc = NoReg; |
| 190 | 190 |
| 191 Register argument = x2; | 191 Register argument = x2; |
| 192 Label not_cached, argument_is_string; | 192 Label not_cached, argument_is_string; |
| 193 __ LookupNumberStringCache(argc, // Input. | 193 __ LookupNumberStringCache(arg, // Input. |
| 194 argument, // Result. | 194 argument, // Result. |
| 195 x10, // Scratch. | 195 x10, // Scratch. |
| 196 x11, // Scratch. | 196 x11, // Scratch. |
| 197 x12, // Scratch. | 197 x12, // Scratch. |
| 198 &not_cached); | 198 &not_cached); |
| 199 __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11); | 199 __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11); |
| 200 __ Bind(&argument_is_string); | 200 __ Bind(&argument_is_string); |
| 201 | 201 |
| 202 // ----------- S t a t e ------------- | 202 // ----------- S t a t e ------------- |
| 203 // -- x2 : argument converted to string | 203 // -- x2 : argument converted to string |
| (...skipping 27 matching lines...) |
| 231 | 231 |
| 232 // Ensure the object is fully initialized. | 232 // Ensure the object is fully initialized. |
| 233 STATIC_ASSERT(JSValue::kSize == (4 * kPointerSize)); | 233 STATIC_ASSERT(JSValue::kSize == (4 * kPointerSize)); |
| 234 | 234 |
| 235 __ Ret(); | 235 __ Ret(); |
| 236 | 236 |
| 237 // The argument was not found in the number to string cache. Check | 237 // The argument was not found in the number to string cache. Check |
| 238 // if it's a string already before calling the conversion builtin. | 238 // if it's a string already before calling the conversion builtin. |
| 239 Label convert_argument; | 239 Label convert_argument; |
| 240 __ Bind(&not_cached); | 240 __ Bind(&not_cached); |
| 241 __ JumpIfSmi(argc, &convert_argument); | 241 __ JumpIfSmi(arg, &convert_argument); |
| 242 | 242 |
| 243 // Is it a String? | 243 // Is it a String? |
| 244 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset)); | 244 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset)); |
| 245 __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset)); | 245 __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset)); |
| 246 __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument); | 246 __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument); |
| 247 __ Mov(argument, argc); | 247 __ Mov(argument, arg); |
| 248 __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11); | 248 __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11); |
| 249 __ B(&argument_is_string); | 249 __ B(&argument_is_string); |
| 250 | 250 |
| 251 // Invoke the conversion builtin and put the result into x2. | 251 // Invoke the conversion builtin and put the result into x2. |
| 252 __ Bind(&convert_argument); | 252 __ Bind(&convert_argument); |
| 253 __ Push(function); // Preserve the function. | 253 __ Push(function); // Preserve the function. |
| 254 __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11); | 254 __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11); |
| 255 { | 255 { |
| 256 FrameScope scope(masm, StackFrame::INTERNAL); | 256 FrameScope scope(masm, StackFrame::INTERNAL); |
| 257 __ Push(argc); | 257 __ Push(arg); |
| 258 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 258 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
| 259 } | 259 } |
| 260 __ Pop(function); | 260 __ Pop(function); |
| 261 __ Mov(argument, x0); | 261 __ Mov(argument, x0); |
| 262 __ B(&argument_is_string); | 262 __ B(&argument_is_string); |
| 263 | 263 |
| 264 // Load the empty string into x2, remove the receiver from the | 264 // Load the empty string into x2, remove the receiver from the |
| 265 // stack, and jump back to the case where the argument is a string. | 265 // stack, and jump back to the case where the argument is a string. |
| 266 __ Bind(&no_arguments); | 266 __ Bind(&no_arguments); |
| 267 __ LoadRoot(argument, Heap::kempty_stringRootIndex); | 267 __ LoadRoot(argument, Heap::kempty_stringRootIndex); |
| (...skipping 140 matching lines...) |
| 408 Register new_obj = x4; | 408 Register new_obj = x4; |
| 409 __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset)); | 409 __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset)); |
| 410 __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS); | 410 __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS); |
| 411 | 411 |
| 412 // Allocated the JSObject, now initialize the fields. Map is set to | 412 // Allocated the JSObject, now initialize the fields. Map is set to |
| 413 // initial map and properties and elements are set to empty fixed array. | 413 // initial map and properties and elements are set to empty fixed array. |
| 414 // NB. the object pointer is not tagged, so MemOperand is used. | 414 // NB. the object pointer is not tagged, so MemOperand is used. |
| 415 Register empty = x5; | 415 Register empty = x5; |
| 416 __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex); | 416 __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex); |
| 417 __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset)); | 417 __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset)); |
| 418 __ Str(empty, MemOperand(new_obj, JSObject::kPropertiesOffset)); | 418 STATIC_ASSERT(JSObject::kElementsOffset == |
| 419 __ Str(empty, MemOperand(new_obj, JSObject::kElementsOffset)); | 419 (JSObject::kPropertiesOffset + kPointerSize)); |
| 420 __ Stp(empty, empty, MemOperand(new_obj, JSObject::kPropertiesOffset)); |
| 420 | 421 |
| 421 Register first_prop = x5; | 422 Register first_prop = x5; |
| 422 __ Add(first_prop, new_obj, JSObject::kHeaderSize); | 423 __ Add(first_prop, new_obj, JSObject::kHeaderSize); |
| 423 | 424 |
| 424 // Fill all of the in-object properties with the appropriate filler. | 425 // Fill all of the in-object properties with the appropriate filler. |
| 425 Register obj_end = x6; | |
| 426 __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2)); | |
| 427 Register undef = x7; | 426 Register undef = x7; |
| 428 __ LoadRoot(undef, Heap::kUndefinedValueRootIndex); | 427 __ LoadRoot(undef, Heap::kUndefinedValueRootIndex); |
| 429 | 428 |
| 430 // Obtain number of pre-allocated property fields and in-object | 429 // Obtain number of pre-allocated property fields and in-object |
| 431 // properties. | 430 // properties. |
| 432 Register prealloc_fields = x10; | 431 Register prealloc_fields = x10; |
| 433 Register inobject_props = x11; | 432 Register inobject_props = x11; |
| 434 Register inst_sizes = x11; | 433 Register inst_sizes = x11; |
| 435 __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset)); | 434 __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset)); |
| 436 __ Ubfx(prealloc_fields, inst_sizes, | 435 __ Ubfx(prealloc_fields, inst_sizes, |
| 437 Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, | 436 Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, |
| 438 kBitsPerByte); | 437 kBitsPerByte); |
| 439 __ Ubfx(inobject_props, inst_sizes, | 438 __ Ubfx(inobject_props, inst_sizes, |
| 440 Map::kInObjectPropertiesByte * kBitsPerByte, kBitsPerByte); | 439 Map::kInObjectPropertiesByte * kBitsPerByte, kBitsPerByte); |
| 441 | 440 |
| 441 // Calculate number of property fields in the object. |
| 442 Register prop_fields = x6; |
| 443 __ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize); |
| 444 |
| 442 if (count_constructions) { | 445 if (count_constructions) { |
| 446 // Fill the pre-allocated fields with undef. |
| 447 __ FillFields(first_prop, prealloc_fields, undef); |
| 448 |
| 443 // Register first_non_prealloc is the offset of the first field after | 449 // Register first_non_prealloc is the offset of the first field after |
| 444 // pre-allocated fields. | 450 // pre-allocated fields. |
| 445 Register first_non_prealloc = x12; | 451 Register first_non_prealloc = x12; |
| 446 __ Add(first_non_prealloc, first_prop, | 452 __ Add(first_non_prealloc, first_prop, |
| 447 Operand(prealloc_fields, LSL, kPointerSizeLog2)); | 453 Operand(prealloc_fields, LSL, kPointerSizeLog2)); |
| 448 | 454 |
| 455 first_prop = NoReg; |
| 456 |
| 449 if (FLAG_debug_code) { | 457 if (FLAG_debug_code) { |
| 458 Register obj_end = x5; |
| 459 __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2)); |
| 450 __ Cmp(first_non_prealloc, obj_end); | 460 __ Cmp(first_non_prealloc, obj_end); |
| 451 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields); | 461 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields); |
| 452 } | 462 } |
| 453 __ InitializeFieldsWithFiller(first_prop, first_non_prealloc, undef); | 463 |
| 454 // To allow for truncation. | 464 // Fill the remaining fields with one pointer filler map. |
| 455 __ LoadRoot(x12, Heap::kOnePointerFillerMapRootIndex); | 465 Register one_pointer_filler = x5; |
| 456 __ InitializeFieldsWithFiller(first_prop, obj_end, x12); | 466 Register non_prealloc_fields = x6; |
| 467 __ LoadRoot(one_pointer_filler, Heap::kOnePointerFillerMapRootIndex); |
| 468 __ Sub(non_prealloc_fields, prop_fields, prealloc_fields); |
| 469 __ FillFields(first_non_prealloc, non_prealloc_fields, |
| 470 one_pointer_filler); |
| 471 prop_fields = NoReg; |
| 457 } else { | 472 } else { |
| 458 __ InitializeFieldsWithFiller(first_prop, obj_end, undef); | 473 // Fill all of the property fields with undef. |
| 474 __ FillFields(first_prop, prop_fields, undef); |
| 475 first_prop = NoReg; |
| 476 prop_fields = NoReg; |
| 459 } | 477 } |
| 460 | 478 |
| 461 // Add the object tag to make the JSObject real, so that we can continue | 479 // Add the object tag to make the JSObject real, so that we can continue |
| 462 // and jump into the continuation code at any time from now on. Any | 480 // and jump into the continuation code at any time from now on. Any |
| 463 // failures need to undo the allocation, so that the heap is in a | 481 // failures need to undo the allocation, so that the heap is in a |
| 464 // consistent state and verifiable. | 482 // consistent state and verifiable. |
| 465 __ Add(new_obj, new_obj, kHeapObjectTag); | 483 __ Add(new_obj, new_obj, kHeapObjectTag); |
| 466 | 484 |
| 467 // Check if a non-empty properties array is needed. Continue with | 485 // Check if a non-empty properties array is needed. Continue with |
| 468 // allocated object if not, or fall through to runtime call if it is. | 486 // allocated object if not, or fall through to runtime call if it is. |
| 469 Register element_count = x3; | 487 Register element_count = x3; |
| 470 __ Ldrb(x3, FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset)); | 488 __ Ldrb(element_count, |
| 489 FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset)); |
| 471 // The field instance sizes contains both pre-allocated property fields | 490 // The field instance sizes contains both pre-allocated property fields |
| 472 // and in-object properties. | 491 // and in-object properties. |
| 473 __ Add(x3, x3, prealloc_fields); | 492 __ Add(element_count, element_count, prealloc_fields); |
| 474 __ Subs(element_count, x3, inobject_props); | 493 __ Subs(element_count, element_count, inobject_props); |
| 475 | 494 |
| 476 // Done if no extra properties are to be allocated. | 495 // Done if no extra properties are to be allocated. |
| 477 __ B(eq, &allocated); | 496 __ B(eq, &allocated); |
| 478 __ Assert(pl, kPropertyAllocationCountFailed); | 497 __ Assert(pl, kPropertyAllocationCountFailed); |
| 479 | 498 |
| 480 // Scale the number of elements by pointer size and add the header for | 499 // Scale the number of elements by pointer size and add the header for |
| 481 // FixedArrays to the start of the next object calculation from above. | 500 // FixedArrays to the start of the next object calculation from above. |
| 482 Register new_array = x5; | 501 Register new_array = x5; |
| 483 Register array_size = x6; | 502 Register array_size = x6; |
| 484 __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize); | 503 __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize); |
| 485 __ Allocate(array_size, new_array, x11, x12, &undo_allocation, | 504 __ Allocate(array_size, new_array, x11, x12, &undo_allocation, |
| 486 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | | 505 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | |
| 487 SIZE_IN_WORDS)); | 506 SIZE_IN_WORDS)); |
| 488 | 507 |
| 489 Register array_map = x10; | 508 Register array_map = x10; |
| 490 __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex); | 509 __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex); |
| 491 __ Str(array_map, MemOperand(new_array, FixedArray::kMapOffset)); | 510 __ Str(array_map, MemOperand(new_array, FixedArray::kMapOffset)); |
| 492 __ SmiTag(x0, element_count); | 511 __ SmiTag(x0, element_count); |
| 493 __ Str(x0, MemOperand(new_array, FixedArray::kLengthOffset)); | 512 __ Str(x0, MemOperand(new_array, FixedArray::kLengthOffset)); |
| 494 | 513 |
| 495 // Initialize the fields to undefined. | 514 // Initialize the fields to undefined. |
| 496 Register elements = x10; | 515 Register elements = x10; |
| 497 Register elements_end = x11; | |
| 498 __ Add(elements, new_array, FixedArray::kHeaderSize); | 516 __ Add(elements, new_array, FixedArray::kHeaderSize); |
| 499 __ Add(elements_end, elements, | 517 __ FillFields(elements, element_count, undef); |
| 500 Operand(element_count, LSL, kPointerSizeLog2)); | |
| 501 __ InitializeFieldsWithFiller(elements, elements_end, undef); | |
| 502 | 518 |
| 503 // Store the initialized FixedArray into the properties field of the | 519 // Store the initialized FixedArray into the properties field of the |
| 504 // JSObject. | 520 // JSObject. |
| 505 __ Add(new_array, new_array, kHeapObjectTag); | 521 __ Add(new_array, new_array, kHeapObjectTag); |
| 506 __ Str(new_array, FieldMemOperand(new_obj, JSObject::kPropertiesOffset)); | 522 __ Str(new_array, FieldMemOperand(new_obj, JSObject::kPropertiesOffset)); |
| 507 | 523 |
| 508 // Continue with JSObject being successfully allocated. | 524 // Continue with JSObject being successfully allocated. |
| 509 __ B(&allocated); | 525 __ B(&allocated); |
| 510 | 526 |
| 511 // Undo the setting of the new top so that the heap is verifiable. For | 527 // Undo the setting of the new top so that the heap is verifiable. For |
| (...skipping 13 matching lines...) |
| 525 // x4: JSObject | 541 // x4: JSObject |
| 526 __ Bind(&allocated); | 542 __ Bind(&allocated); |
| 527 __ Push(x4, x4); | 543 __ Push(x4, x4); |
| 528 | 544 |
| 529 // Reload the number of arguments from the stack. | 545 // Reload the number of arguments from the stack. |
| 530 // Set it up in x0 for the function call below. | 546 // Set it up in x0 for the function call below. |
| 531 // jssp[0]: receiver | 547 // jssp[0]: receiver |
| 532 // jssp[1]: receiver | 548 // jssp[1]: receiver |
| 533 // jssp[2]: constructor function | 549 // jssp[2]: constructor function |
| 534 // jssp[3]: number of arguments (smi-tagged) | 550 // jssp[3]: number of arguments (smi-tagged) |
| 535 __ Peek(constructor, 2 * kXRegSizeInBytes); // Load constructor. | 551 __ Peek(constructor, 2 * kXRegSize); // Load constructor. |
| 536 __ Peek(argc, 3 * kXRegSizeInBytes); // Load number of arguments. | 552 __ Peek(argc, 3 * kXRegSize); // Load number of arguments. |
| 537 __ SmiUntag(argc); | 553 __ SmiUntag(argc); |
| 538 | 554 |
| 539 // Set up pointer to last argument. | 555 // Set up pointer to last argument. |
| 540 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset); | 556 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset); |
| 541 | 557 |
| 542 // Copy arguments and receiver to the expression stack. | 558 // Copy arguments and receiver to the expression stack. |
| 543 // Copy 2 values every loop to use ldp/stp. | 559 // Copy 2 values every loop to use ldp/stp. |
| 544 // x0: number of arguments | 560 // x0: number of arguments |
| 545 // x1: constructor function | 561 // x1: constructor function |
| 546 // x2: address of last argument (caller sp) | 562 // x2: address of last argument (caller sp) |
| (...skipping 63 matching lines...) |
| 610 __ Bind(&use_receiver); | 626 __ Bind(&use_receiver); |
| 611 __ Peek(x0, 0); | 627 __ Peek(x0, 0); |
| 612 | 628 |
| 613 // Remove the receiver from the stack, remove caller arguments, and | 629 // Remove the receiver from the stack, remove caller arguments, and |
| 614 // return. | 630 // return. |
| 615 __ Bind(&exit); | 631 __ Bind(&exit); |
| 616 // x0: result | 632 // x0: result |
| 617 // jssp[0]: receiver (newly allocated object) | 633 // jssp[0]: receiver (newly allocated object) |
| 618 // jssp[1]: constructor function | 634 // jssp[1]: constructor function |
| 619 // jssp[2]: number of arguments (smi-tagged) | 635 // jssp[2]: number of arguments (smi-tagged) |
| 620 __ Peek(x1, 2 * kXRegSizeInBytes); | 636 __ Peek(x1, 2 * kXRegSize); |
| 621 | 637 |
| 622 // Leave construct frame. | 638 // Leave construct frame. |
| 623 } | 639 } |
| 624 | 640 |
| 625 __ DropBySMI(x1); | 641 __ DropBySMI(x1); |
| 626 __ Drop(1); | 642 __ Drop(1); |
| 627 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2); | 643 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2); |
| 628 __ Ret(); | 644 __ Ret(); |
| 629 } | 645 } |
| 630 | 646 |
| (...skipping 71 matching lines...) |
| 702 __ Mov(x22, x19); | 718 __ Mov(x22, x19); |
| 703 __ Mov(x23, x19); | 719 __ Mov(x23, x19); |
| 704 __ Mov(x24, x19); | 720 __ Mov(x24, x19); |
| 705 __ Mov(x25, x19); | 721 __ Mov(x25, x19); |
| 706 // Don't initialize the reserved registers. | 722 // Don't initialize the reserved registers. |
| 707 // x26 : root register (root). | 723 // x26 : root register (root). |
| 708 // x27 : context pointer (cp). | 724 // x27 : context pointer (cp). |
| 709 // x28 : JS stack pointer (jssp). | 725 // x28 : JS stack pointer (jssp). |
| 710 // x29 : frame pointer (fp). | 726 // x29 : frame pointer (fp). |
| 711 | 727 |
| 712 // TODO(alexandre): Revisit the MAsm function invocation mechanisms. | |
| 713 // Currently there is a mix of statically and dynamically allocated | |
| 714 // registers. | |
| 715 __ Mov(x0, argc); | 728 __ Mov(x0, argc); |
| 716 if (is_construct) { | 729 if (is_construct) { |
| 717 // No type feedback cell is available. | 730 // No type feedback cell is available. |
| 718 Handle<Object> undefined_sentinel( | 731 Handle<Object> megamorphic_sentinel = |
| 719 masm->isolate()->heap()->undefined_value(), masm->isolate()); | 732 TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()); |
| 720 __ Mov(x2, Operand(undefined_sentinel)); | 733 __ Mov(x2, Operand(megamorphic_sentinel)); |
| 721 | 734 |
| 722 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 735 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); |
| 723 __ CallStub(&stub); | 736 __ CallStub(&stub); |
| 724 } else { | 737 } else { |
| 725 ParameterCount actual(x0); | 738 ParameterCount actual(x0); |
| 726 __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper()); | 739 __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper()); |
| 727 } | 740 } |
| 728 // Exit the JS internal frame and remove the parameters (except function), | 741 // Exit the JS internal frame and remove the parameters (except function), |
| 729 // and return. | 742 // and return. |
| 730 } | 743 } |
| (...skipping 282 matching lines...) |
| 1013 __ Cbnz(argc, &done); | 1026 __ Cbnz(argc, &done); |
| 1014 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); | 1027 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); |
| 1015 __ Push(scratch1); | 1028 __ Push(scratch1); |
| 1016 __ Mov(argc, 1); | 1029 __ Mov(argc, 1); |
| 1017 __ Bind(&done); | 1030 __ Bind(&done); |
| 1018 } | 1031 } |
| 1019 | 1032 |
| 1020 // 2. Get the function to call (passed as receiver) from the stack, check | 1033 // 2. Get the function to call (passed as receiver) from the stack, check |
| 1021 // if it is a function. | 1034 // if it is a function. |
| 1022 Label slow, non_function; | 1035 Label slow, non_function; |
| 1023 __ Peek(function, Operand(argc, LSL, kXRegSizeInBytesLog2)); | 1036 __ Peek(function, Operand(argc, LSL, kXRegSizeLog2)); |
| 1024 __ JumpIfSmi(function, &non_function); | 1037 __ JumpIfSmi(function, &non_function); |
| 1025 __ JumpIfNotObjectType(function, scratch1, receiver_type, | 1038 __ JumpIfNotObjectType(function, scratch1, receiver_type, |
| 1026 JS_FUNCTION_TYPE, &slow); | 1039 JS_FUNCTION_TYPE, &slow); |
| 1027 | 1040 |
| 1028 // 3a. Patch the first argument if necessary when calling a function. | 1041 // 3a. Patch the first argument if necessary when calling a function. |
| 1029 Label shift_arguments; | 1042 Label shift_arguments; |
| 1030 __ Mov(call_type, static_cast<int>(call_type_JS_func)); | 1043 __ Mov(call_type, static_cast<int>(call_type_JS_func)); |
| 1031 { Label convert_to_object, use_global_receiver, patch_receiver; | 1044 { Label convert_to_object, use_global_receiver, patch_receiver; |
| 1032 // Change context eagerly in case we need the global receiver. | 1045 // Change context eagerly in case we need the global receiver. |
| 1033 __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); | 1046 __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); |
| 1034 | 1047 |
| 1035 // Do not transform the receiver for strict mode functions. | 1048 // Do not transform the receiver for strict mode functions. |
| 1036 // Also do not transform the receiver for native (Compilerhints already in | 1049 // Also do not transform the receiver for native (Compilerhints already in |
| 1037 // x3). | 1050 // x3). |
| 1038 __ Ldr(scratch1, | 1051 __ Ldr(scratch1, |
| 1039 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 1052 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| 1040 __ Ldr(scratch2.W(), | 1053 __ Ldr(scratch2.W(), |
| 1041 FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset)); | 1054 FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset)); |
| 1042 __ TestAndBranchIfAnySet( | 1055 __ TestAndBranchIfAnySet( |
| 1043 scratch2.W(), | 1056 scratch2.W(), |
| 1044 (1 << SharedFunctionInfo::kStrictModeFunction) | | 1057 (1 << SharedFunctionInfo::kStrictModeFunction) | |
| 1045 (1 << SharedFunctionInfo::kNative), | 1058 (1 << SharedFunctionInfo::kNative), |
| 1046 &shift_arguments); | 1059 &shift_arguments); |
| 1047 | 1060 |
| 1048 // Compute the receiver in non-strict mode. | 1061 // Compute the receiver in sloppy mode. |
| 1049 Register receiver = x2; | 1062 Register receiver = x2; |
| 1050 __ Sub(scratch1, argc, 1); | 1063 __ Sub(scratch1, argc, 1); |
| 1051 __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeInBytesLog2)); | 1064 __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeLog2)); |
| 1052 __ JumpIfSmi(receiver, &convert_to_object); | 1065 __ JumpIfSmi(receiver, &convert_to_object); |
| 1053 | 1066 |
| 1054 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, | 1067 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, |
| 1055 &use_global_receiver); | 1068 &use_global_receiver); |
| 1056 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver); | 1069 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver); |
| 1057 | 1070 |
| 1058 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1071 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1059 __ JumpIfObjectType(receiver, scratch1, scratch2, | 1072 __ JumpIfObjectType(receiver, scratch1, scratch2, |
| 1060 FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge); | 1073 FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge); |
| 1061 | 1074 |
| 1062 __ Bind(&convert_to_object); | 1075 __ Bind(&convert_to_object); |
| 1063 | 1076 |
| 1064 { | 1077 { |
| 1065 // Enter an internal frame in order to preserve argument count. | 1078 // Enter an internal frame in order to preserve argument count. |
| 1066 FrameScope scope(masm, StackFrame::INTERNAL); | 1079 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1067 __ SmiTag(argc); | 1080 __ SmiTag(argc); |
| 1068 | 1081 |
| 1069 __ Push(argc, receiver); | 1082 __ Push(argc, receiver); |
| 1070 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1083 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 1071 __ Mov(receiver, x0); | 1084 __ Mov(receiver, x0); |
| 1072 | 1085 |
| 1073 __ Pop(argc); | 1086 __ Pop(argc); |
| 1074 __ SmiUntag(argc); | 1087 __ SmiUntag(argc); |
| 1075 | 1088 |
| 1076 // Exit the internal frame. | 1089 // Exit the internal frame. |
| 1077 } | 1090 } |
| 1078 | 1091 |
| 1079 // Restore the function and flag in the registers. | 1092 // Restore the function and flag in the registers. |
| 1080 __ Peek(function, Operand(argc, LSL, kXRegSizeInBytesLog2)); | 1093 __ Peek(function, Operand(argc, LSL, kXRegSizeLog2)); |
| 1081 __ Mov(call_type, static_cast<int>(call_type_JS_func)); | 1094 __ Mov(call_type, static_cast<int>(call_type_JS_func)); |
| 1082 __ B(&patch_receiver); | 1095 __ B(&patch_receiver); |
| 1083 | 1096 |
| 1084 __ Bind(&use_global_receiver); | 1097 __ Bind(&use_global_receiver); |
| 1085 __ Ldr(receiver, GlobalObjectMemOperand()); | 1098 __ Ldr(receiver, GlobalObjectMemOperand()); |
| 1086 __ Ldr(receiver, | 1099 __ Ldr(receiver, |
| 1087 FieldMemOperand(receiver, GlobalObject::kGlobalReceiverOffset)); | 1100 FieldMemOperand(receiver, GlobalObject::kGlobalReceiverOffset)); |
| 1088 | 1101 |
| 1089 | 1102 |
| 1090 __ Bind(&patch_receiver); | 1103 __ Bind(&patch_receiver); |
| 1091 __ Sub(scratch1, argc, 1); | 1104 __ Sub(scratch1, argc, 1); |
| 1092 __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeInBytesLog2)); | 1105 __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeLog2)); |
| 1093 | 1106 |
| 1094 __ B(&shift_arguments); | 1107 __ B(&shift_arguments); |
| 1095 } | 1108 } |
| 1096 | 1109 |
| 1097 // 3b. Check for function proxy. | 1110 // 3b. Check for function proxy. |
| 1098 __ Bind(&slow); | 1111 __ Bind(&slow); |
| 1099 __ Mov(call_type, static_cast<int>(call_type_func_proxy)); | 1112 __ Mov(call_type, static_cast<int>(call_type_func_proxy)); |
| 1100 __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE); | 1113 __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE); |
| 1101 __ B(eq, &shift_arguments); | 1114 __ B(eq, &shift_arguments); |
| 1102 __ Bind(&non_function); | 1115 __ Bind(&non_function); |
| 1103 __ Mov(call_type, static_cast<int>(call_type_non_func)); | 1116 __ Mov(call_type, static_cast<int>(call_type_non_func)); |
| 1104 | 1117 |
| 1105 // 3c. Patch the first argument when calling a non-function. The | 1118 // 3c. Patch the first argument when calling a non-function. The |
| 1106 // CALL_NON_FUNCTION builtin expects the non-function callee as | 1119 // CALL_NON_FUNCTION builtin expects the non-function callee as |
| 1107 // receiver, so overwrite the first argument which will ultimately | 1120 // receiver, so overwrite the first argument which will ultimately |
| 1108 // become the receiver. | 1121 // become the receiver. |
| 1109 // call type (0: JS function, 1: function proxy, 2: non-function) | 1122 // call type (0: JS function, 1: function proxy, 2: non-function) |
| 1110 __ Sub(scratch1, argc, 1); | 1123 __ Sub(scratch1, argc, 1); |
| 1111 __ Poke(function, Operand(scratch1, LSL, kXRegSizeInBytesLog2)); | 1124 __ Poke(function, Operand(scratch1, LSL, kXRegSizeLog2)); |
| 1112 | 1125 |
| 1113 // 4. Shift arguments and return address one slot down on the stack | 1126 // 4. Shift arguments and return address one slot down on the stack |
| 1114 // (overwriting the original receiver). Adjust argument count to make | 1127 // (overwriting the original receiver). Adjust argument count to make |
| 1115 // the original first argument the new receiver. | 1128 // the original first argument the new receiver. |
| 1116 // call type (0: JS function, 1: function proxy, 2: non-function) | 1129 // call type (0: JS function, 1: function proxy, 2: non-function) |
| 1117 __ Bind(&shift_arguments); | 1130 __ Bind(&shift_arguments); |
| 1118 { Label loop; | 1131 { Label loop; |
| 1119 // Calculate the copy start address (destination). Copy end address is jssp. | 1132 // Calculate the copy start address (destination). Copy end address is jssp. |
| 1120 __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2)); | 1133 __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2)); |
| 1121 __ Sub(scratch1, scratch2, kPointerSize); | 1134 __ Sub(scratch1, scratch2, kPointerSize); |
| (...skipping 118 matching lines...) |
| 1240 JSFunction::kSharedFunctionInfoOffset)); | 1253 JSFunction::kSharedFunctionInfoOffset)); |
| 1241 | 1254 |
| 1242 // Compute and push the receiver. | 1255 // Compute and push the receiver. |
| 1243 // Do not transform the receiver for strict mode functions. | 1256 // Do not transform the receiver for strict mode functions. |
| 1244 Label convert_receiver_to_object, use_global_receiver; | 1257 Label convert_receiver_to_object, use_global_receiver; |
| 1245 __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset)); | 1258 __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset)); |
| 1246 __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver); | 1259 __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver); |
| 1247 // Do not transform the receiver for native functions. | 1260 // Do not transform the receiver for native functions. |
| 1248 __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver); | 1261 __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver); |
| 1249 | 1262 |
| 1250 // Compute the receiver in non-strict mode. | 1263 // Compute the receiver in sloppy mode. |
| 1251 __ JumpIfSmi(receiver, &convert_receiver_to_object); | 1264 __ JumpIfSmi(receiver, &convert_receiver_to_object); |
| 1252 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver); | 1265 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver); |
| 1253 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, | 1266 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, |
| 1254 &use_global_receiver); | 1267 &use_global_receiver); |
| 1255 | 1268 |
| 1256 // Check if the receiver is already a JavaScript object. | 1269 // Check if the receiver is already a JavaScript object. |
| 1257 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 1270 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 1258 __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE, | 1271 __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE, |
| 1259 &push_receiver, ge); | 1272 &push_receiver, ge); |
| 1260 | 1273 |
| (...skipping 86 matching lines...) |
| 1347 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | 1360 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
| 1348 // ----------- S t a t e ------------- | 1361 // ----------- S t a t e ------------- |
| 1349 // -- x0 : result being passed through | 1362 // -- x0 : result being passed through |
| 1350 // ----------------------------------- | 1363 // ----------------------------------- |
| 1351 // Get the number of arguments passed (as a smi), tear down the frame and | 1364 // Get the number of arguments passed (as a smi), tear down the frame and |
| 1352 // then drop the parameters and the receiver. | 1365 // then drop the parameters and the receiver. |
| 1353 __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + | 1366 __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1354 kPointerSize))); | 1367 kPointerSize))); |
| 1355 __ Mov(jssp, fp); | 1368 __ Mov(jssp, fp); |
| 1356 __ Pop(fp, lr); | 1369 __ Pop(fp, lr); |
| 1357 __ DropBySMI(x10, kXRegSizeInBytes); | 1370 __ DropBySMI(x10, kXRegSize); |
| 1358 __ Drop(1); | 1371 __ Drop(1); |
| 1359 } | 1372 } |
| 1360 | 1373 |
| 1361 | 1374 |
| 1362 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | 1375 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
| 1363 ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline"); | 1376 ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline"); |
| 1364 // ----------- S t a t e ------------- | 1377 // ----------- S t a t e ------------- |
| 1365 // -- x0 : actual number of arguments | 1378 // -- x0 : actual number of arguments |
| 1366 // -- x1 : function (passed through to callee) | 1379 // -- x1 : function (passed through to callee) |
| 1367 // -- x2 : expected number of arguments | 1380 // -- x2 : expected number of arguments |
| 1368 // ----------------------------------- | 1381 // ----------------------------------- |
| 1369 | 1382 |
| 1383 Register argc_actual = x0; // Excluding the receiver. |
| 1384 Register argc_expected = x2; // Excluding the receiver. |
| 1385 Register function = x1; |
| 1386 Register code_entry = x3; |
| 1387 |
| 1370 Label invoke, dont_adapt_arguments; | 1388 Label invoke, dont_adapt_arguments; |
| 1371 | 1389 |
| 1372 Label enough, too_few; | 1390 Label enough, too_few; |
| 1373 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kCodeEntryOffset)); | 1391 __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); |
| 1374 __ Cmp(x0, x2); | 1392 __ Cmp(argc_actual, argc_expected); |
| 1375 __ B(lt, &too_few); | 1393 __ B(lt, &too_few); |
| 1376 __ Cmp(x2, SharedFunctionInfo::kDontAdaptArgumentsSentinel); | 1394 __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel); |
| 1377 __ B(eq, &dont_adapt_arguments); | 1395 __ B(eq, &dont_adapt_arguments); |
| 1378 | 1396 |
| 1379 { // Enough parameters: actual >= expected | 1397 { // Enough parameters: actual >= expected |
| 1380 EnterArgumentsAdaptorFrame(masm); | 1398 EnterArgumentsAdaptorFrame(masm); |
| 1381 | 1399 |
| 1382 // Calculate copy start address into x10 and end address into x11. | 1400 Register copy_start = x10; |
| 1383 // x0: actual number of arguments | 1401 Register copy_end = x11; |
| 1384 // x1: function | 1402 Register copy_to = x12; |
| 1385 // x2: expected number of arguments | 1403 Register scratch1 = x13, scratch2 = x14; |
| 1386 // x3: code entry to call | 1404 |
| 1387 __ Add(x10, fp, Operand(x0, LSL, kPointerSizeLog2)); | 1405 __ Lsl(argc_expected, argc_expected, kPointerSizeLog2); |
| 1388 // Adjust for return address and receiver | 1406 |
| 1389 __ Add(x10, x10, 2 * kPointerSize); | 1407 // Adjust for fp, lr, and the receiver. |
| 1390 __ Sub(x11, x10, Operand(x2, LSL, kPointerSizeLog2)); | 1408 __ Add(copy_start, fp, 3 * kPointerSize); |
| 1409 __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2)); |
| 1410 __ Sub(copy_end, copy_start, argc_expected); |
| 1411 __ Sub(copy_end, copy_end, kPointerSize); |
| 1412 __ Mov(copy_to, jssp); |
| 1413 |
| 1414 // Claim space for the arguments, the receiver, and one extra slot. |
| 1415 // The extra slot ensures we do not write under jssp. It will be popped |
| 1416 // later. |
| 1417 __ Add(scratch1, argc_expected, 2 * kPointerSize); |
| 1418 __ Claim(scratch1, 1); |
| 1391 | 1419 |
| 1392 // Copy the arguments (including the receiver) to the new stack frame. | 1420 // Copy the arguments (including the receiver) to the new stack frame. |
| 1393 // x0: actual number of arguments | 1421 Label copy_2_by_2; |
| 1394 // x1: function | 1422 __ Bind(&copy_2_by_2); |
| 1395 // x2: expected number of arguments | 1423 __ Ldp(scratch1, scratch2, |
| 1396 // x3: code entry to call | 1424 MemOperand(copy_start, - 2 * kPointerSize, PreIndex)); |
| 1397 // x10: copy start address | 1425 __ Stp(scratch1, scratch2, |
| 1398 // x11: copy end address | 1426 MemOperand(copy_to, - 2 * kPointerSize, PreIndex)); |
| 1427 __ Cmp(copy_start, copy_end); |
| 1428 __ B(hi, &copy_2_by_2); |
| 1399 | 1429 |
| 1400 // TODO(all): Should we push values 2 by 2? | 1430 // Correct the space allocated for the extra slot. |
| 1401 Label copy; | 1431 __ Drop(1); |
| 1402 __ Bind(©); | |
| 1403 __ Cmp(x10, x11); | |
| 1404 __ Ldr(x12, MemOperand(x10, -kPointerSize, PostIndex)); | |
| 1405 __ Push(x12); | |
| 1406 __ B(gt, ©); | |
| 1407 | 1432 |
| 1408 __ B(&invoke); | 1433 __ B(&invoke); |
| 1409 } | 1434 } |
| 1410 | 1435 |
| 1411 { // Too few parameters: Actual < expected | 1436 { // Too few parameters: Actual < expected |
| 1412 __ Bind(&too_few); | 1437 __ Bind(&too_few); |
| 1413 EnterArgumentsAdaptorFrame(masm); | 1438 EnterArgumentsAdaptorFrame(masm); |
| 1414 | 1439 |
| 1415 // Calculate copy start address into x10 and copy end address into x11. | 1440 Register copy_from = x10; |
| 1416 // x0: actual number of arguments | 1441 Register copy_end = x11; |
| 1417 // x1: function | 1442 Register copy_to = x12; |
| 1418 // x2: expected number of arguments | 1443 Register scratch1 = x13, scratch2 = x14; |
| 1419 // x3: code entry to call | 1444 |
| 1420 // Adjust for return address. | 1445 __ Lsl(argc_expected, argc_expected, kPointerSizeLog2); |
| 1421 __ Add(x11, fp, 1 * kPointerSize); | 1446 __ Lsl(argc_actual, argc_actual, kPointerSizeLog2); |
| 1422 __ Add(x10, x11, Operand(x0, LSL, kPointerSizeLog2)); | 1447 |
| 1423 __ Add(x10, x10, 1 * kPointerSize); | 1448 // Adjust for fp, lr, and the receiver. |
| 1449 __ Add(copy_from, fp, 3 * kPointerSize); |
| 1450 __ Add(copy_from, copy_from, argc_actual); |
| 1451 __ Mov(copy_to, jssp); |
| 1452 __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver. |
| 1453 __ Sub(copy_end, copy_end, argc_actual); |
| 1454 |
| 1455 // Claim space for the arguments, the receiver, and one extra slot. |
| 1456 // The extra slot ensures we do not write under jssp. It will be popped |
| 1457 // later. |
| 1458 __ Add(scratch1, argc_expected, 2 * kPointerSize); |
| 1459 __ Claim(scratch1, 1); |
| 1424 | 1460 |
| 1425 // Copy the arguments (including the receiver) to the new stack frame. | 1461 // Copy the arguments (including the receiver) to the new stack frame. |
| 1426 // x0: actual number of arguments | 1462 Label copy_2_by_2; |
| 1427 // x1: function | 1463 __ Bind(&copy_2_by_2); |
| 1428 // x2: expected number of arguments | 1464 __ Ldp(scratch1, scratch2, |
| 1429 // x3: code entry to call | 1465 MemOperand(copy_from, - 2 * kPointerSize, PreIndex)); |
| 1430 // x10: copy start address | 1466 __ Stp(scratch1, scratch2, |
| 1431 // x11: copy end address | 1467 MemOperand(copy_to, - 2 * kPointerSize, PreIndex)); |
| 1432 Label copy; | 1468 __ Cmp(copy_to, copy_end); |
| 1433 __ Bind(&copy); | 1469 __ B(hi, &copy_2_by_2); |
| 1434 __ Ldr(x12, MemOperand(x10, -kPointerSize, PostIndex)); | 1470 |
| 1435 __ Push(x12); | 1471 __ Mov(copy_to, copy_end); |
| 1436 __ Cmp(x10, x11); // Compare before moving to next argument. | |
| 1437 __ B(ne, &copy); | |
| 1438 | 1472 |
| 1439 // Fill the remaining expected arguments with undefined. | 1473 // Fill the remaining expected arguments with undefined. |
| 1440 // x0: actual number of arguments | 1474 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); |
| 1441 // x1: function | 1475 __ Add(copy_end, jssp, kPointerSize); |
| 1442 // x2: expected number of arguments | |
| 1443 // x3: code entry to call | |
| 1444 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | |
| 1445 __ Sub(x11, fp, Operand(x2, LSL, kPointerSizeLog2)); | |
| 1446 // Adjust for the arguments adaptor frame and already pushed receiver. | |
| 1447 __ Sub(x11, x11, | |
| 1448 StandardFrameConstants::kFixedFrameSizeFromFp + (2 * kPointerSize)); | |
| 1449 | 1476 |
| 1450 // TODO(all): Optimize this to use ldp? | |
| 1451 Label fill; | 1477 Label fill; |
| 1452 __ Bind(&fill); | 1478 __ Bind(&fill); |
| 1453 __ Push(x10); | 1479 __ Stp(scratch1, scratch1, |
| 1454 __ Cmp(jssp, x11); | 1480 MemOperand(copy_to, - 2 * kPointerSize, PreIndex)); |
| 1455 __ B(ne, &fill); | 1481 __ Cmp(copy_to, copy_end); |
| 1482 __ B(hi, &fill); |
| 1483 |
| 1484 // Correct the space allocated for the extra slot. |
| 1485 __ Drop(1); |
| 1456 } | 1486 } |
| 1457 | 1487 |
| 1458 // Arguments have been adapted. Now call the entry point. | 1488 // Arguments have been adapted. Now call the entry point. |
| 1459 __ Bind(&invoke); | 1489 __ Bind(&invoke); |
| 1460 __ Call(x3); | 1490 __ Call(code_entry); |
| 1461 | 1491 |
| 1462 // Store offset of return address for deoptimizer. | 1492 // Store offset of return address for deoptimizer. |
| 1463 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); | 1493 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); |
| 1464 | 1494 |
| 1465 // Exit frame and return. | 1495 // Exit frame and return. |
| 1466 LeaveArgumentsAdaptorFrame(masm); | 1496 LeaveArgumentsAdaptorFrame(masm); |
| 1467 __ Ret(); | 1497 __ Ret(); |
| 1468 | 1498 |
| 1469 // Call the entry point without adapting the arguments. | 1499 // Call the entry point without adapting the arguments. |
| 1470 __ Bind(&dont_adapt_arguments); | 1500 __ Bind(&dont_adapt_arguments); |
| 1471 __ Jump(x3); | 1501 __ Jump(code_entry); |
| 1472 } | 1502 } |
| 1473 | 1503 |
| 1474 | 1504 |
| 1475 #undef __ | 1505 #undef __ |
| 1476 | 1506 |
| 1477 } } // namespace v8::internal | 1507 } } // namespace v8::internal |
| 1478 | 1508 |
| 1479 #endif // V8_TARGET_ARCH_ARM | 1509 #endif // V8_TARGET_ARCH_ARM |