| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1806 matching lines...) |
| 1817 Isolate* isolate = masm->isolate(); | 1817 Isolate* isolate = masm->isolate(); |
| 1818 | 1818 |
| 1819 // Build an entry frame. | 1819 // Build an entry frame. |
| 1820 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 1820 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
| 1821 int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used. | 1821 int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used. |
| 1822 __ Mov(x13, bad_frame_pointer); | 1822 __ Mov(x13, bad_frame_pointer); |
| 1823 __ Mov(x12, Operand(Smi::FromInt(marker))); | 1823 __ Mov(x12, Operand(Smi::FromInt(marker))); |
| 1824 __ Mov(x11, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate))); | 1824 __ Mov(x11, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate))); |
| 1825 __ Ldr(x10, MemOperand(x11)); | 1825 __ Ldr(x10, MemOperand(x11)); |
| 1826 | 1826 |
| 1827 // TODO(all): Pushing the marker twice seems unnecessary. | 1827 __ Push(x13, xzr, x12, x10); |
| 1828 // In this case perhaps we could push xzr in the slot for the context | |
| 1829 // (see MAsm::EnterFrame). | |
| 1830 __ Push(x13, x12, x12, x10); | |
| 1831 // Set up fp. | 1828 // Set up fp. |
| 1832 __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset); | 1829 __ Sub(fp, jssp, EntryFrameConstants::kCallerFPOffset); |
| 1833 | 1830 |
| 1834 // Push the JS entry frame marker. Also set js_entry_sp if this is the | 1831 // Push the JS entry frame marker. Also set js_entry_sp if this is the |
| 1835 // outermost JS call. | 1832 // outermost JS call. |
| 1836 Label non_outermost_js, done; | 1833 Label non_outermost_js, done; |
| 1837 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); | 1834 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); |
| 1838 __ Mov(x10, Operand(ExternalReference(js_entry_sp))); | 1835 __ Mov(x10, Operand(ExternalReference(js_entry_sp))); |
| 1839 __ Ldr(x11, MemOperand(x10)); | 1836 __ Ldr(x11, MemOperand(x10)); |
| 1840 __ Cbnz(x11, &non_outermost_js); | 1837 __ Cbnz(x11, &non_outermost_js); |
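A quick aside on the hunk above: the NEW push sequence writes four slots and then derives fp from jssp. Below is a minimal sketch of those slots, assuming 8-byte stack slots and that the first Push operand lands at the highest address; the struct and field names are illustrative stand-ins, not V8 identifiers.

```cpp
#include <cstdint>

// Sketch of the four slots created by "Push(x13, xzr, x12, x10)" above,
// highest address first (assumption about A64 Push operand ordering).
struct EntryFrameSlots {
  uint64_t bad_frame_pointer;   // x13: -1, fails fast if ever used as an fp
  uint64_t context;             // xzr: 0 in the slot EnterFrame would use for
                                //      the context, replacing the old
                                //      duplicated marker push
  uint64_t frame_type_marker;   // x12: Smi-tagged ENTRY or ENTRY_CONSTRUCT
  uint64_t caller_c_entry_fp;   // x10: value loaded from kCEntryFPAddress
};

static_assert(sizeof(EntryFrameSlots) == 4 * sizeof(uint64_t),
              "four pointer-sized slots, matching the Push above");
```

The Sub that follows then points fp into this block at EntryFrameConstants::kCallerFPOffset.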
| (...skipping 25 matching lines...) |
| 1866 // checking for constant pool emission, but we do not want to depend on | 1863 // checking for constant pool emission, but we do not want to depend on |
| 1867 // that. | 1864 // that. |
| 1868 { | 1865 { |
| 1869 Assembler::BlockPoolsScope block_pools(masm); | 1866 Assembler::BlockPoolsScope block_pools(masm); |
| 1870 __ bind(&handler_entry); | 1867 __ bind(&handler_entry); |
| 1871 handler_offset_ = handler_entry.pos(); | 1868 handler_offset_ = handler_entry.pos(); |
| 1872 // Caught exception: Store result (exception) in the pending exception | 1869 // Caught exception: Store result (exception) in the pending exception |
| 1873 // field in the JSEnv and return a failure sentinel. Coming in here the | 1870 // field in the JSEnv and return a failure sentinel. Coming in here the |
| 1874 // fp will be invalid because the PushTryHandler below sets it to 0 to | 1871 // fp will be invalid because the PushTryHandler below sets it to 0 to |
| 1875 // signal the existence of the JSEntry frame. | 1872 // signal the existence of the JSEntry frame. |
| 1876 // TODO(jbramley): Do this in the Assembler. | |
| 1877 __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress, | 1873 __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress, |
| 1878 isolate))); | 1874 isolate))); |
| 1879 } | 1875 } |
| 1880 __ Str(code_entry, MemOperand(x10)); | 1876 __ Str(code_entry, MemOperand(x10)); |
| 1881 __ Mov(x0, Operand(reinterpret_cast<int64_t>(Failure::Exception()))); | 1877 __ Mov(x0, Operand(reinterpret_cast<int64_t>(Failure::Exception()))); |
| 1882 __ B(&exit); | 1878 __ B(&exit); |
| 1883 | 1879 |
| 1884 // Invoke: Link this frame into the handler chain. There's only one | 1880 // Invoke: Link this frame into the handler chain. There's only one |
| 1885 // handler block in this code object, so its index is 0. | 1881 // handler block in this code object, so its index is 0. |
| 1886 __ Bind(&invoke); | 1882 __ Bind(&invoke); |
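For readers skimming the handler hunk above, here is a rough C++ analogue of the caught-exception path, illustrative only: the exception is written into the isolate's pending-exception slot and a failure sentinel goes back to the JSEntry caller. kFailureSentinel stands in for Failure::Exception(); its real encoding is not reproduced here.

```cpp
#include <cstdint>

// Sketch of the caught-exception path (not V8 source).
intptr_t HandleCaughtException(intptr_t* pending_exception_slot,  // x10
                               intptr_t exception,                // code_entry
                               intptr_t kFailureSentinel) {
  *pending_exception_slot = exception;  // __ Str(code_entry, MemOperand(x10))
  return kFailureSentinel;              // __ Mov(x0, ...); __ B(&exit)
}
```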
| (...skipping 2247 matching lines...) |
| 4134 // Sliced string. Fetch parent and correct start index by offset. | 4130 // Sliced string. Fetch parent and correct start index by offset. |
| 4135 __ Ldrsw(temp, | 4131 __ Ldrsw(temp, |
| 4136 UntagSmiFieldMemOperand(input_string, SlicedString::kOffsetOffset)); | 4132 UntagSmiFieldMemOperand(input_string, SlicedString::kOffsetOffset)); |
| 4137 __ Add(from, from, temp); | 4133 __ Add(from, from, temp); |
| 4138 __ Ldr(unpacked_string, | 4134 __ Ldr(unpacked_string, |
| 4139 FieldMemOperand(input_string, SlicedString::kParentOffset)); | 4135 FieldMemOperand(input_string, SlicedString::kParentOffset)); |
| 4140 | 4136 |
| 4141 __ Bind(&update_instance_type); | 4137 __ Bind(&update_instance_type); |
| 4142 __ Ldr(temp, FieldMemOperand(unpacked_string, HeapObject::kMapOffset)); | 4138 __ Ldr(temp, FieldMemOperand(unpacked_string, HeapObject::kMapOffset)); |
| 4143 __ Ldrb(input_type, FieldMemOperand(temp, Map::kInstanceTypeOffset)); | 4139 __ Ldrb(input_type, FieldMemOperand(temp, Map::kInstanceTypeOffset)); |
| 4144 // TODO(all): This generates "b #+0x4". Can these be optimised out? | 4140 // Control must now reach &underlying_unpacked. Since no code is generated |
| 4145 __ B(&underlying_unpacked); | 4141 // before it, we fall through instead of generating a useless branch. |
| 4146 | 4142 |
| 4147 __ Bind(&seq_or_external_string); | 4143 __ Bind(&seq_or_external_string); |
| 4148 // Sequential or external string. Registers unpacked_string and input_string | 4144 // Sequential or external string. Registers unpacked_string and input_string |
| 4149 // alias, so there's nothing to do here. | 4145 // alias, so there's nothing to do here. |
| 4146 // Note that if code is added here, the above code must be updated. |
| 4150 | 4147 |
| 4151 // x0 result_string pointer to result string object (uninit) | 4148 // x0 result_string pointer to result string object (uninit) |
| 4152 // x1 result_length length of substring result | 4149 // x1 result_length length of substring result |
| 4153 // x10 unpacked_string pointer to unpacked string object | 4150 // x10 unpacked_string pointer to unpacked string object |
| 4154 // x11 input_length length of input string | 4151 // x11 input_length length of input string |
| 4155 // x12 input_type instance type of input string | 4152 // x12 input_type instance type of input string |
| 4156 // x15 from substring start character offset | 4153 // x15 from substring start character offset |
| 4157 __ Bind(&underlying_unpacked); | 4154 __ Bind(&underlying_unpacked); |
| 4158 | 4155 |
| 4159 if (FLAG_string_slices) { | 4156 if (FLAG_string_slices) { |
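To make the sliced-string hunk above easier to follow: a sliced string is conceptually a (parent, offset) pair, so the substring start is translated into the parent's index space before control continues at &underlying_unpacked. A minimal sketch under that assumption; the types and field names are stand-ins, not the real heap layout.

```cpp
#include <cstdint>

// Stand-in for the (parent, offset) view of a SlicedString.
struct SlicedStringView {
  const void* parent;  // SlicedString::kParentOffset
  int32_t offset;      // SlicedString::kOffsetOffset, Smi-untagged by Ldrsw
};

struct UnpackedSubstring {
  const void* unpacked_string;  // string to copy characters from
  int32_t from;                 // start index relative to unpacked_string
};

// Mirrors "__ Add(from, from, temp)" followed by loading the parent.
inline UnpackedSubstring UnpackSliced(const SlicedStringView& s, int32_t from) {
  return {s.parent, from + s.offset};
}
```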
| (...skipping 106 matching lines...) |
| 4266 __ bind(&single_char); | 4263 __ bind(&single_char); |
| 4267 // x1: result_length | 4264 // x1: result_length |
| 4268 // x10: input_string | 4265 // x10: input_string |
| 4269 // x12: input_type | 4266 // x12: input_type |
| 4270 // x15: from (untagged) | 4267 // x15: from (untagged) |
| 4271 __ SmiTag(from); | 4268 __ SmiTag(from); |
| 4272 StringCharAtGenerator generator( | 4269 StringCharAtGenerator generator( |
| 4273 input_string, from, result_length, x0, | 4270 input_string, from, result_length, x0, |
| 4274 &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER); | 4271 &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER); |
| 4275 generator.GenerateFast(masm); | 4272 generator.GenerateFast(masm); |
| 4276 // TODO(jbramley): Why doesn't this jump to return_x0? | |
| 4277 __ Drop(3); | 4273 __ Drop(3); |
| 4278 __ Ret(); | 4274 __ Ret(); |
| 4279 generator.SkipSlow(masm, &runtime); | 4275 generator.SkipSlow(masm, &runtime); |
| 4280 } | 4276 } |
| 4281 | 4277 |
| 4282 | 4278 |
| 4283 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, | 4279 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, |
| 4284 Register left, | 4280 Register left, |
| 4285 Register right, | 4281 Register right, |
| 4286 Register scratch1, | 4282 Register scratch1, |
| (...skipping 216 matching lines...) |
| 4503 __ Peek(value, (argc - 1) * kPointerSize); | 4499 __ Peek(value, (argc - 1) * kPointerSize); |
| 4504 __ JumpIfNotSmi(value, &with_write_barrier); | 4500 __ JumpIfNotSmi(value, &with_write_barrier); |
| 4505 | 4501 |
| 4506 // Store the value. | 4502 // Store the value. |
| 4507 // We may need a register containing the address end_elements below, | 4503 // We may need a register containing the address end_elements below, |
| 4508 // so write back the value in end_elements. | 4504 // so write back the value in end_elements. |
| 4509 __ Add(end_elements, elements, | 4505 __ Add(end_elements, elements, |
| 4510 Operand::UntagSmiAndScale(length, kPointerSizeLog2)); | 4506 Operand::UntagSmiAndScale(length, kPointerSizeLog2)); |
| 4511 __ Str(value, MemOperand(end_elements, kEndElementsOffset, PreIndex)); | 4507 __ Str(value, MemOperand(end_elements, kEndElementsOffset, PreIndex)); |
| 4512 } else { | 4508 } else { |
| 4513 // TODO(all): ARM has a redundant cmp here. | |
| 4514 __ B(gt, &call_builtin); | 4509 __ B(gt, &call_builtin); |
| 4515 | 4510 |
| 4516 __ Peek(value, (argc - 1) * kPointerSize); | 4511 __ Peek(value, (argc - 1) * kPointerSize); |
| 4517 __ StoreNumberToDoubleElements(value, length, elements, x10, d0, d1, | 4512 __ StoreNumberToDoubleElements(value, length, elements, x10, d0, d1, |
| 4518 &call_builtin, argc * kDoubleSize); | 4513 &call_builtin, argc * kDoubleSize); |
| 4519 } | 4514 } |
| 4520 | 4515 |
| 4521 // Save new length. | 4516 // Save new length. |
| 4522 __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 4517 __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 4523 | 4518 |
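A side note on the address computation in the smi-element path above: UntagSmiAndScale folds the Smi untag and the element scaling into one operand. Here is a sketch of the equivalent arithmetic, assuming the 64-bit Smi scheme where the payload lives in the upper 32 bits (kSmiShift == 32) and 8-byte pointers; the helper name is made up.

```cpp
#include <cstdint>

// Equivalent of "Add(end_elements, elements,
//                    Operand::UntagSmiAndScale(length, kPointerSizeLog2))".
inline uint64_t EndElementsAddress(uint64_t elements, int64_t smi_length) {
  const int kSmiShift = 32;        // assumption: 32-bit Smi shift on A64
  const int kPointerSizeLog2 = 3;  // 8-byte pointers
  int64_t length = smi_length >> kSmiShift;  // arithmetic shift untags the Smi
  return elements + (static_cast<uint64_t>(length) << kPointerSizeLog2);
}
```

The Str that follows then stores the pushed value through a pre-indexed kEndElementsOffset from this address.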
| (...skipping 1176 matching lines...) |
| 5700 // not controlled by GC. | 5695 // not controlled by GC. |
| 5701 const int kApiStackSpace = 4; | 5696 const int kApiStackSpace = 4; |
| 5702 | 5697 |
| 5703 // Allocate space so that CallApiFunctionAndReturn can store some | 5698 // Allocate space so that CallApiFunctionAndReturn can store some |
| 5704 // scratch registers on the stack. | 5699 // scratch registers on the stack. |
| 5705 const int kCallApiFunctionSpillSpace = 4; | 5700 const int kCallApiFunctionSpillSpace = 4; |
| 5706 | 5701 |
| 5707 FrameScope frame_scope(masm, StackFrame::MANUAL); | 5702 FrameScope frame_scope(masm, StackFrame::MANUAL); |
| 5708 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace); | 5703 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace); |
| 5709 | 5704 |
| 5710 // TODO(all): Optimize this with stp and suchlike. | |
| 5711 ASSERT(!AreAliased(x0, api_function_address)); | 5705 ASSERT(!AreAliased(x0, api_function_address)); |
| 5712 // x0 = FunctionCallbackInfo& | 5706 // x0 = FunctionCallbackInfo& |
| 5713 // The arguments are after the return address. | 5707 // The arguments are after the return address. |
| 5714 __ Add(x0, masm->StackPointer(), 1 * kPointerSize); | 5708 __ Add(x0, masm->StackPointer(), 1 * kPointerSize); |
| 5715 // FunctionCallbackInfo::implicit_args_ and FunctionCallbackInfo::values_ | 5709 // FunctionCallbackInfo::implicit_args_ and FunctionCallbackInfo::values_ |
| 5716 __ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize)); | 5710 __ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize)); |
| 5717 __ Stp(args, x10, MemOperand(x0, 0 * kPointerSize)); | 5711 __ Stp(args, x10, MemOperand(x0, 0 * kPointerSize)); |
| 5718 // FunctionCallbackInfo::length_ = argc and | 5712 // FunctionCallbackInfo::length_ = argc and |
| 5719 // FunctionCallbackInfo::is_construct_call = 0 | 5713 // FunctionCallbackInfo::is_construct_call = 0 |
| 5720 __ Mov(x10, argc); | 5714 __ Mov(x10, argc); |
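To summarise the block that x0 ends up pointing at after the stores above, here is a sketch with field names taken from the comments; the exact layout and slot widths are assumed, not checked against the real FunctionCallbackInfo.

```cpp
#include <cstdint>

// Sketch of the FunctionCallbackInfo block built on the stack above.
struct FunctionCallbackInfoSketch {
  void** implicit_args;       // = args
  void** values;              // = args + (FCA::kArgsLength - 1 + argc) slots
  int64_t length;             // = argc
  int64_t is_construct_call;  // = 0 for a regular (non-construct) call
};
```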
| (...skipping 72 matching lines...) |
| 5793 MemOperand(fp, 6 * kPointerSize), | 5787 MemOperand(fp, 6 * kPointerSize), |
| 5794 NULL); | 5788 NULL); |
| 5795 } | 5789 } |
| 5796 | 5790 |
| 5797 | 5791 |
| 5798 #undef __ | 5792 #undef __ |
| 5799 | 5793 |
| 5800 } } // namespace v8::internal | 5794 } } // namespace v8::internal |
| 5801 | 5795 |
| 5802 #endif // V8_TARGET_ARCH_A64 | 5796 #endif // V8_TARGET_ARCH_A64 |
| OLD | NEW |