| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
| (...skipping 4160 matching lines...) | |
| 4171 __ Ret(); | 4171 __ Ret(); |
| 4172 | 4172 |
| 4173 __ Bind(&skip_to_incremental_noncompacting); | 4173 __ Bind(&skip_to_incremental_noncompacting); |
| 4174 GenerateIncremental(masm, INCREMENTAL); | 4174 GenerateIncremental(masm, INCREMENTAL); |
| 4175 | 4175 |
| 4176 __ Bind(&skip_to_incremental_compacting); | 4176 __ Bind(&skip_to_incremental_compacting); |
| 4177 GenerateIncremental(masm, INCREMENTAL_COMPACTION); | 4177 GenerateIncremental(masm, INCREMENTAL_COMPACTION); |
| 4178 } | 4178 } |
| 4179 | 4179 |
| 4180 | 4180 |
| 4181 void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { | |
| 4182 // x0 value element value to store | |
| 4183 // x3 index_smi element index as smi | |
| 4184 // sp[0] array_index_smi array literal index in function as smi | |
| 4185 // sp[1] array array literal | |
| 4186 | |
| 4187 Register value = x0; | |
| 4188 Register index_smi = x3; | |
| 4189 | |
| 4190 Register array = x1; | |
| 4191 Register array_map = x2; | |
| 4192 Register array_index_smi = x4; | |
| 4193 __ PeekPair(array_index_smi, array, 0); | |
| 4194 __ Ldr(array_map, FieldMemOperand(array, JSObject::kMapOffset)); | |
| 4195 | |
| 4196 Label double_elements, smi_element, fast_elements, slow_elements; | |
| 4197 Register bitfield2 = x10; | |
| 4198 __ Ldrb(bitfield2, FieldMemOperand(array_map, Map::kBitField2Offset)); | |
| 4199 | |
| 4200 // Jump if array's ElementsKind is not FAST*_SMI_ELEMENTS, FAST_ELEMENTS or | |
| 4201 // FAST_HOLEY_ELEMENTS. | |
| 4202 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
| 4203 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
| 4204 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
| 4205 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); | |
| 4206 __ Cmp(bitfield2, Map::kMaximumBitField2FastHoleyElementValue); | |
| 4207 __ B(hi, &double_elements); | |
| 4208 | |
| 4209 __ JumpIfSmi(value, &smi_element); | |
| 4210 | |
| 4211 // Jump if array's ElementsKind is not FAST_ELEMENTS or FAST_HOLEY_ELEMENTS. | |
| 4212 __ Tbnz(bitfield2, MaskToBit(FAST_ELEMENTS << Map::ElementsKindBits::kShift), | |
| 4213 &fast_elements); | |
| 4214 | |
| 4215 // Store into the array literal requires an elements transition. Call into | |
| 4216 // the runtime. | |
| 4217 __ Bind(&slow_elements); | |
| 4218 __ Push(array, index_smi, value); | |
| 4219 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 4220 __ Ldr(x11, FieldMemOperand(x10, JSFunction::kLiteralsOffset)); | |
| 4221 __ Push(x11, array_index_smi); | |
| 4222 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); | |
| 4223 | |
| 4224 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. | |
| 4225 __ Bind(&fast_elements); | |
| 4226 __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset)); | |
| 4227 __ Add(x11, x10, Operand::UntagSmiAndScale(index_smi, kPointerSizeLog2)); | |
| 4228 __ Add(x11, x11, FixedArray::kHeaderSize - kHeapObjectTag); | |
| 4229 __ Str(value, MemOperand(x11)); | |
| 4230 // Update the write barrier for the array store. | |
| 4231 __ RecordWrite(x10, x11, value, kLRHasNotBeenSaved, kDontSaveFPRegs, | |
| 4232 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 4233 __ Ret(); | |
| 4234 | |
| 4235 // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS, | |
| 4236 // and value is Smi. | |
| 4237 __ Bind(&smi_element); | |
| 4238 __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset)); | |
| 4239 __ Add(x11, x10, Operand::UntagSmiAndScale(index_smi, kPointerSizeLog2)); | |
| 4240 __ Str(value, FieldMemOperand(x11, FixedArray::kHeaderSize)); | |
| 4241 __ Ret(); | |
| 4242 | |
| 4243 __ Bind(&double_elements); | |
| 4244 __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset)); | |
| 4245 __ StoreNumberToDoubleElements(value, index_smi, x10, x11, d0, | |
| 4246 &slow_elements); | |
| 4247 __ Ret(); | |
| 4248 } | |
| 4249 | |
| 4250 | |
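For reference, the removed StoreArrayLiteralElementStub above dispatches on the array's ElementsKind (decoded from Map::bit_field2) and on whether the value is a Smi, choosing between a double-element store, a Smi store without a write barrier, an object store followed by RecordWrite, or a tail call into the runtime that performs an elements transition. The standalone C++ sketch below restates that dispatch under stated assumptions: the enum values mirror the STATIC_ASSERTs in the stub, while StorePath and the rest of the snippet are illustrative only and not part of the V8 API.

```cpp
// Standalone sketch (illustrative only, not V8 API) of the dispatch the
// removed stub encodes in ARM64 assembly. The enum values mirror the
// STATIC_ASSERTs above; the kinds above FAST_HOLEY_ELEMENTS stand in for the
// double-element kinds that take the &double_elements path.
#include <cstdio>

enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4,
  FAST_HOLEY_DOUBLE_ELEMENTS = 5
};

// Returns which of the stub's four code paths a store would take.
const char* StorePath(ElementsKind kind, bool value_is_smi) {
  if (kind > FAST_HOLEY_ELEMENTS) {
    return "double_elements: unbox the number into the double backing store";
  }
  if (value_is_smi) {
    return "smi_element: plain store, no write barrier needed";
  }
  if (kind == FAST_ELEMENTS || kind == FAST_HOLEY_ELEMENTS) {
    return "fast_elements: store the heap object, then RecordWrite";
  }
  // Heap object into FAST_*_SMI_ELEMENTS: the backing store must first be
  // transitioned, so the stub tail-calls Runtime::kStoreArrayLiteralElement.
  return "slow_elements: tail call into the runtime";
}

int main() {
  std::printf("%s\n", StorePath(FAST_HOLEY_SMI_ELEMENTS, /*value_is_smi=*/false));
  return 0;
}
```

The Tbnz on MaskToBit(FAST_ELEMENTS << Map::ElementsKindBits::kShift) in the stub corresponds to the `kind == FAST_ELEMENTS || kind == FAST_HOLEY_ELEMENTS` test above, since those are the only fast kinds (2 and 3) with that bit set.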
| 4251 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 4181 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
| 4252 CEntryStub ces(isolate(), 1, kSaveFPRegs); | 4182 CEntryStub ces(isolate(), 1, kSaveFPRegs); |
| 4253 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); | 4183 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); |
| 4254 int parameter_count_offset = | 4184 int parameter_count_offset = |
| 4255 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 4185 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
| 4256 __ Ldr(x1, MemOperand(fp, parameter_count_offset)); | 4186 __ Ldr(x1, MemOperand(fp, parameter_count_offset)); |
| 4257 if (function_mode() == JS_FUNCTION_STUB_MODE) { | 4187 if (function_mode() == JS_FUNCTION_STUB_MODE) { |
| 4258 __ Add(x1, x1, 1); | 4188 __ Add(x1, x1, 1); |
| 4259 } | 4189 } |
| 4260 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 4190 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| (...skipping 1623 matching lines...) | |
| 5884 MemOperand(fp, 6 * kPointerSize), NULL); | 5814 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5885 } | 5815 } |
| 5886 | 5816 |
| 5887 | 5817 |
| 5888 #undef __ | 5818 #undef __ |
| 5889 | 5819 |
| 5890 } // namespace internal | 5820 } // namespace internal |
| 5891 } // namespace v8 | 5821 } // namespace v8 |
| 5892 | 5822 |
| 5893 #endif // V8_TARGET_ARCH_ARM64 | 5823 #endif // V8_TARGET_ARCH_ARM64 |