| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
| (...skipping 133 matching lines...) |
| 144 // runtime system to make sure that indexing into string objects works | 144 // runtime system to make sure that indexing into string objects works |
| 145 // as intended. | 145 // as intended. |
| 146 STATIC_ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE); | 146 STATIC_ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE); |
| 147 __ Ldrb(scratch, FieldMemOperand(map_scratch, Map::kInstanceTypeOffset)); | 147 __ Ldrb(scratch, FieldMemOperand(map_scratch, Map::kInstanceTypeOffset)); |
| 148 __ Cmp(scratch, JS_OBJECT_TYPE); | 148 __ Cmp(scratch, JS_OBJECT_TYPE); |
| 149 __ B(lt, slow); | 149 __ B(lt, slow); |
| 150 } | 150 } |
| 151 | 151 |
| 152 | 152 |
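The receiver check above leans on the ordering of V8's instance types: anything that sorts below JS_OBJECT_TYPE, notably JS_VALUE_TYPE (the wrapper used for boxed strings), is sent to the slow path so the runtime can handle string indexing itself. A minimal standalone C++ restatement of that decision, using toy constants rather than V8's real InstanceType values:

    // Toy restatement of the receiver check; the enum values are placeholders
    // for V8's InstanceType ordering, not the real constants.
    enum ToyInstanceType { TOY_JS_VALUE_TYPE = 10, TOY_JS_OBJECT_TYPE = 20 };

    inline bool GoesToSlowPath(ToyInstanceType type) {
      static_assert(TOY_JS_OBJECT_TYPE > TOY_JS_VALUE_TYPE,
                    "wrappers (e.g. boxed strings) must sort below JS objects");
      // Everything below JS_OBJECT_TYPE is deferred to the runtime so that
      // indexing into string objects works as intended.
      return type < TOY_JS_OBJECT_TYPE;
    }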
| 153 // Loads an indexed element from a fast case array. | 153 // Loads an indexed element from a fast case array. |
| 154 // If not_fast_array is NULL, doesn't perform the elements map check. | |
| 155 // | 154 // |
| 156 // receiver - holds the receiver on entry. | 155 // receiver - holds the receiver on entry. |
| 157 // Unchanged unless 'result' is the same register. | 156 // Unchanged unless 'result' is the same register. |
| 158 // | 157 // |
| 159 // key - holds the smi key on entry. | 158 // key - holds the smi key on entry. |
| 160 // Unchanged unless 'result' is the same register. | 159 // Unchanged unless 'result' is the same register. |
| 161 // | 160 // |
| 162 // elements - holds the elements of the receiver on exit. | 161 // elements - holds the elements of the receiver and its prototypes. Clobbered. |
| 163 // | 162 // |
| 164 // elements_map - holds the elements map on exit if the not_fast_array branch is | 163 // result - holds the result on exit if the load succeeded. |
| 165 // taken. Otherwise, this is used as a scratch register. | 164 // Allowed to be the same as 'receiver' or 'key'. |
| 166 // | 165 // Unchanged on bailout so 'receiver' and 'key' can be safely |
| 167 // result - holds the result on exit if the load succeeded. | 166 // used by further computation. |
| 168 // Allowed to be the the same as 'receiver' or 'key'. | |
| 169 // Unchanged on bailout so 'receiver' and 'key' can be safely | |
| 170 // used by further computation. | |
| 171 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | 167 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, |
| 172 Register key, Register elements, | 168 Register key, Register elements, |
| 173 Register elements_map, Register scratch2, | 169 Register scratch1, Register scratch2, |
| 174 Register result, Label* not_fast_array, | 170 Register result, Label* slow) { |
| 175 Label* slow) { | 171 DCHECK(!AreAliased(receiver, key, elements, scratch1, scratch2)); |
| 176 DCHECK(!AreAliased(receiver, key, elements, elements_map, scratch2)); | 172 |
| 173 Label check_prototypes, check_next_prototype; |
| 174 Label done, in_bounds, return_undefined; |
| 177 | 175 |
| 178 // Check for fast array. | 176 // Check for fast array. |
| 179 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 177 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 180 if (not_fast_array != NULL) { | 178 __ AssertFastElements(elements); |
| 181 // Check that the object is in fast mode and writable. | |
| 182 __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); | |
| 183 __ JumpIfNotRoot(elements_map, Heap::kFixedArrayMapRootIndex, | |
| 184 not_fast_array); | |
| 185 } else { | |
| 186 __ AssertFastElements(elements); | |
| 187 } | |
| 188 | |
| 189 // The elements_map register is only used for the not_fast_array path, which | |
| 190 // was handled above. From this point onward it is a scratch register. | |
| 191 Register scratch1 = elements_map; | |
| 192 | 179 |
| 193 // Check that the key (index) is within bounds. | 180 // Check that the key (index) is within bounds. |
| 194 __ Ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 181 __ Ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
| 195 __ Cmp(key, scratch1); | 182 __ Cmp(key, scratch1); |
| 196 __ B(hs, slow); | 183 __ B(lo, &in_bounds); |
| 197 | 184 |
| 185 // Out of bounds. Check the prototype chain to see if we can just return |
| 186 // 'undefined'. |
| 187 __ Cmp(key, Operand(Smi::FromInt(0))); |
| 188 __ B(lt, slow); // Negative keys can't take the fast OOB path. |
| 189 __ Bind(&check_prototypes); |
| 190 __ Ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 191 __ Bind(&check_next_prototype); |
| 192 __ Ldr(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); |
| 193 // scratch2: current prototype |
| 194 __ JumpIfRoot(scratch2, Heap::kNullValueRootIndex, &return_undefined); |
| 195 __ Ldr(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); |
| 196 __ Ldr(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); |
| 197 // elements: elements of current prototype |
| 198 // scratch2: map of current prototype |
| 199 __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE); |
| 200 __ B(lo, slow); |
| 201 __ Ldrb(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); |
| 202 __ Tbnz(scratch1, Map::kIsAccessCheckNeeded, slow); |
| 203 __ Tbnz(scratch1, Map::kHasIndexedInterceptor, slow); |
| 204 __ JumpIfNotRoot(elements, Heap::kEmptyFixedArrayRootIndex, slow); |
| 205 __ B(&check_next_prototype); |
| 206 |
| 207 __ Bind(&return_undefined); |
| 208 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 209 __ B(&done); |
| 210 |
| 211 __ Bind(&in_bounds); |
| 198 // Fast case: Do the load. | 212 // Fast case: Do the load. |
| 199 __ Add(scratch1, elements, FixedArray::kHeaderSize - kHeapObjectTag); | 213 __ Add(scratch1, elements, FixedArray::kHeaderSize - kHeapObjectTag); |
| 200 __ SmiUntag(scratch2, key); | 214 __ SmiUntag(scratch2, key); |
| 201 __ Ldr(scratch2, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2)); | 215 __ Ldr(scratch2, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2)); |
| 202 | 216 |
| 203 // In case the loaded value is the_hole we have to consult GetProperty | 217 // In case the loaded value is the_hole we have to check the prototype chain. |
| 204 // to ensure the prototype chain is searched. | 218 __ JumpIfRoot(scratch2, Heap::kTheHoleValueRootIndex, &check_prototypes); |
| 205 __ JumpIfRoot(scratch2, Heap::kTheHoleValueRootIndex, slow); | |
| 206 | 219 |
| 207 // Move the value to the result register. | 220 // Move the value to the result register. |
| 208 // 'result' can alias with 'receiver' or 'key' but these two must be | 221 // 'result' can alias with 'receiver' or 'key' but these two must be |
| 209 // preserved if we jump to 'slow'. | 222 // preserved if we jump to 'slow'. |
| 210 __ Mov(result, scratch2); | 223 __ Mov(result, scratch2); |
| 224 __ Bind(&done); |
| 211 } | 225 } |
| 212 | 226 |
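The heart of this change is the new out-of-bounds handling in GenerateFastArrayLoad above: an in-bounds, non-hole element is returned directly, while a hole or an out-of-bounds non-negative key triggers a walk of the prototype chain, and 'undefined' may only be returned if no prototype could observe or supply the index. The standalone C++ model below restates that logic with toy types; it is a sketch of the semantics, not V8's object model, and it elides the real code's instance-type check on each prototype:

    #include <cstddef>
    #include <optional>
    #include <vector>

    // Toy stand-ins for V8 heap objects, for illustration only.
    struct ToyObject {
      std::vector<std::optional<int>> elements;  // nullopt plays the_hole
      bool needs_access_check = false;
      bool has_indexed_interceptor = false;
      const ToyObject* prototype = nullptr;      // nullptr plays null
    };

    enum class LoadResult { kValue, kUndefined, kBailoutToSlow };

    // Mirrors the generated code: fast in-bounds hit, or a prototype walk
    // that may answer 'undefined', or a bailout to the slow path.
    LoadResult FastArrayLoad(const ToyObject& receiver, long key, int* out) {
      if (key >= 0 &&
          static_cast<std::size_t>(key) < receiver.elements.size()) {
        const auto& slot = receiver.elements[static_cast<std::size_t>(key)];
        if (slot.has_value()) {
          *out = *slot;                      // in-bounds, not the hole
          return LoadResult::kValue;
        }
        // A hole is treated exactly like an out-of-bounds key: fall through.
      } else if (key < 0) {
        return LoadResult::kBailoutToSlow;   // negative keys: no fast OOB path
      }
      for (const ToyObject* p = receiver.prototype; p != nullptr;
           p = p->prototype) {
        if (p->needs_access_check || p->has_indexed_interceptor ||
            !p->elements.empty()) {
          return LoadResult::kBailoutToSlow; // a prototype could interfere
        }
      }
      return LoadResult::kUndefined;         // nothing on the chain can answer
    }

In the generated code the same walk is driven by the check_prototypes/check_next_prototype labels, and the hole check at the bottom of the fast path now jumps back into that walk rather than straight to 'slow', which is what the changed JumpIfRoot on the_hole in the NEW column shows.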
| 213 | 227 |
| 214 // Checks whether a key is an array index string or a unique name. | 228 // Checks whether a key is an array index string or a unique name. |
| 215 // Falls through if a key is a unique name. | 229 // Falls through if a key is a unique name. |
| 216 // The map of the key is returned in 'map_scratch'. | 230 // The map of the key is returned in 'map_scratch'. |
| 217 // If the jump to 'index_string' is done the hash of the key is left | 231 // If the jump to 'index_string' is done the hash of the key is left |
| 218 // in 'hash_scratch'. | 232 // in 'hash_scratch'. |
| 219 static void GenerateKeyNameCheck(MacroAssembler* masm, Register key, | 233 static void GenerateKeyNameCheck(MacroAssembler* masm, Register key, |
| 220 Register map_scratch, Register hash_scratch, | 234 Register map_scratch, Register hash_scratch, |
| (...skipping 252 matching lines...) |
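The skipped region contains, among other things, the body of GenerateKeyNameCheck declared above. Its contract is a three-way split that is easy to lose in the register comments: fall through for unique names, branch to 'index_string' (leaving the hash behind) for strings that cache an array index in their hash field, and branch to 'not_unique' for everything else. A hedged standalone restatement, with toy fields instead of V8's real Name layout:

    #include <cstdint>

    // Toy model of the key classification; the fields are illustrative,
    // not V8's actual Name bit layout.
    struct ToyKey {
      bool is_unique_nonstring;   // e.g. a symbol
      bool is_string;
      bool is_internalized;       // only meaningful for strings
      bool caches_array_index;    // array index cached in the hash field
      std::uint32_t hash_field;
    };

    enum class KeyKind { kUniqueName, kIndexString, kNotUnique };

    // Fall through for unique names, take 'index_string' with the hash for
    // array-index strings, otherwise bail out to the not-unique path.
    KeyKind ClassifyKey(const ToyKey& key, std::uint32_t* hash_out) {
      if (key.is_unique_nonstring) return KeyKind::kUniqueName;
      if (!key.is_string) return KeyKind::kNotUnique;
      if (key.caches_array_index) {
        *hash_out = key.hash_field;  // like leaving it in 'hash_scratch'
        return KeyKind::kIndexString;
      }
      return key.is_internalized ? KeyKind::kUniqueName : KeyKind::kNotUnique;
    }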
| 473 // If we can load the value, it should be returned in x0. | 487 // If we can load the value, it should be returned in x0. |
| 474 Register result = x0; | 488 Register result = x0; |
| 475 | 489 |
| 476 GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2, | 490 GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2, |
| 477 Map::kHasIndexedInterceptor, slow); | 491 Map::kHasIndexedInterceptor, slow); |
| 478 | 492 |
| 479 // Check the receiver's map to see if it has fast elements. | 493 // Check the receiver's map to see if it has fast elements. |
| 480 __ CheckFastElements(scratch1, scratch2, &check_number_dictionary); | 494 __ CheckFastElements(scratch1, scratch2, &check_number_dictionary); |
| 481 | 495 |
| 482 GenerateFastArrayLoad(masm, receiver, key, scratch3, scratch2, scratch1, | 496 GenerateFastArrayLoad(masm, receiver, key, scratch3, scratch2, scratch1, |
| 483 result, NULL, slow); | 497 result, slow); |
| 484 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, | 498 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, |
| 485 scratch1, scratch2); | 499 scratch1, scratch2); |
| 486 __ Ret(); | 500 __ Ret(); |
| 487 | 501 |
| 488 __ Bind(&check_number_dictionary); | 502 __ Bind(&check_number_dictionary); |
| 489 __ Ldr(scratch3, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 503 __ Ldr(scratch3, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 490 __ Ldr(scratch2, FieldMemOperand(scratch3, JSObject::kMapOffset)); | 504 __ Ldr(scratch2, FieldMemOperand(scratch3, JSObject::kMapOffset)); |
| 491 | 505 |
| 492 // Check whether we have a number dictionary. | 506 // Check whether we have a number dictionary. |
| 493 __ JumpIfNotRoot(scratch2, Heap::kHashTableMapRootIndex, slow); | 507 __ JumpIfNotRoot(scratch2, Heap::kHashTableMapRootIndex, slow); |
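Around this point the generic keyed load's smi-key path is just a three-way dispatch: fast elements go through GenerateFastArrayLoad (which can still bail out), dictionary (hash-table) elements go to the number-dictionary probe, and anything else falls back to the runtime. A compact sketch of that dispatch, using boolean flags in place of the real map checks:

    // Toy dispatch model for the smi-key path; the flags stand in for the
    // CheckFastElements / hash-table-map checks in the generated code.
    enum class SmiKeyPath { kFastElements, kNumberDictionary, kSlowRuntime };

    SmiKeyPath DispatchSmiKey(bool has_fast_elements,
                              bool elements_are_hash_table) {
      if (has_fast_elements) return SmiKeyPath::kFastElements;
      if (elements_are_hash_table) return SmiKeyPath::kNumberDictionary;
      return SmiKeyPath::kSlowRuntime;
    }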
| (...skipping 547 matching lines...) |
| 1041 } else { | 1055 } else { |
| 1042 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); | 1056 DCHECK(to_patch->Mask(TestBranchMask) == TBNZ); |
| 1043 // This is JumpIfSmi(smi_reg, branch_imm). | 1057 // This is JumpIfSmi(smi_reg, branch_imm). |
| 1044 patcher.tbz(smi_reg, 0, branch_imm); | 1058 patcher.tbz(smi_reg, 0, branch_imm); |
| 1045 } | 1059 } |
| 1046 } | 1060 } |
| 1047 } | 1061 } |
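The patching code above exploits the smi tag: smis have bit 0 clear, so an inlined JumpIfNotSmi is a tbnz on bit 0 and JumpIfSmi is a tbz on the same bit, and inverting the check only means swapping the mnemonic while keeping the register and branch target. A small standalone model of that inversion (the struct is illustrative, not the real Instruction/patcher API):

    // Toy model of the inlined smi-check patch.
    enum class TestBranchOp { kTbz, kTbnz };

    struct ToyTestBranch {
      TestBranchOp op;
      int reg;      // register holding the possibly-tagged value
      int bit;      // always bit 0 here: the smi tag bit
      int target;   // branch offset; the patch reuses it unchanged
    };

    // Mirrors PatchInlinedSmiCode: flip the sense of the inlined smi check.
    ToyTestBranch InvertSmiCheck(ToyTestBranch insn) {
      insn.op = (insn.op == TestBranchOp::kTbz) ? TestBranchOp::kTbnz
                                                : TestBranchOp::kTbz;
      return insn;
    }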
| 1048 } // namespace v8::internal | 1062 } // namespace v8::internal |
| 1049 | 1063 |
| 1050 #endif // V8_TARGET_ARCH_ARM64 | 1064 #endif // V8_TARGET_ARCH_ARM64 |