| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
| 6 | 6 |
| 7 #include "src/codegen.h" | 7 #include "src/codegen.h" |
| 8 #include "src/ic/ic.h" | 8 #include "src/ic/ic.h" |
| 9 #include "src/ic/ic-compiler.h" | 9 #include "src/ic/ic-compiler.h" |
| 10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
| 11 | 11 |
| 12 namespace v8 { | 12 namespace v8 { |
| 13 namespace internal { | 13 namespace internal { |
| 14 | 14 |
| 15 | 15 |
| 16 // ---------------------------------------------------------------------------- | 16 // ---------------------------------------------------------------------------- |
| 17 // Static IC stub generators. | 17 // Static IC stub generators. |
| 18 // | 18 // |
| 19 | 19 |
| 20 #define __ ACCESS_MASM(masm) | 20 #define __ ACCESS_MASM(masm) |
| 21 | 21 |
| 22 | |
| 23 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type, | |
| 24 Label* global_object) { | |
| 25 // Register usage: | |
| 26 // type: holds the receiver instance type on entry. | |
| 27 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE)); | |
| 28 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE)); | |
| 29 } | |
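Note: the deleted helper above only distinguishes global objects from global proxies by instance type. A plain-C++ restatement of the two branches, with placeholder enum values invented for this sketch rather than V8's real constants:

```cpp
// Hypothetical stand-ins for the two instance-type constants compared above.
enum InstanceTypeModel { JS_GLOBAL_OBJECT_TYPE_ = 1, JS_GLOBAL_PROXY_TYPE_ = 2 };

// The two conditional branches to `global_object` map to returning true here.
inline bool IsGlobalObjectOrProxy(InstanceTypeModel type) {
  return type == JS_GLOBAL_OBJECT_TYPE_ || type == JS_GLOBAL_PROXY_TYPE_;
}
```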
| 30 | |
| 31 | |
| 32 // Helper function used from LoadIC GenerateNormal. | 22 // Helper function used from LoadIC GenerateNormal. |
| 33 // | 23 // |
| 34 // elements: Property dictionary. It is not clobbered if a jump to the miss | 24 // elements: Property dictionary. It is not clobbered if a jump to the miss |
| 35 // label is done. | 25 // label is done. |
| 36 // name: Property name. It is not clobbered if a jump to the miss label is | 26 // name: Property name. It is not clobbered if a jump to the miss label is |
| 37 // done | 27 // done |
| 38 // result: Register for the result. It is only updated if a jump to the miss | 28 // result: Register for the result. It is only updated if a jump to the miss |
| 39 // label is not done. Can be the same as elements or name clobbering | 29 // label is not done. Can be the same as elements or name clobbering |
| 40 // one of these in the case of not jumping to the miss label. | 30 // one of these in the case of not jumping to the miss label. |
| 41 // The two scratch registers need to be different from elements, name and | 31 // The two scratch registers need to be different from elements, name and |
| (...skipping 79 matching lines...) |
| 121 const int kValueOffset = kElementsStartOffset + kPointerSize; | 111 const int kValueOffset = kElementsStartOffset + kPointerSize; |
| 122 __ Daddu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag)); | 112 __ Daddu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag)); |
| 123 __ sd(value, MemOperand(scratch2)); | 113 __ sd(value, MemOperand(scratch2)); |
| 124 | 114 |
| 125 // Update the write barrier. Make sure not to clobber the value. | 115 // Update the write barrier. Make sure not to clobber the value. |
| 126 __ mov(scratch1, value); | 116 __ mov(scratch1, value); |
| 127 __ RecordWrite(elements, scratch2, scratch1, kRAHasNotBeenSaved, | 117 __ RecordWrite(elements, scratch2, scratch1, kRAHasNotBeenSaved, |
| 128 kDontSaveFPRegs); | 118 kDontSaveFPRegs); |
| 129 } | 119 } |
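The visible tail above stores the new value one pointer past the entry's key slot (kValueOffset = kElementsStartOffset + kPointerSize) and then emits a write barrier. A minimal C++ model of that store, assuming the usual (key, value, details) entry layout; the struct names and the read-only bit below are inventions of this sketch, not V8 declarations:

```cpp
#include <cstdint>
#include <string>
#include <vector>

// Each dictionary entry is modeled as a (key, value, details) triple, which is
// why the generated code finds the value one pointer after the key slot.
struct DictionaryEntry {
  std::string key;
  uintptr_t value = 0;   // a tagged pointer in real V8, a plain word here
  uint32_t details = 0;  // property attributes; only a read-only bit is modeled
};

constexpr uint32_t kReadOnlyBit = 1u << 0;  // placeholder bit position

struct PropertyDictionary {
  std::vector<DictionaryEntry> entries;  // a hash table in real V8

  // Returns false on a miss or a read-only property, mirroring the jump to the
  // miss label; returns true after the store that RecordWrite must cover.
  bool Store(const std::string& name, uintptr_t value) {
    for (DictionaryEntry& e : entries) {
      if (e.key != name) continue;
      if (e.details & kReadOnlyBit) return false;
      e.value = value;  // the sd(value, MemOperand(scratch2)) above
      // A write barrier (RecordWrite) runs after this point so the GC learns
      // that the dictionary, which may live in old space, now references value.
      return true;
    }
    return false;
  }
};
```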
| 130 | 120 |
| 131 | |
| 132 // Checks the receiver for special cases (value type, slow case bits). | |
| 133 // Falls through for regular JS object. | |
| 134 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, | |
| 135 Register receiver, Register map, | |
| 136 Register scratch, | |
| 137 int interceptor_bit, Label* slow) { | |
| 138 // Check that the object isn't a smi. | |
| 139 __ JumpIfSmi(receiver, slow); | |
| 140 // Get the map of the receiver. | |
| 141 __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
| 142 // Check bit field. | |
| 143 __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); | |
| 144 __ And(at, scratch, | |
| 145 Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit))); | |
| 146 __ Branch(slow, ne, at, Operand(zero_reg)); | |
| 147 // Check that the object is some kind of JS object EXCEPT JS Value type. | |
| 148 // In the case that the object is a value-wrapper object, | |
| 149 // we enter the runtime system to make sure that indexing into string | |
| 150 // objects works as intended. | |
| 151 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); | |
| 152 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | |
| 153 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); | |
| 154 } | |
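GenerateKeyedLoadReceiverCheck, removed above, gates the fast path on three receiver properties: not a smi, no access-check or interceptor bits set in the map, and an instance type at or above JS_OBJECT_TYPE. A rough C++ restatement, with placeholder constants standing in for the real bit positions and type codes:

```cpp
#include <cstdint>

constexpr int kIsAccessCheckNeededBit = 1;  // placeholder bit position
constexpr uint8_t kJSObjectType = 0xB1;     // everything below this goes slow

struct MapModel {
  uint8_t bit_field;      // Map::kBitFieldOffset in the assembly
  uint8_t instance_type;  // Map::kInstanceTypeOffset
};

// Returns true when the fast keyed-load path may continue; false corresponds
// to the branch to the `slow` label.
bool ReceiverOkForFastKeyedLoad(bool receiver_is_smi, const MapModel& map,
                                int interceptor_bit) {
  if (receiver_is_smi) return false;
  uint8_t mask = (1 << kIsAccessCheckNeededBit) | (1 << interceptor_bit);
  if (map.bit_field & mask) return false;
  // JSValue wrappers (and anything else below JS_OBJECT_TYPE) are rejected so
  // the runtime can handle indexing into wrapped strings correctly.
  return map.instance_type >= kJSObjectType;
}
```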
| 155 | |
| 156 | |
| 157 // Loads an indexed element from a fast case array. | |
| 158 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | |
| 159 Register key, Register elements, | |
| 160 Register scratch1, Register scratch2, | |
| 161 Register result, Label* slow) { | |
| 162 // Register use: | |
| 163 // | |
| 164 // receiver - holds the receiver on entry. | |
| 165 // Unchanged unless 'result' is the same register. | |
| 166 // | |
| 167 // key - holds the smi key on entry. | |
| 168 // Unchanged unless 'result' is the same register. | |
| 169 // | |
| 170 // result - holds the result on exit if the load succeeded. | |
| 171 // Allowed to be the same as 'receiver' or 'key'. | |
| 172 // Unchanged on bailout so 'receiver' and 'key' can be safely | |
| 173 // used by further computation. | |
| 174 // | |
| 175 // Scratch registers: | |
| 176 // | |
| 177 // elements - holds the elements of the receiver and its prototypes. | |
| 178 // | |
| 179 // scratch1 - used to hold elements length, bit fields, base addresses. | |
| 180 // | |
| 181 // scratch2 - used to hold maps, prototypes, and the loaded value. | |
| 182 Label check_prototypes, check_next_prototype; | |
| 183 Label done, in_bounds, absent; | |
| 184 | |
| 185 __ ld(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
| 186 __ AssertFastElements(elements); | |
| 187 | |
| 188 // Check that the key (index) is within bounds. | |
| 189 __ ld(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); | |
| 190 __ Branch(&in_bounds, lo, key, Operand(scratch1)); | |
| 191 // Out-of-bounds. Check the prototype chain to see if we can just return | |
| 192 // 'undefined'. | |
| 193 // Negative keys can't take the fast OOB path. | |
| 194 __ Branch(slow, lt, key, Operand(zero_reg)); | |
| 195 __ bind(&check_prototypes); | |
| 196 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
| 197 __ bind(&check_next_prototype); | |
| 198 __ ld(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); | |
| 199 // scratch2: current prototype | |
| 200 __ LoadRoot(at, Heap::kNullValueRootIndex); | |
| 201 __ Branch(&absent, eq, scratch2, Operand(at)); | |
| 202 __ ld(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); | |
| 203 __ ld(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | |
| 204 // elements: elements of current prototype | |
| 205 // scratch2: map of current prototype | |
| 206 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); | |
| 207 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); | |
| 208 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); | |
| 209 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | | |
| 210 (1 << Map::kHasIndexedInterceptor))); | |
| 211 __ Branch(slow, ne, at, Operand(zero_reg)); | |
| 212 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | |
| 213 __ Branch(slow, ne, elements, Operand(at)); | |
| 214 __ Branch(&check_next_prototype); | |
| 215 | |
| 216 __ bind(&absent); | |
| 217 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | |
| 218 __ Branch(&done); | |
| 219 | |
| 220 __ bind(&in_bounds); | |
| 221 // Fast case: Do the load. | |
| 222 __ Daddu(scratch1, elements, | |
| 223 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 224 // The key is a smi. | |
| 225 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | |
| 226 __ SmiScale(at, key, kPointerSizeLog2); | |
| 227 __ daddu(at, at, scratch1); | |
| 228 __ ld(scratch2, MemOperand(at)); | |
| 229 | |
| 230 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
| 231 // In case the loaded value is the_hole we have to check the prototype chain. | |
| 232 __ Branch(&check_prototypes, eq, scratch2, Operand(at)); | |
| 233 __ Move(result, scratch2); | |
| 234 __ bind(&done); | |
| 235 } | |
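The removed GenerateFastArrayLoad combines a bounds check, a hole check, and a prototype-chain walk that lets an out-of-bounds read return undefined without leaving generated code. A simplified model of that logic, assuming the struct below (a stand-in, not a V8 type) and integer sentinels for the_hole and undefined:

```cpp
#include <cstdint>
#include <vector>

constexpr intptr_t kTheHole = -1;   // sentinel for the hole value
constexpr intptr_t kUndefined = 0;  // sentinel for undefined

struct ObjectModel {
  std::vector<intptr_t> elements;    // FixedArray backing store
  ObjectModel* prototype = nullptr;  // read via Map::kPrototypeOffset above
  bool access_check_or_interceptor = false;
};

// Returns false for every "branch to slow" case; writes *result only on
// success, matching the register contract documented in the removed code.
bool FastArrayLoad(const ObjectModel* receiver, int64_t key, intptr_t* result) {
  if (key < 0) return false;  // negative keys never take the fast OOB path
  if (static_cast<uint64_t>(key) < receiver->elements.size()) {
    // In the assembly the smi key is turned into a byte offset with SmiScale
    // (roughly key_value * kPointerSize) before the ld from the backing store.
    intptr_t value = receiver->elements[static_cast<size_t>(key)];
    if (value != kTheHole) {
      *result = value;
      return true;
    }
    // A hole behaves like an out-of-bounds index: fall through to the
    // prototype walk.
  }
  for (const ObjectModel* p = receiver->prototype; p != nullptr;
       p = p->prototype) {
    if (p->access_check_or_interceptor) return false;
    if (!p->elements.empty()) return false;  // only empty_fixed_array is fast
  }
  *result = kUndefined;  // reached null: the element is definitely absent
  return true;
}
```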
| 236 | |
| 237 | |
| 238 // Checks whether a key is an array index string or a unique name. | |
| 239 // Falls through if a key is a unique name. | |
| 240 static void GenerateKeyNameCheck(MacroAssembler* masm, Register key, | |
| 241 Register map, Register hash, | |
| 242 Label* index_string, Label* not_unique) { | |
| 243 // The key is not a smi. | |
| 244 Label unique; | |
| 245 // Is it a name? | |
| 246 __ GetObjectType(key, map, hash); | |
| 247 __ Branch(not_unique, hi, hash, Operand(LAST_UNIQUE_NAME_TYPE)); | |
| 248 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE); | |
| 249 __ Branch(&unique, eq, hash, Operand(LAST_UNIQUE_NAME_TYPE)); | |
| 250 | |
| 251 // Is the string an array index, with cached numeric value? | |
| 252 __ lwu(hash, FieldMemOperand(key, Name::kHashFieldOffset)); | |
| 253 __ And(at, hash, Operand(Name::kContainsCachedArrayIndexMask)); | |
| 254 __ Branch(index_string, eq, at, Operand(zero_reg)); | |
| 255 | |
| 256 // Is the string internalized? We know it's a string, so a single | |
| 257 // bit test is enough. | |
| 258 // map: key map | |
| 259 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); | |
| 260 STATIC_ASSERT(kInternalizedTag == 0); | |
| 261 __ And(at, hash, Operand(kIsNotInternalizedMask)); | |
| 262 __ Branch(not_unique, ne, at, Operand(zero_reg)); | |
| 263 | |
| 264 __ bind(&unique); | |
| 265 } | |
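GenerateKeyNameCheck splits keys three ways: strings with a cached array index go back to the element path, internalized strings and symbols fall through as unique names, and everything else bails out. A compact sketch of that classification, using an invented NameModel in place of the real instance-type and hash-field reads:

```cpp
#include <cstdint>

enum class KeyKind { kArrayIndex, kUniqueName, kNotUnique };

// Simplified view of a key object; the fields are inventions of this sketch.
struct NameModel {
  uint16_t instance_type;
  bool is_internalized;         // !(instance_type & kIsNotInternalizedMask)
  bool has_cached_array_index;  // cached-index bits present in the hash field
};

constexpr uint16_t kLastUniqueNameType = 0x80;  // == FIRST_NONSTRING_TYPE here

KeyKind ClassifyKey(const NameModel& key) {
  if (key.instance_type > kLastUniqueNameType) return KeyKind::kNotUnique;
  if (key.instance_type == kLastUniqueNameType) {
    return KeyKind::kUniqueName;  // a Symbol is always a unique name
  }
  // The key is a string. Strings whose hash field caches an array index are
  // rerouted to the element path (the index_string label).
  if (key.has_cached_array_index) return KeyKind::kArrayIndex;
  // Only internalized strings are unique; other strings bail to not_unique.
  return key.is_internalized ? KeyKind::kUniqueName : KeyKind::kNotUnique;
}
```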
| 266 | |
| 267 void LoadIC::GenerateNormal(MacroAssembler* masm) { | 121 void LoadIC::GenerateNormal(MacroAssembler* masm) { |
| 268 Register dictionary = a0; | 122 Register dictionary = a0; |
| 269 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); | 123 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); |
| 270 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); | 124 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); |
| 271 Label slow; | 125 Label slow; |
| 272 | 126 |
| 273 __ ld(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), | 127 __ ld(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), |
| 274 JSObject::kPropertiesOffset)); | 128 JSObject::kPropertiesOffset)); |
| 275 GenerateDictionaryLoad(masm, &slow, dictionary, | 129 GenerateDictionaryLoad(masm, &slow, dictionary, |
| 276 LoadDescriptor::NameRegister(), v0, a3, a4); | 130 LoadDescriptor::NameRegister(), v0, a3, a4); |
| (...skipping 60 matching lines...) |
| 337 | 191 |
| 338 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 192 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 339 // The return address is in ra. | 193 // The return address is in ra. |
| 340 | 194 |
| 341 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 195 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
| 342 | 196 |
| 343 // Do tail-call to runtime routine. | 197 // Do tail-call to runtime routine. |
| 344 __ TailCallRuntime(Runtime::kKeyedGetProperty); | 198 __ TailCallRuntime(Runtime::kKeyedGetProperty); |
| 345 } | 199 } |
| 346 | 200 |
| 347 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { | |
| 348 // The return address is in ra. | |
| 349 Label slow, check_name, index_smi, index_name, property_array_property; | |
| 350 Label probe_dictionary, check_number_dictionary; | |
| 351 | |
| 352 Register key = LoadDescriptor::NameRegister(); | |
| 353 Register receiver = LoadDescriptor::ReceiverRegister(); | |
| 354 DCHECK(key.is(a2)); | |
| 355 DCHECK(receiver.is(a1)); | |
| 356 | |
| 357 Isolate* isolate = masm->isolate(); | |
| 358 | |
| 359 // Check that the key is a smi. | |
| 360 __ JumpIfNotSmi(key, &check_name); | |
| 361 __ bind(&index_smi); | |
| 362 // Now the key is known to be a smi. This place is also jumped to from below | |
| 363 // where a numeric string is converted to a smi. | |
| 364 | |
| 365 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | |
| 366 Map::kHasIndexedInterceptor, &slow); | |
| 367 | |
| 368 // Check the receiver's map to see if it has fast elements. | |
| 369 __ CheckFastElements(a0, a3, &check_number_dictionary); | |
| 370 | |
| 371 GenerateFastArrayLoad(masm, receiver, key, a0, a3, a4, v0, &slow); | |
| 372 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_smi(), 1, a4, | |
| 373 a3); | |
| 374 __ Ret(); | |
| 375 | |
| 376 __ bind(&check_number_dictionary); | |
| 377 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); | |
| 378 __ ld(a3, FieldMemOperand(a4, JSObject::kMapOffset)); | |
| 379 | |
| 380 // Check whether the elements is a number dictionary. | |
| 381 // a3: elements map | |
| 382 // a4: elements | |
| 383 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | |
| 384 __ Branch(&slow, ne, a3, Operand(at)); | |
| 385 __ dsra32(a0, key, 0); | |
| 386 __ LoadFromNumberDictionary(&slow, a4, key, v0, a0, a3, a5); | |
| 387 __ Ret(); | |
| 388 | |
| 389 // Slow case, key and receiver still in a2 and a1. | |
| 390 __ bind(&slow); | |
| 391 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_slow(), 1, a4, | |
| 392 a3); | |
| 393 GenerateRuntimeGetProperty(masm); | |
| 394 | |
| 395 __ bind(&check_name); | |
| 396 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); | |
| 397 | |
| 398 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | |
| 399 Map::kHasNamedInterceptor, &slow); | |
| 400 | |
| 401 | |
| 402 // If the receiver is a fast-case object, check the stub cache. Otherwise | |
| 403 // probe the dictionary. | |
| 404 __ ld(a3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | |
| 405 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset)); | |
| 406 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | |
| 407 __ Branch(&probe_dictionary, eq, a4, Operand(at)); | |
| 408 | |
| 409 // The handlers in the stub cache expect a vector and slot. Since we won't | |
| 410 // change the IC from any downstream misses, a dummy vector can be used. | |
| 411 Register vector = LoadWithVectorDescriptor::VectorRegister(); | |
| 412 Register slot = LoadWithVectorDescriptor::SlotRegister(); | |
| 413 DCHECK(!AreAliased(vector, slot, a4, a5, a6, t1)); | |
| 414 Handle<TypeFeedbackVector> dummy_vector = | |
| 415 TypeFeedbackVector::DummyVector(masm->isolate()); | |
| 416 int slot_index = dummy_vector->GetIndex( | |
| 417 FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot)); | |
| 418 __ LoadRoot(vector, Heap::kDummyVectorRootIndex); | |
| 419 __ li(slot, Operand(Smi::FromInt(slot_index))); | |
| 420 | |
| 421 masm->isolate()->load_stub_cache()->GenerateProbe(masm, receiver, key, a4, a5, | |
| 422 a6, t1); | |
| 423 // Cache miss. | |
| 424 GenerateMiss(masm); | |
| 425 | |
| 426 // Do a quick inline probe of the receiver's dictionary, if it | |
| 427 // exists. | |
| 428 __ bind(&probe_dictionary); | |
| 429 // a3: elements | |
| 430 __ ld(a0, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
| 431 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); | |
| 432 GenerateGlobalInstanceTypeCheck(masm, a0, &slow); | |
| 433 // Load the property to v0. | |
| 434 GenerateDictionaryLoad(masm, &slow, a3, key, v0, a5, a4); | |
| 435 __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_symbol(), 1, | |
| 436 a4, a3); | |
| 437 __ Ret(); | |
| 438 | |
| 439 __ bind(&index_name); | |
| 440 __ IndexFromHash(a3, key); | |
| 441 // Now jump to the place where smi keys are handled. | |
| 442 __ Branch(&index_smi); | |
| 443 } | |
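Stepping back, the removed GenerateMegamorphic body is a hand-scheduled version of a short dispatch routine. The sketch below restates that control flow in ordinary C++; every helper is a stubbed placeholder that always reports a miss (none of them is a V8 function), and the index-string case is folded into the integer branch, matching the IndexFromHash jump back to index_smi:

```cpp
#include <cstdint>
#include <optional>
#include <string>
#include <variant>

using Value = int64_t;
struct Receiver {};

// Stubbed lookups: each reports a miss so only the control flow is modeled.
static std::optional<Value> FastElementLoad(Receiver&, int64_t) { return {}; }
static std::optional<Value> NumberDictionaryLoad(Receiver&, int64_t) { return {}; }
static std::optional<Value> StubCacheProbe(Receiver&, const std::string&) { return {}; }
static std::optional<Value> PropertyDictionaryLoad(Receiver&, const std::string&) { return {}; }
static Value RuntimeKeyedGetProperty(Receiver&) { return 0; }  // slow fallback

Value MegamorphicKeyedLoad(Receiver& receiver,
                           const std::variant<int64_t, std::string>& key) {
  if (const int64_t* index = std::get_if<int64_t>(&key)) {
    // index_smi: try fast elements, then the number (element) dictionary.
    if (auto v = FastElementLoad(receiver, *index)) return *v;
    if (auto v = NumberDictionaryLoad(receiver, *index)) return *v;
  } else {
    const std::string& name = std::get<std::string>(key);
    // Unique-name path. In the generated code the receiver's backing store
    // decides: fast-case objects probe the stub cache, dictionary-mode objects
    // probe their name dictionary, and globals/proxies always go slow; this
    // sketch simply tries both in order.
    if (auto v = StubCacheProbe(receiver, name)) return *v;
    if (auto v = PropertyDictionaryLoad(receiver, name)) return *v;
  }
  return RuntimeKeyedGetProperty(receiver);  // Runtime::kKeyedGetProperty
}
```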
| 444 | |
| 445 | |
| 446 static void KeyedStoreGenerateMegamorphicHelper( | 201 static void KeyedStoreGenerateMegamorphicHelper( |
| 447 MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow, | 202 MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow, |
| 448 KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length, | 203 KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length, |
| 449 Register value, Register key, Register receiver, Register receiver_map, | 204 Register value, Register key, Register receiver, Register receiver_map, |
| 450 Register elements_map, Register elements) { | 205 Register elements_map, Register elements) { |
| 451 Label transition_smi_elements; | 206 Label transition_smi_elements; |
| 452 Label finish_object_store, non_double_value, transition_double_elements; | 207 Label finish_object_store, non_double_value, transition_double_elements; |
| 453 Label fast_double_without_map_check; | 208 Label fast_double_without_map_check; |
| 454 | 209 |
| 455 // Fast case: Do the store, could be either Object or double. | 210 // Fast case: Do the store, could be either Object or double. |
| (...skipping 438 matching lines...) |
| 894 break; | 649 break; |
| 895 default: | 650 default: |
| 896 UNIMPLEMENTED(); | 651 UNIMPLEMENTED(); |
| 897 } | 652 } |
| 898 patcher.ChangeBranchCondition(branch_instr, opcode); | 653 patcher.ChangeBranchCondition(branch_instr, opcode); |
| 899 } | 654 } |
| 900 } // namespace internal | 655 } // namespace internal |
| 901 } // namespace v8 | 656 } // namespace v8 |
| 902 | 657 |
| 903 #endif // V8_TARGET_ARCH_MIPS64 | 658 #endif // V8_TARGET_ARCH_MIPS64 |