| 1 // Copyright 2012 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "src/v8.h" | |
| 6 | |
| 7 #if V8_TARGET_ARCH_IA32 | |
| 8 | |
| 9 #include "src/codegen.h" | |
| 10 #include "src/ic-inl.h" | |
| 11 #include "src/runtime.h" | |
| 12 #include "src/stub-cache.h" | |
| 13 | |
| 14 namespace v8 { | |
| 15 namespace internal { | |
| 16 | |
| 17 // ---------------------------------------------------------------------------- | |
| 18 // Static IC stub generators. | |
| 19 // | |
| 20 | |
| 21 #define __ ACCESS_MASM(masm) | |
| 22 | |
| 23 | |
| 24 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, | |
| 25 Register type, | |
| 26 Label* global_object) { | |
| 27 // Register usage: | |
| 28 // type: holds the receiver instance type on entry. | |
| 29 __ cmp(type, JS_GLOBAL_OBJECT_TYPE); | |
| 30 __ j(equal, global_object); | |
| 31 __ cmp(type, JS_BUILTINS_OBJECT_TYPE); | |
| 32 __ j(equal, global_object); | |
| 33 __ cmp(type, JS_GLOBAL_PROXY_TYPE); | |
| 34 __ j(equal, global_object); | |
| 35 } | |
| 36 | |
| 37 | |
| 38 // Helper function used to load a property from a dictionary backing | |
| 39 // storage. This function may fail to load a property even though it is | |
| 40 // in the dictionary, so code at miss_label must always call a backup | |
| 41 // property load that is complete. This function is safe to call if | |
| 42 // name is not internalized, and will jump to the miss_label in that | |
| 43 // case. The generated code assumes that the receiver has slow | |
| 44 // properties, is not a global object and does not have interceptors. | |
| 45 static void GenerateDictionaryLoad(MacroAssembler* masm, | |
| 46 Label* miss_label, | |
| 47 Register elements, | |
| 48 Register name, | |
| 49 Register r0, | |
| 50 Register r1, | |
| 51 Register result) { | |
| 52 // Register use: | |
| 53 // | |
| 54 // elements - holds the property dictionary on entry and is unchanged. | |
| 55 // | |
| 56 // name - holds the name of the property on entry and is unchanged. | |
| 57 // | |
| 58 // Scratch registers: | |
| 59 // | |
| 60 // r0 - used for the index into the property dictionary | |
| 61 // | |
| 62 // r1 - used to hold the capacity of the property dictionary. | |
| 63 // | |
| 64 // result - holds the result on exit. | |
| 65 | |
| 66 Label done; | |
| 67 | |
| 68 // Probe the dictionary. | |
| 69 NameDictionaryLookupStub::GeneratePositiveLookup(masm, | |
| 70 miss_label, | |
| 71 &done, | |
| 72 elements, | |
| 73 name, | |
| 74 r0, | |
| 75 r1); | |
| 76 | |
| 77 // If probing finds an entry in the dictionary, r0 contains the | |
| 78 // index into the dictionary. Check that the value is a normal | |
| 79 // property. | |
| 80 __ bind(&done); | |
| 81 const int kElementsStartOffset = | |
| 82 NameDictionary::kHeaderSize + | |
| 83 NameDictionary::kElementsStartIndex * kPointerSize; | |
| 84 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; | |
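| // Property details are stored as a smi, hence the kSmiTagSize shift on the | |
| // mask. NORMAL properties have type 0, so any set type bits go to the miss. | |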
| 85 __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag), | |
| 86 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize)); | |
| 87 __ j(not_zero, miss_label); | |
| 88 | |
| 89 // Get the value at the masked, scaled index. | |
| 90 const int kValueOffset = kElementsStartOffset + kPointerSize; | |
| 91 __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); | |
| 92 } | |
| 93 | |
| 94 | |
| 95 // Helper function used to store a property to a dictionary backing | |
| 96 // storage. This function may fail to store a property even though it | |
| 97 // is in the dictionary, so code at miss_label must always call a | |
| 98 // backup property store that is complete. This function is safe to | |
| 99 // call if name is not internalized, and will jump to the miss_label in | |
| 100 // that case. The generated code assumes that the receiver has slow | |
| 101 // properties, is not a global object and does not have interceptors. | |
| 102 static void GenerateDictionaryStore(MacroAssembler* masm, | |
| 103 Label* miss_label, | |
| 104 Register elements, | |
| 105 Register name, | |
| 106 Register value, | |
| 107 Register r0, | |
| 108 Register r1) { | |
| 109 // Register use: | |
| 110 // | |
| 111 // elements - holds the property dictionary on entry and is clobbered. | |
| 112 // | |
| 113 // name - holds the name of the property on entry and is unchanged. | |
| 114 // | |
| 115 // value - holds the value to store and is unchanged. | |
| 116 // | |
| 117 // r0 - used for index into the property dictionary and is clobbered. | |
| 118 // | |
| 119 // r1 - used to hold the capacity of the property dictionary and is clobbered. | |
| 120 Label done; | |
| 121 | |
| 122 | |
| 123 // Probe the dictionary. | |
| 124 NameDictionaryLookupStub::GeneratePositiveLookup(masm, | |
| 125 miss_label, | |
| 126 &done, | |
| 127 elements, | |
| 128 name, | |
| 129 r0, | |
| 130 r1); | |
| 131 | |
| 132 // If probing finds an entry in the dictionary, r0 contains the | |
| 133 // index into the dictionary. Check that the value is a normal | |
| 134 // property that is not read only. | |
| 135 __ bind(&done); | |
| 136 const int kElementsStartOffset = | |
| 137 NameDictionary::kHeaderSize + | |
| 138 NameDictionary::kElementsStartIndex * kPointerSize; | |
| 139 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; | |
| 140 const int kTypeAndReadOnlyMask = | |
| 141 (PropertyDetails::TypeField::kMask | | |
| 142 PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize; | |
| 143 __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag), | |
| 144 Immediate(kTypeAndReadOnlyMask)); | |
| 145 __ j(not_zero, miss_label); | |
| 146 | |
| 147 // Store the value at the masked, scaled index. | |
| 148 const int kValueOffset = kElementsStartOffset + kPointerSize; | |
| 149 __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag)); | |
| 150 __ mov(Operand(r0, 0), value); | |
| 151 | |
| 152 // Update write barrier. Make sure not to clobber the value. | |
| 153 __ mov(r1, value); | |
| 154 __ RecordWrite(elements, r0, r1, kDontSaveFPRegs); | |
| 155 } | |
| 156 | |
| 157 | |
| 158 // Checks the receiver for special cases (value type, slow case bits). | |
| 159 // Falls through for regular JS objects. | |
| 160 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, | |
| 161 Register receiver, | |
| 162 Register map, | |
| 163 int interceptor_bit, | |
| 164 Label* slow) { | |
| 165 // Register use: | |
| 166 // receiver - holds the receiver and is unchanged. | |
| 167 // Scratch registers: | |
| 168 // map - used to hold the map of the receiver. | |
| 169 | |
| 170 // Check that the object isn't a smi. | |
| 171 __ JumpIfSmi(receiver, slow); | |
| 172 | |
| 173 // Get the map of the receiver. | |
| 174 __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 175 | |
| 176 // Check bit field. | |
| 177 __ test_b(FieldOperand(map, Map::kBitFieldOffset), | |
| 178 (1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)); | |
| 179 __ j(not_zero, slow); | |
| 180 // Check that the object is some kind of JS object EXCEPT JS Value type. | |
| 181 // In the case that the object is a value-wrapper object, | |
| 182 // we enter the runtime system to make sure that indexing | |
| 183 // into string objects works as intended. | |
| 184 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); | |
| 185 | |
| 186 __ CmpInstanceType(map, JS_OBJECT_TYPE); | |
| 187 __ j(below, slow); | |
| 188 } | |
| 189 | |
| 190 | |
| 191 // Loads an indexed element from a fast case array. | |
| 192 // If not_fast_array is NULL, doesn't perform the elements map check. | |
| 193 static void GenerateFastArrayLoad(MacroAssembler* masm, | |
| 194 Register receiver, | |
| 195 Register key, | |
| 196 Register scratch, | |
| 197 Register result, | |
| 198 Label* not_fast_array, | |
| 199 Label* out_of_range) { | |
| 200 // Register use: | |
| 201 // receiver - holds the receiver and is unchanged. | |
| 202 // key - holds the key and is unchanged (must be a smi). | |
| 203 // Scratch registers: | |
| 204 // scratch - used to hold elements of the receiver and the loaded value. | |
| 205 // result - holds the result on exit if the load succeeds and | |
| 206 // we fall through. | |
| 207 | |
| 208 __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 209 if (not_fast_array != NULL) { | |
| 210 // Check that the object is in fast mode and writable. | |
| 211 __ CheckMap(scratch, | |
| 212 masm->isolate()->factory()->fixed_array_map(), | |
| 213 not_fast_array, | |
| 214 DONT_DO_SMI_CHECK); | |
| 215 } else { | |
| 216 __ AssertFastElements(scratch); | |
| 217 } | |
| 218 // Check that the key (index) is within bounds. | |
| 219 __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset)); | |
| 220 __ j(above_equal, out_of_range); | |
| 221 // Fast case: Do the load. | |
| 222 STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0)); | |
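| // The key is a smi (index << 1), so the times_2 scale factor yields | |
| // index * kPointerSize. | |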
| 223 __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize)); | |
| 224 __ cmp(scratch, Immediate(masm->isolate()->factory()->the_hole_value())); | |
| 225 // In case the loaded value is the_hole we have to consult GetProperty | |
| 226 // to ensure the prototype chain is searched. | |
| 227 __ j(equal, out_of_range); | |
| 228 if (!result.is(scratch)) { | |
| 229 __ mov(result, scratch); | |
| 230 } | |
| 231 } | |
| 232 | |
| 233 | |
| 234 // Checks whether a key is an array index string or a unique name. | |
| 235 // Falls through if the key is a unique name. | |
| 236 static void GenerateKeyNameCheck(MacroAssembler* masm, | |
| 237 Register key, | |
| 238 Register map, | |
| 239 Register hash, | |
| 240 Label* index_string, | |
| 241 Label* not_unique) { | |
| 242 // Register use: | |
| 243 // key - holds the key and is unchanged. Assumed to be non-smi. | |
| 244 // Scratch registers: | |
| 245 // map - used to hold the map of the key. | |
| 246 // hash - used to hold the hash of the key. | |
| 247 Label unique; | |
| 248 __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map); | |
| 249 __ j(above, not_unique); | |
| 250 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE); | |
| 251 __ j(equal, &unique); | |
| 252 | |
| 253 // Is the string an array index, with cached numeric value? | |
| 254 __ mov(hash, FieldOperand(key, Name::kHashFieldOffset)); | |
| 255 __ test(hash, Immediate(Name::kContainsCachedArrayIndexMask)); | |
| 256 __ j(zero, index_string); | |
| 257 | |
| 258 // Is the string internalized? We already know it's a string so a single | |
| 259 // bit test is enough. | |
| 260 STATIC_ASSERT(kNotInternalizedTag != 0); | |
| 261 __ test_b(FieldOperand(map, Map::kInstanceTypeOffset), | |
| 262 kIsNotInternalizedMask); | |
| 263 __ j(not_zero, not_unique); | |
| 264 | |
| 265 __ bind(&unique); | |
| 266 } | |
| 267 | |
| 268 | |
| 269 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm, | |
| 270 Register object, | |
| 271 Register key, | |
| 272 Register scratch1, | |
| 273 Register scratch2, | |
| 274 Label* unmapped_case, | |
| 275 Label* slow_case) { | |
| 276 Heap* heap = masm->isolate()->heap(); | |
| 277 Factory* factory = masm->isolate()->factory(); | |
| 278 | |
| 279 // Check that the receiver is a JSObject. Because of the elements | |
| 280 // map check later, we do not need to check for interceptors or | |
| 281 // whether it requires access checks. | |
| 282 __ JumpIfSmi(object, slow_case); | |
| 283 // Check that the object is some kind of JSObject. | |
| 284 __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1); | |
| 285 __ j(below, slow_case); | |
| 286 | |
| 287 // Check that the key is a positive smi. | |
| 288 __ test(key, Immediate(0x80000001)); | |
| 289 __ j(not_zero, slow_case); | |
| 290 | |
| 291 // Load the elements into scratch1 and check its map. | |
| 292 Handle<Map> arguments_map(heap->sloppy_arguments_elements_map()); | |
| 293 __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset)); | |
| 294 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); | |
| 295 | |
| 296 // Check if element is in the range of mapped arguments. If not, jump | |
| 297 // to the unmapped lookup with the parameter map in scratch1. | |
| 298 __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset)); | |
| 299 __ sub(scratch2, Immediate(Smi::FromInt(2))); | |
| 300 __ cmp(key, scratch2); | |
| 301 __ j(above_equal, unmapped_case); | |
| 302 | |
| 303 // Load element index and check whether it is the hole. | |
| 304 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize; | |
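| // The first two elements of the parameter map hold the context and the | |
| // arguments backing store, so mapped entries start at index 2 (hence the | |
| // Smi::FromInt(2) adjustment above). | |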
| 305 __ mov(scratch2, FieldOperand(scratch1, | |
| 306 key, | |
| 307 times_half_pointer_size, | |
| 308 kHeaderSize)); | |
| 309 __ cmp(scratch2, factory->the_hole_value()); | |
| 310 __ j(equal, unmapped_case); | |
| 311 | |
| 312 // Load value from context and return it. We can reuse scratch1 because | |
| 313 // we do not jump to the unmapped lookup (which requires the parameter | |
| 314 // map in scratch1). | |
| 315 const int kContextOffset = FixedArray::kHeaderSize; | |
| 316 __ mov(scratch1, FieldOperand(scratch1, kContextOffset)); | |
| 317 return FieldOperand(scratch1, | |
| 318 scratch2, | |
| 319 times_half_pointer_size, | |
| 320 Context::kHeaderSize); | |
| 321 } | |
| 322 | |
| 323 | |
| 324 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, | |
| 325 Register key, | |
| 326 Register parameter_map, | |
| 327 Register scratch, | |
| 328 Label* slow_case) { | |
| 329 // Element is in arguments backing store, which is referenced by the | |
| 330 // second element of the parameter_map. | |
| 331 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; | |
| 332 Register backing_store = parameter_map; | |
| 333 __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset)); | |
| 334 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); | |
| 335 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK); | |
| 336 __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset)); | |
| 337 __ cmp(key, scratch); | |
| 338 __ j(greater_equal, slow_case); | |
| 339 return FieldOperand(backing_store, | |
| 340 key, | |
| 341 times_half_pointer_size, | |
| 342 FixedArray::kHeaderSize); | |
| 343 } | |
| 344 | |
| 345 | |
| 346 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | |
| 347 // The return address is on the stack. | |
| 348 Label slow, check_name, index_smi, index_name, property_array_property; | |
| 349 Label probe_dictionary, check_number_dictionary; | |
| 350 | |
| 351 Register receiver = ReceiverRegister(); | |
| 352 Register key = NameRegister(); | |
| 353 DCHECK(receiver.is(edx)); | |
| 354 DCHECK(key.is(ecx)); | |
| 355 | |
| 356 // Check that the key is a smi. | |
| 357 __ JumpIfNotSmi(key, &check_name); | |
| 358 __ bind(&index_smi); | |
| 359 // Now the key is known to be a smi. This place is also jumped to from | |
| 360 // where a numeric string is converted to a smi. | |
| 361 | |
| 362 GenerateKeyedLoadReceiverCheck( | |
| 363 masm, receiver, eax, Map::kHasIndexedInterceptor, &slow); | |
| 364 | |
| 365 // Check the receiver's map to see if it has fast elements. | |
| 366 __ CheckFastElements(eax, &check_number_dictionary); | |
| 367 | |
| 368 GenerateFastArrayLoad(masm, receiver, key, eax, eax, NULL, &slow); | |
| 369 Isolate* isolate = masm->isolate(); | |
| 370 Counters* counters = isolate->counters(); | |
| 371 __ IncrementCounter(counters->keyed_load_generic_smi(), 1); | |
| 372 __ ret(0); | |
| 373 | |
| 374 __ bind(&check_number_dictionary); | |
| 375 __ mov(ebx, key); | |
| 376 __ SmiUntag(ebx); | |
| 377 __ mov(eax, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 378 | |
| 379 // Check whether the elements object is a number dictionary. | |
| 380 // ebx: untagged index | |
| 381 // eax: elements | |
| 382 __ CheckMap(eax, | |
| 383 isolate->factory()->hash_table_map(), | |
| 384 &slow, | |
| 385 DONT_DO_SMI_CHECK); | |
| 386 Label slow_pop_receiver; | |
| 387 // Push receiver on the stack to free up a register for the dictionary | |
| 388 // probing. | |
| 389 __ push(receiver); | |
| 390 __ LoadFromNumberDictionary(&slow_pop_receiver, eax, key, ebx, edx, edi, eax); | |
| 391 // Pop receiver before returning. | |
| 392 __ pop(receiver); | |
| 393 __ ret(0); | |
| 394 | |
| 395 __ bind(&slow_pop_receiver); | |
| 396 // Pop the receiver from the stack and jump to runtime. | |
| 397 __ pop(receiver); | |
| 398 | |
| 399 __ bind(&slow); | |
| 400 // Slow case: jump to runtime. | |
| 401 __ IncrementCounter(counters->keyed_load_generic_slow(), 1); | |
| 402 GenerateRuntimeGetProperty(masm); | |
| 403 | |
| 404 __ bind(&check_name); | |
| 405 GenerateKeyNameCheck(masm, key, eax, ebx, &index_name, &slow); | |
| 406 | |
| 407 GenerateKeyedLoadReceiverCheck( | |
| 408 masm, receiver, eax, Map::kHasNamedInterceptor, &slow); | |
| 409 | |
| 410 // If the receiver is a fast-case object, check the keyed lookup | |
| 411 // cache. Otherwise probe the dictionary. | |
| 412 __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset)); | |
| 413 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), | |
| 414 Immediate(isolate->factory()->hash_table_map())); | |
| 415 __ j(equal, &probe_dictionary); | |
| 416 | |
| 417 // The receiver's map is still in eax, compute the keyed lookup cache hash | |
| 418 // based on 32 bits of the map pointer and the string hash. | |
| 419 if (FLAG_debug_code) { | |
| 420 __ cmp(eax, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 421 __ Check(equal, kMapIsNoLongerInEax); | |
| 422 } | |
| 423 __ mov(ebx, eax); // Keep the map around for later. | |
| 424 __ shr(eax, KeyedLookupCache::kMapHashShift); | |
| 425 __ mov(edi, FieldOperand(key, String::kHashFieldOffset)); | |
| 426 __ shr(edi, String::kHashShift); | |
| 427 __ xor_(eax, edi); | |
| 428 __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask); | |
| 429 | |
| 430 // Load the key (consisting of map and internalized string) from the cache and | |
| 431 // check for match. | |
| 432 Label load_in_object_property; | |
| 433 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | |
| 434 Label hit_on_nth_entry[kEntriesPerBucket]; | |
| 435 ExternalReference cache_keys = | |
| 436 ExternalReference::keyed_lookup_cache_keys(masm->isolate()); | |
| 437 | |
| 438 for (int i = 0; i < kEntriesPerBucket - 1; i++) { | |
| 439 Label try_next_entry; | |
| 440 __ mov(edi, eax); | |
| 441 __ shl(edi, kPointerSizeLog2 + 1); | |
| 442 if (i != 0) { | |
| 443 __ add(edi, Immediate(kPointerSize * i * 2)); | |
| 444 } | |
| 445 __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); | |
| 446 __ j(not_equal, &try_next_entry); | |
| 447 __ add(edi, Immediate(kPointerSize)); | |
| 448 __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys)); | |
| 449 __ j(equal, &hit_on_nth_entry[i]); | |
| 450 __ bind(&try_next_entry); | |
| 451 } | |
| 452 | |
| 453 __ lea(edi, Operand(eax, 1)); | |
| 454 __ shl(edi, kPointerSizeLog2 + 1); | |
| 455 __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2)); | |
| 456 __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); | |
| 457 __ j(not_equal, &slow); | |
| 458 __ add(edi, Immediate(kPointerSize)); | |
| 459 __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys)); | |
| 460 __ j(not_equal, &slow); | |
| 461 | |
| 462 // Get field offset. | |
| 463 // ebx : receiver's map | |
| 464 // eax : lookup cache index | |
| 465 ExternalReference cache_field_offsets = | |
| 466 ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); | |
| 467 | |
| 468 // Hit on nth entry. | |
| 469 for (int i = kEntriesPerBucket - 1; i >= 0; i--) { | |
| 470 __ bind(&hit_on_nth_entry[i]); | |
| 471 if (i != 0) { | |
| 472 __ add(eax, Immediate(i)); | |
| 473 } | |
| 474 __ mov(edi, | |
| 475 Operand::StaticArray(eax, times_pointer_size, cache_field_offsets)); | |
| 476 __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset)); | |
| 477 __ sub(edi, eax); | |
| 478 __ j(above_equal, &property_array_property); | |
| 479 if (i != 0) { | |
| 480 __ jmp(&load_in_object_property); | |
| 481 } | |
| 482 } | |
| 483 | |
| 484 // Load in-object property. | |
| 485 __ bind(&load_in_object_property); | |
| 486 __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset)); | |
| 487 __ add(eax, edi); | |
| 488 __ mov(eax, FieldOperand(receiver, eax, times_pointer_size, 0)); | |
| 489 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | |
| 490 __ ret(0); | |
| 491 | |
| 492 // Load property array property. | |
| 493 __ bind(&property_array_property); | |
| 494 __ mov(eax, FieldOperand(receiver, JSObject::kPropertiesOffset)); | |
| 495 __ mov(eax, FieldOperand(eax, edi, times_pointer_size, | |
| 496 FixedArray::kHeaderSize)); | |
| 497 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); | |
| 498 __ ret(0); | |
| 499 | |
| 500 // Do a quick inline probe of the receiver's dictionary, if it | |
| 501 // exists. | |
| 502 __ bind(&probe_dictionary); | |
| 503 | |
| 504 __ mov(eax, FieldOperand(receiver, JSObject::kMapOffset)); | |
| 505 __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset)); | |
| 506 GenerateGlobalInstanceTypeCheck(masm, eax, &slow); | |
| 507 | |
| 508 GenerateDictionaryLoad(masm, &slow, ebx, key, eax, edi, eax); | |
| 509 __ IncrementCounter(counters->keyed_load_generic_symbol(), 1); | |
| 510 __ ret(0); | |
| 511 | |
| 512 __ bind(&index_name); | |
| 513 __ IndexFromHash(ebx, key); | |
| 514 // Now jump to the place where smi keys are handled. | |
| 515 __ jmp(&index_smi); | |
| 516 } | |
| 517 | |
| 518 | |
| 519 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { | |
| 520 // Return address is on the stack. | |
| 521 Label miss; | |
| 522 | |
| 523 Register receiver = ReceiverRegister(); | |
| 524 Register index = NameRegister(); | |
| 525 Register scratch = ebx; | |
| 526 DCHECK(!scratch.is(receiver) && !scratch.is(index)); | |
| 527 Register result = eax; | |
| 528 DCHECK(!result.is(scratch)); | |
| 529 | |
| 530 StringCharAtGenerator char_at_generator(receiver, | |
| 531 index, | |
| 532 scratch, | |
| 533 result, | |
| 534 &miss, // When not a string. | |
| 535 &miss, // When not a number. | |
| 536 &miss, // When index out of range. | |
| 537 STRING_INDEX_IS_ARRAY_INDEX); | |
| 538 char_at_generator.GenerateFast(masm); | |
| 539 __ ret(0); | |
| 540 | |
| 541 StubRuntimeCallHelper call_helper; | |
| 542 char_at_generator.GenerateSlow(masm, call_helper); | |
| 543 | |
| 544 __ bind(&miss); | |
| 545 GenerateMiss(masm); | |
| 546 } | |
| 547 | |
| 548 | |
| 549 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { | |
| 550 // Return address is on the stack. | |
| 551 Label slow; | |
| 552 | |
| 553 Register receiver = ReceiverRegister(); | |
| 554 Register key = NameRegister(); | |
| 555 Register scratch = eax; | |
| 556 DCHECK(!scratch.is(receiver) && !scratch.is(key)); | |
| 557 | |
| 558 // Check that the receiver isn't a smi. | |
| 559 __ JumpIfSmi(receiver, &slow); | |
| 560 | |
| 561 // Check that the key is an array index, that is Uint32. | |
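| // A non-negative smi has both the tag bit and the sign bit clear, so a | |
| // single test covers both conditions. | |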
| 562 __ test(key, Immediate(kSmiTagMask | kSmiSignMask)); | |
| 563 __ j(not_zero, &slow); | |
| 564 | |
| 565 // Get the map of the receiver. | |
| 566 __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 567 | |
| 568 // Check that it has indexed interceptor and access checks | |
| 569 // are not enabled for this object. | |
| 570 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset)); | |
| 571 __ and_(scratch, Immediate(kSlowCaseBitFieldMask)); | |
| 572 __ cmp(scratch, Immediate(1 << Map::kHasIndexedInterceptor)); | |
| 573 __ j(not_zero, &slow); | |
| 574 | |
| 575 // Everything is fine, call runtime. | |
| 576 __ pop(scratch); | |
| 577 __ push(receiver); // receiver | |
| 578 __ push(key); // key | |
| 579 __ push(scratch); // return address | |
| 580 | |
| 581 // Perform tail call to the entry. | |
| 582 ExternalReference ref = ExternalReference( | |
| 583 IC_Utility(kLoadElementWithInterceptor), masm->isolate()); | |
| 584 __ TailCallExternalReference(ref, 2, 1); | |
| 585 | |
| 586 __ bind(&slow); | |
| 587 GenerateMiss(masm); | |
| 588 } | |
| 589 | |
| 590 | |
| 591 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) { | |
| 592 // The return address is on the stack. | |
| 593 Register receiver = ReceiverRegister(); | |
| 594 Register key = NameRegister(); | |
| 595 DCHECK(receiver.is(edx)); | |
| 596 DCHECK(key.is(ecx)); | |
| 597 | |
| 598 Label slow, notin; | |
| 599 Factory* factory = masm->isolate()->factory(); | |
| 600 Operand mapped_location = | |
| 601 GenerateMappedArgumentsLookup( | |
| 602 masm, receiver, key, ebx, eax, ¬in, &slow); | |
| 603 __ mov(eax, mapped_location); | |
| 604 __ Ret(); | |
| 605 __ bind(¬in); | |
| 606 // The unmapped lookup expects that the parameter map is in ebx. | |
| 607 Operand unmapped_location = | |
| 608 GenerateUnmappedArgumentsLookup(masm, key, ebx, eax, &slow); | |
| 609 __ cmp(unmapped_location, factory->the_hole_value()); | |
| 610 __ j(equal, &slow); | |
| 611 __ mov(eax, unmapped_location); | |
| 612 __ Ret(); | |
| 613 __ bind(&slow); | |
| 614 GenerateMiss(masm); | |
| 615 } | |
| 616 | |
| 617 | |
| 618 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { | |
| 619 // Return address is on the stack. | |
| 620 Label slow, notin; | |
| 621 Register receiver = ReceiverRegister(); | |
| 622 Register name = NameRegister(); | |
| 623 Register value = ValueRegister(); | |
| 624 DCHECK(receiver.is(edx)); | |
| 625 DCHECK(name.is(ecx)); | |
| 626 DCHECK(value.is(eax)); | |
| 627 | |
| 628 Operand mapped_location = | |
| 629 GenerateMappedArgumentsLookup(masm, receiver, name, ebx, edi, ¬in, | |
| 630 &slow); | |
| 631 __ mov(mapped_location, value); | |
| 632 __ lea(ecx, mapped_location); | |
| 633 __ mov(edx, value); | |
| 634 __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs); | |
| 635 __ Ret(); | |
| 636 __ bind(¬in); | |
| 637 // The unmapped lookup expects that the parameter map is in ebx. | |
| 638 Operand unmapped_location = | |
| 639 GenerateUnmappedArgumentsLookup(masm, name, ebx, edi, &slow); | |
| 640 __ mov(unmapped_location, value); | |
| 641 __ lea(edi, unmapped_location); | |
| 642 __ mov(edx, value); | |
| 643 __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs); | |
| 644 __ Ret(); | |
| 645 __ bind(&slow); | |
| 646 GenerateMiss(masm); | |
| 647 } | |
| 648 | |
| 649 | |
| 650 static void KeyedStoreGenerateGenericHelper( | |
| 651 MacroAssembler* masm, | |
| 652 Label* fast_object, | |
| 653 Label* fast_double, | |
| 654 Label* slow, | |
| 655 KeyedStoreCheckMap check_map, | |
| 656 KeyedStoreIncrementLength increment_length) { | |
| 657 Label transition_smi_elements; | |
| 658 Label finish_object_store, non_double_value, transition_double_elements; | |
| 659 Label fast_double_without_map_check; | |
| 660 Register receiver = KeyedStoreIC::ReceiverRegister(); | |
| 661 Register key = KeyedStoreIC::NameRegister(); | |
| 662 Register value = KeyedStoreIC::ValueRegister(); | |
| 663 DCHECK(receiver.is(edx)); | |
| 664 DCHECK(key.is(ecx)); | |
| 665 DCHECK(value.is(eax)); | |
| 666 // key is a smi. | |
| 667 // ebx: FixedArray receiver->elements | |
| 668 // edi: receiver map | |
| 669 // Fast case: Do the store, could be either Object or double. | |
| 670 __ bind(fast_object); | |
| 671 if (check_map == kCheckMap) { | |
| 672 __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset)); | |
| 673 __ cmp(edi, masm->isolate()->factory()->fixed_array_map()); | |
| 674 __ j(not_equal, fast_double); | |
| 675 } | |
| 676 | |
| 677 // HOLECHECK: guards "A[i] = V" | |
| 678 // We have to go to the runtime if the current value is the hole because | |
| 679 // there may be a callback on the element | |
| 680 Label holecheck_passed1; | |
| 681 __ cmp(FixedArrayElementOperand(ebx, key), | |
| 682 masm->isolate()->factory()->the_hole_value()); | |
| 683 __ j(not_equal, &holecheck_passed1); | |
| 684 __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow); | |
| 685 __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 686 | |
| 687 __ bind(&holecheck_passed1); | |
| 688 | |
| 689 // Smi stores don't require further checks. | |
| 690 Label non_smi_value; | |
| 691 __ JumpIfNotSmi(value, &non_smi_value); | |
| 692 if (increment_length == kIncrementLength) { | |
| 693 // Add 1 to receiver->length. | |
| 694 __ add(FieldOperand(receiver, JSArray::kLengthOffset), | |
| 695 Immediate(Smi::FromInt(1))); | |
| 696 } | |
| 697 // It's irrelevant whether the array is smi-only or not when writing a smi. | |
| 698 __ mov(FixedArrayElementOperand(ebx, key), value); | |
| 699 __ ret(0); | |
| 700 | |
| 701 __ bind(&non_smi_value); | |
| 702 // Escape to elements kind transition case. | |
| 703 __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 704 __ CheckFastObjectElements(edi, &transition_smi_elements); | |
| 705 | |
| 706 // Fast elements array, store the value to the elements backing store. | |
| 707 __ bind(&finish_object_store); | |
| 708 if (increment_length == kIncrementLength) { | |
| 709 // Add 1 to receiver->length. | |
| 710 __ add(FieldOperand(receiver, JSArray::kLengthOffset), | |
| 711 Immediate(Smi::FromInt(1))); | |
| 712 } | |
| 713 __ mov(FixedArrayElementOperand(ebx, key), value); | |
| 714 // Update write barrier for the elements array address. | |
| 715 __ mov(edx, value); // Preserve the value which is returned. | |
| 716 __ RecordWriteArray( | |
| 717 ebx, edx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 718 __ ret(0); | |
| 719 | |
| 720 __ bind(fast_double); | |
| 721 if (check_map == kCheckMap) { | |
| 722 // Check for fast double array case. If this fails, call through to the | |
| 723 // runtime. | |
| 724 __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map()); | |
| 725 __ j(not_equal, slow); | |
| 726 // If the value is a number, store it as a double in the FastDoubleElements | |
| 727 // array. | |
| 728 } | |
| 729 | |
| 730 // HOLECHECK: guards "A[i] double hole?" | |
| 731 // We have to see if the double version of the hole is present. If so, | |
| 732 // go to the runtime. | |
| 733 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); | |
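| // sizeof(kHoleNanLower32) skips the lower half of each double, so the | |
| // compare below reads the upper 32 bits, which distinguish the hole NaN. | |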
| 734 __ cmp(FieldOperand(ebx, key, times_4, offset), Immediate(kHoleNanUpper32)); | |
| 735 __ j(not_equal, &fast_double_without_map_check); | |
| 736 __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow); | |
| 737 __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 738 | |
| 739 __ bind(&fast_double_without_map_check); | |
| 740 __ StoreNumberToDoubleElements(value, ebx, key, edi, xmm0, | |
| 741 &transition_double_elements); | |
| 742 if (increment_length == kIncrementLength) { | |
| 743 // Add 1 to receiver->length. | |
| 744 __ add(FieldOperand(receiver, JSArray::kLengthOffset), | |
| 745 Immediate(Smi::FromInt(1))); | |
| 746 } | |
| 747 __ ret(0); | |
| 748 | |
| 749 __ bind(&transition_smi_elements); | |
| 750 __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 751 | |
| 752 // Transition the array appropriately depending on the value type. | |
| 753 __ CheckMap(value, | |
| 754 masm->isolate()->factory()->heap_number_map(), | |
| 755 &non_double_value, | |
| 756 DONT_DO_SMI_CHECK); | |
| 757 | |
| 758 // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS | |
| 759 // and complete the store. | |
| 760 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | |
| 761 FAST_DOUBLE_ELEMENTS, | |
| 762 ebx, | |
| 763 edi, | |
| 764 slow); | |
| 765 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, | |
| 766 FAST_DOUBLE_ELEMENTS); | |
| 767 ElementsTransitionGenerator::GenerateSmiToDouble( | |
| 768 masm, receiver, key, value, ebx, mode, slow); | |
| 769 __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 770 __ jmp(&fast_double_without_map_check); | |
| 771 | |
| 772 __ bind(&non_double_value); | |
| 773 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS | |
| 774 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | |
| 775 FAST_ELEMENTS, | |
| 776 ebx, | |
| 777 edi, | |
| 778 slow); | |
| 779 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS); | |
| 780 ElementsTransitionGenerator::GenerateMapChangeElementsTransition( | |
| 781 masm, receiver, key, value, ebx, mode, slow); | |
| 782 __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 783 __ jmp(&finish_object_store); | |
| 784 | |
| 785 __ bind(&transition_double_elements); | |
| 786 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a | |
| 787 // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS and | |
| 788 // transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS. | |
| 789 __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 790 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, | |
| 791 FAST_ELEMENTS, | |
| 792 ebx, | |
| 793 edi, | |
| 794 slow); | |
| 795 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS); | |
| 796 ElementsTransitionGenerator::GenerateDoubleToObject( | |
| 797 masm, receiver, key, value, ebx, mode, slow); | |
| 798 __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 799 __ jmp(&finish_object_store); | |
| 800 } | |
| 801 | |
| 802 | |
| 803 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, | |
| 804 StrictMode strict_mode) { | |
| 805 // Return address is on the stack. | |
| 806 Label slow, fast_object, fast_object_grow; | |
| 807 Label fast_double, fast_double_grow; | |
| 808 Label array, extra, check_if_double_array; | |
| 809 Register receiver = ReceiverRegister(); | |
| 810 Register key = NameRegister(); | |
| 811 DCHECK(receiver.is(edx)); | |
| 812 DCHECK(key.is(ecx)); | |
| 813 | |
| 814 // Check that the object isn't a smi. | |
| 815 __ JumpIfSmi(receiver, &slow); | |
| 816 // Get the map from the receiver. | |
| 817 __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 818 // Check that the receiver does not require access checks and is not observed. | |
| 819 // The generic stub does not perform map checks or handle observed objects. | |
| 820 __ test_b(FieldOperand(edi, Map::kBitFieldOffset), | |
| 821 1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved); | |
| 822 __ j(not_zero, &slow); | |
| 823 // Check that the key is a smi. | |
| 824 __ JumpIfNotSmi(key, &slow); | |
| 825 __ CmpInstanceType(edi, JS_ARRAY_TYPE); | |
| 826 __ j(equal, &array); | |
| 827 // Check that the object is some kind of JSObject. | |
| 828 __ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE); | |
| 829 __ j(below, &slow); | |
| 830 | |
| 831 // Object case: Check key against length in the elements array. | |
| 832 // Key is a smi. | |
| 833 // edi: receiver map | |
| 834 __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 835 // Check array bounds. Both the key and the length of FixedArray are smis. | |
| 836 __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset)); | |
| 837 __ j(below, &fast_object); | |
| 838 | |
| 839 // Slow case: call runtime. | |
| 840 __ bind(&slow); | |
| 841 GenerateRuntimeSetProperty(masm, strict_mode); | |
| 842 | |
| 843 // Extra capacity case: Check if there is extra capacity to | |
| 844 // perform the store and update the length. Used for adding one | |
| 845 // element to the array by writing to array[array.length]. | |
| 846 __ bind(&extra); | |
| 847 // receiver is a JSArray. | |
| 848 // key is a smi. | |
| 849 // ebx: receiver->elements, a FixedArray | |
| 850 // edi: receiver map | |
| 851 // flags: compare (key, receiver.length()) | |
| 852 // do not leave holes in the array: | |
| 853 __ j(not_equal, &slow); | |
| 854 __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset)); | |
| 855 __ j(above_equal, &slow); | |
| 856 __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset)); | |
| 857 __ cmp(edi, masm->isolate()->factory()->fixed_array_map()); | |
| 858 __ j(not_equal, &check_if_double_array); | |
| 859 __ jmp(&fast_object_grow); | |
| 860 | |
| 861 __ bind(&check_if_double_array); | |
| 862 __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map()); | |
| 863 __ j(not_equal, &slow); | |
| 864 __ jmp(&fast_double_grow); | |
| 865 | |
| 866 // Array case: Get the length and the elements array from the JS | |
| 867 // array. Check that the array is in fast mode (and writable); if it | |
| 868 // is, the length is always a smi. | |
| 869 __ bind(&array); | |
| 870 // receiver is a JSArray. | |
| 871 // key is a smi. | |
| 872 // edi: receiver map | |
| 873 __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset)); | |
| 874 | |
| 875 // Check the key against the length in the array and fall through to the | |
| 876 // common store code. | |
| 877 __ cmp(key, FieldOperand(receiver, JSArray::kLengthOffset)); // Compare smis. | |
| 878 __ j(above_equal, &extra); | |
| 879 | |
| 880 KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, | |
| 881 &slow, kCheckMap, kDontIncrementLength); | |
| 882 KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow, | |
| 883 &slow, kDontCheckMap, kIncrementLength); | |
| 884 } | |
| 885 | |
| 886 | |
| 887 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) { | |
| 888 // The return address is on the stack. | |
| 889 Register receiver = ReceiverRegister(); | |
| 890 Register name = NameRegister(); | |
| 891 DCHECK(receiver.is(edx)); | |
| 892 DCHECK(name.is(ecx)); | |
| 893 | |
| 894 // Probe the stub cache. | |
| 895 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | |
| 896 Code::ComputeHandlerFlags(Code::LOAD_IC)); | |
| 897 masm->isolate()->stub_cache()->GenerateProbe( | |
| 898 masm, flags, receiver, name, ebx, eax); | |
| 899 | |
| 900 // Cache miss: Jump to runtime. | |
| 901 GenerateMiss(masm); | |
| 902 } | |
| 903 | |
| 904 | |
| 905 void LoadIC::GenerateNormal(MacroAssembler* masm) { | |
| 906 Register dictionary = eax; | |
| 907 DCHECK(!dictionary.is(ReceiverRegister())); | |
| 908 DCHECK(!dictionary.is(NameRegister())); | |
| 909 | |
| 910 Label slow; | |
| 911 | |
| 912 __ mov(dictionary, | |
| 913 FieldOperand(ReceiverRegister(), JSObject::kPropertiesOffset)); | |
| 914 GenerateDictionaryLoad(masm, &slow, dictionary, NameRegister(), edi, ebx, | |
| 915 eax); | |
| 916 __ ret(0); | |
| 917 | |
| 918 // Dictionary load failed, go slow (but don't miss). | |
| 919 __ bind(&slow); | |
| 920 GenerateRuntimeGetProperty(masm); | |
| 921 } | |
| 922 | |
| 923 | |
| 924 static void LoadIC_PushArgs(MacroAssembler* masm) { | |
| 925 Register receiver = LoadIC::ReceiverRegister(); | |
| 926 Register name = LoadIC::NameRegister(); | |
| 927 DCHECK(!ebx.is(receiver) && !ebx.is(name)); | |
| 928 | |
| 929 __ pop(ebx); | |
| 930 __ push(receiver); | |
| 931 __ push(name); | |
| 932 __ push(ebx); | |
| 933 } | |
| 934 | |
| 935 | |
| 936 void LoadIC::GenerateMiss(MacroAssembler* masm) { | |
| 937 // Return address is on the stack. | |
| 938 __ IncrementCounter(masm->isolate()->counters()->load_miss(), 1); | |
| 939 | |
| 940 LoadIC_PushArgs(masm); | |
| 941 | |
| 942 // Perform tail call to the entry. | |
| 943 ExternalReference ref = | |
| 944 ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); | |
| 945 __ TailCallExternalReference(ref, 2, 1); | |
| 946 } | |
| 947 | |
| 948 | |
| 949 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | |
| 950 // Return address is on the stack. | |
| 951 LoadIC_PushArgs(masm); | |
| 952 | |
| 953 // Perform tail call to the entry. | |
| 954 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | |
| 955 } | |
| 956 | |
| 957 | |
| 958 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | |
| 959 // Return address is on the stack. | |
| 960 __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1); | |
| 961 | |
| 962 LoadIC_PushArgs(masm); | |
| 963 | |
| 964 // Perform tail call to the entry. | |
| 965 ExternalReference ref = | |
| 966 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); | |
| 967 __ TailCallExternalReference(ref, 2, 1); | |
| 968 } | |
| 969 | |
| 970 | |
| 971 // IC register specifications | |
| 972 const Register LoadIC::ReceiverRegister() { return edx; } | |
| 973 const Register LoadIC::NameRegister() { return ecx; } | |
| 974 | |
| 975 | |
| 976 const Register LoadIC::SlotRegister() { | |
| 977 DCHECK(FLAG_vector_ics); | |
| 978 return eax; | |
| 979 } | |
| 980 | |
| 981 | |
| 982 const Register LoadIC::VectorRegister() { | |
| 983 DCHECK(FLAG_vector_ics); | |
| 984 return ebx; | |
| 985 } | |
| 986 | |
| 987 | |
| 988 const Register StoreIC::ReceiverRegister() { return edx; } | |
| 989 const Register StoreIC::NameRegister() { return ecx; } | |
| 990 const Register StoreIC::ValueRegister() { return eax; } | |
| 991 | |
| 992 | |
| 993 const Register KeyedStoreIC::MapRegister() { | |
| 994 return ebx; | |
| 995 } | |
| 996 | |
| 997 | |
| 998 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | |
| 999 // Return address is on the stack. | |
| 1000 LoadIC_PushArgs(masm); | |
| 1001 | |
| 1002 // Perform tail call to the entry. | |
| 1003 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | |
| 1004 } | |
| 1005 | |
| 1006 | |
| 1007 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { | |
| 1008 // Return address is on the stack. | |
| 1009 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( | |
| 1010 Code::ComputeHandlerFlags(Code::STORE_IC)); | |
| 1011 masm->isolate()->stub_cache()->GenerateProbe( | |
| 1012 masm, flags, ReceiverRegister(), NameRegister(), | |
| 1013 ebx, no_reg); | |
| 1014 | |
| 1015 // Cache miss: Jump to runtime. | |
| 1016 GenerateMiss(masm); | |
| 1017 } | |
| 1018 | |
| 1019 | |
| 1020 static void StoreIC_PushArgs(MacroAssembler* masm) { | |
| 1021 Register receiver = StoreIC::ReceiverRegister(); | |
| 1022 Register name = StoreIC::NameRegister(); | |
| 1023 Register value = StoreIC::ValueRegister(); | |
| 1024 | |
| 1025 DCHECK(!ebx.is(receiver) && !ebx.is(name) && !ebx.is(value)); | |
| 1026 | |
| 1027 __ pop(ebx); | |
| 1028 __ push(receiver); | |
| 1029 __ push(name); | |
| 1030 __ push(value); | |
| 1031 __ push(ebx); | |
| 1032 } | |
| 1033 | |
| 1034 | |
| 1035 void StoreIC::GenerateMiss(MacroAssembler* masm) { | |
| 1036 // Return address is on the stack. | |
| 1037 StoreIC_PushArgs(masm); | |
| 1038 | |
| 1039 // Perform tail call to the entry. | |
| 1040 ExternalReference ref = | |
| 1041 ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate()); | |
| 1042 __ TailCallExternalReference(ref, 3, 1); | |
| 1043 } | |
| 1044 | |
| 1045 | |
| 1046 void StoreIC::GenerateNormal(MacroAssembler* masm) { | |
| 1047 Label restore_miss; | |
| 1048 Register receiver = ReceiverRegister(); | |
| 1049 Register name = NameRegister(); | |
| 1050 Register value = ValueRegister(); | |
| 1051 Register dictionary = ebx; | |
| 1052 | |
| 1053 __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset)); | |
| 1054 | |
| 1055 // A lot of registers are needed for storing to slow case | |
| 1056 // objects. Push and restore receiver but rely on | |
| 1057 // GenerateDictionaryStore preserving the value and name. | |
| 1058 __ push(receiver); | |
| 1059 GenerateDictionaryStore(masm, &restore_miss, dictionary, name, value, | |
| 1060 receiver, edi); | |
| 1061 __ Drop(1); | |
| 1062 Counters* counters = masm->isolate()->counters(); | |
| 1063 __ IncrementCounter(counters->store_normal_hit(), 1); | |
| 1064 __ ret(0); | |
| 1065 | |
| 1066 __ bind(&restore_miss); | |
| 1067 __ pop(receiver); | |
| 1068 __ IncrementCounter(counters->store_normal_miss(), 1); | |
| 1069 GenerateMiss(masm); | |
| 1070 } | |
| 1071 | |
| 1072 | |
| 1073 void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, | |
| 1074 StrictMode strict_mode) { | |
| 1075 // Return address is on the stack. | |
| 1076 DCHECK(!ebx.is(ReceiverRegister()) && !ebx.is(NameRegister()) && | |
| 1077 !ebx.is(ValueRegister())); | |
| 1078 __ pop(ebx); | |
| 1079 __ push(ReceiverRegister()); | |
| 1080 __ push(NameRegister()); | |
| 1081 __ push(ValueRegister()); | |
| 1082 __ push(Immediate(Smi::FromInt(strict_mode))); | |
| 1083 __ push(ebx); // return address | |
| 1084 | |
| 1085 // Do tail-call to runtime routine. | |
| 1086 __ TailCallRuntime(Runtime::kSetProperty, 4, 1); | |
| 1087 } | |
| 1088 | |
| 1089 | |
| 1090 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, | |
| 1091 StrictMode strict_mode) { | |
| 1092 // Return address is on the stack. | |
| 1093 DCHECK(!ebx.is(ReceiverRegister()) && !ebx.is(NameRegister()) && | |
| 1094 !ebx.is(ValueRegister())); | |
| 1095 __ pop(ebx); | |
| 1096 __ push(ReceiverRegister()); | |
| 1097 __ push(NameRegister()); | |
| 1098 __ push(ValueRegister()); | |
| 1099 __ push(Immediate(Smi::FromInt(strict_mode))); | |
| 1100 __ push(ebx); // return address | |
| 1101 | |
| 1102 // Do tail-call to runtime routine. | |
| 1103 __ TailCallRuntime(Runtime::kSetProperty, 4, 1); | |
| 1104 } | |
| 1105 | |
| 1106 | |
| 1107 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { | |
| 1108 // Return address is on the stack. | |
| 1109 StoreIC_PushArgs(masm); | |
| 1110 | |
| 1111 // Do tail-call to runtime routine. | |
| 1112 ExternalReference ref = | |
| 1113 ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); | |
| 1114 __ TailCallExternalReference(ref, 3, 1); | |
| 1115 } | |
| 1116 | |
| 1117 | |
| 1118 void StoreIC::GenerateSlow(MacroAssembler* masm) { | |
| 1119 // Return address is on the stack. | |
| 1120 StoreIC_PushArgs(masm); | |
| 1121 | |
| 1122 // Do tail-call to runtime routine. | |
| 1123 ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate()); | |
| 1124 __ TailCallExternalReference(ref, 3, 1); | |
| 1125 } | |
| 1126 | |
| 1127 | |
| 1128 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { | |
| 1129 // Return address is on the stack. | |
| 1130 StoreIC_PushArgs(masm); | |
| 1131 | |
| 1132 // Do tail-call to runtime routine. | |
| 1133 ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); | |
| 1134 __ TailCallExternalReference(ref, 3, 1); | |
| 1135 } | |
| 1136 | |
| 1137 | |
| 1138 #undef __ | |
| 1139 | |
| 1140 | |
| 1141 Condition CompareIC::ComputeCondition(Token::Value op) { | |
| 1142 switch (op) { | |
| 1143 case Token::EQ_STRICT: | |
| 1144 case Token::EQ: | |
| 1145 return equal; | |
| 1146 case Token::LT: | |
| 1147 return less; | |
| 1148 case Token::GT: | |
| 1149 return greater; | |
| 1150 case Token::LTE: | |
| 1151 return less_equal; | |
| 1152 case Token::GTE: | |
| 1153 return greater_equal; | |
| 1154 default: | |
| 1155 UNREACHABLE(); | |
| 1156 return no_condition; | |
| 1157 } | |
| 1158 } | |
| 1159 | |
| 1160 | |
| 1161 bool CompareIC::HasInlinedSmiCode(Address address) { | |
| 1162 // The address of the instruction following the call. | |
| 1163 Address test_instruction_address = | |
| 1164 address + Assembler::kCallTargetAddressOffset; | |
| 1165 | |
| 1166 // If the instruction following the call is not a test al, nothing | |
| 1167 // was inlined. | |
| 1168 return *test_instruction_address == Assembler::kTestAlByte; | |
| 1169 } | |
| 1170 | |
| 1171 | |
| 1172 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) { | |
| 1173 // The address of the instruction following the call. | |
| 1174 Address test_instruction_address = | |
| 1175 address + Assembler::kCallTargetAddressOffset; | |
| 1176 | |
| 1177 // If the instruction following the call is not a test al, nothing | |
| 1178 // was inlined. | |
| 1179 if (*test_instruction_address != Assembler::kTestAlByte) { | |
| 1180 DCHECK(*test_instruction_address == Assembler::kNopByte); | |
| 1181 return; | |
| 1182 } | |
| 1183 | |
| 1184 Address delta_address = test_instruction_address + 1; | |
| 1185 // The delta to the start of the map check instruction and the | |
| 1186 // condition code used at the patched jump. | |
| 1187 uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address); | |
| 1188 if (FLAG_trace_ic) { | |
| 1189 PrintF("[ patching ic at %p, test=%p, delta=%d\n", | |
| 1190 address, test_instruction_address, delta); | |
| 1191 } | |
| 1192 | |
| 1193 // Patch with a short conditional jump. Enabling means switching from a short | |
| 1194 // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the | |
| 1195 // reverse operation of that. | |
| 1196 Address jmp_address = test_instruction_address - delta; | |
| 1197 DCHECK((check == ENABLE_INLINED_SMI_CHECK) | |
| 1198 ? (*jmp_address == Assembler::kJncShortOpcode || | |
| 1199 *jmp_address == Assembler::kJcShortOpcode) | |
| 1200 : (*jmp_address == Assembler::kJnzShortOpcode || | |
| 1201 *jmp_address == Assembler::kJzShortOpcode)); | |
| 1202 Condition cc = (check == ENABLE_INLINED_SMI_CHECK) | |
| 1203 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero) | |
| 1204 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry); | |
| 1205 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc); | |
| 1206 } | |
| 1207 | |
| 1208 | |
| 1209 } } // namespace v8::internal | |
| 1210 | |
| 1211 #endif // V8_TARGET_ARCH_IA32 | |