| OLD | NEW |
| (Empty) |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "src/v8.h" | |
| 6 | |
| 7 #if V8_TARGET_ARCH_IA32 | |
| 8 | |
| 9 #include "src/codegen.h" | |
| 10 #include "src/ic-inl.h" | |
| 11 #include "src/stub-cache.h" | |
| 12 | |
| 13 namespace v8 { | |
| 14 namespace internal { | |
| 15 | |
| 16 #define __ ACCESS_MASM(masm) | |
| 17 | |
| 18 | |
| 19 static void ProbeTable(Isolate* isolate, | |
| 20 MacroAssembler* masm, | |
| 21 Code::Flags flags, | |
| 22 StubCache::Table table, | |
| 23 Register name, | |
| 24 Register receiver, | |
| 25 // Number of the cache entry, pointer-size scaled. | |
| 26 Register offset, | |
| 27 Register extra) { | |
| 28 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | |
| 29 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | |
| 30 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); | |
| 31 | |
| 32 Label miss; | |
| 33 | |
| 34 // Multiply by 3 because there are 3 fields per entry (name, code, map). | |
| 35 __ lea(offset, Operand(offset, offset, times_2, 0)); | |
| 36 | |
| 37 if (extra.is_valid()) { | |
| 38 // Get the code entry from the cache. | |
| 39 __ mov(extra, Operand::StaticArray(offset, times_1, value_offset)); | |
| 40 | |
| 41 // Check that the key in the entry matches the name. | |
| 42 __ cmp(name, Operand::StaticArray(offset, times_1, key_offset)); | |
| 43 __ j(not_equal, &miss); | |
| 44 | |
| 45 // Check the map matches. | |
| 46 __ mov(offset, Operand::StaticArray(offset, times_1, map_offset)); | |
| 47 __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 48 __ j(not_equal, &miss); | |
| 49 | |
| 50 // Check that the flags match what we're looking for. | |
| 51 __ mov(offset, FieldOperand(extra, Code::kFlagsOffset)); | |
| 52 __ and_(offset, ~Code::kFlagsNotUsedInLookup); | |
| 53 __ cmp(offset, flags); | |
| 54 __ j(not_equal, &miss); | |
| 55 | |
| 56 #ifdef DEBUG | |
| 57 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | |
| 58 __ jmp(&miss); | |
| 59 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { | |
| 60 __ jmp(&miss); | |
| 61 } | |
| 62 #endif | |
| 63 | |
| 64 // Jump to the first instruction in the code stub. | |
| 65 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag)); | |
| 66 __ jmp(extra); | |
| 67 | |
| 68 __ bind(&miss); | |
| 69 } else { | |
| 70 // Save the offset on the stack. | |
| 71 __ push(offset); | |
| 72 | |
| 73 // Check that the key in the entry matches the name. | |
| 74 __ cmp(name, Operand::StaticArray(offset, times_1, key_offset)); | |
| 75 __ j(not_equal, &miss); | |
| 76 | |
| 77 // Check the map matches. | |
| 78 __ mov(offset, Operand::StaticArray(offset, times_1, map_offset)); | |
| 79 __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 80 __ j(not_equal, &miss); | |
| 81 | |
| 82 // Restore offset register. | |
| 83 __ mov(offset, Operand(esp, 0)); | |
| 84 | |
| 85 // Get the code entry from the cache. | |
| 86 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset)); | |
| 87 | |
| 88 // Check that the flags match what we're looking for. | |
| 89 __ mov(offset, FieldOperand(offset, Code::kFlagsOffset)); | |
| 90 __ and_(offset, ~Code::kFlagsNotUsedInLookup); | |
| 91 __ cmp(offset, flags); | |
| 92 __ j(not_equal, &miss); | |
| 93 | |
| 94 #ifdef DEBUG | |
| 95 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | |
| 96 __ jmp(&miss); | |
| 97 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { | |
| 98 __ jmp(&miss); | |
| 99 } | |
| 100 #endif | |
| 101 | |
| 102 // Restore offset and re-load code entry from cache. | |
| 103 __ pop(offset); | |
| 104 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset)); | |
| 105 | |
| 106 // Jump to the first instruction in the code stub. | |
| 107 __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag)); | |
| 108 __ jmp(offset); | |
| 109 | |
| 110 // Pop at miss. | |
| 111 __ bind(&miss); | |
| 112 __ pop(offset); | |
| 113 } | |
| 114 } | |
| 115 | |
| 116 | |
| 117 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( | |
| 118 MacroAssembler* masm, Label* miss_label, Register receiver, | |
| 119 Handle<Name> name, Register scratch0, Register scratch1) { | |
| 120 DCHECK(name->IsUniqueName()); | |
| 121 DCHECK(!receiver.is(scratch0)); | |
| 122 Counters* counters = masm->isolate()->counters(); | |
| 123 __ IncrementCounter(counters->negative_lookups(), 1); | |
| 124 __ IncrementCounter(counters->negative_lookups_miss(), 1); | |
| 125 | |
| 126 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 127 | |
| 128 const int kInterceptorOrAccessCheckNeededMask = | |
| 129 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); | |
| 130 | |
| 131 // Bail out if the receiver has a named interceptor or requires access checks. | |
| 132 __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset), | |
| 133 kInterceptorOrAccessCheckNeededMask); | |
| 134 __ j(not_zero, miss_label); | |
| 135 | |
| 136 // Check that receiver is a JSObject. | |
| 137 __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE); | |
| 138 __ j(below, miss_label); | |
| 139 | |
| 140 // Load properties array. | |
| 141 Register properties = scratch0; | |
| 142 __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset)); | |
| 143 | |
| 144 // Check that the properties array is a dictionary. | |
| 145 __ cmp(FieldOperand(properties, HeapObject::kMapOffset), | |
| 146 Immediate(masm->isolate()->factory()->hash_table_map())); | |
| 147 __ j(not_equal, miss_label); | |
| 148 | |
| 149 Label done; | |
| 150 NameDictionaryLookupStub::GenerateNegativeLookup(masm, | |
| 151 miss_label, | |
| 152 &done, | |
| 153 properties, | |
| 154 name, | |
| 155 scratch1); | |
| 156 __ bind(&done); | |
| 157 __ DecrementCounter(counters->negative_lookups_miss(), 1); | |
| 158 } | |
| 159 | |
| 160 | |
| 161 void StubCache::GenerateProbe(MacroAssembler* masm, | |
| 162 Code::Flags flags, | |
| 163 Register receiver, | |
| 164 Register name, | |
| 165 Register scratch, | |
| 166 Register extra, | |
| 167 Register extra2, | |
| 168 Register extra3) { | |
| 169 Label miss; | |
| 170 | |
| 171 // Assert that code is valid. The multiplying code relies on the entry size | |
| 172 // being 12. | |
| 173 DCHECK(sizeof(Entry) == 12); | |
| 174 | |
| 175 // Assert the flags do not name a specific type. | |
| 176 DCHECK(Code::ExtractTypeFromFlags(flags) == 0); | |
| 177 | |
| 178 // Assert that there are no register conflicts. | |
| 179 DCHECK(!scratch.is(receiver)); | |
| 180 DCHECK(!scratch.is(name)); | |
| 181 DCHECK(!extra.is(receiver)); | |
| 182 DCHECK(!extra.is(name)); | |
| 183 DCHECK(!extra.is(scratch)); | |
| 184 | |
| 185 // Assert scratch and extra registers are valid, and extra2/3 are unused. | |
| 186 DCHECK(!scratch.is(no_reg)); | |
| 187 DCHECK(extra2.is(no_reg)); | |
| 188 DCHECK(extra3.is(no_reg)); | |
| 189 | |
| 190 Register offset = scratch; | |
| 191 scratch = no_reg; | |
| 192 | |
| 193 Counters* counters = masm->isolate()->counters(); | |
| 194 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1); | |
| 195 | |
| 196 // Check that the receiver isn't a smi. | |
| 197 __ JumpIfSmi(receiver, &miss); | |
| 198 | |
| 199 // Get the map of the receiver and compute the hash. | |
| 200 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset)); | |
| 201 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 202 __ xor_(offset, flags); | |
| 203 // We mask out the last two bits because they are not part of the hash and | |
| 204 // they are always 01 for maps; the same mask is used in the two 'and's below. | |
| 205 __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift); | |
| 206 // ProbeTable expects the offset to be pointer scaled, which it is, because | |
| 207 // the heap object tag size is 2 and the pointer size log 2 is also 2. | |
| 208 DCHECK(kCacheIndexShift == kPointerSizeLog2); | |
| 209 | |
| 210 // Probe the primary table. | |
| 211 ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra); | |
| 212 | |
| 213 // Primary miss: Compute hash for secondary probe. | |
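|     // The secondary hash is derived from the primary hash by subtracting the | |
|     // name and adding the flags again, then masking to the secondary table size. | |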
| 214 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset)); | |
| 215 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset)); | |
| 216 __ xor_(offset, flags); | |
| 217 __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift); | |
| 218 __ sub(offset, name); | |
| 219 __ add(offset, Immediate(flags)); | |
| 220 __ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift); | |
| 221 | |
| 222 // Probe the secondary table. | |
| 223 ProbeTable( | |
| 224 isolate(), masm, flags, kSecondary, name, receiver, offset, extra); | |
| 225 | |
| 226 // Cache miss: Fall through and let the caller handle the miss by | |
| 227 // entering the runtime system. | |
| 228 __ bind(&miss); | |
| 229 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1); | |
| 230 } | |
| 231 | |
| 232 | |
| 233 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype( | |
| 234 MacroAssembler* masm, int index, Register prototype, Label* miss) { | |
| 235 // Get the global function with the given index. | |
| 236 Handle<JSFunction> function( | |
| 237 JSFunction::cast(masm->isolate()->native_context()->get(index))); | |
| 238 // Check we're still in the same context. | |
| 239 Register scratch = prototype; | |
| 240 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); | |
| 241 __ mov(scratch, Operand(esi, offset)); | |
| 242 __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); | |
| 243 __ cmp(Operand(scratch, Context::SlotOffset(index)), function); | |
| 244 __ j(not_equal, miss); | |
| 245 | |
| 246 // Load its initial map. The global functions all have initial maps. | |
| 247 __ Move(prototype, Immediate(Handle<Map>(function->initial_map()))); | |
| 248 // Load the prototype from the initial map. | |
| 249 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); | |
| 250 } | |
| 251 | |
| 252 | |
| 253 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype( | |
| 254 MacroAssembler* masm, Register receiver, Register scratch1, | |
| 255 Register scratch2, Label* miss_label) { | |
| 256 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); | |
| 257 __ mov(eax, scratch1); | |
| 258 __ ret(0); | |
| 259 } | |
| 260 | |
| 261 | |
| 262 static void PushInterceptorArguments(MacroAssembler* masm, | |
| 263 Register receiver, | |
| 264 Register holder, | |
| 265 Register name, | |
| 266 Handle<JSObject> holder_obj) { | |
| 267 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); | |
| 268 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); | |
| 269 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); | |
| 270 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); | |
| 271 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); | |
| 272 __ push(name); | |
| 273 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); | |
| 274 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor)); | |
| 275 Register scratch = name; | |
| 276 __ mov(scratch, Immediate(interceptor)); | |
| 277 __ push(scratch); | |
| 278 __ push(receiver); | |
| 279 __ push(holder); | |
| 280 } | |
| 281 | |
| 282 | |
| 283 static void CompileCallLoadPropertyWithInterceptor( | |
| 284 MacroAssembler* masm, | |
| 285 Register receiver, | |
| 286 Register holder, | |
| 287 Register name, | |
| 288 Handle<JSObject> holder_obj, | |
| 289 IC::UtilityId id) { | |
| 290 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); | |
| 291 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()), | |
| 292 NamedLoadHandlerCompiler::kInterceptorArgsLength); | |
| 293 } | |
| 294 | |
| 295 | |
| 296 // Generate a call to an API function. | |
| 297 // This function uses push() to generate smaller, faster code than | |
| 298 // the version above. It is an optimization that should be removed | |
| 299 // when API call ICs are generated in hydrogen. | |
| 300 void PropertyHandlerCompiler::GenerateFastApiCall( | |
| 301 MacroAssembler* masm, const CallOptimization& optimization, | |
| 302 Handle<Map> receiver_map, Register receiver, Register scratch_in, | |
| 303 bool is_store, int argc, Register* values) { | |
| 304 // Save the return address. | |
| 305 __ pop(scratch_in); | |
| 306 // receiver | |
| 307 __ push(receiver); | |
| 308 // Write the arguments to stack frame. | |
| 309 for (int i = 0; i < argc; i++) { | |
| 310 Register arg = values[argc-1-i]; | |
| 311 DCHECK(!receiver.is(arg)); | |
| 312 DCHECK(!scratch_in.is(arg)); | |
| 313 __ push(arg); | |
| 314 } | |
| 315 __ push(scratch_in); | |
| 316 // Stack now matches JSFunction abi. | |
| 317 DCHECK(optimization.is_simple_api_call()); | |
| 318 | |
| 319 // Abi for CallApiFunctionStub. | |
| 320 Register callee = eax; | |
| 321 Register call_data = ebx; | |
| 322 Register holder = ecx; | |
| 323 Register api_function_address = edx; | |
| 324 Register scratch = edi; // scratch_in is no longer valid. | |
| 325 | |
| 326 // Put holder in place. | |
| 327 CallOptimization::HolderLookup holder_lookup; | |
| 328 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType( | |
| 329 receiver_map, | |
| 330 &holder_lookup); | |
| 331 switch (holder_lookup) { | |
| 332 case CallOptimization::kHolderIsReceiver: | |
| 333 __ Move(holder, receiver); | |
| 334 break; | |
| 335 case CallOptimization::kHolderFound: | |
| 336 __ LoadHeapObject(holder, api_holder); | |
| 337 break; | |
| 338 case CallOptimization::kHolderNotFound: | |
| 339 UNREACHABLE(); | |
| 340 break; | |
| 341 } | |
| 342 | |
| 343 Isolate* isolate = masm->isolate(); | |
| 344 Handle<JSFunction> function = optimization.constant_function(); | |
| 345 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | |
| 346 Handle<Object> call_data_obj(api_call_info->data(), isolate); | |
| 347 | |
| 348 // Put callee in place. | |
| 349 __ LoadHeapObject(callee, function); | |
| 350 | |
| 351 bool call_data_undefined = false; | |
| 352 // Put call_data in place. | |
| 353 if (isolate->heap()->InNewSpace(*call_data_obj)) { | |
| 354 __ mov(scratch, api_call_info); | |
| 355 __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset)); | |
| 356 } else if (call_data_obj->IsUndefined()) { | |
| 357 call_data_undefined = true; | |
| 358 __ mov(call_data, Immediate(isolate->factory()->undefined_value())); | |
| 359 } else { | |
| 360 __ mov(call_data, call_data_obj); | |
| 361 } | |
| 362 | |
| 363 // Put api_function_address in place. | |
| 364 Address function_address = v8::ToCData<Address>(api_call_info->callback()); | |
| 365 __ mov(api_function_address, Immediate(function_address)); | |
| 366 | |
| 367 // Jump to stub. | |
| 368 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); | |
| 369 __ TailCallStub(&stub); | |
| 370 } | |
| 371 | |
| 372 | |
| 373 // Generate code to check that a global property cell is empty. Create | |
| 374 // the property cell at compilation time if no cell exists for the | |
| 375 // property. | |
| 376 void PropertyHandlerCompiler::GenerateCheckPropertyCell( | |
| 377 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, | |
| 378 Register scratch, Label* miss) { | |
| 379 Handle<PropertyCell> cell = | |
| 380 JSGlobalObject::EnsurePropertyCell(global, name); | |
| 381 DCHECK(cell->value()->IsTheHole()); | |
| 382 Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value(); | |
| 383 if (masm->serializer_enabled()) { | |
| 384 __ mov(scratch, Immediate(cell)); | |
| 385 __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset), | |
| 386 Immediate(the_hole)); | |
| 387 } else { | |
| 388 __ cmp(Operand::ForCell(cell), Immediate(the_hole)); | |
| 389 } | |
| 390 __ j(not_equal, miss); | |
| 391 } | |
| 392 | |
| 393 | |
| 394 void PropertyAccessCompiler::GenerateTailCall(MacroAssembler* masm, | |
| 395 Handle<Code> code) { | |
| 396 __ jmp(code, RelocInfo::CODE_TARGET); | |
| 397 } | |
| 398 | |
| 399 | |
| 400 #undef __ | |
| 401 #define __ ACCESS_MASM(masm()) | |
| 402 | |
| 403 | |
| 404 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label, | |
| 405 Handle<Name> name) { | |
| 406 if (!label->is_unused()) { | |
| 407 __ bind(label); | |
| 408 __ mov(this->name(), Immediate(name)); | |
| 409 } | |
| 410 } | |
| 411 | |
| 412 | |
| 413 // Receiver_reg is preserved on jumps to miss_label, but may be destroyed if | |
| 414 // store is successful. | |
| 415 void NamedStoreHandlerCompiler::GenerateStoreTransition( | |
| 416 Handle<Map> transition, Handle<Name> name, Register receiver_reg, | |
| 417 Register storage_reg, Register value_reg, Register scratch1, | |
| 418 Register scratch2, Register unused, Label* miss_label, Label* slow) { | |
| 419 int descriptor = transition->LastAdded(); | |
| 420 DescriptorArray* descriptors = transition->instance_descriptors(); | |
| 421 PropertyDetails details = descriptors->GetDetails(descriptor); | |
| 422 Representation representation = details.representation(); | |
| 423 DCHECK(!representation.IsNone()); | |
| 424 | |
| 425 if (details.type() == CONSTANT) { | |
| 426 Handle<Object> constant(descriptors->GetValue(descriptor), isolate()); | |
| 427 __ CmpObject(value_reg, constant); | |
| 428 __ j(not_equal, miss_label); | |
| 429 } else if (representation.IsSmi()) { | |
| 430 __ JumpIfNotSmi(value_reg, miss_label); | |
| 431 } else if (representation.IsHeapObject()) { | |
| 432 __ JumpIfSmi(value_reg, miss_label); | |
| 433 HeapType* field_type = descriptors->GetFieldType(descriptor); | |
| 434 HeapType::Iterator<Map> it = field_type->Classes(); | |
| 435 if (!it.Done()) { | |
| 436 Label do_store; | |
| 437 while (true) { | |
| 438 __ CompareMap(value_reg, it.Current()); | |
| 439 it.Advance(); | |
| 440 if (it.Done()) { | |
| 441 __ j(not_equal, miss_label); | |
| 442 break; | |
| 443 } | |
| 444 __ j(equal, &do_store, Label::kNear); | |
| 445 } | |
| 446 __ bind(&do_store); | |
| 447 } | |
| 448 } else if (representation.IsDouble()) { | |
| 449 Label do_store, heap_number; | |
| 450 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow, MUTABLE); | |
| 451 | |
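|     // Convert the value to a double in xmm0 (untagging a smi or reading an | |
|     // existing heap number), then store it into storage_reg at do_store. | |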
| 452 __ JumpIfNotSmi(value_reg, &heap_number); | |
| 453 __ SmiUntag(value_reg); | |
| 454 __ Cvtsi2sd(xmm0, value_reg); | |
| 455 __ SmiTag(value_reg); | |
| 456 __ jmp(&do_store); | |
| 457 | |
| 458 __ bind(&heap_number); | |
| 459 __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label, | |
| 460 DONT_DO_SMI_CHECK); | |
| 461 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); | |
| 462 | |
| 463 __ bind(&do_store); | |
| 464 __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0); | |
| 465 } | |
| 466 | |
| 467 // Stub never generated for objects that require access checks. | |
| 468 DCHECK(!transition->is_access_check_needed()); | |
| 469 | |
| 470 // Perform map transition for the receiver if necessary. | |
| 471 if (details.type() == FIELD && | |
| 472 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { | |
| 473 // The properties must be extended before we can store the value. | |
| 474 // We jump to a runtime call that extends the properties array. | |
| 475 __ pop(scratch1); // Return address. | |
| 476 __ push(receiver_reg); | |
| 477 __ push(Immediate(transition)); | |
| 478 __ push(value_reg); | |
| 479 __ push(scratch1); | |
| 480 __ TailCallExternalReference( | |
| 481 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | |
| 482 isolate()), | |
| 483 3, 1); | |
| 484 return; | |
| 485 } | |
| 486 | |
| 487 // Update the map of the object. | |
| 488 __ mov(scratch1, Immediate(transition)); | |
| 489 __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); | |
| 490 | |
| 491 // Update the write barrier for the map field. | |
| 492 __ RecordWriteField(receiver_reg, | |
| 493 HeapObject::kMapOffset, | |
| 494 scratch1, | |
| 495 scratch2, | |
| 496 kDontSaveFPRegs, | |
| 497 OMIT_REMEMBERED_SET, | |
| 498 OMIT_SMI_CHECK); | |
| 499 | |
| 500 if (details.type() == CONSTANT) { | |
| 501 DCHECK(value_reg.is(eax)); | |
| 502 __ ret(0); | |
| 503 return; | |
| 504 } | |
| 505 | |
| 506 int index = transition->instance_descriptors()->GetFieldIndex( | |
| 507 transition->LastAdded()); | |
| 508 | |
| 509 // Adjust for the number of properties stored in the object. Even in the | |
| 510 // face of a transition we can use the old map here because the size of the | |
| 511 // object and the number of in-object properties are not going to change. | |
| 512 index -= transition->inobject_properties(); | |
| 513 | |
| 514 SmiCheck smi_check = representation.IsTagged() | |
| 515 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | |
| 516 // TODO(verwaest): Share this code as a code stub. | |
| 517 if (index < 0) { | |
| 518 // Set the property straight into the object. | |
| 519 int offset = transition->instance_size() + (index * kPointerSize); | |
| 520 if (representation.IsDouble()) { | |
| 521 __ mov(FieldOperand(receiver_reg, offset), storage_reg); | |
| 522 } else { | |
| 523 __ mov(FieldOperand(receiver_reg, offset), value_reg); | |
| 524 } | |
| 525 | |
| 526 if (!representation.IsSmi()) { | |
| 527 // Update the write barrier for the array address. | |
| 528 if (!representation.IsDouble()) { | |
| 529 __ mov(storage_reg, value_reg); | |
| 530 } | |
| 531 __ RecordWriteField(receiver_reg, | |
| 532 offset, | |
| 533 storage_reg, | |
| 534 scratch1, | |
| 535 kDontSaveFPRegs, | |
| 536 EMIT_REMEMBERED_SET, | |
| 537 smi_check); | |
| 538 } | |
| 539 } else { | |
| 540 // Write to the properties array. | |
| 541 int offset = index * kPointerSize + FixedArray::kHeaderSize; | |
| 542 // Get the properties array (optimistically). | |
| 543 __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); | |
| 544 if (representation.IsDouble()) { | |
| 545 __ mov(FieldOperand(scratch1, offset), storage_reg); | |
| 546 } else { | |
| 547 __ mov(FieldOperand(scratch1, offset), value_reg); | |
| 548 } | |
| 549 | |
| 550 if (!representation.IsSmi()) { | |
| 551 // Update the write barrier for the array address. | |
| 552 if (!representation.IsDouble()) { | |
| 553 __ mov(storage_reg, value_reg); | |
| 554 } | |
| 555 __ RecordWriteField(scratch1, | |
| 556 offset, | |
| 557 storage_reg, | |
| 558 receiver_reg, | |
| 559 kDontSaveFPRegs, | |
| 560 EMIT_REMEMBERED_SET, | |
| 561 smi_check); | |
| 562 } | |
| 563 } | |
| 564 | |
| 565 // Return the value (register eax). | |
| 566 DCHECK(value_reg.is(eax)); | |
| 567 __ ret(0); | |
| 568 } | |
| 569 | |
| 570 | |
| 571 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup, | |
| 572 Register value_reg, | |
| 573 Label* miss_label) { | |
| 574 DCHECK(lookup->representation().IsHeapObject()); | |
| 575 __ JumpIfSmi(value_reg, miss_label); | |
| 576 HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes(); | |
| 577 Label do_store; | |
| 578 while (true) { | |
| 579 __ CompareMap(value_reg, it.Current()); | |
| 580 it.Advance(); | |
| 581 if (it.Done()) { | |
| 582 __ j(not_equal, miss_label); | |
| 583 break; | |
| 584 } | |
| 585 __ j(equal, &do_store, Label::kNear); | |
| 586 } | |
| 587 __ bind(&do_store); | |
| 588 | |
| 589 StoreFieldStub stub(isolate(), lookup->GetFieldIndex(), | |
| 590 lookup->representation()); | |
| 591 GenerateTailCall(masm(), stub.GetCode()); | |
| 592 } | |
| 593 | |
| 594 | |
| 595 Register PropertyHandlerCompiler::CheckPrototypes( | |
| 596 Register object_reg, Register holder_reg, Register scratch1, | |
| 597 Register scratch2, Handle<Name> name, Label* miss, | |
| 598 PrototypeCheckType check) { | |
| 599 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); | |
| 600 | |
| 601 // Make sure there's no overlap between holder and object registers. | |
| 602 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); | |
| 603 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) | |
| 604 && !scratch2.is(scratch1)); | |
| 605 | |
| 606 // Keep track of the current object in register reg. | |
| 607 Register reg = object_reg; | |
| 608 int depth = 0; | |
| 609 | |
| 610 Handle<JSObject> current = Handle<JSObject>::null(); | |
| 611 if (type()->IsConstant()) | |
| 612 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); | |
| 613 Handle<JSObject> prototype = Handle<JSObject>::null(); | |
| 614 Handle<Map> current_map = receiver_map; | |
| 615 Handle<Map> holder_map(holder()->map()); | |
| 616 // Traverse the prototype chain and check the maps in the prototype chain for | |
| 617 // fast and global objects, or do a negative lookup for normal objects. | |
| 618 while (!current_map.is_identical_to(holder_map)) { | |
| 619 ++depth; | |
| 620 | |
| 621 // Only global objects and objects that do not require access | |
| 622 // checks are allowed in stubs. | |
| 623 DCHECK(current_map->IsJSGlobalProxyMap() || | |
| 624 !current_map->is_access_check_needed()); | |
| 625 | |
| 626 prototype = handle(JSObject::cast(current_map->prototype())); | |
| 627 if (current_map->is_dictionary_map() && | |
| 628 !current_map->IsJSGlobalObjectMap()) { | |
| 629 DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast. | |
| 630 if (!name->IsUniqueName()) { | |
| 631 DCHECK(name->IsString()); | |
| 632 name = factory()->InternalizeString(Handle<String>::cast(name)); | |
| 633 } | |
| 634 DCHECK(current.is_null() || | |
| 635 current->property_dictionary()->FindEntry(name) == | |
| 636 NameDictionary::kNotFound); | |
| 637 | |
| 638 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, | |
| 639 scratch1, scratch2); | |
| 640 | |
| 641 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); | |
| 642 reg = holder_reg; // From now on the object will be in holder_reg. | |
| 643 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); | |
| 644 } else { | |
| 645 bool in_new_space = heap()->InNewSpace(*prototype); | |
| 646 // Two possible reasons for loading the prototype from the map: | |
| 647 // (1) Can't store references to new space in code. | |
| 648 // (2) Handler is shared for all receivers with the same prototype | |
| 649 // map (but not necessarily the same prototype instance). | |
| 650 bool load_prototype_from_map = in_new_space || depth == 1; | |
| 651 if (depth != 1 || check == CHECK_ALL_MAPS) { | |
| 652 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK); | |
| 653 } | |
| 654 | |
| 655 // Check access rights to the global object. This has to happen after | |
| 656 // the map check so that we know that the object is actually a global | |
| 657 // object. | |
| 658 // This allows us to install generated handlers for accesses to the | |
| 659 // global proxy (as opposed to using slow ICs). See corresponding code | |
| 660 // in LookupForRead(). | |
| 661 if (current_map->IsJSGlobalProxyMap()) { | |
| 662 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss); | |
| 663 } else if (current_map->IsJSGlobalObjectMap()) { | |
| 664 GenerateCheckPropertyCell( | |
| 665 masm(), Handle<JSGlobalObject>::cast(current), name, | |
| 666 scratch2, miss); | |
| 667 } | |
| 668 | |
| 669 if (load_prototype_from_map) { | |
| 670 // Save the map in scratch1 for later. | |
| 671 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); | |
| 672 } | |
| 673 | |
| 674 reg = holder_reg; // From now on the object will be in holder_reg. | |
| 675 | |
| 676 if (load_prototype_from_map) { | |
| 677 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); | |
| 678 } else { | |
| 679 __ mov(reg, prototype); | |
| 680 } | |
| 681 } | |
| 682 | |
| 683 // Go to the next object in the prototype chain. | |
| 684 current = prototype; | |
| 685 current_map = handle(current->map()); | |
| 686 } | |
| 687 | |
| 688 // Log the check depth. | |
| 689 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); | |
| 690 | |
| 691 if (depth != 0 || check == CHECK_ALL_MAPS) { | |
| 692 // Check the holder map. | |
| 693 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK); | |
| 694 } | |
| 695 | |
| 696 // Perform security check for access to the global object. | |
| 697 DCHECK(current_map->IsJSGlobalProxyMap() || | |
| 698 !current_map->is_access_check_needed()); | |
| 699 if (current_map->IsJSGlobalProxyMap()) { | |
| 700 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss); | |
| 701 } | |
| 702 | |
| 703 // Return the register containing the holder. | |
| 704 return reg; | |
| 705 } | |
| 706 | |
| 707 | |
| 708 void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) { | |
| 709 if (!miss->is_unused()) { | |
| 710 Label success; | |
| 711 __ jmp(&success); | |
| 712 __ bind(miss); | |
| 713 TailCallBuiltin(masm(), MissBuiltin(kind())); | |
| 714 __ bind(&success); | |
| 715 } | |
| 716 } | |
| 717 | |
| 718 | |
| 719 void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) { | |
| 720 if (!miss->is_unused()) { | |
| 721 Label success; | |
| 722 __ jmp(&success); | |
| 723 GenerateRestoreName(miss, name); | |
| 724 TailCallBuiltin(masm(), MissBuiltin(kind())); | |
| 725 __ bind(&success); | |
| 726 } | |
| 727 } | |
| 728 | |
| 729 | |
| 730 void NamedLoadHandlerCompiler::GenerateLoadCallback( | |
| 731 Register reg, Handle<ExecutableAccessorInfo> callback) { | |
| 732 // Insert additional parameters into the stack frame above return address. | |
| 733 DCHECK(!scratch3().is(reg)); | |
| 734 __ pop(scratch3()); // Get return address to place it below. | |
| 735 | |
| 736 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); | |
| 737 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); | |
| 738 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); | |
| 739 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); | |
| 740 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); | |
| 741 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); | |
| 742 __ push(receiver()); // receiver | |
| 743 // Push data from ExecutableAccessorInfo. | |
| 744 if (isolate()->heap()->InNewSpace(callback->data())) { | |
| 745 DCHECK(!scratch2().is(reg)); | |
| 746 __ mov(scratch2(), Immediate(callback)); | |
| 747 __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset)); | |
| 748 } else { | |
| 749 __ push(Immediate(Handle<Object>(callback->data(), isolate()))); | |
| 750 } | |
| 751 __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue | |
| 752 // ReturnValue default value | |
| 753 __ push(Immediate(isolate()->factory()->undefined_value())); | |
| 754 __ push(Immediate(reinterpret_cast<int>(isolate()))); | |
| 755 __ push(reg); // holder | |
| 756 | |
| 757 // Save a pointer to where we pushed the arguments. This will be | |
| 758 // passed as the const PropertyAccessorInfo& to the C++ callback. | |
| 759 __ push(esp); | |
| 760 | |
| 761 __ push(name()); // name | |
| 762 | |
| 763 __ push(scratch3()); // Restore return address. | |
| 764 | |
| 765 // Abi for CallApiGetter | |
| 766 Register getter_address = edx; | |
| 767 Address function_address = v8::ToCData<Address>(callback->getter()); | |
| 768 __ mov(getter_address, Immediate(function_address)); | |
| 769 | |
| 770 CallApiGetterStub stub(isolate()); | |
| 771 __ TailCallStub(&stub); | |
| 772 } | |
| 773 | |
| 774 | |
| 775 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) { | |
| 776 // Return the constant value. | |
| 777 __ LoadObject(eax, value); | |
| 778 __ ret(0); | |
| 779 } | |
| 780 | |
| 781 | |
| 782 void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup( | |
| 783 LookupIterator* it, Register holder_reg) { | |
| 784 DCHECK(holder()->HasNamedInterceptor()); | |
| 785 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); | |
| 786 | |
| 787 // Compile the interceptor call, followed by inline code to load the | |
| 788 // property from further up the prototype chain if the call fails. | |
| 789 // Check that the maps haven't changed. | |
| 790 DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1())); | |
| 791 | |
| 792 // Preserve the receiver register explicitly whenever it is different from the | |
| 793 // holder and it is needed should the interceptor return without any result. | |
| 794 // The ACCESSOR case needs the receiver to be passed into C++ code; the FIELD | |
| 795 // case might cause a miss during the prototype check. | |
| 796 bool must_perform_prototype_check = | |
| 797 !holder().is_identical_to(it->GetHolder<JSObject>()); | |
| 798 bool must_preserve_receiver_reg = | |
| 799 !receiver().is(holder_reg) && | |
| 800 (it->property_kind() == LookupIterator::ACCESSOR || | |
| 801 must_perform_prototype_check); | |
| 802 | |
| 803 // Save necessary data before invoking an interceptor. | |
| 804 // Requires a frame to make GC aware of pushed pointers. | |
| 805 { | |
| 806 FrameScope frame_scope(masm(), StackFrame::INTERNAL); | |
| 807 | |
| 808 if (must_preserve_receiver_reg) { | |
| 809 __ push(receiver()); | |
| 810 } | |
| 811 __ push(holder_reg); | |
| 812 __ push(this->name()); | |
| 813 | |
| 814 // Invoke the interceptor. Note: the map checks from the receiver to the | |
| 815 // interceptor's holder have already been compiled (see a caller | |
| 816 // of this method). | |
| 817 CompileCallLoadPropertyWithInterceptor( | |
| 818 masm(), receiver(), holder_reg, this->name(), holder(), | |
| 819 IC::kLoadPropertyWithInterceptorOnly); | |
| 820 | |
| 821 // Check if interceptor provided a value for property. If it's | |
| 822 // the case, return immediately. | |
| 823 Label interceptor_failed; | |
| 824 __ cmp(eax, factory()->no_interceptor_result_sentinel()); | |
| 825 __ j(equal, &interceptor_failed); | |
| 826 frame_scope.GenerateLeaveFrame(); | |
| 827 __ ret(0); | |
| 828 | |
| 829 // Clobber registers when generating debug-code to provoke errors. | |
| 830 __ bind(&interceptor_failed); | |
| 831 if (FLAG_debug_code) { | |
| 832 __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue))); | |
| 833 __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue))); | |
| 834 __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue))); | |
| 835 } | |
| 836 | |
| 837 __ pop(this->name()); | |
| 838 __ pop(holder_reg); | |
| 839 if (must_preserve_receiver_reg) { | |
| 840 __ pop(receiver()); | |
| 841 } | |
| 842 | |
| 843 // Leave the internal frame. | |
| 844 } | |
| 845 | |
| 846 GenerateLoadPostInterceptor(it, holder_reg); | |
| 847 } | |
| 848 | |
| 849 | |
| 850 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) { | |
| 851 DCHECK(holder()->HasNamedInterceptor()); | |
| 852 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); | |
| 853 // Call the runtime system to load the interceptor. | |
| 854 __ pop(scratch2()); // save old return address | |
| 855 PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(), | |
| 856 holder()); | |
| 857 __ push(scratch2()); // restore old return address | |
| 858 | |
| 859 ExternalReference ref = ExternalReference( | |
| 860 IC_Utility(IC::kLoadPropertyWithInterceptor), isolate()); | |
| 861 __ TailCallExternalReference( | |
| 862 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1); | |
| 863 } | |
| 864 | |
| 865 | |
| 866 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( | |
| 867 Handle<JSObject> object, Handle<Name> name, | |
| 868 Handle<ExecutableAccessorInfo> callback) { | |
| 869 Register holder_reg = Frontend(receiver(), name); | |
| 870 | |
| 871 __ pop(scratch1()); // remove the return address | |
| 872 __ push(receiver()); | |
| 873 __ push(holder_reg); | |
| 874 __ Push(callback); | |
| 875 __ Push(name); | |
| 876 __ push(value()); | |
| 877 __ push(scratch1()); // restore return address | |
| 878 | |
| 879 // Do tail-call to the runtime system. | |
| 880 ExternalReference store_callback_property = | |
| 881 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); | |
| 882 __ TailCallExternalReference(store_callback_property, 5, 1); | |
| 883 | |
| 884 // Return the generated code. | |
| 885 return GetCode(kind(), Code::FAST, name); | |
| 886 } | |
| 887 | |
| 888 | |
| 889 #undef __ | |
| 890 #define __ ACCESS_MASM(masm) | |
| 891 | |
| 892 | |
| 893 void NamedStoreHandlerCompiler::GenerateStoreViaSetter( | |
| 894 MacroAssembler* masm, Handle<HeapType> type, Register receiver, | |
| 895 Handle<JSFunction> setter) { | |
| 896 // ----------- S t a t e ------------- | |
| 897 // -- esp[0] : return address | |
| 898 // ----------------------------------- | |
| 899 { | |
| 900 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 901 | |
| 902 // Save value register, so we can restore it later. | |
| 903 __ push(value()); | |
| 904 | |
| 905 if (!setter.is_null()) { | |
| 906 // Call the JavaScript setter with receiver and value on the stack. | |
| 907 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { | |
| 908 // Swap in the global receiver. | |
| 909 __ mov(receiver, | |
| 910 FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); | |
| 911 } | |
| 912 __ push(receiver); | |
| 913 __ push(value()); | |
| 914 ParameterCount actual(1); | |
| 915 ParameterCount expected(setter); | |
| 916 __ InvokeFunction(setter, expected, actual, | |
| 917 CALL_FUNCTION, NullCallWrapper()); | |
| 918 } else { | |
| 919 // If we generate a global code snippet for deoptimization only, remember | |
| 920 // the place to continue after deoptimization. | |
| 921 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); | |
| 922 } | |
| 923 | |
| 924 // We have to return the passed value, not the return value of the setter. | |
| 925 __ pop(eax); | |
| 926 | |
| 927 // Restore context register. | |
| 928 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 929 } | |
| 930 __ ret(0); | |
| 931 } | |
| 932 | |
| 933 | |
| 934 #undef __ | |
| 935 #define __ ACCESS_MASM(masm()) | |
| 936 | |
| 937 | |
| 938 Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor( | |
| 939 Handle<Name> name) { | |
| 940 __ pop(scratch1()); // remove the return address | |
| 941 __ push(receiver()); | |
| 942 __ push(this->name()); | |
| 943 __ push(value()); | |
| 944 __ push(scratch1()); // restore return address | |
| 945 | |
| 946 // Do tail-call to the runtime system. | |
| 947 ExternalReference store_ic_property = ExternalReference( | |
| 948 IC_Utility(IC::kStorePropertyWithInterceptor), isolate()); | |
| 949 __ TailCallExternalReference(store_ic_property, 3, 1); | |
| 950 | |
| 951 // Return the generated code. | |
| 952 return GetCode(kind(), Code::FAST, name); | |
| 953 } | |
| 954 | |
| 955 | |
| 956 Handle<Code> PropertyICCompiler::CompileKeyedStorePolymorphic( | |
| 957 MapHandleList* receiver_maps, CodeHandleList* handler_stubs, | |
| 958 MapHandleList* transitioned_maps) { | |
| 959 Label miss; | |
| 960 __ JumpIfSmi(receiver(), &miss, Label::kNear); | |
| 961 __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset)); | |
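|     // Compare the receiver map against each handled map; for transitioning | |
|     // stores, install the transition map before jumping to the handler. | |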
| 962 for (int i = 0; i < receiver_maps->length(); ++i) { | |
| 963 __ cmp(scratch1(), receiver_maps->at(i)); | |
| 964 if (transitioned_maps->at(i).is_null()) { | |
| 965 __ j(equal, handler_stubs->at(i)); | |
| 966 } else { | |
| 967 Label next_map; | |
| 968 __ j(not_equal, &next_map, Label::kNear); | |
| 969 __ mov(transition_map(), Immediate(transitioned_maps->at(i))); | |
| 970 __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET); | |
| 971 __ bind(&next_map); | |
| 972 } | |
| 973 } | |
| 974 __ bind(&miss); | |
| 975 TailCallBuiltin(masm(), MissBuiltin(kind())); | |
| 976 | |
| 977 // Return the generated code. | |
| 978 return GetCode(kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC); | |
| 979 } | |
| 980 | |
| 981 | |
| 982 Register* PropertyAccessCompiler::load_calling_convention() { | |
| 983 // receiver, name, scratch1, scratch2, scratch3, scratch4. | |
| 984 Register receiver = LoadIC::ReceiverRegister(); | |
| 985 Register name = LoadIC::NameRegister(); | |
| 986 static Register registers[] = { receiver, name, ebx, eax, edi, no_reg }; | |
| 987 return registers; | |
| 988 } | |
| 989 | |
| 990 | |
| 991 Register* PropertyAccessCompiler::store_calling_convention() { | |
| 992 // receiver, name, scratch1, scratch2, scratch3. | |
| 993 Register receiver = StoreIC::ReceiverRegister(); | |
| 994 Register name = StoreIC::NameRegister(); | |
| 995 DCHECK(ebx.is(KeyedStoreIC::MapRegister())); | |
| 996 static Register registers[] = { receiver, name, ebx, edi, no_reg }; | |
| 997 return registers; | |
| 998 } | |
| 999 | |
| 1000 | |
| 1001 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } | |
| 1002 | |
| 1003 | |
| 1004 #undef __ | |
| 1005 #define __ ACCESS_MASM(masm) | |
| 1006 | |
| 1007 | |
| 1008 void NamedLoadHandlerCompiler::GenerateLoadViaGetter( | |
| 1009 MacroAssembler* masm, Handle<HeapType> type, Register receiver, | |
| 1010 Handle<JSFunction> getter) { | |
| 1011 { | |
| 1012 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 1013 | |
| 1014 if (!getter.is_null()) { | |
| 1015 // Call the JavaScript getter with the receiver on the stack. | |
| 1016 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { | |
| 1017 // Swap in the global receiver. | |
| 1018 __ mov(receiver, | |
| 1019 FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); | |
| 1020 } | |
| 1021 __ push(receiver); | |
| 1022 ParameterCount actual(0); | |
| 1023 ParameterCount expected(getter); | |
| 1024 __ InvokeFunction(getter, expected, actual, | |
| 1025 CALL_FUNCTION, NullCallWrapper()); | |
| 1026 } else { | |
| 1027 // If we generate a global code snippet for deoptimization only, remember | |
| 1028 // the place to continue after deoptimization. | |
| 1029 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); | |
| 1030 } | |
| 1031 | |
| 1032 // Restore context register. | |
| 1033 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 1034 } | |
| 1035 __ ret(0); | |
| 1036 } | |
| 1037 | |
| 1038 | |
| 1039 #undef __ | |
| 1040 #define __ ACCESS_MASM(masm()) | |
| 1041 | |
| 1042 | |
| 1043 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal( | |
| 1044 Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) { | |
| 1045 Label miss; | |
| 1046 | |
| 1047 FrontendHeader(receiver(), name, &miss); | |
| 1048 // Get the value from the cell. | |
| 1049 Register result = StoreIC::ValueRegister(); | |
| 1050 if (masm()->serializer_enabled()) { | |
| 1051 __ mov(result, Immediate(cell)); | |
| 1052 __ mov(result, FieldOperand(result, PropertyCell::kValueOffset)); | |
| 1053 } else { | |
| 1054 __ mov(result, Operand::ForCell(cell)); | |
| 1055 } | |
| 1056 | |
| 1057 // Check for a deleted property if the property can actually be deleted. | |
| 1058 if (is_configurable) { | |
| 1059 __ cmp(result, factory()->the_hole_value()); | |
| 1060 __ j(equal, &miss); | |
| 1061 } else if (FLAG_debug_code) { | |
| 1062 __ cmp(result, factory()->the_hole_value()); | |
| 1063 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole); | |
| 1064 } | |
| 1065 | |
| 1066 Counters* counters = isolate()->counters(); | |
| 1067 __ IncrementCounter(counters->named_load_global_stub(), 1); | |
| 1068 // The code above already loads the result into the return register. | |
| 1069 __ ret(0); | |
| 1070 | |
| 1071 FrontendFooter(name, &miss); | |
| 1072 | |
| 1073 // Return the generated code. | |
| 1074 return GetCode(kind(), Code::NORMAL, name); | |
| 1075 } | |
| 1076 | |
| 1077 | |
| 1078 Handle<Code> PropertyICCompiler::CompilePolymorphic(TypeHandleList* types, | |
| 1079 CodeHandleList* handlers, | |
| 1080 Handle<Name> name, | |
| 1081 Code::StubType type, | |
| 1082 IcCheckType check) { | |
| 1083 Label miss; | |
| 1084 | |
| 1085 if (check == PROPERTY && | |
| 1086 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) { | |
| 1087 // In case we are compiling an IC for dictionary loads and stores, just | |
| 1088 // check whether the name is unique. | |
| 1089 if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) { | |
| 1090 __ JumpIfNotUniqueName(this->name(), &miss); | |
| 1091 } else { | |
| 1092 __ cmp(this->name(), Immediate(name)); | |
| 1093 __ j(not_equal, &miss); | |
| 1094 } | |
| 1095 } | |
| 1096 | |
| 1097 Label number_case; | |
| 1098 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; | |
| 1099 __ JumpIfSmi(receiver(), smi_target); | |
| 1100 | |
| 1101 // Polymorphic keyed stores may use the map register. | |
| 1102 Register map_reg = scratch1(); | |
| 1103 DCHECK(kind() != Code::KEYED_STORE_IC || | |
| 1104 map_reg.is(KeyedStoreIC::MapRegister())); | |
| 1105 __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset)); | |
| 1106 int receiver_count = types->length(); | |
| 1107 int number_of_handled_maps = 0; | |
| 1108 for (int current = 0; current < receiver_count; ++current) { | |
| 1109 Handle<HeapType> type = types->at(current); | |
| 1110 Handle<Map> map = IC::TypeToMap(*type, isolate()); | |
| 1111 if (!map->is_deprecated()) { | |
| 1112 number_of_handled_maps++; | |
| 1113 __ cmp(map_reg, map); | |
| 1114 if (type->Is(HeapType::Number())) { | |
| 1115 DCHECK(!number_case.is_unused()); | |
| 1116 __ bind(&number_case); | |
| 1117 } | |
| 1118 __ j(equal, handlers->at(current)); | |
| 1119 } | |
| 1120 } | |
| 1121 DCHECK(number_of_handled_maps != 0); | |
| 1122 | |
| 1123 __ bind(&miss); | |
| 1124 TailCallBuiltin(masm(), MissBuiltin(kind())); | |
| 1125 | |
| 1126 // Return the generated code. | |
| 1127 InlineCacheState state = | |
| 1128 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; | |
| 1129 return GetCode(kind(), type, name, state); | |
| 1130 } | |
| 1131 | |
| 1132 | |
| 1133 #undef __ | |
| 1134 #define __ ACCESS_MASM(masm) | |
| 1135 | |
| 1136 | |
| 1137 void ElementHandlerCompiler::GenerateLoadDictionaryElement( | |
| 1138 MacroAssembler* masm) { | |
| 1139 // ----------- S t a t e ------------- | |
| 1140 // -- ecx : key | |
| 1141 // -- edx : receiver | |
| 1142 // -- esp[0] : return address | |
| 1143 // ----------------------------------- | |
| 1144 DCHECK(edx.is(LoadIC::ReceiverRegister())); | |
| 1145 DCHECK(ecx.is(LoadIC::NameRegister())); | |
| 1146 Label slow, miss; | |
| 1147 | |
| 1148 // This stub is meant to be tail-jumped to; the receiver must already | |
| 1149 // have been verified by the caller to not be a smi. | |
| 1150 __ JumpIfNotSmi(ecx, &miss); | |
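|     // Untag the smi key into ebx and load the receiver's elements (a number | |
|     // dictionary) into eax. | |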
| 1151 __ mov(ebx, ecx); | |
| 1152 __ SmiUntag(ebx); | |
| 1153 __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset)); | |
| 1154 | |
| 1155 // Push receiver on the stack to free up a register for the dictionary | |
| 1156 // probing. | |
| 1157 __ push(edx); | |
| 1158 __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax); | |
| 1159 // Pop receiver before returning. | |
| 1160 __ pop(edx); | |
| 1161 __ ret(0); | |
| 1162 | |
| 1163 __ bind(&slow); | |
| 1164 __ pop(edx); | |
| 1165 | |
| 1166 // ----------- S t a t e ------------- | |
| 1167 // -- ecx : key | |
| 1168 // -- edx : receiver | |
| 1169 // -- esp[0] : return address | |
| 1170 // ----------------------------------- | |
| 1171 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow); | |
| 1172 | |
| 1173 __ bind(&miss); | |
| 1174 // ----------- S t a t e ------------- | |
| 1175 // -- ecx : key | |
| 1176 // -- edx : receiver | |
| 1177 // -- esp[0] : return address | |
| 1178 // ----------------------------------- | |
| 1179 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | |
| 1180 } | |
| 1181 | |
| 1182 | |
| 1183 #undef __ | |
| 1184 | |
| 1185 } } // namespace v8::internal | |
| 1186 | |
| 1187 #endif // V8_TARGET_ARCH_IA32 | |