Chromium Code Reviews

Side by Side Diff: src/ia32/stub-cache-ia32.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes Created 6 years, 4 months ago
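
The change in this file is a mechanical rename of the ASSERT* macro family to DCHECK* (plain ASSERT becomes DCHECK; STATIC_ASSERT is left untouched). As a rough sketch of the semantics involved, assuming the usual Chromium-style debug-check behavior (simplified for illustration; not the actual definitions in src/base/logging.h, which also record file and line information):

  #include <cstdio>
  #include <cstdlib>

  // Illustrative stand-in for the DCHECK family adopted by this patch.
  // In debug builds a failed check prints the condition and aborts;
  // in release builds the check compiles away entirely.
  #ifdef DEBUG
  #define DCHECK(condition)                                            \
    do {                                                               \
      if (!(condition)) {                                              \
        std::fprintf(stderr, "Debug check failed: %s\n", #condition);  \
        std::abort();                                                  \
      }                                                                \
    } while (false)
  #else
  #define DCHECK(condition) ((void)0)
  #endif

  // Before the patch:  ASSERT(name->IsUniqueName());
  // After the patch:   DCHECK(name->IsUniqueName());
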
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_IA32 7 #if V8_TARGET_ARCH_IA32
8 8
9 #include "src/codegen.h" 9 #include "src/codegen.h"
10 #include "src/ic-inl.h" 10 #include "src/ic-inl.h"
(...skipping 99 matching lines...)
110 // Pop at miss. 110 // Pop at miss.
111 __ bind(&miss); 111 __ bind(&miss);
112 __ pop(offset); 112 __ pop(offset);
113 } 113 }
114 } 114 }
115 115
116 116
117 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( 117 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
118 MacroAssembler* masm, Label* miss_label, Register receiver, 118 MacroAssembler* masm, Label* miss_label, Register receiver,
119 Handle<Name> name, Register scratch0, Register scratch1) { 119 Handle<Name> name, Register scratch0, Register scratch1) {
120 ASSERT(name->IsUniqueName()); 120 DCHECK(name->IsUniqueName());
121 ASSERT(!receiver.is(scratch0)); 121 DCHECK(!receiver.is(scratch0));
122 Counters* counters = masm->isolate()->counters(); 122 Counters* counters = masm->isolate()->counters();
123 __ IncrementCounter(counters->negative_lookups(), 1); 123 __ IncrementCounter(counters->negative_lookups(), 1);
124 __ IncrementCounter(counters->negative_lookups_miss(), 1); 124 __ IncrementCounter(counters->negative_lookups_miss(), 1);
125 125
126 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset)); 126 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
127 127
128 const int kInterceptorOrAccessCheckNeededMask = 128 const int kInterceptorOrAccessCheckNeededMask =
129 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); 129 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
130 130
131 // Bail out if the receiver has a named interceptor or requires access checks. 131 // Bail out if the receiver has a named interceptor or requires access checks.
(...skipping 31 matching lines...)
163 Register receiver, 163 Register receiver,
164 Register name, 164 Register name,
165 Register scratch, 165 Register scratch,
166 Register extra, 166 Register extra,
167 Register extra2, 167 Register extra2,
168 Register extra3) { 168 Register extra3) {
169 Label miss; 169 Label miss;
170 170
171 // Assert that code is valid. The multiplying code relies on the entry size 171 // Assert that code is valid. The multiplying code relies on the entry size
172 // being 12. 172 // being 12.
173 ASSERT(sizeof(Entry) == 12); 173 DCHECK(sizeof(Entry) == 12);
174 174
175 // Assert the flags do not name a specific type. 175 // Assert the flags do not name a specific type.
176 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 176 DCHECK(Code::ExtractTypeFromFlags(flags) == 0);
177 177
178 // Assert that there are no register conflicts. 178 // Assert that there are no register conflicts.
179 ASSERT(!scratch.is(receiver)); 179 DCHECK(!scratch.is(receiver));
180 ASSERT(!scratch.is(name)); 180 DCHECK(!scratch.is(name));
181 ASSERT(!extra.is(receiver)); 181 DCHECK(!extra.is(receiver));
182 ASSERT(!extra.is(name)); 182 DCHECK(!extra.is(name));
183 ASSERT(!extra.is(scratch)); 183 DCHECK(!extra.is(scratch));
184 184
185 // Assert scratch and extra registers are valid, and extra2/3 are unused. 185 // Assert scratch and extra registers are valid, and extra2/3 are unused.
186 ASSERT(!scratch.is(no_reg)); 186 DCHECK(!scratch.is(no_reg));
187 ASSERT(extra2.is(no_reg)); 187 DCHECK(extra2.is(no_reg));
188 ASSERT(extra3.is(no_reg)); 188 DCHECK(extra3.is(no_reg));
189 189
190 Register offset = scratch; 190 Register offset = scratch;
191 scratch = no_reg; 191 scratch = no_reg;
192 192
193 Counters* counters = masm->isolate()->counters(); 193 Counters* counters = masm->isolate()->counters();
194 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1); 194 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
195 195
196 // Check that the receiver isn't a smi. 196 // Check that the receiver isn't a smi.
197 __ JumpIfSmi(receiver, &miss); 197 __ JumpIfSmi(receiver, &miss);
198 198
199 // Get the map of the receiver and compute the hash. 199 // Get the map of the receiver and compute the hash.
200 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset)); 200 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
201 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset)); 201 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
202 __ xor_(offset, flags); 202 __ xor_(offset, flags);
203 // We mask out the last two bits because they are not part of the hash and 203 // We mask out the last two bits because they are not part of the hash and
204 // they are always 01 for maps. Also in the two 'and' instructions below. 204 // they are always 01 for maps. Also in the two 'and' instructions below.
205 __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift); 205 __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
206 // ProbeTable expects the offset to be pointer scaled, which it is, because 206 // ProbeTable expects the offset to be pointer scaled, which it is, because
207 // the heap object tag size is 2 and the pointer size log 2 is also 2. 207 // the heap object tag size is 2 and the pointer size log 2 is also 2.
208 ASSERT(kCacheIndexShift == kPointerSizeLog2); 208 DCHECK(kCacheIndexShift == kPointerSizeLog2);
209 209
210 // Probe the primary table. 210 // Probe the primary table.
211 ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra); 211 ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
212 212
213 // Primary miss: Compute hash for secondary probe. 213 // Primary miss: Compute hash for secondary probe.
214 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset)); 214 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
215 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset)); 215 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
216 __ xor_(offset, flags); 216 __ xor_(offset, flags);
217 __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift); 217 __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
218 __ sub(offset, name); 218 __ sub(offset, name);
(...skipping 45 matching lines...)
264 Register holder, 264 Register holder,
265 Register name, 265 Register name,
266 Handle<JSObject> holder_obj) { 266 Handle<JSObject> holder_obj) {
267 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); 267 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
268 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); 268 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
269 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); 269 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
270 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); 270 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
271 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); 271 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
272 __ push(name); 272 __ push(name);
273 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); 273 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
274 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor)); 274 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
275 Register scratch = name; 275 Register scratch = name;
276 __ mov(scratch, Immediate(interceptor)); 276 __ mov(scratch, Immediate(interceptor));
277 __ push(scratch); 277 __ push(scratch);
278 __ push(receiver); 278 __ push(receiver);
279 __ push(holder); 279 __ push(holder);
280 } 280 }
281 281
282 282
283 static void CompileCallLoadPropertyWithInterceptor( 283 static void CompileCallLoadPropertyWithInterceptor(
284 MacroAssembler* masm, 284 MacroAssembler* masm,
(...skipping 16 matching lines...)
301 MacroAssembler* masm, const CallOptimization& optimization, 301 MacroAssembler* masm, const CallOptimization& optimization,
302 Handle<Map> receiver_map, Register receiver, Register scratch_in, 302 Handle<Map> receiver_map, Register receiver, Register scratch_in,
303 bool is_store, int argc, Register* values) { 303 bool is_store, int argc, Register* values) {
304 // Copy return value. 304 // Copy return value.
305 __ pop(scratch_in); 305 __ pop(scratch_in);
306 // receiver 306 // receiver
307 __ push(receiver); 307 __ push(receiver);
308 // Write the arguments to stack frame. 308 // Write the arguments to stack frame.
309 for (int i = 0; i < argc; i++) { 309 for (int i = 0; i < argc; i++) {
310 Register arg = values[argc-1-i]; 310 Register arg = values[argc-1-i];
311 ASSERT(!receiver.is(arg)); 311 DCHECK(!receiver.is(arg));
312 ASSERT(!scratch_in.is(arg)); 312 DCHECK(!scratch_in.is(arg));
313 __ push(arg); 313 __ push(arg);
314 } 314 }
315 __ push(scratch_in); 315 __ push(scratch_in);
316 // Stack now matches JSFunction abi. 316 // Stack now matches JSFunction abi.
317 ASSERT(optimization.is_simple_api_call()); 317 DCHECK(optimization.is_simple_api_call());
318 318
319 // Abi for CallApiFunctionStub. 319 // Abi for CallApiFunctionStub.
320 Register callee = eax; 320 Register callee = eax;
321 Register call_data = ebx; 321 Register call_data = ebx;
322 Register holder = ecx; 322 Register holder = ecx;
323 Register api_function_address = edx; 323 Register api_function_address = edx;
324 Register scratch = edi; // scratch_in is no longer valid. 324 Register scratch = edi; // scratch_in is no longer valid.
325 325
326 // Put holder in place. 326 // Put holder in place.
327 CallOptimization::HolderLookup holder_lookup; 327 CallOptimization::HolderLookup holder_lookup;
(...skipping 43 matching lines...)
371 371
372 372
373 // Generate code to check that a global property cell is empty. Create 373 // Generate code to check that a global property cell is empty. Create
374 // the property cell at compilation time if no cell exists for the 374 // the property cell at compilation time if no cell exists for the
375 // property. 375 // property.
376 void PropertyHandlerCompiler::GenerateCheckPropertyCell( 376 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
377 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, 377 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
378 Register scratch, Label* miss) { 378 Register scratch, Label* miss) {
379 Handle<PropertyCell> cell = 379 Handle<PropertyCell> cell =
380 JSGlobalObject::EnsurePropertyCell(global, name); 380 JSGlobalObject::EnsurePropertyCell(global, name);
381 ASSERT(cell->value()->IsTheHole()); 381 DCHECK(cell->value()->IsTheHole());
382 Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value(); 382 Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
383 if (masm->serializer_enabled()) { 383 if (masm->serializer_enabled()) {
384 __ mov(scratch, Immediate(cell)); 384 __ mov(scratch, Immediate(cell));
385 __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset), 385 __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
386 Immediate(the_hole)); 386 Immediate(the_hole));
387 } else { 387 } else {
388 __ cmp(Operand::ForCell(cell), Immediate(the_hole)); 388 __ cmp(Operand::ForCell(cell), Immediate(the_hole));
389 } 389 }
390 __ j(not_equal, miss); 390 __ j(not_equal, miss);
391 } 391 }
(...skipping 21 matching lines...)
413 // Receiver_reg is preserved on jumps to miss_label, but may be destroyed if 413 // Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
414 // store is successful. 414 // store is successful.
415 void NamedStoreHandlerCompiler::GenerateStoreTransition( 415 void NamedStoreHandlerCompiler::GenerateStoreTransition(
416 Handle<Map> transition, Handle<Name> name, Register receiver_reg, 416 Handle<Map> transition, Handle<Name> name, Register receiver_reg,
417 Register storage_reg, Register value_reg, Register scratch1, 417 Register storage_reg, Register value_reg, Register scratch1,
418 Register scratch2, Register unused, Label* miss_label, Label* slow) { 418 Register scratch2, Register unused, Label* miss_label, Label* slow) {
419 int descriptor = transition->LastAdded(); 419 int descriptor = transition->LastAdded();
420 DescriptorArray* descriptors = transition->instance_descriptors(); 420 DescriptorArray* descriptors = transition->instance_descriptors();
421 PropertyDetails details = descriptors->GetDetails(descriptor); 421 PropertyDetails details = descriptors->GetDetails(descriptor);
422 Representation representation = details.representation(); 422 Representation representation = details.representation();
423 ASSERT(!representation.IsNone()); 423 DCHECK(!representation.IsNone());
424 424
425 if (details.type() == CONSTANT) { 425 if (details.type() == CONSTANT) {
426 Handle<Object> constant(descriptors->GetValue(descriptor), isolate()); 426 Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
427 __ CmpObject(value_reg, constant); 427 __ CmpObject(value_reg, constant);
428 __ j(not_equal, miss_label); 428 __ j(not_equal, miss_label);
429 } else if (representation.IsSmi()) { 429 } else if (representation.IsSmi()) {
430 __ JumpIfNotSmi(value_reg, miss_label); 430 __ JumpIfNotSmi(value_reg, miss_label);
431 } else if (representation.IsHeapObject()) { 431 } else if (representation.IsHeapObject()) {
432 __ JumpIfSmi(value_reg, miss_label); 432 __ JumpIfSmi(value_reg, miss_label);
433 HeapType* field_type = descriptors->GetFieldType(descriptor); 433 HeapType* field_type = descriptors->GetFieldType(descriptor);
(...skipping 24 matching lines...)
458 __ bind(&heap_number); 458 __ bind(&heap_number);
459 __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label, 459 __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label,
460 DONT_DO_SMI_CHECK); 460 DONT_DO_SMI_CHECK);
461 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); 461 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
462 462
463 __ bind(&do_store); 463 __ bind(&do_store);
464 __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0); 464 __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
465 } 465 }
466 466
467 // Stub never generated for objects that require access checks. 467 // Stub never generated for objects that require access checks.
468 ASSERT(!transition->is_access_check_needed()); 468 DCHECK(!transition->is_access_check_needed());
469 469
470 // Perform map transition for the receiver if necessary. 470 // Perform map transition for the receiver if necessary.
471 if (details.type() == FIELD && 471 if (details.type() == FIELD &&
472 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { 472 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
473 // The properties must be extended before we can store the value. 473 // The properties must be extended before we can store the value.
474 // We jump to a runtime call that extends the properties array. 474 // We jump to a runtime call that extends the properties array.
475 __ pop(scratch1); // Return address. 475 __ pop(scratch1); // Return address.
476 __ push(receiver_reg); 476 __ push(receiver_reg);
477 __ push(Immediate(transition)); 477 __ push(Immediate(transition));
478 __ push(value_reg); 478 __ push(value_reg);
(...skipping 12 matching lines...)
491 // Update the write barrier for the map field. 491 // Update the write barrier for the map field.
492 __ RecordWriteField(receiver_reg, 492 __ RecordWriteField(receiver_reg,
493 HeapObject::kMapOffset, 493 HeapObject::kMapOffset,
494 scratch1, 494 scratch1,
495 scratch2, 495 scratch2,
496 kDontSaveFPRegs, 496 kDontSaveFPRegs,
497 OMIT_REMEMBERED_SET, 497 OMIT_REMEMBERED_SET,
498 OMIT_SMI_CHECK); 498 OMIT_SMI_CHECK);
499 499
500 if (details.type() == CONSTANT) { 500 if (details.type() == CONSTANT) {
501 ASSERT(value_reg.is(eax)); 501 DCHECK(value_reg.is(eax));
502 __ ret(0); 502 __ ret(0);
503 return; 503 return;
504 } 504 }
505 505
506 int index = transition->instance_descriptors()->GetFieldIndex( 506 int index = transition->instance_descriptors()->GetFieldIndex(
507 transition->LastAdded()); 507 transition->LastAdded());
508 508
509 // Adjust for the number of properties stored in the object. Even in the 509 // Adjust for the number of properties stored in the object. Even in the
510 // face of a transition we can use the old map here because the size of the 510 // face of a transition we can use the old map here because the size of the
511 // object and the number of in-object properties is not going to change. 511 // object and the number of in-object properties is not going to change.
(...skipping 44 matching lines...)
556 offset, 556 offset,
557 storage_reg, 557 storage_reg,
558 receiver_reg, 558 receiver_reg,
559 kDontSaveFPRegs, 559 kDontSaveFPRegs,
560 EMIT_REMEMBERED_SET, 560 EMIT_REMEMBERED_SET,
561 smi_check); 561 smi_check);
562 } 562 }
563 } 563 }
564 564
565 // Return the value (register eax). 565 // Return the value (register eax).
566 ASSERT(value_reg.is(eax)); 566 DCHECK(value_reg.is(eax));
567 __ ret(0); 567 __ ret(0);
568 } 568 }
569 569
570 570
571 // Both name_reg and receiver_reg are preserved on jumps to miss_label, 571 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
572 // but may be destroyed if store is successful. 572 // but may be destroyed if store is successful.
573 void NamedStoreHandlerCompiler::GenerateStoreField( 573 void NamedStoreHandlerCompiler::GenerateStoreField(
574 Handle<JSObject> object, LookupResult* lookup, Register receiver_reg, 574 Handle<JSObject> object, LookupResult* lookup, Register receiver_reg,
575 Register name_reg, Register value_reg, Register scratch1, Register scratch2, 575 Register name_reg, Register value_reg, Register scratch1, Register scratch2,
576 Label* miss_label) { 576 Label* miss_label) {
577 // Stub never generated for objects that require access checks. 577 // Stub never generated for objects that require access checks.
578 ASSERT(!object->IsAccessCheckNeeded()); 578 DCHECK(!object->IsAccessCheckNeeded());
579 ASSERT(!object->IsJSGlobalProxy()); 579 DCHECK(!object->IsJSGlobalProxy());
580 580
581 FieldIndex index = lookup->GetFieldIndex(); 581 FieldIndex index = lookup->GetFieldIndex();
582 582
583 Representation representation = lookup->representation(); 583 Representation representation = lookup->representation();
584 ASSERT(!representation.IsNone()); 584 DCHECK(!representation.IsNone());
585 if (representation.IsSmi()) { 585 if (representation.IsSmi()) {
586 __ JumpIfNotSmi(value_reg, miss_label); 586 __ JumpIfNotSmi(value_reg, miss_label);
587 } else if (representation.IsHeapObject()) { 587 } else if (representation.IsHeapObject()) {
588 __ JumpIfSmi(value_reg, miss_label); 588 __ JumpIfSmi(value_reg, miss_label);
589 HeapType* field_type = lookup->GetFieldType(); 589 HeapType* field_type = lookup->GetFieldType();
590 HeapType::Iterator<Map> it = field_type->Classes(); 590 HeapType::Iterator<Map> it = field_type->Classes();
591 if (!it.Done()) { 591 if (!it.Done()) {
592 Label do_store; 592 Label do_store;
593 while (true) { 593 while (true) {
594 __ CompareMap(value_reg, it.Current()); 594 __ CompareMap(value_reg, it.Current());
(...skipping 22 matching lines...)
617 __ Cvtsi2sd(xmm0, value_reg); 617 __ Cvtsi2sd(xmm0, value_reg);
618 __ SmiTag(value_reg); 618 __ SmiTag(value_reg);
619 __ jmp(&do_store); 619 __ jmp(&do_store);
620 __ bind(&heap_number); 620 __ bind(&heap_number);
621 __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label, 621 __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label,
622 DONT_DO_SMI_CHECK); 622 DONT_DO_SMI_CHECK);
623 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); 623 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
624 __ bind(&do_store); 624 __ bind(&do_store);
625 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0); 625 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
626 // Return the value (register eax). 626 // Return the value (register eax).
627 ASSERT(value_reg.is(eax)); 627 DCHECK(value_reg.is(eax));
628 __ ret(0); 628 __ ret(0);
629 return; 629 return;
630 } 630 }
631 631
632 ASSERT(!representation.IsDouble()); 632 DCHECK(!representation.IsDouble());
633 // TODO(verwaest): Share this code as a code stub. 633 // TODO(verwaest): Share this code as a code stub.
634 SmiCheck smi_check = representation.IsTagged() 634 SmiCheck smi_check = representation.IsTagged()
635 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; 635 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
636 if (index.is_inobject()) { 636 if (index.is_inobject()) {
637 // Set the property straight into the object. 637 // Set the property straight into the object.
638 __ mov(FieldOperand(receiver_reg, index.offset()), value_reg); 638 __ mov(FieldOperand(receiver_reg, index.offset()), value_reg);
639 639
640 if (!representation.IsSmi()) { 640 if (!representation.IsSmi()) {
641 // Update the write barrier for the array address. 641 // Update the write barrier for the array address.
642 // Pass the value being stored in the now unused name_reg. 642 // Pass the value being stored in the now unused name_reg.
(...skipping 20 matching lines...)
663 index.offset(), 663 index.offset(),
664 name_reg, 664 name_reg,
665 receiver_reg, 665 receiver_reg,
666 kDontSaveFPRegs, 666 kDontSaveFPRegs,
667 EMIT_REMEMBERED_SET, 667 EMIT_REMEMBERED_SET,
668 smi_check); 668 smi_check);
669 } 669 }
670 } 670 }
671 671
672 // Return the value (register eax). 672 // Return the value (register eax).
673 ASSERT(value_reg.is(eax)); 673 DCHECK(value_reg.is(eax));
674 __ ret(0); 674 __ ret(0);
675 } 675 }
676 676
677 677
678 Register PropertyHandlerCompiler::CheckPrototypes( 678 Register PropertyHandlerCompiler::CheckPrototypes(
679 Register object_reg, Register holder_reg, Register scratch1, 679 Register object_reg, Register holder_reg, Register scratch1,
680 Register scratch2, Handle<Name> name, Label* miss, 680 Register scratch2, Handle<Name> name, Label* miss,
681 PrototypeCheckType check) { 681 PrototypeCheckType check) {
682 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); 682 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
683 683
684 // Make sure there's no overlap between holder and object registers. 684 // Make sure there's no overlap between holder and object registers.
685 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); 685 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
686 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) 686 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
687 && !scratch2.is(scratch1)); 687 && !scratch2.is(scratch1));
688 688
689 // Keep track of the current object in register reg. 689 // Keep track of the current object in register reg.
690 Register reg = object_reg; 690 Register reg = object_reg;
691 int depth = 0; 691 int depth = 0;
692 692
693 Handle<JSObject> current = Handle<JSObject>::null(); 693 Handle<JSObject> current = Handle<JSObject>::null();
694 if (type()->IsConstant()) 694 if (type()->IsConstant())
695 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); 695 current = Handle<JSObject>::cast(type()->AsConstant()->Value());
696 Handle<JSObject> prototype = Handle<JSObject>::null(); 696 Handle<JSObject> prototype = Handle<JSObject>::null();
697 Handle<Map> current_map = receiver_map; 697 Handle<Map> current_map = receiver_map;
698 Handle<Map> holder_map(holder()->map()); 698 Handle<Map> holder_map(holder()->map());
699 // Traverse the prototype chain and check the maps in the prototype chain for 699 // Traverse the prototype chain and check the maps in the prototype chain for
700 // fast and global objects or do negative lookup for normal objects. 700 // fast and global objects or do negative lookup for normal objects.
701 while (!current_map.is_identical_to(holder_map)) { 701 while (!current_map.is_identical_to(holder_map)) {
702 ++depth; 702 ++depth;
703 703
704 // Only global objects and objects that do not require access 704 // Only global objects and objects that do not require access
705 // checks are allowed in stubs. 705 // checks are allowed in stubs.
706 ASSERT(current_map->IsJSGlobalProxyMap() || 706 DCHECK(current_map->IsJSGlobalProxyMap() ||
707 !current_map->is_access_check_needed()); 707 !current_map->is_access_check_needed());
708 708
709 prototype = handle(JSObject::cast(current_map->prototype())); 709 prototype = handle(JSObject::cast(current_map->prototype()));
710 if (current_map->is_dictionary_map() && 710 if (current_map->is_dictionary_map() &&
711 !current_map->IsJSGlobalObjectMap() && 711 !current_map->IsJSGlobalObjectMap() &&
712 !current_map->IsJSGlobalProxyMap()) { 712 !current_map->IsJSGlobalProxyMap()) {
713 if (!name->IsUniqueName()) { 713 if (!name->IsUniqueName()) {
714 ASSERT(name->IsString()); 714 DCHECK(name->IsString());
715 name = factory()->InternalizeString(Handle<String>::cast(name)); 715 name = factory()->InternalizeString(Handle<String>::cast(name));
716 } 716 }
717 ASSERT(current.is_null() || 717 DCHECK(current.is_null() ||
718 current->property_dictionary()->FindEntry(name) == 718 current->property_dictionary()->FindEntry(name) ==
719 NameDictionary::kNotFound); 719 NameDictionary::kNotFound);
720 720
721 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, 721 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
722 scratch1, scratch2); 722 scratch1, scratch2);
723 723
724 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); 724 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
725 reg = holder_reg; // From now on the object will be in holder_reg. 725 reg = holder_reg; // From now on the object will be in holder_reg.
726 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); 726 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
727 } else { 727 } else {
(...skipping 39 matching lines...)
767 767
768 // Log the check depth. 768 // Log the check depth.
769 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); 769 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
770 770
771 if (depth != 0 || check == CHECK_ALL_MAPS) { 771 if (depth != 0 || check == CHECK_ALL_MAPS) {
772 // Check the holder map. 772 // Check the holder map.
773 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK); 773 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
774 } 774 }
775 775
776 // Perform security check for access to the global object. 776 // Perform security check for access to the global object.
777 ASSERT(current_map->IsJSGlobalProxyMap() || 777 DCHECK(current_map->IsJSGlobalProxyMap() ||
778 !current_map->is_access_check_needed()); 778 !current_map->is_access_check_needed());
779 if (current_map->IsJSGlobalProxyMap()) { 779 if (current_map->IsJSGlobalProxyMap()) {
780 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss); 780 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
781 } 781 }
782 782
783 // Return the register containing the holder. 783 // Return the register containing the holder.
784 return reg; 784 return reg;
785 } 785 }
786 786
787 787
(...skipping 20 matching lines...)
808 808
809 809
810 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg, 810 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg,
811 Handle<Name> name, 811 Handle<Name> name,
812 Handle<Object> callback) { 812 Handle<Object> callback) {
813 Label miss; 813 Label miss;
814 814
815 Register reg = FrontendHeader(object_reg, name, &miss); 815 Register reg = FrontendHeader(object_reg, name, &miss);
816 816
817 if (!holder()->HasFastProperties()) { 817 if (!holder()->HasFastProperties()) {
818 ASSERT(!holder()->IsGlobalObject()); 818 DCHECK(!holder()->IsGlobalObject());
819 ASSERT(!reg.is(scratch2())); 819 DCHECK(!reg.is(scratch2()));
820 ASSERT(!reg.is(scratch3())); 820 DCHECK(!reg.is(scratch3()));
821 Register dictionary = scratch1(); 821 Register dictionary = scratch1();
822 bool must_preserve_dictionary_reg = reg.is(dictionary); 822 bool must_preserve_dictionary_reg = reg.is(dictionary);
823 823
824 // Load the properties dictionary. 824 // Load the properties dictionary.
825 if (must_preserve_dictionary_reg) { 825 if (must_preserve_dictionary_reg) {
826 __ push(dictionary); 826 __ push(dictionary);
827 } 827 }
828 __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset)); 828 __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));
829 829
830 // Probe the dictionary. 830 // Probe the dictionary.
(...skipping 37 matching lines...)
868 Register reg, FieldIndex field, Representation representation) { 868 Register reg, FieldIndex field, Representation representation) {
869 if (!reg.is(receiver())) __ mov(receiver(), reg); 869 if (!reg.is(receiver())) __ mov(receiver(), reg);
870 LoadFieldStub stub(isolate(), field); 870 LoadFieldStub stub(isolate(), field);
871 GenerateTailCall(masm(), stub.GetCode()); 871 GenerateTailCall(masm(), stub.GetCode());
872 } 872 }
873 873
874 874
875 void NamedLoadHandlerCompiler::GenerateLoadCallback( 875 void NamedLoadHandlerCompiler::GenerateLoadCallback(
876 Register reg, Handle<ExecutableAccessorInfo> callback) { 876 Register reg, Handle<ExecutableAccessorInfo> callback) {
877 // Insert additional parameters into the stack frame above return address. 877 // Insert additional parameters into the stack frame above return address.
878 ASSERT(!scratch3().is(reg)); 878 DCHECK(!scratch3().is(reg));
879 __ pop(scratch3()); // Get return address to place it below. 879 __ pop(scratch3()); // Get return address to place it below.
880 880
881 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); 881 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
882 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); 882 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
883 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); 883 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
884 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); 884 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
885 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); 885 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
886 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); 886 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
887 __ push(receiver()); // receiver 887 __ push(receiver()); // receiver
888 // Push data from ExecutableAccessorInfo. 888 // Push data from ExecutableAccessorInfo.
889 if (isolate()->heap()->InNewSpace(callback->data())) { 889 if (isolate()->heap()->InNewSpace(callback->data())) {
890 ASSERT(!scratch2().is(reg)); 890 DCHECK(!scratch2().is(reg));
891 __ mov(scratch2(), Immediate(callback)); 891 __ mov(scratch2(), Immediate(callback));
892 __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset)); 892 __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
893 } else { 893 } else {
894 __ push(Immediate(Handle<Object>(callback->data(), isolate()))); 894 __ push(Immediate(Handle<Object>(callback->data(), isolate())));
895 } 895 }
896 __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue 896 __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue
897 // ReturnValue default value 897 // ReturnValue default value
898 __ push(Immediate(isolate()->factory()->undefined_value())); 898 __ push(Immediate(isolate()->factory()->undefined_value()));
899 __ push(Immediate(reinterpret_cast<int>(isolate()))); 899 __ push(Immediate(reinterpret_cast<int>(isolate())));
900 __ push(reg); // holder 900 __ push(reg); // holder
(...skipping 19 matching lines...)
920 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) { 920 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
921 // Return the constant value. 921 // Return the constant value.
922 __ LoadObject(eax, value); 922 __ LoadObject(eax, value);
923 __ ret(0); 923 __ ret(0);
924 } 924 }
925 925
926 926
927 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg, 927 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg,
928 LookupResult* lookup, 928 LookupResult* lookup,
929 Handle<Name> name) { 929 Handle<Name> name) {
930 ASSERT(holder()->HasNamedInterceptor()); 930 DCHECK(holder()->HasNamedInterceptor());
931 ASSERT(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); 931 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
932 932
933 // So far the most popular follow ups for interceptor loads are FIELD 933 // So far the most popular follow ups for interceptor loads are FIELD
934 // and CALLBACKS, so inline only them, other cases may be added 934 // and CALLBACKS, so inline only them, other cases may be added
935 // later. 935 // later.
936 bool compile_followup_inline = false; 936 bool compile_followup_inline = false;
937 if (lookup->IsFound() && lookup->IsCacheable()) { 937 if (lookup->IsFound() && lookup->IsCacheable()) {
938 if (lookup->IsField()) { 938 if (lookup->IsField()) {
939 compile_followup_inline = true; 939 compile_followup_inline = true;
940 } else if (lookup->type() == CALLBACKS && 940 } else if (lookup->type() == CALLBACKS &&
941 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) { 941 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
942 Handle<ExecutableAccessorInfo> callback( 942 Handle<ExecutableAccessorInfo> callback(
943 ExecutableAccessorInfo::cast(lookup->GetCallbackObject())); 943 ExecutableAccessorInfo::cast(lookup->GetCallbackObject()));
944 compile_followup_inline = 944 compile_followup_inline =
945 callback->getter() != NULL && 945 callback->getter() != NULL &&
946 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback, 946 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback,
947 type()); 947 type());
948 } 948 }
949 } 949 }
950 950
951 if (compile_followup_inline) { 951 if (compile_followup_inline) {
952 // Compile the interceptor call, followed by inline code to load the 952 // Compile the interceptor call, followed by inline code to load the
953 // property from further up the prototype chain if the call fails. 953 // property from further up the prototype chain if the call fails.
954 // Check that the maps haven't changed. 954 // Check that the maps haven't changed.
955 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1())); 955 DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
956 956
957 // Preserve the receiver register explicitly whenever it is different from 957 // Preserve the receiver register explicitly whenever it is different from
958 // the holder and it is needed should the interceptor return without any 958 // the holder and it is needed should the interceptor return without any
959 // result. The CALLBACKS case needs the receiver to be passed into C++ code, 959 // result. The CALLBACKS case needs the receiver to be passed into C++ code,
960 // the FIELD case might cause a miss during the prototype check. 960 // the FIELD case might cause a miss during the prototype check.
961 bool must_perform_prototype_check = *holder() != lookup->holder(); 961 bool must_perform_prototype_check = *holder() != lookup->holder();
962 bool must_preserve_receiver_reg = !receiver().is(holder_reg) && 962 bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
963 (lookup->type() == CALLBACKS || must_perform_prototype_check); 963 (lookup->type() == CALLBACKS || must_perform_prototype_check);
964 964
965 // Save necessary data before invoking an interceptor. 965 // Save necessary data before invoking an interceptor.
(...skipping 179 matching lines...)
1145 Register name = LoadIC::NameRegister(); 1145 Register name = LoadIC::NameRegister();
1146 static Register registers[] = { receiver, name, ebx, eax, edi, no_reg }; 1146 static Register registers[] = { receiver, name, ebx, eax, edi, no_reg };
1147 return registers; 1147 return registers;
1148 } 1148 }
1149 1149
1150 1150
1151 Register* PropertyAccessCompiler::store_calling_convention() { 1151 Register* PropertyAccessCompiler::store_calling_convention() {
1152 // receiver, name, scratch1, scratch2, scratch3. 1152 // receiver, name, scratch1, scratch2, scratch3.
1153 Register receiver = StoreIC::ReceiverRegister(); 1153 Register receiver = StoreIC::ReceiverRegister();
1154 Register name = StoreIC::NameRegister(); 1154 Register name = StoreIC::NameRegister();
1155 ASSERT(ebx.is(KeyedStoreIC::MapRegister())); 1155 DCHECK(ebx.is(KeyedStoreIC::MapRegister()));
1156 static Register registers[] = { receiver, name, ebx, edi, no_reg }; 1156 static Register registers[] = { receiver, name, ebx, edi, no_reg };
1157 return registers; 1157 return registers;
1158 } 1158 }
1159 1159
1160 1160
1161 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } 1161 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); }
1162 1162
1163 1163
1164 #undef __ 1164 #undef __
1165 #define __ ACCESS_MASM(masm) 1165 #define __ ACCESS_MASM(masm)
(...skipping 87 matching lines...)
1253 __ j(not_equal, &miss); 1253 __ j(not_equal, &miss);
1254 } 1254 }
1255 } 1255 }
1256 1256
1257 Label number_case; 1257 Label number_case;
1258 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; 1258 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1259 __ JumpIfSmi(receiver(), smi_target); 1259 __ JumpIfSmi(receiver(), smi_target);
1260 1260
1261 // Polymorphic keyed stores may use the map register 1261 // Polymorphic keyed stores may use the map register
1262 Register map_reg = scratch1(); 1262 Register map_reg = scratch1();
1263 ASSERT(kind() != Code::KEYED_STORE_IC || 1263 DCHECK(kind() != Code::KEYED_STORE_IC ||
1264 map_reg.is(KeyedStoreIC::MapRegister())); 1264 map_reg.is(KeyedStoreIC::MapRegister()));
1265 __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset)); 1265 __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
1266 int receiver_count = types->length(); 1266 int receiver_count = types->length();
1267 int number_of_handled_maps = 0; 1267 int number_of_handled_maps = 0;
1268 for (int current = 0; current < receiver_count; ++current) { 1268 for (int current = 0; current < receiver_count; ++current) {
1269 Handle<HeapType> type = types->at(current); 1269 Handle<HeapType> type = types->at(current);
1270 Handle<Map> map = IC::TypeToMap(*type, isolate()); 1270 Handle<Map> map = IC::TypeToMap(*type, isolate());
1271 if (!map->is_deprecated()) { 1271 if (!map->is_deprecated()) {
1272 number_of_handled_maps++; 1272 number_of_handled_maps++;
1273 __ cmp(map_reg, map); 1273 __ cmp(map_reg, map);
1274 if (type->Is(HeapType::Number())) { 1274 if (type->Is(HeapType::Number())) {
1275 ASSERT(!number_case.is_unused()); 1275 DCHECK(!number_case.is_unused());
1276 __ bind(&number_case); 1276 __ bind(&number_case);
1277 } 1277 }
1278 __ j(equal, handlers->at(current)); 1278 __ j(equal, handlers->at(current));
1279 } 1279 }
1280 } 1280 }
1281 ASSERT(number_of_handled_maps != 0); 1281 DCHECK(number_of_handled_maps != 0);
1282 1282
1283 __ bind(&miss); 1283 __ bind(&miss);
1284 TailCallBuiltin(masm(), MissBuiltin(kind())); 1284 TailCallBuiltin(masm(), MissBuiltin(kind()));
1285 1285
1286 // Return the generated code. 1286 // Return the generated code.
1287 InlineCacheState state = 1287 InlineCacheState state =
1288 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; 1288 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1289 return GetCode(kind(), type, name, state); 1289 return GetCode(kind(), type, name, state);
1290 } 1290 }
1291 1291
1292 1292
1293 #undef __ 1293 #undef __
1294 #define __ ACCESS_MASM(masm) 1294 #define __ ACCESS_MASM(masm)
1295 1295
1296 1296
1297 void ElementHandlerCompiler::GenerateLoadDictionaryElement( 1297 void ElementHandlerCompiler::GenerateLoadDictionaryElement(
1298 MacroAssembler* masm) { 1298 MacroAssembler* masm) {
1299 // ----------- S t a t e ------------- 1299 // ----------- S t a t e -------------
1300 // -- ecx : key 1300 // -- ecx : key
1301 // -- edx : receiver 1301 // -- edx : receiver
1302 // -- esp[0] : return address 1302 // -- esp[0] : return address
1303 // ----------------------------------- 1303 // -----------------------------------
1304 ASSERT(edx.is(LoadIC::ReceiverRegister())); 1304 DCHECK(edx.is(LoadIC::ReceiverRegister()));
1305 ASSERT(ecx.is(LoadIC::NameRegister())); 1305 DCHECK(ecx.is(LoadIC::NameRegister()));
1306 Label slow, miss; 1306 Label slow, miss;
1307 1307
1308 // This stub is meant to be tail-jumped to, the receiver must already 1308 // This stub is meant to be tail-jumped to, the receiver must already
1309 // have been verified by the caller to not be a smi. 1309 // have been verified by the caller to not be a smi.
1310 __ JumpIfNotSmi(ecx, &miss); 1310 __ JumpIfNotSmi(ecx, &miss);
1311 __ mov(ebx, ecx); 1311 __ mov(ebx, ecx);
1312 __ SmiUntag(ebx); 1312 __ SmiUntag(ebx);
1313 __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset)); 1313 __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
1314 1314
1315 // Push receiver on the stack to free up a register for the dictionary 1315 // Push receiver on the stack to free up a register for the dictionary
(...skipping 22 matching lines...)
1338 // ----------------------------------- 1338 // -----------------------------------
1339 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); 1339 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1340 } 1340 }
1341 1341
1342 1342
1343 #undef __ 1343 #undef __
1344 1344
1345 } } // namespace v8::internal 1345 } } // namespace v8::internal
1346 1346
1347 #endif // V8_TARGET_ARCH_IA32 1347 #endif // V8_TARGET_ARCH_IA32
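
Background that may help when reading StubCache::GenerateProbe above: the probe builds the primary-table index from the name's hash field, the receiver's map word, and the code flags, then masks to the table size while excluding the two heap-object tag bits (always 01 for maps). A plain C++ sketch of that index computation, with illustrative constants (the real values live in src/stub-cache.h; the function name and the kPrimaryTableSize value below are assumptions for illustration):

  #include <cstdint>

  static const int kCacheIndexShift = 2;      // == kPointerSizeLog2 on ia32
  static const int kPrimaryTableSize = 2048;  // illustrative table size

  // Mirrors the ia32 sequence: mov (hash field), add (map), xor_ (flags),
  // and_ (mask to table size, keeping the offset pointer-scaled).
  std::uint32_t PrimaryProbeOffset(std::uint32_t hash_field,
                                   std::uint32_t map_word,
                                   std::uint32_t flags) {
    std::uint32_t offset = hash_field + map_word;  // combine name hash and map
    offset ^= flags;                               // mix in the code flags
    // Mask to the table; the low two bits are the heap-object tag rather
    // than hash bits, so the mask starts at kCacheIndexShift.
    offset &= static_cast<std::uint32_t>((kPrimaryTableSize - 1)
                                         << kCacheIndexShift);
    // No further scaling needed: the 2-bit tag matches kPointerSizeLog2 == 2.
    return offset;
  }
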
