Chromium Code Reviews

Side by Side Diff: src/mips/stub-cache-mips.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) | Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes | Created 6 years, 4 months ago
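This file's portion of the change is mechanical: each ASSERT* invocation becomes the corresponding DCHECK* invocation, aligning V8's assertion macros with Chromium-style naming. DCHECK-style checks are active in debug builds and compiled out of release builds, while CHECK stays enabled everywhere. Below is a minimal C++ sketch of that behaviour, not V8's actual macro (the real one lives in V8's logging header and routes failures through the fatal-error handler); the Probe() function and its parameters are hypothetical, echoing the first invariants checked in this diff.

#include <cstdint>
#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
#define DCHECK(condition)                                       \
  do {                                                          \
    if (!(condition)) {                                         \
      std::fprintf(stderr, "DCHECK failed: %s\n", #condition);  \
      std::abort();                                             \
    }                                                           \
  } while (false)
#else
#define DCHECK(condition) ((void)0)  // disappears in release builds
#endif

// Hypothetical caller mirroring the invariants at the top of ProbeTable().
void Probe(uint32_t value_off_addr, uint32_t key_off_addr) {
  DCHECK(value_off_addr > key_off_addr);
  DCHECK((value_off_addr - key_off_addr) % 4 == 0);
}

In a release build the two checks above compile away entirely, which is why the rename has no effect on shipped code.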
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_MIPS 7 #if V8_TARGET_ARCH_MIPS
8 8
9 #include "src/codegen.h" 9 #include "src/codegen.h"
10 #include "src/ic-inl.h" 10 #include "src/ic-inl.h"
(...skipping 18 matching lines...)
29 Register offset_scratch) { 29 Register offset_scratch) {
30 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); 30 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
31 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); 31 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
32 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); 32 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
33 33
34 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); 34 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
35 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); 35 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
36 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address()); 36 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
37 37
38 // Check the relative positions of the address fields. 38 // Check the relative positions of the address fields.
39 ASSERT(value_off_addr > key_off_addr); 39 DCHECK(value_off_addr > key_off_addr);
40 ASSERT((value_off_addr - key_off_addr) % 4 == 0); 40 DCHECK((value_off_addr - key_off_addr) % 4 == 0);
41 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); 41 DCHECK((value_off_addr - key_off_addr) < (256 * 4));
42 ASSERT(map_off_addr > key_off_addr); 42 DCHECK(map_off_addr > key_off_addr);
43 ASSERT((map_off_addr - key_off_addr) % 4 == 0); 43 DCHECK((map_off_addr - key_off_addr) % 4 == 0);
44 ASSERT((map_off_addr - key_off_addr) < (256 * 4)); 44 DCHECK((map_off_addr - key_off_addr) < (256 * 4));
45 45
46 Label miss; 46 Label miss;
47 Register base_addr = scratch; 47 Register base_addr = scratch;
48 scratch = no_reg; 48 scratch = no_reg;
49 49
50 // Multiply by 3 because there are 3 fields per entry (name, code, map). 50 // Multiply by 3 because there are 3 fields per entry (name, code, map).
51 __ sll(offset_scratch, offset, 1); 51 __ sll(offset_scratch, offset, 1);
52 __ Addu(offset_scratch, offset_scratch, offset); 52 __ Addu(offset_scratch, offset_scratch, offset);
53 53
54 // Calculate the base address of the entry. 54 // Calculate the base address of the entry.
(...skipping 35 matching lines...)
90 __ Jump(at); 90 __ Jump(at);
91 91
92 // Miss: fall through. 92 // Miss: fall through.
93 __ bind(&miss); 93 __ bind(&miss);
94 } 94 }
95 95
96 96
97 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( 97 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
98 MacroAssembler* masm, Label* miss_label, Register receiver, 98 MacroAssembler* masm, Label* miss_label, Register receiver,
99 Handle<Name> name, Register scratch0, Register scratch1) { 99 Handle<Name> name, Register scratch0, Register scratch1) {
100 ASSERT(name->IsUniqueName()); 100 DCHECK(name->IsUniqueName());
101 ASSERT(!receiver.is(scratch0)); 101 DCHECK(!receiver.is(scratch0));
102 Counters* counters = masm->isolate()->counters(); 102 Counters* counters = masm->isolate()->counters();
103 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); 103 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
104 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); 104 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
105 105
106 Label done; 106 Label done;
107 107
108 const int kInterceptorOrAccessCheckNeededMask = 108 const int kInterceptorOrAccessCheckNeededMask =
109 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); 109 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
110 110
111 // Bail out if the receiver has a named interceptor or requires access checks. 111 // Bail out if the receiver has a named interceptor or requires access checks.
(...skipping 38 matching lines...)
150 Register name, 150 Register name,
151 Register scratch, 151 Register scratch,
152 Register extra, 152 Register extra,
153 Register extra2, 153 Register extra2,
154 Register extra3) { 154 Register extra3) {
155 Isolate* isolate = masm->isolate(); 155 Isolate* isolate = masm->isolate();
156 Label miss; 156 Label miss;
157 157
158 // Make sure that code is valid. The multiplying code relies on the 158 // Make sure that code is valid. The multiplying code relies on the
159 // entry size being 12. 159 // entry size being 12.
160 ASSERT(sizeof(Entry) == 12); 160 DCHECK(sizeof(Entry) == 12);
161 161
162 // Make sure the flags does not name a specific type. 162 // Make sure the flags does not name a specific type.
163 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 163 DCHECK(Code::ExtractTypeFromFlags(flags) == 0);
164 164
165 // Make sure that there are no register conflicts. 165 // Make sure that there are no register conflicts.
166 ASSERT(!scratch.is(receiver)); 166 DCHECK(!scratch.is(receiver));
167 ASSERT(!scratch.is(name)); 167 DCHECK(!scratch.is(name));
168 ASSERT(!extra.is(receiver)); 168 DCHECK(!extra.is(receiver));
169 ASSERT(!extra.is(name)); 169 DCHECK(!extra.is(name));
170 ASSERT(!extra.is(scratch)); 170 DCHECK(!extra.is(scratch));
171 ASSERT(!extra2.is(receiver)); 171 DCHECK(!extra2.is(receiver));
172 ASSERT(!extra2.is(name)); 172 DCHECK(!extra2.is(name));
173 ASSERT(!extra2.is(scratch)); 173 DCHECK(!extra2.is(scratch));
174 ASSERT(!extra2.is(extra)); 174 DCHECK(!extra2.is(extra));
175 175
176 // Check register validity. 176 // Check register validity.
177 ASSERT(!scratch.is(no_reg)); 177 DCHECK(!scratch.is(no_reg));
178 ASSERT(!extra.is(no_reg)); 178 DCHECK(!extra.is(no_reg));
179 ASSERT(!extra2.is(no_reg)); 179 DCHECK(!extra2.is(no_reg));
180 ASSERT(!extra3.is(no_reg)); 180 DCHECK(!extra3.is(no_reg));
181 181
182 Counters* counters = masm->isolate()->counters(); 182 Counters* counters = masm->isolate()->counters();
183 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, 183 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
184 extra2, extra3); 184 extra2, extra3);
185 185
186 // Check that the receiver isn't a smi. 186 // Check that the receiver isn't a smi.
187 __ JumpIfSmi(receiver, &miss); 187 __ JumpIfSmi(receiver, &miss);
188 188
189 // Get the map of the receiver and compute the hash. 189 // Get the map of the receiver and compute the hash.
190 __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); 190 __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
(...skipping 74 matching lines...)
265 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); 265 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
266 __ Ret(USE_DELAY_SLOT); 266 __ Ret(USE_DELAY_SLOT);
267 __ mov(v0, scratch1); 267 __ mov(v0, scratch1);
268 } 268 }
269 269
270 270
271 void PropertyHandlerCompiler::GenerateCheckPropertyCell( 271 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
272 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, 272 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
273 Register scratch, Label* miss) { 273 Register scratch, Label* miss) {
274 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); 274 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
275 ASSERT(cell->value()->IsTheHole()); 275 DCHECK(cell->value()->IsTheHole());
276 __ li(scratch, Operand(cell)); 276 __ li(scratch, Operand(cell));
277 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); 277 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
278 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 278 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
279 __ Branch(miss, ne, scratch, Operand(at)); 279 __ Branch(miss, ne, scratch, Operand(at));
280 } 280 }
281 281
282 282
283 // Generate StoreTransition code, value is passed in a0 register. 283 // Generate StoreTransition code, value is passed in a0 register.
284 // After executing generated code, the receiver_reg and name_reg 284 // After executing generated code, the receiver_reg and name_reg
285 // may be clobbered. 285 // may be clobbered.
286 void NamedStoreHandlerCompiler::GenerateStoreTransition( 286 void NamedStoreHandlerCompiler::GenerateStoreTransition(
287 MacroAssembler* masm, LookupResult* lookup, Handle<Map> transition, 287 MacroAssembler* masm, LookupResult* lookup, Handle<Map> transition,
288 Handle<Name> name, Register receiver_reg, Register storage_reg, 288 Handle<Name> name, Register receiver_reg, Register storage_reg,
289 Register value_reg, Register scratch1, Register scratch2, Register scratch3, 289 Register value_reg, Register scratch1, Register scratch2, Register scratch3,
290 Label* miss_label, Label* slow) { 290 Label* miss_label, Label* slow) {
291 // a0 : value. 291 // a0 : value.
292 Label exit; 292 Label exit;
293 293
294 int descriptor = transition->LastAdded(); 294 int descriptor = transition->LastAdded();
295 DescriptorArray* descriptors = transition->instance_descriptors(); 295 DescriptorArray* descriptors = transition->instance_descriptors();
296 PropertyDetails details = descriptors->GetDetails(descriptor); 296 PropertyDetails details = descriptors->GetDetails(descriptor);
297 Representation representation = details.representation(); 297 Representation representation = details.representation();
298 ASSERT(!representation.IsNone()); 298 DCHECK(!representation.IsNone());
299 299
300 if (details.type() == CONSTANT) { 300 if (details.type() == CONSTANT) {
301 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); 301 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
302 __ li(scratch1, constant); 302 __ li(scratch1, constant);
303 __ Branch(miss_label, ne, value_reg, Operand(scratch1)); 303 __ Branch(miss_label, ne, value_reg, Operand(scratch1));
304 } else if (representation.IsSmi()) { 304 } else if (representation.IsSmi()) {
305 __ JumpIfNotSmi(value_reg, miss_label); 305 __ JumpIfNotSmi(value_reg, miss_label);
306 } else if (representation.IsHeapObject()) { 306 } else if (representation.IsHeapObject()) {
307 __ JumpIfSmi(value_reg, miss_label); 307 __ JumpIfSmi(value_reg, miss_label);
308 HeapType* field_type = descriptors->GetFieldType(descriptor); 308 HeapType* field_type = descriptors->GetFieldType(descriptor);
(...skipping 29 matching lines...)
338 __ bind(&heap_number); 338 __ bind(&heap_number);
339 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, 339 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
340 miss_label, DONT_DO_SMI_CHECK); 340 miss_label, DONT_DO_SMI_CHECK);
341 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); 341 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
342 342
343 __ bind(&do_store); 343 __ bind(&do_store);
344 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); 344 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
345 } 345 }
346 346
347 // Stub never generated for objects that require access checks. 347 // Stub never generated for objects that require access checks.
348 ASSERT(!transition->is_access_check_needed()); 348 DCHECK(!transition->is_access_check_needed());
349 349
350 // Perform map transition for the receiver if necessary. 350 // Perform map transition for the receiver if necessary.
351 if (details.type() == FIELD && 351 if (details.type() == FIELD &&
352 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { 352 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
353 // The properties must be extended before we can store the value. 353 // The properties must be extended before we can store the value.
354 // We jump to a runtime call that extends the properties array. 354 // We jump to a runtime call that extends the properties array.
355 __ push(receiver_reg); 355 __ push(receiver_reg);
356 __ li(a2, Operand(transition)); 356 __ li(a2, Operand(transition));
357 __ Push(a2, a0); 357 __ Push(a2, a0);
358 __ TailCallExternalReference( 358 __ TailCallExternalReference(
(...skipping 11 matching lines...)
370 __ RecordWriteField(receiver_reg, 370 __ RecordWriteField(receiver_reg,
371 HeapObject::kMapOffset, 371 HeapObject::kMapOffset,
372 scratch1, 372 scratch1,
373 scratch2, 373 scratch2,
374 kRAHasNotBeenSaved, 374 kRAHasNotBeenSaved,
375 kDontSaveFPRegs, 375 kDontSaveFPRegs,
376 OMIT_REMEMBERED_SET, 376 OMIT_REMEMBERED_SET,
377 OMIT_SMI_CHECK); 377 OMIT_SMI_CHECK);
378 378
379 if (details.type() == CONSTANT) { 379 if (details.type() == CONSTANT) {
380 ASSERT(value_reg.is(a0)); 380 DCHECK(value_reg.is(a0));
381 __ Ret(USE_DELAY_SLOT); 381 __ Ret(USE_DELAY_SLOT);
382 __ mov(v0, a0); 382 __ mov(v0, a0);
383 return; 383 return;
384 } 384 }
385 385
386 int index = transition->instance_descriptors()->GetFieldIndex( 386 int index = transition->instance_descriptors()->GetFieldIndex(
387 transition->LastAdded()); 387 transition->LastAdded());
388 388
389 // Adjust for the number of properties stored in the object. Even in the 389 // Adjust for the number of properties stored in the object. Even in the
390 // face of a transition we can use the old map here because the size of the 390 // face of a transition we can use the old map here because the size of the
(...skipping 48 matching lines...)
439 storage_reg, 439 storage_reg,
440 receiver_reg, 440 receiver_reg,
441 kRAHasNotBeenSaved, 441 kRAHasNotBeenSaved,
442 kDontSaveFPRegs, 442 kDontSaveFPRegs,
443 EMIT_REMEMBERED_SET, 443 EMIT_REMEMBERED_SET,
444 smi_check); 444 smi_check);
445 } 445 }
446 } 446 }
447 447
448 // Return the value (register v0). 448 // Return the value (register v0).
449 ASSERT(value_reg.is(a0)); 449 DCHECK(value_reg.is(a0));
450 __ bind(&exit); 450 __ bind(&exit);
451 __ Ret(USE_DELAY_SLOT); 451 __ Ret(USE_DELAY_SLOT);
452 __ mov(v0, a0); 452 __ mov(v0, a0);
453 } 453 }
454 454
455 455
456 // Generate StoreField code, value is passed in a0 register. 456 // Generate StoreField code, value is passed in a0 register.
457 // When leaving generated code after success, the receiver_reg and name_reg 457 // When leaving generated code after success, the receiver_reg and name_reg
458 // may be clobbered. Upon branch to miss_label, the receiver and name 458 // may be clobbered. Upon branch to miss_label, the receiver and name
459 // registers have their original values. 459 // registers have their original values.
460 void NamedStoreHandlerCompiler::GenerateStoreField( 460 void NamedStoreHandlerCompiler::GenerateStoreField(
461 MacroAssembler* masm, Handle<JSObject> object, LookupResult* lookup, 461 MacroAssembler* masm, Handle<JSObject> object, LookupResult* lookup,
462 Register receiver_reg, Register name_reg, Register value_reg, 462 Register receiver_reg, Register name_reg, Register value_reg,
463 Register scratch1, Register scratch2, Label* miss_label) { 463 Register scratch1, Register scratch2, Label* miss_label) {
464 // a0 : value 464 // a0 : value
465 Label exit; 465 Label exit;
466 466
467 // Stub never generated for non-global objects that require access 467 // Stub never generated for non-global objects that require access
468 // checks. 468 // checks.
469 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); 469 DCHECK(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
470 470
471 FieldIndex index = lookup->GetFieldIndex(); 471 FieldIndex index = lookup->GetFieldIndex();
472 472
473 Representation representation = lookup->representation(); 473 Representation representation = lookup->representation();
474 ASSERT(!representation.IsNone()); 474 DCHECK(!representation.IsNone());
475 if (representation.IsSmi()) { 475 if (representation.IsSmi()) {
476 __ JumpIfNotSmi(value_reg, miss_label); 476 __ JumpIfNotSmi(value_reg, miss_label);
477 } else if (representation.IsHeapObject()) { 477 } else if (representation.IsHeapObject()) {
478 __ JumpIfSmi(value_reg, miss_label); 478 __ JumpIfSmi(value_reg, miss_label);
479 HeapType* field_type = lookup->GetFieldType(); 479 HeapType* field_type = lookup->GetFieldType();
480 HeapType::Iterator<Map> it = field_type->Classes(); 480 HeapType::Iterator<Map> it = field_type->Classes();
481 if (!it.Done()) { 481 if (!it.Done()) {
482 __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); 482 __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
483 Label do_store; 483 Label do_store;
484 Handle<Map> current; 484 Handle<Map> current;
(...skipping 28 matching lines...)
513 __ jmp(&do_store); 513 __ jmp(&do_store);
514 514
515 __ bind(&heap_number); 515 __ bind(&heap_number);
516 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, 516 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
517 miss_label, DONT_DO_SMI_CHECK); 517 miss_label, DONT_DO_SMI_CHECK);
518 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); 518 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
519 519
520 __ bind(&do_store); 520 __ bind(&do_store);
521 __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); 521 __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
522 // Return the value (register v0). 522 // Return the value (register v0).
523 ASSERT(value_reg.is(a0)); 523 DCHECK(value_reg.is(a0));
524 __ Ret(USE_DELAY_SLOT); 524 __ Ret(USE_DELAY_SLOT);
525 __ mov(v0, a0); 525 __ mov(v0, a0);
526 return; 526 return;
527 } 527 }
528 528
529 // TODO(verwaest): Share this code as a code stub. 529 // TODO(verwaest): Share this code as a code stub.
530 SmiCheck smi_check = representation.IsTagged() 530 SmiCheck smi_check = representation.IsTagged()
531 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; 531 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
532 if (index.is_inobject()) { 532 if (index.is_inobject()) {
533 // Set the property straight into the object. 533 // Set the property straight into the object.
(...skipping 34 matching lines...)
568 name_reg, 568 name_reg,
569 receiver_reg, 569 receiver_reg,
570 kRAHasNotBeenSaved, 570 kRAHasNotBeenSaved,
571 kDontSaveFPRegs, 571 kDontSaveFPRegs,
572 EMIT_REMEMBERED_SET, 572 EMIT_REMEMBERED_SET,
573 smi_check); 573 smi_check);
574 } 574 }
575 } 575 }
576 576
577 // Return the value (register v0). 577 // Return the value (register v0).
578 ASSERT(value_reg.is(a0)); 578 DCHECK(value_reg.is(a0));
579 __ bind(&exit); 579 __ bind(&exit);
580 __ Ret(USE_DELAY_SLOT); 580 __ Ret(USE_DELAY_SLOT);
581 __ mov(v0, a0); 581 __ mov(v0, a0);
582 } 582 }
583 583
584 584
585 void NamedStoreHandlerCompiler::GenerateRestoreName(MacroAssembler* masm, 585 void NamedStoreHandlerCompiler::GenerateRestoreName(MacroAssembler* masm,
586 Label* label, 586 Label* label,
587 Handle<Name> name) { 587 Handle<Name> name) {
588 if (!label->is_unused()) { 588 if (!label->is_unused()) {
589 __ bind(label); 589 __ bind(label);
590 __ li(this->name(), Operand(name)); 590 __ li(this->name(), Operand(name));
591 } 591 }
592 } 592 }
593 593
594 594
595 static void PushInterceptorArguments(MacroAssembler* masm, 595 static void PushInterceptorArguments(MacroAssembler* masm,
596 Register receiver, 596 Register receiver,
597 Register holder, 597 Register holder,
598 Register name, 598 Register name,
599 Handle<JSObject> holder_obj) { 599 Handle<JSObject> holder_obj) {
600 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); 600 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
601 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); 601 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
602 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); 602 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
603 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); 603 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
604 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); 604 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
605 __ push(name); 605 __ push(name);
606 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); 606 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
607 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor)); 607 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
608 Register scratch = name; 608 Register scratch = name;
609 __ li(scratch, Operand(interceptor)); 609 __ li(scratch, Operand(interceptor));
610 __ Push(scratch, receiver, holder); 610 __ Push(scratch, receiver, holder);
611 } 611 }
612 612
613 613
614 static void CompileCallLoadPropertyWithInterceptor( 614 static void CompileCallLoadPropertyWithInterceptor(
615 MacroAssembler* masm, 615 MacroAssembler* masm,
616 Register receiver, 616 Register receiver,
617 Register holder, 617 Register holder,
618 Register name, 618 Register name,
619 Handle<JSObject> holder_obj, 619 Handle<JSObject> holder_obj,
620 IC::UtilityId id) { 620 IC::UtilityId id) {
621 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 621 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
622 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()), 622 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
623 NamedLoadHandlerCompiler::kInterceptorArgsLength); 623 NamedLoadHandlerCompiler::kInterceptorArgsLength);
624 } 624 }
625 625
626 626
627 // Generate call to api function. 627 // Generate call to api function.
628 void PropertyHandlerCompiler::GenerateFastApiCall( 628 void PropertyHandlerCompiler::GenerateFastApiCall(
629 MacroAssembler* masm, const CallOptimization& optimization, 629 MacroAssembler* masm, const CallOptimization& optimization,
630 Handle<Map> receiver_map, Register receiver, Register scratch_in, 630 Handle<Map> receiver_map, Register receiver, Register scratch_in,
631 bool is_store, int argc, Register* values) { 631 bool is_store, int argc, Register* values) {
632 ASSERT(!receiver.is(scratch_in)); 632 DCHECK(!receiver.is(scratch_in));
633 // Preparing to push, adjust sp. 633 // Preparing to push, adjust sp.
634 __ Subu(sp, sp, Operand((argc + 1) * kPointerSize)); 634 __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
635 __ sw(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver. 635 __ sw(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver.
636 // Write the arguments to stack frame. 636 // Write the arguments to stack frame.
637 for (int i = 0; i < argc; i++) { 637 for (int i = 0; i < argc; i++) {
638 Register arg = values[argc-1-i]; 638 Register arg = values[argc-1-i];
639 ASSERT(!receiver.is(arg)); 639 DCHECK(!receiver.is(arg));
640 ASSERT(!scratch_in.is(arg)); 640 DCHECK(!scratch_in.is(arg));
641 __ sw(arg, MemOperand(sp, (argc-1-i) * kPointerSize)); // Push arg. 641 __ sw(arg, MemOperand(sp, (argc-1-i) * kPointerSize)); // Push arg.
642 } 642 }
643 ASSERT(optimization.is_simple_api_call()); 643 DCHECK(optimization.is_simple_api_call());
644 644
645 // Abi for CallApiFunctionStub. 645 // Abi for CallApiFunctionStub.
646 Register callee = a0; 646 Register callee = a0;
647 Register call_data = t0; 647 Register call_data = t0;
648 Register holder = a2; 648 Register holder = a2;
649 Register api_function_address = a1; 649 Register api_function_address = a1;
650 650
651 // Put holder in place. 651 // Put holder in place.
652 CallOptimization::HolderLookup holder_lookup; 652 CallOptimization::HolderLookup holder_lookup;
653 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType( 653 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
(...skipping 56 matching lines...)
710 #define __ ACCESS_MASM(masm()) 710 #define __ ACCESS_MASM(masm())
711 711
712 712
713 Register PropertyHandlerCompiler::CheckPrototypes( 713 Register PropertyHandlerCompiler::CheckPrototypes(
714 Register object_reg, Register holder_reg, Register scratch1, 714 Register object_reg, Register holder_reg, Register scratch1,
715 Register scratch2, Handle<Name> name, Label* miss, 715 Register scratch2, Handle<Name> name, Label* miss,
716 PrototypeCheckType check) { 716 PrototypeCheckType check) {
717 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); 717 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
718 718
719 // Make sure there's no overlap between holder and object registers. 719 // Make sure there's no overlap between holder and object registers.
720 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); 720 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
721 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) 721 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
722 && !scratch2.is(scratch1)); 722 && !scratch2.is(scratch1));
723 723
724 // Keep track of the current object in register reg. 724 // Keep track of the current object in register reg.
725 Register reg = object_reg; 725 Register reg = object_reg;
726 int depth = 0; 726 int depth = 0;
727 727
728 Handle<JSObject> current = Handle<JSObject>::null(); 728 Handle<JSObject> current = Handle<JSObject>::null();
729 if (type()->IsConstant()) { 729 if (type()->IsConstant()) {
730 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); 730 current = Handle<JSObject>::cast(type()->AsConstant()->Value());
731 } 731 }
732 Handle<JSObject> prototype = Handle<JSObject>::null(); 732 Handle<JSObject> prototype = Handle<JSObject>::null();
733 Handle<Map> current_map = receiver_map; 733 Handle<Map> current_map = receiver_map;
734 Handle<Map> holder_map(holder()->map()); 734 Handle<Map> holder_map(holder()->map());
735 // Traverse the prototype chain and check the maps in the prototype chain for 735 // Traverse the prototype chain and check the maps in the prototype chain for
736 // fast and global objects or do negative lookup for normal objects. 736 // fast and global objects or do negative lookup for normal objects.
737 while (!current_map.is_identical_to(holder_map)) { 737 while (!current_map.is_identical_to(holder_map)) {
738 ++depth; 738 ++depth;
739 739
740 // Only global objects and objects that do not require access 740 // Only global objects and objects that do not require access
741 // checks are allowed in stubs. 741 // checks are allowed in stubs.
742 ASSERT(current_map->IsJSGlobalProxyMap() || 742 DCHECK(current_map->IsJSGlobalProxyMap() ||
743 !current_map->is_access_check_needed()); 743 !current_map->is_access_check_needed());
744 744
745 prototype = handle(JSObject::cast(current_map->prototype())); 745 prototype = handle(JSObject::cast(current_map->prototype()));
746 if (current_map->is_dictionary_map() && 746 if (current_map->is_dictionary_map() &&
747 !current_map->IsJSGlobalObjectMap() && 747 !current_map->IsJSGlobalObjectMap() &&
748 !current_map->IsJSGlobalProxyMap()) { 748 !current_map->IsJSGlobalProxyMap()) {
749 if (!name->IsUniqueName()) { 749 if (!name->IsUniqueName()) {
750 ASSERT(name->IsString()); 750 DCHECK(name->IsString());
751 name = factory()->InternalizeString(Handle<String>::cast(name)); 751 name = factory()->InternalizeString(Handle<String>::cast(name));
752 } 752 }
753 ASSERT(current.is_null() || 753 DCHECK(current.is_null() ||
754 current->property_dictionary()->FindEntry(name) == 754 current->property_dictionary()->FindEntry(name) ==
755 NameDictionary::kNotFound); 755 NameDictionary::kNotFound);
756 756
757 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, 757 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
758 scratch1, scratch2); 758 scratch1, scratch2);
759 759
760 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); 760 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
761 reg = holder_reg; // From now on the object will be in holder_reg. 761 reg = holder_reg; // From now on the object will be in holder_reg.
762 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); 762 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
763 } else { 763 } else {
(...skipping 38 matching lines...)
802 802
803 // Log the check depth. 803 // Log the check depth.
804 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); 804 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
805 805
806 if (depth != 0 || check == CHECK_ALL_MAPS) { 806 if (depth != 0 || check == CHECK_ALL_MAPS) {
807 // Check the holder map. 807 // Check the holder map.
808 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK); 808 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
809 } 809 }
810 810
811 // Perform security check for access to the global object. 811 // Perform security check for access to the global object.
812 ASSERT(current_map->IsJSGlobalProxyMap() || 812 DCHECK(current_map->IsJSGlobalProxyMap() ||
813 !current_map->is_access_check_needed()); 813 !current_map->is_access_check_needed());
814 if (current_map->IsJSGlobalProxyMap()) { 814 if (current_map->IsJSGlobalProxyMap()) {
815 __ CheckAccessGlobalProxy(reg, scratch1, miss); 815 __ CheckAccessGlobalProxy(reg, scratch1, miss);
816 } 816 }
817 817
818 // Return the register containing the holder. 818 // Return the register containing the holder.
819 return reg; 819 return reg;
820 } 820 }
821 821
822 822
(...skipping 20 matching lines...)
843 843
844 844
845 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg, 845 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg,
846 Handle<Name> name, 846 Handle<Name> name,
847 Handle<Object> callback) { 847 Handle<Object> callback) {
848 Label miss; 848 Label miss;
849 849
850 Register reg = FrontendHeader(object_reg, name, &miss); 850 Register reg = FrontendHeader(object_reg, name, &miss);
851 851
852 if (!holder()->HasFastProperties()) { 852 if (!holder()->HasFastProperties()) {
853 ASSERT(!holder()->IsGlobalObject()); 853 DCHECK(!holder()->IsGlobalObject());
854 ASSERT(!reg.is(scratch2())); 854 DCHECK(!reg.is(scratch2()));
855 ASSERT(!reg.is(scratch3())); 855 DCHECK(!reg.is(scratch3()));
856 ASSERT(!reg.is(scratch4())); 856 DCHECK(!reg.is(scratch4()));
857 857
858 // Load the properties dictionary. 858 // Load the properties dictionary.
859 Register dictionary = scratch4(); 859 Register dictionary = scratch4();
860 __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset)); 860 __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
861 861
862 // Probe the dictionary. 862 // Probe the dictionary.
863 Label probe_done; 863 Label probe_done;
864 NameDictionaryLookupStub::GeneratePositiveLookup(masm(), 864 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
865 &miss, 865 &miss,
866 &probe_done, 866 &probe_done,
(...skipping 37 matching lines...)
904 Register reg, Handle<ExecutableAccessorInfo> callback) { 904 Register reg, Handle<ExecutableAccessorInfo> callback) {
905 // Build AccessorInfo::args_ list on the stack and push property name below 905 // Build AccessorInfo::args_ list on the stack and push property name below
906 // the exit frame to make GC aware of them and store pointers to them. 906 // the exit frame to make GC aware of them and store pointers to them.
907 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); 907 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
908 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); 908 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
909 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); 909 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
910 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); 910 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
911 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); 911 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
912 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); 912 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
913 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); 913 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
914 ASSERT(!scratch2().is(reg)); 914 DCHECK(!scratch2().is(reg));
915 ASSERT(!scratch3().is(reg)); 915 DCHECK(!scratch3().is(reg));
916 ASSERT(!scratch4().is(reg)); 916 DCHECK(!scratch4().is(reg));
917 __ push(receiver()); 917 __ push(receiver());
918 if (heap()->InNewSpace(callback->data())) { 918 if (heap()->InNewSpace(callback->data())) {
919 __ li(scratch3(), callback); 919 __ li(scratch3(), callback);
920 __ lw(scratch3(), FieldMemOperand(scratch3(), 920 __ lw(scratch3(), FieldMemOperand(scratch3(),
921 ExecutableAccessorInfo::kDataOffset)); 921 ExecutableAccessorInfo::kDataOffset));
922 } else { 922 } else {
923 __ li(scratch3(), Handle<Object>(callback->data(), isolate())); 923 __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
924 } 924 }
925 __ Subu(sp, sp, 6 * kPointerSize); 925 __ Subu(sp, sp, 6 * kPointerSize);
926 __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize)); 926 __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
(...skipping 18 matching lines...)
945 __ li(getter_address_reg, Operand(ref)); 945 __ li(getter_address_reg, Operand(ref));
946 946
947 CallApiGetterStub stub(isolate()); 947 CallApiGetterStub stub(isolate());
948 __ TailCallStub(&stub); 948 __ TailCallStub(&stub);
949 } 949 }
950 950
951 951
952 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg, 952 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg,
953 LookupResult* lookup, 953 LookupResult* lookup,
954 Handle<Name> name) { 954 Handle<Name> name) {
955 ASSERT(holder()->HasNamedInterceptor()); 955 DCHECK(holder()->HasNamedInterceptor());
956 ASSERT(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); 956 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
957 957
958 // So far the most popular follow ups for interceptor loads are FIELD 958 // So far the most popular follow ups for interceptor loads are FIELD
959 // and CALLBACKS, so inline only them, other cases may be added 959 // and CALLBACKS, so inline only them, other cases may be added
960 // later. 960 // later.
961 bool compile_followup_inline = false; 961 bool compile_followup_inline = false;
962 if (lookup->IsFound() && lookup->IsCacheable()) { 962 if (lookup->IsFound() && lookup->IsCacheable()) {
963 if (lookup->IsField()) { 963 if (lookup->IsField()) {
964 compile_followup_inline = true; 964 compile_followup_inline = true;
965 } else if (lookup->type() == CALLBACKS && 965 } else if (lookup->type() == CALLBACKS &&
966 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) { 966 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
967 Handle<ExecutableAccessorInfo> callback( 967 Handle<ExecutableAccessorInfo> callback(
968 ExecutableAccessorInfo::cast(lookup->GetCallbackObject())); 968 ExecutableAccessorInfo::cast(lookup->GetCallbackObject()));
969 compile_followup_inline = 969 compile_followup_inline =
970 callback->getter() != NULL && 970 callback->getter() != NULL &&
971 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback, 971 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback,
972 type()); 972 type());
973 } 973 }
974 } 974 }
975 975
976 if (compile_followup_inline) { 976 if (compile_followup_inline) {
977 // Compile the interceptor call, followed by inline code to load the 977 // Compile the interceptor call, followed by inline code to load the
978 // property from further up the prototype chain if the call fails. 978 // property from further up the prototype chain if the call fails.
979 // Check that the maps haven't changed. 979 // Check that the maps haven't changed.
980 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1())); 980 DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
981 981
982 // Preserve the receiver register explicitly whenever it is different from 982 // Preserve the receiver register explicitly whenever it is different from
983 // the holder and it is needed should the interceptor return without any 983 // the holder and it is needed should the interceptor return without any
984 // result. The CALLBACKS case needs the receiver to be passed into C++ code, 984 // result. The CALLBACKS case needs the receiver to be passed into C++ code,
985 // the FIELD case might cause a miss during the prototype check. 985 // the FIELD case might cause a miss during the prototype check.
986 bool must_perfrom_prototype_check = *holder() != lookup->holder(); 986 bool must_perfrom_prototype_check = *holder() != lookup->holder();
987 bool must_preserve_receiver_reg = !receiver().is(holder_reg) && 987 bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
988 (lookup->type() == CALLBACKS || must_perfrom_prototype_check); 988 (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
989 989
990 // Save necessary data before invoking an interceptor. 990 // Save necessary data before invoking an interceptor.
(...skipping 132 matching lines...)
1123 Register name = LoadIC::NameRegister(); 1123 Register name = LoadIC::NameRegister();
1124 static Register registers[] = { receiver, name, a3, a0, t0, t1 }; 1124 static Register registers[] = { receiver, name, a3, a0, t0, t1 };
1125 return registers; 1125 return registers;
1126 } 1126 }
1127 1127
1128 1128
1129 Register* PropertyAccessCompiler::store_calling_convention() { 1129 Register* PropertyAccessCompiler::store_calling_convention() {
1130 // receiver, name, scratch1, scratch2, scratch3. 1130 // receiver, name, scratch1, scratch2, scratch3.
1131 Register receiver = StoreIC::ReceiverRegister(); 1131 Register receiver = StoreIC::ReceiverRegister();
1132 Register name = StoreIC::NameRegister(); 1132 Register name = StoreIC::NameRegister();
1133 ASSERT(a3.is(KeyedStoreIC::MapRegister())); 1133 DCHECK(a3.is(KeyedStoreIC::MapRegister()));
1134 static Register registers[] = { receiver, name, a3, t0, t1 }; 1134 static Register registers[] = { receiver, name, a3, t0, t1 };
1135 return registers; 1135 return registers;
1136 } 1136 }
1137 1137
1138 1138
1139 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } 1139 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); }
1140 1140
1141 1141
1142 #undef __ 1142 #undef __
1143 #define __ ACCESS_MASM(masm) 1143 #define __ ACCESS_MASM(masm)
(...skipping 86 matching lines...)
1230 } 1230 }
1231 } 1231 }
1232 1232
1233 Label number_case; 1233 Label number_case;
1234 Register match = scratch2(); 1234 Register match = scratch2();
1235 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; 1235 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1236 __ JumpIfSmi(receiver(), smi_target, match); // Reg match is 0 if Smi. 1236 __ JumpIfSmi(receiver(), smi_target, match); // Reg match is 0 if Smi.
1237 1237
1238 // Polymorphic keyed stores may use the map register 1238 // Polymorphic keyed stores may use the map register
1239 Register map_reg = scratch1(); 1239 Register map_reg = scratch1();
1240 ASSERT(kind() != Code::KEYED_STORE_IC || 1240 DCHECK(kind() != Code::KEYED_STORE_IC ||
1241 map_reg.is(KeyedStoreIC::MapRegister())); 1241 map_reg.is(KeyedStoreIC::MapRegister()));
1242 1242
1243 int receiver_count = types->length(); 1243 int receiver_count = types->length();
1244 int number_of_handled_maps = 0; 1244 int number_of_handled_maps = 0;
1245 __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset)); 1245 __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
1246 for (int current = 0; current < receiver_count; ++current) { 1246 for (int current = 0; current < receiver_count; ++current) {
1247 Handle<HeapType> type = types->at(current); 1247 Handle<HeapType> type = types->at(current);
1248 Handle<Map> map = IC::TypeToMap(*type, isolate()); 1248 Handle<Map> map = IC::TypeToMap(*type, isolate());
1249 if (!map->is_deprecated()) { 1249 if (!map->is_deprecated()) {
1250 number_of_handled_maps++; 1250 number_of_handled_maps++;
1251 // Check map and tail call if there's a match. 1251 // Check map and tail call if there's a match.
1252 // Separate compare from branch, to provide path for above JumpIfSmi(). 1252 // Separate compare from branch, to provide path for above JumpIfSmi().
1253 __ Subu(match, map_reg, Operand(map)); 1253 __ Subu(match, map_reg, Operand(map));
1254 if (type->Is(HeapType::Number())) { 1254 if (type->Is(HeapType::Number())) {
1255 ASSERT(!number_case.is_unused()); 1255 DCHECK(!number_case.is_unused());
1256 __ bind(&number_case); 1256 __ bind(&number_case);
1257 } 1257 }
1258 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, 1258 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
1259 eq, match, Operand(zero_reg)); 1259 eq, match, Operand(zero_reg));
1260 } 1260 }
1261 } 1261 }
1262 ASSERT(number_of_handled_maps != 0); 1262 DCHECK(number_of_handled_maps != 0);
1263 1263
1264 __ bind(&miss); 1264 __ bind(&miss);
1265 TailCallBuiltin(masm(), MissBuiltin(kind())); 1265 TailCallBuiltin(masm(), MissBuiltin(kind()));
1266 1266
1267 // Return the generated code. 1267 // Return the generated code.
1268 InlineCacheState state = 1268 InlineCacheState state =
1269 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; 1269 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1270 return GetCode(kind(), type, name, state); 1270 return GetCode(kind(), type, name, state);
1271 } 1271 }
1272 1272
(...skipping 42 matching lines...)
1315 #define __ ACCESS_MASM(masm) 1315 #define __ ACCESS_MASM(masm)
1316 1316
1317 1317
1318 void ElementHandlerCompiler::GenerateLoadDictionaryElement( 1318 void ElementHandlerCompiler::GenerateLoadDictionaryElement(
1319 MacroAssembler* masm) { 1319 MacroAssembler* masm) {
1320 // The return address is in ra. 1320 // The return address is in ra.
1321 Label slow, miss; 1321 Label slow, miss;
1322 1322
1323 Register key = LoadIC::NameRegister(); 1323 Register key = LoadIC::NameRegister();
1324 Register receiver = LoadIC::ReceiverRegister(); 1324 Register receiver = LoadIC::ReceiverRegister();
1325 ASSERT(receiver.is(a1)); 1325 DCHECK(receiver.is(a1));
1326 ASSERT(key.is(a2)); 1326 DCHECK(key.is(a2));
1327 1327
1328 __ UntagAndJumpIfNotSmi(t2, key, &miss); 1328 __ UntagAndJumpIfNotSmi(t2, key, &miss);
1329 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset)); 1329 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
1330 __ LoadFromNumberDictionary(&slow, t0, key, v0, t2, a3, t1); 1330 __ LoadFromNumberDictionary(&slow, t0, key, v0, t2, a3, t1);
1331 __ Ret(); 1331 __ Ret();
1332 1332
1333 // Slow case, key and receiver still unmodified. 1333 // Slow case, key and receiver still unmodified.
1334 __ bind(&slow); 1334 __ bind(&slow);
1335 __ IncrementCounter( 1335 __ IncrementCounter(
1336 masm->isolate()->counters()->keyed_load_external_array_slow(), 1336 masm->isolate()->counters()->keyed_load_external_array_slow(),
1337 1, a2, a3); 1337 1, a2, a3);
1338 1338
1339 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow); 1339 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1340 1340
1341 // Miss case, call the runtime. 1341 // Miss case, call the runtime.
1342 __ bind(&miss); 1342 __ bind(&miss);
1343 1343
1344 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); 1344 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1345 } 1345 }
1346 1346
1347 1347
1348 #undef __ 1348 #undef __
1349 1349
1350 } } // namespace v8::internal 1350 } } // namespace v8::internal
1351 1351
1352 #endif // V8_TARGET_ARCH_MIPS 1352 #endif // V8_TARGET_ARCH_MIPS
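One detail worth a second look in the probe code near the top of the diff: each stub cache entry holds three pointer-sized fields (name, code, map), hence the DCHECK(sizeof(Entry) == 12) on 32-bit MIPS, and the hashed offset is multiplied by 3 with a shift-and-add pair instead of a multiply instruction. The stand-alone sketch below reproduces only that strength reduction; ScaleEntryOffset() is a hypothetical name, not part of V8.

#include <cassert>
#include <cstdint>

// Mirrors the sll/Addu pair in ProbeTable(): offset * 3 == (offset << 1) + offset.
uint32_t ScaleEntryOffset(uint32_t offset) {
  uint32_t scratch = offset << 1;  // __ sll(offset_scratch, offset, 1);
  scratch += offset;               // __ Addu(offset_scratch, offset_scratch, offset);
  return scratch;
}

int main() {
  // Exhaustively confirm the identity for small offsets.
  for (uint32_t offset = 0; offset < 256; ++offset) {
    assert(ScaleEntryOffset(offset) == offset * 3);
  }
  return 0;
}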