Chromium Code Reviews

Side by Side Diff: src/mips64/stub-cache-mips64.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes | Created 6 years, 4 months ago
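Context for the rename: the patch mechanically replaces V8's debug-only ASSERT* macros with the Chromium-style DCHECK* names, while compile-time checks such as STATIC_ASSERT are left untouched, as the diff below shows. As a rough illustration of what such a debug-only check does, here is a minimal, self-contained sketch; the macro definition is a simplified stand-in written for this note and is not V8's actual DCHECK implementation.

// Simplified, illustrative debug-only check in the spirit of DCHECK.
// Only the usage pattern mirrors the checks in this file; the definition
// here is an assumption for demonstration, not V8's real macro.
#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
#define DCHECK(condition)                                          \
  do {                                                             \
    if (!(condition)) {                                            \
      std::fprintf(stderr, "%s:%d: DCHECK failed: %s\n", __FILE__, \
                   __LINE__, #condition);                          \
      std::abort();                                                \
    }                                                              \
  } while (false)
#else
#define DCHECK(condition) ((void)0)  // Compiles away in release builds.
#endif

int main() {
  int value_off_addr = 16;
  int key_off_addr = 4;
  // Debug-only invariants, mirroring the style of checks in this file.
  DCHECK(value_off_addr > key_off_addr);
  DCHECK((value_off_addr - key_off_addr) % 4 == 0);
  return 0;
}

Because the condition is not evaluated in release builds, such checks can be scattered freely through hot stub-generation code without runtime cost.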
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_MIPS64 7 #if V8_TARGET_ARCH_MIPS64
8 8
9 #include "src/codegen.h" 9 #include "src/codegen.h"
10 #include "src/ic-inl.h" 10 #include "src/ic-inl.h"
(...skipping 18 matching lines...)
29 Register offset_scratch) { 29 Register offset_scratch) {
30 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); 30 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
31 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); 31 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
32 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); 32 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
33 33
34 uint64_t key_off_addr = reinterpret_cast<uint64_t>(key_offset.address()); 34 uint64_t key_off_addr = reinterpret_cast<uint64_t>(key_offset.address());
35 uint64_t value_off_addr = reinterpret_cast<uint64_t>(value_offset.address()); 35 uint64_t value_off_addr = reinterpret_cast<uint64_t>(value_offset.address());
36 uint64_t map_off_addr = reinterpret_cast<uint64_t>(map_offset.address()); 36 uint64_t map_off_addr = reinterpret_cast<uint64_t>(map_offset.address());
37 37
38 // Check the relative positions of the address fields. 38 // Check the relative positions of the address fields.
39 ASSERT(value_off_addr > key_off_addr); 39 DCHECK(value_off_addr > key_off_addr);
40 ASSERT((value_off_addr - key_off_addr) % 4 == 0); 40 DCHECK((value_off_addr - key_off_addr) % 4 == 0);
41 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); 41 DCHECK((value_off_addr - key_off_addr) < (256 * 4));
42 ASSERT(map_off_addr > key_off_addr); 42 DCHECK(map_off_addr > key_off_addr);
43 ASSERT((map_off_addr - key_off_addr) % 4 == 0); 43 DCHECK((map_off_addr - key_off_addr) % 4 == 0);
44 ASSERT((map_off_addr - key_off_addr) < (256 * 4)); 44 DCHECK((map_off_addr - key_off_addr) < (256 * 4));
45 45
46 Label miss; 46 Label miss;
47 Register base_addr = scratch; 47 Register base_addr = scratch;
48 scratch = no_reg; 48 scratch = no_reg;
49 49
50 // Multiply by 3 because there are 3 fields per entry (name, code, map). 50 // Multiply by 3 because there are 3 fields per entry (name, code, map).
51 __ dsll(offset_scratch, offset, 1); 51 __ dsll(offset_scratch, offset, 1);
52 __ Daddu(offset_scratch, offset_scratch, offset); 52 __ Daddu(offset_scratch, offset_scratch, offset);
53 53
54 // Calculate the base address of the entry. 54 // Calculate the base address of the entry.
(...skipping 35 matching lines...)
90 __ Jump(at); 90 __ Jump(at);
91 91
92 // Miss: fall through. 92 // Miss: fall through.
93 __ bind(&miss); 93 __ bind(&miss);
94 } 94 }
95 95
96 96
97 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( 97 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
98 MacroAssembler* masm, Label* miss_label, Register receiver, 98 MacroAssembler* masm, Label* miss_label, Register receiver,
99 Handle<Name> name, Register scratch0, Register scratch1) { 99 Handle<Name> name, Register scratch0, Register scratch1) {
100 ASSERT(name->IsUniqueName()); 100 DCHECK(name->IsUniqueName());
101 ASSERT(!receiver.is(scratch0)); 101 DCHECK(!receiver.is(scratch0));
102 Counters* counters = masm->isolate()->counters(); 102 Counters* counters = masm->isolate()->counters();
103 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); 103 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
104 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); 104 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
105 105
106 Label done; 106 Label done;
107 107
108 const int kInterceptorOrAccessCheckNeededMask = 108 const int kInterceptorOrAccessCheckNeededMask =
109 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); 109 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
110 110
111 // Bail out if the receiver has a named interceptor or requires access checks. 111 // Bail out if the receiver has a named interceptor or requires access checks.
(...skipping 38 matching lines...)
150 Register name, 150 Register name,
151 Register scratch, 151 Register scratch,
152 Register extra, 152 Register extra,
153 Register extra2, 153 Register extra2,
154 Register extra3) { 154 Register extra3) {
155 Isolate* isolate = masm->isolate(); 155 Isolate* isolate = masm->isolate();
156 Label miss; 156 Label miss;
157 157
158 // Make sure that code is valid. The multiplying code relies on the 158 // Make sure that code is valid. The multiplying code relies on the
159 // entry size being 12. 159 // entry size being 12.
160 // ASSERT(sizeof(Entry) == 12); 160 // DCHECK(sizeof(Entry) == 12);
161 // ASSERT(sizeof(Entry) == 3 * kPointerSize); 161 // DCHECK(sizeof(Entry) == 3 * kPointerSize);
162 162
163 // Make sure the flags does not name a specific type. 163 // Make sure the flags does not name a specific type.
164 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 164 DCHECK(Code::ExtractTypeFromFlags(flags) == 0);
165 165
166 // Make sure that there are no register conflicts. 166 // Make sure that there are no register conflicts.
167 ASSERT(!scratch.is(receiver)); 167 DCHECK(!scratch.is(receiver));
168 ASSERT(!scratch.is(name)); 168 DCHECK(!scratch.is(name));
169 ASSERT(!extra.is(receiver)); 169 DCHECK(!extra.is(receiver));
170 ASSERT(!extra.is(name)); 170 DCHECK(!extra.is(name));
171 ASSERT(!extra.is(scratch)); 171 DCHECK(!extra.is(scratch));
172 ASSERT(!extra2.is(receiver)); 172 DCHECK(!extra2.is(receiver));
173 ASSERT(!extra2.is(name)); 173 DCHECK(!extra2.is(name));
174 ASSERT(!extra2.is(scratch)); 174 DCHECK(!extra2.is(scratch));
175 ASSERT(!extra2.is(extra)); 175 DCHECK(!extra2.is(extra));
176 176
177 // Check register validity. 177 // Check register validity.
178 ASSERT(!scratch.is(no_reg)); 178 DCHECK(!scratch.is(no_reg));
179 ASSERT(!extra.is(no_reg)); 179 DCHECK(!extra.is(no_reg));
180 ASSERT(!extra2.is(no_reg)); 180 DCHECK(!extra2.is(no_reg));
181 ASSERT(!extra3.is(no_reg)); 181 DCHECK(!extra3.is(no_reg));
182 182
183 Counters* counters = masm->isolate()->counters(); 183 Counters* counters = masm->isolate()->counters();
184 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, 184 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
185 extra2, extra3); 185 extra2, extra3);
186 186
187 // Check that the receiver isn't a smi. 187 // Check that the receiver isn't a smi.
188 __ JumpIfSmi(receiver, &miss); 188 __ JumpIfSmi(receiver, &miss);
189 189
190 // Get the map of the receiver and compute the hash. 190 // Get the map of the receiver and compute the hash.
191 __ ld(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); 191 __ ld(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
(...skipping 74 matching lines...)
266 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); 266 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
267 __ Ret(USE_DELAY_SLOT); 267 __ Ret(USE_DELAY_SLOT);
268 __ mov(v0, scratch1); 268 __ mov(v0, scratch1);
269 } 269 }
270 270
271 271
272 void PropertyHandlerCompiler::GenerateCheckPropertyCell( 272 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
273 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, 273 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
274 Register scratch, Label* miss) { 274 Register scratch, Label* miss) {
275 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); 275 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
276 ASSERT(cell->value()->IsTheHole()); 276 DCHECK(cell->value()->IsTheHole());
277 __ li(scratch, Operand(cell)); 277 __ li(scratch, Operand(cell));
278 __ ld(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); 278 __ ld(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
279 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 279 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
280 __ Branch(miss, ne, scratch, Operand(at)); 280 __ Branch(miss, ne, scratch, Operand(at));
281 } 281 }
282 282
283 283
284 // Generate StoreTransition code, value is passed in a0 register. 284 // Generate StoreTransition code, value is passed in a0 register.
285 // After executing generated code, the receiver_reg and name_reg 285 // After executing generated code, the receiver_reg and name_reg
286 // may be clobbered. 286 // may be clobbered.
287 void NamedStoreHandlerCompiler::GenerateStoreTransition( 287 void NamedStoreHandlerCompiler::GenerateStoreTransition(
288 MacroAssembler* masm, LookupResult* lookup, Handle<Map> transition, 288 MacroAssembler* masm, LookupResult* lookup, Handle<Map> transition,
289 Handle<Name> name, Register receiver_reg, Register storage_reg, 289 Handle<Name> name, Register receiver_reg, Register storage_reg,
290 Register value_reg, Register scratch1, Register scratch2, Register scratch3, 290 Register value_reg, Register scratch1, Register scratch2, Register scratch3,
291 Label* miss_label, Label* slow) { 291 Label* miss_label, Label* slow) {
292 // a0 : value. 292 // a0 : value.
293 Label exit; 293 Label exit;
294 294
295 int descriptor = transition->LastAdded(); 295 int descriptor = transition->LastAdded();
296 DescriptorArray* descriptors = transition->instance_descriptors(); 296 DescriptorArray* descriptors = transition->instance_descriptors();
297 PropertyDetails details = descriptors->GetDetails(descriptor); 297 PropertyDetails details = descriptors->GetDetails(descriptor);
298 Representation representation = details.representation(); 298 Representation representation = details.representation();
299 ASSERT(!representation.IsNone()); 299 DCHECK(!representation.IsNone());
300 300
301 if (details.type() == CONSTANT) { 301 if (details.type() == CONSTANT) {
302 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); 302 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
303 __ li(scratch1, constant); 303 __ li(scratch1, constant);
304 __ Branch(miss_label, ne, value_reg, Operand(scratch1)); 304 __ Branch(miss_label, ne, value_reg, Operand(scratch1));
305 } else if (representation.IsSmi()) { 305 } else if (representation.IsSmi()) {
306 __ JumpIfNotSmi(value_reg, miss_label); 306 __ JumpIfNotSmi(value_reg, miss_label);
307 } else if (representation.IsHeapObject()) { 307 } else if (representation.IsHeapObject()) {
308 __ JumpIfSmi(value_reg, miss_label); 308 __ JumpIfSmi(value_reg, miss_label);
309 HeapType* field_type = descriptors->GetFieldType(descriptor); 309 HeapType* field_type = descriptors->GetFieldType(descriptor);
(...skipping 29 matching lines...)
339 __ bind(&heap_number); 339 __ bind(&heap_number);
340 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, 340 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
341 miss_label, DONT_DO_SMI_CHECK); 341 miss_label, DONT_DO_SMI_CHECK);
342 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); 342 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
343 343
344 __ bind(&do_store); 344 __ bind(&do_store);
345 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); 345 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
346 } 346 }
347 347
348 // Stub never generated for objects that require access checks. 348 // Stub never generated for objects that require access checks.
349 ASSERT(!transition->is_access_check_needed()); 349 DCHECK(!transition->is_access_check_needed());
350 350
351 // Perform map transition for the receiver if necessary. 351 // Perform map transition for the receiver if necessary.
352 if (details.type() == FIELD && 352 if (details.type() == FIELD &&
353 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { 353 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
354 // The properties must be extended before we can store the value. 354 // The properties must be extended before we can store the value.
355 // We jump to a runtime call that extends the properties array. 355 // We jump to a runtime call that extends the properties array.
356 __ push(receiver_reg); 356 __ push(receiver_reg);
357 __ li(a2, Operand(transition)); 357 __ li(a2, Operand(transition));
358 __ Push(a2, a0); 358 __ Push(a2, a0);
359 __ TailCallExternalReference( 359 __ TailCallExternalReference(
(...skipping 11 matching lines...)
371 __ RecordWriteField(receiver_reg, 371 __ RecordWriteField(receiver_reg,
372 HeapObject::kMapOffset, 372 HeapObject::kMapOffset,
373 scratch1, 373 scratch1,
374 scratch2, 374 scratch2,
375 kRAHasNotBeenSaved, 375 kRAHasNotBeenSaved,
376 kDontSaveFPRegs, 376 kDontSaveFPRegs,
377 OMIT_REMEMBERED_SET, 377 OMIT_REMEMBERED_SET,
378 OMIT_SMI_CHECK); 378 OMIT_SMI_CHECK);
379 379
380 if (details.type() == CONSTANT) { 380 if (details.type() == CONSTANT) {
381 ASSERT(value_reg.is(a0)); 381 DCHECK(value_reg.is(a0));
382 __ Ret(USE_DELAY_SLOT); 382 __ Ret(USE_DELAY_SLOT);
383 __ mov(v0, a0); 383 __ mov(v0, a0);
384 return; 384 return;
385 } 385 }
386 386
387 int index = transition->instance_descriptors()->GetFieldIndex( 387 int index = transition->instance_descriptors()->GetFieldIndex(
388 transition->LastAdded()); 388 transition->LastAdded());
389 389
390 // Adjust for the number of properties stored in the object. Even in the 390 // Adjust for the number of properties stored in the object. Even in the
391 // face of a transition we can use the old map here because the size of the 391 // face of a transition we can use the old map here because the size of the
(...skipping 48 matching lines...)
440 storage_reg, 440 storage_reg,
441 receiver_reg, 441 receiver_reg,
442 kRAHasNotBeenSaved, 442 kRAHasNotBeenSaved,
443 kDontSaveFPRegs, 443 kDontSaveFPRegs,
444 EMIT_REMEMBERED_SET, 444 EMIT_REMEMBERED_SET,
445 smi_check); 445 smi_check);
446 } 446 }
447 } 447 }
448 448
449 // Return the value (register v0). 449 // Return the value (register v0).
450 ASSERT(value_reg.is(a0)); 450 DCHECK(value_reg.is(a0));
451 __ bind(&exit); 451 __ bind(&exit);
452 __ Ret(USE_DELAY_SLOT); 452 __ Ret(USE_DELAY_SLOT);
453 __ mov(v0, a0); 453 __ mov(v0, a0);
454 } 454 }
455 455
456 456
457 // Generate StoreField code, value is passed in a0 register. 457 // Generate StoreField code, value is passed in a0 register.
458 // When leaving generated code after success, the receiver_reg and name_reg 458 // When leaving generated code after success, the receiver_reg and name_reg
459 // may be clobbered. Upon branch to miss_label, the receiver and name 459 // may be clobbered. Upon branch to miss_label, the receiver and name
460 // registers have their original values. 460 // registers have their original values.
461 void NamedStoreHandlerCompiler::GenerateStoreField( 461 void NamedStoreHandlerCompiler::GenerateStoreField(
462 MacroAssembler* masm, Handle<JSObject> object, LookupResult* lookup, 462 MacroAssembler* masm, Handle<JSObject> object, LookupResult* lookup,
463 Register receiver_reg, Register name_reg, Register value_reg, 463 Register receiver_reg, Register name_reg, Register value_reg,
464 Register scratch1, Register scratch2, Label* miss_label) { 464 Register scratch1, Register scratch2, Label* miss_label) {
465 // a0 : value 465 // a0 : value
466 Label exit; 466 Label exit;
467 467
468 // Stub never generated for non-global objects that require access 468 // Stub never generated for non-global objects that require access
469 // checks. 469 // checks.
470 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); 470 DCHECK(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
471 471
472 FieldIndex index = lookup->GetFieldIndex(); 472 FieldIndex index = lookup->GetFieldIndex();
473 473
474 Representation representation = lookup->representation(); 474 Representation representation = lookup->representation();
475 ASSERT(!representation.IsNone()); 475 DCHECK(!representation.IsNone());
476 if (representation.IsSmi()) { 476 if (representation.IsSmi()) {
477 __ JumpIfNotSmi(value_reg, miss_label); 477 __ JumpIfNotSmi(value_reg, miss_label);
478 } else if (representation.IsHeapObject()) { 478 } else if (representation.IsHeapObject()) {
479 __ JumpIfSmi(value_reg, miss_label); 479 __ JumpIfSmi(value_reg, miss_label);
480 HeapType* field_type = lookup->GetFieldType(); 480 HeapType* field_type = lookup->GetFieldType();
481 HeapType::Iterator<Map> it = field_type->Classes(); 481 HeapType::Iterator<Map> it = field_type->Classes();
482 if (!it.Done()) { 482 if (!it.Done()) {
483 __ ld(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); 483 __ ld(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
484 Label do_store; 484 Label do_store;
485 Handle<Map> current; 485 Handle<Map> current;
(...skipping 28 matching lines...)
514 __ jmp(&do_store); 514 __ jmp(&do_store);
515 515
516 __ bind(&heap_number); 516 __ bind(&heap_number);
517 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, 517 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
518 miss_label, DONT_DO_SMI_CHECK); 518 miss_label, DONT_DO_SMI_CHECK);
519 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); 519 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
520 520
521 __ bind(&do_store); 521 __ bind(&do_store);
522 __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); 522 __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
523 // Return the value (register v0). 523 // Return the value (register v0).
524 ASSERT(value_reg.is(a0)); 524 DCHECK(value_reg.is(a0));
525 __ Ret(USE_DELAY_SLOT); 525 __ Ret(USE_DELAY_SLOT);
526 __ mov(v0, a0); 526 __ mov(v0, a0);
527 return; 527 return;
528 } 528 }
529 529
530 // TODO(verwaest): Share this code as a code stub. 530 // TODO(verwaest): Share this code as a code stub.
531 SmiCheck smi_check = representation.IsTagged() 531 SmiCheck smi_check = representation.IsTagged()
532 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; 532 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
533 if (index.is_inobject()) { 533 if (index.is_inobject()) {
534 // Set the property straight into the object. 534 // Set the property straight into the object.
(...skipping 34 matching lines...)
569 name_reg, 569 name_reg,
570 receiver_reg, 570 receiver_reg,
571 kRAHasNotBeenSaved, 571 kRAHasNotBeenSaved,
572 kDontSaveFPRegs, 572 kDontSaveFPRegs,
573 EMIT_REMEMBERED_SET, 573 EMIT_REMEMBERED_SET,
574 smi_check); 574 smi_check);
575 } 575 }
576 } 576 }
577 577
578 // Return the value (register v0). 578 // Return the value (register v0).
579 ASSERT(value_reg.is(a0)); 579 DCHECK(value_reg.is(a0));
580 __ bind(&exit); 580 __ bind(&exit);
581 __ Ret(USE_DELAY_SLOT); 581 __ Ret(USE_DELAY_SLOT);
582 __ mov(v0, a0); 582 __ mov(v0, a0);
583 } 583 }
584 584
585 585
586 void NamedStoreHandlerCompiler::GenerateRestoreName(MacroAssembler* masm, 586 void NamedStoreHandlerCompiler::GenerateRestoreName(MacroAssembler* masm,
587 Label* label, 587 Label* label,
588 Handle<Name> name) { 588 Handle<Name> name) {
589 if (!label->is_unused()) { 589 if (!label->is_unused()) {
590 __ bind(label); 590 __ bind(label);
591 __ li(this->name(), Operand(name)); 591 __ li(this->name(), Operand(name));
592 } 592 }
593 } 593 }
594 594
595 595
596 static void PushInterceptorArguments(MacroAssembler* masm, 596 static void PushInterceptorArguments(MacroAssembler* masm,
597 Register receiver, 597 Register receiver,
598 Register holder, 598 Register holder,
599 Register name, 599 Register name,
600 Handle<JSObject> holder_obj) { 600 Handle<JSObject> holder_obj) {
601 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); 601 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
602 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); 602 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
603 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); 603 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
604 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); 604 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
605 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); 605 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
606 __ push(name); 606 __ push(name);
607 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); 607 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
608 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor)); 608 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
609 Register scratch = name; 609 Register scratch = name;
610 __ li(scratch, Operand(interceptor)); 610 __ li(scratch, Operand(interceptor));
611 __ Push(scratch, receiver, holder); 611 __ Push(scratch, receiver, holder);
612 } 612 }
613 613
614 614
615 static void CompileCallLoadPropertyWithInterceptor( 615 static void CompileCallLoadPropertyWithInterceptor(
616 MacroAssembler* masm, 616 MacroAssembler* masm,
617 Register receiver, 617 Register receiver,
618 Register holder, 618 Register holder,
619 Register name, 619 Register name,
620 Handle<JSObject> holder_obj, 620 Handle<JSObject> holder_obj,
621 IC::UtilityId id) { 621 IC::UtilityId id) {
622 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 622 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
623 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()), 623 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
624 NamedLoadHandlerCompiler::kInterceptorArgsLength); 624 NamedLoadHandlerCompiler::kInterceptorArgsLength);
625 } 625 }
626 626
627 627
628 // Generate call to api function. 628 // Generate call to api function.
629 void PropertyHandlerCompiler::GenerateFastApiCall( 629 void PropertyHandlerCompiler::GenerateFastApiCall(
630 MacroAssembler* masm, const CallOptimization& optimization, 630 MacroAssembler* masm, const CallOptimization& optimization,
631 Handle<Map> receiver_map, Register receiver, Register scratch_in, 631 Handle<Map> receiver_map, Register receiver, Register scratch_in,
632 bool is_store, int argc, Register* values) { 632 bool is_store, int argc, Register* values) {
633 ASSERT(!receiver.is(scratch_in)); 633 DCHECK(!receiver.is(scratch_in));
634 // Preparing to push, adjust sp. 634 // Preparing to push, adjust sp.
635 __ Dsubu(sp, sp, Operand((argc + 1) * kPointerSize)); 635 __ Dsubu(sp, sp, Operand((argc + 1) * kPointerSize));
636 __ sd(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver. 636 __ sd(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver.
637 // Write the arguments to stack frame. 637 // Write the arguments to stack frame.
638 for (int i = 0; i < argc; i++) { 638 for (int i = 0; i < argc; i++) {
639 Register arg = values[argc-1-i]; 639 Register arg = values[argc-1-i];
640 ASSERT(!receiver.is(arg)); 640 DCHECK(!receiver.is(arg));
641 ASSERT(!scratch_in.is(arg)); 641 DCHECK(!scratch_in.is(arg));
642 __ sd(arg, MemOperand(sp, (argc-1-i) * kPointerSize)); // Push arg. 642 __ sd(arg, MemOperand(sp, (argc-1-i) * kPointerSize)); // Push arg.
643 } 643 }
644 ASSERT(optimization.is_simple_api_call()); 644 DCHECK(optimization.is_simple_api_call());
645 645
646 // Abi for CallApiFunctionStub. 646 // Abi for CallApiFunctionStub.
647 Register callee = a0; 647 Register callee = a0;
648 Register call_data = a4; 648 Register call_data = a4;
649 Register holder = a2; 649 Register holder = a2;
650 Register api_function_address = a1; 650 Register api_function_address = a1;
651 651
652 // Put holder in place. 652 // Put holder in place.
653 CallOptimization::HolderLookup holder_lookup; 653 CallOptimization::HolderLookup holder_lookup;
654 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType( 654 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
(...skipping 56 matching lines...)
711 #define __ ACCESS_MASM(masm()) 711 #define __ ACCESS_MASM(masm())
712 712
713 713
714 Register PropertyHandlerCompiler::CheckPrototypes( 714 Register PropertyHandlerCompiler::CheckPrototypes(
715 Register object_reg, Register holder_reg, Register scratch1, 715 Register object_reg, Register holder_reg, Register scratch1,
716 Register scratch2, Handle<Name> name, Label* miss, 716 Register scratch2, Handle<Name> name, Label* miss,
717 PrototypeCheckType check) { 717 PrototypeCheckType check) {
718 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); 718 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
719 719
720 // Make sure there's no overlap between holder and object registers. 720 // Make sure there's no overlap between holder and object registers.
721 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); 721 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
722 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) 722 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
723 && !scratch2.is(scratch1)); 723 && !scratch2.is(scratch1));
724 724
725 // Keep track of the current object in register reg. 725 // Keep track of the current object in register reg.
726 Register reg = object_reg; 726 Register reg = object_reg;
727 int depth = 0; 727 int depth = 0;
728 728
729 Handle<JSObject> current = Handle<JSObject>::null(); 729 Handle<JSObject> current = Handle<JSObject>::null();
730 if (type()->IsConstant()) { 730 if (type()->IsConstant()) {
731 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); 731 current = Handle<JSObject>::cast(type()->AsConstant()->Value());
732 } 732 }
733 Handle<JSObject> prototype = Handle<JSObject>::null(); 733 Handle<JSObject> prototype = Handle<JSObject>::null();
734 Handle<Map> current_map = receiver_map; 734 Handle<Map> current_map = receiver_map;
735 Handle<Map> holder_map(holder()->map()); 735 Handle<Map> holder_map(holder()->map());
736 // Traverse the prototype chain and check the maps in the prototype chain for 736 // Traverse the prototype chain and check the maps in the prototype chain for
737 // fast and global objects or do negative lookup for normal objects. 737 // fast and global objects or do negative lookup for normal objects.
738 while (!current_map.is_identical_to(holder_map)) { 738 while (!current_map.is_identical_to(holder_map)) {
739 ++depth; 739 ++depth;
740 740
741 // Only global objects and objects that do not require access 741 // Only global objects and objects that do not require access
742 // checks are allowed in stubs. 742 // checks are allowed in stubs.
743 ASSERT(current_map->IsJSGlobalProxyMap() || 743 DCHECK(current_map->IsJSGlobalProxyMap() ||
744 !current_map->is_access_check_needed()); 744 !current_map->is_access_check_needed());
745 745
746 prototype = handle(JSObject::cast(current_map->prototype())); 746 prototype = handle(JSObject::cast(current_map->prototype()));
747 if (current_map->is_dictionary_map() && 747 if (current_map->is_dictionary_map() &&
748 !current_map->IsJSGlobalObjectMap() && 748 !current_map->IsJSGlobalObjectMap() &&
749 !current_map->IsJSGlobalProxyMap()) { 749 !current_map->IsJSGlobalProxyMap()) {
750 if (!name->IsUniqueName()) { 750 if (!name->IsUniqueName()) {
751 ASSERT(name->IsString()); 751 DCHECK(name->IsString());
752 name = factory()->InternalizeString(Handle<String>::cast(name)); 752 name = factory()->InternalizeString(Handle<String>::cast(name));
753 } 753 }
754 ASSERT(current.is_null() || 754 DCHECK(current.is_null() ||
755 current->property_dictionary()->FindEntry(name) == 755 current->property_dictionary()->FindEntry(name) ==
756 NameDictionary::kNotFound); 756 NameDictionary::kNotFound);
757 757
758 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, 758 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
759 scratch1, scratch2); 759 scratch1, scratch2);
760 760
761 __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); 761 __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
762 reg = holder_reg; // From now on the object will be in holder_reg. 762 reg = holder_reg; // From now on the object will be in holder_reg.
763 __ ld(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); 763 __ ld(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
764 } else { 764 } else {
(...skipping 38 matching lines...)
803 803
804 // Log the check depth. 804 // Log the check depth.
805 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); 805 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
806 806
807 if (depth != 0 || check == CHECK_ALL_MAPS) { 807 if (depth != 0 || check == CHECK_ALL_MAPS) {
808 // Check the holder map. 808 // Check the holder map.
809 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK); 809 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
810 } 810 }
811 811
812 // Perform security check for access to the global object. 812 // Perform security check for access to the global object.
813 ASSERT(current_map->IsJSGlobalProxyMap() || 813 DCHECK(current_map->IsJSGlobalProxyMap() ||
814 !current_map->is_access_check_needed()); 814 !current_map->is_access_check_needed());
815 if (current_map->IsJSGlobalProxyMap()) { 815 if (current_map->IsJSGlobalProxyMap()) {
816 __ CheckAccessGlobalProxy(reg, scratch1, miss); 816 __ CheckAccessGlobalProxy(reg, scratch1, miss);
817 } 817 }
818 818
819 // Return the register containing the holder. 819 // Return the register containing the holder.
820 return reg; 820 return reg;
821 } 821 }
822 822
823 823
(...skipping 20 matching lines...)
844 844
845 845
846 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg, 846 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg,
847 Handle<Name> name, 847 Handle<Name> name,
848 Handle<Object> callback) { 848 Handle<Object> callback) {
849 Label miss; 849 Label miss;
850 850
851 Register reg = FrontendHeader(object_reg, name, &miss); 851 Register reg = FrontendHeader(object_reg, name, &miss);
852 852
853 if (!holder()->HasFastProperties()) { 853 if (!holder()->HasFastProperties()) {
854 ASSERT(!holder()->IsGlobalObject()); 854 DCHECK(!holder()->IsGlobalObject());
855 ASSERT(!reg.is(scratch2())); 855 DCHECK(!reg.is(scratch2()));
856 ASSERT(!reg.is(scratch3())); 856 DCHECK(!reg.is(scratch3()));
857 ASSERT(!reg.is(scratch4())); 857 DCHECK(!reg.is(scratch4()));
858 858
859 // Load the properties dictionary. 859 // Load the properties dictionary.
860 Register dictionary = scratch4(); 860 Register dictionary = scratch4();
861 __ ld(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset)); 861 __ ld(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
862 862
863 // Probe the dictionary. 863 // Probe the dictionary.
864 Label probe_done; 864 Label probe_done;
865 NameDictionaryLookupStub::GeneratePositiveLookup(masm(), 865 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
866 &miss, 866 &miss,
867 &probe_done, 867 &probe_done,
(...skipping 37 matching lines...)
905 Register reg, Handle<ExecutableAccessorInfo> callback) { 905 Register reg, Handle<ExecutableAccessorInfo> callback) {
906 // Build AccessorInfo::args_ list on the stack and push property name below 906 // Build AccessorInfo::args_ list on the stack and push property name below
907 // the exit frame to make GC aware of them and store pointers to them. 907 // the exit frame to make GC aware of them and store pointers to them.
908 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); 908 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
909 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); 909 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
910 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); 910 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
911 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); 911 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
912 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); 912 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
913 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); 913 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
914 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); 914 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
915 ASSERT(!scratch2().is(reg)); 915 DCHECK(!scratch2().is(reg));
916 ASSERT(!scratch3().is(reg)); 916 DCHECK(!scratch3().is(reg));
917 ASSERT(!scratch4().is(reg)); 917 DCHECK(!scratch4().is(reg));
918 __ push(receiver()); 918 __ push(receiver());
919 if (heap()->InNewSpace(callback->data())) { 919 if (heap()->InNewSpace(callback->data())) {
920 __ li(scratch3(), callback); 920 __ li(scratch3(), callback);
921 __ ld(scratch3(), FieldMemOperand(scratch3(), 921 __ ld(scratch3(), FieldMemOperand(scratch3(),
922 ExecutableAccessorInfo::kDataOffset)); 922 ExecutableAccessorInfo::kDataOffset));
923 } else { 923 } else {
924 __ li(scratch3(), Handle<Object>(callback->data(), isolate())); 924 __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
925 } 925 }
926 __ Dsubu(sp, sp, 6 * kPointerSize); 926 __ Dsubu(sp, sp, 6 * kPointerSize);
927 __ sd(scratch3(), MemOperand(sp, 5 * kPointerSize)); 927 __ sd(scratch3(), MemOperand(sp, 5 * kPointerSize));
(...skipping 18 matching lines...)
946 __ li(getter_address_reg, Operand(ref)); 946 __ li(getter_address_reg, Operand(ref));
947 947
948 CallApiGetterStub stub(isolate()); 948 CallApiGetterStub stub(isolate());
949 __ TailCallStub(&stub); 949 __ TailCallStub(&stub);
950 } 950 }
951 951
952 952
953 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg, 953 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg,
954 LookupResult* lookup, 954 LookupResult* lookup,
955 Handle<Name> name) { 955 Handle<Name> name) {
956 ASSERT(holder()->HasNamedInterceptor()); 956 DCHECK(holder()->HasNamedInterceptor());
957 ASSERT(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); 957 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
958 958
959 // So far the most popular follow ups for interceptor loads are FIELD 959 // So far the most popular follow ups for interceptor loads are FIELD
960 // and CALLBACKS, so inline only them, other cases may be added 960 // and CALLBACKS, so inline only them, other cases may be added
961 // later. 961 // later.
962 bool compile_followup_inline = false; 962 bool compile_followup_inline = false;
963 if (lookup->IsFound() && lookup->IsCacheable()) { 963 if (lookup->IsFound() && lookup->IsCacheable()) {
964 if (lookup->IsField()) { 964 if (lookup->IsField()) {
965 compile_followup_inline = true; 965 compile_followup_inline = true;
966 } else if (lookup->type() == CALLBACKS && 966 } else if (lookup->type() == CALLBACKS &&
967 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) { 967 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
968 Handle<ExecutableAccessorInfo> callback( 968 Handle<ExecutableAccessorInfo> callback(
969 ExecutableAccessorInfo::cast(lookup->GetCallbackObject())); 969 ExecutableAccessorInfo::cast(lookup->GetCallbackObject()));
970 compile_followup_inline = 970 compile_followup_inline =
971 callback->getter() != NULL && 971 callback->getter() != NULL &&
972 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback, 972 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback,
973 type()); 973 type());
974 } 974 }
975 } 975 }
976 976
977 if (compile_followup_inline) { 977 if (compile_followup_inline) {
978 // Compile the interceptor call, followed by inline code to load the 978 // Compile the interceptor call, followed by inline code to load the
979 // property from further up the prototype chain if the call fails. 979 // property from further up the prototype chain if the call fails.
980 // Check that the maps haven't changed. 980 // Check that the maps haven't changed.
981 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1())); 981 DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
982 982
983 // Preserve the receiver register explicitly whenever it is different from 983 // Preserve the receiver register explicitly whenever it is different from
984 // the holder and it is needed should the interceptor return without any 984 // the holder and it is needed should the interceptor return without any
985 // result. The CALLBACKS case needs the receiver to be passed into C++ code, 985 // result. The CALLBACKS case needs the receiver to be passed into C++ code,
986 // the FIELD case might cause a miss during the prototype check. 986 // the FIELD case might cause a miss during the prototype check.
987 bool must_perfrom_prototype_check = *holder() != lookup->holder(); 987 bool must_perfrom_prototype_check = *holder() != lookup->holder();
988 bool must_preserve_receiver_reg = !receiver().is(holder_reg) && 988 bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
989 (lookup->type() == CALLBACKS || must_perfrom_prototype_check); 989 (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
990 990
991 // Save necessary data before invoking an interceptor. 991 // Save necessary data before invoking an interceptor.
(...skipping 132 matching lines...)
1124 Register name = LoadIC::NameRegister(); 1124 Register name = LoadIC::NameRegister();
1125 static Register registers[] = { receiver, name, a3, a0, a4, a5 }; 1125 static Register registers[] = { receiver, name, a3, a0, a4, a5 };
1126 return registers; 1126 return registers;
1127 } 1127 }
1128 1128
1129 1129
1130 Register* PropertyAccessCompiler::store_calling_convention() { 1130 Register* PropertyAccessCompiler::store_calling_convention() {
1131 // receiver, name, scratch1, scratch2, scratch3. 1131 // receiver, name, scratch1, scratch2, scratch3.
1132 Register receiver = StoreIC::ReceiverRegister(); 1132 Register receiver = StoreIC::ReceiverRegister();
1133 Register name = StoreIC::NameRegister(); 1133 Register name = StoreIC::NameRegister();
1134 ASSERT(a3.is(KeyedStoreIC::MapRegister())); 1134 DCHECK(a3.is(KeyedStoreIC::MapRegister()));
1135 static Register registers[] = { receiver, name, a3, a4, a5 }; 1135 static Register registers[] = { receiver, name, a3, a4, a5 };
1136 return registers; 1136 return registers;
1137 } 1137 }
1138 1138
1139 1139
1140 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } 1140 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); }
1141 1141
1142 1142
1143 #undef __ 1143 #undef __
1144 #define __ ACCESS_MASM(masm) 1144 #define __ ACCESS_MASM(masm)
(...skipping 86 matching lines...)
1231 } 1231 }
1232 } 1232 }
1233 1233
1234 Label number_case; 1234 Label number_case;
1235 Register match = scratch2(); 1235 Register match = scratch2();
1236 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; 1236 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1237 __ JumpIfSmi(receiver(), smi_target, match); // Reg match is 0 if Smi. 1237 __ JumpIfSmi(receiver(), smi_target, match); // Reg match is 0 if Smi.
1238 1238
1239 // Polymorphic keyed stores may use the map register 1239 // Polymorphic keyed stores may use the map register
1240 Register map_reg = scratch1(); 1240 Register map_reg = scratch1();
1241 ASSERT(kind() != Code::KEYED_STORE_IC || 1241 DCHECK(kind() != Code::KEYED_STORE_IC ||
1242 map_reg.is(KeyedStoreIC::MapRegister())); 1242 map_reg.is(KeyedStoreIC::MapRegister()));
1243 1243
1244 int receiver_count = types->length(); 1244 int receiver_count = types->length();
1245 int number_of_handled_maps = 0; 1245 int number_of_handled_maps = 0;
1246 __ ld(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset)); 1246 __ ld(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
1247 for (int current = 0; current < receiver_count; ++current) { 1247 for (int current = 0; current < receiver_count; ++current) {
1248 Handle<HeapType> type = types->at(current); 1248 Handle<HeapType> type = types->at(current);
1249 Handle<Map> map = IC::TypeToMap(*type, isolate()); 1249 Handle<Map> map = IC::TypeToMap(*type, isolate());
1250 if (!map->is_deprecated()) { 1250 if (!map->is_deprecated()) {
1251 number_of_handled_maps++; 1251 number_of_handled_maps++;
1252 // Check map and tail call if there's a match. 1252 // Check map and tail call if there's a match.
1253 // Separate compare from branch, to provide path for above JumpIfSmi(). 1253 // Separate compare from branch, to provide path for above JumpIfSmi().
1254 __ Dsubu(match, map_reg, Operand(map)); 1254 __ Dsubu(match, map_reg, Operand(map));
1255 if (type->Is(HeapType::Number())) { 1255 if (type->Is(HeapType::Number())) {
1256 ASSERT(!number_case.is_unused()); 1256 DCHECK(!number_case.is_unused());
1257 __ bind(&number_case); 1257 __ bind(&number_case);
1258 } 1258 }
1259 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, 1259 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
1260 eq, match, Operand(zero_reg)); 1260 eq, match, Operand(zero_reg));
1261 } 1261 }
1262 } 1262 }
1263 ASSERT(number_of_handled_maps != 0); 1263 DCHECK(number_of_handled_maps != 0);
1264 1264
1265 __ bind(&miss); 1265 __ bind(&miss);
1266 TailCallBuiltin(masm(), MissBuiltin(kind())); 1266 TailCallBuiltin(masm(), MissBuiltin(kind()));
1267 1267
1268 // Return the generated code. 1268 // Return the generated code.
1269 InlineCacheState state = 1269 InlineCacheState state =
1270 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; 1270 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1271 return GetCode(kind(), type, name, state); 1271 return GetCode(kind(), type, name, state);
1272 } 1272 }
1273 1273
(...skipping 42 matching lines...)
1316 #define __ ACCESS_MASM(masm) 1316 #define __ ACCESS_MASM(masm)
1317 1317
1318 1318
1319 void ElementHandlerCompiler::GenerateLoadDictionaryElement( 1319 void ElementHandlerCompiler::GenerateLoadDictionaryElement(
1320 MacroAssembler* masm) { 1320 MacroAssembler* masm) {
1321 // The return address is in ra 1321 // The return address is in ra
1322 Label slow, miss; 1322 Label slow, miss;
1323 1323
1324 Register key = LoadIC::NameRegister(); 1324 Register key = LoadIC::NameRegister();
1325 Register receiver = LoadIC::ReceiverRegister(); 1325 Register receiver = LoadIC::ReceiverRegister();
1326 ASSERT(receiver.is(a1)); 1326 DCHECK(receiver.is(a1));
1327 ASSERT(key.is(a2)); 1327 DCHECK(key.is(a2));
1328 1328
1329 __ UntagAndJumpIfNotSmi(a6, key, &miss); 1329 __ UntagAndJumpIfNotSmi(a6, key, &miss);
1330 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); 1330 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset));
1331 ASSERT(kSmiTagSize + kSmiShiftSize == 32); 1331 DCHECK(kSmiTagSize + kSmiShiftSize == 32);
1332 __ LoadFromNumberDictionary(&slow, a4, key, v0, a6, a3, a5); 1332 __ LoadFromNumberDictionary(&slow, a4, key, v0, a6, a3, a5);
1333 __ Ret(); 1333 __ Ret();
1334 1334
1335 // Slow case, key and receiver still unmodified. 1335 // Slow case, key and receiver still unmodified.
1336 __ bind(&slow); 1336 __ bind(&slow);
1337 __ IncrementCounter( 1337 __ IncrementCounter(
1338 masm->isolate()->counters()->keyed_load_external_array_slow(), 1338 masm->isolate()->counters()->keyed_load_external_array_slow(),
1339 1, a2, a3); 1339 1, a2, a3);
1340 1340
1341 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow); 1341 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1342 1342
1343 // Miss case, call the runtime. 1343 // Miss case, call the runtime.
1344 __ bind(&miss); 1344 __ bind(&miss);
1345 1345
1346 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); 1346 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1347 } 1347 }
1348 1348
1349 1349
1350 #undef __ 1350 #undef __
1351 1351
1352 } } // namespace v8::internal 1352 } } // namespace v8::internal
1353 1353
1354 #endif // V8_TARGET_ARCH_MIPS64 1354 #endif // V8_TARGET_ARCH_MIPS64