OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 | 5 |
6 #include "src/v8.h" | 6 #include "src/v8.h" |
7 | 7 |
8 #if V8_TARGET_ARCH_MIPS64 | 8 #if V8_TARGET_ARCH_MIPS64 |
9 | 9 |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 455 matching lines...) |
466 | 466 |
467 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 467 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
468 // The return address is in ra. | 468 // The return address is in ra. |
469 | 469 |
470 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 470 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
471 | 471 |
472 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 472 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
473 } | 473 } |
474 | 474 |
475 | 475 |
476 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | 476 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { |
477 // The return address is in ra. | 477 // The return address is in ra. |
478 Label slow, check_name, index_smi, index_name, property_array_property; | 478 Label slow, check_name, index_smi, index_name, property_array_property; |
479 Label probe_dictionary, check_number_dictionary; | 479 Label probe_dictionary, check_number_dictionary; |
480 | 480 |
481 Register key = LoadDescriptor::NameRegister(); | 481 Register key = LoadDescriptor::NameRegister(); |
482 Register receiver = LoadDescriptor::ReceiverRegister(); | 482 Register receiver = LoadDescriptor::ReceiverRegister(); |
483 DCHECK(key.is(a2)); | 483 DCHECK(key.is(a2)); |
484 DCHECK(receiver.is(a1)); | 484 DCHECK(receiver.is(a1)); |
485 | 485 |
486 Isolate* isolate = masm->isolate(); | 486 Isolate* isolate = masm->isolate(); |
(...skipping 33 matching lines...) |
520 a3); | 520 a3); |
521 GenerateRuntimeGetProperty(masm); | 521 GenerateRuntimeGetProperty(masm); |
522 | 522 |
523 __ bind(&check_name); | 523 __ bind(&check_name); |
524 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); | 524 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); |
525 | 525 |
526 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 526 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
527 Map::kHasNamedInterceptor, &slow); | 527 Map::kHasNamedInterceptor, &slow); |
528 | 528 |
529 | 529 |
530 // If the receiver is a fast-case object, check the keyed lookup | 530 // If the receiver is a fast-case object, check the stub cache. Otherwise |
531 // cache. Otherwise probe the dictionary. | 531 // probe the dictionary. |
532 __ ld(a3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 532 __ ld(a3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
533 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset)); | 533 __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset)); |
534 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 534 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
535 __ Branch(&probe_dictionary, eq, a4, Operand(at)); | 535 __ Branch(&probe_dictionary, eq, a4, Operand(at)); |
536 | 536 |
537 // Load the map of the receiver, compute the keyed lookup cache hash | 537 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( |
538 // based on 32 bits of the map pointer and the name hash. | 538 Code::ComputeHandlerFlags(Code::LOAD_IC)); |
539 __ ld(a0, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 539 masm->isolate()->stub_cache()->GenerateProbe( |
540 __ dsll32(a3, a0, 0); | 540 masm, Code::LOAD_IC, flags, false, receiver, key, a3, a4, a5, a6); |
541 __ dsrl32(a3, a3, 0); | 541 // Cache miss. |
542 __ dsra(a3, a3, KeyedLookupCache::kMapHashShift); | 542 GenerateMiss(masm); |
543 __ lwu(a4, FieldMemOperand(key, Name::kHashFieldOffset)); | |
544 __ dsra(at, a4, Name::kHashShift); | |
545 __ xor_(a3, a3, at); | |
546 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask; | |
547 __ And(a3, a3, Operand(mask)); | |
548 | |
549 // Load the key (consisting of map and unique name) from the cache and | |
550 // check for match. | |
551 Label load_in_object_property; | |
552 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | |
553 Label hit_on_nth_entry[kEntriesPerBucket]; | |
554 ExternalReference cache_keys = | |
555 ExternalReference::keyed_lookup_cache_keys(isolate); | |
556 __ li(a4, Operand(cache_keys)); | |
557 __ dsll(at, a3, kPointerSizeLog2 + 1); | |
558 __ daddu(a4, a4, at); | |
559 | |
560 for (int i = 0; i < kEntriesPerBucket - 1; i++) { | |
561 Label try_next_entry; | |
562 __ ld(a5, MemOperand(a4, kPointerSize * i * 2)); | |
563 __ Branch(&try_next_entry, ne, a0, Operand(a5)); | |
564 __ ld(a5, MemOperand(a4, kPointerSize * (i * 2 + 1))); | |
565 __ Branch(&hit_on_nth_entry[i], eq, key, Operand(a5)); | |
566 __ bind(&try_next_entry); | |
567 } | |
568 | |
569 __ ld(a5, MemOperand(a4, kPointerSize * (kEntriesPerBucket - 1) * 2)); | |
570 __ Branch(&slow, ne, a0, Operand(a5)); | |
571 __ ld(a5, MemOperand(a4, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1))); | |
572 __ Branch(&slow, ne, key, Operand(a5)); | |
573 | |
574 // Get field offset. | |
575 // a0 : receiver's map | |
576 // a3 : lookup cache index | |
577 ExternalReference cache_field_offsets = | |
578 ExternalReference::keyed_lookup_cache_field_offsets(isolate); | |
579 | |
580 // Hit on nth entry. | |
581 for (int i = kEntriesPerBucket - 1; i >= 0; i--) { | |
582 __ bind(&hit_on_nth_entry[i]); | |
583 __ li(a4, Operand(cache_field_offsets)); | |
584 | |
585 // TODO(yy) This data structure does NOT follow natural pointer size. | |
586 __ dsll(at, a3, kPointerSizeLog2 - 1); | |
587 __ daddu(at, a4, at); | |
588 __ lwu(a5, MemOperand(at, kPointerSize / 2 * i)); | |
589 | |
590 __ lbu(a6, FieldMemOperand(a0, Map::kInObjectPropertiesOffset)); | |
591 __ Dsubu(a5, a5, a6); | |
592 __ Branch(&property_array_property, ge, a5, Operand(zero_reg)); | |
593 if (i != 0) { | |
594 __ Branch(&load_in_object_property); | |
595 } | |
596 } | |
597 | |
598 // Load in-object property. | |
599 __ bind(&load_in_object_property); | |
600 __ lbu(a6, FieldMemOperand(a0, Map::kInstanceSizeOffset)); | |
601 // Index from start of object. | |
602 __ daddu(a6, a6, a5); | |
603 // Remove the heap tag. | |
604 __ Dsubu(receiver, receiver, Operand(kHeapObjectTag)); | |
605 __ dsll(at, a6, kPointerSizeLog2); | |
606 __ daddu(at, receiver, at); | |
607 __ ld(v0, MemOperand(at)); | |
608 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1, | |
609 a4, a3); | |
610 __ Ret(); | |
611 | |
612 // Load property array property. | |
613 __ bind(&property_array_property); | |
614 __ ld(receiver, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | |
615 __ Daddu(receiver, receiver, FixedArray::kHeaderSize - kHeapObjectTag); | |
616 __ dsll(v0, a5, kPointerSizeLog2); | |
617 __ Daddu(v0, v0, a1); | |
618 __ ld(v0, MemOperand(v0)); | |
619 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1, | |
620 a4, a3); | |
621 __ Ret(); | |
622 | |
623 | 543 |
624 // Do a quick inline probe of the receiver's dictionary, if it | 544 // Do a quick inline probe of the receiver's dictionary, if it |
625 // exists. | 545 // exists. |
626 __ bind(&probe_dictionary); | 546 __ bind(&probe_dictionary); |
627 // a3: elements | 547 // a3: elements |
628 __ ld(a0, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 548 __ ld(a0, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
629 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); | 549 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); |
630 GenerateGlobalInstanceTypeCheck(masm, a0, &slow); | 550 GenerateGlobalInstanceTypeCheck(masm, a0, &slow); |
631 // Load the property to v0. | 551 // Load the property to v0. |
632 GenerateDictionaryLoad(masm, &slow, a3, key, v0, a5, a4); | 552 GenerateDictionaryLoad(masm, &slow, a3, key, v0, a5, a4); |
(...skipping 420 matching lines...) |
1053 patcher.ChangeBranchCondition(ne); | 973 patcher.ChangeBranchCondition(ne); |
1054 } else { | 974 } else { |
1055 DCHECK(Assembler::IsBne(branch_instr)); | 975 DCHECK(Assembler::IsBne(branch_instr)); |
1056 patcher.ChangeBranchCondition(eq); | 976 patcher.ChangeBranchCondition(eq); |
1057 } | 977 } |
1058 } | 978 } |
1059 } | 979 } |
1060 } // namespace v8::internal | 980 } // namespace v8::internal |
1061 | 981 |
1062 #endif // V8_TARGET_ARCH_MIPS64 | 982 #endif // V8_TARGET_ARCH_MIPS64 |
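Note on the change above: KeyedLoadIC::GenerateGeneric is renamed to GenerateMegamorphic, and the hand-rolled keyed lookup cache probe (hashing 32 bits of the map pointer with the name hash, walking kEntriesPerBucket entries, then reading a separate field-offset table) is replaced by a probe of the regular stub cache via stub_cache()->GenerateProbe, falling through to GenerateMiss on a cache miss. The snippet below is a minimal, self-contained sketch of a (map, name) -> handler cache of that general shape; MiniStubCache and everything in it are illustrative names and layouts, not V8's actual StubCache API (the real one also folds code flags into the hash and keeps primary and secondary tables).

#include <cstdint>
#include <cstddef>
#include <iostream>

// Illustrative model of a (map, name) -> handler cache, loosely in the
// spirit of a stub cache probe. The class, table size, and hash mix are
// assumptions for illustration, not V8's real implementation.
struct MiniStubCache {
  static constexpr size_t kSize = 1 << 10;  // power of two so we can mask
  struct Entry { const void* map; const void* name; const void* handler; };
  Entry table[kSize] = {};

  static size_t Hash(const void* map, const void* name) {
    // Mix low bits of both pointers into a table index.
    uintptr_t m = reinterpret_cast<uintptr_t>(map);
    uintptr_t n = reinterpret_cast<uintptr_t>(name);
    return ((m >> 3) ^ (n >> 3)) & (kSize - 1);
  }

  // Both map and name must match; otherwise this is a miss and the caller
  // falls through to its generic miss handler (GenerateMiss in the diff).
  const void* Probe(const void* map, const void* name) const {
    const Entry& e = table[Hash(map, name)];
    return (e.map == map && e.name == name) ? e.handler : nullptr;
  }

  void Set(const void* map, const void* name, const void* handler) {
    table[Hash(map, name)] = Entry{map, name, handler};
  }
};

int main() {
  MiniStubCache cache;
  int fake_map = 0, fake_name = 0, fake_handler = 0;
  cache.Set(&fake_map, &fake_name, &fake_handler);
  // Prints 1: the (map, name) pair hits and returns the cached handler.
  std::cout << (cache.Probe(&fake_map, &fake_name) == &fake_handler) << "\n";
  return 0;
}

Unlike the removed keyed lookup cache, which stored raw field offsets and still had to distinguish in-object from property-array properties inline, a stub-cache-style table hands back a handler to jump to, which is why the new hunk is so much shorter.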