Chromium Code Reviews

Side by Side Diff: src/arm/ic-arm.cc

Issue 338963003: KeyedLoadIC should have same register spec as LoadIC. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: ARM and ARM64 ports. Created 6 years, 5 months ago
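Note on the register change in this file, summarized from the right-hand (new) column of the diff below: on ARM, LoadIC::ReceiverRegister() now returns r1 and LoadIC::NameRegister() returns r2, and KeyedLoadIC's separate ARM definitions (previously r1/r0) are deleted so that keyed loads use the same pair. A minimal sketch of the resulting spec, assuming KeyedLoadIC's accessors now resolve to the same registers somewhere outside this file:

  // IC register specifications on ARM after this patch (from the new column).
  const Register LoadIC::ReceiverRegister() { return r1; }
  const Register LoadIC::NameRegister() { return r2; }
  // The ARM-specific KeyedLoadIC::ReceiverRegister()/NameRegister() definitions
  // are removed below; keyed-load code now asserts receiver.is(r1) and
  // key.is(r2), matching LoadIC.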
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM 7 #if V8_TARGET_ARCH_ARM
8 8
9 #include "src/arm/assembler-arm.h" 9 #include "src/arm/assembler-arm.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
(...skipping 296 matching lines...)
307 __ b(ne, not_unique); 307 __ b(ne, not_unique);
308 308
309 __ bind(&unique); 309 __ bind(&unique);
310 } 310 }
311 311
312 312
313 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) { 313 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
314 // ----------- S t a t e ------------- 314 // ----------- S t a t e -------------
315 // -- r2 : name 315 // -- r2 : name
316 // -- lr : return address 316 // -- lr : return address
317 // -- r0 : receiver 317 // -- r1 : receiver
Jakob Kummerow 2014/06/30 14:12:45 Why doesn't this get the same treatment as e.g. Ke
mvstanton 2014/06/30 14:58:59 Yep, I can do that.
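The "same treatment" in question appears to be the pattern the new code uses elsewhere in this patch (e.g. KeyedLoadIC::GenerateSloppyArguments below): fetch the registers through ReceiverRegister()/NameRegister() instead of naming r1/r2 directly. A hypothetical follow-up version of this function in that style, not part of this patch set:

  void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
    // The return address is in lr.
    Register receiver = ReceiverRegister();  // r1 after this patch.
    Register name = NameRegister();          // r2 after this patch.
    ASSERT(receiver.is(r1));
    ASSERT(name.is(r2));

    // Probe the stub cache.
    Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
    masm->isolate()->stub_cache()->GenerateProbe(
        masm, flags, receiver, name, r3, r4, r5, r6);

    // Cache miss: Jump to runtime.
    GenerateMiss(masm);
  }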
318 // ----------------------------------- 318 // -----------------------------------
319 ASSERT(r0.is(ReceiverRegister())); 319 ASSERT(r1.is(ReceiverRegister()));
320 ASSERT(r2.is(NameRegister())); 320 ASSERT(r2.is(NameRegister()));
321 321
322 // Probe the stub cache. 322 // Probe the stub cache.
323 Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC); 323 Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
324 masm->isolate()->stub_cache()->GenerateProbe( 324 masm->isolate()->stub_cache()->GenerateProbe(
325 masm, flags, r0, r2, r3, r4, r5, r6); 325 masm, flags, r1, r2, r3, r4, r5, r6);
326 326
327 // Cache miss: Jump to runtime. 327 // Cache miss: Jump to runtime.
328 GenerateMiss(masm); 328 GenerateMiss(masm);
329 } 329 }
330 330
331 331
332 void LoadIC::GenerateNormal(MacroAssembler* masm) { 332 void LoadIC::GenerateNormal(MacroAssembler* masm) {
333 // ----------- S t a t e ------------- 333 // ----------- S t a t e -------------
334 // -- r2 : name 334 // -- r2 : name
335 // -- lr : return address 335 // -- lr : return address
336 // -- r0 : receiver 336 // -- r1 : receiver
337 // ----------------------------------- 337 // -----------------------------------
338 ASSERT(r0.is(ReceiverRegister())); 338 ASSERT(r1.is(ReceiverRegister()));
339 ASSERT(r2.is(NameRegister())); 339 ASSERT(r2.is(NameRegister()));
340 340
341 Label miss, slow; 341 Label miss, slow;
342 342
343 GenerateNameDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss); 343 GenerateNameDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);
344 344
345 // r1: elements 345 // r0: elements
346 GenerateDictionaryLoad(masm, &slow, r1, r2, r0, r3, r4); 346 GenerateDictionaryLoad(masm, &slow, r0, r2, r0, r3, r4);
347 __ Ret(); 347 __ Ret();
348 348
349 // Dictionary load failed, go slow (but don't miss). 349 // Dictionary load failed, go slow (but don't miss).
350 __ bind(&slow); 350 __ bind(&slow);
351 GenerateRuntimeGetProperty(masm); 351 GenerateRuntimeGetProperty(masm);
352 352
353 // Cache miss: Jump to runtime. 353 // Cache miss: Jump to runtime.
354 __ bind(&miss); 354 __ bind(&miss);
355 GenerateMiss(masm); 355 GenerateMiss(masm);
356 } 356 }
(...skipping 108 matching lines...)
465 __ mov(scratch, Operand(kPointerSize >> 1)); 465 __ mov(scratch, Operand(kPointerSize >> 1));
466 __ mul(scratch, key, scratch); 466 __ mul(scratch, key, scratch);
467 __ add(scratch, 467 __ add(scratch,
468 scratch, 468 scratch,
469 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 469 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
470 return MemOperand(backing_store, scratch); 470 return MemOperand(backing_store, scratch);
471 } 471 }
472 472
473 473
474 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) { 474 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
475 // ---------- S t a t e -------------- 475 // The return address is on the stack.
Jakob Kummerow 2014/06/30 14:12:45 actually, it's in |lr|
mvstanton 2014/06/30 14:58:59 Done.
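On ARM the return address lives in the link register (the old column's "-- lr : return address" says as much), so presumably the "Done" here just changes the new header comment to something like:

  // The return address is in lr.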
476 // -- lr : return address 476 Register receiver = ReceiverRegister();
477 // -- r0 : key 477 Register key = NameRegister();
478 // -- r1 : receiver 478 ASSERT(receiver.is(r1));
479 // ----------------------------------- 479 ASSERT(key.is(r2));
480 ASSERT(r1.is(ReceiverRegister())); 480
481 ASSERT(r0.is(NameRegister()));
482 Label slow, notin; 481 Label slow, notin;
483 MemOperand mapped_location = 482 MemOperand mapped_location =
484 GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow); 483 GenerateMappedArgumentsLookup(
484 masm, receiver, key, r0, r3, r4, &notin, &slow);
485 __ ldr(r0, mapped_location); 485 __ ldr(r0, mapped_location);
486 __ Ret(); 486 __ Ret();
487 __ bind(&notin); 487 __ bind(&notin);
488 // The unmapped lookup expects that the parameter map is in r2. 488 // The unmapped lookup expects that the parameter map is in r0.
489 MemOperand unmapped_location = 489 MemOperand unmapped_location =
490 GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow); 490 GenerateUnmappedArgumentsLookup(masm, key, r0, r3, &slow);
491 __ ldr(r2, unmapped_location); 491 __ ldr(r0, unmapped_location);
492 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); 492 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
493 __ cmp(r2, r3); 493 __ cmp(r0, r3);
494 __ b(eq, &slow); 494 __ b(eq, &slow);
495 __ mov(r0, r2);
496 __ Ret(); 495 __ Ret();
497 __ bind(&slow); 496 __ bind(&slow);
498 GenerateMiss(masm); 497 GenerateMiss(masm);
499 } 498 }
500 499
501 500
502 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { 501 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
503 // ---------- S t a t e -------------- 502 // ---------- S t a t e --------------
504 // -- r0 : value 503 // -- r0 : value
505 // -- r1 : key 504 // -- r1 : key
(...skipping 32 matching lines...)
538 537
539 // Perform tail call to the entry. 538 // Perform tail call to the entry.
540 ExternalReference ref = 539 ExternalReference ref =
541 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); 540 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
542 541
543 __ TailCallExternalReference(ref, 2, 1); 542 __ TailCallExternalReference(ref, 2, 1);
544 } 543 }
545 544
546 545
547 // IC register specifications 546 // IC register specifications
548 const Register LoadIC::ReceiverRegister() { return r0; } 547 const Register LoadIC::ReceiverRegister() { return r1; }
549 const Register LoadIC::NameRegister() { return r2; } 548 const Register LoadIC::NameRegister() { return r2; }
550 const Register KeyedLoadIC::ReceiverRegister() { return r1; }
551 const Register KeyedLoadIC::NameRegister() { return r0; }
552 549
553 550
554 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { 551 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
555 // The return address is on the stack. 552 // The return address is on the stack.
556 553
557 __ Push(ReceiverRegister(), NameRegister()); 554 __ Push(ReceiverRegister(), NameRegister());
558 555
559 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); 556 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
560 } 557 }
561 558
562 559
563 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { 560 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
564 // ---------- S t a t e -------------- 561 // The return address is on the stack.
565 // -- lr : return address
566 // -- r0 : key
567 // -- r1 : receiver
568 // -----------------------------------
569 Label slow, check_name, index_smi, index_name, property_array_property; 562 Label slow, check_name, index_smi, index_name, property_array_property;
570 Label probe_dictionary, check_number_dictionary; 563 Label probe_dictionary, check_number_dictionary;
571 564
572 Register key = NameRegister(); 565 Register key = NameRegister();
573 Register receiver = ReceiverRegister(); 566 Register receiver = ReceiverRegister();
574 ASSERT(key.is(r0)); 567 ASSERT(key.is(r2));
575 ASSERT(receiver.is(r1)); 568 ASSERT(receiver.is(r1));
576 569
577 Isolate* isolate = masm->isolate(); 570 Isolate* isolate = masm->isolate();
578 571
579 // Check that the key is a smi. 572 // Check that the key is a smi.
580 __ JumpIfNotSmi(key, &check_name); 573 __ JumpIfNotSmi(key, &check_name);
581 __ bind(&index_smi); 574 __ bind(&index_smi);
582 // Now the key is known to be a smi. This place is also jumped to from below 575 // Now the key is known to be a smi. This place is also jumped to from below
583 // where a numeric string is converted to a smi. 576 // where a numeric string is converted to a smi.
584 577
585 GenerateKeyedLoadReceiverCheck( 578 GenerateKeyedLoadReceiverCheck(
586 masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow); 579 masm, receiver, r0, r3, Map::kHasIndexedInterceptor, &slow);
587 580
588 // Check the receiver's map to see if it has fast elements. 581 // Check the receiver's map to see if it has fast elements.
589 __ CheckFastElements(r2, r3, &check_number_dictionary); 582 __ CheckFastElements(r0, r3, &check_number_dictionary);
590 583
591 GenerateFastArrayLoad( 584 GenerateFastArrayLoad(
592 masm, receiver, key, r4, r3, r2, r0, NULL, &slow); 585 masm, receiver, key, r0, r3, r4, r0, NULL, &slow);
593 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3); 586 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r4, r3);
594 __ Ret(); 587 __ Ret();
595 588
596 __ bind(&check_number_dictionary); 589 __ bind(&check_number_dictionary);
597 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset)); 590 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
598 __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset)); 591 __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));
599 592
600 // Check whether the elements is a number dictionary. 593 // Check whether the elements is a number dictionary.
601 // r0: key
602 // r3: elements map 594 // r3: elements map
603 // r4: elements 595 // r4: elements
604 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 596 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
605 __ cmp(r3, ip); 597 __ cmp(r3, ip);
606 __ b(ne, &slow); 598 __ b(ne, &slow);
607 __ SmiUntag(r2, r0); 599 __ SmiUntag(r0, key);
608 __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5); 600 __ LoadFromNumberDictionary(&slow, r4, key, r0, r0, r3, r5);
609 __ Ret(); 601 __ Ret();
610 602
611 // Slow case, key and receiver still in r0 and r1. 603 // Slow case, key and receiver still in r2 and r1.
612 __ bind(&slow); 604 __ bind(&slow);
613 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 605 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
614 1, r2, r3); 606 1, r4, r3);
615 GenerateRuntimeGetProperty(masm); 607 GenerateRuntimeGetProperty(masm);
616 608
617 __ bind(&check_name); 609 __ bind(&check_name);
618 GenerateKeyNameCheck(masm, key, r2, r3, &index_name, &slow); 610 GenerateKeyNameCheck(masm, key, r0, r3, &index_name, &slow);
619 611
620 GenerateKeyedLoadReceiverCheck( 612 GenerateKeyedLoadReceiverCheck(
621 masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow); 613 masm, receiver, r0, r3, Map::kHasNamedInterceptor, &slow);
622 614
623 // If the receiver is a fast-case object, check the keyed lookup 615 // If the receiver is a fast-case object, check the keyed lookup
624 // cache. Otherwise probe the dictionary. 616 // cache. Otherwise probe the dictionary.
625 __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset)); 617 __ ldr(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
626 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset)); 618 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
627 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 619 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
628 __ cmp(r4, ip); 620 __ cmp(r4, ip);
629 __ b(eq, &probe_dictionary); 621 __ b(eq, &probe_dictionary);
630 622
631 // Load the map of the receiver, compute the keyed lookup cache hash 623 // Load the map of the receiver, compute the keyed lookup cache hash
632 // based on 32 bits of the map pointer and the name hash. 624 // based on 32 bits of the map pointer and the name hash.
633 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); 625 __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset));
634 __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift)); 626 __ mov(r3, Operand(r0, ASR, KeyedLookupCache::kMapHashShift));
635 __ ldr(r4, FieldMemOperand(r0, Name::kHashFieldOffset)); 627 __ ldr(r4, FieldMemOperand(key, Name::kHashFieldOffset));
636 __ eor(r3, r3, Operand(r4, ASR, Name::kHashShift)); 628 __ eor(r3, r3, Operand(r4, ASR, Name::kHashShift));
637 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask; 629 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
638 __ And(r3, r3, Operand(mask)); 630 __ And(r3, r3, Operand(mask));
639 631
640 // Load the key (consisting of map and unique name) from the cache and 632 // Load the key (consisting of map and unique name) from the cache and
641 // check for match. 633 // check for match.
642 Label load_in_object_property; 634 Label load_in_object_property;
643 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; 635 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
644 Label hit_on_nth_entry[kEntriesPerBucket]; 636 Label hit_on_nth_entry[kEntriesPerBucket];
645 ExternalReference cache_keys = 637 ExternalReference cache_keys =
646 ExternalReference::keyed_lookup_cache_keys(isolate); 638 ExternalReference::keyed_lookup_cache_keys(isolate);
647 639
648 __ mov(r4, Operand(cache_keys)); 640 __ mov(r4, Operand(cache_keys));
649 __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1)); 641 __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
650 642
651 for (int i = 0; i < kEntriesPerBucket - 1; i++) { 643 for (int i = 0; i < kEntriesPerBucket - 1; i++) {
652 Label try_next_entry; 644 Label try_next_entry;
653 // Load map and move r4 to next entry. 645 // Load map and move r4 to next entry.
654 __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex)); 646 __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
655 __ cmp(r2, r5); 647 __ cmp(r0, r5);
656 __ b(ne, &try_next_entry); 648 __ b(ne, &try_next_entry);
657 __ ldr(r5, MemOperand(r4, -kPointerSize)); // Load name 649 __ ldr(r5, MemOperand(r4, -kPointerSize)); // Load name
658 __ cmp(r0, r5); 650 __ cmp(key, r5);
659 __ b(eq, &hit_on_nth_entry[i]); 651 __ b(eq, &hit_on_nth_entry[i]);
660 __ bind(&try_next_entry); 652 __ bind(&try_next_entry);
661 } 653 }
662 654
663 // Last entry: Load map and move r4 to name. 655 // Last entry: Load map and move r4 to name.
664 __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); 656 __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
665 __ cmp(r2, r5); 657 __ cmp(r0, r5);
666 __ b(ne, &slow); 658 __ b(ne, &slow);
667 __ ldr(r5, MemOperand(r4)); 659 __ ldr(r5, MemOperand(r4));
668 __ cmp(r0, r5); 660 __ cmp(key, r5);
669 __ b(ne, &slow); 661 __ b(ne, &slow);
670 662
671 // Get field offset. 663 // Get field offset.
672 // r0 : key 664 // r0 : receiver's map
673 // r1 : receiver
674 // r2 : receiver's map
675 // r3 : lookup cache index 665 // r3 : lookup cache index
676 ExternalReference cache_field_offsets = 666 ExternalReference cache_field_offsets =
677 ExternalReference::keyed_lookup_cache_field_offsets(isolate); 667 ExternalReference::keyed_lookup_cache_field_offsets(isolate);
678 668
679 // Hit on nth entry. 669 // Hit on nth entry.
680 for (int i = kEntriesPerBucket - 1; i >= 0; i--) { 670 for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
681 __ bind(&hit_on_nth_entry[i]); 671 __ bind(&hit_on_nth_entry[i]);
682 __ mov(r4, Operand(cache_field_offsets)); 672 __ mov(r4, Operand(cache_field_offsets));
683 if (i != 0) { 673 if (i != 0) {
684 __ add(r3, r3, Operand(i)); 674 __ add(r3, r3, Operand(i));
685 } 675 }
686 __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2)); 676 __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
687 __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset)); 677 __ ldrb(r6, FieldMemOperand(r0, Map::kInObjectPropertiesOffset));
688 __ sub(r5, r5, r6, SetCC); 678 __ sub(r5, r5, r6, SetCC);
689 __ b(ge, &property_array_property); 679 __ b(ge, &property_array_property);
690 if (i != 0) { 680 if (i != 0) {
691 __ jmp(&load_in_object_property); 681 __ jmp(&load_in_object_property);
692 } 682 }
693 } 683 }
694 684
695 // Load in-object property. 685 // Load in-object property.
696 __ bind(&load_in_object_property); 686 __ bind(&load_in_object_property);
697 __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset)); 687 __ ldrb(r6, FieldMemOperand(r0, Map::kInstanceSizeOffset));
698 __ add(r6, r6, r5); // Index from start of object. 688 __ add(r6, r6, r5); // Index from start of object.
699 __ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag. 689 __ sub(receiver, receiver, Operand(kHeapObjectTag)); // Remove the heap tag.
700 __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2)); 690 __ ldr(r0, MemOperand(receiver, r6, LSL, kPointerSizeLog2));
701 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 691 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
702 1, r2, r3); 692 1, r4, r3);
703 __ Ret(); 693 __ Ret();
704 694
705 // Load property array property. 695 // Load property array property.
706 __ bind(&property_array_property); 696 __ bind(&property_array_property);
707 __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset)); 697 __ ldr(receiver, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
708 __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 698 __ add(receiver, receiver, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
709 __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2)); 699 __ ldr(r0, MemOperand(receiver, r5, LSL, kPointerSizeLog2));
710 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 700 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
711 1, r2, r3); 701 1, r4, r3);
712 __ Ret(); 702 __ Ret();
713 703
714 // Do a quick inline probe of the receiver's dictionary, if it 704 // Do a quick inline probe of the receiver's dictionary, if it
715 // exists. 705 // exists.
716 __ bind(&probe_dictionary); 706 __ bind(&probe_dictionary);
717 // r1: receiver
718 // r0: key
719 // r3: elements 707 // r3: elements
720 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); 708 __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset));
721 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 709 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
722 GenerateGlobalInstanceTypeCheck(masm, r2, &slow); 710 GenerateGlobalInstanceTypeCheck(masm, r0, &slow);
723 // Load the property to r0. 711 // Load the property to r0.
724 GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4); 712 GenerateDictionaryLoad(masm, &slow, r3, key, r0, r5, r4);
725 __ IncrementCounter( 713 __ IncrementCounter(
726 isolate->counters()->keyed_load_generic_symbol(), 1, r2, r3); 714 isolate->counters()->keyed_load_generic_symbol(), 1, r4, r3);
727 __ Ret(); 715 __ Ret();
728 716
729 __ bind(&index_name); 717 __ bind(&index_name);
730 __ IndexFromHash(r3, key); 718 __ IndexFromHash(r3, key);
731 // Now jump to the place where smi keys are handled. 719 // Now jump to the place where smi keys are handled.
732 __ jmp(&index_smi); 720 __ jmp(&index_smi);
733 } 721 }
734 722
735 723
736 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { 724 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
(...skipping 24 matching lines...)
761 GenerateMiss(masm); 749 GenerateMiss(masm);
762 } 750 }
763 751
764 752
765 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { 753 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
766 // Return address is on the stack. 754 // Return address is on the stack.
767 Label slow; 755 Label slow;
768 756
769 Register receiver = ReceiverRegister(); 757 Register receiver = ReceiverRegister();
770 Register key = NameRegister(); 758 Register key = NameRegister();
771 Register scratch1 = r2; 759 Register scratch1 = r3;
772 Register scratch2 = r3; 760 Register scratch2 = r4;
773 ASSERT(!scratch1.is(receiver) && !scratch1.is(key)); 761 ASSERT(!scratch1.is(receiver) && !scratch1.is(key));
774 ASSERT(!scratch2.is(receiver) && !scratch2.is(key)); 762 ASSERT(!scratch2.is(receiver) && !scratch2.is(key));
775 763
776 // Check that the receiver isn't a smi. 764 // Check that the receiver isn't a smi.
777 __ JumpIfSmi(receiver, &slow); 765 __ JumpIfSmi(receiver, &slow);
778 766
779 // Check that the key is an array index, that is Uint32. 767 // Check that the key is an array index, that is Uint32.
780 __ NonNegativeSmiTst(key); 768 __ NonNegativeSmiTst(key);
781 __ b(ne, &slow); 769 __ b(ne, &slow);
782 770
(...skipping 533 matching lines...)
1316 } else { 1304 } else {
1317 ASSERT(Assembler::GetCondition(branch_instr) == ne); 1305 ASSERT(Assembler::GetCondition(branch_instr) == ne);
1318 patcher.EmitCondition(eq); 1306 patcher.EmitCondition(eq);
1319 } 1307 }
1320 } 1308 }
1321 1309
1322 1310
1323 } } // namespace v8::internal 1311 } } // namespace v8::internal
1324 1312
1325 #endif // V8_TARGET_ARCH_ARM 1313 #endif // V8_TARGET_ARCH_ARM