Chromium Code Reviews

Unified Diff: src/arm/ic-arm.cc

Issue 6685088: Merge isolates to bleeding_edge. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
 // Copyright 2006-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 534 matching lines...)
   // -----------------------------------
   Label number, non_number, non_string, boolean, probe, miss;
 
   // Probe the stub cache.
   Code::Flags flags = Code::ComputeFlags(kind,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC,
                                          Code::kNoExtraICState,
                                          NORMAL,
                                          argc);
-  StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r1, r2, r3, r4, r5);
 
   // If the stub cache probing failed, the receiver might be a value.
   // For value objects, we use the map of the prototype objects for
   // the corresponding JSValue for the cache and that is what we need
   // to probe.
   //
   // Check for number.
   __ tst(r1, Operand(kSmiTagMask));
   __ b(eq, &number);
   __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
(...skipping 18 matching lines...)
   __ b(eq, &boolean);
   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
   __ cmp(r1, ip);
   __ b(ne, &miss);
   __ bind(&boolean);
   StubCompiler::GenerateLoadGlobalFunctionPrototype(
       masm, Context::BOOLEAN_FUNCTION_INDEX, r1);
 
   // Probe the stub cache for the value object.
   __ bind(&probe);
-  StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r1, r2, r3, r4, r5);
 
   __ bind(&miss);
 }
 
 
 static void GenerateFunctionTailCall(MacroAssembler* masm,
                                      int argc,
                                      Label* miss,
                                      Register scratch) {
   // r1: function
(...skipping 34 matching lines...)
 }
 
 
 static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
   // -----------------------------------
 
   if (id == IC::kCallIC_Miss) {
-    __ IncrementCounter(&Counters::call_miss, 1, r3, r4);
+    __ IncrementCounter(COUNTERS->call_miss(), 1, r3, r4);
   } else {
-    __ IncrementCounter(&Counters::keyed_call_miss, 1, r3, r4);
+    __ IncrementCounter(COUNTERS->keyed_call_miss(), 1, r3, r4);
   }
 
   // Get the receiver of the function from the stack.
   __ ldr(r3, MemOperand(sp, argc * kPointerSize));
 
   __ EnterInternalFrame();
 
   // Push the receiver and the name of the function.
   __ Push(r3, r2);
 
(...skipping 94 matching lines...)
   __ JumpIfNotSmi(r2, &check_string);
   __ bind(&index_smi);
   // Now the key is known to be a smi. This place is also jumped to from below
   // where a numeric string is converted to a smi.
 
   GenerateKeyedLoadReceiverCheck(
       masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);
 
   GenerateFastArrayLoad(
       masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_fast, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1, r0, r3);
 
   __ bind(&do_call);
   // receiver in r1 is not used after this point.
   // r2: key
   // r1: function
   GenerateFunctionTailCall(masm, argc, &slow_call, r0);
 
   __ bind(&check_number_dictionary);
   // r2: key
   // r3: elements map
   // r4: elements
   // Check whether the elements is a number dictionary.
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   __ cmp(r3, ip);
   __ b(ne, &slow_load);
   __ mov(r0, Operand(r2, ASR, kSmiTagSize));
   // r0: untagged index
   GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5);
-  __ IncrementCounter(&Counters::keyed_call_generic_smi_dict, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1, r0, r3);
   __ jmp(&do_call);
 
   __ bind(&slow_load);
   // This branch is taken when calling KeyedCallIC_Miss is neither required
   // nor beneficial.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow_load, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1, r0, r3);
   __ EnterInternalFrame();
   __ push(r2);  // save the key
   __ Push(r1, r2);  // pass the receiver and the key
   __ CallRuntime(Runtime::kKeyedGetProperty, 2);
   __ pop(r2);  // restore the key
   __ LeaveInternalFrame();
   __ mov(r1, r0);
   __ jmp(&do_call);
 
   __ bind(&check_string);
   GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);
 
   // The key is known to be a symbol.
   // If the receiver is a regular JS object with slow properties then do
   // a quick inline probe of the receiver's dictionary.
   // Otherwise do the monomorphic cache probe.
   GenerateKeyedLoadReceiverCheck(
       masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
 
   __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
   __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   __ cmp(r3, ip);
   __ b(ne, &lookup_monomorphic_cache);
 
   GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_dict, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1, r0, r3);
   __ jmp(&do_call);
 
   __ bind(&lookup_monomorphic_cache);
-  __ IncrementCounter(&Counters::keyed_call_generic_lookup_cache, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1, r0, r3);
   GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
   // Fall through on miss.
 
   __ bind(&slow_call);
   // This branch is taken if:
   // - the receiver requires boxing or access check,
   // - the key is neither smi nor symbol,
   // - the value loaded is not a function,
   // - there is hope that the runtime will create a monomorphic call stub
   //   that will get fetched next time.
-  __ IncrementCounter(&Counters::keyed_call_generic_slow, 1, r0, r3);
+  __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1, r0, r3);
   GenerateMiss(masm, argc);
 
   __ bind(&index_string);
   __ IndexFromHash(r3, r2);
   // Now jump to the place where smi keys are handled.
   __ jmp(&index_smi);
 }
 
 
 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
(...skipping 22 matching lines...)
   //  -- r2    : name
   //  -- lr    : return address
   //  -- r0    : receiver
   //  -- sp[0] : receiver
   // -----------------------------------
 
   // Probe the stub cache.
   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC);
-  StubCache::GenerateProbe(masm, flags, r0, r2, r3, r4, r5);
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r0, r2, r3, r4, r5);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
 }
 
 
 void LoadIC::GenerateNormal(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
(...skipping 15 matching lines...)
 
 
 void LoadIC::GenerateMiss(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r2    : name
   //  -- lr    : return address
   //  -- r0    : receiver
   //  -- sp[0] : receiver
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::load_miss, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->load_miss(), 1, r3, r4);
 
   __ mov(r3, r0);
   __ Push(r3, r2);
 
   // Perform tail call to the entry.
   ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss));
   __ TailCallExternalReference(ref, 2, 1);
 }
 
 // Returns the code marker, or the 0 if the code is not marked.
(...skipping 127 matching lines...)
 
   // Compute the address of the map load instruction.
   Address ldr_map_instr_address =
       inline_end_address -
       (CodeGenerator::GetInlinedNamedStoreInstructionsAfterPatch() *
        Assembler::kInstrSize);
 
   // Update the offsets if initializing the inlined store. No reason
   // to update the offsets when clearing the inlined version because
   // it will bail out in the map check.
-  if (map != Heap::null_value()) {
+  if (map != HEAP->null_value()) {
     // Patch the offset in the actual store instruction.
     Address str_property_instr_address =
         ldr_map_instr_address + 3 * Assembler::kInstrSize;
     Instr str_property_instr = Assembler::instr_at(str_property_instr_address);
     ASSERT(Assembler::IsStrRegisterImmediate(str_property_instr));
     str_property_instr = Assembler::SetStrRegisterImmediateOffset(
         str_property_instr, offset - kHeapObjectTag);
     Assembler::instr_at_put(str_property_instr_address, str_property_instr);
 
     // Patch the offset in the add instruction that is part of the
(...skipping 63 matching lines...)
 Object* KeyedLoadIC_Miss(Arguments args);
 
 
 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // ---------- S t a t e --------------
   //  -- lr     : return address
   //  -- r0     : key
   //  -- r1     : receiver
   // -----------------------------------
 
-  __ IncrementCounter(&Counters::keyed_load_miss, 1, r3, r4);
+  __ IncrementCounter(COUNTERS->keyed_load_miss(), 1, r3, r4);
 
   __ Push(r1, r0);
 
   ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss));
   __ TailCallExternalReference(ref, 2, 1);
 }
 
 
 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
   // ---------- S t a t e --------------
(...skipping 30 matching lines...)
       masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);
 
   // Check the "has fast elements" bit in the receiver's map which is
   // now in r2.
   __ ldrb(r3, FieldMemOperand(r2, Map::kBitField2Offset));
   __ tst(r3, Operand(1 << Map::kHasFastElements));
   __ b(eq, &check_number_dictionary);
 
   GenerateFastArrayLoad(
       masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
-  __ IncrementCounter(&Counters::keyed_load_generic_smi, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1, r2, r3);
   __ Ret();
 
   __ bind(&check_number_dictionary);
   __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
   __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));
 
   // Check whether the elements is a number dictionary.
   // r0: key
   // r3: elements map
   // r4: elements
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   __ cmp(r3, ip);
   __ b(ne, &slow);
   __ mov(r2, Operand(r0, ASR, kSmiTagSize));
   GenerateNumberDictionaryLoad(masm, &slow, r4, r0, r0, r2, r3, r5);
   __ Ret();
 
   // Slow case, key and receiver still in r0 and r1.
   __ bind(&slow);
-  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1, r2, r3);
   GenerateRuntimeGetProperty(masm);
 
   __ bind(&check_string);
   GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);
 
   GenerateKeyedLoadReceiverCheck(
       masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);
 
   // If the receiver is a fast-case object, check the keyed lookup
   // cache. Otherwise probe the dictionary.
(...skipping 34 matching lines...)
   __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
   __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
   __ sub(r5, r5, r6, SetCC);
   __ b(ge, &property_array_property);
 
   // Load in-object property.
   __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
   __ add(r6, r6, r5);  // Index from start of object.
   __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
   __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3);
   __ Ret();
 
   // Load property array property.
   __ bind(&property_array_property);
   __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
   __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
-  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3);
   __ Ret();
 
   // Do a quick inline probe of the receiver's dictionary, if it
   // exists.
   __ bind(&probe_dictionary);
   // r1: receiver
   // r0: key
   // r3: elements
   __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
   __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
   GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
   // Load the property to r0.
   GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
-  __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1, r2, r3);
+  __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1, r2, r3);
   __ Ret();
 
   __ bind(&index_string);
   __ IndexFromHash(r3, key);
   // Now jump to the place where smi keys are handled.
   __ jmp(&index_smi);
 }
 
 
 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
(...skipping 217 matching lines...)
   //  -- r1    : receiver
   //  -- r2    : name
   //  -- lr    : return address
   // -----------------------------------
 
   // Get the receiver from the stack and probe the stub cache.
   Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC,
                                          strict_mode);
-  StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
+
+  Isolate::Current()->stub_cache()->GenerateProbe(
+      masm, flags, r1, r2, r3, r4, r5);
 
   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
 }
 
 
 void StoreIC::GenerateMiss(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : receiver
(...skipping 63 matching lines...)
   //  -- r0    : value
   //  -- r1    : receiver
   //  -- r2    : name
   //  -- lr    : return address
   // -----------------------------------
   Label miss;
 
   GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);
 
   GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
-  __ IncrementCounter(&Counters::store_normal_hit, 1, r4, r5);
+  __ IncrementCounter(COUNTERS->store_normal_hit(), 1, r4, r5);
   __ Ret();
 
   __ bind(&miss);
-  __ IncrementCounter(&Counters::store_normal_miss, 1, r4, r5);
+  __ IncrementCounter(COUNTERS->store_normal_miss(), 1, r4, r5);
   GenerateMiss(masm);
 }
 
 
 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : receiver
   //  -- r2    : name
(...skipping 129 matching lines...)
     Register reg = Assembler::GetRn(instr_at_patch);
     patcher.masm()->tst(reg, Operand(kSmiTagMask));
     patcher.EmitCondition(eq);
   }
 }
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM
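
The recurring edit throughout this file is the isolates change itself: process-global statics such as StubCache::GenerateProbe, &Counters::..., and Heap::null_value() are replaced by lookups through the current isolate (Isolate::Current()->stub_cache()->GenerateProbe(...), COUNTERS->...(), HEAP->null_value()). The standalone C++ sketch below illustrates that ownership move; the class shapes and member layout are simplified assumptions for illustration, not V8's actual declarations.

// Minimal, self-contained sketch of the pattern this patch applies; the
// class definitions below are illustrative stand-ins, not V8's real code.
#include <iostream>

// Before the isolates merge: one process-wide stub cache, reached through
// static members from generated-code helpers.
namespace before {
struct StubCache {
  static void GenerateProbe() { std::cout << "probe global stub cache\n"; }
};
}  // namespace before

// After the merge: each isolate owns its own stub cache (and counters),
// so the same helpers go through the current isolate instead.
namespace after {
struct StubCache {
  void GenerateProbe() { std::cout << "probe per-isolate stub cache\n"; }
};

struct Isolate {
  static Isolate* Current() {
    static Isolate isolate;  // stand-in for V8's thread-local isolate lookup
    return &isolate;
  }
  StubCache* stub_cache() { return &stub_cache_; }
 private:
  StubCache stub_cache_;
};
}  // namespace after

int main() {
  before::StubCache::GenerateProbe();                         // old call shape
  after::Isolate::Current()->stub_cache()->GenerateProbe();   // new call shape
  return 0;
}

The COUNTERS and HEAP uses in the diff follow the same shape: what was a global symbol becomes an accessor on the isolate that is current on the calling thread.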