| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 536 matching lines...) |
| 547 // ----------------------------------- | 547 // ----------------------------------- |
| 548 Label number, non_number, non_string, boolean, probe, miss; | 548 Label number, non_number, non_string, boolean, probe, miss; |
| 549 | 549 |
| 550 // Probe the stub cache. | 550 // Probe the stub cache. |
| 551 Code::Flags flags = Code::ComputeFlags(kind, | 551 Code::Flags flags = Code::ComputeFlags(kind, |
| 552 NOT_IN_LOOP, | 552 NOT_IN_LOOP, |
| 553 MONOMORPHIC, | 553 MONOMORPHIC, |
| 554 Code::kNoExtraICState, | 554 Code::kNoExtraICState, |
| 555 NORMAL, | 555 NORMAL, |
| 556 argc); | 556 argc); |
| 557 StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5); | 557 Isolate::Current()->stub_cache()->GenerateProbe( |
| | 558 masm, flags, r1, r2, r3, r4, r5); |
| 558 | 559 |
| 559 // If the stub cache probing failed, the receiver might be a value. | 560 // If the stub cache probing failed, the receiver might be a value. |
| 560 // For value objects, we use the map of the prototype objects for | 561 // For value objects, we use the map of the prototype objects for |
| 561 // the corresponding JSValue for the cache and that is what we need | 562 // the corresponding JSValue for the cache and that is what we need |
| 562 // to probe. | 563 // to probe. |
| 563 // | 564 // |
| 564 // Check for number. | 565 // Check for number. |
| 565 __ tst(r1, Operand(kSmiTagMask)); | 566 __ tst(r1, Operand(kSmiTagMask)); |
| 566 __ b(eq, &number); | 567 __ b(eq, &number); |
| 567 __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE); | 568 __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE); |
| (...skipping 18 matching lines...) |
| 586 __ b(eq, &boolean); | 587 __ b(eq, &boolean); |
| 587 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 588 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 588 __ cmp(r1, ip); | 589 __ cmp(r1, ip); |
| 589 __ b(ne, &miss); | 590 __ b(ne, &miss); |
| 590 __ bind(&boolean); | 591 __ bind(&boolean); |
| 591 StubCompiler::GenerateLoadGlobalFunctionPrototype( | 592 StubCompiler::GenerateLoadGlobalFunctionPrototype( |
| 592 masm, Context::BOOLEAN_FUNCTION_INDEX, r1); | 593 masm, Context::BOOLEAN_FUNCTION_INDEX, r1); |
| 593 | 594 |
| 594 // Probe the stub cache for the value object. | 595 // Probe the stub cache for the value object. |
| 595 __ bind(&probe); | 596 __ bind(&probe); |
| 596 StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5); | 597 Isolate::Current()->stub_cache()->GenerateProbe( |
| | 598 masm, flags, r1, r2, r3, r4, r5); |
| 597 | 599 |
| 598 __ bind(&miss); | 600 __ bind(&miss); |
| 599 } | 601 } |
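Both probe sites in this helper now reach the stub cache through the current isolate rather than the static StubCache::GenerateProbe call. Below is a minimal, self-contained sketch of that global-to-per-isolate move; the class shapes and names (StubCacheSketch, IsolateSketch) are illustrative assumptions, not V8's real declarations.

#include <cstdio>

// Illustrative sketch only: a facility formerly reached through a static
// class call becomes a member looked up off the current Isolate.
class StubCacheSketch {
 public:
  void GenerateProbe(const char* site) {
    std::printf("probe stub cache at %s\n", site);
  }
};

class IsolateSketch {
 public:
  // Stand-in for Isolate::Current(); one isolate per process in this sketch.
  static IsolateSketch* Current() {
    static IsolateSketch current;
    return &current;
  }
  StubCacheSketch* stub_cache() { return &stub_cache_; }

 private:
  StubCacheSketch stub_cache_;  // owned per isolate instead of being global
};

int main() {
  // Old shape:  StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
  // New shape:  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, ...);
  IsolateSketch::Current()->stub_cache()->GenerateProbe("monomorphic cache probe");
  return 0;
}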
| 600 | 602 |
| 601 | 603 |
| 602 static void GenerateFunctionTailCall(MacroAssembler* masm, | 604 static void GenerateFunctionTailCall(MacroAssembler* masm, |
| 603 int argc, | 605 int argc, |
| 604 Label* miss, | 606 Label* miss, |
| 605 Register scratch) { | 607 Register scratch) { |
| 606 // r1: function | 608 // r1: function |
| (...skipping 34 matching lines...) |
| 641 } | 643 } |
| 642 | 644 |
| 643 | 645 |
| 644 static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) { | 646 static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) { |
| 645 // ----------- S t a t e ------------- | 647 // ----------- S t a t e ------------- |
| 646 // -- r2 : name | 648 // -- r2 : name |
| 647 // -- lr : return address | 649 // -- lr : return address |
| 648 // ----------------------------------- | 650 // ----------------------------------- |
| 649 | 651 |
| 650 if (id == IC::kCallIC_Miss) { | 652 if (id == IC::kCallIC_Miss) { |
| 651 __ IncrementCounter(&Counters::call_miss, 1, r3, r4); | 653 __ IncrementCounter(COUNTERS->call_miss(), 1, r3, r4); |
| 652 } else { | 654 } else { |
| 653 __ IncrementCounter(&Counters::keyed_call_miss, 1, r3, r4); | 655 __ IncrementCounter(COUNTERS->keyed_call_miss(), 1, r3, r4); |
| 654 } | 656 } |
| 655 | 657 |
| 656 // Get the receiver of the function from the stack. | 658 // Get the receiver of the function from the stack. |
| 657 __ ldr(r3, MemOperand(sp, argc * kPointerSize)); | 659 __ ldr(r3, MemOperand(sp, argc * kPointerSize)); |
| 658 | 660 |
| 659 __ EnterInternalFrame(); | 661 __ EnterInternalFrame(); |
| 660 | 662 |
| 661 // Push the receiver and the name of the function. | 663 // Push the receiver and the name of the function. |
| 662 __ Push(r3, r2); | 664 __ Push(r3, r2); |
| 663 | 665 |
| (...skipping 94 matching lines...) |
| 758 __ JumpIfNotSmi(r2, &check_string); | 760 __ JumpIfNotSmi(r2, &check_string); |
| 759 __ bind(&index_smi); | 761 __ bind(&index_smi); |
| 760 // Now the key is known to be a smi. This place is also jumped to from below | 762 // Now the key is known to be a smi. This place is also jumped to from below |
| 761 // where a numeric string is converted to a smi. | 763 // where a numeric string is converted to a smi. |
| 762 | 764 |
| 763 GenerateKeyedLoadReceiverCheck( | 765 GenerateKeyedLoadReceiverCheck( |
| 764 masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call); | 766 masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call); |
| 765 | 767 |
| 766 GenerateFastArrayLoad( | 768 GenerateFastArrayLoad( |
| 767 masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load); | 769 masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load); |
| 768 __ IncrementCounter(&Counters::keyed_call_generic_smi_fast, 1, r0, r3); | 770 __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1, r0, r3); |
| 769 | 771 |
| 770 __ bind(&do_call); | 772 __ bind(&do_call); |
| 771 // receiver in r1 is not used after this point. | 773 // receiver in r1 is not used after this point. |
| 772 // r2: key | 774 // r2: key |
| 773 // r1: function | 775 // r1: function |
| 774 GenerateFunctionTailCall(masm, argc, &slow_call, r0); | 776 GenerateFunctionTailCall(masm, argc, &slow_call, r0); |
| 775 | 777 |
| 776 __ bind(&check_number_dictionary); | 778 __ bind(&check_number_dictionary); |
| 777 // r2: key | 779 // r2: key |
| 778 // r3: elements map | 780 // r3: elements map |
| 779 // r4: elements | 781 // r4: elements |
| 780 // Check whether the elements is a number dictionary. | 782 // Check whether the elements is a number dictionary. |
| 781 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | 783 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); |
| 782 __ cmp(r3, ip); | 784 __ cmp(r3, ip); |
| 783 __ b(ne, &slow_load); | 785 __ b(ne, &slow_load); |
| 784 __ mov(r0, Operand(r2, ASR, kSmiTagSize)); | 786 __ mov(r0, Operand(r2, ASR, kSmiTagSize)); |
| 785 // r0: untagged index | 787 // r0: untagged index |
| 786 GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5); | 788 GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5); |
| 787 __ IncrementCounter(&Counters::keyed_call_generic_smi_dict, 1, r0, r3); | 789 __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1, r0, r3); |
| 788 __ jmp(&do_call); | 790 __ jmp(&do_call); |
| 789 | 791 |
| 790 __ bind(&slow_load); | 792 __ bind(&slow_load); |
| 791 // This branch is taken when calling KeyedCallIC_Miss is neither required | 793 // This branch is taken when calling KeyedCallIC_Miss is neither required |
| 792 // nor beneficial. | 794 // nor beneficial. |
| 793 __ IncrementCounter(&Counters::keyed_call_generic_slow_load, 1, r0, r3); | 795 __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1, r0, r3); |
| 794 __ EnterInternalFrame(); | 796 __ EnterInternalFrame(); |
| 795 __ push(r2); // save the key | 797 __ push(r2); // save the key |
| 796 __ Push(r1, r2); // pass the receiver and the key | 798 __ Push(r1, r2); // pass the receiver and the key |
| 797 __ CallRuntime(Runtime::kKeyedGetProperty, 2); | 799 __ CallRuntime(Runtime::kKeyedGetProperty, 2); |
| 798 __ pop(r2); // restore the key | 800 __ pop(r2); // restore the key |
| 799 __ LeaveInternalFrame(); | 801 __ LeaveInternalFrame(); |
| 800 __ mov(r1, r0); | 802 __ mov(r1, r0); |
| 801 __ jmp(&do_call); | 803 __ jmp(&do_call); |
| 802 | 804 |
| 803 __ bind(&check_string); | 805 __ bind(&check_string); |
| 804 GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call); | 806 GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call); |
| 805 | 807 |
| 806 // The key is known to be a symbol. | 808 // The key is known to be a symbol. |
| 807 // If the receiver is a regular JS object with slow properties then do | 809 // If the receiver is a regular JS object with slow properties then do |
| 808 // a quick inline probe of the receiver's dictionary. | 810 // a quick inline probe of the receiver's dictionary. |
| 809 // Otherwise do the monomorphic cache probe. | 811 // Otherwise do the monomorphic cache probe. |
| 810 GenerateKeyedLoadReceiverCheck( | 812 GenerateKeyedLoadReceiverCheck( |
| 811 masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); | 813 masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); |
| 812 | 814 |
| 813 __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset)); | 815 __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset)); |
| 814 __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset)); | 816 __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 815 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | 817 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); |
| 816 __ cmp(r3, ip); | 818 __ cmp(r3, ip); |
| 817 __ b(ne, &lookup_monomorphic_cache); | 819 __ b(ne, &lookup_monomorphic_cache); |
| 818 | 820 |
| 819 GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4); | 821 GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4); |
| 820 __ IncrementCounter(&Counters::keyed_call_generic_lookup_dict, 1, r0, r3); | 822 __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1, r0, r3); |
| 821 __ jmp(&do_call); | 823 __ jmp(&do_call); |
| 822 | 824 |
| 823 __ bind(&lookup_monomorphic_cache); | 825 __ bind(&lookup_monomorphic_cache); |
| 824 __ IncrementCounter(&Counters::keyed_call_generic_lookup_cache, 1, r0, r3); | 826 __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1, r0, r3); |
| 825 GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC); | 827 GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC); |
| 826 // Fall through on miss. | 828 // Fall through on miss. |
| 827 | 829 |
| 828 __ bind(&slow_call); | 830 __ bind(&slow_call); |
| 829 // This branch is taken if: | 831 // This branch is taken if: |
| 830 // - the receiver requires boxing or access check, | 832 // - the receiver requires boxing or access check, |
| 831 // - the key is neither smi nor symbol, | 833 // - the key is neither smi nor symbol, |
| 832 // - the value loaded is not a function, | 834 // - the value loaded is not a function, |
| 833 // - there is hope that the runtime will create a monomorphic call stub | 835 // - there is hope that the runtime will create a monomorphic call stub |
| 834 // that will get fetched next time. | 836 // that will get fetched next time. |
| 835 __ IncrementCounter(&Counters::keyed_call_generic_slow, 1, r0, r3); | 837 __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1, r0, r3); |
| 836 GenerateMiss(masm, argc); | 838 GenerateMiss(masm, argc); |
| 837 | 839 |
| 838 __ bind(&index_string); | 840 __ bind(&index_string); |
| 839 __ IndexFromHash(r3, r2); | 841 __ IndexFromHash(r3, r2); |
| 840 // Now jump to the place where smi keys are handled. | 842 // Now jump to the place where smi keys are handled. |
| 841 __ jmp(&index_smi); | 843 __ jmp(&index_smi); |
| 842 } | 844 } |
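The counter updates in this stub follow the same isolate migration: instead of taking the address of a static member (e.g. &Counters::keyed_call_generic_slow), the new code calls an accessor obtained through the COUNTERS macro. The sketch below shows the assumed shape of such an accessor-style counters object; the types and the macro stand-in are illustrative, not the actual counters implementation.

#include <cstdio>

// Assumed shape of a per-isolate counters object (illustration only).
struct StatsCounterSketch {
  explicit StatsCounterSketch(const char* n) : name(n), value(0) {}
  void Increment(int amount) { value += amount; }
  const char* name;
  int value;
};

class CountersSketch {
 public:
  StatsCounterSketch* call_miss() { return &call_miss_; }
  StatsCounterSketch* keyed_call_generic_slow() { return &keyed_call_generic_slow_; }

 private:
  StatsCounterSketch call_miss_{"call_miss"};
  StatsCounterSketch keyed_call_generic_slow_{"keyed_call_generic_slow"};
};

// Stand-in for the COUNTERS macro used above; presumably it resolves to the
// counters object owned by the current isolate.
static CountersSketch g_counters_sketch;
#define COUNTERS_SKETCH (&g_counters_sketch)

int main() {
  // Old shape:  __ IncrementCounter(&Counters::call_miss, 1, r3, r4);
  // New shape:  __ IncrementCounter(COUNTERS->call_miss(), 1, r3, r4);
  COUNTERS_SKETCH->call_miss()->Increment(1);
  std::printf("%s = %d\n", COUNTERS_SKETCH->call_miss()->name,
              COUNTERS_SKETCH->call_miss()->value);
  return 0;
}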
| 843 | 845 |
| 844 | 846 |
| 845 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) { | 847 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) { |
| (...skipping 22 matching lines...) |
| 868 // -- r2 : name | 870 // -- r2 : name |
| 869 // -- lr : return address | 871 // -- lr : return address |
| 870 // -- r0 : receiver | 872 // -- r0 : receiver |
| 871 // -- sp[0] : receiver | 873 // -- sp[0] : receiver |
| 872 // ----------------------------------- | 874 // ----------------------------------- |
| 873 | 875 |
| 874 // Probe the stub cache. | 876 // Probe the stub cache. |
| 875 Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, | 877 Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, |
| 876 NOT_IN_LOOP, | 878 NOT_IN_LOOP, |
| 877 MONOMORPHIC); | 879 MONOMORPHIC); |
| 878 StubCache::GenerateProbe(masm, flags, r0, r2, r3, r4, r5); | 880 Isolate::Current()->stub_cache()->GenerateProbe( |
| | 881 masm, flags, r0, r2, r3, r4, r5); |
| 879 | 882 |
| 880 // Cache miss: Jump to runtime. | 883 // Cache miss: Jump to runtime. |
| 881 GenerateMiss(masm); | 884 GenerateMiss(masm); |
| 882 } | 885 } |
| 883 | 886 |
| 884 | 887 |
| 885 void LoadIC::GenerateNormal(MacroAssembler* masm) { | 888 void LoadIC::GenerateNormal(MacroAssembler* masm) { |
| 886 // ----------- S t a t e ------------- | 889 // ----------- S t a t e ------------- |
| 887 // -- r2 : name | 890 // -- r2 : name |
| 888 // -- lr : return address | 891 // -- lr : return address |
| (...skipping 15 matching lines...) |
| 904 | 907 |
| 905 | 908 |
| 906 void LoadIC::GenerateMiss(MacroAssembler* masm) { | 909 void LoadIC::GenerateMiss(MacroAssembler* masm) { |
| 907 // ----------- S t a t e ------------- | 910 // ----------- S t a t e ------------- |
| 908 // -- r2 : name | 911 // -- r2 : name |
| 909 // -- lr : return address | 912 // -- lr : return address |
| 910 // -- r0 : receiver | 913 // -- r0 : receiver |
| 911 // -- sp[0] : receiver | 914 // -- sp[0] : receiver |
| 912 // ----------------------------------- | 915 // ----------------------------------- |
| 913 | 916 |
| 914 __ IncrementCounter(&Counters::load_miss, 1, r3, r4); | 917 __ IncrementCounter(COUNTERS->load_miss(), 1, r3, r4); |
| 915 | 918 |
| 916 __ mov(r3, r0); | 919 __ mov(r3, r0); |
| 917 __ Push(r3, r2); | 920 __ Push(r3, r2); |
| 918 | 921 |
| 919 // Perform tail call to the entry. | 922 // Perform tail call to the entry. |
| 920 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); | 923 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); |
| 921 __ TailCallExternalReference(ref, 2, 1); | 924 __ TailCallExternalReference(ref, 2, 1); |
| 922 } | 925 } |
| 923 | 926 |
| 924 // Returns the code marker, or 0 if the code is not marked. | 927 // Returns the code marker, or 0 if the code is not marked. |
| (...skipping 127 matching lines...) |
| 1052 | 1055 |
| 1053 // Compute the address of the map load instruction. | 1056 // Compute the address of the map load instruction. |
| 1054 Address ldr_map_instr_address = | 1057 Address ldr_map_instr_address = |
| 1055 inline_end_address - | 1058 inline_end_address - |
| 1056 (CodeGenerator::GetInlinedNamedStoreInstructionsAfterPatch() * | 1059 (CodeGenerator::GetInlinedNamedStoreInstructionsAfterPatch() * |
| 1057 Assembler::kInstrSize); | 1060 Assembler::kInstrSize); |
| 1058 | 1061 |
| 1059 // Update the offsets if initializing the inlined store. No reason | 1062 // Update the offsets if initializing the inlined store. No reason |
| 1060 // to update the offsets when clearing the inlined version because | 1063 // to update the offsets when clearing the inlined version because |
| 1061 // it will bail out in the map check. | 1064 // it will bail out in the map check. |
| 1062 if (map != Heap::null_value()) { | 1065 if (map != HEAP->null_value()) { |
| 1063 // Patch the offset in the actual store instruction. | 1066 // Patch the offset in the actual store instruction. |
| 1064 Address str_property_instr_address = | 1067 Address str_property_instr_address = |
| 1065 ldr_map_instr_address + 3 * Assembler::kInstrSize; | 1068 ldr_map_instr_address + 3 * Assembler::kInstrSize; |
| 1066 Instr str_property_instr = Assembler::instr_at(str_property_instr_address); | 1069 Instr str_property_instr = Assembler::instr_at(str_property_instr_address); |
| 1067 ASSERT(Assembler::IsStrRegisterImmediate(str_property_instr)); | 1070 ASSERT(Assembler::IsStrRegisterImmediate(str_property_instr)); |
| 1068 str_property_instr = Assembler::SetStrRegisterImmediateOffset( | 1071 str_property_instr = Assembler::SetStrRegisterImmediateOffset( |
| 1069 str_property_instr, offset - kHeapObjectTag); | 1072 str_property_instr, offset - kHeapObjectTag); |
| 1070 Assembler::instr_at_put(str_property_instr_address, str_property_instr); | 1073 Assembler::instr_at_put(str_property_instr_address, str_property_instr); |
| 1071 | 1074 |
| 1072 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER | 1075 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER |
| (...skipping 65 matching lines...) |
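The guard on this patch routine moves from the static Heap::null_value() to HEAP->null_value(), i.e. the null sentinel is now read from the current isolate's heap. A trimmed-down sketch of that check, with made-up types and an assumed HEAP-like macro, follows; only the shape of the comparison is meant to carry over.

// Made-up types for illustration; the real Heap/Isolate interfaces differ.
struct ObjectSketch {};

class HeapSketch {
 public:
  ObjectSketch* null_value() { return &null_value_; }
 private:
  ObjectSketch null_value_;  // one null sentinel per heap, hence per isolate
};

class IsolateWithHeapSketch {
 public:
  static IsolateWithHeapSketch* Current() {
    static IsolateWithHeapSketch current;
    return &current;
  }
  HeapSketch* heap() { return &heap_; }
 private:
  HeapSketch heap_;
};

// Assumed expansion of a HEAP-like macro.
#define HEAP_SKETCH (IsolateWithHeapSketch::Current()->heap())

bool ShouldPatchOffsets(ObjectSketch* map) {
  // Mirrors the check above: update the inlined-store offsets only when
  // initializing (map is a real map), not when clearing (map is null).
  return map != HEAP_SKETCH->null_value();
}

int main() {
  ObjectSketch some_map;
  return ShouldPatchOffsets(&some_map) ? 0 : 1;
}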
| 1138 Object* KeyedLoadIC_Miss(Arguments args); | 1141 Object* KeyedLoadIC_Miss(Arguments args); |
| 1139 | 1142 |
| 1140 | 1143 |
| 1141 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 1144 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
| 1142 // ---------- S t a t e -------------- | 1145 // ---------- S t a t e -------------- |
| 1143 // -- lr : return address | 1146 // -- lr : return address |
| 1144 // -- r0 : key | 1147 // -- r0 : key |
| 1145 // -- r1 : receiver | 1148 // -- r1 : receiver |
| 1146 // ----------------------------------- | 1149 // ----------------------------------- |
| 1147 | 1150 |
| 1148 __ IncrementCounter(&Counters::keyed_load_miss, 1, r3, r4); | 1151 __ IncrementCounter(COUNTERS->keyed_load_miss(), 1, r3, r4); |
| 1149 | 1152 |
| 1150 __ Push(r1, r0); | 1153 __ Push(r1, r0); |
| 1151 | 1154 |
| 1152 ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss)); | 1155 ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss)); |
| 1153 __ TailCallExternalReference(ref, 2, 1); | 1156 __ TailCallExternalReference(ref, 2, 1); |
| 1154 } | 1157 } |
| 1155 | 1158 |
| 1156 | 1159 |
| 1157 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 1160 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 1158 // ---------- S t a t e -------------- | 1161 // ---------- S t a t e -------------- |
| (...skipping 30 matching lines...) |
| 1189 masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow); | 1192 masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow); |
| 1190 | 1193 |
| 1191 // Check the "has fast elements" bit in the receiver's map which is | 1194 // Check the "has fast elements" bit in the receiver's map which is |
| 1192 // now in r2. | 1195 // now in r2. |
| 1193 __ ldrb(r3, FieldMemOperand(r2, Map::kBitField2Offset)); | 1196 __ ldrb(r3, FieldMemOperand(r2, Map::kBitField2Offset)); |
| 1194 __ tst(r3, Operand(1 << Map::kHasFastElements)); | 1197 __ tst(r3, Operand(1 << Map::kHasFastElements)); |
| 1195 __ b(eq, &check_number_dictionary); | 1198 __ b(eq, &check_number_dictionary); |
| 1196 | 1199 |
| 1197 GenerateFastArrayLoad( | 1200 GenerateFastArrayLoad( |
| 1198 masm, receiver, key, r4, r3, r2, r0, NULL, &slow); | 1201 masm, receiver, key, r4, r3, r2, r0, NULL, &slow); |
| 1199 __ IncrementCounter(&Counters::keyed_load_generic_smi, 1, r2, r3); | 1202 __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1, r2, r3); |
| 1200 __ Ret(); | 1203 __ Ret(); |
| 1201 | 1204 |
| 1202 __ bind(&check_number_dictionary); | 1205 __ bind(&check_number_dictionary); |
| 1203 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 1206 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 1204 __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset)); | 1207 __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset)); |
| 1205 | 1208 |
| 1206 // Check whether the elements is a number dictionary. | 1209 // Check whether the elements is a number dictionary. |
| 1207 // r0: key | 1210 // r0: key |
| 1208 // r3: elements map | 1211 // r3: elements map |
| 1209 // r4: elements | 1212 // r4: elements |
| 1210 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | 1213 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); |
| 1211 __ cmp(r3, ip); | 1214 __ cmp(r3, ip); |
| 1212 __ b(ne, &slow); | 1215 __ b(ne, &slow); |
| 1213 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); | 1216 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); |
| 1214 GenerateNumberDictionaryLoad(masm, &slow, r4, r0, r0, r2, r3, r5); | 1217 GenerateNumberDictionaryLoad(masm, &slow, r4, r0, r0, r2, r3, r5); |
| 1215 __ Ret(); | 1218 __ Ret(); |
| 1216 | 1219 |
| 1217 // Slow case, key and receiver still in r0 and r1. | 1220 // Slow case, key and receiver still in r0 and r1. |
| 1218 __ bind(&slow); | 1221 __ bind(&slow); |
| 1219 __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r2, r3); | 1222 __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1, r2, r3); |
| 1220 GenerateRuntimeGetProperty(masm); | 1223 GenerateRuntimeGetProperty(masm); |
| 1221 | 1224 |
| 1222 __ bind(&check_string); | 1225 __ bind(&check_string); |
| 1223 GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow); | 1226 GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow); |
| 1224 | 1227 |
| 1225 GenerateKeyedLoadReceiverCheck( | 1228 GenerateKeyedLoadReceiverCheck( |
| 1226 masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow); | 1229 masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow); |
| 1227 | 1230 |
| 1228 // If the receiver is a fast-case object, check the keyed lookup | 1231 // If the receiver is a fast-case object, check the keyed lookup |
| 1229 // cache. Otherwise probe the dictionary. | 1232 // cache. Otherwise probe the dictionary. |
| (...skipping 34 matching lines...) |
| 1264 __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2)); | 1267 __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2)); |
| 1265 __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset)); | 1268 __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset)); |
| 1266 __ sub(r5, r5, r6, SetCC); | 1269 __ sub(r5, r5, r6, SetCC); |
| 1267 __ b(ge, &property_array_property); | 1270 __ b(ge, &property_array_property); |
| 1268 | 1271 |
| 1269 // Load in-object property. | 1272 // Load in-object property. |
| 1270 __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset)); | 1273 __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset)); |
| 1271 __ add(r6, r6, r5); // Index from start of object. | 1274 __ add(r6, r6, r5); // Index from start of object. |
| 1272 __ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag. | 1275 __ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag. |
| 1273 __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2)); | 1276 __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2)); |
| 1274 __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3); | 1277 __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3); |
| 1275 __ Ret(); | 1278 __ Ret(); |
| 1276 | 1279 |
| 1277 // Load property array property. | 1280 // Load property array property. |
| 1278 __ bind(&property_array_property); | 1281 __ bind(&property_array_property); |
| 1279 __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset)); | 1282 __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset)); |
| 1280 __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1283 __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 1281 __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2)); | 1284 __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2)); |
| 1282 __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3); | 1285 __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3); |
| 1283 __ Ret(); | 1286 __ Ret(); |
| 1284 | 1287 |
| 1285 // Do a quick inline probe of the receiver's dictionary, if it | 1288 // Do a quick inline probe of the receiver's dictionary, if it |
| 1286 // exists. | 1289 // exists. |
| 1287 __ bind(&probe_dictionary); | 1290 __ bind(&probe_dictionary); |
| 1288 // r1: receiver | 1291 // r1: receiver |
| 1289 // r0: key | 1292 // r0: key |
| 1290 // r3: elements | 1293 // r3: elements |
| 1291 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); | 1294 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 1292 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); | 1295 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
| 1293 GenerateGlobalInstanceTypeCheck(masm, r2, &slow); | 1296 GenerateGlobalInstanceTypeCheck(masm, r2, &slow); |
| 1294 // Load the property to r0. | 1297 // Load the property to r0. |
| 1295 GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4); | 1298 GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4); |
| 1296 __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1, r2, r3); | 1299 __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1, r2, r3); |
| 1297 __ Ret(); | 1300 __ Ret(); |
| 1298 | 1301 |
| 1299 __ bind(&index_string); | 1302 __ bind(&index_string); |
| 1300 __ IndexFromHash(r3, key); | 1303 __ IndexFromHash(r3, key); |
| 1301 // Now jump to the place where smi keys are handled. | 1304 // Now jump to the place where smi keys are handled. |
| 1302 __ jmp(&index_smi); | 1305 __ jmp(&index_smi); |
| 1303 } | 1306 } |
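For readers skimming the generic keyed-load stub that ends here, the control flow can be summarized roughly as below. This is a hand-written approximation in plain C++ (made-up enum and flags, not a transcription of the ARM sequence), intended as a reading aid rather than a specification.

// Approximate dispatch performed by KeyedLoadIC::GenerateGeneric (sketch).
enum class KeyedLoadPath {
  kFastElements,        // smi key, receiver with fast elements
  kNumberDictionary,    // smi key, elements backed by a number dictionary
  kLookupCache,         // symbol key, fast-properties receiver
  kPropertyDictionary,  // symbol key, slow-properties receiver
  kRuntime              // everything else falls back to the runtime
};

KeyedLoadPath ClassifyKeyedLoad(bool key_is_smi, bool has_fast_elements,
                                bool key_is_symbol, bool slow_properties) {
  if (key_is_smi) {
    return has_fast_elements ? KeyedLoadPath::kFastElements
                             : KeyedLoadPath::kNumberDictionary;
  }
  if (key_is_symbol) {
    return slow_properties ? KeyedLoadPath::kPropertyDictionary
                           : KeyedLoadPath::kLookupCache;
  }
  // Non-symbol string keys that hash to an array index are converted to a
  // smi (the index_string label above) and re-enter the smi path; anything
  // else goes to the runtime.
  return KeyedLoadPath::kRuntime;
}

int main() {
  // Example: a smi key against a fast-elements receiver takes the fast path.
  return ClassifyKeyedLoad(true, true, false, false) ==
                 KeyedLoadPath::kFastElements
             ? 0
             : 1;
}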
| 1304 | 1307 |
| 1305 | 1308 |
| 1306 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { | 1309 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { |
| (...skipping 220 matching lines...) |
| 1527 // -- r1 : receiver | 1530 // -- r1 : receiver |
| 1528 // -- r2 : name | 1531 // -- r2 : name |
| 1529 // -- lr : return address | 1532 // -- lr : return address |
| 1530 // ----------------------------------- | 1533 // ----------------------------------- |
| 1531 | 1534 |
| 1532 // Get the receiver from the stack and probe the stub cache. | 1535 // Get the receiver from the stack and probe the stub cache. |
| 1533 Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, | 1536 Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, |
| 1534 NOT_IN_LOOP, | 1537 NOT_IN_LOOP, |
| 1535 MONOMORPHIC, | 1538 MONOMORPHIC, |
| 1536 strict_mode); | 1539 strict_mode); |
| 1537 StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5); | 1540 |
| | 1541 Isolate::Current()->stub_cache()->GenerateProbe( |
| | 1542 masm, flags, r1, r2, r3, r4, r5); |
| 1538 | 1543 |
| 1539 // Cache miss: Jump to runtime. | 1544 // Cache miss: Jump to runtime. |
| 1540 GenerateMiss(masm); | 1545 GenerateMiss(masm); |
| 1541 } | 1546 } |
| 1542 | 1547 |
| 1543 | 1548 |
| 1544 void StoreIC::GenerateMiss(MacroAssembler* masm) { | 1549 void StoreIC::GenerateMiss(MacroAssembler* masm) { |
| 1545 // ----------- S t a t e ------------- | 1550 // ----------- S t a t e ------------- |
| 1546 // -- r0 : value | 1551 // -- r0 : value |
| 1547 // -- r1 : receiver | 1552 // -- r1 : receiver |
| (...skipping 63 matching lines...) |
| 1611 // -- r0 : value | 1616 // -- r0 : value |
| 1612 // -- r1 : receiver | 1617 // -- r1 : receiver |
| 1613 // -- r2 : name | 1618 // -- r2 : name |
| 1614 // -- lr : return address | 1619 // -- lr : return address |
| 1615 // ----------------------------------- | 1620 // ----------------------------------- |
| 1616 Label miss; | 1621 Label miss; |
| 1617 | 1622 |
| 1618 GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss); | 1623 GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss); |
| 1619 | 1624 |
| 1620 GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5); | 1625 GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5); |
| 1621 __ IncrementCounter(&Counters::store_normal_hit, 1, r4, r5); | 1626 __ IncrementCounter(COUNTERS->store_normal_hit(), 1, r4, r5); |
| 1622 __ Ret(); | 1627 __ Ret(); |
| 1623 | 1628 |
| 1624 __ bind(&miss); | 1629 __ bind(&miss); |
| 1625 __ IncrementCounter(&Counters::store_normal_miss, 1, r4, r5); | 1630 __ IncrementCounter(COUNTERS->store_normal_miss(), 1, r4, r5); |
| 1626 GenerateMiss(masm); | 1631 GenerateMiss(masm); |
| 1627 } | 1632 } |
| 1628 | 1633 |
| 1629 | 1634 |
| 1630 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm, | 1635 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm, |
| 1631 StrictModeFlag strict_mode) { | 1636 StrictModeFlag strict_mode) { |
| 1632 // ----------- S t a t e ------------- | 1637 // ----------- S t a t e ------------- |
| 1633 // -- r0 : value | 1638 // -- r0 : value |
| 1634 // -- r1 : receiver | 1639 // -- r1 : receiver |
| 1635 // -- r2 : name | 1640 // -- r2 : name |
| (...skipping 129 matching lines...) |
| 1765 Register reg = Assembler::GetRn(instr_at_patch); | 1770 Register reg = Assembler::GetRn(instr_at_patch); |
| 1766 patcher.masm()->tst(reg, Operand(kSmiTagMask)); | 1771 patcher.masm()->tst(reg, Operand(kSmiTagMask)); |
| 1767 patcher.EmitCondition(eq); | 1772 patcher.EmitCondition(eq); |
| 1768 } | 1773 } |
| 1769 } | 1774 } |
| 1770 | 1775 |
| 1771 | 1776 |
| 1772 } } // namespace v8::internal | 1777 } } // namespace v8::internal |
| 1773 | 1778 |
| 1774 #endif // V8_TARGET_ARCH_ARM | 1779 #endif // V8_TARGET_ARCH_ARM |