| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
| 15 // | 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| | 28 #include "v8.h" |
| | 29 |
| 28 #include "arm/lithium-codegen-arm.h" | 30 #include "arm/lithium-codegen-arm.h" |
| 29 #include "arm/lithium-gap-resolver-arm.h" | 31 #include "arm/lithium-gap-resolver-arm.h" |
| 30 #include "code-stubs.h" | 32 #include "code-stubs.h" |
| 31 #include "stub-cache.h" | 33 #include "stub-cache.h" |
| 32 | 34 |
| 33 namespace v8 { | 35 namespace v8 { |
| 34 namespace internal { | 36 namespace internal { |
| 35 | 37 |
| 36 | 38 |
| 37 class SafepointGenerator : public CallWrapper { | 39 class SafepointGenerator : public CallWrapper { |
| (...skipping 444 matching lines...) |
| 482 RelocInfo::Mode mode, | 484 RelocInfo::Mode mode, |
| 483 LInstruction* instr) { | 485 LInstruction* instr) { |
| 484 ASSERT(instr != NULL); | 486 ASSERT(instr != NULL); |
| 485 LPointerMap* pointers = instr->pointer_map(); | 487 LPointerMap* pointers = instr->pointer_map(); |
| 486 RecordPosition(pointers->position()); | 488 RecordPosition(pointers->position()); |
| 487 __ Call(code, mode); | 489 __ Call(code, mode); |
| 488 RegisterLazyDeoptimization(instr); | 490 RegisterLazyDeoptimization(instr); |
| 489 } | 491 } |
| 490 | 492 |
| 491 | 493 |
| 492 void LCodeGen::CallRuntime(Runtime::Function* function, | 494 void LCodeGen::CallRuntime(const Runtime::Function* function, |
| 493 int num_arguments, | 495 int num_arguments, |
| 494 LInstruction* instr) { | 496 LInstruction* instr) { |
| 495 ASSERT(instr != NULL); | 497 ASSERT(instr != NULL); |
| 496 LPointerMap* pointers = instr->pointer_map(); | 498 LPointerMap* pointers = instr->pointer_map(); |
| 497 ASSERT(pointers != NULL); | 499 ASSERT(pointers != NULL); |
| 498 RecordPosition(pointers->position()); | 500 RecordPosition(pointers->position()); |
| 499 | 501 |
| 500 __ CallRuntime(function, num_arguments); | 502 __ CallRuntime(function, num_arguments); |
| 501 RegisterLazyDeoptimization(instr); | 503 RegisterLazyDeoptimization(instr); |
| 502 } | 504 } |
| (...skipping 78 matching lines...) |
| 581 } | 583 } |
| 582 } | 584 } |
| 583 } | 585 } |
| 584 | 586 |
| 585 | 587 |
| 586 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 588 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
| 587 int length = deoptimizations_.length(); | 589 int length = deoptimizations_.length(); |
| 588 if (length == 0) return; | 590 if (length == 0) return; |
| 589 ASSERT(FLAG_deopt); | 591 ASSERT(FLAG_deopt); |
| 590 Handle<DeoptimizationInputData> data = | 592 Handle<DeoptimizationInputData> data = |
| 591 Factory::NewDeoptimizationInputData(length, TENURED); | 593 factory()->NewDeoptimizationInputData(length, TENURED); |
| 592 | 594 |
| 593 Handle<ByteArray> translations = translations_.CreateByteArray(); | 595 Handle<ByteArray> translations = translations_.CreateByteArray(); |
| 594 data->SetTranslationByteArray(*translations); | 596 data->SetTranslationByteArray(*translations); |
| 595 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); | 597 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_)); |
| 596 | 598 |
| 597 Handle<FixedArray> literals = | 599 Handle<FixedArray> literals = |
| 598 Factory::NewFixedArray(deoptimization_literals_.length(), TENURED); | 600 factory()->NewFixedArray(deoptimization_literals_.length(), TENURED); |
| 599 for (int i = 0; i < deoptimization_literals_.length(); i++) { | 601 for (int i = 0; i < deoptimization_literals_.length(); i++) { |
| 600 literals->set(i, *deoptimization_literals_[i]); | 602 literals->set(i, *deoptimization_literals_[i]); |
| 601 } | 603 } |
| 602 data->SetLiteralArray(*literals); | 604 data->SetLiteralArray(*literals); |
| 603 | 605 |
| 604 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); | 606 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id())); |
| 605 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); | 607 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_)); |
| 606 | 608 |
| 607 // Populate the deoptimization entries. | 609 // Populate the deoptimization entries. |
| 608 for (int i = 0; i < length; i++) { | 610 for (int i = 0; i < length; i++) { |
| (...skipping 1284 matching lines...) |
| 1893 | 1895 |
| 1894 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 1896 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 1895 ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0. | 1897 ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0. |
| 1896 ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1. | 1898 ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1. |
| 1897 | 1899 |
| 1898 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1900 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 1899 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1901 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1900 | 1902 |
| 1901 Label true_value, done; | 1903 Label true_value, done; |
| 1902 __ tst(r0, r0); | 1904 __ tst(r0, r0); |
| 1903 __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne); | 1905 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); |
| 1904 __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq); | 1906 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); |
| 1905 } | 1907 } |
| 1906 | 1908 |
| 1907 | 1909 |
| 1908 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { | 1910 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { |
| 1909 ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0. | 1911 ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0. |
| 1910 ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1. | 1912 ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1. |
| 1911 | 1913 |
| 1912 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1914 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1913 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1915 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1914 | 1916 |
| (...skipping 38 matching lines...) |
| 1953 // This is the inlined call site instanceof cache. The two occurrences of the | 1955 // This is the inlined call site instanceof cache. The two occurrences of the |
| 1954 // hole value will be patched to the last map/result pair generated by the | 1956 // hole value will be patched to the last map/result pair generated by the |
| 1955 // instanceof stub. | 1957 // instanceof stub. |
| 1956 Label cache_miss; | 1958 Label cache_miss; |
| 1957 Register map = temp; | 1959 Register map = temp; |
| 1958 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 1960 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 1959 __ bind(deferred->map_check()); // Label for calculating code patching. | 1961 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 1960 // We use Factory::the_hole_value() on purpose instead of loading from the | 1962 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 1961 // root array to force relocation to be able to later patch with | 1963 // root array to force relocation to be able to later patch with |
| 1962 // the cached map. | 1964 // the cached map. |
| 1963 __ mov(ip, Operand(Factory::the_hole_value())); | 1965 __ mov(ip, Operand(factory()->the_hole_value())); |
| 1964 __ cmp(map, Operand(ip)); | 1966 __ cmp(map, Operand(ip)); |
| 1965 __ b(ne, &cache_miss); | 1967 __ b(ne, &cache_miss); |
| 1966 // We use Factory::the_hole_value() on purpose instead of loading from the | 1968 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 1967 // root array to force relocation to be able to later patch | 1969 // root array to force relocation to be able to later patch |
| 1968 // with true or false. | 1970 // with true or false. |
| 1969 __ mov(result, Operand(Factory::the_hole_value())); | 1971 __ mov(result, Operand(factory()->the_hole_value())); |
| 1970 __ b(&done); | 1972 __ b(&done); |
| 1971 | 1973 |
| 1972 // The inlined call site cache did not match. Check null and string before | 1974 // The inlined call site cache did not match. Check null and string before |
| 1973 // calling the deferred code. | 1975 // calling the deferred code. |
| 1974 __ bind(&cache_miss); | 1976 __ bind(&cache_miss); |
| 1975 // Null is not instance of anything. | 1977 // Null is not instance of anything. |
| 1976 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 1978 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 1977 __ cmp(object, Operand(ip)); | 1979 __ cmp(object, Operand(ip)); |
| 1978 __ b(eq, &false_result); | 1980 __ b(eq, &false_result); |
| 1979 | 1981 |
| (...skipping 192 matching lines...) |
| 2172 } | 2174 } |
| 2173 } | 2175 } |
| 2174 | 2176 |
| 2175 | 2177 |
| 2176 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2178 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 2177 ASSERT(ToRegister(instr->object()).is(r0)); | 2179 ASSERT(ToRegister(instr->object()).is(r0)); |
| 2178 ASSERT(ToRegister(instr->result()).is(r0)); | 2180 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2179 | 2181 |
| 2180 // Name is always in r2. | 2182 // Name is always in r2. |
| 2181 __ mov(r2, Operand(instr->name())); | 2183 __ mov(r2, Operand(instr->name())); |
| 2182 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 2184 Handle<Code> ic( |
| | 2185 isolate()->builtins()->builtin(Builtins::LoadIC_Initialize)); |
| 2183 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2186 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2184 } | 2187 } |
| 2185 | 2188 |
| 2186 | 2189 |
| 2187 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 2190 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
| 2188 Register scratch = scratch0(); | 2191 Register scratch = scratch0(); |
| 2189 Register function = ToRegister(instr->function()); | 2192 Register function = ToRegister(instr->function()); |
| 2190 Register result = ToRegister(instr->result()); | 2193 Register result = ToRegister(instr->result()); |
| 2191 | 2194 |
| 2192 // Check that the function really is a function. Load map into the | 2195 // Check that the function really is a function. Load map into the |
| (...skipping 110 matching lines...) |
| 2303 | 2306 |
| 2304 // Load the result. | 2307 // Load the result. |
| 2305 __ ldrb(result, MemOperand(external_pointer, key)); | 2308 __ ldrb(result, MemOperand(external_pointer, key)); |
| 2306 } | 2309 } |
| 2307 | 2310 |
| 2308 | 2311 |
| 2309 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 2312 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 2310 ASSERT(ToRegister(instr->object()).is(r1)); | 2313 ASSERT(ToRegister(instr->object()).is(r1)); |
| 2311 ASSERT(ToRegister(instr->key()).is(r0)); | 2314 ASSERT(ToRegister(instr->key()).is(r0)); |
| 2312 | 2315 |
| 2313 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 2316 Handle<Code> ic(isolate()->builtins()->builtin( |
| | 2317 Builtins::KeyedLoadIC_Initialize)); |
| 2314 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2318 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2315 } | 2319 } |
| 2316 | 2320 |
| 2317 | 2321 |
| 2318 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 2322 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 2319 Register scratch = scratch0(); | 2323 Register scratch = scratch0(); |
| 2320 Register result = ToRegister(instr->result()); | 2324 Register result = ToRegister(instr->result()); |
| 2321 | 2325 |
| 2322 // Check if the calling frame is an arguments adaptor frame. | 2326 // Check if the calling frame is an arguments adaptor frame. |
| 2323 Label done, adapted; | 2327 Label done, adapted; |
| (...skipping 484 matching lines...) |
| 2808 Abort("Unimplemented type of LUnaryMathOperation."); | 2812 Abort("Unimplemented type of LUnaryMathOperation."); |
| 2809 UNREACHABLE(); | 2813 UNREACHABLE(); |
| 2810 } | 2814 } |
| 2811 } | 2815 } |
| 2812 | 2816 |
| 2813 | 2817 |
| 2814 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 2818 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
| 2815 ASSERT(ToRegister(instr->result()).is(r0)); | 2819 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2816 | 2820 |
| 2817 int arity = instr->arity(); | 2821 int arity = instr->arity(); |
| 2818 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); | 2822 Handle<Code> ic = |
| | 2823 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); |
| 2819 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2824 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2820 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2825 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2821 } | 2826 } |
| 2822 | 2827 |
| 2823 | 2828 |
| 2824 void LCodeGen::DoCallNamed(LCallNamed* instr) { | 2829 void LCodeGen::DoCallNamed(LCallNamed* instr) { |
| 2825 ASSERT(ToRegister(instr->result()).is(r0)); | 2830 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2826 | 2831 |
| 2827 int arity = instr->arity(); | 2832 int arity = instr->arity(); |
| 2828 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP); | 2833 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize( |
| | 2834 arity, NOT_IN_LOOP); |
| 2829 __ mov(r2, Operand(instr->name())); | 2835 __ mov(r2, Operand(instr->name())); |
| 2830 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2836 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2831 // Restore context register. | 2837 // Restore context register. |
| 2832 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2838 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2833 } | 2839 } |
| 2834 | 2840 |
| 2835 | 2841 |
| 2836 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 2842 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 2837 ASSERT(ToRegister(instr->result()).is(r0)); | 2843 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2838 | 2844 |
| 2839 int arity = instr->arity(); | 2845 int arity = instr->arity(); |
| 2840 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); | 2846 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); |
| 2841 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2847 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2842 __ Drop(1); | 2848 __ Drop(1); |
| 2843 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2849 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2844 } | 2850 } |
| 2845 | 2851 |
| 2846 | 2852 |
| 2847 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 2853 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
| 2848 ASSERT(ToRegister(instr->result()).is(r0)); | 2854 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2849 | 2855 |
| 2850 int arity = instr->arity(); | 2856 int arity = instr->arity(); |
| 2851 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP); | 2857 Handle<Code> ic = |
| | 2858 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP); |
| 2852 __ mov(r2, Operand(instr->name())); | 2859 __ mov(r2, Operand(instr->name())); |
| 2853 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); | 2860 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); |
| 2854 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2861 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2855 } | 2862 } |
| 2856 | 2863 |
| 2857 | 2864 |
| 2858 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 2865 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 2859 ASSERT(ToRegister(instr->result()).is(r0)); | 2866 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2860 __ mov(r1, Operand(instr->target())); | 2867 __ mov(r1, Operand(instr->target())); |
| 2861 CallKnownFunction(instr->target(), instr->arity(), instr); | 2868 CallKnownFunction(instr->target(), instr->arity(), instr); |
| 2862 } | 2869 } |
| 2863 | 2870 |
| 2864 | 2871 |
| 2865 void LCodeGen::DoCallNew(LCallNew* instr) { | 2872 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 2866 ASSERT(ToRegister(instr->InputAt(0)).is(r1)); | 2873 ASSERT(ToRegister(instr->InputAt(0)).is(r1)); |
| 2867 ASSERT(ToRegister(instr->result()).is(r0)); | 2874 ASSERT(ToRegister(instr->result()).is(r0)); |
| 2868 | 2875 |
| 2869 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall)); | 2876 Handle<Code> builtin(isolate()->builtins()->builtin( |
| | 2877 Builtins::JSConstructCall)); |
| 2870 __ mov(r0, Operand(instr->arity())); | 2878 __ mov(r0, Operand(instr->arity())); |
| 2871 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); | 2879 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); |
| 2872 } | 2880 } |
| 2873 | 2881 |
| 2874 | 2882 |
| 2875 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 2883 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 2876 CallRuntime(instr->function(), instr->arity(), instr); | 2884 CallRuntime(instr->function(), instr->arity(), instr); |
| 2877 } | 2885 } |
| 2878 | 2886 |
| 2879 | 2887 |
| (...skipping 28 matching lines...) |
| 2908 } | 2916 } |
| 2909 } | 2917 } |
| 2910 | 2918 |
| 2911 | 2919 |
| 2912 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 2920 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 2913 ASSERT(ToRegister(instr->object()).is(r1)); | 2921 ASSERT(ToRegister(instr->object()).is(r1)); |
| 2914 ASSERT(ToRegister(instr->value()).is(r0)); | 2922 ASSERT(ToRegister(instr->value()).is(r0)); |
| 2915 | 2923 |
| 2916 // Name is always in r2. | 2924 // Name is always in r2. |
| 2917 __ mov(r2, Operand(instr->name())); | 2925 __ mov(r2, Operand(instr->name())); |
| 2918 Handle<Code> ic(Builtins::builtin( | 2926 Handle<Code> ic(isolate()->builtins()->builtin( |
| 2919 info_->is_strict() ? Builtins::StoreIC_Initialize_Strict | 2927 info_->is_strict() ? Builtins::StoreIC_Initialize_Strict |
| 2920 : Builtins::StoreIC_Initialize)); | 2928 : Builtins::StoreIC_Initialize)); |
| 2921 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2929 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2922 } | 2930 } |
| 2923 | 2931 |
| 2924 | 2932 |
| 2925 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 2933 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
| 2926 __ cmp(ToRegister(instr->index()), ToRegister(instr->length())); | 2934 __ cmp(ToRegister(instr->index()), ToRegister(instr->length())); |
| 2927 DeoptimizeIf(hs, instr->environment()); | 2935 DeoptimizeIf(hs, instr->environment()); |
| 2928 } | 2936 } |
| (...skipping 34 matching lines...) |
| 2963 __ Usat(value, 8, Operand(value)); | 2971 __ Usat(value, 8, Operand(value)); |
| 2964 __ strb(value, MemOperand(external_pointer, key, LSL, 0)); | 2972 __ strb(value, MemOperand(external_pointer, key, LSL, 0)); |
| 2965 } | 2973 } |
| 2966 | 2974 |
| 2967 | 2975 |
| 2968 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 2976 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 2969 ASSERT(ToRegister(instr->object()).is(r2)); | 2977 ASSERT(ToRegister(instr->object()).is(r2)); |
| 2970 ASSERT(ToRegister(instr->key()).is(r1)); | 2978 ASSERT(ToRegister(instr->key()).is(r1)); |
| 2971 ASSERT(ToRegister(instr->value()).is(r0)); | 2979 ASSERT(ToRegister(instr->value()).is(r0)); |
| 2972 | 2980 |
| 2973 Handle<Code> ic(Builtins::builtin( | 2981 Handle<Code> ic(isolate()->builtins()->builtin( |
| 2974 info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict | 2982 info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict |
| 2975 : Builtins::KeyedStoreIC_Initialize)); | 2983 : Builtins::KeyedStoreIC_Initialize)); |
| 2976 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2984 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2977 } | 2985 } |
| 2978 | 2986 |
| 2979 | 2987 |
| 2980 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 2988 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 2981 class DeferredStringCharCodeAt: public LDeferredCode { | 2989 class DeferredStringCharCodeAt: public LDeferredCode { |
| 2982 public: | 2990 public: |
| 2983 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 2991 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| (...skipping 591 matching lines...) |
| 3575 ASSERT(input->IsRegister()); | 3583 ASSERT(input->IsRegister()); |
| 3576 Register reg = ToRegister(input); | 3584 Register reg = ToRegister(input); |
| 3577 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); | 3585 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 3578 __ cmp(scratch, Operand(instr->hydrogen()->map())); | 3586 __ cmp(scratch, Operand(instr->hydrogen()->map())); |
| 3579 DeoptimizeIf(ne, instr->environment()); | 3587 DeoptimizeIf(ne, instr->environment()); |
| 3580 } | 3588 } |
| 3581 | 3589 |
| 3582 | 3590 |
| 3583 void LCodeGen::LoadHeapObject(Register result, | 3591 void LCodeGen::LoadHeapObject(Register result, |
| 3584 Handle<HeapObject> object) { | 3592 Handle<HeapObject> object) { |
| 3585 if (Heap::InNewSpace(*object)) { | 3593 if (heap()->InNewSpace(*object)) { |
| 3586 Handle<JSGlobalPropertyCell> cell = | 3594 Handle<JSGlobalPropertyCell> cell = |
| 3587 Factory::NewJSGlobalPropertyCell(object); | 3595 factory()->NewJSGlobalPropertyCell(object); |
| 3588 __ mov(result, Operand(cell)); | 3596 __ mov(result, Operand(cell)); |
| 3589 __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset)); | 3597 __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset)); |
| 3590 } else { | 3598 } else { |
| 3591 __ mov(result, Operand(object)); | 3599 __ mov(result, Operand(object)); |
| 3592 } | 3600 } |
| 3593 } | 3601 } |
| 3594 | 3602 |
| 3595 | 3603 |
| 3596 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 3604 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| 3597 Register temp1 = ToRegister(instr->TempAt(0)); | 3605 Register temp1 = ToRegister(instr->TempAt(0)); |
| (...skipping 130 matching lines...) |
| 3728 bool pretenure = instr->hydrogen()->pretenure(); | 3736 bool pretenure = instr->hydrogen()->pretenure(); |
| 3729 if (!pretenure && shared_info->num_literals() == 0) { | 3737 if (!pretenure && shared_info->num_literals() == 0) { |
| 3730 FastNewClosureStub stub( | 3738 FastNewClosureStub stub( |
| 3731 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); | 3739 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); |
| 3732 __ mov(r1, Operand(shared_info)); | 3740 __ mov(r1, Operand(shared_info)); |
| 3733 __ push(r1); | 3741 __ push(r1); |
| 3734 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3742 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 3735 } else { | 3743 } else { |
| 3736 __ mov(r2, Operand(shared_info)); | 3744 __ mov(r2, Operand(shared_info)); |
| 3737 __ mov(r1, Operand(pretenure | 3745 __ mov(r1, Operand(pretenure |
| 3738 ? Factory::true_value() | 3746 ? factory()->true_value() |
| 3739 : Factory::false_value())); | 3747 : factory()->false_value())); |
| 3740 __ Push(cp, r2, r1); | 3748 __ Push(cp, r2, r1); |
| 3741 CallRuntime(Runtime::kNewClosure, 3, instr); | 3749 CallRuntime(Runtime::kNewClosure, 3, instr); |
| 3742 } | 3750 } |
| 3743 } | 3751 } |
| 3744 | 3752 |
| 3745 | 3753 |
| 3746 void LCodeGen::DoTypeof(LTypeof* instr) { | 3754 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 3747 Register input = ToRegister(instr->InputAt(0)); | 3755 Register input = ToRegister(instr->InputAt(0)); |
| 3748 __ push(input); | 3756 __ push(input); |
| 3749 CallRuntime(Runtime::kTypeof, 1, instr); | 3757 CallRuntime(Runtime::kTypeof, 1, instr); |
| (...skipping 38 matching lines...) |
| 3788 EmitBranch(true_block, false_block, final_branch_condition); | 3796 EmitBranch(true_block, false_block, final_branch_condition); |
| 3789 } | 3797 } |
| 3790 | 3798 |
| 3791 | 3799 |
| 3792 Condition LCodeGen::EmitTypeofIs(Label* true_label, | 3800 Condition LCodeGen::EmitTypeofIs(Label* true_label, |
| 3793 Label* false_label, | 3801 Label* false_label, |
| 3794 Register input, | 3802 Register input, |
| 3795 Handle<String> type_name) { | 3803 Handle<String> type_name) { |
| 3796 Condition final_branch_condition = kNoCondition; | 3804 Condition final_branch_condition = kNoCondition; |
| 3797 Register scratch = scratch0(); | 3805 Register scratch = scratch0(); |
| 3798 if (type_name->Equals(Heap::number_symbol())) { | 3806 if (type_name->Equals(heap()->number_symbol())) { |
| 3799 __ JumpIfSmi(input, true_label); | 3807 __ JumpIfSmi(input, true_label); |
| 3800 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset)); | 3808 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset)); |
| 3801 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 3809 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 3802 __ cmp(input, Operand(ip)); | 3810 __ cmp(input, Operand(ip)); |
| 3803 final_branch_condition = eq; | 3811 final_branch_condition = eq; |
| 3804 | 3812 |
| 3805 } else if (type_name->Equals(Heap::string_symbol())) { | 3813 } else if (type_name->Equals(heap()->string_symbol())) { |
| 3806 __ JumpIfSmi(input, false_label); | 3814 __ JumpIfSmi(input, false_label); |
| 3807 __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE); | 3815 __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE); |
| 3808 __ b(ge, false_label); | 3816 __ b(ge, false_label); |
| 3809 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); | 3817 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); |
| 3810 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 3818 __ tst(ip, Operand(1 << Map::kIsUndetectable)); |
| 3811 final_branch_condition = eq; | 3819 final_branch_condition = eq; |
| 3812 | 3820 |
| 3813 } else if (type_name->Equals(Heap::boolean_symbol())) { | 3821 } else if (type_name->Equals(heap()->boolean_symbol())) { |
| 3814 __ CompareRoot(input, Heap::kTrueValueRootIndex); | 3822 __ CompareRoot(input, Heap::kTrueValueRootIndex); |
| 3815 __ b(eq, true_label); | 3823 __ b(eq, true_label); |
| 3816 __ CompareRoot(input, Heap::kFalseValueRootIndex); | 3824 __ CompareRoot(input, Heap::kFalseValueRootIndex); |
| 3817 final_branch_condition = eq; | 3825 final_branch_condition = eq; |
| 3818 | 3826 |
| 3819 } else if (type_name->Equals(Heap::undefined_symbol())) { | 3827 } else if (type_name->Equals(heap()->undefined_symbol())) { |
| 3820 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); | 3828 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); |
| 3821 __ b(eq, true_label); | 3829 __ b(eq, true_label); |
| 3822 __ JumpIfSmi(input, false_label); | 3830 __ JumpIfSmi(input, false_label); |
| 3823 // Check for undetectable objects => true. | 3831 // Check for undetectable objects => true. |
| 3824 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset)); | 3832 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset)); |
| 3825 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); | 3833 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); |
| 3826 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 3834 __ tst(ip, Operand(1 << Map::kIsUndetectable)); |
| 3827 final_branch_condition = ne; | 3835 final_branch_condition = ne; |
| 3828 | 3836 |
| 3829 } else if (type_name->Equals(Heap::function_symbol())) { | 3837 } else if (type_name->Equals(heap()->function_symbol())) { |
| 3830 __ JumpIfSmi(input, false_label); | 3838 __ JumpIfSmi(input, false_label); |
| 3831 __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE); | 3839 __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE); |
| 3832 final_branch_condition = ge; | 3840 final_branch_condition = ge; |
| 3833 | 3841 |
| 3834 } else if (type_name->Equals(Heap::object_symbol())) { | 3842 } else if (type_name->Equals(heap()->object_symbol())) { |
| 3835 __ JumpIfSmi(input, false_label); | 3843 __ JumpIfSmi(input, false_label); |
| 3836 __ CompareRoot(input, Heap::kNullValueRootIndex); | 3844 __ CompareRoot(input, Heap::kNullValueRootIndex); |
| 3837 __ b(eq, true_label); | 3845 __ b(eq, true_label); |
| 3838 __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE); | 3846 __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE); |
| 3839 __ b(lo, false_label); | 3847 __ b(lo, false_label); |
| 3840 __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE); | 3848 __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE); |
| 3841 __ b(hs, false_label); | 3849 __ b(hs, false_label); |
| 3842 // Check for undetectable objects => false. | 3850 // Check for undetectable objects => false. |
| 3843 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); | 3851 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); |
| 3844 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 3852 __ tst(ip, Operand(1 << Map::kIsUndetectable)); |
| (...skipping 112 matching lines...) |
| 3957 ASSERT(!environment->HasBeenRegistered()); | 3965 ASSERT(!environment->HasBeenRegistered()); |
| 3958 RegisterEnvironmentForDeoptimization(environment); | 3966 RegisterEnvironmentForDeoptimization(environment); |
| 3959 ASSERT(osr_pc_offset_ == -1); | 3967 ASSERT(osr_pc_offset_ == -1); |
| 3960 osr_pc_offset_ = masm()->pc_offset(); | 3968 osr_pc_offset_ = masm()->pc_offset(); |
| 3961 } | 3969 } |
| 3962 | 3970 |
| 3963 | 3971 |
| 3964 #undef __ | 3972 #undef __ |
| 3965 | 3973 |
| 3966 } } // namespace v8::internal | 3974 } } // namespace v8::internal |