OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 828 matching lines...)
839 | 839 |
840 // Push the current return address before the C call. Return will be | 840 // Push the current return address before the C call. Return will be |
841 // through pop(pc) below. | 841 // through pop(pc) below. |
842 __ push(lr); | 842 __ push(lr); |
843 __ PrepareCallCFunction(0, 2, scratch); | 843 __ PrepareCallCFunction(0, 2, scratch); |
844 if (masm->use_eabi_hardfloat()) { | 844 if (masm->use_eabi_hardfloat()) { |
845 CpuFeatures::Scope scope(VFP3); | 845 CpuFeatures::Scope scope(VFP3); |
846 __ vmov(d0, r0, r1); | 846 __ vmov(d0, r0, r1); |
847 __ vmov(d1, r2, r3); | 847 __ vmov(d1, r2, r3); |
848 } | 848 } |
849 { | 849 // Call C routine that may not cause GC or other trouble. |
850 AllowExternalCallThatCantCauseGC scope(masm); | 850 __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()), |
851 __ CallCFunction( | 851 0, 2); |
852 ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2); | |
853 } | |
854 // Store answer in the overwritable heap number. Double returned in | 852 // Store answer in the overwritable heap number. Double returned in |
855 // registers r0 and r1 or in d0. | 853 // registers r0 and r1 or in d0. |
856 if (masm->use_eabi_hardfloat()) { | 854 if (masm->use_eabi_hardfloat()) { |
857 CpuFeatures::Scope scope(VFP3); | 855 CpuFeatures::Scope scope(VFP3); |
858 __ vstr(d0, | 856 __ vstr(d0, |
859 FieldMemOperand(heap_number_result, HeapNumber::kValueOffset)); | 857 FieldMemOperand(heap_number_result, HeapNumber::kValueOffset)); |
860 } else { | 858 } else { |
861 __ Strd(r0, r1, FieldMemOperand(heap_number_result, | 859 __ Strd(r0, r1, FieldMemOperand(heap_number_result, |
862 HeapNumber::kValueOffset)); | 860 HeapNumber::kValueOffset)); |
863 } | 861 } |
(...skipping 748 matching lines...)
1612 | 1610 |
1613 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1611 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
1614 // tagged as a small integer. | 1612 // tagged as a small integer. |
1615 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1613 __ InvokeBuiltin(native, JUMP_FUNCTION); |
1616 } | 1614 } |
1617 | 1615 |
1618 | 1616 |
1619 // This stub does not handle the inlined cases (Smis, Booleans, undefined). | 1617 // This stub does not handle the inlined cases (Smis, Booleans, undefined). |
1620 // The stub returns zero for false, and a non-zero value for true. | 1618 // The stub returns zero for false, and a non-zero value for true. |
1621 void ToBooleanStub::Generate(MacroAssembler* masm) { | 1619 void ToBooleanStub::Generate(MacroAssembler* masm) { |
1622 // This stub overrides SometimesSetsUpAFrame() to return false. That means | |
1623 // we cannot call anything that could cause a GC from this stub. | |
1624 // This stub uses VFP3 instructions. | 1620 // This stub uses VFP3 instructions. |
1625 CpuFeatures::Scope scope(VFP3); | 1621 CpuFeatures::Scope scope(VFP3); |
1626 | 1622 |
1627 Label false_result; | 1623 Label false_result; |
1628 Label not_heap_number; | 1624 Label not_heap_number; |
1629 Register scratch = r9.is(tos_) ? r7 : r9; | 1625 Register scratch = r9.is(tos_) ? r7 : r9; |
1630 | 1626 |
1631 // undefined -> false | 1627 // undefined -> false |
1632 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 1628 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
1633 __ cmp(tos_, ip); | 1629 __ cmp(tos_, ip); |
(...skipping 245 matching lines...)
1879 if (mode_ == UNARY_OVERWRITE) { | 1875 if (mode_ == UNARY_OVERWRITE) { |
1880 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | 1876 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
1881 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | 1877 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. |
1882 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | 1878 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
1883 } else { | 1879 } else { |
1884 Label slow_allocate_heapnumber, heapnumber_allocated; | 1880 Label slow_allocate_heapnumber, heapnumber_allocated; |
1885 __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber); | 1881 __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber); |
1886 __ jmp(&heapnumber_allocated); | 1882 __ jmp(&heapnumber_allocated); |
1887 | 1883 |
1888 __ bind(&slow_allocate_heapnumber); | 1884 __ bind(&slow_allocate_heapnumber); |
1889 { | 1885 __ EnterInternalFrame(); |
1890 FrameScope scope(masm, StackFrame::INTERNAL); | 1886 __ push(r0); |
1891 __ push(r0); | 1887 __ CallRuntime(Runtime::kNumberAlloc, 0); |
1892 __ CallRuntime(Runtime::kNumberAlloc, 0); | 1888 __ mov(r1, Operand(r0)); |
1893 __ mov(r1, Operand(r0)); | 1889 __ pop(r0); |
1894 __ pop(r0); | 1890 __ LeaveInternalFrame(); |
1895 } | |
1896 | 1891 |
1897 __ bind(&heapnumber_allocated); | 1892 __ bind(&heapnumber_allocated); |
1898 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); | 1893 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); |
1899 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | 1894 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
1900 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); | 1895 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); |
1901 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | 1896 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. |
1902 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); | 1897 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); |
1903 __ mov(r0, Operand(r1)); | 1898 __ mov(r0, Operand(r1)); |
1904 } | 1899 } |
1905 __ Ret(); | 1900 __ Ret(); |
(...skipping 20 matching lines...)
1926 | 1921 |
1927 // Try to store the result in a heap number. | 1922 // Try to store the result in a heap number. |
1928 __ bind(&try_float); | 1923 __ bind(&try_float); |
1929 if (mode_ == UNARY_NO_OVERWRITE) { | 1924 if (mode_ == UNARY_NO_OVERWRITE) { |
1930 Label slow_allocate_heapnumber, heapnumber_allocated; | 1925 Label slow_allocate_heapnumber, heapnumber_allocated; |
1931 // Allocate a new heap number without zapping r0, which we need if it fails. | 1926 // Allocate a new heap number without zapping r0, which we need if it fails. |
1932 __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber); | 1927 __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber); |
1933 __ jmp(&heapnumber_allocated); | 1928 __ jmp(&heapnumber_allocated); |
1934 | 1929 |
1935 __ bind(&slow_allocate_heapnumber); | 1930 __ bind(&slow_allocate_heapnumber); |
1936 { | 1931 __ EnterInternalFrame(); |
1937 FrameScope scope(masm, StackFrame::INTERNAL); | 1932 __ push(r0); // Push the heap number, not the untagged int32. |
1938 __ push(r0); // Push the heap number, not the untagged int32. | 1933 __ CallRuntime(Runtime::kNumberAlloc, 0); |
1939 __ CallRuntime(Runtime::kNumberAlloc, 0); | 1934 __ mov(r2, r0); // Move the new heap number into r2. |
1940 __ mov(r2, r0); // Move the new heap number into r2. | 1935 // Get the heap number into r0, now that the new heap number is in r2. |
1941 // Get the heap number into r0, now that the new heap number is in r2. | 1936 __ pop(r0); |
1942 __ pop(r0); | 1937 __ LeaveInternalFrame(); |
1943 } | |
1944 | 1938 |
1945 // Convert the heap number in r0 to an untagged integer in r1. | 1939 // Convert the heap number in r0 to an untagged integer in r1. |
1946 // This can't go slow-case because it's the same number we already | 1940 // This can't go slow-case because it's the same number we already |
1947 // converted once again. | 1941 // converted once again. |
1948 __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible); | 1942 __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible); |
1949 __ mvn(r1, Operand(r1)); | 1943 __ mvn(r1, Operand(r1)); |
1950 | 1944 |
1951 __ bind(&heapnumber_allocated); | 1945 __ bind(&heapnumber_allocated); |
1952 __ mov(r0, r2); // Move newly allocated heap number to r0. | 1946 __ mov(r0, r2); // Move newly allocated heap number to r0. |
1953 } | 1947 } |
(...skipping 1208 matching lines...)
3162 __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset)); | 3156 __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset)); |
3163 __ stm(ia, cache_entry, r2.bit() | r3.bit() | r6.bit()); | 3157 __ stm(ia, cache_entry, r2.bit() | r3.bit() | r6.bit()); |
3164 __ Ret(); | 3158 __ Ret(); |
3165 | 3159 |
3166 __ bind(&invalid_cache); | 3160 __ bind(&invalid_cache); |
3167 // The cache is invalid. Call runtime which will recreate the | 3161 // The cache is invalid. Call runtime which will recreate the |
3168 // cache. | 3162 // cache. |
3169 __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex); | 3163 __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex); |
3170 __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache); | 3164 __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache); |
3171 __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); | 3165 __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
3172 { | 3166 __ EnterInternalFrame(); |
3173 FrameScope scope(masm, StackFrame::INTERNAL); | 3167 __ push(r0); |
3174 __ push(r0); | 3168 __ CallRuntime(RuntimeFunction(), 1); |
3175 __ CallRuntime(RuntimeFunction(), 1); | 3169 __ LeaveInternalFrame(); |
3176 } | |
3177 __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); | 3170 __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
3178 __ Ret(); | 3171 __ Ret(); |
3179 | 3172 |
3180 __ bind(&skip_cache); | 3173 __ bind(&skip_cache); |
3181 // Call C function to calculate the result and answer directly | 3174 // Call C function to calculate the result and answer directly |
3182 // without updating the cache. | 3175 // without updating the cache. |
3183 GenerateCallCFunction(masm, scratch0); | 3176 GenerateCallCFunction(masm, scratch0); |
3184 __ GetCFunctionDoubleResult(d2); | 3177 __ GetCFunctionDoubleResult(d2); |
3185 __ bind(&no_update); | 3178 __ bind(&no_update); |
3186 | 3179 |
3187 // We return the value in d2 without adding it to the cache, but | 3180 // We return the value in d2 without adding it to the cache, but |
3188 // we cause a scavenging GC so that future allocations will succeed. | 3181 // we cause a scavenging GC so that future allocations will succeed. |
3189 { | 3182 __ EnterInternalFrame(); |
3190 FrameScope scope(masm, StackFrame::INTERNAL); | |
3191 | 3183 |
3192 // Allocate an aligned object larger than a HeapNumber. | 3184 // Allocate an aligned object larger than a HeapNumber. |
3193 ASSERT(4 * kPointerSize >= HeapNumber::kSize); | 3185 ASSERT(4 * kPointerSize >= HeapNumber::kSize); |
3194 __ mov(scratch0, Operand(4 * kPointerSize)); | 3186 __ mov(scratch0, Operand(4 * kPointerSize)); |
3195 __ push(scratch0); | 3187 __ push(scratch0); |
3196 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); | 3188 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
3197 } | 3189 __ LeaveInternalFrame(); |
3198 __ Ret(); | 3190 __ Ret(); |
3199 } | 3191 } |
3200 } | 3192 } |
3201 | 3193 |
3202 | 3194 |
3203 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm, | 3195 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm, |
3204 Register scratch) { | 3196 Register scratch) { |
3205 Isolate* isolate = masm->isolate(); | 3197 Isolate* isolate = masm->isolate(); |
3206 | 3198 |
3207 __ push(lr); | 3199 __ push(lr); |
(...skipping 91 matching lines...)
3299 // C function for integer exponents. The register containing | 3291 // C function for integer exponents. The register containing |
3300 // the heap number is callee-saved. | 3292 // the heap number is callee-saved. |
3301 __ AllocateHeapNumber(heapnumber, | 3293 __ AllocateHeapNumber(heapnumber, |
3302 scratch, | 3294 scratch, |
3303 scratch2, | 3295 scratch2, |
3304 heapnumbermap, | 3296 heapnumbermap, |
3305 &call_runtime); | 3297 &call_runtime); |
3306 __ push(lr); | 3298 __ push(lr); |
3307 __ PrepareCallCFunction(1, 1, scratch); | 3299 __ PrepareCallCFunction(1, 1, scratch); |
3308 __ SetCallCDoubleArguments(double_base, exponent); | 3300 __ SetCallCDoubleArguments(double_base, exponent); |
3309 { | 3301 __ CallCFunction( |
3310 AllowExternalCallThatCantCauseGC scope(masm); | 3302 ExternalReference::power_double_int_function(masm->isolate()), |
3311 __ CallCFunction( | 3303 1, 1); |
3312 ExternalReference::power_double_int_function(masm->isolate()), | 3304 __ pop(lr); |
3313 1, 1); | 3305 __ GetCFunctionDoubleResult(double_result); |
3314 __ pop(lr); | |
3315 __ GetCFunctionDoubleResult(double_result); | |
3316 } | |
3317 __ vstr(double_result, | 3306 __ vstr(double_result, |
3318 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); | 3307 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); |
3319 __ mov(r0, heapnumber); | 3308 __ mov(r0, heapnumber); |
3320 __ Ret(2 * kPointerSize); | 3309 __ Ret(2 * kPointerSize); |
3321 | 3310 |
3322 __ bind(&exponent_not_smi); | 3311 __ bind(&exponent_not_smi); |
3323 __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset)); | 3312 __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset)); |
3324 __ cmp(scratch, heapnumbermap); | 3313 __ cmp(scratch, heapnumbermap); |
3325 __ b(ne, &call_runtime); | 3314 __ b(ne, &call_runtime); |
3326 // Exponent is a heapnumber. Load it into double register. | 3315 // Exponent is a heapnumber. Load it into double register. |
3327 __ vldr(double_exponent, | 3316 __ vldr(double_exponent, |
3328 FieldMemOperand(exponent, HeapNumber::kValueOffset)); | 3317 FieldMemOperand(exponent, HeapNumber::kValueOffset)); |
3329 | 3318 |
3330 // The base and the exponent are in double registers. | 3319 // The base and the exponent are in double registers. |
3331 // Allocate a heap number and call a C function for | 3320 // Allocate a heap number and call a C function for |
3332 // double exponents. The register containing | 3321 // double exponents. The register containing |
3333 // the heap number is callee-saved. | 3322 // the heap number is callee-saved. |
3334 __ AllocateHeapNumber(heapnumber, | 3323 __ AllocateHeapNumber(heapnumber, |
3335 scratch, | 3324 scratch, |
3336 scratch2, | 3325 scratch2, |
3337 heapnumbermap, | 3326 heapnumbermap, |
3338 &call_runtime); | 3327 &call_runtime); |
3339 __ push(lr); | 3328 __ push(lr); |
3340 __ PrepareCallCFunction(0, 2, scratch); | 3329 __ PrepareCallCFunction(0, 2, scratch); |
3341 __ SetCallCDoubleArguments(double_base, double_exponent); | 3330 __ SetCallCDoubleArguments(double_base, double_exponent); |
3342 { | 3331 __ CallCFunction( |
3343 AllowExternalCallThatCantCauseGC scope(masm); | 3332 ExternalReference::power_double_double_function(masm->isolate()), |
3344 __ CallCFunction( | 3333 0, 2); |
3345 ExternalReference::power_double_double_function(masm->isolate()), | 3334 __ pop(lr); |
3346 0, 2); | 3335 __ GetCFunctionDoubleResult(double_result); |
3347 __ pop(lr); | |
3348 __ GetCFunctionDoubleResult(double_result); | |
3349 } | |
3350 __ vstr(double_result, | 3336 __ vstr(double_result, |
3351 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); | 3337 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); |
3352 __ mov(r0, heapnumber); | 3338 __ mov(r0, heapnumber); |
3353 __ Ret(2 * kPointerSize); | 3339 __ Ret(2 * kPointerSize); |
3354 } | 3340 } |
3355 | 3341 |
3356 __ bind(&call_runtime); | 3342 __ bind(&call_runtime); |
3357 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); | 3343 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
3358 } | 3344 } |
3359 | 3345 |
(...skipping 151 matching lines...)
3511 // NOTE: Invocations of builtins may return failure objects | 3497 // NOTE: Invocations of builtins may return failure objects |
3512 // instead of a proper result. The builtin entry handles | 3498 // instead of a proper result. The builtin entry handles |
3513 // this by performing a garbage collection and retrying the | 3499 // this by performing a garbage collection and retrying the |
3514 // builtin once. | 3500 // builtin once. |
3515 | 3501 |
3516 // Compute the argv pointer in a callee-saved register. | 3502 // Compute the argv pointer in a callee-saved register. |
3517 __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2)); | 3503 __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2)); |
3518 __ sub(r6, r6, Operand(kPointerSize)); | 3504 __ sub(r6, r6, Operand(kPointerSize)); |
3519 | 3505 |
3520 // Enter the exit frame that transitions from JavaScript to C++. | 3506 // Enter the exit frame that transitions from JavaScript to C++. |
3521 FrameScope scope(masm, StackFrame::MANUAL); | |
3522 __ EnterExitFrame(save_doubles_); | 3507 __ EnterExitFrame(save_doubles_); |
3523 | 3508 |
3524 // Setup argc and the builtin function in callee-saved registers. | 3509 // Setup argc and the builtin function in callee-saved registers. |
3525 __ mov(r4, Operand(r0)); | 3510 __ mov(r4, Operand(r0)); |
3526 __ mov(r5, Operand(r1)); | 3511 __ mov(r5, Operand(r1)); |
3527 | 3512 |
3528 // r4: number of arguments (C callee-saved) | 3513 // r4: number of arguments (C callee-saved) |
3529 // r5: pointer to builtin function (C callee-saved) | 3514 // r5: pointer to builtin function (C callee-saved) |
3530 // r6: pointer to first argument (C callee-saved) | 3515 // r6: pointer to first argument (C callee-saved) |
3531 | 3516 |
(...skipping 345 matching lines...)
3877 __ Ret(HasArgsInRegisters() ? 0 : 2); | 3862 __ Ret(HasArgsInRegisters() ? 0 : 2); |
3878 | 3863 |
3879 // Slow-case. Tail call builtin. | 3864 // Slow-case. Tail call builtin. |
3880 __ bind(&slow); | 3865 __ bind(&slow); |
3881 if (!ReturnTrueFalseObject()) { | 3866 if (!ReturnTrueFalseObject()) { |
3882 if (HasArgsInRegisters()) { | 3867 if (HasArgsInRegisters()) { |
3883 __ Push(r0, r1); | 3868 __ Push(r0, r1); |
3884 } | 3869 } |
3885 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3870 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
3886 } else { | 3871 } else { |
3887 { | 3872 __ EnterInternalFrame(); |
3888 FrameScope scope(masm, StackFrame::INTERNAL); | 3873 __ Push(r0, r1); |
3889 __ Push(r0, r1); | 3874 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); |
3890 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | 3875 __ LeaveInternalFrame(); |
3891 } | |
3892 __ cmp(r0, Operand(0)); | 3876 __ cmp(r0, Operand(0)); |
3893 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); | 3877 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); |
3894 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); | 3878 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); |
3895 __ Ret(HasArgsInRegisters() ? 0 : 2); | 3879 __ Ret(HasArgsInRegisters() ? 0 : 2); |
3896 } | 3880 } |
3897 } | 3881 } |
3898 | 3882 |
3899 | 3883 |
3900 Register InstanceofStub::left() { return r0; } | 3884 Register InstanceofStub::left() { return r0; } |
3901 | 3885 |
(...skipping 2244 matching lines...)
6146 } | 6130 } |
6147 | 6131 |
6148 | 6132 |
6149 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 6133 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
6150 __ Push(r1, r0); | 6134 __ Push(r1, r0); |
6151 __ push(lr); | 6135 __ push(lr); |
6152 | 6136 |
6153 // Call the runtime system in a fresh internal frame. | 6137 // Call the runtime system in a fresh internal frame. |
6154 ExternalReference miss = | 6138 ExternalReference miss = |
6155 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); | 6139 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); |
6156 { | 6140 __ EnterInternalFrame(); |
6157 FrameScope scope(masm, StackFrame::INTERNAL); | 6141 __ Push(r1, r0); |
6158 __ Push(r1, r0); | 6142 __ mov(ip, Operand(Smi::FromInt(op_))); |
6159 __ mov(ip, Operand(Smi::FromInt(op_))); | 6143 __ push(ip); |
6160 __ push(ip); | 6144 __ CallExternalReference(miss, 3); |
6161 __ CallExternalReference(miss, 3); | 6145 __ LeaveInternalFrame(); |
6162 } | |
6163 // Compute the entry point of the rewritten stub. | 6146 // Compute the entry point of the rewritten stub. |
6164 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 6147 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
6165 // Restore registers. | 6148 // Restore registers. |
6166 __ pop(lr); | 6149 __ pop(lr); |
6167 __ pop(r0); | 6150 __ pop(r0); |
6168 __ pop(r1); | 6151 __ pop(r1); |
6169 __ Jump(r2); | 6152 __ Jump(r2); |
6170 } | 6153 } |
6171 | 6154 |
6172 | 6155 |
(...skipping 160 matching lines...)
6333 __ tst(r0, Operand(r0)); | 6316 __ tst(r0, Operand(r0)); |
6334 __ mov(scratch2, Operand(r2)); | 6317 __ mov(scratch2, Operand(r2)); |
6335 __ ldm(ia_w, sp, spill_mask); | 6318 __ ldm(ia_w, sp, spill_mask); |
6336 | 6319 |
6337 __ b(ne, done); | 6320 __ b(ne, done); |
6338 __ b(eq, miss); | 6321 __ b(eq, miss); |
6339 } | 6322 } |
6340 | 6323 |
6341 | 6324 |
6342 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 6325 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
6343 // This stub overrides SometimesSetsUpAFrame() to return false. That means | |
6344 // we cannot call anything that could cause a GC from this stub. | |
6345 // Registers: | 6326 // Registers: |
6346 // result: StringDictionary to probe | 6327 // result: StringDictionary to probe |
6347 // r1: key | 6328 // r1: key |
6348 // : StringDictionary to probe. | 6329 // : StringDictionary to probe. |
6349 // index_: will hold an index of entry if lookup is successful. | 6330 // index_: will hold an index of entry if lookup is successful. |
6350 // might alias with result_. | 6331 // might alias with result_. |
6351 // Returns: | 6332 // Returns: |
6352 // result_ is zero if lookup failed, non zero otherwise. | 6333 // result_ is zero if lookup failed, non zero otherwise. |
6353 | 6334 |
6354 Register result = r0; | 6335 Register result = r0; |
(...skipping 74 matching lines...)
6429 __ mov(result, Operand(0)); | 6410 __ mov(result, Operand(0)); |
6430 __ Ret(); | 6411 __ Ret(); |
6431 } | 6412 } |
6432 | 6413 |
6433 | 6414 |
6434 #undef __ | 6415 #undef __ |
6435 | 6416 |
6436 } } // namespace v8::internal | 6417 } } // namespace v8::internal |
6437 | 6418 |
6438 #endif // V8_TARGET_ARCH_ARM | 6419 #endif // V8_TARGET_ARCH_ARM |