OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 828 matching lines...)
839 | 839 |
840 // Push the current return address before the C call. Return will be | 840 // Push the current return address before the C call. Return will be |
841 // through pop(pc) below. | 841 // through pop(pc) below. |
842 __ push(lr); | 842 __ push(lr); |
843 __ PrepareCallCFunction(0, 2, scratch); | 843 __ PrepareCallCFunction(0, 2, scratch); |
844 if (masm->use_eabi_hardfloat()) { | 844 if (masm->use_eabi_hardfloat()) { |
845 CpuFeatures::Scope scope(VFP3); | 845 CpuFeatures::Scope scope(VFP3); |
846 __ vmov(d0, r0, r1); | 846 __ vmov(d0, r0, r1); |
847 __ vmov(d1, r2, r3); | 847 __ vmov(d1, r2, r3); |
848 } | 848 } |
849 // Call C routine that may not cause GC or other trouble. | 849 { |
850 __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()), | 850 AllowExternalCallThatCantCauseGC scope(masm); |
851 0, 2); | 851 __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()), |
| 852 0, 2); |
| 853 } |
852 // Store answer in the overwritable heap number. Double returned in | 854 // Store answer in the overwritable heap number. Double returned in |
853 // registers r0 and r1 or in d0. | 855 // registers r0 and r1 or in d0. |
854 if (masm->use_eabi_hardfloat()) { | 856 if (masm->use_eabi_hardfloat()) { |
855 CpuFeatures::Scope scope(VFP3); | 857 CpuFeatures::Scope scope(VFP3); |
856 __ vstr(d0, | 858 __ vstr(d0, |
857 FieldMemOperand(heap_number_result, HeapNumber::kValueOffset)); | 859 FieldMemOperand(heap_number_result, HeapNumber::kValueOffset)); |
858 } else { | 860 } else { |
859 __ Strd(r0, r1, FieldMemOperand(heap_number_result, | 861 __ Strd(r0, r1, FieldMemOperand(heap_number_result, |
860 HeapNumber::kValueOffset)); | 862 HeapNumber::kValueOffset)); |
861 } | 863 } |
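The new block around CallCFunction pairs the call with an AllowExternalCallThatCantCauseGC scope: while the scope is live, the external call is declared GC-free, so the usual "calls need a frame" requirement can be relaxed for exactly that call. A minimal, self-contained sketch of the RAII idea follows; the class AllowGcFreeCallScope, the allow_call_without_frame flag, and the stand-in CallCFunction are illustrative names, not V8's actual MacroAssembler API.

    #include <cassert>

    // Stand-in for the assembler state the scope manipulates.
    struct MacroAssembler {
      bool allow_call_without_frame = false;
    };

    // Sketch of the idea behind AllowExternalCallThatCantCauseGC: inside this
    // scope an external C call is declared unable to trigger a GC, so the
    // frame check is relaxed for the wrapped call only.
    class AllowGcFreeCallScope {
     public:
      explicit AllowGcFreeCallScope(MacroAssembler* masm) : masm_(masm) {
        old_ = masm_->allow_call_without_frame;
        masm_->allow_call_without_frame = true;
      }
      ~AllowGcFreeCallScope() { masm_->allow_call_without_frame = old_; }
     private:
      MacroAssembler* masm_;
      bool old_;
    };

    void CallCFunction(MacroAssembler* masm) {
      // A real assembler would assert has_frame || allow_call_without_frame.
      assert(masm->allow_call_without_frame);
    }

    int main() {
      MacroAssembler masm;
      {
        AllowGcFreeCallScope scope(&masm);  // mirrors the block added above
        CallCFunction(&masm);               // the double_fp_operation call
      }                                     // flag restored on scope exit
    }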
(...skipping 748 matching lines...)
1610 | 1612 |
1611 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 1613 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
1612 // tagged as a small integer. | 1614 // tagged as a small integer. |
1613 __ InvokeBuiltin(native, JUMP_FUNCTION); | 1615 __ InvokeBuiltin(native, JUMP_FUNCTION); |
1614 } | 1616 } |
1615 | 1617 |
1616 | 1618 |
1617 // This stub does not handle the inlined cases (Smis, Booleans, undefined). | 1619 // This stub does not handle the inlined cases (Smis, Booleans, undefined). |
1618 // The stub returns zero for false, and a non-zero value for true. | 1620 // The stub returns zero for false, and a non-zero value for true. |
1619 void ToBooleanStub::Generate(MacroAssembler* masm) { | 1621 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 1622 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 1623 // we cannot call anything that could cause a GC from this stub. |
1620 // This stub uses VFP3 instructions. | 1624 // This stub uses VFP3 instructions. |
1621 CpuFeatures::Scope scope(VFP3); | 1625 CpuFeatures::Scope scope(VFP3); |
1622 | 1626 |
1623 Label false_result; | 1627 Label false_result; |
1624 Label not_heap_number; | 1628 Label not_heap_number; |
1625 Register scratch = r9.is(tos_) ? r7 : r9; | 1629 Register scratch = r9.is(tos_) ? r7 : r9; |
1626 | 1630 |
1627 // undefined -> false | 1631 // undefined -> false |
1628 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 1632 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
1629 __ cmp(tos_, ip); | 1633 __ cmp(tos_, ip); |
(...skipping 245 matching lines...)
1875 if (mode_ == UNARY_OVERWRITE) { | 1879 if (mode_ == UNARY_OVERWRITE) { |
1876 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | 1880 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
1877 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | 1881 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. |
1878 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | 1882 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
1879 } else { | 1883 } else { |
1880 Label slow_allocate_heapnumber, heapnumber_allocated; | 1884 Label slow_allocate_heapnumber, heapnumber_allocated; |
1881 __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber); | 1885 __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber); |
1882 __ jmp(&heapnumber_allocated); | 1886 __ jmp(&heapnumber_allocated); |
1883 | 1887 |
1884 __ bind(&slow_allocate_heapnumber); | 1888 __ bind(&slow_allocate_heapnumber); |
1885 __ EnterInternalFrame(); | 1889 { |
1886 __ push(r0); | 1890 FrameScope scope(masm, StackFrame::INTERNAL); |
1887 __ CallRuntime(Runtime::kNumberAlloc, 0); | 1891 __ push(r0); |
1888 __ mov(r1, Operand(r0)); | 1892 __ CallRuntime(Runtime::kNumberAlloc, 0); |
1889 __ pop(r0); | 1893 __ mov(r1, Operand(r0)); |
1890 __ LeaveInternalFrame(); | 1894 __ pop(r0); |
| 1895 } |
1891 | 1896 |
1892 __ bind(&heapnumber_allocated); | 1897 __ bind(&heapnumber_allocated); |
1893 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); | 1898 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); |
1894 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | 1899 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
1895 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); | 1900 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); |
1896 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | 1901 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. |
1897 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); | 1902 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); |
1898 __ mov(r0, Operand(r1)); | 1903 __ mov(r0, Operand(r1)); |
1899 } | 1904 } |
1900 __ Ret(); | 1905 __ Ret(); |
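The FrameScope block that replaces the EnterInternalFrame()/LeaveInternalFrame() pair above is an RAII helper: frame entry is emitted by the constructor and frame exit by the destructor, so the two can no longer drift apart across early exits or later edits. A minimal sketch of that shape, assuming a simplified assembler; FrameScopeSketch and the EnterFrame/LeaveFrame printers are stand-ins, not the real V8 classes.

    #include <cstdio>

    // Stand-in for v8::internal::MacroAssembler; Enter/Leave just print here.
    struct MacroAssembler {
      void EnterFrame(const char* type) { std::printf("enter %s frame\n", type); }
      void LeaveFrame(const char* type) { std::printf("leave %s frame\n", type); }
    };

    // RAII helper: frame entry in the constructor, frame exit in the destructor.
    class FrameScopeSketch {
     public:
      FrameScopeSketch(MacroAssembler* masm, const char* type)
          : masm_(masm), type_(type) { masm_->EnterFrame(type_); }
      ~FrameScopeSketch() { masm_->LeaveFrame(type_); }
     private:
      MacroAssembler* masm_;
      const char* type_;
    };

    int main() {
      MacroAssembler masm;
      {
        FrameScopeSketch scope(&masm, "INTERNAL");  // emits frame entry
        // ... generate code that may call the runtime / cause GC ...
      }                                             // emits frame exit here
    }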
(...skipping 20 matching lines...)
1921 | 1926 |
1922 // Try to store the result in a heap number. | 1927 // Try to store the result in a heap number. |
1923 __ bind(&try_float); | 1928 __ bind(&try_float); |
1924 if (mode_ == UNARY_NO_OVERWRITE) { | 1929 if (mode_ == UNARY_NO_OVERWRITE) { |
1925 Label slow_allocate_heapnumber, heapnumber_allocated; | 1930 Label slow_allocate_heapnumber, heapnumber_allocated; |
1926 // Allocate a new heap number without zapping r0, which we need if it fails. | 1931 // Allocate a new heap number without zapping r0, which we need if it fails. |
1927 __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber); | 1932 __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber); |
1928 __ jmp(&heapnumber_allocated); | 1933 __ jmp(&heapnumber_allocated); |
1929 | 1934 |
1930 __ bind(&slow_allocate_heapnumber); | 1935 __ bind(&slow_allocate_heapnumber); |
1931 __ EnterInternalFrame(); | 1936 { |
1932 __ push(r0); // Push the heap number, not the untagged int32. | 1937 FrameScope scope(masm, StackFrame::INTERNAL); |
1933 __ CallRuntime(Runtime::kNumberAlloc, 0); | 1938 __ push(r0); // Push the heap number, not the untagged int32. |
1934 __ mov(r2, r0); // Move the new heap number into r2. | 1939 __ CallRuntime(Runtime::kNumberAlloc, 0); |
1935 // Get the heap number into r0, now that the new heap number is in r2. | 1940 __ mov(r2, r0); // Move the new heap number into r2. |
1936 __ pop(r0); | 1941 // Get the heap number into r0, now that the new heap number is in r2. |
1937 __ LeaveInternalFrame(); | 1942 __ pop(r0); |
| 1943 } |
1938 | 1944 |
1939 // Convert the heap number in r0 to an untagged integer in r1. | 1945 // Convert the heap number in r0 to an untagged integer in r1. |
1940 // This can't go slow-case because it's the same number we already | 1946 // This can't go slow-case because it's the same number we already |
1941 // converted once again. | 1947 // converted once again. |
1942 __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible); | 1948 __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible); |
1943 __ mvn(r1, Operand(r1)); | 1949 __ mvn(r1, Operand(r1)); |
1944 | 1950 |
1945 __ bind(&heapnumber_allocated); | 1951 __ bind(&heapnumber_allocated); |
1946 __ mov(r0, r2); // Move newly allocated heap number to r0. | 1952 __ mov(r0, r2); // Move newly allocated heap number to r0. |
1947 } | 1953 } |
(...skipping 1208 matching lines...)
3156 __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset)); | 3162 __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset)); |
3157 __ stm(ia, cache_entry, r2.bit() | r3.bit() | r6.bit()); | 3163 __ stm(ia, cache_entry, r2.bit() | r3.bit() | r6.bit()); |
3158 __ Ret(); | 3164 __ Ret(); |
3159 | 3165 |
3160 __ bind(&invalid_cache); | 3166 __ bind(&invalid_cache); |
3161 // The cache is invalid. Call runtime which will recreate the | 3167 // The cache is invalid. Call runtime which will recreate the |
3162 // cache. | 3168 // cache. |
3163 __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex); | 3169 __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex); |
3164 __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache); | 3170 __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache); |
3165 __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); | 3171 __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
3166 __ EnterInternalFrame(); | 3172 { |
3167 __ push(r0); | 3173 FrameScope scope(masm, StackFrame::INTERNAL); |
3168 __ CallRuntime(RuntimeFunction(), 1); | 3174 __ push(r0); |
3169 __ LeaveInternalFrame(); | 3175 __ CallRuntime(RuntimeFunction(), 1); |
| 3176 } |
3170 __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); | 3177 __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
3171 __ Ret(); | 3178 __ Ret(); |
3172 | 3179 |
3173 __ bind(&skip_cache); | 3180 __ bind(&skip_cache); |
3174 // Call C function to calculate the result and answer directly | 3181 // Call C function to calculate the result and answer directly |
3175 // without updating the cache. | 3182 // without updating the cache. |
3176 GenerateCallCFunction(masm, scratch0); | 3183 GenerateCallCFunction(masm, scratch0); |
3177 __ GetCFunctionDoubleResult(d2); | 3184 __ GetCFunctionDoubleResult(d2); |
3178 __ bind(&no_update); | 3185 __ bind(&no_update); |
3179 | 3186 |
3180 // We return the value in d2 without adding it to the cache, but | 3187 // We return the value in d2 without adding it to the cache, but |
3181 // we cause a scavenging GC so that future allocations will succeed. | 3188 // we cause a scavenging GC so that future allocations will succeed. |
3182 __ EnterInternalFrame(); | 3189 { |
| 3190 FrameScope scope(masm, StackFrame::INTERNAL); |
3183 | 3191 |
3184 // Allocate an aligned object larger than a HeapNumber. | 3192 // Allocate an aligned object larger than a HeapNumber. |
3185 ASSERT(4 * kPointerSize >= HeapNumber::kSize); | 3193 ASSERT(4 * kPointerSize >= HeapNumber::kSize); |
3186 __ mov(scratch0, Operand(4 * kPointerSize)); | 3194 __ mov(scratch0, Operand(4 * kPointerSize)); |
3187 __ push(scratch0); | 3195 __ push(scratch0); |
3188 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); | 3196 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
3189 __ LeaveInternalFrame(); | 3197 } |
3190 __ Ret(); | 3198 __ Ret(); |
3191 } | 3199 } |
3192 } | 3200 } |
3193 | 3201 |
3194 | 3202 |
3195 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm, | 3203 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm, |
3196 Register scratch) { | 3204 Register scratch) { |
3197 Isolate* isolate = masm->isolate(); | 3205 Isolate* isolate = masm->isolate(); |
3198 | 3206 |
3199 __ push(lr); | 3207 __ push(lr); |
(...skipping 91 matching lines...)
3291 // C function for integer exponents. The register containing | 3299 // C function for integer exponents. The register containing |
3292 // the heap number is callee-saved. | 3300 // the heap number is callee-saved. |
3293 __ AllocateHeapNumber(heapnumber, | 3301 __ AllocateHeapNumber(heapnumber, |
3294 scratch, | 3302 scratch, |
3295 scratch2, | 3303 scratch2, |
3296 heapnumbermap, | 3304 heapnumbermap, |
3297 &call_runtime); | 3305 &call_runtime); |
3298 __ push(lr); | 3306 __ push(lr); |
3299 __ PrepareCallCFunction(1, 1, scratch); | 3307 __ PrepareCallCFunction(1, 1, scratch); |
3300 __ SetCallCDoubleArguments(double_base, exponent); | 3308 __ SetCallCDoubleArguments(double_base, exponent); |
3301 __ CallCFunction( | 3309 { |
3302 ExternalReference::power_double_int_function(masm->isolate()), | 3310 AllowExternalCallThatCantCauseGC scope(masm); |
3303 1, 1); | 3311 __ CallCFunction( |
3304 __ pop(lr); | 3312 ExternalReference::power_double_int_function(masm->isolate()), |
3305 __ GetCFunctionDoubleResult(double_result); | 3313 1, 1); |
| 3314 __ pop(lr); |
| 3315 __ GetCFunctionDoubleResult(double_result); |
| 3316 } |
3306 __ vstr(double_result, | 3317 __ vstr(double_result, |
3307 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); | 3318 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); |
3308 __ mov(r0, heapnumber); | 3319 __ mov(r0, heapnumber); |
3309 __ Ret(2 * kPointerSize); | 3320 __ Ret(2 * kPointerSize); |
3310 | 3321 |
3311 __ bind(&exponent_not_smi); | 3322 __ bind(&exponent_not_smi); |
3312 __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset)); | 3323 __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset)); |
3313 __ cmp(scratch, heapnumbermap); | 3324 __ cmp(scratch, heapnumbermap); |
3314 __ b(ne, &call_runtime); | 3325 __ b(ne, &call_runtime); |
3315 // Exponent is a heapnumber. Load it into double register. | 3326 // Exponent is a heapnumber. Load it into double register. |
3316 __ vldr(double_exponent, | 3327 __ vldr(double_exponent, |
3317 FieldMemOperand(exponent, HeapNumber::kValueOffset)); | 3328 FieldMemOperand(exponent, HeapNumber::kValueOffset)); |
3318 | 3329 |
3319 // The base and the exponent are in double registers. | 3330 // The base and the exponent are in double registers. |
3320 // Allocate a heap number and call a C function for | 3331 // Allocate a heap number and call a C function for |
3321 // double exponents. The register containing | 3332 // double exponents. The register containing |
3322 // the heap number is callee-saved. | 3333 // the heap number is callee-saved. |
3323 __ AllocateHeapNumber(heapnumber, | 3334 __ AllocateHeapNumber(heapnumber, |
3324 scratch, | 3335 scratch, |
3325 scratch2, | 3336 scratch2, |
3326 heapnumbermap, | 3337 heapnumbermap, |
3327 &call_runtime); | 3338 &call_runtime); |
3328 __ push(lr); | 3339 __ push(lr); |
3329 __ PrepareCallCFunction(0, 2, scratch); | 3340 __ PrepareCallCFunction(0, 2, scratch); |
3330 __ SetCallCDoubleArguments(double_base, double_exponent); | 3341 __ SetCallCDoubleArguments(double_base, double_exponent); |
3331 __ CallCFunction( | 3342 { |
3332 ExternalReference::power_double_double_function(masm->isolate()), | 3343 AllowExternalCallThatCantCauseGC scope(masm); |
3333 0, 2); | 3344 __ CallCFunction( |
3334 __ pop(lr); | 3345 ExternalReference::power_double_double_function(masm->isolate()), |
3335 __ GetCFunctionDoubleResult(double_result); | 3346 0, 2); |
| 3347 __ pop(lr); |
| 3348 __ GetCFunctionDoubleResult(double_result); |
| 3349 } |
3336 __ vstr(double_result, | 3350 __ vstr(double_result, |
3337 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); | 3351 FieldMemOperand(heapnumber, HeapNumber::kValueOffset)); |
3338 __ mov(r0, heapnumber); | 3352 __ mov(r0, heapnumber); |
3339 __ Ret(2 * kPointerSize); | 3353 __ Ret(2 * kPointerSize); |
3340 } | 3354 } |
3341 | 3355 |
3342 __ bind(&call_runtime); | 3356 __ bind(&call_runtime); |
3343 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); | 3357 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
3344 } | 3358 } |
3345 | 3359 |
(...skipping 151 matching lines...)
3497 // NOTE: Invocations of builtins may return failure objects | 3511 // NOTE: Invocations of builtins may return failure objects |
3498 // instead of a proper result. The builtin entry handles | 3512 // instead of a proper result. The builtin entry handles |
3499 // this by performing a garbage collection and retrying the | 3513 // this by performing a garbage collection and retrying the |
3500 // builtin once. | 3514 // builtin once. |
3501 | 3515 |
3502 // Compute the argv pointer in a callee-saved register. | 3516 // Compute the argv pointer in a callee-saved register. |
3503 __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2)); | 3517 __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2)); |
3504 __ sub(r6, r6, Operand(kPointerSize)); | 3518 __ sub(r6, r6, Operand(kPointerSize)); |
3505 | 3519 |
3506 // Enter the exit frame that transitions from JavaScript to C++. | 3520 // Enter the exit frame that transitions from JavaScript to C++. |
| 3521 FrameScope scope(masm, StackFrame::MANUAL); |
3507 __ EnterExitFrame(save_doubles_); | 3522 __ EnterExitFrame(save_doubles_); |
3508 | 3523 |
3509 // Setup argc and the builtin function in callee-saved registers. | 3524 // Setup argc and the builtin function in callee-saved registers. |
3510 __ mov(r4, Operand(r0)); | 3525 __ mov(r4, Operand(r0)); |
3511 __ mov(r5, Operand(r1)); | 3526 __ mov(r5, Operand(r1)); |
3512 | 3527 |
3513 // r4: number of arguments (C callee-saved) | 3528 // r4: number of arguments (C callee-saved) |
3514 // r5: pointer to builtin function (C callee-saved) | 3529 // r5: pointer to builtin function (C callee-saved) |
3515 // r6: pointer to first argument (C callee-saved) | 3530 // r6: pointer to first argument (C callee-saved) |
3516 | 3531 |
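In this CEntryStub hunk the scope type is StackFrame::MANUAL rather than INTERNAL: the stub still emits its own EnterExitFrame, and the scope presumably only records that a frame will exist so later has-a-frame checks are satisfied, without emitting any instructions itself. A hedged sketch of that bookkeeping-only variant; ManualFrameScope and the has_frame flag are illustrative names, not the actual implementation.

    #include <cassert>

    // Stand-in assembler that only tracks whether a frame is active.
    struct MacroAssembler {
      bool has_frame = false;
    };

    // Sketch: a MANUAL scope emits no code; it records that the generated code
    // sets up a frame by hand (EnterExitFrame in the diff above), so calls that
    // require a frame pass their assertions.
    class ManualFrameScope {
     public:
      explicit ManualFrameScope(MacroAssembler* masm) : masm_(masm) {
        old_has_frame_ = masm_->has_frame;
        masm_->has_frame = true;  // bookkeeping only, no instructions emitted
      }
      ~ManualFrameScope() { masm_->has_frame = old_has_frame_; }
     private:
      MacroAssembler* masm_;
      bool old_has_frame_;
    };

    int main() {
      MacroAssembler masm;
      ManualFrameScope scope(&masm);  // before the hand-written EnterExitFrame
      assert(masm.has_frame);         // a GC-capable call would check this flag
    }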
(...skipping 345 matching lines...)
3862 __ Ret(HasArgsInRegisters() ? 0 : 2); | 3877 __ Ret(HasArgsInRegisters() ? 0 : 2); |
3863 | 3878 |
3864 // Slow-case. Tail call builtin. | 3879 // Slow-case. Tail call builtin. |
3865 __ bind(&slow); | 3880 __ bind(&slow); |
3866 if (!ReturnTrueFalseObject()) { | 3881 if (!ReturnTrueFalseObject()) { |
3867 if (HasArgsInRegisters()) { | 3882 if (HasArgsInRegisters()) { |
3868 __ Push(r0, r1); | 3883 __ Push(r0, r1); |
3869 } | 3884 } |
3870 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3885 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
3871 } else { | 3886 } else { |
3872 __ EnterInternalFrame(); | 3887 { |
3873 __ Push(r0, r1); | 3888 FrameScope scope(masm, StackFrame::INTERNAL); |
3874 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | 3889 __ Push(r0, r1); |
3875 __ LeaveInternalFrame(); | 3890 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); |
| 3891 } |
3876 __ cmp(r0, Operand(0)); | 3892 __ cmp(r0, Operand(0)); |
3877 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); | 3893 __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); |
3878 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); | 3894 __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); |
3879 __ Ret(HasArgsInRegisters() ? 0 : 2); | 3895 __ Ret(HasArgsInRegisters() ? 0 : 2); |
3880 } | 3896 } |
3881 } | 3897 } |
3882 | 3898 |
3883 | 3899 |
3884 Register InstanceofStub::left() { return r0; } | 3900 Register InstanceofStub::left() { return r0; } |
3885 | 3901 |
(...skipping 2244 matching lines...)
6130 } | 6146 } |
6131 | 6147 |
6132 | 6148 |
6133 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 6149 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
6134 __ Push(r1, r0); | 6150 __ Push(r1, r0); |
6135 __ push(lr); | 6151 __ push(lr); |
6136 | 6152 |
6137 // Call the runtime system in a fresh internal frame. | 6153 // Call the runtime system in a fresh internal frame. |
6138 ExternalReference miss = | 6154 ExternalReference miss = |
6139 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); | 6155 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); |
6140 __ EnterInternalFrame(); | 6156 { |
6141 __ Push(r1, r0); | 6157 FrameScope scope(masm, StackFrame::INTERNAL); |
6142 __ mov(ip, Operand(Smi::FromInt(op_))); | 6158 __ Push(r1, r0); |
6143 __ push(ip); | 6159 __ mov(ip, Operand(Smi::FromInt(op_))); |
6144 __ CallExternalReference(miss, 3); | 6160 __ push(ip); |
6145 __ LeaveInternalFrame(); | 6161 __ CallExternalReference(miss, 3); |
| 6162 } |
6146 // Compute the entry point of the rewritten stub. | 6163 // Compute the entry point of the rewritten stub. |
6147 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); | 6164 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); |
6148 // Restore registers. | 6165 // Restore registers. |
6149 __ pop(lr); | 6166 __ pop(lr); |
6150 __ pop(r0); | 6167 __ pop(r0); |
6151 __ pop(r1); | 6168 __ pop(r1); |
6152 __ Jump(r2); | 6169 __ Jump(r2); |
6153 } | 6170 } |
6154 | 6171 |
6155 | 6172 |
(...skipping 160 matching lines...)
6316 __ tst(r0, Operand(r0)); | 6333 __ tst(r0, Operand(r0)); |
6317 __ mov(scratch2, Operand(r2)); | 6334 __ mov(scratch2, Operand(r2)); |
6318 __ ldm(ia_w, sp, spill_mask); | 6335 __ ldm(ia_w, sp, spill_mask); |
6319 | 6336 |
6320 __ b(ne, done); | 6337 __ b(ne, done); |
6321 __ b(eq, miss); | 6338 __ b(eq, miss); |
6322 } | 6339 } |
6323 | 6340 |
6324 | 6341 |
6325 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 6342 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
| 6343 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 6344 // we cannot call anything that could cause a GC from this stub. |
6326 // Registers: | 6345 // Registers: |
6327 // result: StringDictionary to probe | 6346 // result: StringDictionary to probe |
6328 // r1: key | 6347 // r1: key |
6329 // : StringDictionary to probe. | 6348 // : StringDictionary to probe. |
6330 // index_: will hold an index of entry if lookup is successful. | 6349 // index_: will hold an index of entry if lookup is successful. |
6331 // might alias with result_. | 6350 // might alias with result_. |
6332 // Returns: | 6351 // Returns: |
6333 // result_ is zero if lookup failed, non zero otherwise. | 6352 // result_ is zero if lookup failed, non zero otherwise. |
6334 | 6353 |
6335 Register result = r0; | 6354 Register result = r0; |
(...skipping 74 matching lines...)
6410 __ mov(result, Operand(0)); | 6429 __ mov(result, Operand(0)); |
6411 __ Ret(); | 6430 __ Ret(); |
6412 } | 6431 } |
6413 | 6432 |
6414 | 6433 |
6415 #undef __ | 6434 #undef __ |
6416 | 6435 |
6417 } } // namespace v8::internal | 6436 } } // namespace v8::internal |
6418 | 6437 |
6419 #endif // V8_TARGET_ARCH_ARM | 6438 #endif // V8_TARGET_ARCH_ARM |