Chromium Code Reviews

Diff: src/arm/code-stubs-arm.cc

Issue 7891042: Add asserts to ensure that we: (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 3 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 820 matching lines...)

   // Push the current return address before the C call. Return will be
   // through pop(pc) below.
   __ push(lr);
   __ PrepareCallCFunction(0, 2, scratch);
   if (masm->use_eabi_hardfloat()) {
     CpuFeatures::Scope scope(VFP3);
     __ vmov(d0, r0, r1);
     __ vmov(d1, r2, r3);
   }
-  // Call C routine that may not cause GC or other trouble.
-  __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()),
-                   0, 2);
+  {
+    AllowExternalCallThatCantCauseGC scope(masm);
+    __ CallCFunction(
+        ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);
+  }
   // Store answer in the overwritable heap number. Double returned in
   // registers r0 and r1 or in d0.
   if (masm->use_eabi_hardfloat()) {
     CpuFeatures::Scope scope(VFP3);
     __ vstr(d0,
             FieldMemOperand(heap_number_result, HeapNumber::kValueOffset));
   } else {
     __ Strd(r0, r1, FieldMemOperand(heap_number_result,
                                     HeapNumber::kValueOffset));
   }
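Note: the hunk above wraps the external call in an AllowExternalCallThatCantCauseGC scope, which is what lets the new asserts accept a C call made without a frame. Below is a minimal, self-contained sketch of that RAII assert-scope idea with made-up class and member names (the real classes live in V8's macro assembler headers and piggyback on the frame-tracking state, so treat this as an illustration, not V8's implementation):

#include <cassert>

class MockMacroAssembler {
 public:
  MockMacroAssembler() : has_frame_(false), allow_no_frame_call_(false) {}

  void set_has_frame(bool value) { has_frame_ = value; }
  bool has_frame() const { return has_frame_; }

  // Stand-in for MacroAssembler::CallCFunction. The assert is the point of
  // the patch: either a frame has been set up, or the call site has been
  // explicitly marked as unable to cause a GC.
  void CallCFunction() {
    assert(has_frame_ || allow_no_frame_call_);
    // ... emit the actual call sequence here ...
  }

 private:
  friend class AllowExternalCallThatCantCauseGC;
  bool has_frame_;
  bool allow_no_frame_call_;
};

// RAII scope: while it is alive, frame-less external calls are permitted.
class AllowExternalCallThatCantCauseGC {
 public:
  explicit AllowExternalCallThatCantCauseGC(MockMacroAssembler* masm)
      : masm_(masm), old_value_(masm->allow_no_frame_call_) {
    masm_->allow_no_frame_call_ = true;
  }
  ~AllowExternalCallThatCantCauseGC() {
    masm_->allow_no_frame_call_ = old_value_;
  }

 private:
  MockMacroAssembler* masm_;
  bool old_value_;
};

int main() {
  MockMacroAssembler masm;
  {
    AllowExternalCallThatCantCauseGC scope(&masm);
    masm.CallCFunction();  // OK: explicitly marked as unable to cause GC.
  }
  // masm.CallCFunction();  // Without a frame or a scope this would assert.
  return 0;
}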
(...skipping 745 matching lines...)

   // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
   // tagged as a small integer.
   __ InvokeBuiltin(native, JUMP_FUNCTION);
 }


 // The stub expects its argument in the tos_ register and returns its result in
 // it, too: zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
+  // This stub overrides SometimesSetsUpAFrame() to return false. That means
+  // we cannot call anything that could cause a GC from this stub.
   // This stub uses VFP3 instructions.
   CpuFeatures::Scope scope(VFP3);

   Label patch;
   const Register map = r9.is(tos_) ? r7 : r9;

   // undefined -> false.
   CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

   // Boolean -> its value.
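Note: the new comment in ToBooleanStub::Generate refers to a predicate on the stub class. A stub that reports it never sets up a frame is promising that its generated code makes no calls that could trigger a GC, which is exactly what the new asserts check. An illustrative-only sketch of such an override, with stand-in class names (the real hierarchy lives in src/code-stubs.h and the stub's declaration in code-stubs-arm.h):

// Stand-in for the CodeStub base class; names are illustrative.
class MockCodeStub {
 public:
  virtual ~MockCodeStub() {}
  // By default a stub may build a frame, so runtime/GC-causing calls are
  // allowed inside its generated code.
  virtual bool SometimesSetsUpAFrame() { return true; }
};

class MockToBooleanStub : public MockCodeStub {
 public:
  // Returning false is a promise checked by the new asserts: the generated
  // code never builds a frame, so it must not call anything that can GC.
  virtual bool SometimesSetsUpAFrame() { return false; }
};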
(...skipping 240 matching lines...)
   if (mode_ == UNARY_OVERWRITE) {
     __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
     __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
     __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
   } else {
     Label slow_allocate_heapnumber, heapnumber_allocated;
     __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
     __ jmp(&heapnumber_allocated);

     __ bind(&slow_allocate_heapnumber);
-    __ EnterInternalFrame();
-    __ push(r0);
-    __ CallRuntime(Runtime::kNumberAlloc, 0);
-    __ mov(r1, Operand(r0));
-    __ pop(r0);
-    __ LeaveInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ push(r0);
+      __ CallRuntime(Runtime::kNumberAlloc, 0);
+      __ mov(r1, Operand(r0));
+      __ pop(r0);
+    }

     __ bind(&heapnumber_allocated);
     __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
     __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
     __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
     __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
     __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
     __ mov(r0, Operand(r1));
   }
   __ Ret();
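Note: this hunk (and several below) replaces the paired EnterInternalFrame()/LeaveInternalFrame() calls with a block-scoped FrameScope, so the frame teardown can no longer be forgotten on an early exit from the block. A minimal, self-contained sketch of that RAII pattern with stand-in names (V8's real FrameScope also feeds the frame-tracking state that the new asserts rely on, so this is an illustration rather than the actual class):

#include <cstdio>

enum class StackFrameType { INTERNAL, MANUAL };

// Stand-in for the macro assembler; only the pieces the scope touches.
class MockMacroAssembler {
 public:
  void EnterFrame(StackFrameType) { std::printf("  emit frame entry\n"); }
  void LeaveFrame(StackFrameType) { std::printf("  emit frame exit\n"); }
  void set_has_frame(bool value) { has_frame_ = value; }
  bool has_frame() const { return has_frame_; }

 private:
  bool has_frame_ = false;
};

class FrameScope {
 public:
  FrameScope(MockMacroAssembler* masm, StackFrameType type)
      : masm_(masm), type_(type) {
    masm_->set_has_frame(true);
    // MANUAL: the stub emits its own frame setup (e.g. EnterExitFrame), so
    // the scope only records that a frame will exist for the asserts.
    if (type_ != StackFrameType::MANUAL) masm_->EnterFrame(type_);
  }
  ~FrameScope() {
    if (type_ != StackFrameType::MANUAL) masm_->LeaveFrame(type_);
    masm_->set_has_frame(false);
  }

 private:
  MockMacroAssembler* masm_;
  StackFrameType type_;
};

int main() {
  MockMacroAssembler masm;
  {
    FrameScope scope(&masm, StackFrameType::INTERNAL);
    std::printf("  ...push arguments, call the runtime...\n");
  }  // Frame torn down automatically here, mirroring the patched stubs.
  return 0;
}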
(...skipping 20 matching lines...)

   // Try to store the result in a heap number.
   __ bind(&try_float);
   if (mode_ == UNARY_NO_OVERWRITE) {
     Label slow_allocate_heapnumber, heapnumber_allocated;
     // Allocate a new heap number without zapping r0, which we need if it fails.
     __ AllocateHeapNumber(r2, r3, r4, r6, &slow_allocate_heapnumber);
     __ jmp(&heapnumber_allocated);

     __ bind(&slow_allocate_heapnumber);
-    __ EnterInternalFrame();
-    __ push(r0);  // Push the heap number, not the untagged int32.
-    __ CallRuntime(Runtime::kNumberAlloc, 0);
-    __ mov(r2, r0);  // Move the new heap number into r2.
-    // Get the heap number into r0, now that the new heap number is in r2.
-    __ pop(r0);
-    __ LeaveInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ push(r0);  // Push the heap number, not the untagged int32.
+      __ CallRuntime(Runtime::kNumberAlloc, 0);
+      __ mov(r2, r0);  // Move the new heap number into r2.
+      // Get the heap number into r0, now that the new heap number is in r2.
+      __ pop(r0);
+    }

     // Convert the heap number in r0 to an untagged integer in r1.
     // This can't go slow-case because it's the same number we already
     // converted once again.
     __ ConvertToInt32(r0, r1, r3, r4, d0, &impossible);
     __ mvn(r1, Operand(r1));

     __ bind(&heapnumber_allocated);
     __ mov(r0, r2);  // Move newly allocated heap number to r0.
   }
(...skipping 1194 matching lines...)
     __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
     __ stm(ia, cache_entry, r2.bit() | r3.bit() | r6.bit());
     __ Ret();

     __ bind(&invalid_cache);
     // The cache is invalid. Call runtime which will recreate the
     // cache.
     __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
     __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache);
     __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
-    __ EnterInternalFrame();
-    __ push(r0);
-    __ CallRuntime(RuntimeFunction(), 1);
-    __ LeaveInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ push(r0);
+      __ CallRuntime(RuntimeFunction(), 1);
+    }
     __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
     __ Ret();

     __ bind(&skip_cache);
     // Call C function to calculate the result and answer directly
     // without updating the cache.
     GenerateCallCFunction(masm, scratch0);
     __ GetCFunctionDoubleResult(d2);
     __ bind(&no_update);

     // We return the value in d2 without adding it to the cache, but
     // we cause a scavenging GC so that future allocations will succeed.
-    __ EnterInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);

     // Allocate an aligned object larger than a HeapNumber.
     ASSERT(4 * kPointerSize >= HeapNumber::kSize);
     __ mov(scratch0, Operand(4 * kPointerSize));
     __ push(scratch0);
     __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
-    __ LeaveInternalFrame();
+    }
     __ Ret();
   }
 }


 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
                                                     Register scratch) {
   Isolate* isolate = masm->isolate();

   __ push(lr);
(...skipping 91 matching lines...)
     // C function for integer exponents. The register containing
     // the heap number is callee-saved.
     __ AllocateHeapNumber(heapnumber,
                           scratch,
                           scratch2,
                           heapnumbermap,
                           &call_runtime);
     __ push(lr);
     __ PrepareCallCFunction(1, 1, scratch);
     __ SetCallCDoubleArguments(double_base, exponent);
-    __ CallCFunction(
-        ExternalReference::power_double_int_function(masm->isolate()),
-        1, 1);
-    __ pop(lr);
-    __ GetCFunctionDoubleResult(double_result);
+    {
+      AllowExternalCallThatCantCauseGC scope(masm);
+      __ CallCFunction(
+          ExternalReference::power_double_int_function(masm->isolate()),
+          1, 1);
+      __ pop(lr);
+      __ GetCFunctionDoubleResult(double_result);
+    }
     __ vstr(double_result,
             FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
     __ mov(r0, heapnumber);
     __ Ret(2 * kPointerSize);

     __ bind(&exponent_not_smi);
     __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
     __ cmp(scratch, heapnumbermap);
     __ b(ne, &call_runtime);
     // Exponent is a heapnumber. Load it into double register.
     __ vldr(double_exponent,
             FieldMemOperand(exponent, HeapNumber::kValueOffset));

     // The base and the exponent are in double registers.
     // Allocate a heap number and call a C function for
     // double exponents. The register containing
     // the heap number is callee-saved.
     __ AllocateHeapNumber(heapnumber,
                           scratch,
                           scratch2,
                           heapnumbermap,
                           &call_runtime);
     __ push(lr);
     __ PrepareCallCFunction(0, 2, scratch);
     __ SetCallCDoubleArguments(double_base, double_exponent);
-    __ CallCFunction(
-        ExternalReference::power_double_double_function(masm->isolate()),
-        0, 2);
-    __ pop(lr);
-    __ GetCFunctionDoubleResult(double_result);
+    {
+      AllowExternalCallThatCantCauseGC scope(masm);
+      __ CallCFunction(
+          ExternalReference::power_double_double_function(masm->isolate()),
+          0, 2);
+      __ pop(lr);
+      __ GetCFunctionDoubleResult(double_result);
+    }
     __ vstr(double_result,
             FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
     __ mov(r0, heapnumber);
     __ Ret(2 * kPointerSize);
   }

   __ bind(&call_runtime);
   __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
 }

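Note: the argument counts passed to CallCFunction above, (1, 1) and (0, 2), describe the C helpers being called: one general-purpose plus one double argument for the integer-exponent case, and two double arguments for the double-exponent case. A hedged sketch of what such helpers look like at the C++ level (the names and bodies here are illustrative stand-ins; the real definitions are part of V8's runtime support and may differ):

#include <cmath>
#include <cstdio>

// CallCFunction(power_double_int_function, 1, 1): one double argument and
// one general-purpose (integer) argument.
double power_double_int(double base, int exponent) {
  return std::pow(base, static_cast<double>(exponent));
}

// CallCFunction(power_double_double_function, 0, 2): two double arguments.
double power_double_double(double base, double exponent) {
  return std::pow(base, exponent);
}

int main() {
  std::printf("%g %g\n", power_double_int(2.0, 10),
              power_double_double(2.0, 0.5));
  return 0;
}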
(...skipping 146 matching lines...)
   // NOTE: Invocations of builtins may return failure objects
   // instead of a proper result. The builtin entry handles
   // this by performing a garbage collection and retrying the
   // builtin once.

   // Compute the argv pointer in a callee-saved register.
   __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
   __ sub(r6, r6, Operand(kPointerSize));

   // Enter the exit frame that transitions from JavaScript to C++.
+  FrameScope scope(masm, StackFrame::MANUAL);
   __ EnterExitFrame(save_doubles_);

   // Setup argc and the builtin function in callee-saved registers.
   __ mov(r4, Operand(r0));
   __ mov(r5, Operand(r1));

   // r4: number of arguments (C callee-saved)
   // r5: pointer to builtin function (C callee-saved)
   // r6: pointer to first argument (C callee-saved)

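Note: the two instructions computing r6 above evaluate argv = sp + argc * kPointerSize - kPointerSize, i.e. they point r6 at the argument slot farthest from the stack pointer, which is the first argument. A quick self-contained check of that arithmetic in plain C++ (hypothetical values; kPointerSize = 4 and kPointerSizeLog2 = 2 on 32-bit ARM):

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kPointerSize = 4;     // 32-bit ARM
  const unsigned kPointerSizeLog2 = 2;  // so LSL #2 multiplies by 4

  uintptr_t sp = 0x1000;                // hypothetical stack pointer
  uintptr_t argc = 3;                   // r0 holds the argument count

  // add r6, sp, Operand(r0, LSL, kPointerSizeLog2); sub r6, r6, kPointerSize
  uintptr_t argv = sp + (argc << kPointerSizeLog2) - kPointerSize;

  // Points at the slot farthest from sp, i.e. the first argument.
  assert(argv == sp + (argc - 1) * kPointerSize);
  return 0;
}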
(...skipping 362 matching lines...)
   __ Ret(HasArgsInRegisters() ? 0 : 2);

   // Slow-case. Tail call builtin.
   __ bind(&slow);
   if (!ReturnTrueFalseObject()) {
     if (HasArgsInRegisters()) {
       __ Push(r0, r1);
     }
     __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
   } else {
-    __ EnterInternalFrame();
-    __ Push(r0, r1);
-    __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
-    __ LeaveInternalFrame();
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(r0, r1);
+      __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
+    }
     __ cmp(r0, Operand::Zero());
     __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
     __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
     __ Ret(HasArgsInRegisters() ? 0 : 2);
   }
 }


 Register InstanceofStub::left() { return r0; }

(...skipping 2550 matching lines...)
 }


 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   __ Push(r1, r0);
   __ push(lr);

   // Call the runtime system in a fresh internal frame.
   ExternalReference miss =
       ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
-  __ EnterInternalFrame();
-  __ Push(r1, r0);
-  __ mov(ip, Operand(Smi::FromInt(op_)));
-  __ push(ip);
-  __ CallExternalReference(miss, 3);
-  __ LeaveInternalFrame();
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ Push(r1, r0);
+    __ mov(ip, Operand(Smi::FromInt(op_)));
+    __ push(ip);
+    __ CallExternalReference(miss, 3);
+  }
   // Compute the entry point of the rewritten stub.
   __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
   // Restore registers.
   __ pop(lr);
   __ pop(r0);
   __ pop(r1);
   __ Jump(r2);
 }

(...skipping 162 matching lines...)
   __ tst(r0, Operand(r0));
   __ mov(scratch2, Operand(r2));
   __ ldm(ia_w, sp, spill_mask);

   __ b(ne, done);
   __ b(eq, miss);
 }


 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
+  // This stub overrides SometimesSetsUpAFrame() to return false. That means
+  // we cannot call anything that could cause a GC from this stub.
   // Registers:
   //  result: StringDictionary to probe
   //  r1: key
   //  : StringDictionary to probe.
   //  index_: will hold an index of entry if lookup is successful.
   //          might alias with result_.
   // Returns:
   //  result_ is zero if lookup failed, non zero otherwise.

   Register result = r0;
(...skipping 74 matching lines...)
   __ mov(result, Operand::Zero());
   __ Ret();
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM