Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 6248004: ARM: Implement DoInstanceOfKnownGlobal stub (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 11 months ago
 // Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 2872 matching lines...)
   // Restore callee-saved registers and return.
 #ifdef DEBUG
   if (FLAG_debug_code) {
     __ mov(lr, Operand(pc));
   }
 #endif
   __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
 }


 // Uses registers r0 to r4. Expected input is
Søren Thygesen Gjesse 2011/01/13 15:14:36 Please update this comment to include information
Alexandre 2011/01/14 17:53:40 Done.
 // object in r0 (or at sp+1*kPointerSize) and function in
 // r1 (or at sp), depending on whether or not
 // args_in_registers() is true.
 void InstanceofStub::Generate(MacroAssembler* masm) {
+  // Call site inlining and patching implies arguments in registers.
+  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
+  // ReturnTrueFalse is only implemented for inlined call sites.
+  ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
+
   // Fixed register usage throughout the stub:
   const Register object = r0;  // Object (lhs).
   const Register map = r3;  // Map of the object.
   const Register function = r1;  // Function (rhs).
   const Register prototype = r4;  // Prototype of the function.
+  const Register inline_site = r9;
   const Register scratch = r2;
+
+  const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
+  const uint32_t kLdrOffsetMask = (1 << 12) - 1;
+  const int32_t kPCRegOffset = 2 * kPointerSize;
+
   Label slow, loop, is_instance, is_not_instance, not_js_object;
+
   if (!HasArgsInRegisters()) {
     __ ldr(object, MemOperand(sp, 1 * kPointerSize));
     __ ldr(function, MemOperand(sp, 0));
   }

   // Check that the left hand side is a JS object and load its map.
   __ BranchOnSmi(object, &not_js_object);
   __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

-  // Look up the function and the map in the instanceof cache.
-  Label miss;
-  __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
-  __ cmp(function, ip);
-  __ b(ne, &miss);
-  __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
-  __ cmp(map, ip);
-  __ b(ne, &miss);
-  __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
-
-  __ bind(&miss);
+  // If there is a call site cache, don't look in the global cache, but do
+  // the real lookup and update the call site cache.
+  if (!HasCallSiteInlineCheck()) {
+    Label miss;
+    __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
+    __ cmp(function, ip);
+    __ b(ne, &miss);
+    __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
+    __ cmp(map, ip);
+    __ b(ne, &miss);
+    __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+    __ Ret(HasArgsInRegisters() ? 0 : 2);
+
+    __ bind(&miss);
+  }
+
+  // Get the prototype of the function.
   __ TryGetFunctionPrototype(function, prototype, scratch, &slow);

   // Check that the function prototype is a JS object.
   __ BranchOnSmi(prototype, &slow);
   __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

-  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
-  __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
+  // Update the global instanceof cache or the inlined call site cache with
+  // the current map and function. The cached answer will be set when it is
+  // known below.
+  if (!HasCallSiteInlineCheck()) {
+    __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
+    __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
+  } else {
+    ASSERT(HasArgsInRegisters());
+    // Patch the (relocated) inlined map check.
+
+    // Get the load instruction. Its offset from lr was stored in the r4
+    // slot on the stack (see LCodeGen::DoDeferredLInstanceOfKnownGlobal).
+    __ ldr(scratch, MemOperand(sp, r4.code() * kPointerSize));
+    __ sub(inline_site, lr, scratch);
+    __ ldr(scratch, MemOperand(inline_site));
Søren Thygesen Gjesse 2011/01/13 15:14:36 This code pattern (the patching of the constant po
Alexandre 2011/01/14 17:53:40 Introduced a new GetRelocatedValueLocation MacroAs
+    if (FLAG_debug_code) {
+      // Check that the instruction is a ldr reg, [pc + offset].
+      __ and_(scratch, scratch, Operand(kLdrPCPattern));
+      __ cmp(scratch, Operand(kLdrPCPattern));
+      __ Check(eq, "The instruction to patch should be a load from pc.");
+      // scratch was clobbered. Restore it.
+      __ ldr(scratch, MemOperand(inline_site));
+    }
+    // Retrieve the address of the constant:
+    //   constant_address = (load_address + actual_pc_offset) + load_offset
+    __ and_(scratch, scratch, Operand(kLdrOffsetMask));
+    __ add(scratch, inline_site, Operand(scratch));
+    __ add(scratch, scratch, Operand(kPCRegOffset));
+
+    // scratch: address of the map to patch (in the constant pool).
+    // inline_site: address of the "load reg, map" instruction (the inline
+    // site). Preserve this register as we will use it again.
+
+    // Patch the constant in the constant pool.
+    __ str(map, MemOperand(scratch));
+  }
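For reference, the three instructions above recover the address of the constant-pool slot behind a pc-relative load. A minimal C++ sketch of the same arithmetic (not part of the patch), assuming the patched word is an ARM "ldr Rd, [pc, #imm12]" with a positive offset, and that the hardware pc reads two instructions (8 bytes) ahead of the load:

    #include <cstdint>

    // Given the address and raw encoding of a "ldr Rd, [pc, #imm12]"
    // instruction, compute the address of the constant it loads.
    uint32_t ConstantPoolEntryAddress(uint32_t load_address,
                                      uint32_t load_instruction) {
      const uint32_t kLdrOffsetMask = (1 << 12) - 1;  // imm12 field
      const uint32_t kPCRegOffset = 2 * 4;            // pc is 8 bytes ahead
      const uint32_t load_offset = load_instruction & kLdrOffsetMask;
      // constant_address = (load_address + actual_pc_offset) + load_offset
      return load_address + kPCRegOffset + load_offset;
    }

The stub computes exactly this with the and_/add/add sequence, leaving the result in scratch so the current map can be stored over the old constant.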

   // Register mapping: r3 is object map and r4 is function prototype.
   // Get prototype of object into r2.
   __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset));

+  // We don't need map any more. Use it as a scratch register.
+  Register scratch3 = map;
Søren Thygesen Gjesse 2011/01/13 15:14:36 Please add map = no_reg; as well.
Alexandre 2011/01/14 17:53:40 Done.
+
   // Loop through the prototype chain looking for the function prototype.
+  __ LoadRoot(scratch3, Heap::kNullValueRootIndex);
   __ bind(&loop);
   __ cmp(scratch, Operand(prototype));
   __ b(eq, &is_instance);
-  __ LoadRoot(ip, Heap::kNullValueRootIndex);
-  __ cmp(scratch, ip);
+  __ cmp(scratch, scratch3);
   __ b(eq, &is_not_instance);
   __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
   __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
   __ jmp(&loop);

   __ bind(&is_instance);
+  if (!HasCallSiteInlineCheck()) {
     __ mov(r0, Operand(Smi::FromInt(0)));
     __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  } else {
+    // Patch the call site to return true (a relocated boolean constant).
+    __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult));
+    if (FLAG_debug_code) {
+      // Check that the instruction is a ldr reg, [pc + offset].
+      __ and_(scratch, scratch, Operand(kLdrPCPattern));
+      __ cmp(scratch, Operand(kLdrPCPattern));
+      __ Check(eq, "The instruction to patch should be a load from pc.");
+      // scratch was clobbered. Restore it.
+      __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult));
+    }
+    // Get the address of the constant.
+    __ and_(scratch, scratch, Operand(kLdrOffsetMask));
+    __ add(scratch, inline_site, Operand(scratch));
+    __ add(scratch, scratch, Operand(kPCRegOffset));
+
+    // Patch the constant in the constant pool.
+    __ LoadRoot(r0, Heap::kTrueValueRootIndex);
+    __ str(r0, MemOperand(scratch));
+
+    if (!ReturnTrueFalseObject()) {
+      __ mov(r0, Operand(Smi::FromInt(0)));
+    }
+  }
   __ Ret(HasArgsInRegisters() ? 0 : 2);

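kDeltaToLoadBoolResult (3 * kPointerSize) encodes an assumption about the shape of the inlined call site: the load of the boolean answer sits three instructions past the map-check load. A hypothetical layout, for orientation only; the real sequence is emitted by LCodeGen::DoInstanceOfKnownGlobal in lithium-codegen-arm.cc and is not shown in this file:

    // inline_site + 0:   ldr ip, [pc, #map_offset]   ; load the cached map
    // inline_site + 4:   cmp map, ip                 ; inlined map check
    // inline_site + 8:   bne <deferred code>         ; miss -> this stub
    // inline_site + 12:  ldr r0, [pc, #bool_offset]  ; load cached answer

Patching the constant behind the second load, as done above, changes the answer the inline site produces on a cache hit without re-entering the stub.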
   __ bind(&is_not_instance);
+  if (!HasCallSiteInlineCheck()) {
     __ mov(r0, Operand(Smi::FromInt(1)));
     __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  } else {
+    // Patch the call site to return false.
+    __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult));
+    if (FLAG_debug_code) {
+      // Check that the instruction is a ldr reg, [pc + offset].
+      __ and_(scratch, scratch, Operand(kLdrPCPattern));
+      __ cmp(scratch, Operand(kLdrPCPattern));
+      __ Check(eq, "The instruction to patch should be a load from pc.");
+      // scratch was clobbered. Restore it.
+      __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult));
+    }
+    // Get the address of the constant.
+    __ and_(scratch, scratch, Operand(kLdrOffsetMask));
+    __ add(scratch, inline_site, Operand(scratch));
+    __ add(scratch, scratch, Operand(kPCRegOffset));
+
+    // Patch the constant in the constant pool.
+    // Use r0 as we need the result in it.
+    __ LoadRoot(r0, Heap::kFalseValueRootIndex);
+    __ str(r0, MemOperand(scratch));
+
+    if (!ReturnTrueFalseObject()) {
+      __ mov(r0, Operand(Smi::FromInt(1)));
+    }
+  }
   __ Ret(HasArgsInRegisters() ? 0 : 2);

   Label object_not_null, object_not_null_or_smi;
   __ bind(&not_js_object);
   // Before null, smi and string value checks, check that the rhs is a
   // function, as for a non-function rhs an exception needs to be thrown.
   __ BranchOnSmi(function, &slow);
   __ CompareObjectType(function, map, scratch, JS_FUNCTION_TYPE);
   __ b(ne, &slow);

(...skipping 10 matching lines...)
   __ Ret(HasArgsInRegisters() ? 0 : 2);

   __ bind(&object_not_null_or_smi);
   // String values are not instances of anything.
   __ IsObjectJSStringType(object, scratch, &slow);
   __ mov(r0, Operand(Smi::FromInt(1)));
   __ Ret(HasArgsInRegisters() ? 0 : 2);

   // Slow-case. Tail call builtin.
   __ bind(&slow);
-  if (HasArgsInRegisters()) {
-    __ Push(r0, r1);
-  }
-  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
+  if (!ReturnTrueFalseObject()) {
+    if (HasArgsInRegisters()) {
+      __ Push(r0, r1);
+    }
+    __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
+  } else {
+    __ EnterInternalFrame();
+    __ Push(r0, r1);
+    __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_JS);
+    __ LeaveInternalFrame();
+    __ tst(r0, r0);
+    __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
+    __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
+    __ Ret(HasArgsInRegisters() ? 0 : 2);
+  }
 }
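Throughout the stub the Smi result convention is: Smi 0 means "is an instance" and Smi 1 means "is not" (these are also the values written to the instanceof cache above), while ReturnTrueFalseObject() call sites receive the actual true/false heap objects, which the slow path derives by testing the builtin's Smi result. A one-line sketch of that mapping, with a hypothetical helper name:

    // Hypothetical helper illustrating the stub's result convention.
    bool InstanceofResultIsTrue(int smi_value) {
      return smi_value == 0;  // Smi 0 == "is an instance" (the eq path).
    }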


+Register InstanceofStub::left() { return r0; }
+
+
+Register InstanceofStub::right() { return r1; }
+
+
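left() and right() advertise the stub's fixed input registers (r0 and r1) to the optimizing compiler. A hedged sketch of how a call site might instantiate the fully specialized variant; the flag names mirror the accessors used above, but the exact enum lives in code-stubs.h and is an assumption here:

    // Assumed flag names, matching HasArgsInRegisters(),
    // HasCallSiteInlineCheck() and ReturnTrueFalseObject() above.
    InstanceofStub stub(static_cast<InstanceofStub::Flags>(
        InstanceofStub::kArgsInRegisters |
        InstanceofStub::kCallSiteInlineCheck |
        InstanceofStub::kReturnTrueFalseObject));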
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The displacement is the offset of the last parameter (if any)
   // relative to the frame pointer.
   static const int kDisplacement =
       StandardFrameConstants::kCallerSPOffset - kPointerSize;

   // Check that the key is a smi.
   Label slow;
   __ BranchOnNotSmi(r1, &slow);

(...skipping 1971 matching lines...)
   __ pop(r1);
   __ Jump(r2);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM