OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2928 matching lines...) |
2939 | 2939 |
2940 // Load the cache state into r4. | 2940 // Load the cache state into r4. |
2941 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2941 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2942 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2942 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2943 | 2943 |
2944 // A monomorphic cache hit or an already megamorphic state: invoke the | 2944 // A monomorphic cache hit or an already megamorphic state: invoke the |
2945 // function without changing the state. | 2945 // function without changing the state. |
2946 __ cmp(r4, r1); | 2946 __ cmp(r4, r1); |
2947 __ b(eq, &done); | 2947 __ b(eq, &done); |
2948 | 2948 |
2949 // If we came here, we need to see if we are the array function. | 2949 if (!FLAG_pretenuring_call_new) { |
2950 // If we didn't have a matching function, and we didn't find the megamorph | 2950 // If we came here, we need to see if we are the array function. |
2951 // sentinel, then we have in the slot either some other function or an | 2951 // If we didn't have a matching function, and we didn't find the megamorph |
2952 // AllocationSite. Do a map check on the object in ecx. | 2952 // sentinel, then we have in the slot either some other function or an |
2953 __ ldr(r5, FieldMemOperand(r4, 0)); | 2953 // AllocationSite. Do a map check on the object in ecx. |
2954 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); | 2954 __ ldr(r5, FieldMemOperand(r4, 0)); |
2955 __ b(ne, &miss); | 2955 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); |
| 2956 __ b(ne, &miss); |
2956 | 2957 |
2957 // Make sure the function is the Array() function | 2958 // Make sure the function is the Array() function |
2958 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); | 2959 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); |
2959 __ cmp(r1, r4); | 2960 __ cmp(r1, r4); |
2960 __ b(ne, &megamorphic); | 2961 __ b(ne, &megamorphic); |
2961 __ jmp(&done); | 2962 __ jmp(&done); |
| 2963 } |
2962 | 2964 |
2963 __ bind(&miss); | 2965 __ bind(&miss); |
2964 | 2966 |
2965 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 2967 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
2966 // megamorphic. | 2968 // megamorphic. |
2967 __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex); | 2969 __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex); |
2968 __ b(eq, &initialize); | 2970 __ b(eq, &initialize); |
2969 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 2971 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
2970 // write-barrier is needed. | 2972 // write-barrier is needed. |
2971 __ bind(&megamorphic); | 2973 __ bind(&megamorphic); |
2972 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2974 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2973 __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex); | 2975 __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex); |
2974 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2976 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2975 __ jmp(&done); | 2977 __ jmp(&done); |
2976 | 2978 |
2977 // An uninitialized cache is patched with the function or sentinel to | 2979 // An uninitialized cache is patched with the function |
2978 // indicate the ElementsKind if function is the Array constructor. | |
2979 __ bind(&initialize); | 2980 __ bind(&initialize); |
2980 // Make sure the function is the Array() function | |
2981 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); | |
2982 __ cmp(r1, r4); | |
2983 __ b(ne, ¬_array_function); | |
2984 | 2981 |
2985 // The target function is the Array constructor, | 2982 if (!FLAG_pretenuring_call_new) { |
2986 // Create an AllocationSite if we don't already have it, store it in the slot. | 2983 // Make sure the function is the Array() function |
2987 { | 2984 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); |
2988 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2985 __ cmp(r1, r4); |
| 2986 __ b(ne, ¬_array_function); |
2989 | 2987 |
2990 // Arguments register must be smi-tagged to call out. | 2988 // The target function is the Array constructor, |
2991 __ SmiTag(r0); | 2989 // Create an AllocationSite if we don't already have it, store it in the |
2992 __ Push(r3, r2, r1, r0); | 2990 // slot. |
| 2991 { |
| 2992 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
2993 | 2993 |
2994 CreateAllocationSiteStub create_stub; | 2994 // Arguments register must be smi-tagged to call out. |
2995 __ CallStub(&create_stub); | 2995 __ SmiTag(r0); |
| 2996 __ Push(r3, r2, r1, r0); |
2996 | 2997 |
2997 __ Pop(r3, r2, r1, r0); | 2998 CreateAllocationSiteStub create_stub; |
2998 __ SmiUntag(r0); | 2999 __ CallStub(&create_stub); |
| 3000 |
| 3001 __ Pop(r3, r2, r1, r0); |
| 3002 __ SmiUntag(r0); |
| 3003 } |
| 3004 __ b(&done); |
| 3005 |
| 3006 __ bind(¬_array_function); |
2999 } | 3007 } |
3000 __ b(&done); | |
3001 | |
3002 __ bind(¬_array_function); | |
3003 | 3008 |
3004 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 3009 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
3005 __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3010 __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
3006 __ str(r1, MemOperand(r4, 0)); | 3011 __ str(r1, MemOperand(r4, 0)); |
3007 | 3012 |
3008 __ Push(r4, r2, r1); | 3013 __ Push(r4, r2, r1); |
3009 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, | 3014 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, |
3010 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3015 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
3011 __ Pop(r4, r2, r1); | 3016 __ Pop(r4, r2, r1); |
3012 | 3017 |
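For readers following the GenerateRecordCallTarget hunk above: the stub walks the type-feedback slot through three states (the uninitialized symbol, a cached function, the megamorphic symbol), and when FLAG_pretenuring_call_new is off it additionally special-cases the Array() function by caching an AllocationSite in the slot instead of the function itself. Below is a minimal, self-contained C++ sketch of that state machine; the names (Slot, CachedFunction, kArrayFunctionId, ...) are illustrative stand-ins, not V8's actual API, and the flag is modeled as a plain constant.

    // Hypothetical model of the feedback-slot transitions performed by
    // GenerateRecordCallTarget (names are illustrative, not V8's API).
    #include <variant>
    #include <iostream>

    struct Uninitialized {};                  // Heap::kUninitializedSymbol
    struct Megamorphic {};                    // Heap::kMegamorphicSymbol
    struct CachedFunction { int function_id; };
    struct ArrayAllocationSite {};            // AllocationSite cached for Array()

    using Slot = std::variant<Uninitialized, Megamorphic, CachedFunction,
                              ArrayAllocationSite>;

    constexpr int kArrayFunctionId = 0;       // stand-in for the Array() function
    constexpr bool kPretenuringCallNew = false;  // models FLAG_pretenuring_call_new

    // Record a call to `callee`, mimicking the stub's state transitions.
    void RecordCallTarget(Slot& slot, int callee) {
      // A monomorphic cache hit or an already megamorphic state: leave it alone.
      if (auto* f = std::get_if<CachedFunction>(&slot);
          f != nullptr && f->function_id == callee) {
        return;
      }
      if (std::holds_alternative<Megamorphic>(slot)) return;

      if (!kPretenuringCallNew && std::holds_alternative<ArrayAllocationSite>(slot)) {
        // Slot holds an AllocationSite: stay monomorphic only for Array() calls.
        if (callee == kArrayFunctionId) return;
        slot = Megamorphic{};
        return;
      }

      // A monomorphic miss (i.e. the slot is not uninitialized) goes megamorphic.
      if (!std::holds_alternative<Uninitialized>(slot)) {
        slot = Megamorphic{};
        return;
      }

      // Uninitialized: cache an AllocationSite for Array(), the function otherwise.
      if (!kPretenuringCallNew && callee == kArrayFunctionId) {
        slot = ArrayAllocationSite{};
      } else {
        slot = CachedFunction{callee};
      }
    }

    int main() {
      Slot slot = Uninitialized{};
      RecordCallTarget(slot, 7);   // caches function 7 (monomorphic)
      RecordCallTarget(slot, 7);   // monomorphic hit, no change
      RecordCallTarget(slot, 8);   // different target: goes megamorphic
      std::cout << std::holds_alternative<Megamorphic>(slot) << "\n";  // 1
      return 0;
    }

The actual stub reaches the same end states with labels and branches (&miss, &megamorphic, &initialize, &not_array_function); the last store of the function also needs the RecordWrite barrier because, unlike the immortal megamorphic symbol, a JSFunction is a movable heap object.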
(...skipping 118 matching lines...) |
3131 // vector (Smi) | 3136 // vector (Smi) |
3132 Label slow, non_function_call; | 3137 Label slow, non_function_call; |
3133 | 3138 |
3134 // Check that the function is not a smi. | 3139 // Check that the function is not a smi. |
3135 __ JumpIfSmi(r1, &non_function_call); | 3140 __ JumpIfSmi(r1, &non_function_call); |
3136 // Check that the function is a JSFunction. | 3141 // Check that the function is a JSFunction. |
3137 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 3142 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
3138 __ b(ne, &slow); | 3143 __ b(ne, &slow); |
3139 | 3144 |
3140 if (RecordCallTarget()) { | 3145 if (RecordCallTarget()) { |
3141 Label feedback_register_initialized; | |
3142 GenerateRecordCallTarget(masm); | 3146 GenerateRecordCallTarget(masm); |
3143 | 3147 |
3144 // Put the AllocationSite from the feedback vector into r2, or undefined. | |
3145 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); | 3148 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); |
3146 __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize)); | 3149 if (FLAG_pretenuring_call_new) { |
3147 __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset)); | 3150 // Put the AllocationSite from the feedback vector into r2. |
3148 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); | 3151 // By adding kPointerSize we encode that we know the AllocationSite |
3149 __ b(eq, &feedback_register_initialized); | 3152 // entry is at the feedback vector slot given by r3 + 1. |
3150 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 3153 __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize)); |
3151 __ bind(&feedback_register_initialized); | 3154 } else { |
| 3155 Label feedback_register_initialized; |
| 3156 // Put the AllocationSite from the feedback vector into r2, or undefined. |
| 3157 __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize)); |
| 3158 __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset)); |
| 3159 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); |
| 3160 __ b(eq, &feedback_register_initialized); |
| 3161 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 3162 __ bind(&feedback_register_initialized); |
| 3163 } |
| 3164 |
3152 __ AssertUndefinedOrAllocationSite(r2, r5); | 3165 __ AssertUndefinedOrAllocationSite(r2, r5); |
3153 } | 3166 } |
3154 | 3167 |
3155 // Jump to the function-specific construct stub. | 3168 // Jump to the function-specific construct stub. |
3156 Register jmp_reg = r4; | 3169 Register jmp_reg = r4; |
3157 __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 3170 __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
3158 __ ldr(jmp_reg, FieldMemOperand(jmp_reg, | 3171 __ ldr(jmp_reg, FieldMemOperand(jmp_reg, |
3159 SharedFunctionInfo::kConstructStubOffset)); | 3172 SharedFunctionInfo::kConstructStubOffset)); |
3160 __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3173 __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); |
3161 | 3174 |
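The CallConstructStub hunk above changes how the AllocationSite reaches r2: with FLAG_pretenuring_call_new the stub assumes the AllocationSite lives in the feedback-vector slot immediately after the call's own slot, so it loads at FixedArray::kHeaderSize + kPointerSize past the scaled slot index; without the flag it loads the slot itself and falls back to undefined when the value is not an AllocationSite. The following stand-alone C++ sketch spells out that offset arithmetic; the constants are assumed 32-bit-ARM values (pointer size 4, heap-object tag 1, two-word FixedArray header) and are not taken from this review.

    // A minimal sketch of the address arithmetic behind
    //   __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
    //   __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize));
    // i.e. "load feedback-vector element at index (slot + 1)".
    #include <cstdint>
    #include <iostream>

    constexpr uint32_t kPointerSize = 4;            // assumed: 32-bit ARM
    constexpr uint32_t kHeapObjectTag = 1;          // assumed: heap pointers carry tag 1
    constexpr uint32_t kSmiTagSize = 1;             // assumed: smi n is encoded as n << 1
    constexpr uint32_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

    // Untagged address of element `index` in a FixedArray with tagged pointer `vector`.
    uint32_t ElementAddress(uint32_t vector, uint32_t index) {
      return vector - kHeapObjectTag + kFixedArrayHeaderSize + index * kPointerSize;
    }

    int main() {
      uint32_t vector = 0x1000 + kHeapObjectTag;    // some tagged FixedArray pointer (r2)
      uint32_t slot = 3;                            // feedback slot index
      uint32_t smi_slot = slot << kSmiTagSize;      // r3 arrives smi-tagged

      // r5 = r2 + PointerOffsetFromSmiKey(r3): scale the smi index to a byte offset.
      uint32_t r5 = vector + (smi_slot >> kSmiTagSize) * kPointerSize;

      // FieldMemOperand subtracts the heap-object tag; the extra kPointerSize
      // lands on element (slot + 1), where the AllocationSite is kept when
      // FLAG_pretenuring_call_new is on.
      uint32_t loaded_from = r5 + kFixedArrayHeaderSize + kPointerSize - kHeapObjectTag;

      std::cout << (loaded_from == ElementAddress(vector, slot + 1)) << "\n";  // 1
      return 0;
    }

Once r2 holds either an AllocationSite or undefined, the stub asserts that invariant and tail-calls the function-specific construct stub found through the SharedFunctionInfo.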
(...skipping 2326 matching lines...) |
5488 MemOperand(fp, 6 * kPointerSize), | 5501 MemOperand(fp, 6 * kPointerSize), |
5489 NULL); | 5502 NULL); |
5490 } | 5503 } |
5491 | 5504 |
5492 | 5505 |
5493 #undef __ | 5506 #undef __ |
5494 | 5507 |
5495 } } // namespace v8::internal | 5508 } } // namespace v8::internal |
5496 | 5509 |
5497 #endif // V8_TARGET_ARCH_ARM | 5510 #endif // V8_TARGET_ARCH_ARM |