OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 84 matching lines...) | |
95 Isolate* isolate, | 95 Isolate* isolate, |
96 CodeStubInterfaceDescriptor* descriptor) { | 96 CodeStubInterfaceDescriptor* descriptor) { |
97 static Register registers[] = { r3, r2, r1, r0 }; | 97 static Register registers[] = { r3, r2, r1, r0 }; |
98 descriptor->register_param_count_ = 4; | 98 descriptor->register_param_count_ = 4; |
99 descriptor->register_params_ = registers; | 99 descriptor->register_params_ = registers; |
100 descriptor->deoptimization_handler_ = | 100 descriptor->deoptimization_handler_ = |
101 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; | 101 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; |
102 } | 102 } |
103 | 103 |
104 | 104 |
105 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( | |
106 Isolate* isolate, | |
107 CodeStubInterfaceDescriptor* descriptor) { | |
108 static Register registers[] = { r2, r3 }; | |
109 descriptor->register_param_count_ = 2; | |
110 descriptor->register_params_ = registers; | |
111 descriptor->deoptimization_handler_ = NULL; | |
112 } | |
113 | |
114 | |
115 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( | 105 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
116 Isolate* isolate, | 106 Isolate* isolate, |
117 CodeStubInterfaceDescriptor* descriptor) { | 107 CodeStubInterfaceDescriptor* descriptor) { |
118 static Register registers[] = { r1, r0 }; | 108 static Register registers[] = { r1, r0 }; |
119 descriptor->register_param_count_ = 2; | 109 descriptor->register_param_count_ = 2; |
120 descriptor->register_params_ = registers; | 110 descriptor->register_params_ = registers; |
121 descriptor->deoptimization_handler_ = | 111 descriptor->deoptimization_handler_ = |
122 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); | 112 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); |
123 } | 113 } |
124 | 114 |
(...skipping 1317 matching lines...) | |
1442 return true; | 1432 return true; |
1443 } | 1433 } |
1444 | 1434 |
1445 | 1435 |
1446 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 1436 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
1447 CEntryStub::GenerateAheadOfTime(isolate); | 1437 CEntryStub::GenerateAheadOfTime(isolate); |
1448 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1438 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); |
1449 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1439 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
1450 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 1440 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
1451 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 1441 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
1452 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | |
1453 BinaryOpICStub::GenerateAheadOfTime(isolate); | 1442 BinaryOpICStub::GenerateAheadOfTime(isolate); |
1454 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 1443 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
1455 } | 1444 } |
1456 | 1445 |
1457 | 1446 |
1458 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 1447 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
1459 SaveFPRegsMode mode = kSaveFPRegs; | 1448 SaveFPRegsMode mode = kSaveFPRegs; |
1460 CEntryStub save_doubles(1, mode); | 1449 CEntryStub save_doubles(1, mode); |
1461 StoreBufferOverflowStub stub(mode); | 1450 StoreBufferOverflowStub stub(mode); |
1462 // These stubs might already be in the snapshot, detect that and don't | 1451 // These stubs might already be in the snapshot, detect that and don't |
(...skipping 1557 matching lines...) | |
3020 | 3009 |
3021 // Load the cache state into r4. | 3010 // Load the cache state into r4. |
3022 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 3011 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
3023 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 3012 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
3024 | 3013 |
3025 // A monomorphic cache hit or an already megamorphic state: invoke the | 3014 // A monomorphic cache hit or an already megamorphic state: invoke the |
3026 // function without changing the state. | 3015 // function without changing the state. |
3027 __ cmp(r4, r1); | 3016 __ cmp(r4, r1); |
3028 __ b(eq, &done); | 3017 __ b(eq, &done); |
3029 | 3018 |
3030 // If we came here, we need to see if we are the array function. | |
3031 // If we didn't have a matching function, and we didn't find the megamorph | |
3032 // sentinel, then we have in the slot either some other function or an | |
3033 // AllocationSite. Do a map check on the object in ecx. | |
3034 __ ldr(r5, FieldMemOperand(r4, 0)); | |
3035 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); | |
3036 __ b(ne, &miss); | |
3037 | |
3038 // Make sure the function is the Array() function | |
3039 __ LoadArrayFunction(r4); | |
3040 __ cmp(r1, r4); | |
3041 __ b(ne, &megamorphic); | |
3042 __ jmp(&done); | |
3043 | |
3044 __ bind(&miss); | 3019 __ bind(&miss); |
3045 | 3020 |
3046 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 3021 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
3047 // megamorphic. | 3022 // megamorphic. |
3048 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); | 3023 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
3049 __ b(eq, &initialize); | 3024 __ b(eq, &initialize); |
3050 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3025 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
3051 // write-barrier is needed. | 3026 // write-barrier is needed. |
3052 __ bind(&megamorphic); | 3027 __ bind(&megamorphic); |
3053 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 3028 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
3054 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 3029 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
3055 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 3030 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
3056 __ jmp(&done); | 3031 __ jmp(&done); |
3057 | 3032 |
3058 // An uninitialized cache is patched with the function or sentinel to | 3033 // An uninitialized cache is patched with the function or sentinel to |
3059 // indicate the ElementsKind if function is the Array constructor. | 3034 // indicate the ElementsKind if function is the Array constructor. |
3060 __ bind(&initialize); | 3035 __ bind(&initialize); |
3061 // Make sure the function is the Array() function | |
3062 __ LoadArrayFunction(r4); | |
3063 __ cmp(r1, r4); | |
3064 __ b(ne, ¬_array_function); | |
3065 | |
3066 // The target function is the Array constructor, | |
3067 // Create an AllocationSite if we don't already have it, store it in the slot. | |
3068 { | |
3069 FrameScope scope(masm, StackFrame::INTERNAL); | |
3070 | |
3071 // Arguments register must be smi-tagged to call out. | |
3072 __ SmiTag(r0); | |
3073 __ Push(r3, r2, r1, r0); | |
3074 | |
3075 CreateAllocationSiteStub create_stub; | |
3076 __ CallStub(&create_stub); | |
3077 | |
3078 __ Pop(r3, r2, r1, r0); | |
3079 __ SmiUntag(r0); | |
3080 } | |
3081 __ b(&done); | |
3082 | |
3083 __ bind(¬_array_function); | |
3084 | |
3085 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 3036 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
3086 __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3037 __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
3087 __ str(r1, MemOperand(r4, 0)); | 3038 __ str(r1, MemOperand(r4, 0)); |
3088 | 3039 |
3089 __ Push(r4, r2, r1); | 3040 __ Push(r4, r2, r1); |
3090 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, | 3041 __ RecordWrite(r2, r4, r1, kLRHasNotBeenSaved, kDontSaveFPRegs, |
3091 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3042 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
3092 __ Pop(r4, r2, r1); | 3043 __ Pop(r4, r2, r1); |
3093 | 3044 |
3094 __ bind(&done); | 3045 __ bind(&done); |
(...skipping 10 matching lines...) | |
3105 // Check that the function is really a JavaScript function. | 3056 // Check that the function is really a JavaScript function. |
3106 // r1: pushed function (to be verified) | 3057 // r1: pushed function (to be verified) |
3107 __ JumpIfSmi(r1, &non_function); | 3058 __ JumpIfSmi(r1, &non_function); |
3108 | 3059 |
3109 // Goto slow case if we do not have a function. | 3060 // Goto slow case if we do not have a function. |
3110 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 3061 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
3111 __ b(ne, &slow); | 3062 __ b(ne, &slow); |
3112 | 3063 |
3113 if (RecordCallTarget()) { | 3064 if (RecordCallTarget()) { |
3114 GenerateRecordCallTarget(masm); | 3065 GenerateRecordCallTarget(masm); |
3066 // Type information was updated. Because we may call Array, which | |
3067 // expects either undefined or an AllocationSite in ebx we need | |
3068 // to set ebx to undefined. | |
3069 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | |
3115 } | 3070 } |
3116 } | 3071 } |
3117 | 3072 |
3118 // Fast-case: Invoke the function now. | 3073 // Fast-case: Invoke the function now. |
3119 // r1: pushed function | 3074 // r1: pushed function |
3120 ParameterCount actual(argc_); | 3075 ParameterCount actual(argc_); |
3121 | 3076 |
3122 if (CallAsMethod()) { | 3077 if (CallAsMethod()) { |
3123 if (NeedsChecks()) { | 3078 if (NeedsChecks()) { |
3124 // Do not transform the receiver for strict mode functions. | 3079 // Do not transform the receiver for strict mode functions. |
(...skipping 82 matching lines...) | |
3207 Label slow, non_function_call; | 3162 Label slow, non_function_call; |
3208 | 3163 |
3209 // Check that the function is not a smi. | 3164 // Check that the function is not a smi. |
3210 __ JumpIfSmi(r1, &non_function_call); | 3165 __ JumpIfSmi(r1, &non_function_call); |
3211 // Check that the function is a JSFunction. | 3166 // Check that the function is a JSFunction. |
3212 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 3167 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
3213 __ b(ne, &slow); | 3168 __ b(ne, &slow); |
3214 | 3169 |
3215 if (RecordCallTarget()) { | 3170 if (RecordCallTarget()) { |
3216 GenerateRecordCallTarget(masm); | 3171 GenerateRecordCallTarget(masm); |
3172 // Put the AllocationSite from the feedback vector into r2. | |
3173 // By adding kPointerSize we encode that we know the AllocationSite | |
3174 // entry is at the feedback vector slot given by r3 + 1. | |
3175 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); | |
3176 __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize)); | |
3177 __ AssertUndefinedOrAllocationSite(r2, r5); | |
3217 } | 3178 } |
3218 | 3179 |
3219 // Jump to the function-specific construct stub. | 3180 // Jump to the function-specific construct stub. |
3220 Register jmp_reg = r4; | 3181 Register jmp_reg = r4; |
3221 __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | 3182 __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
3222 __ ldr(jmp_reg, FieldMemOperand(jmp_reg, | 3183 __ ldr(jmp_reg, FieldMemOperand(jmp_reg, |
3223 SharedFunctionInfo::kConstructStubOffset)); | 3184 SharedFunctionInfo::kConstructStubOffset)); |
3224 __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3185 __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); |
3225 | 3186 |
3226 // r0: number of arguments | 3187 // r0: number of arguments |
(...skipping 2062 matching lines...) | |
5289 } else { | 5250 } else { |
5290 UNREACHABLE(); | 5251 UNREACHABLE(); |
5291 } | 5252 } |
5292 } | 5253 } |
5293 | 5254 |
5294 | 5255 |
5295 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 5256 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
5296 // ----------- S t a t e ------------- | 5257 // ----------- S t a t e ------------- |
5297 // -- r0 : argc (only if argument_count_ == ANY) | 5258 // -- r0 : argc (only if argument_count_ == ANY) |
5298 // -- r1 : constructor | 5259 // -- r1 : constructor |
5299 // -- r2 : feedback vector (fixed array or undefined) | 5260 // -- r2 : AllocationSite or undefined |
5300 // -- r3 : slot index (if r2 is fixed array) | |
5301 // -- sp[0] : return address | 5261 // -- sp[0] : return address |
5302 // -- sp[4] : last argument | 5262 // -- sp[4] : last argument |
5303 // ----------------------------------- | 5263 // ----------------------------------- |
5304 if (FLAG_debug_code) { | 5264 if (FLAG_debug_code) { |
5305 // The array construct code is only set for the global and natives | 5265 // The array construct code is only set for the global and natives |
5306 // builtin Array functions which always have maps. | 5266 // builtin Array functions which always have maps. |
5307 | 5267 |
5308 // Initial map for the builtin Array function should be a map. | 5268 // Initial map for the builtin Array function should be a map. |
5309 __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 5269 __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
5310 // Will both indicate a NULL and a Smi. | 5270 // Will both indicate a NULL and a Smi. |
5311 __ tst(r4, Operand(kSmiTagMask)); | 5271 __ tst(r4, Operand(kSmiTagMask)); |
5312 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); | 5272 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); |
5313 __ CompareObjectType(r4, r4, r5, MAP_TYPE); | 5273 __ CompareObjectType(r4, r4, r5, MAP_TYPE); |
5314 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 5274 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
5315 | 5275 |
5316 // We should either have undefined in ebx or a valid fixed array. | 5276 // We should either have undefined in ebx or a valid AllocationSite |
Hannes Payer (out of office) 2014/02/18 16:24:26: r2
mvstanton 2014/02/19 08:40:26: Done.
| |
5317 Label okay_here; | 5277 __ AssertUndefinedOrAllocationSite(r2, r4); |
5318 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map(); | |
5319 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); | |
5320 __ b(eq, &okay_here); | |
5321 __ ldr(r4, FieldMemOperand(r2, 0)); | |
5322 __ cmp(r4, Operand(fixed_array_map)); | |
5323 __ Assert(eq, kExpectedFixedArrayInRegisterR2); | |
5324 | |
5325 // r3 should be a smi if we don't have undefined in r2 | |
5326 __ AssertSmi(r3); | |
5327 | |
5328 __ bind(&okay_here); | |
5329 } | 5278 } |
5330 | 5279 |
5331 Label no_info; | 5280 Label no_info; |
5332 // Get the elements kind and case on that. | 5281 // Get the elements kind and case on that. |
5333 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); | 5282 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
5334 __ b(eq, &no_info); | 5283 __ b(eq, &no_info); |
5335 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); | |
5336 __ ldr(r2, FieldMemOperand(r2, FixedArray::kHeaderSize)); | |
5337 | |
5338 // If the feedback vector is undefined, or contains anything other than an | |
5339 // AllocationSite, call an array constructor that doesn't use AllocationSites. | |
5340 __ ldr(r4, FieldMemOperand(r2, 0)); | |
5341 __ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex); | |
5342 __ b(ne, &no_info); | |
5343 | 5284 |
5344 __ ldr(r3, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset)); | 5285 __ ldr(r3, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset)); |
5345 __ SmiUntag(r3); | 5286 __ SmiUntag(r3); |
5346 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 5287 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
5347 __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask)); | 5288 __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask)); |
5348 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 5289 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
5349 | 5290 |
5350 __ bind(&no_info); | 5291 __ bind(&no_info); |
5351 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); | 5292 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
5352 } | 5293 } |
(...skipping 221 matching lines...) | |
5574 MemOperand(fp, 6 * kPointerSize), | 5515 MemOperand(fp, 6 * kPointerSize), |
5575 NULL); | 5516 NULL); |
5576 } | 5517 } |
5577 | 5518 |
5578 | 5519 |
5579 #undef __ | 5520 #undef __ |
5580 | 5521 |
5581 } } // namespace v8::internal | 5522 } } // namespace v8::internal |
5582 | 5523 |
5583 #endif // V8_TARGET_ARCH_ARM | 5524 #endif // V8_TARGET_ARCH_ARM |