| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3250 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3261 | 3261 |
| 3262 // TODO(jbramley): Don't use static registers here, but take them as arguments. | 3262 // TODO(jbramley): Don't use static registers here, but take them as arguments. |
| 3263 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 3263 static void GenerateRecordCallTarget(MacroAssembler* masm) { |
| 3264 ASM_LOCATION("GenerateRecordCallTarget"); | 3264 ASM_LOCATION("GenerateRecordCallTarget"); |
| 3265 // Cache the called function in a feedback vector slot. Cache states are | 3265 // Cache the called function in a feedback vector slot. Cache states are |
| 3266 // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. | 3266 // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. |
| 3267 // x0 : number of arguments to the construct function | 3267 // x0 : number of arguments to the construct function |
| 3268 // x1 : the function to call | 3268 // x1 : the function to call |
| 3269 // x2 : feedback vector | 3269 // x2 : feedback vector |
| 3270 // x3 : slot in feedback vector (smi) | 3270 // x3 : slot in feedback vector (smi) |
| 3271 Label check_array, initialize_array, initialize_non_array, megamorphic, done; | 3271 Label initialize, done, miss, megamorphic, not_array_function; |
| 3272 | 3272 |
| 3273 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), | 3273 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3274 masm->isolate()->heap()->undefined_value()); | 3274 masm->isolate()->heap()->undefined_value()); |
| 3275 Heap::RootListIndex kMegamorphicRootIndex = Heap::kUndefinedValueRootIndex; | |
| 3276 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), | 3275 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), |
| 3277 masm->isolate()->heap()->the_hole_value()); | 3276 masm->isolate()->heap()->the_hole_value()); |
| 3278 Heap::RootListIndex kUninitializedRootIndex = Heap::kTheHoleValueRootIndex; | |
| 3279 ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()), | |
| 3280 masm->isolate()->heap()->null_value()); | |
| 3281 Heap::RootListIndex kPremonomorphicRootIndex = Heap::kNullValueRootIndex; | |
| 3282 | 3277 |
| 3283 // Load the cache state. | 3278 // Load the cache state. |
| 3284 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 3279 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3285 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); | 3280 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3286 | 3281 |
| 3287 // A monomorphic cache hit or an already megamorphic state: invoke the | 3282 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 3288 // function without changing the state. | 3283 // function without changing the state. |
| 3289 __ Cmp(x4, x1); | 3284 __ Cmp(x4, x1); |
| 3290 __ B(eq, &done); | 3285 __ B(eq, &done); |
| 3291 __ JumpIfRoot(x4, kMegamorphicRootIndex, &done); | |
| 3292 | 3286 |
| 3293 // Check if we're dealing with the Array function or not. | 3287 // If we came here, we need to see if we are the array function. |
| 3294 __ LoadArrayFunction(x5); | 3288 // If we didn't have a matching function, and we didn't find the megamorph |
| 3295 __ Cmp(x1, x5); | 3289 // sentinel, then we have in the slot either some other function or an |
| 3296 __ B(eq, &check_array); | 3290 // AllocationSite. Do a map check on the object in x4. |
| 3291 __ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset)); |
| 3292 __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &miss); |
| 3297 | 3293 |
| 3298 // Non-array cache: Check the cache state. | 3294 // Make sure the function is the Array() function |
| 3299 __ JumpIfRoot(x4, kPremonomorphicRootIndex, &initialize_non_array); | 3295 __ LoadArrayFunction(x4); |
| 3300 __ JumpIfNotRoot(x4, kUninitializedRootIndex, &megamorphic); | 3296 __ Cmp(x1, x4); |
| 3297 __ B(ne, &megamorphic); |
| 3298 __ B(&done); |
| 3301 | 3299 |
| 3302 // Non-array cache: Uninitialized -> premonomorphic. The sentinel is an | 3300 __ Bind(&miss); |
| 3303 // immortal immovable object (null) so no write-barrier is needed. | 3301 |
| 3302 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
| 3303 // megamorphic. |
| 3304 __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize); |
| 3305 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 3306 // write-barrier is needed. |
| 3307 __ Bind(&megamorphic); |
| 3304 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 3308 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3305 __ LoadRoot(x10, kPremonomorphicRootIndex); | 3309 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); |
| 3306 __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize)); | 3310 __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3307 __ B(&done); | 3311 __ B(&done); |
| 3308 | 3312 |
| 3309 // Array cache: Check the cache state to see if we're in a monomorphic | 3313 // An uninitialized cache is patched with the function or sentinel to |
| 3310 // state where the state object is an AllocationSite object. | 3314 // indicate the ElementsKind if function is the Array constructor. |
| 3311 __ Bind(&check_array); | 3315 __ Bind(&initialize); |
| 3312 __ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset)); | 3316 // Make sure the function is the Array() function |
| 3313 __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, &done); | 3317 __ LoadArrayFunction(x4); |
| 3318 __ Cmp(x1, x4); |
| 3319 __ B(ne, ¬_array_function); |
| 3314 | 3320 |
| 3315 // Array cache: Uninitialized or premonomorphic -> monomorphic. | 3321 // The target function is the Array constructor, |
| 3316 __ JumpIfRoot(x4, kUninitializedRootIndex, &initialize_array); | 3322 // Create an AllocationSite if we don't already have it, store it in the slot. |
| 3317 __ JumpIfRoot(x4, kPremonomorphicRootIndex, &initialize_array); | |
| 3318 | |
| 3319 // Both caches: Monomorphic -> megamorphic. The sentinel is an | |
| 3320 // immortal immovable object (undefined) so no write-barrier is needed. | |
| 3321 __ Bind(&megamorphic); | |
| 3322 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | |
| 3323 __ LoadRoot(x10, kMegamorphicRootIndex); | |
| 3324 __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize)); | |
| 3325 __ B(&done); | |
| 3326 | |
| 3327 // Array cache: Uninitialized or premonomorphic -> monomorphic. | |
| 3328 __ Bind(&initialize_array); | |
| 3329 { | 3323 { |
| 3330 FrameScope scope(masm, StackFrame::INTERNAL); | 3324 FrameScope scope(masm, StackFrame::INTERNAL); |
| 3331 CreateAllocationSiteStub create_stub; | 3325 CreateAllocationSiteStub create_stub; |
| 3332 | 3326 |
| 3333 // Arguments register must be smi-tagged to call out. | 3327 // Arguments register must be smi-tagged to call out. |
| 3334 __ SmiTag(x0); | 3328 __ SmiTag(x0); |
| 3335 __ Push(x0, x1, x2, x3); | 3329 __ Push(x0, x1, x2, x3); |
| 3336 | 3330 |
| 3337 __ CallStub(&create_stub); | 3331 __ CallStub(&create_stub); |
| 3338 | 3332 |
| 3339 __ Pop(x3, x2, x1, x0); | 3333 __ Pop(x3, x2, x1, x0); |
| 3340 __ SmiUntag(x0); | 3334 __ SmiUntag(x0); |
| 3341 } | 3335 } |
| 3342 __ B(&done); | 3336 __ B(&done); |
| 3343 | 3337 |
| 3344 // Non-array cache: Premonomorphic -> monomorphic. | 3338 __ Bind(¬_array_function); |
| 3345 __ Bind(&initialize_non_array); | 3339 // An uninitialized cache is patched with the function. |
| 3340 |
| 3346 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 3341 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3347 // TODO(all): Does the value need to be left in x4? If not, FieldMemOperand | 3342 // TODO(all): Does the value need to be left in x4? If not, FieldMemOperand |
| 3348 // could be used to avoid this add. | 3343 // could be used to avoid this add. |
| 3349 __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag); | 3344 __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag); |
| 3350 __ Str(x1, MemOperand(x4, 0)); | 3345 __ Str(x1, MemOperand(x4, 0)); |
| 3351 | 3346 |
| 3352 __ Push(x4, x2, x1); | 3347 __ Push(x4, x2, x1); |
| 3353 __ RecordWrite(x2, x4, x1, kLRHasNotBeenSaved, kDontSaveFPRegs, | 3348 __ RecordWrite(x2, x4, x1, kLRHasNotBeenSaved, kDontSaveFPRegs, |
| 3354 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3349 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3355 __ Pop(x1, x2, x4); | 3350 __ Pop(x1, x2, x4); |
| 3356 | 3351 |
| 3357 // TODO(all): Are x4, x2 and x1 outputs? This isn't clear. | 3352 // TODO(all): Are x4, x2 and x1 outputs? This isn't clear. |
| 3353 |
| 3358 __ Bind(&done); | 3354 __ Bind(&done); |
| 3359 } | 3355 } |
| 3360 | 3356 |
| 3361 | 3357 |
| 3362 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3358 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3363 ASM_LOCATION("CallFunctionStub::Generate"); | 3359 ASM_LOCATION("CallFunctionStub::Generate"); |
| 3364 // x1 function the function to call | 3360 // x1 function the function to call |
| 3365 // x2 : feedback vector | 3361 // x2 : feedback vector |
| 3366 // x3 : slot in feedback vector (smi) (if x2 is not undefined) | 3362 // x3 : slot in feedback vector (smi) (if x2 is not undefined) |
| 3367 Register function = x1; | 3363 Register function = x1; |
| (...skipping 2435 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5803 MemOperand(fp, 6 * kPointerSize), | 5799 MemOperand(fp, 6 * kPointerSize), |
| 5804 NULL); | 5800 NULL); |
| 5805 } | 5801 } |
| 5806 | 5802 |
| 5807 | 5803 |
| 5808 #undef __ | 5804 #undef __ |
| 5809 | 5805 |
| 5810 } } // namespace v8::internal | 5806 } } // namespace v8::internal |
| 5811 | 5807 |
| 5812 #endif // V8_TARGET_ARCH_A64 | 5808 #endif // V8_TARGET_ARCH_A64 |
| OLD | NEW |