Chromium Code Reviews

Unified Diff: src/a64/code-stubs-a64.cc

Issue 132963012: Pretenure call new support. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE. Created 6 years, 9 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 3189 matching lines...)
   // Load the cache state.
   __ Add(scratch1, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
   __ Cmp(scratch1, function);
   __ B(eq, &done);
 
-  // If we came here, we need to see if we are the array function.
-  // If we didn't have a matching function, and we didn't find the megamorph
-  // sentinel, then we have in the slot either some other function or an
-  // AllocationSite. Do a map check on the object in scratch1 register.
-  __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset));
-  __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
-
-  // Make sure the function is the Array() function
-  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
-  __ Cmp(function, scratch1);
-  __ B(ne, &megamorphic);
-  __ B(&done);
+  if (!FLAG_pretenuring_call_new) {
+    // If we came here, we need to see if we are the array function.
+    // If we didn't have a matching function, and we didn't find the megamorph
+    // sentinel, then we have in the slot either some other function or an
+    // AllocationSite. Do a map check on the object in scratch1 register.
+    __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset));
+    __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
+
+    // Make sure the function is the Array() function
+    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+    __ Cmp(function, scratch1);
+    __ B(ne, &megamorphic);
+    __ B(&done);
+  }
 
   __ Bind(&miss);
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
   __ JumpIfRoot(scratch1, Heap::kUninitializedSymbolRootIndex, &initialize);
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ Bind(&megamorphic);
   __ Add(scratch1, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ LoadRoot(scratch2, Heap::kMegamorphicSymbolRootIndex);
   __ Str(scratch2, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
   __ B(&done);
 
   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ Bind(&initialize);
-  // Make sure the function is the Array() function
-  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
-  __ Cmp(function, scratch1);
-  __ B(ne, &not_array_function);
 
-  // The target function is the Array constructor,
-  // Create an AllocationSite if we don't already have it, store it in the slot.
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    CreateAllocationSiteStub create_stub;
+  if (!FLAG_pretenuring_call_new) {
+    // Make sure the function is the Array() function
+    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+    __ Cmp(function, scratch1);
+    __ B(ne, &not_array_function);
 
-    // Arguments register must be smi-tagged to call out.
-    __ SmiTag(argc);
-    __ Push(argc, function, feedback_vector, index);
+    // The target function is the Array constructor,
+    // Create an AllocationSite if we don't already have it, store it in the
+    // slot.
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      CreateAllocationSiteStub create_stub;
 
-    // CreateAllocationSiteStub expect the feedback vector in x2 and the slot
-    // index in x3.
-    ASSERT(feedback_vector.Is(x2) && index.Is(x3));
-    __ CallStub(&create_stub);
+      // Arguments register must be smi-tagged to call out.
+      __ SmiTag(argc);
+      __ Push(argc, function, feedback_vector, index);
 
-    __ Pop(index, feedback_vector, function, argc);
-    __ SmiUntag(argc);
-  }
-  __ B(&done);
+      // CreateAllocationSiteStub expect the feedback vector in x2 and the slot
+      // index in x3.
+      ASSERT(feedback_vector.Is(x2) && index.Is(x3));
+      __ CallStub(&create_stub);
 
-  __ Bind(&not_array_function);
-  // An uninitialized cache is patched with the function.
+      __ Pop(index, feedback_vector, function, argc);
+      __ SmiUntag(argc);
+    }
+    __ B(&done);
 
+    __ Bind(&not_array_function);
+  }
+
+  // An uninitialized cache is patched with the function.
+
   __ Add(scratch1, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag);
   __ Str(function, MemOperand(scratch1, 0));
 
   __ Push(function);
   __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved,
                  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
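
Annotation: the hunk above reads most naturally as a state machine over the call site's feedback slot. The following is a minimal host-side C++ sketch of the transitions the stub encodes; every name in it (Slot, Feedback, RecordCallTargetModel) is a hypothetical stand-in for illustration, not V8 API, and the AllocationSite case is reduced to a state tag.

// Hypothetical model of GenerateRecordCallTarget's slot transitions.
enum class Slot { kUninitialized, kMonomorphic, kAllocationSite, kMegamorphic };

struct Feedback {
  Slot state = Slot::kUninitialized;
  const void* target = nullptr;  // the cached callee when monomorphic
};

void RecordCallTargetModel(Feedback& slot, const void* function,
                           bool is_array_function,
                           bool pretenuring_call_new) {
  // A monomorphic hit or an already megamorphic slot is left untouched.
  if (slot.state == Slot::kMonomorphic && slot.target == function) return;
  if (slot.state == Slot::kMegamorphic) return;
  // Without --pretenuring_call_new, an AllocationSite in the slot counts as
  // a hit for the Array() function (the map check before &miss above).
  if (!pretenuring_call_new && slot.state == Slot::kAllocationSite &&
      is_array_function) {
    return;
  }
  // Any other initialized state is a monomorphic miss: go megamorphic.
  // The megamorphic sentinel is immortal and immovable, so no write barrier.
  if (slot.state != Slot::kUninitialized) {
    slot.state = Slot::kMegamorphic;
    slot.target = nullptr;
    return;
  }
  // An uninitialized slot is patched with an AllocationSite for Array()
  // (via CreateAllocationSiteStub) or with the callee itself, and the real
  // stub then needs the RecordWrite barrier because a function was stored.
  if (!pretenuring_call_new && is_array_function) {
    slot.state = Slot::kAllocationSite;
  } else {
    slot.state = Slot::kMonomorphic;
    slot.target = function;
  }
}

With --pretenuring_call_new enabled, the array special-casing disappears from this stub entirely: the slot only ever holds the function or one of the two sentinels, and the AllocationSite moves to the neighbouring slot (see the CallConstructStub hunk below).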
(...skipping 127 matching lines...)
   Label slow, non_function_call;
 
   // Check that the function is not a smi.
   __ JumpIfSmi(function, &non_function_call);
   // Check that the function is a JSFunction.
   Register object_type = x10;
   __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE,
                          &slow);
 
   if (RecordCallTarget()) {
-    Label feedback_register_initialized;
     GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5);
 
-    // Put the AllocationSite from the feedback vector into x2, or undefined.
     __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
-    __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
-    __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
-    __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
-                  &feedback_register_initialized);
-    __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
-    __ bind(&feedback_register_initialized);
+    if (FLAG_pretenuring_call_new) {
+      // Put the AllocationSite from the feedback vector into x2.
+      // By adding kPointerSize we encode that we know the AllocationSite
+      // entry is at the feedback vector slot given by x3 + 1.
+      __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize));
+    } else {
+      Label feedback_register_initialized;
+      // Put the AllocationSite from the feedback vector into x2, or undefined.
+      __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
+      __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
+      __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
+                    &feedback_register_initialized);
+      __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+      __ bind(&feedback_register_initialized);
+    }
+
     __ AssertUndefinedOrAllocationSite(x2, x5);
   }
 
   // Jump to the function-specific construct stub.
   Register jump_reg = x4;
   Register shared_func_info = jump_reg;
   Register cons_stub = jump_reg;
   Register cons_stub_code = jump_reg;
   __ Ldr(shared_func_info,
          FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
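
Annotation: the FixedArray::kHeaderSize + kPointerSize offset is the heart of this hunk. Under --pretenuring_call_new the AllocationSite for the call site is kept in the feedback-vector slot immediately after the one indexed by x3, so it can be loaded unconditionally instead of being map-checked. Below is a self-contained sketch of that address arithmetic, using illustrative constants rather than V8's real target-specific values; FieldMemOperand(base, off) denotes base + off - kHeapObjectTag.

#include <cstdint>

constexpr intptr_t kPointerSize = 8;           // illustrative, 64-bit target
constexpr intptr_t kHeapObjectTag = 1;         // tag bit on heap pointers
constexpr intptr_t kFixedArrayHeaderSize = 2 * kPointerSize;  // assumed

// Byte offset of feedback slot `index` from the tagged vector pointer,
// mirroring Add(x5, x2, UntagSmiAndScale(x3, kPointerSizeLog2)) followed by
// a load through FieldMemOperand(x5, FixedArray::kHeaderSize).
intptr_t CallTargetSlotOffset(intptr_t index) {
  return index * kPointerSize + kFixedArrayHeaderSize - kHeapObjectTag;
}

// The AllocationSite entry sits one slot later, hence the extra
// kPointerSize in the FLAG_pretenuring_call_new branch above.
intptr_t AllocationSiteSlotOffset(intptr_t index) {
  return CallTargetSlotOffset(index) + kPointerSize;
}

The trade-off is visible in the two branches: the slot+1 scheme spends one extra vector entry per call site but drops the AllocationSite map check and the undefined fallback that the else-branch still needs.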
(...skipping 2274 matching lines...)
        MemOperand(fp, 6 * kPointerSize),
        NULL);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_A64
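
Closing annotation: one pattern from the GenerateRecordCallTarget hunk worth spelling out is the call-out protocol around CreateAllocationSiteStub: live registers are pushed, argc is smi-tagged so the GC never sees a raw integer on the stack, and everything is popped and untagged after the call. Below is a sketch of 64-bit smi tagging as used there, assuming the upper-32-bit smi layout of 64-bit V8 targets; kSmiShift = 32 is an assumption stated here, not read from this file.

#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;  // assumed 64-bit smi layout

// A smi keeps a 32-bit integer in the upper word; the low bits stay zero,
// so a smi can never be mistaken for a tagged heap pointer (low bit 1).
uint64_t SmiTag(int32_t value) {
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
}

int32_t SmiUntag(uint64_t smi) {
  return static_cast<int32_t>(smi >> kSmiShift);
}

int main() {
  int32_t argc = 3;                  // raw argument count
  uint64_t tagged = SmiTag(argc);    // what __ SmiTag(argc) leaves behind
  assert((tagged & 1) == 0);         // GC-safe: not a heap-object pointer
  assert(SmiUntag(tagged) == argc);  // restored after the Pop, as in the stub
  return 0;
}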