Chromium Code Reviews

Unified Diff: src/mips/code-stubs-mips.cc

Issue 203463013: MIPS: Pretenure call new support. (Closed) Base URL: https://github.com/v8/v8.git@gbl
Patch Set: Created 6 years, 9 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 3075 matching lines...)
@@ -3086,78 +3086,82 @@ static void GenerateRecordCallTarget(MacroAssembler* masm)
 
   // Load the cache state into t0.
   __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(t0, a2, Operand(t0));
   __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
   __ Branch(&done, eq, t0, Operand(a1));
 
-  // If we came here, we need to see if we are the array function.
-  // If we didn't have a matching function, and we didn't find the megamorph
-  // sentinel, then we have in the slot either some other function or an
-  // AllocationSite. Do a map check on the object in a3.
-  __ lw(t1, FieldMemOperand(t0, 0));
-  __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
-  __ Branch(&miss, ne, t1, Operand(at));
-
-  // Make sure the function is the Array() function
-  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
-  __ Branch(&megamorphic, ne, a1, Operand(t0));
-  __ jmp(&done);
+  if (!FLAG_pretenuring_call_new) {
+    // If we came here, we need to see if we are the array function.
+    // If we didn't have a matching function, and we didn't find the megamorph
+    // sentinel, then we have in the slot either some other function or an
+    // AllocationSite. Do a map check on the object in a3.
+    __ lw(t1, FieldMemOperand(t0, 0));
+    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+    __ Branch(&miss, ne, t1, Operand(at));
+
+    // Make sure the function is the Array() function
+    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
+    __ Branch(&megamorphic, ne, a1, Operand(t0));
+    __ jmp(&done);
+  }
 
   __ bind(&miss);
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
   __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
   __ Branch(&initialize, eq, t0, Operand(at));
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ bind(&megamorphic);
   __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(t0, a2, Operand(t0));
   __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
   __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
   __ jmp(&done);
 
-  // An uninitialized cache is patched with the function or sentinel to
-  // indicate the ElementsKind if function is the Array constructor.
+  // An uninitialized cache is patched with the function.
   __ bind(&initialize);
-  // Make sure the function is the Array() function
-  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
-  __ Branch(&not_array_function, ne, a1, Operand(t0));
-
-  // The target function is the Array constructor.
-  // Create an AllocationSite if we don't already have it, store it in the slot.
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    const RegList kSavedRegs =
-        1 << 4 |  // a0
-        1 << 5 |  // a1
-        1 << 6 |  // a2
-        1 << 7;   // a3
-
-    // Arguments register must be smi-tagged to call out.
-    __ SmiTag(a0);
-    __ MultiPush(kSavedRegs);
-
-    CreateAllocationSiteStub create_stub;
-    __ CallStub(&create_stub);
-
-    __ MultiPop(kSavedRegs);
-    __ SmiUntag(a0);
-  }
-  __ Branch(&done);
-
-  __ bind(&not_array_function);
+  if (!FLAG_pretenuring_call_new) {
+    // Make sure the function is the Array() function.
+    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
+    __ Branch(&not_array_function, ne, a1, Operand(t0));
+
+    // The target function is the Array constructor,
+    // Create an AllocationSite if we don't already have it, store it in the
+    // slot.
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      const RegList kSavedRegs =
+          1 << 4 |  // a0
+          1 << 5 |  // a1
+          1 << 6 |  // a2
+          1 << 7;   // a3
+
+      // Arguments register must be smi-tagged to call out.
+      __ SmiTag(a0);
+      __ MultiPush(kSavedRegs);
+
+      CreateAllocationSiteStub create_stub;
+      __ CallStub(&create_stub);
+
+      __ MultiPop(kSavedRegs);
+      __ SmiUntag(a0);
+    }
+    __ Branch(&done);
+
+    __ bind(&not_array_function);
+  }
 
   __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(t0, a2, Operand(t0));
   __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ sw(a1, MemOperand(t0, 0));
 
   __ Push(t0, a2, a1);
   __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs,
                  EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   __ Pop(t0, a2, a1);
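
For orientation: the hunk above is the type-feedback state machine emitted by GenerateRecordCallTarget. A feedback-vector slot starts uninitialized, gets patched with the callee (or, when the callee is the Array constructor, with an AllocationSite carrying ElementsKind feedback), and collapses to the megamorphic sentinel on any mismatch. With FLAG_pretenuring_call_new the Array/AllocationSite special case is compiled out of this stub; per the "a3 + 1" comment in the next hunk, constructor call sites then carry their AllocationSite in a dedicated feedback slot instead. The C++ below is only a sketch of that control flow; all types and helpers are stand-ins for illustration, not V8 classes.

#include <variant>

// Stand-in types for illustration only; not V8 classes.
struct Function { bool is_array_constructor = false; };
struct AllocationSite {};
struct Uninitialized {};
struct Megamorphic {};

using FeedbackSlot =
    std::variant<Uninitialized, Megamorphic, Function*, AllocationSite*>;

// C++-level restatement of the state machine the MIPS stub above emits.
void RecordCallTarget(FeedbackSlot& slot, Function* callee,
                      bool pretenuring_call_new) {
  // Monomorphic hit: leave the slot alone. (An already-megamorphic slot also
  // stays megamorphic; the real stub simply re-stores the sentinel.)
  if (auto* f = std::get_if<Function*>(&slot); f && *f == callee) return;

  // Without pretenuring, an AllocationSite in the slot is still valid
  // monomorphic Array feedback as long as the callee is the Array function.
  if (!pretenuring_call_new &&
      std::holds_alternative<AllocationSite*>(slot) &&
      callee->is_array_constructor) {
    return;
  }

  // A monomorphic miss (the slot is not uninitialized) goes megamorphic.
  if (!std::holds_alternative<Uninitialized>(slot)) {
    slot = Megamorphic{};
    return;
  }

  // An uninitialized slot is patched with an AllocationSite for the Array
  // constructor (only when pretenuring is off) or with the function itself.
  if (!pretenuring_call_new && callee->is_array_constructor) {
    slot = new AllocationSite();  // leaks in this toy sketch; V8 heap-allocates
  } else {
    slot = callee;
  }
}

The SmiTag(a0)/MultiPush/MultiPop bracketing around CallStub(&create_stub) in the real code exists because the stub call can allocate: a0 holds a raw argument count, so it is smi-tagged to keep the GC from treating it as a pointer, and a0-a3 are saved and restored around the call.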
(...skipping 117 matching lines...)
@@ -3281,32 +3285,40 @@
   // a3 : (only if a2 is not undefined) slot in feedback vector (Smi)
   Label slow, non_function_call;
 
   // Check that the function is not a smi.
   __ JumpIfSmi(a1, &non_function_call);
   // Check that the function is a JSFunction.
   __ GetObjectType(a1, t0, t0);
   __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
 
   if (RecordCallTarget()) {
-    Label feedback_register_initialized;
     GenerateRecordCallTarget(masm);
 
-    // Put the AllocationSite from the feedback vector into a2, or undefined.
     __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
     __ Addu(t1, a2, at);
-    __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
-    __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
-    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
-    __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
-    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-    __ bind(&feedback_register_initialized);
+    if (FLAG_pretenuring_call_new) {
+      // Put the AllocationSite from the feedback vector into a2.
+      // By adding kPointerSize we encode that we know the AllocationSite
+      // entry is at the feedback vector slot given by a3 + 1.
+      __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize));
+    } else {
+      Label feedback_register_initialized;
+      // Put the AllocationSite from the feedback vector into a2, or undefined.
+      __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
+      __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
+      __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+      __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
+      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+      __ bind(&feedback_register_initialized);
+    }
+
     __ AssertUndefinedOrAllocationSite(a2, t1);
   }
 
   // Jump to the function-specific construct stub.
   Register jmp_reg = t0;
   __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   __ lw(jmp_reg, FieldMemOperand(jmp_reg,
                                  SharedFunctionInfo::kConstructStubOffset));
   __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ Jump(at);
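
On the FLAG_pretenuring_call_new path above, the AllocationSite is fetched with a single lw at a fixed offset: per the comment, the AllocationSite entry lives in feedback-vector slot a3 + 1, one pointer past the slot that GenerateRecordCallTarget filled with the function, so no map check is needed. Without the flag, the stub must map-check whatever is in slot a3 and fall back to undefined. Below is a minimal sketch of the 32-bit address arithmetic behind FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize); the helper name and spelled-out constants are assumptions for illustration, not V8 declarations.

#include <cstdint>

// 32-bit MIPS layout assumed: 4-byte pointers, 1-bit smi tag, heap-object
// tag of 1, and a FixedArray header of map + length (8 bytes).
constexpr uint32_t kPointerSize = 4;
constexpr uint32_t kPointerSizeLog2 = 2;
constexpr uint32_t kSmiTagSize = 1;
constexpr uint32_t kHeapObjectTag = 1;
constexpr uint32_t kFixedArrayHeaderSize = 2 * kPointerSize;

// a2 holds the tagged feedback vector, a3 the slot index as a smi
// (index << kSmiTagSize). The stub forms t1 = a2 + (a3 << 1), so adding an
// extra kPointerSize to the FieldMemOperand offset addresses slot a3 + 1.
uint32_t FeedbackSlotAddress(uint32_t vector_tagged, uint32_t slot_smi,
                             bool allocation_site_slot) {
  uint32_t base =
      vector_tagged + (slot_smi << (kPointerSizeLog2 - kSmiTagSize));
  uint32_t offset = kFixedArrayHeaderSize - kHeapObjectTag;  // FieldMemOperand
  if (allocation_site_slot) offset += kPointerSize;          // slot a3 + 1
  return base + offset;
}

Either way the loaded value ends up in a2 and is sanity-checked by AssertUndefinedOrAllocationSite before jumping to the function-specific construct stub.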
(...skipping 2354 matching lines...)
                              MemOperand(fp, 6 * kPointerSize),
                              NULL);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS