Chromium Code Reviews

Unified Diff: src/mips/code-stubs-mips.cc

Issue 181453002: Reset trunk to 3.24.35.4 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 6 years, 10 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 88 matching lines...)
   descriptor->register_param_count_ = 4;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
 }


 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
-  static Register registers[] = { a2, a3 };
-  descriptor->register_param_count_ = 2;
+  static Register registers[] = { a2 };
+  descriptor->register_param_count_ = 1;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ = NULL;
 }

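Reviewer-style aside (not part of the patch): the parameter list shrinks because the reverted GenerateRecordCallTarget further down passes CreateAllocationSiteStub only the type-feedback cell in a2, whereas the feedback-vector version also passed the slot index in a3 (its caller saves a0-a3 around the stub call instead of a0-a2). A minimal, self-contained sketch of what such an interface descriptor records, using stand-in types rather than V8's real Register and entry-point types:

// Stand-ins for v8::internal types; illustration only, not V8's API.
using Register = int;          // registers are just numbered here
using Address = const void*;   // runtime entry pointer

struct StubDescriptorSketch {
  int register_param_count_;          // how many parameters arrive in registers
  const Register* register_params_;   // which registers, in argument order
  Address deoptimization_handler_;    // runtime fallback entry, or nullptr
};

int main() {
  // Reverted shape: the stub expects only the cell register (a2).
  static const Register registers[] = { 6 /* a2 */ };
  StubDescriptorSketch descriptor{1, registers, nullptr};
  return descriptor.register_param_count_ == 1 ? 0 : 1;
}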
 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { a1, a0 };
   descriptor->register_param_count_ = 2;
(...skipping 3024 matching lines...)
   // Load offset into t0 and replace subject string with parent.
   __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
   __ sra(t0, t0, kSmiTagSize);
   __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
   __ jmp(&check_underlying);  // Go to (4).
 #endif  // V8_INTERPRETED_REGEXP
 }

 static void GenerateRecordCallTarget(MacroAssembler* masm) {
-  // Cache the called function in a feedback vector slot. Cache states
+  // Cache the called function in a global property cell. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
   // a0 : number of arguments to the construct function
   // a1 : the function to call
-  // a2 : Feedback vector
-  // a3 : slot in feedback vector (Smi)
+  // a2 : cache cell for call target
   Label initialize, done, miss, megamorphic, not_array_function;

-  ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
+  ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->undefined_value());
-  ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
+  ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
             masm->isolate()->heap()->the_hole_value());

-  // Load the cache state into t0.
-  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(t0, a2, Operand(t0));
-  __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
+  // Load the cache state into a3.
+  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));

   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
-  __ Branch(&done, eq, t0, Operand(a1));
+  __ Branch(&done, eq, a3, Operand(a1));

   // If we came here, we need to see if we are the array function.
   // If we didn't have a matching function, and we didn't find the megamorph
-  // sentinel, then we have in the slot either some other function or an
+  // sentinel, then we have in the cell either some other function or an
   // AllocationSite. Do a map check on the object in a3.
-  __ lw(t1, FieldMemOperand(t0, 0));
+  __ lw(t1, FieldMemOperand(a3, 0));
   __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
   __ Branch(&miss, ne, t1, Operand(at));

   // Make sure the function is the Array() function
-  __ LoadArrayFunction(t0);
-  __ Branch(&megamorphic, ne, a1, Operand(t0));
+  __ LoadArrayFunction(a3);
+  __ Branch(&megamorphic, ne, a1, Operand(a3));
   __ jmp(&done);

   __ bind(&miss);

   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
   __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
-  __ Branch(&initialize, eq, t0, Operand(at));
+  __ Branch(&initialize, eq, a3, Operand(at));
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ bind(&megamorphic);
-  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(t0, a2, Operand(t0));
   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-  __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
+  __ sw(at, FieldMemOperand(a2, Cell::kValueOffset));
   __ jmp(&done);

   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ bind(&initialize);
   // Make sure the function is the Array() function
-  __ LoadArrayFunction(t0);
-  __ Branch(&not_array_function, ne, a1, Operand(t0));
+  __ LoadArrayFunction(a3);
+  __ Branch(&not_array_function, ne, a1, Operand(a3));

   // The target function is the Array constructor.
-  // Create an AllocationSite if we don't already have it, store it in the slot.
+  // Create an AllocationSite if we don't already have it, store it in the cell.
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     const RegList kSavedRegs =
         1 << 4 |  // a0
         1 << 5 |  // a1
-        1 << 6 |  // a2
-        1 << 7;   // a3
+        1 << 6;   // a2

     // Arguments register must be smi-tagged to call out.
     __ SmiTag(a0);
     __ MultiPush(kSavedRegs);

     CreateAllocationSiteStub create_stub;
     __ CallStub(&create_stub);

     __ MultiPop(kSavedRegs);
     __ SmiUntag(a0);
   }
   __ Branch(&done);

   __ bind(&not_array_function);
-
-  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(t0, a2, Operand(t0));
-  __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  __ sw(a1, MemOperand(t0, 0));
-
-  __ Push(t0, a2, a1);
-  __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs,
-                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  __ Pop(t0, a2, a1);
+  __ sw(a1, FieldMemOperand(a2, Cell::kValueOffset));
+  // No need for a write barrier here - cells are rescanned.

   __ bind(&done);
 }

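Aside (reviewer note, not part of the patch): both sides of this hunk implement the same three-state call-target cache, uninitialized (the hole), monomorphic (a JSFunction, or an AllocationSite when the callee is the Array constructor), and megamorphic (undefined). The structural difference is only where the cache lives: the reverted code reads and writes Cell::kValueOffset off a2, while the feedback-vector code addressed a Smi-indexed slot at a2 + (slot << (kPointerSizeLog2 - kSmiTagSize)) + FixedArray::kHeaderSize. A rough, self-contained C++ sketch of the transition logic, ignoring the Array()/AllocationSite special case:

#include <cassert>

// Stand-ins for the heap sentinels the stub compares against.
enum Sentinel { kTheHole = -1, kUndefined = -2 };

// 'cache' holds one word: a callee id, kTheHole (uninitialized) or
// kUndefined (megamorphic). Returns the cache contents after the call.
int RecordCallTarget(int* cache, int callee) {
  if (*cache == callee || *cache == kUndefined) {
    return *cache;               // monomorphic hit, or already megamorphic
  }
  if (*cache == kTheHole) {
    *cache = callee;             // uninitialized -> monomorphic
  } else {
    *cache = kUndefined;         // monomorphic miss -> megamorphic
  }
  return *cache;
}

int main() {
  int cache = kTheHole;
  RecordCallTarget(&cache, 7);   // first call: cache the callee
  assert(cache == 7);
  RecordCallTarget(&cache, 7);   // same callee: state unchanged
  assert(cache == 7);
  RecordCallTarget(&cache, 9);   // different callee: go megamorphic
  assert(cache == kUndefined);
  return 0;
}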
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // a1 : the function to call
-  // a2 : feedback vector
-  // a3 : (only if a2 is not undefined) slot in feedback vector (Smi)
+  // a2 : cache cell for call target
   Label slow, non_function, wrap, cont;

   if (NeedsChecks()) {
     // Check that the function is really a JavaScript function.
     // a1: pushed function (to be verified)
     __ JumpIfSmi(a1, &non_function);

     // Goto slow case if we do not have a function.
-    __ GetObjectType(a1, t0, t0);
-    __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
+    __ GetObjectType(a1, a3, a3);
+    __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));

     if (RecordCallTarget()) {
       GenerateRecordCallTarget(masm);
     }
   }

   // Fast-case: Invoke the function now.
   // a1: pushed function
   ParameterCount actual(argc_);

(...skipping 24 matching lines...)
   }
   __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());

   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
     if (RecordCallTarget()) {
       // If there is a call target cache, mark it megamorphic in the
       // non-function case. MegamorphicSentinel is an immortal immovable
       // object (undefined) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
+      ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
                 masm->isolate()->heap()->undefined_value());
-      __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
-      __ Addu(t1, a2, Operand(t1));
       __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-      __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize));
+      __ sw(at, FieldMemOperand(a2, Cell::kValueOffset));
     }
     // Check for function proxy.
-    __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
+    __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
     __ push(a1);  // Put proxy as additional argument.
     __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32));
     __ li(a2, Operand(0, RelocInfo::NONE32));
     __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
           masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ Jump(adaptor, RelocInfo::CODE_TARGET);
     }

(...skipping 19 matching lines...)
     __ mov(a0, v0);
     __ sw(a0, MemOperand(sp, argc_ * kPointerSize));
     __ jmp(&cont);
   }
 }

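Aside: the megamorphic transition here and in GenerateRecordCallTarget leans on the fact that the sentinel (undefined) is an immortal, immovable heap object, so storing it never needs a write barrier; barriers only have to record stores of values the GC might move or reclaim. A hedged, self-contained sketch of that rule (StoreWithBarrier, remembered_slots and the immortal flag are illustrative stand-ins, not V8 APIs):

#include <unordered_set>

struct Obj {
  bool immortal_immovable;  // true for roots such as the undefined sentinel
};

// Illustrative remembered set: slots the collector must revisit later.
static std::unordered_set<Obj**> remembered_slots;

void StoreWithBarrier(Obj** slot, Obj* value) {
  *slot = value;
  // The barrier exists so incremental/generational GC can find this store.
  // An immortal, immovable value can never be moved or collected, so
  // recording the slot would be pointless and the store can skip the barrier.
  if (value != nullptr && !value->immortal_immovable) {
    remembered_slots.insert(slot);
  }
}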
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // a0 : number of arguments
   // a1 : the function to call
-  // a2 : feedback vector
-  // a3 : (only if a2 is not undefined) slot in feedback vector (Smi)
+  // a2 : cache cell for call target
   Label slow, non_function_call;

   // Check that the function is not a smi.
   __ JumpIfSmi(a1, &non_function_call);
   // Check that the function is a JSFunction.
-  __ GetObjectType(a1, t0, t0);
-  __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
+  __ GetObjectType(a1, a3, a3);
+  __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));

   if (RecordCallTarget()) {
     GenerateRecordCallTarget(masm);
   }

   // Jump to the function-specific construct stub.
-  Register jmp_reg = t0;
+  Register jmp_reg = a3;
   __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
   __ lw(jmp_reg, FieldMemOperand(jmp_reg,
                                  SharedFunctionInfo::kConstructStubOffset));
   __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ Jump(at);

   // a0: number of arguments
   // a1: called object
-  // t0: object type
+  // a3: object type
   Label do_call;
   __ bind(&slow);
-  __ Branch(&non_function_call, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
+  __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
   __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
   __ jmp(&do_call);

   __ bind(&non_function_call);
   __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing r0).
   __ li(a2, Operand(0, RelocInfo::NONE32));
   __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
(...skipping 1977 matching lines...)
     ArraySingleArgumentConstructorStub stub_holey(holey_initial,
                                                   DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub_holey);

     __ bind(&normal_sequence);
     ArraySingleArgumentConstructorStub stub(initial,
                                             DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
     // We are going to create a holey array, but our kind is non-holey.
-    // Fix kind and retry (only if we have an allocation site in the slot).
+    // Fix kind and retry (only if we have an allocation site in the cell).
     __ Addu(a3, a3, Operand(1));

     if (FLAG_debug_code) {
       __ lw(t1, FieldMemOperand(a2, 0));
       __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
       __ Assert(eq, kExpectedAllocationSite, t1, Operand(at));
     }

     // Save the resulting elements kind in type info. We can't just store a3
     // in the AllocationSite::transition_info field because elements kind is
(...skipping 86 matching lines...)
   } else {
     UNREACHABLE();
   }
 }

 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : argc (only if argument_count_ == ANY)
   //  -- a1 : constructor
-  //  -- a2 : feedback vector (fixed array or undefined)
-  //  -- a3 : slot index (if a2 is fixed array)
+  //  -- a2 : type info cell
   //  -- sp[0] : return address
   //  -- sp[4] : last argument
   // -----------------------------------
   if (FLAG_debug_code) {
     // The array construct code is only set for the global and natives
     // builtin Array functions which always have maps.

     // Initial map for the builtin Array function should be a map.
-    __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
+    __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
-    __ SmiTst(t0, at);
+    __ SmiTst(a3, at);
     __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
               at, Operand(zero_reg));
-    __ GetObjectType(t0, t0, t1);
+    __ GetObjectType(a3, a3, t0);
     __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
-              t1, Operand(MAP_TYPE));
+              t0, Operand(MAP_TYPE));

-    // We should either have undefined in a2 or a valid fixed array.
+    // We should either have undefined in a2 or a valid cell.
     Label okay_here;
-    Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
+    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
     __ Branch(&okay_here, eq, a2, Operand(at));
-    __ lw(t0, FieldMemOperand(a2, 0));
-    __ Assert(eq, kExpectedFixedArrayInRegisterA2,
-              t0, Operand(fixed_array_map));
-
-    // a3 should be a smi if we don't have undefined in a2
-    __ AssertSmi(a3);
-
+    __ lw(a3, FieldMemOperand(a2, 0));
+    __ Assert(eq, kExpectedPropertyCellInRegisterA2,
+              a3, Operand(cell_map));
     __ bind(&okay_here);
   }

   Label no_info;
   // Get the elements kind and case on that.
   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   __ Branch(&no_info, eq, a2, Operand(at));
-  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(a2, a2, Operand(t0));
-  __ lw(a2, FieldMemOperand(a2, FixedArray::kHeaderSize));
+  __ lw(a2, FieldMemOperand(a2, Cell::kValueOffset));

-  // If the feedback vector is undefined, or contains anything other than an
+  // If the type cell is undefined, or contains anything other than an
   // AllocationSite, call an array constructor that doesn't use AllocationSites.
   __ lw(t0, FieldMemOperand(a2, 0));
   __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
   __ Branch(&no_info, ne, t0, Operand(at));

   __ lw(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
   __ SmiUntag(a3);
   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
   __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
(...skipping 91 matching lines...)
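Aside: the last few instructions before this skip decode the elements kind out of AllocationSite::transition_info, a Smi whose low bits hold the kind (the STATIC_ASSERT pins ElementsKindBits::kShift to 0). A small, self-contained sketch of that decode; the mask value and the one-bit Smi tag of 32-bit MIPS are assumptions, not taken from this file:

#include <cstdint>

constexpr uint32_t kSmiTagSize = 1;           // 31-bit Smis on 32-bit targets
constexpr uint32_t kElementsKindMask = 0x1f;  // stand-in for ElementsKindBits::kMask

// Mirrors: __ SmiUntag(a3); __ And(a3, a3, Operand(ElementsKindBits::kMask));
uint32_t DecodeElementsKind(uint32_t smi_transition_info) {
  uint32_t untagged = smi_transition_info >> kSmiTagSize;  // SmiUntag
  return untagged & kElementsKindMask;  // kind sits in the low bits (shift 0)
}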
   //  -- sp[argc * 4] : receiver
   // -----------------------------------

   Register callee = a0;
   Register call_data = t0;
   Register holder = a2;
   Register api_function_address = a1;
   Register context = cp;

   int argc = ArgumentBits::decode(bit_field_);
-  bool is_store = IsStoreBits::decode(bit_field_);
+  bool restore_context = RestoreContextBits::decode(bit_field_);
   bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);

   typedef FunctionCallbackArguments FCA;

   STATIC_ASSERT(FCA::kContextSaveIndex == 6);
   STATIC_ASSERT(FCA::kCalleeIndex == 5);
   STATIC_ASSERT(FCA::kDataIndex == 4);
   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
   STATIC_ASSERT(FCA::kIsolateIndex == 1);
(...skipping 46 matching lines...)
   const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
   Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
   ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
   ApiFunction thunk_fun(thunk_address);
   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
                                                   masm->isolate());

   AllowExternalCallThatCantCauseGC scope(masm);
   MemOperand context_restore_operand(
       fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
-  // Stores return the first js argument.
-  int return_value_offset = 0;
-  if (is_store) {
-    return_value_offset = 2 + FCA::kArgsLength;
-  } else {
-    return_value_offset = 2 + FCA::kReturnValueOffset;
-  }
-  MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
+  MemOperand return_value_operand(fp,
+                                  (2 + FCA::kReturnValueOffset) * kPointerSize);

   __ CallApiFunctionAndReturn(api_function_address,
                               thunk_ref,
                               kStackUnwindSpace,
                               return_value_operand,
-                              &context_restore_operand);
+                              restore_context ?
+                                  &context_restore_operand : NULL);
 }

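Aside: the only behavioural divergence in this hunk is which frame slot the API call returns and whether the context restore is unconditional. The feedback-vector-era code let store callbacks return their first JS argument (slot 2 + FCA::kArgsLength above fp), while the reverted code always returns the FCA::kReturnValueOffset slot and instead gates the context restore on restore_context. A self-contained sketch of that slot arithmetic; kArgsLength is an illustrative value, and reading the "+ 2" as the two words saved between fp and the callback arguments is an assumption:

// Offsets are in pointer-sized words relative to fp, as in the stub above.
constexpr int kReturnValueOffset = 3;  // matches the STATIC_ASSERT earlier
constexpr int kArgsLength = 7;         // illustrative value, not from this diff

// Old behaviour: store callbacks hand back their first JS argument.
int ReturnValueSlotOld(bool is_store) {
  return is_store ? 2 + kArgsLength : 2 + kReturnValueOffset;
}

// Reverted behaviour: always the ReturnValue slot.
int ReturnValueSlotNew() {
  return 2 + kReturnValueOffset;
}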
 void CallApiGetterStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- sp[0] : name
   //  -- sp[4 - kArgsLength*4] : PropertyCallbackArguments object
   //  -- ...
   //  -- a2 : api_function_address
   // -----------------------------------
(...skipping 26 matching lines...)
                               MemOperand(fp, 6 * kPointerSize),
                               NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS
