| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 88 matching lines...) |
| 99 descriptor->register_param_count_ = 4; | 99 descriptor->register_param_count_ = 4; |
| 100 descriptor->register_params_ = registers; | 100 descriptor->register_params_ = registers; |
| 101 descriptor->deoptimization_handler_ = | 101 descriptor->deoptimization_handler_ = |
| 102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; | 102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; |
| 103 } | 103 } |
| 104 | 104 |
| 105 | 105 |
| 106 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( | 106 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( |
| 107 Isolate* isolate, | 107 Isolate* isolate, |
| 108 CodeStubInterfaceDescriptor* descriptor) { | 108 CodeStubInterfaceDescriptor* descriptor) { |
| 109 static Register registers[] = { a2 }; | 109 static Register registers[] = { a2, a3 }; |
| 110 descriptor->register_param_count_ = 1; | 110 descriptor->register_param_count_ = 2; |
| 111 descriptor->register_params_ = registers; | 111 descriptor->register_params_ = registers; |
| 112 descriptor->deoptimization_handler_ = NULL; | 112 descriptor->deoptimization_handler_ = NULL; |
| 113 } | 113 } |
| 114 | 114 |
| 115 | 115 |
| 116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( | 116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
| 117 Isolate* isolate, | 117 Isolate* isolate, |
| 118 CodeStubInterfaceDescriptor* descriptor) { | 118 CodeStubInterfaceDescriptor* descriptor) { |
| 119 static Register registers[] = { a1, a0 }; | 119 static Register registers[] = { a1, a0 }; |
| 120 descriptor->register_param_count_ = 2; | 120 descriptor->register_param_count_ = 2; |
| (...skipping 3024 matching lines...) |
| 3145 // Load offset into t0 and replace subject string with parent. | 3145 // Load offset into t0 and replace subject string with parent. |
| 3146 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); | 3146 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); |
| 3147 __ sra(t0, t0, kSmiTagSize); | 3147 __ sra(t0, t0, kSmiTagSize); |
| 3148 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 3148 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
| 3149 __ jmp(&check_underlying); // Go to (4). | 3149 __ jmp(&check_underlying); // Go to (4). |
| 3150 #endif // V8_INTERPRETED_REGEXP | 3150 #endif // V8_INTERPRETED_REGEXP |
| 3151 } | 3151 } |
| 3152 | 3152 |
| 3153 | 3153 |
| 3154 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 3154 static void GenerateRecordCallTarget(MacroAssembler* masm) { |
| 3155 // Cache the called function in a global property cell. Cache states | 3155 // Cache the called function in a feedback vector slot. Cache states |
| 3156 // are uninitialized, monomorphic (indicated by a JSFunction), and | 3156 // are uninitialized, monomorphic (indicated by a JSFunction), and |
| 3157 // megamorphic. | 3157 // megamorphic. |
| 3158 // a0 : number of arguments to the construct function | 3158 // a0 : number of arguments to the construct function |
| 3159 // a1 : the function to call | 3159 // a1 : the function to call |
| 3160 // a2 : cache cell for call target | 3160 // a2 : Feedback vector |
| 3161 // a3 : slot in feedback vector (Smi) |
| 3161 Label initialize, done, miss, megamorphic, not_array_function; | 3162 Label initialize, done, miss, megamorphic, not_array_function; |
| 3162 | 3163 |
| 3163 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 3164 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3164 masm->isolate()->heap()->undefined_value()); | 3165 masm->isolate()->heap()->undefined_value()); |
| 3165 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), | 3166 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), |
| 3166 masm->isolate()->heap()->the_hole_value()); | 3167 masm->isolate()->heap()->the_hole_value()); |
| 3167 | 3168 |
| 3168 // Load the cache state into a3. | 3169 // Load the cache state into t0. |
| 3169 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 3170 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3171 __ Addu(t0, a2, Operand(t0)); |
| 3172 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3170 | 3173 |
| 3171 // A monomorphic cache hit or an already megamorphic state: invoke the | 3174 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 3172 // function without changing the state. | 3175 // function without changing the state. |
| 3173 __ Branch(&done, eq, a3, Operand(a1)); | 3176 __ Branch(&done, eq, t0, Operand(a1)); |
| 3174 | 3177 |
| 3175 // If we came here, we need to see if we are the array function. | 3178 // If we came here, we need to see if we are the array function. |
| 3176 // If we didn't have a matching function, and we didn't find the megamorphic | 3179 // If we didn't have a matching function, and we didn't find the megamorphic |
| 3177 // sentinel, then we have in the cell either some other function or an | 3180 // sentinel, then we have in the slot either some other function or an |
| 3178 // AllocationSite. Do a map check on the object in a3. | 3181 // AllocationSite. Do a map check on the object in t0. |
| 3179 __ lw(t1, FieldMemOperand(a3, 0)); | 3182 __ lw(t1, FieldMemOperand(t0, 0)); |
| 3180 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 3183 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 3181 __ Branch(&miss, ne, t1, Operand(at)); | 3184 __ Branch(&miss, ne, t1, Operand(at)); |
| 3182 | 3185 |
| 3183 // Make sure the function is the Array() function | 3186 // Make sure the function is the Array() function |
| 3184 __ LoadArrayFunction(a3); | 3187 __ LoadArrayFunction(t0); |
| 3185 __ Branch(&megamorphic, ne, a1, Operand(a3)); | 3188 __ Branch(&megamorphic, ne, a1, Operand(t0)); |
| 3186 __ jmp(&done); | 3189 __ jmp(&done); |
| 3187 | 3190 |
| 3188 __ bind(&miss); | 3191 __ bind(&miss); |
| 3189 | 3192 |
| 3190 // A monomorphic miss (i.e., here the cache is not uninitialized) goes | 3193 // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
| 3191 // megamorphic. | 3194 // megamorphic. |
| 3192 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 3195 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 3193 __ Branch(&initialize, eq, a3, Operand(at)); | 3196 __ Branch(&initialize, eq, t0, Operand(at)); |
| 3194 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3197 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 3195 // write-barrier is needed. | 3198 // write-barrier is needed. |
| 3196 __ bind(&megamorphic); | 3199 __ bind(&megamorphic); |
| 3200 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3201 __ Addu(t0, a2, Operand(t0)); |
| 3197 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 3202 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 3198 __ sw(at, FieldMemOperand(a2, Cell::kValueOffset)); | 3203 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 3199 __ jmp(&done); | 3204 __ jmp(&done); |
| 3200 | 3205 |
| 3201 // An uninitialized cache is patched with the function or sentinel to | 3206 // An uninitialized cache is patched with the function or sentinel to |
| 3202 // indicate the ElementsKind if function is the Array constructor. | 3207 // indicate the ElementsKind if function is the Array constructor. |
| 3203 __ bind(&initialize); | 3208 __ bind(&initialize); |
| 3204 // Make sure the function is the Array() function | 3209 // Make sure the function is the Array() function |
| 3205 __ LoadArrayFunction(a3); | 3210 __ LoadArrayFunction(t0); |
| 3206 __ Branch(¬_array_function, ne, a1, Operand(a3)); | 3211 __ Branch(¬_array_function, ne, a1, Operand(t0)); |
| 3207 | 3212 |
| 3208 // The target function is the Array constructor. | 3213 // The target function is the Array constructor. |
| 3209 // Create an AllocationSite if we don't already have it, store it in the cell. | 3214 // Create an AllocationSite if we don't already have it, store it in the slot. |
| 3210 { | 3215 { |
| 3211 FrameScope scope(masm, StackFrame::INTERNAL); | 3216 FrameScope scope(masm, StackFrame::INTERNAL); |
| 3212 const RegList kSavedRegs = | 3217 const RegList kSavedRegs = |
| 3213 1 << 4 | // a0 | 3218 1 << 4 | // a0 |
| 3214 1 << 5 | // a1 | 3219 1 << 5 | // a1 |
| 3215 1 << 6; // a2 | 3220 1 << 6 | // a2 |
| 3221 1 << 7; // a3 |
| 3216 | 3222 |
| 3217 // Arguments register must be smi-tagged to call out. | 3223 // Arguments register must be smi-tagged to call out. |
| 3218 __ SmiTag(a0); | 3224 __ SmiTag(a0); |
| 3219 __ MultiPush(kSavedRegs); | 3225 __ MultiPush(kSavedRegs); |
| 3220 | 3226 |
| 3221 CreateAllocationSiteStub create_stub; | 3227 CreateAllocationSiteStub create_stub; |
| 3222 __ CallStub(&create_stub); | 3228 __ CallStub(&create_stub); |
| 3223 | 3229 |
| 3224 __ MultiPop(kSavedRegs); | 3230 __ MultiPop(kSavedRegs); |
| 3225 __ SmiUntag(a0); | 3231 __ SmiUntag(a0); |
| 3226 } | 3232 } |
| 3227 __ Branch(&done); | 3233 __ Branch(&done); |
| 3228 | 3234 |
| 3229 __ bind(¬_array_function); | 3235 __ bind(¬_array_function); |
| 3230 __ sw(a1, FieldMemOperand(a2, Cell::kValueOffset)); | 3236 |
| 3231 // No need for a write barrier here - cells are rescanned. | 3237 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3238 __ Addu(t0, a2, Operand(t0)); |
| 3239 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3240 __ sw(a1, MemOperand(t0, 0)); |
| 3241 |
| 3242 __ Push(t0, a2, a1); |
| 3243 __ RecordWrite(a2, t0, a1, kRAHasNotBeenSaved, kDontSaveFPRegs, |
| 3244 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3245 __ Pop(t0, a2, a1); |
| 3232 | 3246 |
| 3233 __ bind(&done); | 3247 __ bind(&done); |
| 3234 } | 3248 } |
| 3235 | 3249 |
| 3236 | 3250 |
| 3237 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3251 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3238 // a1 : the function to call | 3252 // a1 : the function to call |
| 3239 // a2 : cache cell for call target | 3253 // a2 : feedback vector |
| 3254 // a3 : (only if a2 is not undefined) slot in feedback vector (Smi) |
| 3240 Label slow, non_function, wrap, cont; | 3255 Label slow, non_function, wrap, cont; |
| 3241 | 3256 |
| 3242 if (NeedsChecks()) { | 3257 if (NeedsChecks()) { |
| 3243 // Check that the function is really a JavaScript function. | 3258 // Check that the function is really a JavaScript function. |
| 3244 // a1: pushed function (to be verified) | 3259 // a1: pushed function (to be verified) |
| 3245 __ JumpIfSmi(a1, &non_function); | 3260 __ JumpIfSmi(a1, &non_function); |
| 3246 | 3261 |
| 3247 // Goto slow case if we do not have a function. | 3262 // Goto slow case if we do not have a function. |
| 3248 __ GetObjectType(a1, a3, a3); | 3263 __ GetObjectType(a1, t0, t0); |
| 3249 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); | 3264 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
| 3250 | 3265 |
| 3251 if (RecordCallTarget()) { | 3266 if (RecordCallTarget()) { |
| 3252 GenerateRecordCallTarget(masm); | 3267 GenerateRecordCallTarget(masm); |
| 3253 } | 3268 } |
| 3254 } | 3269 } |
| 3255 | 3270 |
| 3256 // Fast-case: Invoke the function now. | 3271 // Fast-case: Invoke the function now. |
| 3257 // a1: pushed function | 3272 // a1: pushed function |
| 3258 ParameterCount actual(argc_); | 3273 ParameterCount actual(argc_); |
| 3259 | 3274 |
| (...skipping 24 matching lines...) |
| 3284 } | 3299 } |
| 3285 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | 3300 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 3286 | 3301 |
| 3287 if (NeedsChecks()) { | 3302 if (NeedsChecks()) { |
| 3288 // Slow-case: Non-function called. | 3303 // Slow-case: Non-function called. |
| 3289 __ bind(&slow); | 3304 __ bind(&slow); |
| 3290 if (RecordCallTarget()) { | 3305 if (RecordCallTarget()) { |
| 3291 // If there is a call target cache, mark it megamorphic in the | 3306 // If there is a call target cache, mark it megamorphic in the |
| 3292 // non-function case. MegamorphicSentinel is an immortal immovable | 3307 // non-function case. MegamorphicSentinel is an immortal immovable |
| 3293 // object (undefined) so no write barrier is needed. | 3308 // object (undefined) so no write barrier is needed. |
| 3294 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 3309 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3295 masm->isolate()->heap()->undefined_value()); | 3310 masm->isolate()->heap()->undefined_value()); |
| 3311 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize); |
| 3312 __ Addu(t1, a2, Operand(t1)); |
| 3296 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 3313 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 3297 __ sw(at, FieldMemOperand(a2, Cell::kValueOffset)); | 3314 __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize)); |
| 3298 } | 3315 } |
| 3299 // Check for function proxy. | 3316 // Check for function proxy. |
| 3300 __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE)); | 3317 __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 3301 __ push(a1); // Put proxy as additional argument. | 3318 __ push(a1); // Put proxy as additional argument. |
| 3302 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); | 3319 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32)); |
| 3303 __ li(a2, Operand(0, RelocInfo::NONE32)); | 3320 __ li(a2, Operand(0, RelocInfo::NONE32)); |
| 3304 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); | 3321 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY); |
| 3305 { | 3322 { |
| 3306 Handle<Code> adaptor = | 3323 Handle<Code> adaptor = |
| 3307 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3324 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 3308 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 3325 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 3309 } | 3326 } |
| 3310 | 3327 |
| (...skipping 19 matching lines...) |
| 3330 __ mov(a0, v0); | 3347 __ mov(a0, v0); |
| 3331 __ sw(a0, MemOperand(sp, argc_ * kPointerSize)); | 3348 __ sw(a0, MemOperand(sp, argc_ * kPointerSize)); |
| 3332 __ jmp(&cont); | 3349 __ jmp(&cont); |
| 3333 } | 3350 } |
| 3334 } | 3351 } |
| 3335 | 3352 |
| 3336 | 3353 |
| 3337 void CallConstructStub::Generate(MacroAssembler* masm) { | 3354 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 3338 // a0 : number of arguments | 3355 // a0 : number of arguments |
| 3339 // a1 : the function to call | 3356 // a1 : the function to call |
| 3340 // a2 : cache cell for call target | 3357 // a2 : feedback vector |
| 3358 // a3 : (only if a2 is not undefined) slot in feedback vector (Smi) |
| 3341 Label slow, non_function_call; | 3359 Label slow, non_function_call; |
| 3342 | 3360 |
| 3343 // Check that the function is not a smi. | 3361 // Check that the function is not a smi. |
| 3344 __ JumpIfSmi(a1, &non_function_call); | 3362 __ JumpIfSmi(a1, &non_function_call); |
| 3345 // Check that the function is a JSFunction. | 3363 // Check that the function is a JSFunction. |
| 3346 __ GetObjectType(a1, a3, a3); | 3364 __ GetObjectType(a1, t0, t0); |
| 3347 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); | 3365 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
| 3348 | 3366 |
| 3349 if (RecordCallTarget()) { | 3367 if (RecordCallTarget()) { |
| 3350 GenerateRecordCallTarget(masm); | 3368 GenerateRecordCallTarget(masm); |
| 3351 } | 3369 } |
| 3352 | 3370 |
| 3353 // Jump to the function-specific construct stub. | 3371 // Jump to the function-specific construct stub. |
| 3354 Register jmp_reg = a3; | 3372 Register jmp_reg = t0; |
| 3355 __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 3373 __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 3356 __ lw(jmp_reg, FieldMemOperand(jmp_reg, | 3374 __ lw(jmp_reg, FieldMemOperand(jmp_reg, |
| 3357 SharedFunctionInfo::kConstructStubOffset)); | 3375 SharedFunctionInfo::kConstructStubOffset)); |
| 3358 __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | 3376 __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 3359 __ Jump(at); | 3377 __ Jump(at); |
| 3360 | 3378 |
| 3361 // a0: number of arguments | 3379 // a0: number of arguments |
| 3362 // a1: called object | 3380 // a1: called object |
| 3363 // a3: object type | 3381 // t0: object type |
| 3364 Label do_call; | 3382 Label do_call; |
| 3365 __ bind(&slow); | 3383 __ bind(&slow); |
| 3366 __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE)); | 3384 __ Branch(&non_function_call, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 3367 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); | 3385 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); |
| 3368 __ jmp(&do_call); | 3386 __ jmp(&do_call); |
| 3369 | 3387 |
| 3370 __ bind(&non_function_call); | 3388 __ bind(&non_function_call); |
| 3371 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 3389 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
| 3372 __ bind(&do_call); | 3390 __ bind(&do_call); |
| 3373 // Set expected number of arguments to zero (not changing a0). | 3391 // Set expected number of arguments to zero (not changing a0). |
| 3374 __ li(a2, Operand(0, RelocInfo::NONE32)); | 3392 __ li(a2, Operand(0, RelocInfo::NONE32)); |
| 3375 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 3393 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
| 3376 RelocInfo::CODE_TARGET); | 3394 RelocInfo::CODE_TARGET); |
| (...skipping 1977 matching lines...) |
| 5354 ArraySingleArgumentConstructorStub stub_holey(holey_initial, | 5372 ArraySingleArgumentConstructorStub stub_holey(holey_initial, |
| 5355 DISABLE_ALLOCATION_SITES); | 5373 DISABLE_ALLOCATION_SITES); |
| 5356 __ TailCallStub(&stub_holey); | 5374 __ TailCallStub(&stub_holey); |
| 5357 | 5375 |
| 5358 __ bind(&normal_sequence); | 5376 __ bind(&normal_sequence); |
| 5359 ArraySingleArgumentConstructorStub stub(initial, | 5377 ArraySingleArgumentConstructorStub stub(initial, |
| 5360 DISABLE_ALLOCATION_SITES); | 5378 DISABLE_ALLOCATION_SITES); |
| 5361 __ TailCallStub(&stub); | 5379 __ TailCallStub(&stub); |
| 5362 } else if (mode == DONT_OVERRIDE) { | 5380 } else if (mode == DONT_OVERRIDE) { |
| 5363 // We are going to create a holey array, but our kind is non-holey. | 5381 // We are going to create a holey array, but our kind is non-holey. |
| 5364 // Fix kind and retry (only if we have an allocation site in the cell). | 5382 // Fix kind and retry (only if we have an allocation site in the slot). |
| 5365 __ Addu(a3, a3, Operand(1)); | 5383 __ Addu(a3, a3, Operand(1)); |
| 5366 | 5384 |
| 5367 if (FLAG_debug_code) { | 5385 if (FLAG_debug_code) { |
| 5368 __ lw(t1, FieldMemOperand(a2, 0)); | 5386 __ lw(t1, FieldMemOperand(a2, 0)); |
| 5369 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 5387 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 5370 __ Assert(eq, kExpectedAllocationSite, t1, Operand(at)); | 5388 __ Assert(eq, kExpectedAllocationSite, t1, Operand(at)); |
| 5371 } | 5389 } |
| 5372 | 5390 |
| 5373 // Save the resulting elements kind in type info. We can't just store a3 | 5391 // Save the resulting elements kind in type info. We can't just store a3 |
| 5374 // in the AllocationSite::transition_info field because elements kind is | 5392 // in the AllocationSite::transition_info field because elements kind is |
| (...skipping 86 matching lines...) |
| 5461 } else { | 5479 } else { |
| 5462 UNREACHABLE(); | 5480 UNREACHABLE(); |
| 5463 } | 5481 } |
| 5464 } | 5482 } |
| 5465 | 5483 |
| 5466 | 5484 |
| 5467 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 5485 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 5468 // ----------- S t a t e ------------- | 5486 // ----------- S t a t e ------------- |
| 5469 // -- a0 : argc (only if argument_count_ == ANY) | 5487 // -- a0 : argc (only if argument_count_ == ANY) |
| 5470 // -- a1 : constructor | 5488 // -- a1 : constructor |
| 5471 // -- a2 : type info cell | 5489 // -- a2 : feedback vector (fixed array or undefined) |
| 5490 // -- a3 : slot index (if a2 is fixed array) |
| 5472 // -- sp[0] : return address | 5491 // -- sp[0] : return address |
| 5473 // -- sp[4] : last argument | 5492 // -- sp[4] : last argument |
| 5474 // ----------------------------------- | 5493 // ----------------------------------- |
| 5475 if (FLAG_debug_code) { | 5494 if (FLAG_debug_code) { |
| 5476 // The array construct code is only set for the global and natives | 5495 // The array construct code is only set for the global and natives |
| 5477 // builtin Array functions which always have maps. | 5496 // builtin Array functions which always have maps. |
| 5478 | 5497 |
| 5479 // Initial map for the builtin Array function should be a map. | 5498 // Initial map for the builtin Array function should be a map. |
| 5480 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 5499 __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 5481 // Will both indicate a NULL and a Smi. | 5500 // Will both indicate a NULL and a Smi. |
| 5482 __ SmiTst(a3, at); | 5501 __ SmiTst(t0, at); |
| 5483 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, | 5502 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, |
| 5484 at, Operand(zero_reg)); | 5503 at, Operand(zero_reg)); |
| 5485 __ GetObjectType(a3, a3, t0); | 5504 __ GetObjectType(t0, t0, t1); |
| 5486 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, | 5505 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, |
| 5487 t0, Operand(MAP_TYPE)); | 5506 t1, Operand(MAP_TYPE)); |
| 5488 | 5507 |
| 5489 // We should either have undefined in a2 or a valid cell. | 5508 // We should either have undefined in a2 or a valid fixed array. |
| 5490 Label okay_here; | 5509 Label okay_here; |
| 5491 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 5510 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map(); |
| 5492 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5511 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 5493 __ Branch(&okay_here, eq, a2, Operand(at)); | 5512 __ Branch(&okay_here, eq, a2, Operand(at)); |
| 5494 __ lw(a3, FieldMemOperand(a2, 0)); | 5513 __ lw(t0, FieldMemOperand(a2, 0)); |
| 5495 __ Assert(eq, kExpectedPropertyCellInRegisterA2, | 5514 __ Assert(eq, kExpectedFixedArrayInRegisterR2, |
| 5496 a3, Operand(cell_map)); | 5515 t0, Operand(fixed_array_map)); |
| 5516 |
| 5517 // a3 should be a smi if we don't have undefined in a2 |
| 5518 __ AssertSmi(a3); |
| 5519 |
| 5497 __ bind(&okay_here); | 5520 __ bind(&okay_here); |
| 5498 } | 5521 } |
| 5499 | 5522 |
| 5500 Label no_info; | 5523 Label no_info; |
| 5501 // Get the elements kind and case on that. | 5524 // Get the elements kind and case on that. |
| 5502 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5525 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 5503 __ Branch(&no_info, eq, a2, Operand(at)); | 5526 __ Branch(&no_info, eq, a2, Operand(at)); |
| 5504 __ lw(a2, FieldMemOperand(a2, Cell::kValueOffset)); | 5527 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 5528 __ Addu(a2, a2, Operand(t0)); |
| 5529 __ lw(a2, FieldMemOperand(a2, FixedArray::kHeaderSize)); |
| 5505 | 5530 |
| 5506 // If the type cell is undefined, or contains anything other than an | 5531 // If the feedback vector is undefined, or contains anything other than an |
| 5507 // AllocationSite, call an array constructor that doesn't use AllocationSites. | 5532 // AllocationSite, call an array constructor that doesn't use AllocationSites. |
| 5508 __ lw(t0, FieldMemOperand(a2, 0)); | 5533 __ lw(t0, FieldMemOperand(a2, 0)); |
| 5509 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 5534 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 5510 __ Branch(&no_info, ne, t0, Operand(at)); | 5535 __ Branch(&no_info, ne, t0, Operand(at)); |
| 5511 | 5536 |
| 5512 __ lw(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); | 5537 __ lw(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); |
| 5513 __ SmiUntag(a3); | 5538 __ SmiUntag(a3); |
| 5514 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); | 5539 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); |
| 5515 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); | 5540 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); |
| 5516 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 5541 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
| (...skipping 213 matching lines...) |
| 5730 MemOperand(fp, 6 * kPointerSize), | 5755 MemOperand(fp, 6 * kPointerSize), |
| 5731 NULL); | 5756 NULL); |
| 5732 } | 5757 } |
| 5733 | 5758 |
| 5734 | 5759 |
| 5735 #undef __ | 5760 #undef __ |
| 5736 | 5761 |
| 5737 } } // namespace v8::internal | 5762 } } // namespace v8::internal |
| 5738 | 5763 |
| 5739 #endif // V8_TARGET_ARCH_MIPS | 5764 #endif // V8_TARGET_ARCH_MIPS |
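
A note on the recurring addressing pattern on the new right-hand side: every feedback-vector slot access scales the Smi-tagged slot index in a3 with `sll t0, a3, kPointerSizeLog2 - kSmiTagSize`, adds the vector base in a2, and then reads or writes at `FixedArray::kHeaderSize` (or `FixedArray::kHeaderSize - kHeapObjectTag` when a plain `MemOperand` is used, as in the store path of `GenerateRecordCallTarget`). The sketch below is a standalone check of that arithmetic under the usual 32-bit V8 assumptions (4-byte pointers, 1-bit Smi tag, two-word FixedArray header, heap-object tag of 1); the constants are written out here for illustration rather than pulled from V8 headers.

```cpp
#include <cassert>
#include <cstdint>

// Assumed 32-bit (MIPS32) layout constants, spelled out for this sketch.
constexpr int kPointerSize          = 4;                 // one word per tagged slot
constexpr int kPointerSizeLog2      = 2;
constexpr int kSmiTagSize           = 1;                 // Smi encoding: value << 1
constexpr int kHeapObjectTag        = 1;                 // heap pointers carry a 1 tag bit
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map word + length word

// Mirrors "sll t0, a3, kPointerSizeLog2 - kSmiTagSize": a Smi-tagged slot
// index becomes a byte offset into the FixedArray payload in one shift.
int32_t ScaledSlotOffset(int32_t smi_index) {
  return smi_index << (kPointerSizeLog2 - kSmiTagSize);
}

int main() {
  for (int32_t slot = 0; slot < 16; ++slot) {
    int32_t smi = slot << kSmiTagSize;  // Smi-tag the raw index
    // Shifting the Smi is equivalent to untagging and scaling by kPointerSize.
    assert(ScaledSlotOffset(smi) == slot * kPointerSize);
  }
  // FieldMemOperand(base, FixedArray::kHeaderSize) compensates for the heap
  // object tag internally, so it addresses the same byte as adding
  // (kHeaderSize - kHeapObjectTag) explicitly and using a plain MemOperand.
  assert(kFixedArrayHeaderSize - (kFixedArrayHeaderSize - kHeapObjectTag) ==
         kHeapObjectTag);
  return 0;
}
```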
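For readers following the `GenerateRecordCallTarget` changes, the slot transitions the stub's branches implement can be summarised in host-side C++. This is a reading of the generated control flow, not V8 code: `Slot`, `Uninitialized`, and `Megamorphic` are illustrative stand-ins for the feedback slot and its the-hole/undefined sentinels.

```cpp
#include <cassert>
#include <variant>

// Illustrative stand-ins, not V8 types.
struct JSFunction {};
struct AllocationSite {};
struct Uninitialized {};  // the-hole sentinel in the real slot
struct Megamorphic {};    // undefined sentinel in the real slot
using Slot = std::variant<Uninitialized, Megamorphic, JSFunction*, AllocationSite*>;

// What the stub's branches amount to when `callee` is called through a site
// whose feedback lives in `slot`.
void RecordCallTarget(Slot& slot, JSFunction* callee, JSFunction* array_function) {
  // Monomorphic hit on the same function, or an AllocationSite seen again by
  // the Array constructor: leave the slot unchanged.
  if (auto** f = std::get_if<JSFunction*>(&slot); f && *f == callee) return;
  if (std::holds_alternative<AllocationSite*>(slot) && callee == array_function) return;

  // Uninitialized slot: record an AllocationSite for Array() calls
  // (CreateAllocationSiteStub in the generated code), otherwise the callee
  // itself, which is why the store path now needs a write barrier.
  if (std::holds_alternative<Uninitialized>(slot)) {
    // The AllocationSite is leaked in this sketch; the real one is GC-managed.
    slot = (callee == array_function) ? Slot(new AllocationSite{}) : Slot(callee);
    return;
  }

  // Anything else (a different function, a stale AllocationSite, or an
  // already megamorphic slot) collapses to the megamorphic sentinel.
  slot = Megamorphic{};
}

int main() {
  JSFunction array_fn, f, g;
  Slot slot;  // default-constructed: Uninitialized

  RecordCallTarget(slot, &f, &array_fn);
  assert(std::get<JSFunction*>(slot) == &f);           // monomorphic on f
  RecordCallTarget(slot, &f, &array_fn);
  assert(std::get<JSFunction*>(slot) == &f);           // repeat hit: unchanged
  RecordCallTarget(slot, &g, &array_fn);
  assert(std::holds_alternative<Megamorphic>(slot));   // second target: megamorphic
  return 0;
}
```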