| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 109 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); | 109 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); |
| 110 descriptor->register_params_ = registers; | 110 descriptor->register_params_ = registers; |
| 111 descriptor->deoptimization_handler_ = | 111 descriptor->deoptimization_handler_ = |
| 112 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; | 112 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; |
| 113 } | 113 } |
| 114 | 114 |
| 115 | 115 |
| 116 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( | 116 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( |
| 117 Isolate* isolate, | 117 Isolate* isolate, |
| 118 CodeStubInterfaceDescriptor* descriptor) { | 118 CodeStubInterfaceDescriptor* descriptor) { |
| 119 // x2: cache cell | 119 // x2: feedback vector |
| 120 static Register registers[] = { x2 }; | 120 // x3: call feedback slot |
| 121 static Register registers[] = { x2, x3 }; |
| 121 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); | 122 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); |
| 122 descriptor->register_params_ = registers; | 123 descriptor->register_params_ = registers; |
| 123 descriptor->deoptimization_handler_ = NULL; | 124 descriptor->deoptimization_handler_ = NULL; |
| 124 } | 125 } |
| 125 | 126 |
| 126 | 127 |
| 127 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( | 128 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
| 128 Isolate* isolate, | 129 Isolate* isolate, |
| 129 CodeStubInterfaceDescriptor* descriptor) { | 130 CodeStubInterfaceDescriptor* descriptor) { |
| 130 // x1: receiver | 131 // x1: receiver |
| (...skipping 3040 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3171 __ Ldr(sliced_string_offset, | 3172 __ Ldr(sliced_string_offset, |
| 3172 UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset)); | 3173 UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset)); |
| 3173 __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 3174 __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
| 3174 __ B(&check_underlying); // Go to (4). | 3175 __ B(&check_underlying); // Go to (4). |
| 3175 #endif | 3176 #endif |
| 3176 } | 3177 } |
| 3177 | 3178 |
| 3178 | 3179 |
| 3179 // TODO(jbramley): Don't use static registers here, but take them as arguments. | 3180 // TODO(jbramley): Don't use static registers here, but take them as arguments. |
| 3180 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 3181 static void GenerateRecordCallTarget(MacroAssembler* masm) { |
| 3181 // Cache the called function in a global property cell. Cache states are | 3182 ASM_LOCATION("GenerateRecordCallTarget"); |
| 3183 // Cache the called function in a feedback vector slot. Cache states are |
| 3182 // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. | 3184 // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. |
| 3183 // x0 : number of arguments to the construct function | 3185 // x0 : number of arguments to the construct function |
| 3184 // x1 : the function to call | 3186 // x1 : the function to call |
| 3185 // x2 : cache cell for the call target | 3187 // x2 : feedback vector |
| 3188 // x3 : slot in feedback vector (smi) |
| 3186 Label initialize, done, miss, megamorphic, not_array_function; | 3189 Label initialize, done, miss, megamorphic, not_array_function; |
| 3187 | 3190 |
| 3188 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 3191 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3189 masm->isolate()->heap()->undefined_value()); | 3192 masm->isolate()->heap()->undefined_value()); |
| 3190 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()), | 3193 ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()), |
| 3191 masm->isolate()->heap()->the_hole_value()); | 3194 masm->isolate()->heap()->the_hole_value()); |
| 3192 | 3195 |
| 3193 // Load the cache state. | 3196 // Load the cache state. |
| 3194 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset)); | 3197 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3198 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3195 | 3199 |
| 3196 // A monomorphic cache hit or an already megamorphic state: invoke the | 3200 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 3197 // function without changing the state. | 3201 // function without changing the state. |
| 3198 __ Cmp(x3, x1); | 3202 __ Cmp(x4, x1); |
| 3199 __ B(eq, &done); | 3203 __ B(eq, &done); |
| 3200 | 3204 |
| 3201 // If we came here, we need to see if we are the array function. | 3205 // If we came here, we need to see if we are the array function. |
| 3202 // If we didn't have a matching function, and we didn't find the megamorph | 3206 // If we didn't have a matching function, and we didn't find the megamorph |
| 3203 // sentinel, then we have in the cell either some other function or an | 3207 // sentinel, then we have in the slot either some other function or an |
| 3204 // AllocationSite. Do a map check on the object in ecx. | 3208 // AllocationSite. Do a map check on the object in x4. |
| 3205 __ Ldr(x5, FieldMemOperand(x3, AllocationSite::kMapOffset)); | 3209 __ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset)); |
| 3206 __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &miss); | 3210 __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &miss); |
| 3207 | 3211 |
| 3208 // Make sure the function is the Array() function | 3212 // Make sure the function is the Array() function |
| 3209 __ LoadArrayFunction(x3); | 3213 __ LoadArrayFunction(x4); |
| 3210 __ Cmp(x1, x3); | 3214 __ Cmp(x1, x4); |
| 3211 __ B(ne, &megamorphic); | 3215 __ B(ne, &megamorphic); |
| 3212 __ B(&done); | 3216 __ B(&done); |
| 3213 | 3217 |
| 3214 __ Bind(&miss); | 3218 __ Bind(&miss); |
| 3215 | 3219 |
| 3216 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 3220 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
| 3217 // megamorphic. | 3221 // megamorphic. |
| 3218 __ JumpIfRoot(x3, Heap::kTheHoleValueRootIndex, &initialize); | 3222 __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize); |
| 3219 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3223 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 3220 // write-barrier is needed. | 3224 // write-barrier is needed. |
| 3221 __ Bind(&megamorphic); | 3225 __ Bind(&megamorphic); |
| 3222 __ LoadRoot(x3, Heap::kUndefinedValueRootIndex); | 3226 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3223 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); | 3227 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); |
| 3228 __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3224 __ B(&done); | 3229 __ B(&done); |
| 3225 | 3230 |
| 3226 // An uninitialized cache is patched with the function or sentinel to | 3231 // An uninitialized cache is patched with the function or sentinel to |
| 3227 // indicate the ElementsKind if function is the Array constructor. | 3232 // indicate the ElementsKind if function is the Array constructor. |
| 3228 __ Bind(&initialize); | 3233 __ Bind(&initialize); |
| 3229 // Make sure the function is the Array() function | 3234 // Make sure the function is the Array() function |
| 3230 __ LoadArrayFunction(x3); | 3235 __ LoadArrayFunction(x4); |
| 3231 __ Cmp(x1, x3); | 3236 __ Cmp(x1, x4); |
| 3232 __ B(ne, ¬_array_function); | 3237 __ B(ne, ¬_array_function); |
| 3233 | 3238 |
| 3234 // The target function is the Array constructor, | 3239 // The target function is the Array constructor, |
| 3235 // Create an AllocationSite if we don't already have it, store it in the cell | 3240 // Create an AllocationSite if we don't already have it, store it in the slot. |
| 3236 { | 3241 { |
| 3237 FrameScope scope(masm, StackFrame::INTERNAL); | 3242 FrameScope scope(masm, StackFrame::INTERNAL); |
| 3238 CreateAllocationSiteStub create_stub; | 3243 CreateAllocationSiteStub create_stub; |
| 3239 | 3244 |
| 3240 // Arguments register must be smi-tagged to call out. | 3245 // Arguments register must be smi-tagged to call out. |
| 3241 __ SmiTag(x0); | 3246 __ SmiTag(x0); |
| 3242 __ Push(x0, x1, x2); | 3247 __ Push(x0, x1, x2, x3); |
| 3243 | 3248 |
| 3244 __ CallStub(&create_stub); | 3249 __ CallStub(&create_stub); |
| 3245 | 3250 |
| 3246 __ Pop(x2, x1, x0); | 3251 __ Pop(x3, x2, x1, x0); |
| 3247 __ SmiUntag(x0); | 3252 __ SmiUntag(x0); |
| 3248 } | 3253 } |
| 3249 __ B(&done); | 3254 __ B(&done); |
| 3250 | 3255 |
| 3251 __ Bind(¬_array_function); | 3256 __ Bind(¬_array_function); |
| 3252 // An uninitialized cache is patched with the function. | 3257 // An uninitialized cache is patched with the function. |
| 3253 __ Str(x1, FieldMemOperand(x2, Cell::kValueOffset)); | 3258 |
| 3254 // No need for a write barrier here - cells are rescanned. | 3259 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3260 // TODO(all): Does the value need to be left in x4? If not, FieldMemOperand |
| 3261 // could be used to avoid this add. |
| 3262 __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag); |
| 3263 __ Str(x1, MemOperand(x4, 0)); |
| 3264 |
| 3265 __ Push(x4, x2, x1); |
| 3266 __ RecordWrite(x2, x4, x1, kLRHasNotBeenSaved, kDontSaveFPRegs, |
| 3267 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3268 __ Pop(x1, x2, x4); |
| 3269 |
| 3270 // TODO(all): Are x4, x2 and x1 outputs? This isn't clear. |
| 3255 | 3271 |
| 3256 __ Bind(&done); | 3272 __ Bind(&done); |
| 3257 } | 3273 } |
| 3258 | 3274 |
| 3259 | 3275 |
| 3260 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3276 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3261 ASM_LOCATION("CallFunctionStub::Generate"); | 3277 ASM_LOCATION("CallFunctionStub::Generate"); |
| 3262 // x1 function the function to call | 3278 // x1 function the function to call |
| 3263 // x2 cache_cell cache cell for call target | 3279 // x2 : feedback vector |
| 3280 // x3 : slot in feedback vector (smi) (if x2 is not undefined) |
| 3264 Register function = x1; | 3281 Register function = x1; |
| 3265 Register cache_cell = x2; | 3282 Register cache_cell = x2; |
| 3283 Register slot = x3; |
| 3266 Register type = x4; | 3284 Register type = x4; |
| 3267 Label slow, non_function, wrap, cont; | 3285 Label slow, non_function, wrap, cont; |
| 3268 | 3286 |
| 3269 // TODO(jbramley): This function has a lot of unnamed registers. Name them, | 3287 // TODO(jbramley): This function has a lot of unnamed registers. Name them, |
| 3270 // and tidy things up a bit. | 3288 // and tidy things up a bit. |
| 3271 | 3289 |
| 3272 if (NeedsChecks()) { | 3290 if (NeedsChecks()) { |
| 3273 // Check that the function is really a JavaScript function. | 3291 // Check that the function is really a JavaScript function. |
| 3274 __ JumpIfSmi(function, &non_function); | 3292 __ JumpIfSmi(function, &non_function); |
| 3275 | 3293 |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3313 JUMP_FUNCTION, | 3331 JUMP_FUNCTION, |
| 3314 NullCallWrapper()); | 3332 NullCallWrapper()); |
| 3315 | 3333 |
| 3316 if (NeedsChecks()) { | 3334 if (NeedsChecks()) { |
| 3317 // Slow-case: Non-function called. | 3335 // Slow-case: Non-function called. |
| 3318 __ Bind(&slow); | 3336 __ Bind(&slow); |
| 3319 if (RecordCallTarget()) { | 3337 if (RecordCallTarget()) { |
| 3320 // If there is a call target cache, mark it megamorphic in the | 3338 // If there is a call target cache, mark it megamorphic in the |
| 3321 // non-function case. MegamorphicSentinel is an immortal immovable object | 3339 // non-function case. MegamorphicSentinel is an immortal immovable object |
| 3322 // (undefined) so no write barrier is needed. | 3340 // (undefined) so no write barrier is needed. |
| 3323 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), | 3341 ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()), |
| 3324 masm->isolate()->heap()->undefined_value()); | 3342 masm->isolate()->heap()->undefined_value()); |
| 3343 __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot, |
| 3344 kPointerSizeLog2)); |
| 3325 __ LoadRoot(x11, Heap::kUndefinedValueRootIndex); | 3345 __ LoadRoot(x11, Heap::kUndefinedValueRootIndex); |
| 3326 __ Str(x11, FieldMemOperand(cache_cell, Cell::kValueOffset)); | 3346 __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize)); |
| 3327 } | 3347 } |
| 3328 // Check for function proxy. | 3348 // Check for function proxy. |
| 3329 // x10 : function type. | 3349 // x10 : function type. |
| 3330 __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function); | 3350 __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function); |
| 3331 __ Push(function); // put proxy as additional argument | 3351 __ Push(function); // put proxy as additional argument |
| 3332 __ Mov(x0, argc_ + 1); | 3352 __ Mov(x0, argc_ + 1); |
| 3333 __ Mov(x2, 0); | 3353 __ Mov(x2, 0); |
| 3334 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); | 3354 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY); |
| 3335 { | 3355 { |
| 3336 Handle<Code> adaptor = | 3356 Handle<Code> adaptor = |
| (...skipping 23 matching lines...) Expand all Loading... |
| 3360 __ Poke(x0, argc_ * kPointerSize); | 3380 __ Poke(x0, argc_ * kPointerSize); |
| 3361 __ B(&cont); | 3381 __ B(&cont); |
| 3362 } | 3382 } |
| 3363 } | 3383 } |
| 3364 | 3384 |
| 3365 | 3385 |
| 3366 void CallConstructStub::Generate(MacroAssembler* masm) { | 3386 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 3367 ASM_LOCATION("CallConstructStub::Generate"); | 3387 ASM_LOCATION("CallConstructStub::Generate"); |
| 3368 // x0 : number of arguments | 3388 // x0 : number of arguments |
| 3369 // x1 : the function to call | 3389 // x1 : the function to call |
| 3370 // x2 : cache cell for call target | 3390 // x2 : feedback vector |
| 3391 // x3 : slot in feedback vector (smi) (if x2 is not undefined) |
| 3371 Register function = x1; | 3392 Register function = x1; |
| 3372 Label slow, non_function_call; | 3393 Label slow, non_function_call; |
| 3373 | 3394 |
| 3374 // Check that the function is not a smi. | 3395 // Check that the function is not a smi. |
| 3375 __ JumpIfSmi(function, &non_function_call); | 3396 __ JumpIfSmi(function, &non_function_call); |
| 3376 // Check that the function is a JSFunction. | 3397 // Check that the function is a JSFunction. |
| 3377 Register object_type = x10; | 3398 Register object_type = x10; |
| 3378 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, | 3399 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, |
| 3379 &slow); | 3400 &slow); |
| 3380 | 3401 |
| 3381 if (RecordCallTarget()) { | 3402 if (RecordCallTarget()) { |
| 3382 GenerateRecordCallTarget(masm); | 3403 GenerateRecordCallTarget(masm); |
| 3383 } | 3404 } |
| 3384 | 3405 |
| 3385 // Jump to the function-specific construct stub. | 3406 // Jump to the function-specific construct stub. |
| 3386 Register jump_reg = x3; | 3407 Register jump_reg = x4; |
| 3387 Register shared_func_info = jump_reg; | 3408 Register shared_func_info = jump_reg; |
| 3388 Register cons_stub = jump_reg; | 3409 Register cons_stub = jump_reg; |
| 3389 Register cons_stub_code = jump_reg; | 3410 Register cons_stub_code = jump_reg; |
| 3390 __ Ldr(shared_func_info, | 3411 __ Ldr(shared_func_info, |
| 3391 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3412 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| 3392 __ Ldr(cons_stub, | 3413 __ Ldr(cons_stub, |
| 3393 FieldMemOperand(shared_func_info, | 3414 FieldMemOperand(shared_func_info, |
| 3394 SharedFunctionInfo::kConstructStubOffset)); | 3415 SharedFunctionInfo::kConstructStubOffset)); |
| 3395 __ Add(cons_stub_code, cons_stub, Code::kHeaderSize - kHeapObjectTag); | 3416 __ Add(cons_stub_code, cons_stub, Code::kHeaderSize - kHeapObjectTag); |
| 3396 __ Br(cons_stub_code); | 3417 __ Br(cons_stub_code); |
| (...skipping 1782 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5179 | 5200 |
| 5180 __ Bind(¬_in_dictionary); | 5201 __ Bind(¬_in_dictionary); |
| 5181 __ Mov(result, 0); | 5202 __ Mov(result, 0); |
| 5182 __ Ret(); | 5203 __ Ret(); |
| 5183 } | 5204 } |
| 5184 | 5205 |
| 5185 | 5206 |
| 5186 template<class T> | 5207 template<class T> |
| 5187 static void CreateArrayDispatch(MacroAssembler* masm, | 5208 static void CreateArrayDispatch(MacroAssembler* masm, |
| 5188 AllocationSiteOverrideMode mode) { | 5209 AllocationSiteOverrideMode mode) { |
| 5210 ASM_LOCATION("CreateArrayDispatch"); |
| 5189 if (mode == DISABLE_ALLOCATION_SITES) { | 5211 if (mode == DISABLE_ALLOCATION_SITES) { |
| 5190 T stub(GetInitialFastElementsKind(), mode); | 5212 T stub(GetInitialFastElementsKind(), mode); |
| 5191 __ TailCallStub(&stub); | 5213 __ TailCallStub(&stub); |
| 5192 | 5214 |
| 5193 } else if (mode == DONT_OVERRIDE) { | 5215 } else if (mode == DONT_OVERRIDE) { |
| 5194 Register kind = x3; | 5216 Register kind = x3; |
| 5195 int last_index = | 5217 int last_index = |
| 5196 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); | 5218 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); |
| 5197 for (int i = 0; i <= last_index; ++i) { | 5219 for (int i = 0; i <= last_index; ++i) { |
| 5198 Label next; | 5220 Label next; |
| (...skipping 12 matching lines...) Expand all Loading... |
| 5211 } else { | 5233 } else { |
| 5212 UNREACHABLE(); | 5234 UNREACHABLE(); |
| 5213 } | 5235 } |
| 5214 } | 5236 } |
| 5215 | 5237 |
| 5216 | 5238 |
| 5217 // TODO(jbramley): If this needs to be a special case, make it a proper template | 5239 // TODO(jbramley): If this needs to be a special case, make it a proper template |
| 5218 // specialization, and not a separate function. | 5240 // specialization, and not a separate function. |
| 5219 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, | 5241 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, |
| 5220 AllocationSiteOverrideMode mode) { | 5242 AllocationSiteOverrideMode mode) { |
| 5243 ASM_LOCATION("CreateArrayDispatchOneArgument"); |
| 5221 // x0 - argc | 5244 // x0 - argc |
| 5222 // x1 - constructor? | 5245 // x1 - constructor? |
| 5223 // x2 - allocation site (if mode != DISABLE_ALLOCATION_SITES) | 5246 // x2 - allocation site (if mode != DISABLE_ALLOCATION_SITES) |
| 5224 // x3 - kind (if mode != DISABLE_ALLOCATION_SITES) | 5247 // x3 - kind (if mode != DISABLE_ALLOCATION_SITES) |
| 5225 // sp[0] - last argument | 5248 // sp[0] - last argument |
| 5226 | 5249 |
| 5227 Register allocation_site = x2; | 5250 Register allocation_site = x2; |
| 5228 Register kind = x3; | 5251 Register kind = x3; |
| 5229 | 5252 |
| 5230 Label normal_sequence; | 5253 Label normal_sequence; |
| (...skipping 21 matching lines...) Expand all Loading... |
| 5252 ArraySingleArgumentConstructorStub stub_holey(holey_initial, | 5275 ArraySingleArgumentConstructorStub stub_holey(holey_initial, |
| 5253 DISABLE_ALLOCATION_SITES); | 5276 DISABLE_ALLOCATION_SITES); |
| 5254 __ TailCallStub(&stub_holey); | 5277 __ TailCallStub(&stub_holey); |
| 5255 | 5278 |
| 5256 __ Bind(&normal_sequence); | 5279 __ Bind(&normal_sequence); |
| 5257 ArraySingleArgumentConstructorStub stub(initial, | 5280 ArraySingleArgumentConstructorStub stub(initial, |
| 5258 DISABLE_ALLOCATION_SITES); | 5281 DISABLE_ALLOCATION_SITES); |
| 5259 __ TailCallStub(&stub); | 5282 __ TailCallStub(&stub); |
| 5260 } else if (mode == DONT_OVERRIDE) { | 5283 } else if (mode == DONT_OVERRIDE) { |
| 5261 // We are going to create a holey array, but our kind is non-holey. | 5284 // We are going to create a holey array, but our kind is non-holey. |
| 5262 // Fix kind and retry (only if we have an allocation site in the cell). | 5285 // Fix kind and retry (only if we have an allocation site in the slot). |
| 5263 __ Orr(kind, kind, 1); | 5286 __ Orr(kind, kind, 1); |
| 5264 | 5287 |
| 5265 if (FLAG_debug_code) { | 5288 if (FLAG_debug_code) { |
| 5266 __ Ldr(x10, FieldMemOperand(allocation_site, 0)); | 5289 __ Ldr(x10, FieldMemOperand(allocation_site, 0)); |
| 5267 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, | 5290 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, |
| 5268 &normal_sequence); | 5291 &normal_sequence); |
| 5269 __ Assert(eq, kExpectedAllocationSite); | 5292 __ Assert(eq, kExpectedAllocationSite); |
| 5270 } | 5293 } |
| 5271 | 5294 |
| 5272 // Save the resulting elements kind in type info. We can't just store 'kind' | 5295 // Save the resulting elements kind in type info. We can't just store 'kind' |
| (...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5369 CreateArrayDispatchOneArgument(masm, mode); | 5392 CreateArrayDispatchOneArgument(masm, mode); |
| 5370 } else if (argument_count_ == MORE_THAN_ONE) { | 5393 } else if (argument_count_ == MORE_THAN_ONE) { |
| 5371 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); | 5394 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); |
| 5372 } else { | 5395 } else { |
| 5373 UNREACHABLE(); | 5396 UNREACHABLE(); |
| 5374 } | 5397 } |
| 5375 } | 5398 } |
| 5376 | 5399 |
| 5377 | 5400 |
| 5378 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 5401 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 5402 ASM_LOCATION("ArrayConstructorStub::Generate"); |
| 5379 // ----------- S t a t e ------------- | 5403 // ----------- S t a t e ------------- |
| 5380 // -- x0 : argc (only if argument_count_ == ANY) | 5404 // -- x0 : argc (only if argument_count_ == ANY) |
| 5381 // -- x1 : constructor | 5405 // -- x1 : constructor |
| 5382 // -- x2 : type info cell | 5406 // -- x2 : feedback vector (fixed array or undefined) |
| 5407 // -- x3 : slot index (if x2 is fixed array) |
| 5383 // -- sp[0] : return address | 5408 // -- sp[0] : return address |
| 5384 // -- sp[4] : last argument | 5409 // -- sp[4] : last argument |
| 5385 // ----------------------------------- | 5410 // ----------------------------------- |
| 5386 Register constructor = x1; | 5411 Register constructor = x1; |
| 5387 Register type_info_cell = x2; | 5412 Register feedback_vector = x2; |
| 5413 Register slot_index = x3; |
| 5388 | 5414 |
| 5389 if (FLAG_debug_code) { | 5415 if (FLAG_debug_code) { |
| 5390 // The array construct code is only set for the global and natives | 5416 // The array construct code is only set for the global and natives |
| 5391 // builtin Array functions which always have maps. | 5417 // builtin Array functions which always have maps. |
| 5392 | 5418 |
| 5393 Label unexpected_map, map_ok; | 5419 Label unexpected_map, map_ok; |
| 5394 // Initial map for the builtin Array function should be a map. | 5420 // Initial map for the builtin Array function should be a map. |
| 5395 __ Ldr(x10, FieldMemOperand(constructor, | 5421 __ Ldr(x10, FieldMemOperand(constructor, |
| 5396 JSFunction::kPrototypeOrInitialMapOffset)); | 5422 JSFunction::kPrototypeOrInitialMapOffset)); |
| 5397 // Will both indicate a NULL and a Smi. | 5423 // Will both indicate a NULL and a Smi. |
| 5398 __ JumpIfSmi(x10, &unexpected_map); | 5424 __ JumpIfSmi(x10, &unexpected_map); |
| 5399 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); | 5425 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); |
| 5400 __ Bind(&unexpected_map); | 5426 __ Bind(&unexpected_map); |
| 5401 __ Abort(kUnexpectedInitialMapForArrayFunction); | 5427 __ Abort(kUnexpectedInitialMapForArrayFunction); |
| 5402 __ Bind(&map_ok); | 5428 __ Bind(&map_ok); |
| 5403 | 5429 |
| 5404 // In type_info_cell, we expect either undefined or a valid Cell. | 5430 // In feedback_vector, we expect either undefined or a valid fixed array. |
| 5405 Label okay_here; | 5431 Label okay_here; |
| 5406 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 5432 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
| 5407 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &okay_here); | 5433 __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex, &okay_here); |
| 5408 __ Ldr(x10, FieldMemOperand(type_info_cell, Cell::kMapOffset)); | 5434 __ Ldr(x10, FieldMemOperand(feedback_vector, Cell::kMapOffset)); |
| 5409 __ Cmp(x10, Operand(cell_map)); | 5435 __ Cmp(x10, Operand(cell_map)); |
| 5410 __ Assert(eq, kExpectedPropertyCellInTypeInfoCell); | 5436 __ Assert(eq, kExpectedFixedArrayInFeedbackVector); |
| 5437 |
| 5438 // slot_index should be a smi if we don't have undefined in feedback_vector. |
| 5439 __ AssertSmi(slot_index); |
| 5440 |
| 5411 __ Bind(&okay_here); | 5441 __ Bind(&okay_here); |
| 5412 } | 5442 } |
| 5413 | 5443 |
| 5414 Register allocation_site = x2; // Overwrites type_info_cell. | 5444 Register allocation_site = x2; // Overwrites feedback_vector. |
| 5415 Register kind = x3; | 5445 Register kind = x3; |
| 5416 Label no_info; | 5446 Label no_info; |
| 5417 // Get the elements kind and case on that. | 5447 // Get the elements kind and case on that. |
| 5418 __ JumpIfRoot(type_info_cell, Heap::kUndefinedValueRootIndex, &no_info); | 5448 __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex, &no_info); |
| 5419 __ Ldr(allocation_site, FieldMemOperand(type_info_cell, | 5449 __ Add(feedback_vector, feedback_vector, |
| 5420 PropertyCell::kValueOffset)); | 5450 Operand::UntagSmiAndScale(slot_index, kPointerSizeLog2)); |
| 5451 __ Ldr(allocation_site, FieldMemOperand(feedback_vector, |
| 5452 FixedArray::kHeaderSize)); |
| 5421 | 5453 |
| 5422 // If the type cell is undefined, or contains anything other than an | 5454 // If the feedback vector is undefined, or contains anything other than an |
| 5423 // AllocationSite, call an array constructor that doesn't use AllocationSites. | 5455 // AllocationSite, call an array constructor that doesn't use AllocationSites. |
| 5424 __ Ldr(x10, FieldMemOperand(allocation_site, AllocationSite::kMapOffset)); | 5456 __ Ldr(x10, FieldMemOperand(allocation_site, AllocationSite::kMapOffset)); |
| 5425 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, &no_info); | 5457 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, &no_info); |
| 5426 | 5458 |
| 5427 __ Ldrsw(kind, | 5459 __ Ldrsw(kind, |
| 5428 UntagSmiFieldMemOperand(allocation_site, | 5460 UntagSmiFieldMemOperand(allocation_site, |
| 5429 AllocationSite::kTransitionInfoOffset)); | 5461 AllocationSite::kTransitionInfoOffset)); |
| 5430 __ And(kind, kind, AllocationSite::ElementsKindBits::kMask); | 5462 __ And(kind, kind, AllocationSite::ElementsKindBits::kMask); |
| 5431 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 5463 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
| 5432 | 5464 |
| (...skipping 247 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5680 MemOperand(fp, 6 * kPointerSize), | 5712 MemOperand(fp, 6 * kPointerSize), |
| 5681 NULL); | 5713 NULL); |
| 5682 } | 5714 } |
| 5683 | 5715 |
| 5684 | 5716 |
| 5685 #undef __ | 5717 #undef __ |
| 5686 | 5718 |
| 5687 } } // namespace v8::internal | 5719 } } // namespace v8::internal |
| 5688 | 5720 |
| 5689 #endif // V8_TARGET_ARCH_A64 | 5721 #endif // V8_TARGET_ARCH_A64 |
| OLD | NEW |