Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 95 matching lines...) | |
| 106 // x1: constant properties | 106 // x1: constant properties |
| 107 // x0: object literal flags | 107 // x0: object literal flags |
| 108 static Register registers[] = { x3, x2, x1, x0 }; | 108 static Register registers[] = { x3, x2, x1, x0 }; |
| 109 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); | 109 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); |
| 110 descriptor->register_params_ = registers; | 110 descriptor->register_params_ = registers; |
| 111 descriptor->deoptimization_handler_ = | 111 descriptor->deoptimization_handler_ = |
| 112 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; | 112 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; |
| 113 } | 113 } |
| 114 | 114 |
| 115 | 115 |
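Every InitializeInterfaceDescriptor body in this file follows the same pattern: a static list of parameter registers, a count derived with the sizeof idiom, and an optional deoptimization handler. A minimal compilable sketch of that idiom — Register and Descriptor here are simplified stand-ins for V8's types, not the real declarations:

```cpp
#include <cstdio>

// Simplified stand-ins for V8's Register and CodeStubInterfaceDescriptor.
struct Register { int code; };

struct Descriptor {
  int register_param_count_;
  const Register* register_params_;
};

static void InitDescriptor(Descriptor* descriptor) {
  // Static storage keeps the array alive after the function returns,
  // matching the pattern in the stubs above.
  static Register registers[] = { {3}, {2}, {1}, {0} };  // x3, x2, x1, x0
  // Classic array-length idiom: total byte size over element byte size.
  descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
  descriptor->register_params_ = registers;
}

int main() {
  Descriptor d;
  InitDescriptor(&d);
  std::printf("param count: %d\n", d.register_param_count_);  // prints 4
}
```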
| 116 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( | |
| 117 Isolate* isolate, | |
| 118 CodeStubInterfaceDescriptor* descriptor) { | |
| 119 // x2: feedback vector | |
| 120 // x3: call feedback slot | |
| 121 static Register registers[] = { x2, x3 }; | |
| 122 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); | |
| 123 descriptor->register_params_ = registers; | |
| 124 descriptor->deoptimization_handler_ = NULL; | |
| 125 } | |
| 126 | |
| 127 | |
| 128 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( | 116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
| 129 Isolate* isolate, | 117 Isolate* isolate, |
| 130 CodeStubInterfaceDescriptor* descriptor) { | 118 CodeStubInterfaceDescriptor* descriptor) { |
| 131 // x1: receiver | 119 // x1: receiver |
| 132 // x0: key | 120 // x0: key |
| 133 static Register registers[] = { x1, x0 }; | 121 static Register registers[] = { x1, x0 }; |
| 134 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); | 122 descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]); |
| 135 descriptor->register_params_ = registers; | 123 descriptor->register_params_ = registers; |
| 136 descriptor->deoptimization_handler_ = | 124 descriptor->deoptimization_handler_ = |
| 137 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); | 125 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); |
| (...skipping 1147 matching lines...) | |
| 1285 | 1273 |
| 1286 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 1274 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
| 1287 // It is important that the following stubs are generated in this order | 1275 // It is important that the following stubs are generated in this order |
| 1288 // because pregenerated stubs can only call other pregenerated stubs. | 1276 // because pregenerated stubs can only call other pregenerated stubs. |
| 1289 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses | 1277 // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses |
| 1290 // CEntryStub. | 1278 // CEntryStub. |
| 1291 CEntryStub::GenerateAheadOfTime(isolate); | 1279 CEntryStub::GenerateAheadOfTime(isolate); |
| 1292 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 1280 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 1293 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 1281 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
| 1294 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 1282 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 1295 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); | |
| 1296 BinaryOpICStub::GenerateAheadOfTime(isolate); | 1283 BinaryOpICStub::GenerateAheadOfTime(isolate); |
| 1297 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); | 1284 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); |
| 1298 } | 1285 } |
| 1299 | 1286 |
| 1300 | 1287 |
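The ordering comment in GenerateStubsAheadOfTime states the invariant plainly: a pregenerated stub may only call stubs generated before it, so the list above is a hand-maintained topological order of the stub call graph. A toy sketch of checking that invariant — StubSpec and the callee lists are invented for illustration, using the stub names from the comment:

```cpp
#include <cassert>
#include <set>
#include <string>
#include <vector>

// Toy model: each stub lists the stubs its generated code calls.
struct StubSpec {
  std::string name;
  std::vector<std::string> callees;
};

// Generating in list order is valid only if every callee appears earlier.
void GenerateAheadOfTime(const std::vector<StubSpec>& stubs) {
  std::set<std::string> generated;
  for (const StubSpec& stub : stubs) {
    for (const std::string& callee : stub.callees) {
      // A pregenerated stub may only call already-pregenerated stubs.
      assert(generated.count(callee) && "callee not pregenerated yet");
    }
    generated.insert(stub.name);  // "generate" the stub
  }
}

int main() {
  // Mirrors the dependency chain described above: RecordWriteStub uses
  // StoreBufferOverflowStub, which in turn uses CEntryStub.
  GenerateAheadOfTime({
      {"CEntryStub", {}},
      {"StoreBufferOverflowStub", {"CEntryStub"}},
      {"RecordWriteStub", {"StoreBufferOverflowStub"}},
  });
}
```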
| 1301 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 1288 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
| 1302 // Floating-point code doesn't get special handling in A64, so there's | 1289 // Floating-point code doesn't get special handling in A64, so there's |
| 1303 // nothing to do here. | 1290 // nothing to do here. |
| 1304 USE(isolate); | 1291 USE(isolate); |
| 1305 } | 1292 } |
| (...skipping 1889 matching lines...) | |
| 3195 | 3182 |
| 3196 // Load the cache state. | 3183 // Load the cache state. |
| 3197 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 3184 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3198 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); | 3185 __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3199 | 3186 |
| 3200 // A monomorphic cache hit or an already megamorphic state: invoke the | 3187 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 3201 // function without changing the state. | 3188 // function without changing the state. |
| 3202 __ Cmp(x4, x1); | 3189 __ Cmp(x4, x1); |
| 3203 __ B(eq, &done); | 3190 __ B(eq, &done); |
| 3204 | 3191 |
| 3205 // If we came here, we need to see if we are the array function. | |
| 3206 // If we didn't have a matching function, and we didn't find the | |
| 3207 // megamorphic sentinel, then the slot holds either some other function | |
| 3208 // or an AllocationSite. Do a map check on the object in x4. | |
| 3209 __ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset)); | |
| 3210 __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &miss); | |
| 3211 | |
| 3212 // Make sure the function is the Array() function | |
| 3213 __ LoadArrayFunction(x4); | |
| 3214 __ Cmp(x1, x4); | |
| 3215 __ B(ne, &megamorphic); | |
| 3216 __ B(&done); | |
| 3217 | |
| 3218 __ Bind(&miss); | 3192 __ Bind(&miss); |
| 3219 | 3193 |
| 3220 // A monomorphic miss (i.e., here the cache is not uninitialized) goes | 3194 // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
| 3221 // megamorphic. | 3195 // megamorphic. |
| 3222 __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize); | 3196 __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize); |
| 3223 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3197 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 3224 // write-barrier is needed. | 3198 // write-barrier is needed. |
| 3225 __ Bind(&megamorphic); | 3199 __ Bind(&megamorphic); |
| 3226 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 3200 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3227 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | 3201 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); |
| 3228 __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize)); | 3202 __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize)); |
| 3229 __ B(&done); | 3203 __ B(&done); |
| 3230 | 3204 |
| 3231 // An uninitialized cache is patched with the function or sentinel to | 3205 // An uninitialized cache is patched with the function or sentinel to |
| 3232 // indicate the ElementsKind if function is the Array constructor. | 3206 // indicate the ElementsKind if function is the Array constructor. |
| 3233 __ Bind(&initialize); | 3207 __ Bind(&initialize); |
| 3234 // Make sure the function is the Array() function | |
| 3235 __ LoadArrayFunction(x4); | |
| 3236 __ Cmp(x1, x4); | |
| 3237 __ B(ne, ¬_array_function); | |
| 3238 | 3208 |
| 3239 // The target function is the Array constructor. Create an | |
| 3240 // AllocationSite if we don't already have one, and store it in the slot. | |
| 3241 { | |
| 3242 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 3243 CreateAllocationSiteStub create_stub; | |
| 3244 | |
| 3245 // Arguments register must be smi-tagged to call out. | |
| 3246 __ SmiTag(x0); | |
| 3247 __ Push(x0, x1, x2, x3); | |
| 3248 | |
| 3249 __ CallStub(&create_stub); | |
| 3250 | |
| 3251 __ Pop(x3, x2, x1, x0); | |
| 3252 __ SmiUntag(x0); | |
| 3253 } | |
| 3254 __ B(&done); | |
| 3255 | |
| 3256 __ Bind(¬_array_function); | |
| 3257 // An uninitialized cache is patched with the function. | 3209 // An uninitialized cache is patched with the function. |
| 3258 | |
| 3259 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 3210 __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 3260 // TODO(all): Does the value need to be left in x4? If not, FieldMemOperand | 3211 // TODO(all): Does the value need to be left in x4? If not, FieldMemOperand |
| 3261 // could be used to avoid this add. | 3212 // could be used to avoid this add. |
| 3262 __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag); | 3213 __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag); |
| 3263 __ Str(x1, MemOperand(x4, 0)); | 3214 __ Str(x1, MemOperand(x4, 0)); |
| 3264 | 3215 |
| 3265 __ Push(x4, x2, x1); | 3216 __ Push(x4, x2, x1); |
| 3266 __ RecordWrite(x2, x4, x1, kLRHasNotBeenSaved, kDontSaveFPRegs, | 3217 __ RecordWrite(x2, x4, x1, kLRHasNotBeenSaved, kDontSaveFPRegs, |
| 3267 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 3218 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 3268 __ Pop(x1, x2, x4); | 3219 __ Pop(x1, x2, x4); |
| 3269 | 3220 |
| 3270 // TODO(all): Are x4, x2 and x1 outputs? This isn't clear. | 3221 // TODO(all): Are x4, x2 and x1 outputs? This isn't clear. |
| 3271 | |
| 3272 __ Bind(&done); | 3222 __ Bind(&done); |
| 3273 } | 3223 } |
| 3274 | 3224 |
| 3275 | 3225 |
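Taken together, the branches in GenerateRecordCallTarget implement a small state machine over one feedback slot: the hole means uninitialized, a cached function means monomorphic, and the undefined sentinel means megamorphic. The deleted OLD branches additionally special-cased the Array function with an AllocationSite, which this CL moves elsewhere. A hedged C++ model of the remaining transitions — SlotState and the enumerator names are invented for this sketch:

```cpp
#include <cstdio>

// Invented names modelling the feedback-slot transitions in the assembly
// above: the hole = uninitialized, undefined = the megamorphic sentinel.
enum SlotState { kUninitialized, kMonomorphic, kMegamorphic };

SlotState RecordCallTarget(SlotState state, bool function_matches_cache) {
  if (state == kMonomorphic && function_matches_cache) return kMonomorphic;
  if (state == kMegamorphic) return kMegamorphic;    // stay generic
  if (state == kUninitialized) return kMonomorphic;  // first call: cache it
  return kMegamorphic;  // monomorphic miss: go generic, no write barrier
}

int main() {
  SlotState s = kUninitialized;
  s = RecordCallTarget(s, false);  // caches the first callee -> kMonomorphic
  s = RecordCallTarget(s, false);  // a different callee -> kMegamorphic
  std::printf("final state: %d\n", s);  // prints 2
}
```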
| 3276 void CallFunctionStub::Generate(MacroAssembler* masm) { | 3226 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 3277 ASM_LOCATION("CallFunctionStub::Generate"); | 3227 ASM_LOCATION("CallFunctionStub::Generate"); |
| 3278 // x1 : the function to call | 3228 // x1 : the function to call |
| 3279 // x2 : feedback vector | 3229 // x2 : feedback vector |
| 3280 // x3 : slot in feedback vector (smi) (if x2 is not undefined) | 3230 // x3 : slot in feedback vector (smi) (if x2 is not undefined) |
| 3281 Register function = x1; | 3231 Register function = x1; |
| 3282 Register cache_cell = x2; | 3232 Register cache_cell = x2; |
| 3283 Register slot = x3; | 3233 Register slot = x3; |
| 3284 Register type = x4; | 3234 Register type = x4; |
| 3285 Label slow, non_function, wrap, cont; | 3235 Label slow, non_function, wrap, cont; |
| 3286 | 3236 |
| 3287 // TODO(jbramley): This function has a lot of unnamed registers. Name them, | 3237 // TODO(jbramley): This function has a lot of unnamed registers. Name them, |
| 3288 // and tidy things up a bit. | 3238 // and tidy things up a bit. |
| 3289 | 3239 |
| 3290 if (NeedsChecks()) { | 3240 if (NeedsChecks()) { |
| 3291 // Check that the function is really a JavaScript function. | 3241 // Check that the function is really a JavaScript function. |
| 3292 __ JumpIfSmi(function, &non_function); | 3242 __ JumpIfSmi(function, &non_function); |
| 3293 | 3243 |
| 3294 // Go to the slow case if we do not have a function. | 3244 // Go to the slow case if we do not have a function. |
| 3295 __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); | 3245 __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); |
| 3296 | 3246 |
| 3297 if (RecordCallTarget()) { | 3247 if (RecordCallTarget()) { |
| 3298 GenerateRecordCallTarget(masm); | 3248 GenerateRecordCallTarget(masm); |
| 3249 // Type information was updated. Because we may call Array, which | |
| 3250 // expects either undefined or an AllocationSite in x2, we need | |
| 3251 // to set x2 to undefined. | |
| 3252 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); | |
| 3299 } | 3253 } |
| 3300 } | 3254 } |
| 3301 | 3255 |
| 3302 // Fast-case: Invoke the function now. | 3256 // Fast-case: Invoke the function now. |
| 3303 // x1 : the pushed function | 3257 // x1 : the pushed function |
| 3304 ParameterCount actual(argc_); | 3258 ParameterCount actual(argc_); |
| 3305 | 3259 |
| 3306 if (CallAsMethod()) { | 3260 if (CallAsMethod()) { |
| 3307 if (NeedsChecks()) { | 3261 if (NeedsChecks()) { |
| 3308 // Do not transform the receiver for strict mode functions. | 3262 // Do not transform the receiver for strict mode functions. |
| (...skipping 85 matching lines...) | |
| 3394 | 3348 |
| 3395 // Check that the function is not a smi. | 3349 // Check that the function is not a smi. |
| 3396 __ JumpIfSmi(function, &non_function_call); | 3350 __ JumpIfSmi(function, &non_function_call); |
| 3397 // Check that the function is a JSFunction. | 3351 // Check that the function is a JSFunction. |
| 3398 Register object_type = x10; | 3352 Register object_type = x10; |
| 3399 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, | 3353 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, |
| 3400 &slow); | 3354 &slow); |
| 3401 | 3355 |
| 3402 if (RecordCallTarget()) { | 3356 if (RecordCallTarget()) { |
| 3403 GenerateRecordCallTarget(masm); | 3357 GenerateRecordCallTarget(masm); |
| 3358 // Put the AllocationSite from the feedback vector into x2. | |
| 3359 // By adding kPointerSize we encode that the AllocationSite entry | |
| 3360 // is at the feedback vector slot given by x3 + 1. | |
| 3361 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | |
| 3362 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize)); | |
| 3363 __ AssertUndefinedOrAllocationSite(x2, x5); | |
| 3404 } | 3364 } |
| 3405 | 3365 |
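The two added loads compute the address of feedback-vector element x3, then read at an offset one pointer past the normal header — element x3 + 1, where the AllocationSite lives. A standalone sketch of that address arithmetic, with illustrative constants (V8's real values are build-dependent, and FieldMemOperand also folds in the heap-object tag, omitted here):

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative constants, not V8's actual layout.
constexpr int kPointerSizeLog2 = 3;                      // 8-byte pointers
constexpr int kPointerSize = 1 << kPointerSizeLog2;      // 8
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

// Mirrors Add(x5, x2, UntagSmi(x3) << kPointerSizeLog2) followed by a load
// at FixedArray::kHeaderSize + kPointerSize: the extra kPointerSize moves
// the access from element `slot` to element `slot + 1`.
uintptr_t AllocationSiteEntryAddress(uintptr_t vector_base, int slot) {
  uintptr_t element_base =
      vector_base + static_cast<uintptr_t>(slot) * kPointerSize;
  return element_base + kFixedArrayHeaderSize + kPointerSize;
}

int main() {
  // Slot 2 of a vector at 0x1000: 0x1000 + 2*8 + 16 + 8 = 0x1028.
  std::printf("0x%lx\n",
              static_cast<unsigned long>(AllocationSiteEntryAddress(0x1000, 2)));
}
```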
| 3406 // Jump to the function-specific construct stub. | 3366 // Jump to the function-specific construct stub. |
| 3407 Register jump_reg = x4; | 3367 Register jump_reg = x4; |
| 3408 Register shared_func_info = jump_reg; | 3368 Register shared_func_info = jump_reg; |
| 3409 Register cons_stub = jump_reg; | 3369 Register cons_stub = jump_reg; |
| 3410 Register cons_stub_code = jump_reg; | 3370 Register cons_stub_code = jump_reg; |
| 3411 __ Ldr(shared_func_info, | 3371 __ Ldr(shared_func_info, |
| 3412 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3372 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| 3413 __ Ldr(cons_stub, | 3373 __ Ldr(cons_stub, |
| (...skipping 1982 matching lines...) | |
| 5396 UNREACHABLE(); | 5356 UNREACHABLE(); |
| 5397 } | 5357 } |
| 5398 } | 5358 } |
| 5399 | 5359 |
| 5400 | 5360 |
| 5401 void ArrayConstructorStub::Generate(MacroAssembler* masm) { | 5361 void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
| 5402 ASM_LOCATION("ArrayConstructorStub::Generate"); | 5362 ASM_LOCATION("ArrayConstructorStub::Generate"); |
| 5403 // ----------- S t a t e ------------- | 5363 // ----------- S t a t e ------------- |
| 5404 // -- x0 : argc (only if argument_count_ == ANY) | 5364 // -- x0 : argc (only if argument_count_ == ANY) |
| 5405 // -- x1 : constructor | 5365 // -- x1 : constructor |
| 5406 // -- x2 : feedback vector (fixed array or undefined) | 5366 // -- x2 : AllocationSite or undefined |
| 5407 // -- x3 : slot index (if x2 is fixed array) | |
| 5408 // -- sp[0] : return address | 5367 // -- sp[0] : return address |
| 5409 // -- sp[4] : last argument | 5368 // -- sp[4] : last argument |
| 5410 // ----------------------------------- | 5369 // ----------------------------------- |
| 5411 Register constructor = x1; | 5370 Register constructor = x1; |
| 5412 Register feedback_vector = x2; | 5371 Register allocation_site = x2; |
| 5413 Register slot_index = x3; | |
| 5414 | 5372 |
| 5415 if (FLAG_debug_code) { | 5373 if (FLAG_debug_code) { |
| 5416 // The array construct code is only set for the global and natives | 5374 // The array construct code is only set for the global and natives |
| 5417 // builtin Array functions, which always have maps. | 5375 // builtin Array functions, which always have maps. |
| 5418 | 5376 |
| 5419 Label unexpected_map, map_ok; | 5377 Label unexpected_map, map_ok; |
| 5420 // Initial map for the builtin Array function should be a map. | 5378 // Initial map for the builtin Array function should be a map. |
| 5421 __ Ldr(x10, FieldMemOperand(constructor, | 5379 __ Ldr(x10, FieldMemOperand(constructor, |
| 5422 JSFunction::kPrototypeOrInitialMapOffset)); | 5380 JSFunction::kPrototypeOrInitialMapOffset)); |
| 5423 // A Smi check will detect both a NULL and a Smi. | 5381 // A Smi check will detect both a NULL and a Smi. |
| 5424 __ JumpIfSmi(x10, &unexpected_map); | 5382 __ JumpIfSmi(x10, &unexpected_map); |
| 5425 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); | 5383 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); |
| 5426 __ Bind(&unexpected_map); | 5384 __ Bind(&unexpected_map); |
| 5427 __ Abort(kUnexpectedInitialMapForArrayFunction); | 5385 __ Abort(kUnexpectedInitialMapForArrayFunction); |
| 5428 __ Bind(&map_ok); | 5386 __ Bind(&map_ok); |
| 5429 | 5387 |
| 5430 // In feedback_vector, we expect either undefined or a valid fixed array. | 5388 // We should either have undefined in x2 or a valid AllocationSite |
Hannes Payer (out of office) 2014/02/18 16:24:26: x10
mvstanton 2014/02/19 08:40:26: Done.
| 5431 Label okay_here; | 5389 __ AssertUndefinedOrAllocationSite(allocation_site, x10); |
| 5432 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map(); | |
| 5433 __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex, &okay_here); | |
| 5434 __ Ldr(x10, FieldMemOperand(feedback_vector, FixedArray::kMapOffset)); | |
| 5435 __ Cmp(x10, Operand(fixed_array_map)); | |
| 5436 __ Assert(eq, kExpectedFixedArrayInFeedbackVector); | |
| 5437 | |
| 5438 // slot_index should be a smi if we don't have undefined in feedback_vector. | |
| 5439 __ AssertSmi(slot_index); | |
| 5440 | |
| 5441 __ Bind(&okay_here); | |
| 5442 } | 5390 } |
| 5443 | 5391 |
| 5444 Register allocation_site = x2; // Overwrites feedback_vector. | |
| 5445 Register kind = x3; | 5392 Register kind = x3; |
| 5446 Label no_info; | 5393 Label no_info; |
| 5447 // Get the elements kind and case on that. | 5394 // Get the elements kind and case on that. |
| 5448 __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex, &no_info); | 5395 __ JumpIfRoot(allocation_site, Heap::kUndefinedValueRootIndex, &no_info); |
| 5449 __ Add(feedback_vector, feedback_vector, | |
| 5450 Operand::UntagSmiAndScale(slot_index, kPointerSizeLog2)); | |
| 5451 __ Ldr(allocation_site, FieldMemOperand(feedback_vector, | |
| 5452 FixedArray::kHeaderSize)); | |
| 5453 | |
| 5454 // If the feedback vector is undefined, or contains anything other than an | |
| 5455 // AllocationSite, call an array constructor that doesn't use AllocationSites. | |
| 5456 __ Ldr(x10, FieldMemOperand(allocation_site, AllocationSite::kMapOffset)); | |
| 5457 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex, &no_info); | |
| 5458 | 5396 |
| 5459 __ Ldrsw(kind, | 5397 __ Ldrsw(kind, |
| 5460 UntagSmiFieldMemOperand(allocation_site, | 5398 UntagSmiFieldMemOperand(allocation_site, |
| 5461 AllocationSite::kTransitionInfoOffset)); | 5399 AllocationSite::kTransitionInfoOffset)); |
| 5462 __ And(kind, kind, AllocationSite::ElementsKindBits::kMask); | 5400 __ And(kind, kind, AllocationSite::ElementsKindBits::kMask); |
| 5463 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); | 5401 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); |
| 5464 | 5402 |
| 5465 __ Bind(&no_info); | 5403 __ Bind(&no_info); |
| 5466 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); | 5404 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); |
| 5467 } | 5405 } |
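The last step above decodes the ElementsKind from the AllocationSite's transition info with a single And against ElementsKindBits::kMask. A hedged sketch of such a bitfield decode — the shift, width, and kind numbering are assumptions, not V8's actual encoding:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative stand-in for AllocationSite::ElementsKindBits.
struct ElementsKindBits {
  static constexpr uint32_t kShift = 0;
  static constexpr uint32_t kSize = 5;  // assumed field width
  static constexpr uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static uint32_t decode(uint32_t transition_info) {
    // Same effect as the Ldrsw + And sequence above when kShift == 0.
    return (transition_info & kMask) >> kShift;
  }
};

int main() {
  uint32_t transition_info = 0x63;  // upper bits are unrelated payload
  std::printf("kind = %u\n", ElementsKindBits::decode(transition_info));  // 3
}
```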
| (...skipping 244 matching lines...) | |
| 5712 MemOperand(fp, 6 * kPointerSize), | 5650 MemOperand(fp, 6 * kPointerSize), |
| 5713 NULL); | 5651 NULL); |
| 5714 } | 5652 } |
| 5715 | 5653 |
| 5716 | 5654 |
| 5717 #undef __ | 5655 #undef __ |
| 5718 | 5656 |
| 5719 } } // namespace v8::internal | 5657 } } // namespace v8::internal |
| 5720 | 5658 |
| 5721 #endif // V8_TARGET_ARCH_A64 | 5659 #endif // V8_TARGET_ARCH_A64 |