Chromium Code Reviews

Diff: src/a64/code-stubs-a64.cc

Issue 184433002: A64: Tidy up some TODOs in GenerateRecordCallTarget. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 9 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(... 3239 matching lines skipped ...)
 
   // (9) Sliced string. Replace subject with parent.
   __ Ldr(sliced_string_offset,
          UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset));
   __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
   __ B(&check_underlying);  // Go to (4).
 #endif
 }
 
 
-// TODO(jbramley): Don't use static registers here, but take them as arguments.
-static void GenerateRecordCallTarget(MacroAssembler* masm) {
+static void GenerateRecordCallTarget(MacroAssembler* masm,
+                                     Register argc,
+                                     Register function,
+                                     Register feedback_vector,
+                                     Register index,
+                                     Register scratch1,
+                                     Register scratch2) {
   ASM_LOCATION("GenerateRecordCallTarget");
+  ASSERT(!AreAliased(scratch1, scratch2,
+                     argc, function, feedback_vector, index));
   // Cache the called function in a feedback vector slot. Cache states are
   // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic.
-  // x0 : number of arguments to the construct function
-  // x1 : the function to call
-  // x2 : feedback vector
-  // x3 : slot in feedback vector (smi)
+  // argc : number of arguments to the construct function
+  // function : the function to call
+  // feedback_vector : the feedback vector
+  // index : slot in feedback vector (smi)
   Label initialize, done, miss, megamorphic, not_array_function;
 
   ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->undefined_value());
   ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
             masm->isolate()->heap()->the_hole_value());
 
   // Load the cache state.
-  __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
-  __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));
+  __ Add(scratch1, feedback_vector,
+         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
+  __ Ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
-  __ Cmp(x4, x1);
+  __ Cmp(scratch1, function);
   __ B(eq, &done);
 
   // If we came here, we need to see if we are the array function.
   // If we didn't have a matching function, and we didn't find the megamorph
   // sentinel, then we have in the slot either some other function or an
-  // AllocationSite. Do a map check on the object in ecx.
-  __ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset));
-  __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &miss);
+  // AllocationSite. Do a map check on the object in scratch1 register.
+  __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset));
+  __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
 
   // Make sure the function is the Array() function
-  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x4);
-  __ Cmp(x1, x4);
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+  __ Cmp(function, scratch1);
   __ B(ne, &megamorphic);
   __ B(&done);
 
   __ Bind(&miss);
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
-  __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize);
+  __ JumpIfRoot(scratch1, Heap::kTheHoleValueRootIndex, &initialize);
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ Bind(&megamorphic);
-  __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
-  __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
-  __ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize));
+  __ Add(scratch1, feedback_vector,
+         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
+  __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
+  __ Str(scratch2, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
   __ B(&done);
 
   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ Bind(&initialize);
   // Make sure the function is the Array() function
-  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x4);
-  __ Cmp(x1, x4);
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+  __ Cmp(function, scratch1);
   __ B(ne, &not_array_function);
 
   // The target function is the Array constructor,
   // Create an AllocationSite if we don't already have it, store it in the slot.
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateAllocationSiteStub create_stub;
 
     // Arguments register must be smi-tagged to call out.
-    __ SmiTag(x0);
-    __ Push(x0, x1, x2, x3);
+    __ SmiTag(argc);
+    __ Push(argc, function, feedback_vector, index);
 
+    // CreateAllocationSiteStub expect the feedback vector in x2 and the slot
+    // index in x3.
+    ASSERT(feedback_vector.Is(x2) && index.Is(x3));
     __ CallStub(&create_stub);
 
-    __ Pop(x3, x2, x1, x0);
-    __ SmiUntag(x0);
+    __ Pop(index, feedback_vector, function, argc);
+    __ SmiUntag(argc);
   }
   __ B(&done);
 
   __ Bind(&not_array_function);
   // An uninitialized cache is patched with the function.
 
-  __ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
-  // TODO(all): Does the value need to be left in x4? If not, FieldMemOperand
-  // could be used to avoid this add.
-  __ Add(x4, x4, FixedArray::kHeaderSize - kHeapObjectTag);
-  __ Str(x1, MemOperand(x4, 0));
+  __ Add(scratch1, feedback_vector,
+         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
+  __ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag);
+  __ Str(function, MemOperand(scratch1, 0));
 
-  __ Push(x4, x2, x1);
-  __ RecordWrite(x2, x4, x1, kLRHasNotBeenSaved, kDontSaveFPRegs,
-                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  __ Pop(x1, x2, x4);
-
-  // TODO(all): Are x4, x2 and x1 outputs? This isn't clear.
+  __ Push(function);
+  __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved,
+                 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+  __ Pop(function);
 
   __ Bind(&done);
 }
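
Reviewer note (not part of the patch): as a plain-C++ reading aid, the cache-state transitions that GenerateRecordCallTarget performs on a single feedback vector slot can be sketched roughly as below. The Slot/SlotKind types and the standalone RecordCallTarget function are invented names for illustration only; the real stub manipulates tagged heap values, with the-hole and undefined serving as the uninitialized and megamorphic sentinels.

// Illustrative sketch only; not V8 code.
enum class SlotKind {
  kUninitialized,   // the-hole sentinel
  kMonomorphic,     // a cached JSFunction
  kAllocationSite,  // Array() calls cache an AllocationSite instead
  kMegamorphic      // undefined sentinel: give up on caching
};

struct Slot {
  SlotKind kind = SlotKind::kUninitialized;
  const void* function = nullptr;  // meaningful only when kMonomorphic
};

void RecordCallTarget(Slot* slot, const void* function,
                      bool is_array_function) {
  // Monomorphic hit: the slot already holds this function; leave it alone.
  if (slot->kind == SlotKind::kMonomorphic && slot->function == function) {
    return;
  }

  // An AllocationSite in the slot only stays valid for the Array() function.
  if (slot->kind == SlotKind::kAllocationSite) {
    if (is_array_function) return;
    slot->kind = SlotKind::kMegamorphic;
    return;
  }

  // Monomorphic miss on a different function, or already megamorphic:
  // (re)store the megamorphic sentinel.
  if (slot->kind != SlotKind::kUninitialized) {
    slot->kind = SlotKind::kMegamorphic;
    return;
  }

  // Uninitialized: patch the slot. Array() gets an AllocationSite (created by
  // CreateAllocationSiteStub in the real code); any other callee gets the
  // function itself, with a write barrier.
  if (is_array_function) {
    slot->kind = SlotKind::kAllocationSite;
  } else {
    slot->kind = SlotKind::kMonomorphic;
    slot->function = function;
  }
}

Re-storing the megamorphic sentinel on a miss is harmless: as the comment in the stub notes, undefined is an immortal immovable object, so no write barrier is needed there, whereas storing a function into an uninitialized slot does require the RecordWrite call.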
 
 
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   ASM_LOCATION("CallFunctionStub::Generate");
   // x1  function    the function to call
   // x2 : feedback vector
   // x3 : slot in feedback vector (smi) (if x2 is not undefined)
   Register function = x1;
   Register cache_cell = x2;
   Register slot = x3;
   Register type = x4;
   Label slow, non_function, wrap, cont;
 
   // TODO(jbramley): This function has a lot of unnamed registers. Name them,
   // and tidy things up a bit.
 
   if (NeedsChecks()) {
     // Check that the function is really a JavaScript function.
     __ JumpIfSmi(function, &non_function);
 
     // Goto slow case if we do not have a function.
     __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
 
     if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
+      GenerateRecordCallTarget(masm, x0, function, cache_cell, slot, x4, x5);
     }
   }
 
   // Fast-case: Invoke the function now.
   // x1  function  pushed function
   ParameterCount actual(argc_);
 
   if (CallAsMethod()) {
     if (NeedsChecks()) {
       // Do not transform the receiver for strict mode functions.
(... 84 matching lines skipped ...)
   Label slow, non_function_call;
 
   // Check that the function is not a smi.
   __ JumpIfSmi(function, &non_function_call);
   // Check that the function is a JSFunction.
   Register object_type = x10;
   __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE,
                          &slow);
 
   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm);
+    GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5);
   }
 
   // Jump to the function-specific construct stub.
   Register jump_reg = x4;
   Register shared_func_info = jump_reg;
   Register cons_stub = jump_reg;
   Register cons_stub_code = jump_reg;
   __ Ldr(shared_func_info,
          FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
   __ Ldr(cons_stub,
(... 2302 matching lines skipped ...)
                        MemOperand(fp, 6 * kPointerSize),
                        NULL);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_A64