Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 15094018: Create AllocationSite objects, pointed to by AllocationSiteInfo. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Comment response (created 7 years, 5 months ago)
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 43 matching lines...)
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { rax, rbx, rcx, rdx };
   descriptor->register_param_count_ = 4;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
 }
 
 
+void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rbx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   static Register registers[] = { rdx, rax };
   descriptor->register_param_count_ = 2;
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
 }
 
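The new descriptor says CreateAllocationSiteStub takes a single register argument (the type feedback cell in rbx) and has no deoptimization fallback. For orientation, a minimal, self-contained C++ sketch of the relationship the stub sets up is below; the type and field names are invented for illustration and are not V8 API.

    // Illustrative model only (invented names, not V8 internals).
    #include <cstdint>

    struct AllocationSiteModel {
      // Caches the ElementsKind observed for arrays created at the call site;
      // the real object stores this as a Smi (see AllocationSite::kPayloadOffset).
      int32_t payload_elements_kind;
    };

    struct TypeFeedbackCellModel {
      // In the real code the cell value is a tagged pointer that may be a
      // sentinel, a JSFunction, or an AllocationSite.
      AllocationSiteModel* allocation_site;  // non-null once the stub has run
    };

    // What the stub conceptually does with its one argument (the cell).
    void CreateAllocationSite(TypeFeedbackCellModel* cell) {
      cell->allocation_site = new AllocationSiteModel{0};
    }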
(...skipping 3424 matching lines...)
   // Load the cache state into rcx.
   __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
 
   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
   __ cmpq(rcx, rdi);
   __ j(equal, &done);
   __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate));
   __ j(equal, &done);
 
-  // Special handling of the Array() function, which caches not only the
-  // monomorphic Array function but the initial ElementsKind with special
-  // sentinels
-  __ JumpIfNotSmi(rcx, &miss);
-  if (FLAG_debug_code) {
-    Handle<Object> terminal_kind_sentinel =
-        TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
-                                                    LAST_FAST_ELEMENTS_KIND);
-    __ Cmp(rcx, terminal_kind_sentinel);
-    __ Assert(less_equal, "Array function sentinel is not an ElementsKind");
-  }
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the cell either some other function or an
+  // AllocationSite. Do a map check on the object in rcx.
+  Handle<Map> allocation_site_map(
+      masm->isolate()->heap()->allocation_site_map(),
+      masm->isolate());
+  __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
+  __ j(not_equal, &miss);
 
   // Make sure the function is the Array() function
   __ LoadArrayFunction(rcx);
   __ cmpq(rdi, rcx);
   __ j(not_equal, &megamorphic);
   __ jmp(&done);
 
   __ bind(&miss);
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
   __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate));
   __ j(equal, &initialize);
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ bind(&megamorphic);
   __ Move(FieldOperand(rbx, Cell::kValueOffset),
           TypeFeedbackCells::MegamorphicSentinel(isolate));
-  __ jmp(&done, Label::kNear);
+  __ jmp(&done);
 
   // An uninitialized cache is patched with the function or sentinel to
   // indicate the ElementsKind if function is the Array constructor.
   __ bind(&initialize);
   // Make sure the function is the Array() function
   __ LoadArrayFunction(rcx);
   __ cmpq(rdi, rcx);
   __ j(not_equal, &not_array_function);
 
-  // The target function is the Array constructor, install a sentinel value in
-  // the constructor's type info cell that will track the initial ElementsKind
-  // that should be used for the array when its constructed.
-  Handle<Object> initial_kind_sentinel =
-      TypeFeedbackCells::MonomorphicArraySentinel(isolate,
-          GetInitialFastElementsKind());
-  __ Move(FieldOperand(rbx, Cell::kValueOffset),
-          initial_kind_sentinel);
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the cell
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    __ push(rax);
+    __ push(rdi);
+    __ push(rbx);
+
+    CreateAllocationSiteStub create_stub;
+    __ CallStub(&create_stub);
+
+    __ pop(rbx);
+    __ pop(rdi);
+    __ pop(rax);
+  }
   __ jmp(&done);
 
   __ bind(&not_array_function);
   __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi);
   // No need for a write barrier here - cells are rescanned.
 
   __ bind(&done);
 }
 
 
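The hunk above changes how GenerateRecordCallTarget treats the Array() function: instead of caching an ElementsKind Smi sentinel in the feedback cell, it now installs an AllocationSite object (recognized by its map) via the new CreateAllocationSiteStub. A rough, self-contained C++ model of the resulting cell-state machine follows; all type names are invented for the sketch, and only the decision structure mirrors the assembly.

    // Illustrative model only; not V8 code.
    #include <variant>

    struct JSFunctionModel {};
    struct AllocationSiteModel {};
    struct UninitializedSentinel {};
    struct MegamorphicSentinel {};

    using CellValue = std::variant<UninitializedSentinel, MegamorphicSentinel,
                                   JSFunctionModel*, AllocationSiteModel*>;

    // Returns the new cell value after one call through the call site.
    CellValue RecordCallTarget(CellValue state, JSFunctionModel* target,
                               JSFunctionModel* array_function) {
      // Monomorphic hit or already megamorphic: leave the state alone.
      if (auto fn = std::get_if<JSFunctionModel*>(&state); fn && *fn == target)
        return state;
      if (std::holds_alternative<MegamorphicSentinel>(state))
        return state;
      // An AllocationSite in the cell counts as a hit for the Array() function.
      if (std::holds_alternative<AllocationSiteModel*>(state) &&
          target == array_function)
        return state;
      // Uninitialized: install a fresh AllocationSite for Array(), otherwise
      // cache the concrete target function.
      if (std::holds_alternative<UninitializedSentinel>(state)) {
        if (target == array_function) return new AllocationSiteModel();
        return target;
      }
      // Any other mismatch (a monomorphic miss) goes megamorphic.
      return MegamorphicSentinel{};
    }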
(...skipping 149 matching lines...)
 }
 
 
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
   RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
 }
 
 
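CreateAllocationSiteStub is added to the ahead-of-time list so that GenerateRecordCallTarget can call it without triggering stub compilation lazily. Its GenerateAheadOfTime body is not shown in this file; a sketch assuming it follows the same pattern as CEntryStub::GenerateAheadOfTime just below (not taken from this diff):

    // Assumed shape, mirroring the CEntryStub pattern; not verified against
    // the rest of the patch.
    void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
      CreateAllocationSiteStub stub;
      stub.GetCode(isolate)->set_is_pregenerated(true);
    }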
 void CodeStub::GenerateFPStubs(Isolate* isolate) {
 }
 
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(1, kDontSaveFPRegs);
   stub.GetCode(isolate)->set_is_pregenerated(true);
(...skipping 2795 matching lines...)
   __ testb(rdx, Immediate(1));
   Label normal_sequence;
   __ j(not_zero, &normal_sequence);
 
   // look at the first argument
   __ movq(rcx, Operand(rsp, kPointerSize));
   __ testq(rcx, rcx);
   __ j(zero, &normal_sequence);
 
   // We are going to create a holey array, but our kind is non-holey.
-  // Fix kind and retry
+  // Fix kind and retry (only if we have an allocation site in the cell).
   __ incl(rdx);
   __ Cmp(rbx, undefined_sentinel);
   __ j(equal, &normal_sequence);
-
-  // The type cell may have gone megamorphic, don't overwrite if so
-  __ movq(rcx, FieldOperand(rbx, kPointerSize));
-  __ JumpIfNotSmi(rcx, &normal_sequence);
+  __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
+  Handle<Map> allocation_site_map(
+      masm->isolate()->heap()->allocation_site_map(),
+      masm->isolate());
+  __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
+  __ j(not_equal, &normal_sequence);
 
   // Save the resulting elements kind in type info
   __ Integer32ToSmi(rdx, rdx);
-  __ movq(FieldOperand(rbx, kPointerSize), rdx);
+  __ movq(FieldOperand(rcx, AllocationSite::kPayloadOffset), rdx);
   __ SmiToInteger32(rdx, rdx);
 
   __ bind(&normal_sequence);
   int last_index = GetSequenceIndexFromFastElementsKind(
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= last_index; ++i) {
     Label next;
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     __ cmpl(rdx, Immediate(kind));
     __ j(not_equal, &next);
     ArraySingleArgumentConstructorStub stub(kind);
     __ TailCallStub(&stub);
     __ bind(&next);
   }
 
   // If we reached this point there is a problem.
   __ Abort("Unexpected ElementsKind in array constructor");
 }
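In the one-argument dispatch above, the requested kind is still bumped to its holey variant before retrying, but the new kind is now written back only when the feedback cell actually holds an AllocationSite (previously the cell held a raw Smi sentinel that could be overwritten directly). A small standalone C++ sketch of that rule, with invented names:

    // Illustrative model only; kind values mirror V8's convention that the
    // holey variant of a fast ElementsKind is the next enum value.
    enum ElementsKindModel {
      FAST_SMI_ELEMENTS_M = 0, FAST_HOLEY_SMI_ELEMENTS_M = 1,
      FAST_ELEMENTS_M = 2, FAST_HOLEY_ELEMENTS_M = 3
    };

    struct AllocationSiteModel { int payload_elements_kind; };

    // Called only when 'kind' is a non-holey fast kind. 'site' is null when
    // the feedback cell is undefined or holds something other than an
    // AllocationSite (e.g. it went megamorphic).
    ElementsKindModel FixKindForHoleyArray(ElementsKindModel kind,
                                           AllocationSiteModel* site) {
      kind = static_cast<ElementsKindModel>(kind + 1);  // non-holey -> holey
      if (site != nullptr) {
        site->payload_elements_kind = kind;  // remember it for future arrays
      }
      return kind;
    }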
 
 
 template<class T>
 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
   int to_index = GetSequenceIndexFromFastElementsKind(
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     T stub(kind);
     stub.GetCode(isolate)->set_is_pregenerated(true);
-    if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
+    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
       T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
       stub1.GetCode(isolate)->set_is_pregenerated(true);
     }
   }
 }
 
 
 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
       isolate);
(...skipping 52 matching lines...)
     __ Cmp(FieldOperand(rbx, 0), cell_map);
     __ Assert(equal, "Expected property cell in register rbx");
     __ bind(&okay_here);
   }
 
   Label no_info, switch_ready;
   // Get the elements kind and case on that.
   __ Cmp(rbx, undefined_sentinel);
   __ j(equal, &no_info);
   __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset));
-  __ JumpIfNotSmi(rdx, &no_info);
+
+  // The type cell may have undefined in its value.
+  __ Cmp(rdx, undefined_sentinel);
+  __ j(equal, &no_info);
+
+  // We should have an allocation site object
+  if (FLAG_debug_code) {
+    __ Cmp(FieldOperand(rdx, 0),
+           Handle<Map>(masm->isolate()->heap()->allocation_site_map()));
+    __ Assert(equal, "Expected AllocationSite object in register rdx");
+  }
+
+  __ movq(rdx, FieldOperand(rdx, AllocationSite::kPayloadOffset));
   __ SmiToInteger32(rdx, rdx);
   __ jmp(&switch_ready);
   __ bind(&no_info);
   __ movq(rdx, Immediate(GetInitialFastElementsKind()));
   __ bind(&switch_ready);
 
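The constructor entry point above now expects the feedback cell to hold either undefined or an AllocationSite (debug builds assert on the map) and reads the recorded ElementsKind from the site's payload rather than from a raw Smi in the cell. Roughly, with invented names:

    // Illustrative model only; not V8 internals.
    struct AllocationSiteModel { int payload_elements_kind; };  // a Smi in V8

    struct FeedbackCellModel {
      bool value_is_undefined;    // the cell may still contain undefined
      AllocationSiteModel* site;  // valid when value_is_undefined is false
    };

    // 'cell' is null when the stub was called with the undefined sentinel.
    int ElementsKindFromFeedback(const FeedbackCellModel* cell,
                                 int initial_fast_kind) {
      if (cell == nullptr) return initial_fast_kind;           // no feedback at all
      if (cell->value_is_undefined) return initial_fast_kind;  // cell present but empty
      return cell->site->payload_elements_kind;                // kind recorded by the site
    }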
   if (argument_count_ == ANY) {
     Label not_zero_case, not_one_case;
     __ testq(rax, rax);
     __ j(not_zero, &not_zero_case);
(...skipping 105 matching lines...)
   __ bind(&fast_elements_case);
   GenerateCase(masm, FAST_ELEMENTS);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64