OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stubs.h" | 5 #include "src/code-stubs.h" |
6 | 6 |
7 #include <sstream> | 7 #include <sstream> |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 4102 matching lines...)
4113 | 4113 |
4114 | 4114 |
4115 void FastCloneShallowArrayStub::InitializeDescriptor( | 4115 void FastCloneShallowArrayStub::InitializeDescriptor( |
4116 CodeStubDescriptor* descriptor) { | 4116 CodeStubDescriptor* descriptor) { |
4117 FastCloneShallowArrayDescriptor call_descriptor(isolate()); | 4117 FastCloneShallowArrayDescriptor call_descriptor(isolate()); |
4118 descriptor->Initialize( | 4118 descriptor->Initialize( |
4119 Runtime::FunctionForId(Runtime::kCreateArrayLiteralStubBailout)->entry); | 4119 Runtime::FunctionForId(Runtime::kCreateArrayLiteralStubBailout)->entry); |
4120 } | 4120 } |
4121 | 4121 |
4122 | 4122 |
4123 void CreateAllocationSiteStub::InitializeDescriptor(CodeStubDescriptor* d) {} | |
4124 | |
4125 | |
4126 void CreateWeakCellStub::InitializeDescriptor(CodeStubDescriptor* d) {} | 4123 void CreateWeakCellStub::InitializeDescriptor(CodeStubDescriptor* d) {} |
4127 | 4124 |
4128 | 4125 |
4129 void RegExpConstructResultStub::InitializeDescriptor( | 4126 void RegExpConstructResultStub::InitializeDescriptor( |
4130 CodeStubDescriptor* descriptor) { | 4127 CodeStubDescriptor* descriptor) { |
4131 descriptor->Initialize( | 4128 descriptor->Initialize( |
4132 Runtime::FunctionForId(Runtime::kRegExpConstructResult)->entry); | 4129 Runtime::FunctionForId(Runtime::kRegExpConstructResult)->entry); |
4133 } | 4130 } |
4134 | 4131 |
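
Side note: the InitializeDescriptor methods above all follow the same shape: the stub's CodeStubDescriptor is initialized with the entry address of the runtime function that acts as its bailout/miss handler, so generated code can fall back into the runtime when the fast path gives up. A minimal, self-contained sketch of that registration pattern, using hypothetical Descriptor/RuntimeEntry stand-ins rather than the real V8 types:

    #include <cstdio>

    // Hypothetical stand-ins for CodeStubDescriptor and a runtime entry point.
    using RuntimeEntry = void (*)();

    struct Descriptor {
      RuntimeEntry miss_handler = nullptr;
      void Initialize(RuntimeEntry entry) { miss_handler = entry; }
    };

    void CreateArrayLiteralBailout() { std::puts("fell back to the runtime"); }

    int main() {
      Descriptor descriptor;
      // Mirrors descriptor->Initialize(Runtime::FunctionForId(...)->entry):
      // the descriptor just records which runtime entry to call on a miss.
      descriptor.Initialize(&CreateArrayLiteralBailout);
      descriptor.miss_handler();  // Simulate the slow path being taken.
      return 0;
    }
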
4135 | 4132 |
(...skipping 563 matching lines...)
4699 | 4696 |
4700 | 4697 |
4701 void ProfileEntryHookStub::EntryHookTrampoline(intptr_t function, | 4698 void ProfileEntryHookStub::EntryHookTrampoline(intptr_t function, |
4702 intptr_t stack_pointer, | 4699 intptr_t stack_pointer, |
4703 Isolate* isolate) { | 4700 Isolate* isolate) { |
4704 FunctionEntryHook entry_hook = isolate->function_entry_hook(); | 4701 FunctionEntryHook entry_hook = isolate->function_entry_hook(); |
4705 DCHECK(entry_hook != NULL); | 4702 DCHECK(entry_hook != NULL); |
4706 entry_hook(function, stack_pointer); | 4703 entry_hook(function, stack_pointer); |
4707 } | 4704 } |
4708 | 4705 |
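
Side note: EntryHookTrampoline simply looks up the FunctionEntryHook registered on the isolate and forwards the called function's address and the stack pointer to it. A small stand-alone sketch of that dispatch shape, with a hypothetical global hook slot standing in for isolate->function_entry_hook():

    #include <cstdint>
    #include <cstdio>

    // Hypothetical mirror of the hook signature: (function address, stack pointer).
    using FunctionEntryHook = void (*)(intptr_t function, intptr_t stack_pointer);

    static FunctionEntryHook g_entry_hook = nullptr;  // stand-in for the isolate's hook slot

    void SetEntryHook(FunctionEntryHook hook) { g_entry_hook = hook; }

    // Analogous to ProfileEntryHookStub::EntryHookTrampoline; the real code DCHECKs
    // that a hook is installed before calling it.
    void EntryHookTrampoline(intptr_t function, intptr_t stack_pointer) {
      if (g_entry_hook != nullptr) g_entry_hook(function, stack_pointer);
    }

    int main() {
      SetEntryHook(+[](intptr_t fn, intptr_t sp) {
        std::printf("entered function at %#zx, sp %#zx\n", (size_t)fn, (size_t)sp);
      });
      EntryHookTrampoline(0x1234, 0x7fff0000);
      return 0;
    }
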
| 4706 void CreateAllocationSiteStub::GenerateAssembly( |
| 4707 CodeStubAssembler* assembler) const { |
| 4708 typedef compiler::Node Node; |
| 4709 Node* size = assembler->IntPtrConstant(AllocationSite::kSize); |
| 4710 Node* site = assembler->Allocate(size, compiler::CodeAssembler::kPretenured); |
| 4711 |
| 4712 // Store the map |
| 4713 Node* map = |
| 4714 assembler->HeapConstant(isolate()->factory()->allocation_site_map()); |
| 4715 assembler->StoreMapNoWriteBarrier(site, map); |
| 4716 |
| 4717 Node* kind = |
| 4718 assembler->SmiConstant(Smi::FromInt(GetInitialFastElementsKind())); |
| 4719 assembler->StoreObjectFieldNoWriteBarrier( |
| 4720 site, AllocationSite::kTransitionInfoOffset, kind); |
| 4721 |
| 4722 // Unlike literals, constructed arrays don't have nested sites |
| 4723 Node* zero = assembler->IntPtrConstant(0); |
| 4724 assembler->StoreObjectFieldNoWriteBarrier( |
| 4725 site, AllocationSite::kNestedSiteOffset, zero); |
| 4726 |
| 4727 // Pretenuring calculation field. |
| 4728 assembler->StoreObjectFieldNoWriteBarrier( |
| 4729 site, AllocationSite::kPretenureDataOffset, zero); |
| 4730 |
| 4731 // Pretenuring memento creation count field. |
| 4732 assembler->StoreObjectFieldNoWriteBarrier( |
| 4733 site, AllocationSite::kPretenureCreateCountOffset, zero); |
| 4734 |
| 4735 // Store an empty fixed array for the code dependency. |
| 4736 Node* empty_fixed_array = |
| 4737 assembler->HeapConstant(isolate()->factory()->empty_fixed_array()); |
| 4738 assembler->StoreObjectFieldNoWriteBarrier( |
| 4739 site, AllocationSite::kDependentCodeOffset, empty_fixed_array); |
| 4740 |
| 4741 // Link the object to the allocation site list |
| 4742 Node* site_list = assembler->ExternalConstant( |
| 4743 ExternalReference::allocation_sites_list_address(isolate())); |
| 4744 Node* next_site = assembler->LoadBufferObject(site_list, 0); |
| 4745 |
| 4746 // TODO(mvstanton): This is a store to a weak pointer, which we may want to |
| 4747 // mark as such in order to skip the write barrier, once we have a unified |
| 4748 // system for weakness. For now we decided to keep it like this because having |
| 4749 // an initial write barrier backed store makes this pointer strong until the |
| 4750 // next GC, and allocation sites are designed to survive several GCs anyway. |
| 4751 assembler->StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site); |
| 4752 assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, |
| 4753 site); |
| 4754 |
| 4755 Node* feedback_vector = assembler->Parameter(Descriptor::kVector); |
| 4756 Node* slot = assembler->Parameter(Descriptor::kSlot); |
| 4757 |
| 4758 assembler->StoreFixedArrayElement(feedback_vector, slot, site, |
| 4759 UPDATE_WRITE_BARRIER, |
| 4760 CodeStubAssembler::SMI_PARAMETERS); |
| 4761 |
| 4762 assembler->Return(site); |
| 4763 } |
| 4764 |
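
Side note: the new CodeStubAssembler version of CreateAllocationSiteStub replaces the per-architecture hand-written stub. It pretenures an AllocationSite-sized object, fills in the map, transition info, nested-site, pretenuring, and dependent-code fields without write barriers (the object is freshly allocated and the stored values are Smis or immortal roots), then links the site onto the isolate's allocation-sites list and records it in the feedback vector slot. A rough, self-contained C++ sketch of that initialization order, using a hypothetical flattened PlainAllocationSite struct rather than the real heap layout:

    #include <cstdint>
    #include <vector>

    // Hypothetical, flattened model of the fields the stub initializes.
    struct PlainAllocationSite {
      const void* map = nullptr;               // allocation_site_map root
      intptr_t transition_info = 0;            // initial fast ElementsKind, as a Smi
      intptr_t nested_site = 0;                // constructed arrays have no nested sites
      intptr_t pretenure_data = 0;             // pretenuring calculation field
      intptr_t pretenure_create_count = 0;     // memento creation count field
      const void* dependent_code = nullptr;    // empty_fixed_array root
      PlainAllocationSite* weak_next = nullptr;  // link into the allocation-sites list
    };

    // Sketch of the stub's work: initialize every field, splice the new site onto
    // the front of the isolate-wide list, then store it into the feedback slot.
    PlainAllocationSite* CreateAllocationSite(
        PlainAllocationSite*& site_list_head,
        std::vector<PlainAllocationSite*>& feedback_vector, size_t slot,
        intptr_t initial_elements_kind, const void* site_map,
        const void* empty_fixed_array) {
      PlainAllocationSite* site = new PlainAllocationSite();  // "pretenured" allocation
      site->map = site_map;
      site->transition_info = initial_elements_kind;
      site->nested_site = 0;
      site->pretenure_data = 0;
      site->pretenure_create_count = 0;
      site->dependent_code = empty_fixed_array;
      site->weak_next = site_list_head;  // weak in V8; kept strong until the next GC here
      site_list_head = site;             // allocation_sites_list now starts at this site
      feedback_vector[slot] = site;      // the real store needs a write barrier
      return site;
    }
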
4709 void ArrayNoArgumentConstructorStub::GenerateAssembly( | 4765 void ArrayNoArgumentConstructorStub::GenerateAssembly( |
4710 CodeStubAssembler* assembler) const { | 4766 CodeStubAssembler* assembler) const { |
4711 typedef compiler::Node Node; | 4767 typedef compiler::Node Node; |
4712 Node* native_context = assembler->LoadObjectField( | 4768 Node* native_context = assembler->LoadObjectField( |
4713 assembler->Parameter(Descriptor::kFunction), JSFunction::kContextOffset); | 4769 assembler->Parameter(Descriptor::kFunction), JSFunction::kContextOffset); |
4714 bool track_allocation_site = | 4770 bool track_allocation_site = |
4715 AllocationSite::GetMode(elements_kind()) == TRACK_ALLOCATION_SITE && | 4771 AllocationSite::GetMode(elements_kind()) == TRACK_ALLOCATION_SITE && |
4716 override_mode() != DISABLE_ALLOCATION_SITES; | 4772 override_mode() != DISABLE_ALLOCATION_SITES; |
4717 Node* allocation_site = | 4773 Node* allocation_site = |
4718 track_allocation_site ? assembler->Parameter(Descriptor::kAllocationSite) | 4774 track_allocation_site ? assembler->Parameter(Descriptor::kAllocationSite) |
(...skipping 148 matching lines...)
4867 if (type->Is(Type::UntaggedPointer())) { | 4923 if (type->Is(Type::UntaggedPointer())) { |
4868 return Representation::External(); | 4924 return Representation::External(); |
4869 } | 4925 } |
4870 | 4926 |
4871 DCHECK(!type->Is(Type::Untagged())); | 4927 DCHECK(!type->Is(Type::Untagged())); |
4872 return Representation::Tagged(); | 4928 return Representation::Tagged(); |
4873 } | 4929 } |
4874 | 4930 |
4875 } // namespace internal | 4931 } // namespace internal |
4876 } // namespace v8 | 4932 } // namespace v8 |