Index: src/code-stubs.cc
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 7ac1eb30ac0474f8985ffa02ef4102b254c68e12..ad7bcff897ec78783a3c3067718acb99767f768f 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -3016,6 +3016,62 @@ void LoadIndexedInterceptorStub::GenerateAssembly(
                              slot, vector);
 }
 
+void FastCloneShallowObjectStub::GenerateAssembly(
+    compiler::CodeStubAssembler* assembler) const {
+  typedef compiler::CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  Label call_runtime(assembler);
+  Node* closure = assembler->Parameter(0);
+  Node* literals_index = assembler->Parameter(1);
+
+  Node* undefined = assembler->UndefinedConstant();
+  Node* literals_array =
+      assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
+  Node* allocation_site = assembler->LoadFixedArrayElementSmiIndex(
+      literals_array, literals_index,
+      LiteralsArray::kFirstLiteralIndex * kPointerSize);
+  Label if_isnotundefined(assembler);
+  assembler->Branch(assembler->WordEqual(allocation_site, undefined),
+                    &call_runtime, &if_isnotundefined);
+  assembler->Bind(&if_isnotundefined);
+
+  Node* boilerplate = assembler->LoadObjectField(
+      allocation_site, AllocationSite::kTransitionInfoOffset);
+
+  int length = this->length();
+  if (length == 0) {
+    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
+  }
+  int size = JSObject::kHeaderSize + length * kPointerSize;
+
+  Node* boilerplate_map = assembler->LoadMap(boilerplate);
+  Node* instance_size = assembler->LoadMapInstanceSize(boilerplate_map);
+  Label if_sizeiscorrect(assembler);
+  Node* size_in_words = assembler->Int32Constant(size >> kPointerSizeLog2);
+  assembler->Branch(assembler->Word32Equal(instance_size, size_in_words),
+                    &if_sizeiscorrect, &call_runtime);
+  assembler->Bind(&if_sizeiscorrect);
+
+  Node* copy = assembler->Allocate(size);
Toon Verwaest
2016/03/30 15:11:01
This trashes the context register if it calls the runtime.
Benedikt Meurer
2016/03/30 17:12:35
This is fine (and true for essentially all TurboFan code).

+
+  for (int i = 0; i < size; i += kPointerSize) {
+    // The Allocate above guarantees that the copy lies in new space. This
+    // allows us to skip write barriers. This is necessary since we may also be
+    // copying unboxed doubles.
+    Node* field = assembler->LoadObjectField(boilerplate, i);
Toon Verwaest
2016/03/30 15:11:01
I guess I should use a different load instruction
Benedikt Meurer
2016/03/30 17:12:35
Yes, something like LoadRawPointer/StoreRawPointer

+    assembler->StoreObjectFieldNoWriteBarrier(copy, i, field);
Toon Verwaest
2016/03/30 15:11:01
And probably the same for storing it...

+  }
+  // TODO(verwaest): Allocate and fill in double boxes.
+  assembler->Return(copy);
+
+  assembler->Bind(&call_runtime);
+  Node* constant_properties = assembler->Parameter(2);
+  Node* flags = assembler->Parameter(3);
+  Node* context = assembler->Parameter(4);
+  assembler->TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
+                             literals_index, constant_properties, flags);
+}
+
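A minimal sketch of the raw copy loop Benedikt suggests above, assuming hypothetical LoadRawPointer/StoreRawPointer helpers on the assembler (only the names come from the review comment; their exact signatures are an assumption, not existing CodeStubAssembler API):

    // Copy each field of the boilerplate as an untagged machine word, so an
    // unboxed double is moved bit-for-bit rather than being interpreted as a
    // tagged pointer. LoadRawPointer/StoreRawPointer are assumed helpers.
    for (int i = 0; i < size; i += kPointerSize) {
      Node* field = assembler->LoadRawPointer(boilerplate, i);
      assembler->StoreRawPointer(copy, i, field);
    }

Because the copy is freshly allocated in new space, skipping the write barrier stays valid for the store half of this loop as well.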
 template<class StateType>
 void HydrogenCodeStub::TraceTransition(StateType from, StateType to) {
   // Note: Although a no-op transition is semantically OK, it is hinting at a
@@ -3162,14 +3218,6 @@ void FastCloneShallowArrayStub::InitializeDescriptor(
 }
 
 
-void FastCloneShallowObjectStub::InitializeDescriptor(
-    CodeStubDescriptor* descriptor) {
-  FastCloneShallowObjectDescriptor call_descriptor(isolate());
-  descriptor->Initialize(
-      Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry);
-}
-
-
 void CreateAllocationSiteStub::InitializeDescriptor(CodeStubDescriptor* d) {}
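The hand-written InitializeDescriptor goes away because the stub now emits its own code through GenerateAssembly instead of being a Hydrogen stub that bails out to the runtime. A sketch of the matching header-side change, assuming the usual TurboFan code stub macros of this vintage (the actual declaration is not part of this hunk):

    // Assumed shape of the declaration in src/code-stubs.h (not shown above):
    class FastCloneShallowObjectStub : public TurboFanCodeStub {
     public:
      // ...
      DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowObject);
      DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowObject, TurboFanCodeStub);
    };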