Index: src/interpreter/interpreter.cc |
diff --git a/src/interpreter/interpreter.cc b/src/interpreter/interpreter.cc |
index 7a38a63e128e025b18a30cc6e6a1cce18835b479..cc14a59d2e57790005aeaf3b4e92a2c36839a01a 100644 |
--- a/src/interpreter/interpreter.cc |
+++ b/src/interpreter/interpreter.cc |
@@ -1868,20 +1868,48 @@ void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { |
// CreateArrayLiteral <element_idx> <literal_idx> <flags> |
// |
-// Creates an array literal for literal index <literal_idx> with flags <flags> |
-// and constant elements in <element_idx>. |
+// Creates an array literal for literal index <literal_idx> with |
+// CreateArrayLiteral flags <flags> and constant elements in <element_idx>. |
void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { |
- Node* index = __ BytecodeOperandIdx(0); |
- Node* constant_elements = __ LoadConstantPoolEntry(index); |
Node* literal_index_raw = __ BytecodeOperandIdx(1); |
Node* literal_index = __ SmiTag(literal_index_raw); |
- Node* flags_raw = __ BytecodeOperandFlag(2); |
- Node* flags = __ SmiTag(flags_raw); |
+ Node* bytecode_flags = __ BytecodeOperandFlag(2); |
Node* closure = __ LoadRegister(Register::function_closure()); |
Node* context = __ GetContext(); |
- Node* result = __ CallRuntime(Runtime::kCreateArrayLiteral, context, closure, |
- literal_index, constant_elements, flags); |
- __ SetAccumulator(result); |
+ |
+ Variable result(assembler, MachineRepresentation::kTagged); |
+ Label fast_shallow_clone(assembler), |
+ call_runtime(assembler, Label::kDeferred), dispatch(assembler); |
+ Node* must_create_with_runtime = |
+ __ BitFieldDecode<CreateArrayLiteralFlags::MustUseRuntimeBit>( |
rmcilroy
2016/09/16 08:52:26
Last time I looked this emitted quite a few instructions.
klaasb
2016/09/19 13:53:58
It still seems to emit a bunch of code (bytecode_flags handling).
Michael Starzinger
2016/09/19 15:15:16
This looks dangerous to me. The version that corre
epertoso
2016/09/20 11:54:33
edi gets copied to esi to avoid modifying its value.
klaasb
2016/09/20 18:01:28
Thanks for the explanation Enrico!
|
+ bytecode_flags); |
+ __ BranchIf(must_create_with_runtime, &call_runtime, &fast_shallow_clone); |
+ |
+ __ Bind(&fast_shallow_clone); |
+ { |
+ DCHECK(FLAG_allocation_site_pretenuring); |
+ result.Bind(FastCloneShallowArrayStub::Generate( |
+ assembler, closure, literal_index, context, TRACK_ALLOCATION_SITE, |
+ &call_runtime)); |
+ __ Goto(&dispatch); |
rmcilroy
2016/09/16 08:52:26
do the dispatch inline for both of these branches
klaasb
2016/09/19 13:53:58
Done.
|
+ } |
+ |
+ __ Bind(&call_runtime); |
+ { |
+ STATIC_ASSERT(CreateArrayLiteralFlags::FlagsBits::kShift == 0); |
+ Node* flags_raw = __ Word32And( |
+ bytecode_flags, |
+ __ Int32Constant(CreateArrayLiteralFlags::FlagsBits::kMask)); |
+ Node* flags = __ SmiTag(flags_raw); |
+ Node* index = __ BytecodeOperandIdx(0); |
+ Node* constant_elements = __ LoadConstantPoolEntry(index); |
rmcilroy
2016/09/16 08:52:26
Hmm, it's interesting that the runtime call needs the constant elements while the stub path does not.
klaasb
2016/09/19 13:53:58
Acknowledged.
|
+ result.Bind(__ CallRuntime(Runtime::kCreateArrayLiteral, context, closure, |
+ literal_index, constant_elements, flags)); |
+ __ Goto(&dispatch); |
+ } |
+ |
+ __ Bind(&dispatch); |
+ __ SetAccumulator(result.value()); |
__ Dispatch(); |
} |