Index: src/code-stub-assembler.cc
diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc
index 376fd6eb3376346d28a89cd5f1365003e641a887..acee571e6869cb33be02cd71a74ec7d8cd308b32 100644
--- a/src/code-stub-assembler.cc
+++ b/src/code-stub-assembler.cc
@@ -884,12 +884,13 @@
}
}

-Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
+compiler::Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
Node* frame_pointer = LoadFramePointer();
return Load(rep, frame_pointer, IntPtrConstant(offset));
}

-Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
+compiler::Node* CodeStubAssembler::LoadFromParentFrame(int offset,
+ MachineType rep) {
Node* frame_pointer = LoadParentFramePointer();
return Load(rep, frame_pointer, IntPtrConstant(offset));
}
@@ -2075,12 +2076,11 @@
Comment("] CopyFixedArrayElements");
}

-void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
- Node* from_index, Node* to_index,
- Node* character_count,
- String::Encoding from_encoding,
- String::Encoding to_encoding,
- ParameterMode mode) {
+void CodeStubAssembler::CopyStringCharacters(
+ compiler::Node* from_string, compiler::Node* to_string,
+ compiler::Node* from_index, compiler::Node* to_index,
+ compiler::Node* character_count, String::Encoding from_encoding,
+ String::Encoding to_encoding, ParameterMode mode) {
bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
DCHECK_IMPLIES(to_one_byte, from_one_byte);
@@ -2239,9 +2239,9 @@
return new_elements;
}

-void CodeStubAssembler::InitializeAllocationMemento(Node* base_allocation,
- int base_allocation_size,
- Node* allocation_site) {
+void CodeStubAssembler::InitializeAllocationMemento(
+ compiler::Node* base_allocation, int base_allocation_size,
+ compiler::Node* allocation_site) {
StoreObjectFieldNoWriteBarrier(
base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
@@ -3442,7 +3442,7 @@
return var_result.value();
}

-Node* CodeStubAssembler::StringFromCodePoint(Node* codepoint,
+Node* CodeStubAssembler::StringFromCodePoint(compiler::Node* codepoint,
UnicodeEncoding encoding) {
Variable var_result(this, MachineRepresentation::kTagged);
var_result.Bind(EmptyStringConstant());
@@ -3520,7 +3520,8 @@
return var_result.value();
}

-Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
+Node* CodeStubAssembler::NumberToString(compiler::Node* context,
+ compiler::Node* argument) {
Variable result(this, MachineRepresentation::kTagged);
Label runtime(this, Label::kDeferred);
Label smi(this);
@@ -3599,6 +3600,9 @@
}

Node* CodeStubAssembler::ToName(Node* context, Node* value) {
+ typedef CodeStubAssembler::Label Label;
+ typedef CodeStubAssembler::Variable Variable;
+
Label end(this);
Variable var_result(this, MachineRepresentation::kTagged);
@@ -4422,10 +4426,11 @@
}
}

-void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
- Node* instance_type,
- Node* unique_name, Label* if_found,
- Label* if_not_found,
+void CodeStubAssembler::TryHasOwnProperty(compiler::Node* object,
+ compiler::Node* map,
+ compiler::Node* instance_type,
+ compiler::Node* unique_name,
+ Label* if_found, Label* if_not_found,
Label* if_bailout) {
Comment("TryHasOwnProperty");
Variable var_meta_storage(this, MachineRepresentation::kTagged);
@@ -5057,10 +5062,10 @@
return var_result.value();
}

-Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
- ElementsKind kind,
- ParameterMode mode,
- int base_size) {
+compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
+ ElementsKind kind,
+ ParameterMode mode,
+ int base_size) {
int element_size_shift = ElementsKindToShiftSize(kind);
int element_size = 1 << element_size_shift;
int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
@@ -5096,16 +5101,16 @@
return IntPtrAddFoldConstants(IntPtrConstant(base_size), shifted_index);
}

-Node* CodeStubAssembler::LoadTypeFeedbackVectorForStub() {
+compiler::Node* CodeStubAssembler::LoadTypeFeedbackVectorForStub() {
Node* function =
LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
return LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
}

-void CodeStubAssembler::UpdateFeedback(Node* feedback,
- Node* type_feedback_vector,
- Node* slot_id) {
+void CodeStubAssembler::UpdateFeedback(compiler::Node* feedback,
+ compiler::Node* type_feedback_vector,
+ compiler::Node* slot_id) {
// This method is used for binary op and compare feedback. These
// vector nodes are initialized with a smi 0, so we can simply OR
// our new feedback in place.
@@ -5118,7 +5123,7 @@
SKIP_WRITE_BARRIER);
}

-Node* CodeStubAssembler::LoadReceiverMap(Node* receiver) {
+Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) {
Variable var_receiver_map(this, MachineRepresentation::kTagged);
Label load_smi_map(this, Label::kDeferred), load_receiver_map(this),
if_result(this);
@@ -5162,7 +5167,7 @@
return var_intptr_key.value();
}

-void CodeStubAssembler::ExtendPropertiesBackingStore(Node* object) {
+void CodeStubAssembler::ExtendPropertiesBackingStore(compiler::Node* object) {
Node* properties = LoadProperties(object);
Node* length = LoadFixedArrayBaseLength(properties);
@@ -5616,11 +5621,9 @@
return new_elements_var.value();
}

-void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
- ElementsKind from_kind,
- ElementsKind to_kind,
- bool is_jsarray,
- Label* bailout) {
+void CodeStubAssembler::TransitionElementsKind(
+ compiler::Node* object, compiler::Node* map, ElementsKind from_kind,
+ ElementsKind to_kind, bool is_jsarray, Label* bailout) {
DCHECK(!IsFastHoleyElementsKind(from_kind) ||
IsFastHoleyElementsKind(to_kind));
if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
@@ -5883,10 +5886,11 @@
}

void CodeStubAssembler::BuildFastFixedArrayForEach(
- Node* fixed_array, ElementsKind kind, Node* first_element_inclusive,
- Node* last_element_exclusive,
- std::function<void(CodeStubAssembler* assembler, Node* fixed_array,
- Node* offset)>
+ compiler::Node* fixed_array, ElementsKind kind,
+ compiler::Node* first_element_inclusive,
+ compiler::Node* last_element_exclusive,
+ std::function<void(CodeStubAssembler* assembler,
+ compiler::Node* fixed_array, compiler::Node* offset)>
body,
ParameterMode mode, ForEachDirection direction) {
STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
@@ -5939,8 +5943,10 @@
}

void CodeStubAssembler::BranchIfNumericRelationalComparison(
- RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
- Label* if_false) {
+ RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs,
+ Label* if_true, Label* if_false) {
+ typedef compiler::Node Node;
+
Label end(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -6042,16 +6048,19 @@
}
}

-void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs,
+void CodeStubAssembler::GotoUnlessNumberLessThan(compiler::Node* lhs,
+ compiler::Node* rhs,
Label* if_false) {
Label if_true(this);
BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false);
Bind(&if_true);
}

-Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode,
- Node* lhs, Node* rhs,
- Node* context) {
+compiler::Node* CodeStubAssembler::RelationalComparison(
+ RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs,
+ compiler::Node* context) {
+ typedef compiler::Node Node;
+
Label return_true(this), return_false(this), end(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -6372,7 +6381,7 @@
namespace {

-void GenerateEqual_Same(CodeStubAssembler* assembler, Node* value,
+void GenerateEqual_Same(CodeStubAssembler* assembler, compiler::Node* value,
CodeStubAssembler::Label* if_equal,
CodeStubAssembler::Label* if_notequal) {
// In case of abstract or strict equality checks, we need additional checks
@@ -6382,6 +6391,7 @@
// seems to be what is tested in the current SIMD.js testsuite.
typedef CodeStubAssembler::Label Label;
+ typedef compiler::Node Node;
// Check if {value} is a Smi or a HeapObject.
Label if_valueissmi(assembler), if_valueisnotsmi(assembler);
@@ -6416,9 +6426,9 @@
}

void GenerateEqual_Simd128Value_HeapObject(
- CodeStubAssembler* assembler, Node* lhs, Node* lhs_map, Node* rhs,
- Node* rhs_map, CodeStubAssembler::Label* if_equal,
- CodeStubAssembler::Label* if_notequal) {
+ CodeStubAssembler* assembler, compiler::Node* lhs, compiler::Node* lhs_map,
+ compiler::Node* rhs, compiler::Node* rhs_map,
+ CodeStubAssembler::Label* if_equal, CodeStubAssembler::Label* if_notequal) {
assembler->BranchIfSimd128Equal(lhs, lhs_map, rhs, rhs_map, if_equal,
if_notequal);
}
@@ -6426,12 +6436,14 @@
} // namespace

// ES6 section 7.2.12 Abstract Equality Comparison
-Node* CodeStubAssembler::Equal(ResultMode mode, Node* lhs, Node* rhs,
- Node* context) {
+compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
+ compiler::Node* rhs,
+ compiler::Node* context) {
// This is a slightly optimized version of Object::Equals represented as
// scheduled TurboFan graph utilizing the CodeStubAssembler. Whenever you
// change something functionality wise in here, remember to update the
// Object::Equals method as well.
+ typedef compiler::Node Node;
Label if_equal(this), if_notequal(this),
do_rhsstringtonumber(this, Label::kDeferred), end(this);
@@ -6925,8 +6937,10 @@
return result.value();
}

-Node* CodeStubAssembler::StrictEqual(ResultMode mode, Node* lhs, Node* rhs,
- Node* context) {
+compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
+ compiler::Node* lhs,
+ compiler::Node* rhs,
+ compiler::Node* context) {
// Here's pseudo-code for the algorithm below in case of kDontNegateResult
// mode; for kNegateResult mode we properly negate the result.
//
@@ -6975,6 +6989,8 @@
// }
// }
+ typedef compiler::Node Node;
+
Label if_equal(this), if_notequal(this), end(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -7176,7 +7192,9 @@
// ECMA#sec-samevalue
// This algorithm differs from the Strict Equality Comparison Algorithm in its
// treatment of signed zeroes and NaNs.
-Node* CodeStubAssembler::SameValue(Node* lhs, Node* rhs, Node* context) {
+compiler::Node* CodeStubAssembler::SameValue(compiler::Node* lhs,
+ compiler::Node* rhs,
+ compiler::Node* context) {
Variable var_result(this, MachineType::PointerRepresentation());
Label strict_equal(this), out(this);
@@ -7260,7 +7278,9 @@
return var_result.value();
}

-Node* CodeStubAssembler::ForInFilter(Node* key, Node* object, Node* context) {
+compiler::Node* CodeStubAssembler::ForInFilter(compiler::Node* key,
+ compiler::Node* object,
+ compiler::Node* context) {
Label return_undefined(this, Label::kDeferred), return_to_name(this),
end(this);
@@ -7288,9 +7308,13 @@
return var_result.value();
}

-Node* CodeStubAssembler::HasProperty(
- Node* object, Node* key, Node* context,
+compiler::Node* CodeStubAssembler::HasProperty(
+ compiler::Node* object, compiler::Node* key, compiler::Node* context,
Runtime::FunctionId fallback_runtime_function_id) {
+ typedef compiler::Node Node;
+ typedef CodeStubAssembler::Label Label;
+ typedef CodeStubAssembler::Variable Variable;
+
Label call_runtime(this, Label::kDeferred), return_true(this),
return_false(this), end(this);
@@ -7338,7 +7362,8 @@
return result.value();
}

-Node* CodeStubAssembler::Typeof(Node* value, Node* context) {
+compiler::Node* CodeStubAssembler::Typeof(compiler::Node* value,
+ compiler::Node* context) {
Variable result_var(this, MachineRepresentation::kTagged);
Label return_number(this, Label::kDeferred), if_oddball(this),
@@ -7431,8 +7456,9 @@
return result_var.value();
}

-Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
- Node* context) {
+compiler::Node* CodeStubAssembler::InstanceOf(compiler::Node* object,
+ compiler::Node* callable,
+ compiler::Node* context) {
Label return_runtime(this, Label::kDeferred), end(this);
Variable result(this, MachineRepresentation::kTagged);
@@ -7462,7 +7488,7 @@
return result.value();
}

-Node* CodeStubAssembler::NumberInc(Node* value) {
+compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) {
Variable var_result(this, MachineRepresentation::kTagged),
var_finc_value(this, MachineRepresentation::kFloat64);
Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
@@ -7514,9 +7540,9 @@
return var_result.value();
}

-Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
- Node* array_type, Node* context,
- IterationKind mode) {
+compiler::Node* CodeStubAssembler::CreateArrayIterator(
+ compiler::Node* array, compiler::Node* array_map,
+ compiler::Node* array_type, compiler::Node* context, IterationKind mode) {
int kBaseMapIndex = 0;
switch (mode) {
case IterationKind::kKeys:
@@ -7702,8 +7728,8 @@
return var_result.value();
}

-Node* CodeStubAssembler::AllocateJSArrayIterator(Node* array, Node* array_map,
- Node* map) {
+compiler::Node* CodeStubAssembler::AllocateJSArrayIterator(
+ compiler::Node* array, compiler::Node* array_map, compiler::Node* map) {
Node* iterator = Allocate(JSArrayIterator::kSize);
StoreMapNoWriteBarrier(iterator, map);
StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset,
@@ -7719,7 +7745,7 @@
return iterator;
}

-Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
+compiler::Node* CodeStubAssembler::IsDetachedBuffer(compiler::Node* buffer) {
CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
Node* buffer_bit_field = LoadObjectField(
@@ -7730,13 +7756,14 @@
Int32Constant(0));
}

-CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler, Node* argc,
+CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler,
+ compiler::Node* argc,
CodeStubAssembler::ParameterMode mode)
: assembler_(assembler),
argc_(argc),
arguments_(nullptr),
fp_(assembler->LoadFramePointer()) {
- Node* offset = assembler->ElementOffsetFromIndex(
+ compiler::Node* offset = assembler->ElementOffsetFromIndex(
argc_, FAST_ELEMENTS, mode,
(StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
arguments_ = assembler_->IntPtrAddFoldConstants(fp_, offset);
@@ -7747,13 +7774,14 @@
}
}

-Node* CodeStubArguments::GetReceiver() {
+compiler::Node* CodeStubArguments::GetReceiver() {
return assembler_->Load(MachineType::AnyTagged(), arguments_,
assembler_->IntPtrConstant(kPointerSize));
}

-Node* CodeStubArguments::AtIndex(Node* index,
- CodeStubAssembler::ParameterMode mode) {
+compiler::Node* CodeStubArguments::AtIndex(
+ compiler::Node* index, CodeStubAssembler::ParameterMode mode) {
+ typedef compiler::Node Node;
Node* negated_index = assembler_->IntPtrSubFoldConstants(
assembler_->IntPtrOrSmiConstant(0, mode), index);
Node* offset =
@@ -7761,13 +7789,13 @@
return assembler_->Load(MachineType::AnyTagged(), arguments_, offset);
}

-Node* CodeStubArguments::AtIndex(int index) {
+compiler::Node* CodeStubArguments::AtIndex(int index) {
return AtIndex(assembler_->IntPtrConstant(index));
}

void CodeStubArguments::ForEach(const CodeStubAssembler::VariableList& vars,
CodeStubArguments::ForEachBodyFunction body,
- Node* first, Node* last,
+ compiler::Node* first, compiler::Node* last,
CodeStubAssembler::ParameterMode mode) {
assembler_->Comment("CodeStubArguments::ForEach");
DCHECK_IMPLIES(first == nullptr || last == nullptr,
@@ -7778,33 +7806,35 @@
if (last == nullptr) {
last = argc_;
}
- Node* start = assembler_->IntPtrSubFoldConstants(
+ compiler::Node* start = assembler_->IntPtrSubFoldConstants(
arguments_,
assembler_->ElementOffsetFromIndex(first, FAST_ELEMENTS, mode));
- Node* end = assembler_->IntPtrSubFoldConstants(
+ compiler::Node* end = assembler_->IntPtrSubFoldConstants(
arguments_,
assembler_->ElementOffsetFromIndex(last, FAST_ELEMENTS, mode));
assembler_->BuildFastLoop(
vars, MachineType::PointerRepresentation(), start, end,
- [body](CodeStubAssembler* assembler, Node* current) {
+ [body](CodeStubAssembler* assembler, compiler::Node* current) {
Node* arg = assembler->Load(MachineType::AnyTagged(), current);
body(assembler, arg);
},
-kPointerSize, CodeStubAssembler::IndexAdvanceMode::kPost);
}

-void CodeStubArguments::PopAndReturn(Node* value) {
+void CodeStubArguments::PopAndReturn(compiler::Node* value) {
assembler_->PopAndReturn(
assembler_->IntPtrAddFoldConstants(argc_, assembler_->IntPtrConstant(1)),
value);
}

-Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
+compiler::Node* CodeStubAssembler::IsFastElementsKind(
+ compiler::Node* elements_kind) {
return Uint32LessThanOrEqual(elements_kind,
Int32Constant(LAST_FAST_ELEMENTS_KIND));
}

-Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
+compiler::Node* CodeStubAssembler::IsHoleyFastElementsKind(
+ compiler::Node* elements_kind) {
CSA_ASSERT(this, IsFastElementsKind(elements_kind));
STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));