| Index: src/hydrogen.cc
|
| diff --git a/src/hydrogen.cc b/src/hydrogen.cc
|
| index 08b05509d5c6d08eaf189435305fba542252e585..5fd55f833cf1965c3ce87fef5c697b5561c1d9f0 100644
|
| --- a/src/hydrogen.cc
|
| +++ b/src/hydrogen.cc
|
| @@ -1,4 +1,4 @@
|
| -// Copyright 2012 the V8 project authors. All rights reserved.
|
| +// Copyright 2013 the V8 project authors. All rights reserved.
|
| // Redistribution and use in source and binary forms, with or without
|
| // modification, are permitted provided that the following conditions are
|
| // met:
|
| @@ -26,7 +26,6 @@
|
| // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
| #include "hydrogen.h"
|
| -#include "hydrogen-gvn.h"
|
|
|
| #include <algorithm>
|
|
|
| @@ -34,7 +33,19 @@
|
| #include "codegen.h"
|
| #include "full-codegen.h"
|
| #include "hashmap.h"
|
| +#include "hydrogen-bce.h"
|
| +#include "hydrogen-dce.h"
|
| #include "hydrogen-environment-liveness.h"
|
| +#include "hydrogen-escape-analysis.h"
|
| +#include "hydrogen-infer-representation.h"
|
| +#include "hydrogen-infer-types.h"
|
| +#include "hydrogen-gvn.h"
|
| +#include "hydrogen-osr.h"
|
| +#include "hydrogen-range-analysis.h"
|
| +#include "hydrogen-redundant-phi.h"
|
| +#include "hydrogen-representation-changes.h"
|
| +#include "hydrogen-sce.h"
|
| +#include "hydrogen-uint32-analysis.h"
|
| #include "lithium-allocator.h"
|
| #include "parser.h"
|
| #include "scopeinfo.h"
|
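Note: the patch adds one include per optimization phase (bounds-check elimination, dead-code elimination, escape analysis, representation and type inference, GVN, OSR, range analysis, redundant-phi elimination, representation changes, stack-check elimination, uint32 analysis). The large deletions further down in this file — HRangeAnalysis, HStackCheckEliminator, HInferRepresentation, Uint32Analysis, EliminateRedundantPhis, InferTypes, InsertRepresentationChanges — line up with these headers, so that code has presumably been moved into the per-phase files rather than dropped. For orientation only, the general shape of such a phase, mirroring the HRangeAnalysis class removed below (the relocated version may have been reworked):

    // Sketch of a per-phase class as it appears in the code removed below;
    // the version declared in hydrogen-range-analysis.h may differ.
    class HRangeAnalysis BASE_EMBEDDED {
     public:
      explicit HRangeAnalysis(HGraph* graph);
      void Analyze();   // walks the dominator tree, attaching ranges to values
    };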
| @@ -528,7 +539,7 @@ class ReachabilityAnalyzer BASE_EMBEDDED {
|
|
|
|
|
| void HGraph::Verify(bool do_full_verify) const {
|
| - Heap::RelocationLock(isolate()->heap());
|
| + Heap::RelocationLock relocation_lock(isolate()->heap());
|
| AllowHandleDereference allow_deref;
|
| AllowDeferredHandleDereference allow_deferred_deref;
|
| for (int i = 0; i < blocks_.length(); i++) {
|
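Note: the one-line change above is a real bug fix, not a rename. The old statement constructed an unnamed Heap::RelocationLock temporary, which is destroyed at the end of that same statement, so the lock never covered the verification loop that follows; naming the object keeps it alive until the end of Verify(). A minimal, self-contained illustration of the pitfall (ScopedLock is a stand-in type, not V8's):

    #include <cstdio>

    // RAII guard used only to show when the lock is held.
    struct ScopedLock {
      ScopedLock()  { std::puts("lock acquired"); }
      ~ScopedLock() { std::puts("lock released"); }
    };

    int main() {
      ScopedLock();      // unnamed temporary: released before the next line runs
      std::puts("this work is NOT protected");

      ScopedLock lock;   // named object: held until the end of main()
      std::puts("this work IS protected");
    }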
| @@ -646,6 +657,7 @@ HConstant* HGraph::GetConstant##Name() { \
|
| htype, \
|
| false, \
|
| true, \
|
| + false, \
|
| boolean_value); \
|
| constant->InsertAfter(GetConstantUndefined()); \
|
| constant_##name##_.set(constant); \
|
| @@ -668,6 +680,19 @@ HConstant* HGraph::GetInvalidContext() {
|
| }
|
|
|
|
|
| +bool HGraph::IsStandardConstant(HConstant* constant) {
|
| + if (constant == GetConstantUndefined()) return true;
|
| + if (constant == GetConstant0()) return true;
|
| + if (constant == GetConstant1()) return true;
|
| + if (constant == GetConstantMinus1()) return true;
|
| + if (constant == GetConstantTrue()) return true;
|
| + if (constant == GetConstantFalse()) return true;
|
| + if (constant == GetConstantHole()) return true;
|
| + if (constant == GetConstantNull()) return true;
|
| + return false;
|
| +}
|
| +
|
| +
|
| HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder, int position)
|
| : builder_(builder),
|
| position_(position),
|
| @@ -708,26 +733,6 @@ HGraphBuilder::IfBuilder::IfBuilder(
|
| }
|
|
|
|
|
| -HInstruction* HGraphBuilder::IfBuilder::IfCompare(
|
| - HValue* left,
|
| - HValue* right,
|
| - Token::Value token) {
|
| - HCompareIDAndBranch* compare =
|
| - new(zone()) HCompareIDAndBranch(left, right, token);
|
| - AddCompare(compare);
|
| - return compare;
|
| -}
|
| -
|
| -
|
| -HInstruction* HGraphBuilder::IfBuilder::IfCompareMap(HValue* left,
|
| - Handle<Map> map) {
|
| - HCompareMap* compare =
|
| - new(zone()) HCompareMap(left, map, first_true_block_, first_false_block_);
|
| - AddCompare(compare);
|
| - return compare;
|
| -}
|
| -
|
| -
|
| void HGraphBuilder::IfBuilder::AddCompare(HControlInstruction* compare) {
|
| if (split_edge_merge_block_ != NULL) {
|
| HEnvironment* env = first_false_block_->last_environment();
|
| @@ -808,8 +813,8 @@ void HGraphBuilder::IfBuilder::Then() {
|
| ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
|
| boolean_type.Add(ToBooleanStub::BOOLEAN);
|
| HBranch* branch =
|
| - new(zone()) HBranch(constant_false, first_true_block_,
|
| - first_false_block_, boolean_type);
|
| + new(zone()) HBranch(constant_false, boolean_type, first_true_block_,
|
| + first_false_block_);
|
| builder_->current_block()->Finish(branch);
|
| }
|
| builder_->set_current_block(first_true_block_);
|
| @@ -913,8 +918,8 @@ HValue* HGraphBuilder::LoopBuilder::BeginBody(
|
| body_env->Pop();
|
|
|
| builder_->set_current_block(header_block_);
|
| - HCompareIDAndBranch* compare =
|
| - new(zone()) HCompareIDAndBranch(phi_, terminating, token);
|
| + HCompareNumericAndBranch* compare =
|
| + new(zone()) HCompareNumericAndBranch(phi_, terminating, token);
|
| compare->SetSuccessorAt(0, body_block_);
|
| compare->SetSuccessorAt(1, exit_block_);
|
| builder_->current_block()->Finish(compare);
|
| @@ -991,18 +996,10 @@ void HGraphBuilder::AddSimulate(BailoutId id,
|
| }
|
|
|
|
|
| -HBoundsCheck* HGraphBuilder::AddBoundsCheck(HValue* index, HValue* length) {
|
| - HBoundsCheck* result = new(graph()->zone()) HBoundsCheck(index, length);
|
| - AddInstruction(result);
|
| - return result;
|
| -}
|
| -
|
| -
|
| HReturn* HGraphBuilder::AddReturn(HValue* value) {
|
| HValue* context = environment()->LookupContext();
|
| int num_parameters = graph()->info()->num_parameters();
|
| - HValue* params = AddInstruction(new(graph()->zone())
|
| - HConstant(num_parameters));
|
| + HValue* params = Add<HConstant>(num_parameters);
|
| HReturn* return_instruction = new(graph()->zone())
|
| HReturn(value, context, params);
|
| current_block()->FinishExit(return_instruction);
|
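Note: this hunk shows the mechanical refactoring that recurs throughout the patch: one-off wrappers such as AddBoundsCheck() are deleted, and ad-hoc AddInstruction(new(zone()) HFoo(args)) calls become Add<HFoo>(args). The same templating appears on IfBuilder, where IfCompare()/IfCompareMap() are replaced by If<HCompareNumericAndBranch>(...) and If<HCompareMap>(...) later in the patch. Conceptually the helper just folds allocation and AddInstruction together; the sketch below is a guess at its shape (the real declaration lives in hydrogen.h and, given the C++03 code base, is more likely a family of fixed-arity overloads than this single template):

    // Hypothetical shape of the Add<T> helper -- illustration only.
    template<class I, class P1>
    I* HGraphBuilder::Add(P1 p1) {
      return static_cast<I*>(AddInstruction(new(zone()) I(p1)));
    }
    // so that
    //   AddInstruction(new(graph()->zone()) HConstant(num_parameters));
    // becomes
    //   Add<HConstant>(num_parameters);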
| @@ -1010,6 +1007,17 @@ HReturn* HGraphBuilder::AddReturn(HValue* value) {
|
| }
|
|
|
|
|
| +void HGraphBuilder::AddSoftDeoptimize(SoftDeoptimizeMode mode) {
|
| + isolate()->counters()->soft_deopts_requested()->Increment();
|
| + if (FLAG_always_opt && mode == CAN_OMIT_SOFT_DEOPT) return;
|
| + if (current_block()->IsDeoptimizing()) return;
|
| + Add<HSoftDeoptimize>();
|
| + isolate()->counters()->soft_deopts_inserted()->Increment();
|
| + current_block()->MarkAsDeoptimizing();
|
| + graph()->set_has_soft_deoptimize(true);
|
| +}
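Note: the new AddSoftDeoptimize() is idempotent per block (it returns early when the block is already marked as deoptimizing) and, when --always-opt is set, it drops requests made with CAN_OMIT_SOFT_DEOPT; the two counters make "requested" and "actually inserted" soft deopts separately observable. Callers later in this patch (e.g. BuildUnaryMathOp) use a bare AddSoftDeoptimize(), which implies a default mode whose name is not visible here:

    // Hypothetical call sites; only CAN_OMIT_SOFT_DEOPT is named in this patch.
    AddSoftDeoptimize();                     // inserted unless the block already deoptimizes
    AddSoftDeoptimize(CAN_OMIT_SOFT_DEOPT);  // additionally skipped under FLAG_always_opt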
|
| +
|
| +
|
| HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
|
| HBasicBlock* b = graph()->CreateBasicBlock();
|
| b->SetInitialEnvironment(env);
|
| @@ -1028,9 +1036,7 @@ HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
|
|
|
| HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
|
| if (obj->type().IsHeapObject()) return obj;
|
| - HCheckHeapObject* check = new(zone()) HCheckHeapObject(obj);
|
| - AddInstruction(check);
|
| - return check;
|
| + return Add<HCheckHeapObject>(obj);
|
| }
|
|
|
|
|
| @@ -1054,7 +1060,7 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
|
| ASSERT(val != NULL);
|
| switch (elements_kind) {
|
| case EXTERNAL_PIXEL_ELEMENTS: {
|
| - val = AddInstruction(new(zone) HClampToUint8(val));
|
| + val = Add<HClampToUint8>(val);
|
| break;
|
| }
|
| case EXTERNAL_BYTE_ELEMENTS:
|
| @@ -1138,15 +1144,15 @@ HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
|
| Zone* zone = this->zone();
|
| IfBuilder length_checker(this);
|
|
|
| - length_checker.IfCompare(length, key, Token::EQ);
|
| + length_checker.If<HCompareNumericAndBranch>(length, key, Token::EQ);
|
| length_checker.Then();
|
|
|
| - HValue* current_capacity =
|
| - AddInstruction(new(zone) HFixedArrayBaseLength(elements));
|
| + HValue* current_capacity = AddLoadFixedArrayLength(elements);
|
|
|
| IfBuilder capacity_checker(this);
|
|
|
| - capacity_checker.IfCompare(length, current_capacity, Token::EQ);
|
| + capacity_checker.If<HCompareNumericAndBranch>(length, current_capacity,
|
| + Token::EQ);
|
| capacity_checker.Then();
|
|
|
| HValue* context = environment()->LookupContext();
|
| @@ -1177,7 +1183,7 @@ HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
|
|
|
| length_checker.Else();
|
|
|
| - AddBoundsCheck(key, length);
|
| + Add<HBoundsCheck>(key, length);
|
| environment()->Push(elements);
|
|
|
| length_checker.End();
|
| @@ -1190,17 +1196,14 @@ HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
|
| HValue* elements,
|
| ElementsKind kind,
|
| HValue* length) {
|
| - Zone* zone = this->zone();
|
| - Heap* heap = isolate()->heap();
|
| + Factory* factory = isolate()->factory();
|
|
|
| IfBuilder cow_checker(this);
|
|
|
| - cow_checker.IfCompareMap(elements,
|
| - Handle<Map>(heap->fixed_cow_array_map()));
|
| + cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
|
| cow_checker.Then();
|
|
|
| - HValue* capacity =
|
| - AddInstruction(new(zone) HFixedArrayBaseLength(elements));
|
| + HValue* capacity = AddLoadFixedArrayLength(elements);
|
|
|
| HValue* new_elements = BuildGrowElementsCapacity(object, elements,
|
| kind, length, capacity);
|
| @@ -1255,22 +1258,21 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
|
| if (is_js_array) {
|
| length = AddLoad(object, HObjectAccess::ForArrayLength(), mapcheck,
|
| Representation::Smi());
|
| - length->set_type(HType::Smi());
|
| } else {
|
| - length = AddInstruction(new(zone) HFixedArrayBaseLength(elements));
|
| + length = AddLoadFixedArrayLength(elements);
|
| }
|
| + length->set_type(HType::Smi());
|
| HValue* checked_key = NULL;
|
| if (IsExternalArrayElementsKind(elements_kind)) {
|
| if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
|
| NoObservableSideEffectsScope no_effects(this);
|
| HLoadExternalArrayPointer* external_elements =
|
| - new(zone) HLoadExternalArrayPointer(elements);
|
| - AddInstruction(external_elements);
|
| + Add<HLoadExternalArrayPointer>(elements);
|
| IfBuilder length_checker(this);
|
| - length_checker.IfCompare(key, length, Token::LT);
|
| + length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
|
| length_checker.Then();
|
| IfBuilder negative_checker(this);
|
| - HValue* bounds_check = negative_checker.IfCompare(
|
| + HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
|
| key, graph()->GetConstant0(), Token::GTE);
|
| negative_checker.Then();
|
| HInstruction* result = BuildExternalArrayElementAccess(
|
| @@ -1282,10 +1284,9 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
|
| return result;
|
| } else {
|
| ASSERT(store_mode == STANDARD_STORE);
|
| - checked_key = AddBoundsCheck(key, length);
|
| + checked_key = Add<HBoundsCheck>(key, length);
|
| HLoadExternalArrayPointer* external_elements =
|
| - new(zone) HLoadExternalArrayPointer(elements);
|
| - AddInstruction(external_elements);
|
| + Add<HLoadExternalArrayPointer>(elements);
|
| return AddInstruction(BuildExternalArrayElementAccess(
|
| external_elements, checked_key, val, mapcheck,
|
| elements_kind, is_store));
|
| @@ -1300,8 +1301,7 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
|
| // deopt, leaving the backing store in an invalid state.
|
| if (is_store && IsFastSmiElementsKind(elements_kind) &&
|
| !val->type().IsSmi()) {
|
| - val = AddInstruction(new(zone) HForceRepresentation(
|
| - val, Representation::Smi()));
|
| + val = Add<HForceRepresentation>(val, Representation::Smi());
|
| }
|
|
|
| if (IsGrowStoreMode(store_mode)) {
|
| @@ -1310,7 +1310,7 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
|
| length, key, is_js_array);
|
| checked_key = key;
|
| } else {
|
| - checked_key = AddBoundsCheck(key, length);
|
| + checked_key = Add<HBoundsCheck>(key, length);
|
|
|
| if (is_store && (fast_elements || fast_smi_only_elements)) {
|
| if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
|
| @@ -1339,14 +1339,12 @@ HValue* HGraphBuilder::BuildAllocateElements(HValue* context,
|
|
|
| int elements_size = IsFastDoubleElementsKind(kind)
|
| ? kDoubleSize : kPointerSize;
|
| - HConstant* elements_size_value = new(zone) HConstant(elements_size);
|
| - AddInstruction(elements_size_value);
|
| + HConstant* elements_size_value = Add<HConstant>(elements_size);
|
| HValue* mul = AddInstruction(
|
| HMul::New(zone, context, capacity, elements_size_value));
|
| mul->ClearFlag(HValue::kCanOverflow);
|
|
|
| - HConstant* header_size = new(zone) HConstant(FixedArray::kHeaderSize);
|
| - AddInstruction(header_size);
|
| + HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
|
| HValue* total_size = AddInstruction(
|
| HAdd::New(zone, context, mul, header_size));
|
| total_size->ClearFlag(HValue::kCanOverflow);
|
| @@ -1364,10 +1362,7 @@ HValue* HGraphBuilder::BuildAllocateElements(HValue* context,
|
| }
|
| }
|
|
|
| - HValue* elements =
|
| - AddInstruction(new(zone) HAllocate(context, total_size,
|
| - HType::JSArray(), flags));
|
| - return elements;
|
| + return Add<HAllocate>(context, total_size, HType::JSArray(), flags);
|
| }
|
|
|
|
|
| @@ -1406,8 +1401,7 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
|
| AddStore(array, HObjectAccess::ForMap(), array_map);
|
|
|
| HConstant* empty_fixed_array =
|
| - new(zone()) HConstant(isolate()->factory()->empty_fixed_array());
|
| - AddInstruction(empty_fixed_array);
|
| + Add<HConstant>(isolate()->factory()->empty_fixed_array());
|
|
|
| HObjectAccess access = HObjectAccess::ForPropertiesPointer();
|
| AddStore(array, access, empty_fixed_array);
|
| @@ -1424,10 +1418,8 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
|
| elements_location += AllocationSiteInfo::kSize;
|
| }
|
|
|
| - HInnerAllocatedObject* elements = new(zone()) HInnerAllocatedObject(
|
| - array, elements_location);
|
| - AddInstruction(elements);
|
| -
|
| + HInnerAllocatedObject* elements =
|
| + Add<HInnerAllocatedObject>(array, elements_location);
|
| AddStore(array, HObjectAccess::ForElementsPointer(), elements);
|
| return elements;
|
| }
|
| @@ -1439,6 +1431,14 @@ HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
|
| }
|
|
|
|
|
| +HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
|
| + HLoadNamedField* instr = AddLoad(object, HObjectAccess::ForFixedArrayLength(),
|
| + NULL, Representation::Smi());
|
| + instr->set_type(HType::Smi());
|
| + return instr;
|
| +}
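Note: AddLoadFixedArrayLength() replaces the dedicated HFixedArrayBaseLength instruction used at several sites earlier in this patch; the length becomes an ordinary Smi-representation field load (HObjectAccess::ForFixedArrayLength()) tagged with HType::Smi(), presumably so it is handled by the generic named-field load machinery like any other access. The call-site change is a one-liner:

    // Before (removed above):
    HValue* capacity = AddInstruction(new(zone) HFixedArrayBaseLength(elements));
    // After:
    HValue* capacity = AddLoadFixedArrayLength(elements);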
|
| +
|
| +
|
| HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* context,
|
| HValue* old_capacity) {
|
| Zone* zone = this->zone();
|
| @@ -1451,7 +1451,7 @@ HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* context,
|
| HAdd::New(zone, context, half_old_capacity, old_capacity));
|
| new_capacity->ClearFlag(HValue::kCanOverflow);
|
|
|
| - HValue* min_growth = AddInstruction(new(zone) HConstant(16));
|
| + HValue* min_growth = Add<HConstant>(16);
|
|
|
| new_capacity = AddInstruction(
|
| HAdd::New(zone, context, new_capacity, min_growth));
|
| @@ -1462,17 +1462,15 @@ HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* context,
|
|
|
|
|
| void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) {
|
| - Zone* zone = this->zone();
|
| Heap* heap = isolate()->heap();
|
| int element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize
|
| : kPointerSize;
|
| int max_size = heap->MaxRegularSpaceAllocationSize() / element_size;
|
| max_size -= JSArray::kSize / element_size;
|
| - HConstant* max_size_constant = new(zone) HConstant(max_size);
|
| - AddInstruction(max_size_constant);
|
| + HConstant* max_size_constant = Add<HConstant>(max_size);
|
| // Since we're forcing Integer32 representation for this HBoundsCheck,
|
| // there's no need to Smi-check the index.
|
| - AddInstruction(new(zone) HBoundsCheck(length, max_size_constant));
|
| + Add<HBoundsCheck>(length, max_size_constant);
|
| }
|
|
|
|
|
| @@ -1508,10 +1506,9 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* context,
|
| Factory* factory = isolate()->factory();
|
|
|
| double nan_double = FixedDoubleArray::hole_nan_as_double();
|
| - Zone* zone = this->zone();
|
| HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
|
| - ? AddInstruction(new(zone) HConstant(factory->the_hole_value()))
|
| - : AddInstruction(new(zone) HConstant(nan_double));
|
| + ? Add<HConstant>(factory->the_hole_value())
|
| + : Add<HConstant>(nan_double);
|
|
|
| // Special loop unfolding case
|
| static const int kLoopUnfoldLimit = 4;
|
| @@ -1538,15 +1535,15 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* context,
|
|
|
| if (unfold_loop) {
|
| for (int i = 0; i < initial_capacity; i++) {
|
| - HInstruction* key = AddInstruction(new(zone) HConstant(i));
|
| - AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
|
| + HInstruction* key = Add<HConstant>(i);
|
| + Add<HStoreKeyed>(elements, key, hole, elements_kind);
|
| }
|
| } else {
|
| LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
|
|
|
| HValue* key = builder.BeginBody(from, to, Token::LT);
|
|
|
| - AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
|
| + Add<HStoreKeyed>(elements, key, hole, elements_kind);
|
|
|
| builder.EndBody();
|
| }
|
| @@ -1576,15 +1573,15 @@ void HGraphBuilder::BuildCopyElements(HValue* context,
|
|
|
| HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
|
|
|
| - HValue* element =
|
| - AddInstruction(new(zone()) HLoadKeyed(from_elements, key, NULL,
|
| - from_elements_kind,
|
| - ALLOW_RETURN_HOLE));
|
| + HValue* element = Add<HLoadKeyed>(from_elements, key,
|
| + static_cast<HValue*>(NULL),
|
| + from_elements_kind,
|
| + ALLOW_RETURN_HOLE);
|
|
|
| ElementsKind holey_kind = IsFastSmiElementsKind(to_elements_kind)
|
| ? FAST_HOLEY_ELEMENTS : to_elements_kind;
|
| - HInstruction* holey_store = AddInstruction(
|
| - new(zone()) HStoreKeyed(to_elements, key, element, holey_kind));
|
| + HInstruction* holey_store = Add<HStoreKeyed>(to_elements, key,
|
| + element, holey_kind);
|
| // Allow NaN hole values to be converted to their tagged counterparts.
|
| if (IsFastHoleyElementsKind(to_elements_kind)) {
|
| holey_store->SetFlag(HValue::kAllowUndefinedAsNaN);
|
| @@ -1602,11 +1599,10 @@ void HGraphBuilder::BuildCopyElements(HValue* context,
|
|
|
| HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
|
| HValue* boilerplate,
|
| + HValue* allocation_site,
|
| AllocationSiteMode mode,
|
| ElementsKind kind,
|
| int length) {
|
| - Zone* zone = this->zone();
|
| -
|
| NoObservableSideEffectsScope no_effects(this);
|
|
|
| // All sizes here are multiples of kPointerSize.
|
| @@ -1624,12 +1620,11 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
|
| HAllocate::Flags allocate_flags = HAllocate::DefaultFlags(kind);
|
| // Allocate both the JS array and the elements array in one big
|
| // allocation. This avoids multiple limit checks.
|
| - HValue* size_in_bytes = AddInstruction(new(zone) HConstant(size));
|
| - HInstruction* object =
|
| - AddInstruction(new(zone) HAllocate(context,
|
| - size_in_bytes,
|
| - HType::JSObject(),
|
| - allocate_flags));
|
| + HValue* size_in_bytes = Add<HConstant>(size);
|
| + HInstruction* object = Add<HAllocate>(context,
|
| + size_in_bytes,
|
| + HType::JSObject(),
|
| + allocate_flags);
|
|
|
| // Copy the JS array part.
|
| for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
|
| @@ -1641,15 +1636,14 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
|
|
|
| // Create an allocation site info if requested.
|
| if (mode == TRACK_ALLOCATION_SITE) {
|
| - BuildCreateAllocationSiteInfo(object, JSArray::kSize, boilerplate);
|
| + BuildCreateAllocationSiteInfo(object, JSArray::kSize, allocation_site);
|
| }
|
|
|
| if (length > 0) {
|
| // Get hold of the elements array of the boilerplate and setup the
|
| // elements pointer in the resulting object.
|
| HValue* boilerplate_elements = AddLoadElements(boilerplate);
|
| - HValue* object_elements =
|
| - AddInstruction(new(zone) HInnerAllocatedObject(object, elems_offset));
|
| + HValue* object_elements = Add<HInnerAllocatedObject>(object, elems_offset);
|
| AddStore(object, HObjectAccess::ForElementsPointer(), object_elements);
|
|
|
| // Copy the elements array header.
|
| @@ -1663,16 +1657,10 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
|
| // copying loops with constant length up to a given boundary and use this
|
| // helper here instead.
|
| for (int i = 0; i < length; i++) {
|
| - HValue* key_constant = AddInstruction(new(zone) HConstant(i));
|
| - HInstruction* value =
|
| - AddInstruction(new(zone) HLoadKeyed(boilerplate_elements,
|
| - key_constant,
|
| - NULL,
|
| - kind));
|
| - AddInstruction(new(zone) HStoreKeyed(object_elements,
|
| - key_constant,
|
| - value,
|
| - kind));
|
| + HValue* key_constant = Add<HConstant>(i);
|
| + HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
|
| + static_cast<HValue*>(NULL), kind);
|
| + Add<HStoreKeyed>(object_elements, key_constant, value, kind);
|
| }
|
| }
|
|
|
| @@ -1680,6 +1668,39 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
|
| }
|
|
|
|
|
| +HInstruction* HGraphBuilder::BuildUnaryMathOp(
|
| + HValue* input, Handle<Type> type, Token::Value operation) {
|
| + // We only handle the numeric cases here
|
| + type = handle(
|
| + Type::Intersect(type, handle(Type::Number(), isolate())), isolate());
|
| +
|
| + switch (operation) {
|
| + default:
|
| + UNREACHABLE();
|
| + case Token::SUB: {
|
| + HInstruction* instr =
|
| + HMul::New(zone(), environment()->LookupContext(),
|
| + input, graph()->GetConstantMinus1());
|
| + Representation rep = Representation::FromType(type);
|
| + if (type->Is(Type::None())) {
|
| + AddSoftDeoptimize();
|
| + }
|
| + if (instr->IsBinaryOperation()) {
|
| + HBinaryOperation* binop = HBinaryOperation::cast(instr);
|
| + binop->set_observed_input_representation(1, rep);
|
| + binop->set_observed_input_representation(2, rep);
|
| + }
|
| + return instr;
|
| + }
|
| + case Token::BIT_NOT:
|
| + if (type->Is(Type::None())) {
|
| + AddSoftDeoptimize();
|
| + }
|
| + return new(zone()) HBitNot(input);
|
| + }
|
| +}
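Note: BuildUnaryMathOp lowers unary minus (Token::SUB) to an HMul against GetConstantMinus1() instead of a dedicated negate instruction, feeding the type-feedback representation into both operands of the multiply, and it requests a soft deopt first when there is no usable feedback (type->Is(Type::None())); Token::BIT_NOT keeps its own HBitNot. Multiplying by -1 is a faithful negation for doubles, including the signed-zero corner that Hydrogen's minus-zero tracking cares about; a tiny standalone check (my own illustration, not V8 code):

    #include <cstdio>

    int main() {
      double x = 0.0;
      std::printf("%g %g\n", -x, x * -1.0);         // -0 -0 on an IEEE 754 platform
      std::printf("%d\n", 1.0 / (x * -1.0) < 0.0);  // 1: the sign of zero is preserved
    }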
|
| +
|
| +
|
| void HGraphBuilder::BuildCompareNil(
|
| HValue* value,
|
| Handle<Type> type,
|
| @@ -1722,22 +1743,22 @@ void HGraphBuilder::BuildCompareNil(
|
|
|
| HValue* HGraphBuilder::BuildCreateAllocationSiteInfo(HValue* previous_object,
|
| int previous_object_size,
|
| - HValue* payload) {
|
| - HInnerAllocatedObject* alloc_site = new(zone())
|
| - HInnerAllocatedObject(previous_object, previous_object_size);
|
| - AddInstruction(alloc_site);
|
| - Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
|
| - AddStoreMapConstant(alloc_site, alloc_site_map);
|
| - HObjectAccess access = HObjectAccess::ForAllocationSitePayload();
|
| - AddStore(alloc_site, access, payload);
|
| - return alloc_site;
|
| + HValue* alloc_site) {
|
| + ASSERT(alloc_site != NULL);
|
| + HInnerAllocatedObject* alloc_site_info = Add<HInnerAllocatedObject>(
|
| + previous_object, previous_object_size);
|
| + Handle<Map> alloc_site_info_map(
|
| + isolate()->heap()->allocation_site_info_map());
|
| + AddStoreMapConstant(alloc_site_info, alloc_site_info_map);
|
| + HObjectAccess access = HObjectAccess::ForAllocationSiteInfoSite();
|
| + AddStore(alloc_site_info, access, alloc_site);
|
| + return alloc_site_info;
|
| }
|
|
|
|
|
| HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* context) {
|
| // Get the global context, then the native context
|
| - HInstruction* global_object = AddInstruction(new(zone())
|
| - HGlobalObject(context));
|
| + HInstruction* global_object = Add<HGlobalObject>(context);
|
| HObjectAccess access = HObjectAccess::ForJSObjectOffset(
|
| GlobalObject::kNativeContextOffset);
|
| return AddLoad(global_object, access);
|
| @@ -1746,25 +1767,25 @@ HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* context) {
|
|
|
| HInstruction* HGraphBuilder::BuildGetArrayFunction(HValue* context) {
|
| HInstruction* native_context = BuildGetNativeContext(context);
|
| - HInstruction* index = AddInstruction(new(zone())
|
| - HConstant(Context::ARRAY_FUNCTION_INDEX));
|
| -
|
| - return AddInstruction(new (zone())
|
| - HLoadKeyed(native_context, index, NULL, FAST_ELEMENTS));
|
| + HInstruction* index =
|
| + Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
|
| + return Add<HLoadKeyed>(
|
| + native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
|
| }
|
|
|
|
|
| HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
|
| - ElementsKind kind,
|
| - HValue* allocation_site_payload,
|
| - bool disable_allocation_sites) :
|
| + ElementsKind kind,
|
| + HValue* allocation_site_payload,
|
| + HValue* constructor_function,
|
| + AllocationSiteOverrideMode override_mode) :
|
| builder_(builder),
|
| kind_(kind),
|
| allocation_site_payload_(allocation_site_payload),
|
| - constructor_function_(NULL) {
|
| - mode_ = disable_allocation_sites
|
| + constructor_function_(constructor_function) {
|
| + mode_ = override_mode == DISABLE_ALLOCATION_SITES
|
| ? DONT_TRACK_ALLOCATION_SITE
|
| - : AllocationSiteInfo::GetMode(kind);
|
| + : AllocationSite::GetMode(kind);
|
| }
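Note: JSArrayBuilder now takes the constructor function and an explicit AllocationSiteOverrideMode instead of a bare bool, and the mode query moves from AllocationSiteInfo::GetMode() to AllocationSite::GetMode(); further down, AllocateArray() chooses the internal-map path when allocation_site_payload_ is NULL rather than when constructor_function_ is non-NULL. The enum also removes the boolean-trap call sites:

    // New form (DISABLE_ALLOCATION_SITES appears in this patch; the name of the
    // non-overriding enumerator is not visible here and is assumed):
    JSArrayBuilder builder(this, kind, payload, constructor, DISABLE_ALLOCATION_SITES);
    // Old form:
    JSArrayBuilder builder(this, kind, payload, /* disable_allocation_sites */ true);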
|
|
|
|
|
| @@ -1780,26 +1801,35 @@ HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
|
|
|
|
|
| HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode(HValue* context) {
|
| - HInstruction* native_context = builder()->BuildGetNativeContext(context);
|
| + if (kind_ == GetInitialFastElementsKind()) {
|
| + // No need for a context lookup if the kind_ matches the initial
|
| + // map, because we can just load the map in that case.
|
| + HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
|
| + HInstruction* load =
|
| + builder()->BuildLoadNamedField(constructor_function_,
|
| + access,
|
| + Representation::Tagged());
|
| + return builder()->AddInstruction(load);
|
| + }
|
|
|
| - HInstruction* index = builder()->AddInstruction(new(zone())
|
| - HConstant(Context::JS_ARRAY_MAPS_INDEX));
|
| + HInstruction* native_context = builder()->BuildGetNativeContext(context);
|
| + HInstruction* index = builder()->Add<HConstant>(
|
| + static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));
|
|
|
| - HInstruction* map_array = builder()->AddInstruction(new(zone())
|
| - HLoadKeyed(native_context, index, NULL, FAST_ELEMENTS));
|
| + HInstruction* map_array = builder()->Add<HLoadKeyed>(
|
| + native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
|
|
|
| - HInstruction* kind_index = builder()->AddInstruction(new(zone())
|
| - HConstant(kind_));
|
| + HInstruction* kind_index = builder()->Add<HConstant>(kind_);
|
|
|
| - return builder()->AddInstruction(new(zone())
|
| - HLoadKeyed(map_array, kind_index, NULL, FAST_ELEMENTS));
|
| + return builder()->Add<HLoadKeyed>(
|
| + map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
|
| }
|
|
|
|
|
| HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
|
| // Find the map near the constructor function
|
| HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
|
| - return AddInstruction(
|
| + return builder()->AddInstruction(
|
| builder()->BuildLoadNamedField(constructor_function_,
|
| access,
|
| Representation::Tagged()));
|
| @@ -1822,18 +1852,17 @@ HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
|
| base_size += FixedArray::kHeaderSize;
|
| }
|
|
|
| - HInstruction* elements_size_value = new(zone()) HConstant(elements_size());
|
| - AddInstruction(elements_size_value);
|
| + HInstruction* elements_size_value =
|
| + builder()->Add<HConstant>(elements_size());
|
| HInstruction* mul = HMul::New(zone(), context, length_node,
|
| elements_size_value);
|
| mul->ClearFlag(HValue::kCanOverflow);
|
| - AddInstruction(mul);
|
| + builder()->AddInstruction(mul);
|
|
|
| - HInstruction* base = new(zone()) HConstant(base_size);
|
| - AddInstruction(base);
|
| + HInstruction* base = builder()->Add<HConstant>(base_size);
|
| HInstruction* total_size = HAdd::New(zone(), context, base, mul);
|
| total_size->ClearFlag(HValue::kCanOverflow);
|
| - AddInstruction(total_size);
|
| + builder()->AddInstruction(total_size);
|
| return total_size;
|
| }
|
|
|
| @@ -1848,16 +1877,13 @@ HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
|
| ? FixedDoubleArray::SizeFor(initial_capacity())
|
| : FixedArray::SizeFor(initial_capacity());
|
|
|
| - HConstant* array_size = new(zone()) HConstant(base_size);
|
| - AddInstruction(array_size);
|
| - return array_size;
|
| + return builder()->Add<HConstant>(base_size);
|
| }
|
|
|
|
|
| HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
|
| HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
|
| - HConstant* capacity = new(zone()) HConstant(initial_capacity());
|
| - AddInstruction(capacity);
|
| + HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
|
| return AllocateArray(size_in_bytes,
|
| capacity,
|
| builder()->graph()->GetConstant0(),
|
| @@ -1881,13 +1907,12 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
|
|
|
| // Allocate (dealing with failure appropriately)
|
| HAllocate::Flags flags = HAllocate::DefaultFlags(kind_);
|
| - HAllocate* new_object = new(zone()) HAllocate(context, size_in_bytes,
|
| - HType::JSArray(), flags);
|
| - AddInstruction(new_object);
|
| + HAllocate* new_object = builder()->Add<HAllocate>(context, size_in_bytes,
|
| + HType::JSArray(), flags);
|
|
|
| // Fill in the fields: map, properties, length
|
| HValue* map;
|
| - if (constructor_function_ != NULL) {
|
| + if (allocation_site_payload_ == NULL) {
|
| map = EmitInternalMapCode();
|
| } else {
|
| map = EmitMapCode(context);
|
| @@ -1914,10 +1939,7 @@ HStoreNamedField* HGraphBuilder::AddStore(HValue *object,
|
| HObjectAccess access,
|
| HValue *val,
|
| Representation representation) {
|
| - HStoreNamedField *instr = new(zone())
|
| - HStoreNamedField(object, access, val, representation);
|
| - AddInstruction(instr);
|
| - return instr;
|
| + return Add<HStoreNamedField>(object, access, val, representation);
|
| }
|
|
|
|
|
| @@ -1925,20 +1947,26 @@ HLoadNamedField* HGraphBuilder::AddLoad(HValue *object,
|
| HObjectAccess access,
|
| HValue *typecheck,
|
| Representation representation) {
|
| - HLoadNamedField *instr =
|
| - new(zone()) HLoadNamedField(object, access, typecheck, representation);
|
| - AddInstruction(instr);
|
| - return instr;
|
| + return Add<HLoadNamedField>(object, access, typecheck, representation);
|
| }
|
|
|
|
|
| HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
|
| Handle<Map> map) {
|
| - HValue* constant = AddInstruction(new(zone()) HConstant(map));
|
| - HStoreNamedField *instr =
|
| - new(zone()) HStoreNamedField(object, HObjectAccess::ForMap(), constant);
|
| - AddInstruction(instr);
|
| - return instr;
|
| + return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
|
| + Add<HConstant>(map));
|
| +}
|
| +
|
| +
|
| +HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin,
|
| + HContext* context) {
|
| + HGlobalObject* global_object = Add<HGlobalObject>(context);
|
| + HObjectAccess access = HObjectAccess::ForJSObjectOffset(
|
| + GlobalObject::kBuiltinsOffset);
|
| + HValue* builtins = AddLoad(global_object, access);
|
| + HObjectAccess function_access = HObjectAccess::ForJSObjectOffset(
|
| + JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
|
| + return AddLoad(builtins, function_access);
|
| }
|
|
|
|
|
| @@ -1950,7 +1978,8 @@ HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
|
| break_scope_(NULL),
|
| inlined_count_(0),
|
| globals_(10, info->zone()),
|
| - inline_bailout_(false) {
|
| + inline_bailout_(false),
|
| + osr_(new(info->zone()) HOsrBuilder(this)) {
|
| // This is not initialized in the initializer list because the
|
| // constructor for the initial state relies on function_state_ == NULL
|
| // to know it's the initial state.
|
| @@ -2018,6 +2047,7 @@ HGraph::HGraph(CompilationInfo* info)
|
| values_(16, info->zone()),
|
| phi_list_(NULL),
|
| uint32_instructions_(NULL),
|
| + osr_(NULL),
|
| info_(info),
|
| zone_(info->zone()),
|
| is_recursive_(false),
|
| @@ -2053,10 +2083,8 @@ void HGraph::FinalizeUniqueValueIds() {
|
| DisallowHeapAllocation no_gc;
|
| ASSERT(!isolate()->optimizing_compiler_thread()->IsOptimizerThread());
|
| for (int i = 0; i < blocks()->length(); ++i) {
|
| - for (HInstruction* instr = blocks()->at(i)->first();
|
| - instr != NULL;
|
| - instr = instr->next()) {
|
| - instr->FinalizeUniqueValueId();
|
| + for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
|
| + it.Current()->FinalizeUniqueValueId();
|
| }
|
| }
|
| }
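Note: from here on, explicit first()/next() pointer walks over a block's instructions are replaced by HInstructionIterator with a Done()/Current()/Advance() interface. The iterator itself is not shown in this patch; below is a stand-in with the same usage pattern (the real one presumably also caches the next pointer so that the current instruction may be unlinked mid-walk, as Canonicalize() below does via DeleteAndReplaceWith()):

    #include <cstdio>

    struct Instr { int id; Instr* next; };   // toy intrusive list node

    class InstrIterator {                    // illustrative, not V8's iterator
     public:
      explicit InstrIterator(Instr* first) : cur_(first) {}
      bool Done() const { return cur_ == nullptr; }
      Instr* Current() const { return cur_; }
      void Advance() { cur_ = cur_->next; }
     private:
      Instr* cur_;
    };

    int main() {
      Instr c = {3, nullptr}, b = {2, &c}, a = {1, &b};
      for (InstrIterator it(&a); !it.Done(); it.Advance()) {  // mirrors the new loops
        std::printf("%d\n", it.Current()->id);
      }
    }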
|
| @@ -2068,28 +2096,27 @@ void HGraph::Canonicalize() {
|
| // We must be careful not to set the flag unnecessarily, because GVN
|
| // cannot identify two instructions when their flag value differs.
|
| for (int i = 0; i < blocks()->length(); ++i) {
|
| - HInstruction* instr = blocks()->at(i)->first();
|
| - while (instr != NULL) {
|
| + for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
|
| + HInstruction* instr = it.Current();
|
| if (instr->IsArithmeticBinaryOperation() &&
|
| instr->representation().IsInteger32() &&
|
| instr->HasAtLeastOneUseWithFlagAndNoneWithout(
|
| HInstruction::kTruncatingToInt32)) {
|
| instr->SetFlag(HInstruction::kAllUsesTruncatingToInt32);
|
| }
|
| - instr = instr->next();
|
| }
|
| }
|
| // Perform actual Canonicalization pass.
|
| for (int i = 0; i < blocks()->length(); ++i) {
|
| - HInstruction* instr = blocks()->at(i)->first();
|
| - while (instr != NULL) {
|
| + for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
|
| + HInstruction* instr = it.Current();
|
| HValue* value = instr->Canonicalize();
|
| if (value != instr) instr->DeleteAndReplaceWith(value);
|
| - instr = instr->next();
|
| }
|
| }
|
| }
|
|
|
| +
|
| // Block ordering was implemented with two mutually recursive methods,
|
| // HGraph::Postorder and HGraph::PostorderLoopBlocks.
|
| // The recursion could lead to stack overflow so the algorithm has been
|
| @@ -2460,8 +2487,8 @@ void HGraph::NullifyUnreachableInstructions() {
|
| }
|
| }
|
| if (all_predecessors_deoptimizing) nullify = true;
|
| - for (HInstruction* instr = block->first(); instr != NULL;
|
| - instr = instr->next()) {
|
| + for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
|
| + HInstruction* instr = it.Current();
|
| // Leave the basic structure of the graph intact.
|
| if (instr->IsBlockEntry()) continue;
|
| if (instr->IsControlInstruction()) continue;
|
| @@ -2505,53 +2532,6 @@ void HGraph::NullifyUnreachableInstructions() {
|
| }
|
|
|
|
|
| -// Replace all phis consisting of a single non-loop operand plus any number of
|
| -// loop operands by that single non-loop operand.
|
| -void HGraph::EliminateRedundantPhis() {
|
| - HPhase phase("H_Redundant phi elimination", this);
|
| -
|
| - // We do a simple fixed point iteration without any work list, because
|
| - // machine-generated JavaScript can lead to a very dense Hydrogen graph with
|
| - // an enormous work list and will consequently result in OOM. Experiments
|
| - // showed that this simple algorithm is good enough, and even e.g. tracking
|
| - // the set or range of blocks to consider is not a real improvement.
|
| - bool need_another_iteration;
|
| - ZoneList<HPhi*> redundant_phis(blocks_.length(), zone());
|
| - do {
|
| - need_another_iteration = false;
|
| - for (int i = 0; i < blocks_.length(); ++i) {
|
| - HBasicBlock* block = blocks_[i];
|
| - for (int j = 0; j < block->phis()->length(); j++) {
|
| - HPhi* phi = block->phis()->at(j);
|
| - HValue* replacement = phi->GetRedundantReplacement();
|
| - if (replacement != NULL) {
|
| - // Remember phi to avoid concurrent modification of the block's phis.
|
| - redundant_phis.Add(phi, zone());
|
| - for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
|
| - HValue* value = it.value();
|
| - value->SetOperandAt(it.index(), replacement);
|
| - need_another_iteration |= value->IsPhi();
|
| - }
|
| - }
|
| - }
|
| - for (int i = 0; i < redundant_phis.length(); i++) {
|
| - block->RemovePhi(redundant_phis[i]);
|
| - }
|
| - redundant_phis.Clear();
|
| - }
|
| - } while (need_another_iteration);
|
| -
|
| -#if DEBUG
|
| - // Make sure that we *really* removed all redundant phis.
|
| - for (int i = 0; i < blocks_.length(); ++i) {
|
| - for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
|
| - ASSERT(blocks_[i]->phis()->at(j)->GetRedundantReplacement() == NULL);
|
| - }
|
| - }
|
| -#endif
|
| -}
|
| -
|
| -
|
| bool HGraph::CheckArgumentsPhiUses() {
|
| int block_count = blocks_.length();
|
| for (int i = 0; i < block_count; ++i) {
|
| @@ -2592,380 +2572,6 @@ void HGraph::CollectPhis() {
|
| }
|
|
|
|
|
| -void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
|
| - BitVector in_worklist(GetMaximumValueID(), zone());
|
| - for (int i = 0; i < worklist->length(); ++i) {
|
| - ASSERT(!in_worklist.Contains(worklist->at(i)->id()));
|
| - in_worklist.Add(worklist->at(i)->id());
|
| - }
|
| -
|
| - while (!worklist->is_empty()) {
|
| - HValue* current = worklist->RemoveLast();
|
| - in_worklist.Remove(current->id());
|
| - if (current->UpdateInferredType()) {
|
| - for (HUseIterator it(current->uses()); !it.Done(); it.Advance()) {
|
| - HValue* use = it.value();
|
| - if (!in_worklist.Contains(use->id())) {
|
| - in_worklist.Add(use->id());
|
| - worklist->Add(use, zone());
|
| - }
|
| - }
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -class HRangeAnalysis BASE_EMBEDDED {
|
| - public:
|
| - explicit HRangeAnalysis(HGraph* graph) :
|
| - graph_(graph), zone_(graph->zone()), changed_ranges_(16, zone_) { }
|
| -
|
| - void Analyze();
|
| -
|
| - private:
|
| - void TraceRange(const char* msg, ...);
|
| - void Analyze(HBasicBlock* block);
|
| - void InferControlFlowRange(HCompareIDAndBranch* test, HBasicBlock* dest);
|
| - void UpdateControlFlowRange(Token::Value op, HValue* value, HValue* other);
|
| - void InferRange(HValue* value);
|
| - void RollBackTo(int index);
|
| - void AddRange(HValue* value, Range* range);
|
| -
|
| - HGraph* graph_;
|
| - Zone* zone_;
|
| - ZoneList<HValue*> changed_ranges_;
|
| -};
|
| -
|
| -
|
| -void HRangeAnalysis::TraceRange(const char* msg, ...) {
|
| - if (FLAG_trace_range) {
|
| - va_list arguments;
|
| - va_start(arguments, msg);
|
| - OS::VPrint(msg, arguments);
|
| - va_end(arguments);
|
| - }
|
| -}
|
| -
|
| -
|
| -void HRangeAnalysis::Analyze() {
|
| - HPhase phase("H_Range analysis", graph_);
|
| - Analyze(graph_->entry_block());
|
| -}
|
| -
|
| -
|
| -void HRangeAnalysis::Analyze(HBasicBlock* block) {
|
| - TraceRange("Analyzing block B%d\n", block->block_id());
|
| -
|
| - int last_changed_range = changed_ranges_.length() - 1;
|
| -
|
| - // Infer range based on control flow.
|
| - if (block->predecessors()->length() == 1) {
|
| - HBasicBlock* pred = block->predecessors()->first();
|
| - if (pred->end()->IsCompareIDAndBranch()) {
|
| - InferControlFlowRange(HCompareIDAndBranch::cast(pred->end()), block);
|
| - }
|
| - }
|
| -
|
| - // Process phi instructions.
|
| - for (int i = 0; i < block->phis()->length(); ++i) {
|
| - HPhi* phi = block->phis()->at(i);
|
| - InferRange(phi);
|
| - }
|
| -
|
| - // Go through all instructions of the current block.
|
| - HInstruction* instr = block->first();
|
| - while (instr != block->end()) {
|
| - InferRange(instr);
|
| - instr = instr->next();
|
| - }
|
| -
|
| - // Continue analysis in all dominated blocks.
|
| - for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
|
| - Analyze(block->dominated_blocks()->at(i));
|
| - }
|
| -
|
| - RollBackTo(last_changed_range);
|
| -}
|
| -
|
| -
|
| -void HRangeAnalysis::InferControlFlowRange(HCompareIDAndBranch* test,
|
| - HBasicBlock* dest) {
|
| - ASSERT((test->FirstSuccessor() == dest) == (test->SecondSuccessor() != dest));
|
| - if (test->representation().IsSmiOrInteger32()) {
|
| - Token::Value op = test->token();
|
| - if (test->SecondSuccessor() == dest) {
|
| - op = Token::NegateCompareOp(op);
|
| - }
|
| - Token::Value inverted_op = Token::ReverseCompareOp(op);
|
| - UpdateControlFlowRange(op, test->left(), test->right());
|
| - UpdateControlFlowRange(inverted_op, test->right(), test->left());
|
| - }
|
| -}
|
| -
|
| -
|
| -// We know that value [op] other. Use this information to update the range on
|
| -// value.
|
| -void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
|
| - HValue* value,
|
| - HValue* other) {
|
| - Range temp_range;
|
| - Range* range = other->range() != NULL ? other->range() : &temp_range;
|
| - Range* new_range = NULL;
|
| -
|
| - TraceRange("Control flow range infer %d %s %d\n",
|
| - value->id(),
|
| - Token::Name(op),
|
| - other->id());
|
| -
|
| - if (op == Token::EQ || op == Token::EQ_STRICT) {
|
| - // The same range has to apply for value.
|
| - new_range = range->Copy(zone_);
|
| - } else if (op == Token::LT || op == Token::LTE) {
|
| - new_range = range->CopyClearLower(zone_);
|
| - if (op == Token::LT) {
|
| - new_range->AddConstant(-1);
|
| - }
|
| - } else if (op == Token::GT || op == Token::GTE) {
|
| - new_range = range->CopyClearUpper(zone_);
|
| - if (op == Token::GT) {
|
| - new_range->AddConstant(1);
|
| - }
|
| - }
|
| -
|
| - if (new_range != NULL && !new_range->IsMostGeneric()) {
|
| - AddRange(value, new_range);
|
| - }
|
| -}
|
| -
|
| -
|
| -void HRangeAnalysis::InferRange(HValue* value) {
|
| - ASSERT(!value->HasRange());
|
| - if (!value->representation().IsNone()) {
|
| - value->ComputeInitialRange(zone_);
|
| - Range* range = value->range();
|
| - TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
|
| - value->id(),
|
| - value->Mnemonic(),
|
| - range->lower(),
|
| - range->upper());
|
| - }
|
| -}
|
| -
|
| -
|
| -void HRangeAnalysis::RollBackTo(int index) {
|
| - for (int i = index + 1; i < changed_ranges_.length(); ++i) {
|
| - changed_ranges_[i]->RemoveLastAddedRange();
|
| - }
|
| - changed_ranges_.Rewind(index + 1);
|
| -}
|
| -
|
| -
|
| -void HRangeAnalysis::AddRange(HValue* value, Range* range) {
|
| - Range* original_range = value->range();
|
| - value->AddNewRange(range, zone_);
|
| - changed_ranges_.Add(value, zone_);
|
| - Range* new_range = value->range();
|
| - TraceRange("Updated range of %d set to [%d,%d]\n",
|
| - value->id(),
|
| - new_range->lower(),
|
| - new_range->upper());
|
| - if (original_range != NULL) {
|
| - TraceRange("Original range was [%d,%d]\n",
|
| - original_range->lower(),
|
| - original_range->upper());
|
| - }
|
| - TraceRange("New information was [%d,%d]\n",
|
| - range->lower(),
|
| - range->upper());
|
| -}
|
| -
|
| -
|
| -class HStackCheckEliminator BASE_EMBEDDED {
|
| - public:
|
| - explicit HStackCheckEliminator(HGraph* graph) : graph_(graph) { }
|
| -
|
| - void Process();
|
| -
|
| - private:
|
| - HGraph* graph_;
|
| -};
|
| -
|
| -
|
| -void HStackCheckEliminator::Process() {
|
| - HPhase phase("H_Stack check elimination", graph_);
|
| - // For each loop block walk the dominator tree from the backwards branch to
|
| - // the loop header. If a call instruction is encountered the backwards branch
|
| - // is dominated by a call and the stack check in the backwards branch can be
|
| - // removed.
|
| - for (int i = 0; i < graph_->blocks()->length(); i++) {
|
| - HBasicBlock* block = graph_->blocks()->at(i);
|
| - if (block->IsLoopHeader()) {
|
| - HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
|
| - HBasicBlock* dominator = back_edge;
|
| - while (true) {
|
| - HInstruction* instr = dominator->first();
|
| - while (instr != NULL) {
|
| - if (instr->IsCall()) {
|
| - block->loop_information()->stack_check()->Eliminate();
|
| - break;
|
| - }
|
| - instr = instr->next();
|
| - }
|
| -
|
| - // Done when the loop header is processed.
|
| - if (dominator == block) break;
|
| -
|
| - // Move up the dominator tree.
|
| - dominator = dominator->dominator();
|
| - }
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -void HInferRepresentation::AddToWorklist(HValue* current) {
|
| - if (current->representation().IsTagged()) return;
|
| - if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
|
| - if (in_worklist_.Contains(current->id())) return;
|
| - worklist_.Add(current, zone());
|
| - in_worklist_.Add(current->id());
|
| -}
|
| -
|
| -
|
| -void HInferRepresentation::Analyze() {
|
| - HPhase phase("H_Infer representations", graph_);
|
| -
|
| - // (1) Initialize bit vectors and count real uses. Each phi gets a
|
| - // bit-vector of length <number of phis>.
|
| - const ZoneList<HPhi*>* phi_list = graph_->phi_list();
|
| - int phi_count = phi_list->length();
|
| - ZoneList<BitVector*> connected_phis(phi_count, graph_->zone());
|
| - for (int i = 0; i < phi_count; ++i) {
|
| - phi_list->at(i)->InitRealUses(i);
|
| - BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone());
|
| - connected_set->Add(i);
|
| - connected_phis.Add(connected_set, zone());
|
| - }
|
| -
|
| - // (2) Do a fixed point iteration to find the set of connected phis. A
|
| - // phi is connected to another phi if its value is used either directly or
|
| - // indirectly through a transitive closure of the def-use relation.
|
| - bool change = true;
|
| - while (change) {
|
| - change = false;
|
| - // We normally have far more "forward edges" than "backward edges",
|
| - // so we terminate faster when we walk backwards.
|
| - for (int i = phi_count - 1; i >= 0; --i) {
|
| - HPhi* phi = phi_list->at(i);
|
| - for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
|
| - HValue* use = it.value();
|
| - if (use->IsPhi()) {
|
| - int id = HPhi::cast(use)->phi_id();
|
| - if (connected_phis[i]->UnionIsChanged(*connected_phis[id]))
|
| - change = true;
|
| - }
|
| - }
|
| - }
|
| - }
|
| -
|
| - // Set truncation flags for groups of connected phis. This is a conservative
|
| - // approximation; the flag will be properly re-computed after representations
|
| - // have been determined.
|
| - if (phi_count > 0) {
|
| - BitVector* done = new(zone()) BitVector(phi_count, graph_->zone());
|
| - for (int i = 0; i < phi_count; ++i) {
|
| - if (done->Contains(i)) continue;
|
| -
|
| - // Check if all uses of all connected phis in this group are truncating.
|
| - bool all_uses_everywhere_truncating = true;
|
| - for (BitVector::Iterator it(connected_phis.at(i));
|
| - !it.Done();
|
| - it.Advance()) {
|
| - int index = it.Current();
|
| - all_uses_everywhere_truncating &=
|
| - phi_list->at(index)->CheckFlag(HInstruction::kTruncatingToInt32);
|
| - done->Add(index);
|
| - }
|
| - if (all_uses_everywhere_truncating) {
|
| - continue; // Great, nothing to do.
|
| - }
|
| - // Clear truncation flag of this group of connected phis.
|
| - for (BitVector::Iterator it(connected_phis.at(i));
|
| - !it.Done();
|
| - it.Advance()) {
|
| - int index = it.Current();
|
| - phi_list->at(index)->ClearFlag(HInstruction::kTruncatingToInt32);
|
| - }
|
| - }
|
| - }
|
| -
|
| - // Simplify constant phi inputs where possible.
|
| - // This step uses kTruncatingToInt32 flags of phis.
|
| - for (int i = 0; i < phi_count; ++i) {
|
| - phi_list->at(i)->SimplifyConstantInputs();
|
| - }
|
| -
|
| - // Use the phi reachability information from step 2 to
|
| - // sum up the non-phi use counts of all connected phis.
|
| - for (int i = 0; i < phi_count; ++i) {
|
| - HPhi* phi = phi_list->at(i);
|
| - for (BitVector::Iterator it(connected_phis.at(i));
|
| - !it.Done();
|
| - it.Advance()) {
|
| - int index = it.Current();
|
| - HPhi* it_use = phi_list->at(index);
|
| - if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice.
|
| - }
|
| - }
|
| -
|
| - // Initialize work list
|
| - for (int i = 0; i < graph_->blocks()->length(); ++i) {
|
| - HBasicBlock* block = graph_->blocks()->at(i);
|
| - const ZoneList<HPhi*>* phis = block->phis();
|
| - for (int j = 0; j < phis->length(); ++j) {
|
| - AddToWorklist(phis->at(j));
|
| - }
|
| -
|
| - HInstruction* current = block->first();
|
| - while (current != NULL) {
|
| - AddToWorklist(current);
|
| - current = current->next();
|
| - }
|
| - }
|
| -
|
| - // Do a fixed point iteration, trying to improve representations
|
| - while (!worklist_.is_empty()) {
|
| - HValue* current = worklist_.RemoveLast();
|
| - in_worklist_.Remove(current->id());
|
| - current->InferRepresentation(this);
|
| - }
|
| -
|
| - // Lastly: any instruction that we don't have representation information
|
| - // for defaults to Tagged.
|
| - for (int i = 0; i < graph_->blocks()->length(); ++i) {
|
| - HBasicBlock* block = graph_->blocks()->at(i);
|
| - const ZoneList<HPhi*>* phis = block->phis();
|
| - for (int j = 0; j < phis->length(); ++j) {
|
| - HPhi* phi = phis->at(j);
|
| - if (phi->representation().IsNone()) {
|
| - phi->ChangeRepresentation(Representation::Tagged());
|
| - }
|
| - }
|
| - for (HInstruction* current = block->first();
|
| - current != NULL; current = current->next()) {
|
| - if (current->representation().IsNone() &&
|
| - current->CheckFlag(HInstruction::kFlexibleRepresentation)) {
|
| - if (current->CheckFlag(HInstruction::kCannotBeTagged)) {
|
| - current->ChangeRepresentation(Representation::Double());
|
| - } else {
|
| - current->ChangeRepresentation(Representation::Tagged());
|
| - }
|
| - }
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| void HGraph::MergeRemovableSimulates() {
|
| HPhase phase("H_Merge removable simulates", this);
|
| ZoneList<HSimulate*> mergelist(2, zone());
|
| @@ -2976,8 +2582,8 @@ void HGraph::MergeRemovableSimulates() {
|
| // Nasty heuristic: Never remove the first simulate in a block. This
|
| // just so happens to have a beneficial effect on register allocation.
|
| bool first = true;
|
| - for (HInstruction* current = block->first();
|
| - current != NULL; current = current->next()) {
|
| + for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
|
| + HInstruction* current = it.Current();
|
| if (current->IsLeaveInlined()) {
|
| // Never fold simulates from inlined environments into simulates
|
| // in the outer environment.
|
| @@ -3029,45 +2635,6 @@ void HGraph::MergeRemovableSimulates() {
|
| }
|
|
|
|
|
| -void HGraph::InitializeInferredTypes() {
|
| - HPhase phase("H_Inferring types", this);
|
| - InitializeInferredTypes(0, this->blocks_.length() - 1);
|
| -}
|
| -
|
| -
|
| -void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) {
|
| - for (int i = from_inclusive; i <= to_inclusive; ++i) {
|
| - HBasicBlock* block = blocks_[i];
|
| -
|
| - const ZoneList<HPhi*>* phis = block->phis();
|
| - for (int j = 0; j < phis->length(); j++) {
|
| - phis->at(j)->UpdateInferredType();
|
| - }
|
| -
|
| - HInstruction* current = block->first();
|
| - while (current != NULL) {
|
| - current->UpdateInferredType();
|
| - current = current->next();
|
| - }
|
| -
|
| - if (block->IsLoopHeader()) {
|
| - HBasicBlock* last_back_edge =
|
| - block->loop_information()->GetLastBackEdge();
|
| - InitializeInferredTypes(i + 1, last_back_edge->block_id());
|
| - // Skip all blocks already processed by the recursive call.
|
| - i = last_back_edge->block_id();
|
| - // Update phis of the loop header now after the whole loop body is
|
| - // guaranteed to be processed.
|
| - ZoneList<HValue*> worklist(block->phis()->length(), zone());
|
| - for (int j = 0; j < block->phis()->length(); ++j) {
|
| - worklist.Add(block->phis()->at(j), zone());
|
| - }
|
| - InferTypes(&worklist);
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
|
| HValue* current = value;
|
| while (current != NULL) {
|
| @@ -3099,401 +2666,43 @@ void HGraph::PropagateMinusZeroChecks(HValue* value, BitVector* visited) {
|
| HMathMinMax* minmax = HMathMinMax::cast(current);
|
| visited->Add(minmax->id());
|
| PropagateMinusZeroChecks(minmax->left(), visited);
|
| - PropagateMinusZeroChecks(minmax->right(), visited);
|
| - }
|
| -
|
| - current = current->EnsureAndPropagateNotMinusZero(visited);
|
| - }
|
| -}
|
| -
|
| -
|
| -void HGraph::InsertRepresentationChangeForUse(HValue* value,
|
| - HValue* use_value,
|
| - int use_index,
|
| - Representation to) {
|
| - // Insert the representation change right before its use. For phi-uses we
|
| - // insert at the end of the corresponding predecessor.
|
| - HInstruction* next = NULL;
|
| - if (use_value->IsPhi()) {
|
| - next = use_value->block()->predecessors()->at(use_index)->end();
|
| - } else {
|
| - next = HInstruction::cast(use_value);
|
| - }
|
| - // For constants we try to make the representation change at compile
|
| - // time. When a representation change is not possible without loss of
|
| - // information we treat constants like normal instructions and insert the
|
| - // change instructions for them.
|
| - HInstruction* new_value = NULL;
|
| - bool is_truncating = use_value->CheckFlag(HValue::kTruncatingToInt32);
|
| - bool allow_undefined_as_nan =
|
| - use_value->CheckFlag(HValue::kAllowUndefinedAsNaN);
|
| - if (value->IsConstant()) {
|
| - HConstant* constant = HConstant::cast(value);
|
| - // Try to create a new copy of the constant with the new representation.
|
| - new_value = (is_truncating && to.IsInteger32())
|
| - ? constant->CopyToTruncatedInt32(zone())
|
| - : constant->CopyToRepresentation(to, zone());
|
| - }
|
| -
|
| - if (new_value == NULL) {
|
| - new_value = new(zone()) HChange(value, to,
|
| - is_truncating, allow_undefined_as_nan);
|
| - }
|
| -
|
| - new_value->InsertBefore(next);
|
| - use_value->SetOperandAt(use_index, new_value);
|
| -}
|
| -
|
| -
|
| -void HGraph::InsertRepresentationChangesForValue(HValue* value) {
|
| - Representation r = value->representation();
|
| - if (r.IsNone()) return;
|
| - if (value->HasNoUses()) return;
|
| -
|
| - for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
|
| - HValue* use_value = it.value();
|
| - int use_index = it.index();
|
| - Representation req = use_value->RequiredInputRepresentation(use_index);
|
| - if (req.IsNone() || req.Equals(r)) continue;
|
| - InsertRepresentationChangeForUse(value, use_value, use_index, req);
|
| - }
|
| - if (value->HasNoUses()) {
|
| - ASSERT(value->IsConstant());
|
| - value->DeleteAndReplaceWith(NULL);
|
| - }
|
| -
|
| - // The only purpose of a HForceRepresentation is to represent the value
|
| - // after the (possible) HChange instruction. We make it disappear.
|
| - if (value->IsForceRepresentation()) {
|
| - value->DeleteAndReplaceWith(HForceRepresentation::cast(value)->value());
|
| - }
|
| -}
|
| -
|
| -
|
| -void HGraph::InsertRepresentationChanges() {
|
| - HPhase phase("H_Representation changes", this);
|
| -
|
| - // Compute truncation flag for phis: Initially assume that all
|
| - // int32-phis allow truncation and iteratively remove the ones that
|
| - // are used in an operation that does not allow a truncating
|
| - // conversion.
|
| - ZoneList<HPhi*> worklist(8, zone());
|
| -
|
| - for (int i = 0; i < phi_list()->length(); i++) {
|
| - HPhi* phi = phi_list()->at(i);
|
| - if (phi->representation().IsInteger32()) {
|
| - phi->SetFlag(HValue::kTruncatingToInt32);
|
| - }
|
| - }
|
| -
|
| - for (int i = 0; i < phi_list()->length(); i++) {
|
| - HPhi* phi = phi_list()->at(i);
|
| - for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
|
| - // If a Phi is used as a non-truncating int32 or as a double,
|
| - // clear its "truncating" flag.
|
| - HValue* use = it.value();
|
| - Representation input_representation =
|
| - use->RequiredInputRepresentation(it.index());
|
| - if (!input_representation.IsInteger32() ||
|
| - !use->CheckFlag(HValue::kTruncatingToInt32)) {
|
| - if (FLAG_trace_representation) {
|
| - PrintF("#%d Phi is not truncating because of #%d %s\n",
|
| - phi->id(), it.value()->id(), it.value()->Mnemonic());
|
| - }
|
| - phi->ClearFlag(HValue::kTruncatingToInt32);
|
| - worklist.Add(phi, zone());
|
| - break;
|
| - }
|
| - }
|
| - }
|
| -
|
| - while (!worklist.is_empty()) {
|
| - HPhi* current = worklist.RemoveLast();
|
| - for (int i = 0; i < current->OperandCount(); ++i) {
|
| - HValue* input = current->OperandAt(i);
|
| - if (input->IsPhi() &&
|
| - input->representation().IsInteger32() &&
|
| - input->CheckFlag(HValue::kTruncatingToInt32)) {
|
| - if (FLAG_trace_representation) {
|
| - PrintF("#%d Phi is not truncating because of #%d %s\n",
|
| - input->id(), current->id(), current->Mnemonic());
|
| - }
|
| - input->ClearFlag(HValue::kTruncatingToInt32);
|
| - worklist.Add(HPhi::cast(input), zone());
|
| - }
|
| - }
|
| - }
|
| -
|
| - for (int i = 0; i < blocks_.length(); ++i) {
|
| - // Process phi instructions first.
|
| - const ZoneList<HPhi*>* phis = blocks_[i]->phis();
|
| - for (int j = 0; j < phis->length(); j++) {
|
| - InsertRepresentationChangesForValue(phis->at(j));
|
| - }
|
| -
|
| - // Process normal instructions.
|
| - HInstruction* current = blocks_[i]->first();
|
| - while (current != NULL) {
|
| - HInstruction* next = current->next();
|
| - InsertRepresentationChangesForValue(current);
|
| - current = next;
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
|
| - if (!phi->CheckFlag(HValue::kAllowUndefinedAsNaN)) return;
|
| - phi->ClearFlag(HValue::kAllowUndefinedAsNaN);
|
| - for (int i = 0; i < phi->OperandCount(); ++i) {
|
| - HValue* input = phi->OperandAt(i);
|
| - if (input->IsPhi()) {
|
| - RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi::cast(input));
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -void HGraph::MarkDeoptimizeOnUndefined() {
|
| - HPhase phase("H_MarkDeoptimizeOnUndefined", this);
|
| - // Compute DeoptimizeOnUndefined flag for phis.
|
| - // Any phi that can reach a use with DeoptimizeOnUndefined set must
|
| - // have DeoptimizeOnUndefined set. Currently only HCompareIDAndBranch, with
|
| - // double input representation, has this flag set.
|
| - // The flag is used by HChange tagged->double, which must deoptimize
|
| - // if one of its uses has this flag set.
|
| - for (int i = 0; i < phi_list()->length(); i++) {
|
| - HPhi* phi = phi_list()->at(i);
|
| - for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
|
| - HValue* use_value = it.value();
|
| - if (!use_value->CheckFlag(HValue::kAllowUndefinedAsNaN)) {
|
| - RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
|
| - break;
|
| - }
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -// Discover instructions that can be marked with kUint32 flag allowing
|
| -// them to produce full range uint32 values.
|
| -class Uint32Analysis BASE_EMBEDDED {
|
| - public:
|
| - explicit Uint32Analysis(Zone* zone) : zone_(zone), phis_(4, zone) { }
|
| -
|
| - void Analyze(HInstruction* current);
|
| -
|
| - void UnmarkUnsafePhis();
|
| -
|
| - private:
|
| - bool IsSafeUint32Use(HValue* val, HValue* use);
|
| - bool Uint32UsesAreSafe(HValue* uint32val);
|
| - bool CheckPhiOperands(HPhi* phi);
|
| - void UnmarkPhi(HPhi* phi, ZoneList<HPhi*>* worklist);
|
| -
|
| - Zone* zone_;
|
| - ZoneList<HPhi*> phis_;
|
| -};
|
| -
|
| -
|
| -bool Uint32Analysis::IsSafeUint32Use(HValue* val, HValue* use) {
|
| -  // Operations that operate on bits are safe.
|
| - if (use->IsBitwise() ||
|
| - use->IsShl() ||
|
| - use->IsSar() ||
|
| - use->IsShr() ||
|
| - use->IsBitNot()) {
|
| - return true;
|
| - } else if (use->IsChange() || use->IsSimulate()) {
|
| -    // Conversions and deoptimization have special support for uint32.
|
| - return true;
|
| - } else if (use->IsStoreKeyed()) {
|
| - HStoreKeyed* store = HStoreKeyed::cast(use);
|
| - if (store->is_external()) {
|
| - // Storing a value into an external integer array is a bit level
|
| - // operation.
|
| - if (store->value() == val) {
|
| -        // Clamping or a conversion to double should have been inserted.
|
| - ASSERT(store->elements_kind() != EXTERNAL_PIXEL_ELEMENTS);
|
| - ASSERT(store->elements_kind() != EXTERNAL_FLOAT_ELEMENTS);
|
| - ASSERT(store->elements_kind() != EXTERNAL_DOUBLE_ELEMENTS);
|
| - return true;
|
| - }
|
| - }
|
| - }
|
| -
|
| - return false;
|
| -}
|
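The safety test above leans on the fact that bitwise operations produce the same 32-bit result whether their operands are read as int32 or uint32, so values above kMaxInt can flow through them unchanged; signed-sensitive operations such as division or comparison do not have that property. A standalone illustration (plain C++, not V8 code; the int32_t casts assume a two's-complement target):

    #include <cassert>
    #include <cstdint>

    int main() {
      uint32_t a = 0xFFFFFFF0u;  // 4294967280: not representable as int32
      uint32_t b = 0x0000000Fu;

      // The same bits reinterpreted as signed values.
      int32_t sa = static_cast<int32_t>(a);  // -16 on two's-complement targets
      int32_t sb = static_cast<int32_t>(b);

      // Bitwise operations yield identical bit patterns either way.
      assert(static_cast<uint32_t>(sa | sb) == (a | b));
      assert(static_cast<uint32_t>(sa & sb) == (a & b));
      assert(static_cast<uint32_t>(sa ^ sb) == (a ^ b));

      // Division depends on signedness, which is why arbitrary arithmetic
      // uses are not uint32-safe.
      assert((a / 2) != static_cast<uint32_t>(sa / 2));
      return 0;
    }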
| -
|
| -
|
| -// Iterate over all uses and verify that they are uint32 safe: either don't
|
| -// distinguish between int32 and uint32 due to their bitwise nature or
|
| -// have special support for uint32 values.
|
| -// Encountered phis are optimistically treated as safe uint32 uses,
|
| -// marked with kUint32 flag and collected in the phis_ list. A separate
|
| -// pass will be performed later by UnmarkUnsafePhis to clear kUint32 from
|
| -// phis that are not actually uint32-safe (it requires fixed-point iteration).
|
| -bool Uint32Analysis::Uint32UsesAreSafe(HValue* uint32val) {
|
| - bool collect_phi_uses = false;
|
| - for (HUseIterator it(uint32val->uses()); !it.Done(); it.Advance()) {
|
| - HValue* use = it.value();
|
| -
|
| - if (use->IsPhi()) {
|
| - if (!use->CheckFlag(HInstruction::kUint32)) {
|
| -        // There is a phi use of this value from a phi that is not yet
|
| - // collected in phis_ array. Separate pass is required.
|
| - collect_phi_uses = true;
|
| - }
|
| -
|
| - // Optimistically treat phis as uint32 safe.
|
| - continue;
|
| - }
|
| -
|
| - if (!IsSafeUint32Use(uint32val, use)) {
|
| - return false;
|
| - }
|
| - }
|
| -
|
| - if (collect_phi_uses) {
|
| - for (HUseIterator it(uint32val->uses()); !it.Done(); it.Advance()) {
|
| - HValue* use = it.value();
|
| -
|
| -      // There is a phi use of this value from a phi that is not yet
|
| - // collected in phis_ array. Separate pass is required.
|
| - if (use->IsPhi() && !use->CheckFlag(HInstruction::kUint32)) {
|
| - use->SetFlag(HInstruction::kUint32);
|
| - phis_.Add(HPhi::cast(use), zone_);
|
| - }
|
| - }
|
| - }
|
| -
|
| - return true;
|
| -}
|
| -
|
| -
|
| -// Analyze instruction and mark it with kUint32 if all its uses are uint32
|
| -// safe.
|
| -void Uint32Analysis::Analyze(HInstruction* current) {
|
| - if (Uint32UsesAreSafe(current)) current->SetFlag(HInstruction::kUint32);
|
| -}
|
| -
|
| -
|
| -// Check if all operands to the given phi are marked with kUint32 flag.
|
| -bool Uint32Analysis::CheckPhiOperands(HPhi* phi) {
|
| - if (!phi->CheckFlag(HInstruction::kUint32)) {
|
| - // This phi is not uint32 safe. No need to check operands.
|
| - return false;
|
| - }
|
| -
|
| - for (int j = 0; j < phi->OperandCount(); j++) {
|
| - HValue* operand = phi->OperandAt(j);
|
| - if (!operand->CheckFlag(HInstruction::kUint32)) {
|
| -      // Lazily mark constants that fit into uint32 range with kUint32 flag.
|
| - if (operand->IsInteger32Constant() &&
|
| - operand->GetInteger32Constant() >= 0) {
|
| - operand->SetFlag(HInstruction::kUint32);
|
| - continue;
|
| - }
|
| -
|
| - // This phi is not safe, some operands are not uint32 values.
|
| - return false;
|
| - }
|
| - }
|
| -
|
| - return true;
|
| -}
|
| -
|
| -
|
| -// Remove kUint32 flag from the phi itself and its operands. If any operand
|
| -// was a phi marked with kUint32 place it into a worklist for
|
| -// transitive clearing of kUint32 flag.
|
| -void Uint32Analysis::UnmarkPhi(HPhi* phi, ZoneList<HPhi*>* worklist) {
|
| - phi->ClearFlag(HInstruction::kUint32);
|
| - for (int j = 0; j < phi->OperandCount(); j++) {
|
| - HValue* operand = phi->OperandAt(j);
|
| - if (operand->CheckFlag(HInstruction::kUint32)) {
|
| - operand->ClearFlag(HInstruction::kUint32);
|
| - if (operand->IsPhi()) {
|
| - worklist->Add(HPhi::cast(operand), zone_);
|
| - }
|
| + PropagateMinusZeroChecks(minmax->right(), visited);
|
| }
|
| - }
|
| -}
|
| -
|
| -
|
| -void Uint32Analysis::UnmarkUnsafePhis() {
|
| - // No phis were collected. Nothing to do.
|
| - if (phis_.length() == 0) return;
|
| -
|
| - // Worklist used to transitively clear kUint32 from phis that
|
| - // are used as arguments to other phis.
|
| - ZoneList<HPhi*> worklist(phis_.length(), zone_);
|
| -
|
| - // Phi can be used as a uint32 value if and only if
|
| - // all its operands are uint32 values and all its
|
| - // uses are uint32 safe.
|
| -
|
| - // Iterate over collected phis and unmark those that
|
| -  // are unsafe. When unmarking a phi, unmark its operands
|
| - // and add it to the worklist if it is a phi as well.
|
| - // Phis that are still marked as safe are shifted down
|
| - // so that all safe phis form a prefix of the phis_ array.
|
| - int phi_count = 0;
|
| - for (int i = 0; i < phis_.length(); i++) {
|
| - HPhi* phi = phis_[i];
|
|
|
| - if (CheckPhiOperands(phi) && Uint32UsesAreSafe(phi)) {
|
| - phis_[phi_count++] = phi;
|
| - } else {
|
| - UnmarkPhi(phi, &worklist);
|
| - }
|
| + current = current->EnsureAndPropagateNotMinusZero(visited);
|
| }
|
| +}
|
|
|
| -  // Now the phis array contains only those phis that have safe
|
| -  // non-phi uses. Start transitively clearing the kUint32 flag
|
| -  // from phi operands of discovered non-safe phis until
|
| -  // only safe phis are left.
|
| - while (!worklist.is_empty()) {
|
| - while (!worklist.is_empty()) {
|
| - HPhi* phi = worklist.RemoveLast();
|
| - UnmarkPhi(phi, &worklist);
|
| - }
|
| -
|
| -    // Check if any operands to safe phis were unmarked,
|
| -    // turning a safe phi into unsafe. The same value
|
| - // can flow into several phis.
|
| - int new_phi_count = 0;
|
| - for (int i = 0; i < phi_count; i++) {
|
| - HPhi* phi = phis_[i];
|
|
|
| - if (CheckPhiOperands(phi)) {
|
| - phis_[new_phi_count++] = phi;
|
| - } else {
|
| - UnmarkPhi(phi, &worklist);
|
| - }
|
| +void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
|
| + if (!phi->CheckFlag(HValue::kAllowUndefinedAsNaN)) return;
|
| + phi->ClearFlag(HValue::kAllowUndefinedAsNaN);
|
| + for (int i = 0; i < phi->OperandCount(); ++i) {
|
| + HValue* input = phi->OperandAt(i);
|
| + if (input->IsPhi()) {
|
| + RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi::cast(input));
|
| }
|
| - phi_count = new_phi_count;
|
| }
|
| }
|
|
|
|
|
| -void HGraph::ComputeSafeUint32Operations() {
|
| - HPhase phase("H_Compute safe UInt32 operations", this);
|
| - if (uint32_instructions_ == NULL) return;
|
| -
|
| - Uint32Analysis analysis(zone());
|
| - for (int i = 0; i < uint32_instructions_->length(); ++i) {
|
| - HInstruction* current = uint32_instructions_->at(i);
|
| - if (current->IsLinked() && current->representation().IsInteger32()) {
|
| - analysis.Analyze(current);
|
| +void HGraph::MarkDeoptimizeOnUndefined() {
|
| + HPhase phase("H_MarkDeoptimizeOnUndefined", this);
|
| + // Compute DeoptimizeOnUndefined flag for phis. Any phi that can reach a use
|
| + // with DeoptimizeOnUndefined set must have DeoptimizeOnUndefined set.
|
| + // Currently only HCompareNumericAndBranch, with double input representation,
|
| + // has this flag set. The flag is used by HChange tagged->double, which must
|
| + // deoptimize if one of its uses has this flag set.
|
| + for (int i = 0; i < phi_list()->length(); i++) {
|
| + HPhi* phi = phi_list()->at(i);
|
| + for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
|
| + HValue* use_value = it.value();
|
| + if (!use_value->CheckFlag(HValue::kAllowUndefinedAsNaN)) {
|
| + RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
|
| + break;
|
| + }
|
| }
|
| }
|
| -
|
| - // Some phis might have been optimistically marked with kUint32 flag.
|
| - // Remove this flag from those phis that are unsafe and propagate
|
| - // this information transitively potentially clearing kUint32 flag
|
| - // from some non-phi operations that are used as operands to unsafe phis.
|
| - analysis.UnmarkUnsafePhis();
|
| }
|
|
|
|
|
| @@ -3501,9 +2710,8 @@ void HGraph::ComputeMinusZeroChecks() {
|
| HPhase phase("H_Compute minus zero checks", this);
|
| BitVector visited(GetMaximumValueID(), zone());
|
| for (int i = 0; i < blocks_.length(); ++i) {
|
| - for (HInstruction* current = blocks_[i]->first();
|
| - current != NULL;
|
| - current = current->next()) {
|
| + for (HInstructionIterator it(blocks_[i]); !it.Done(); it.Advance()) {
|
| + HInstruction* current = it.Current();
|
| if (current->IsChange()) {
|
| HChange* change = HChange::cast(current);
|
| // Propagate flags for negative zero checks upwards from conversions
|
| @@ -3785,7 +2993,7 @@ void TestContext::BuildBranch(HValue* value) {
|
| HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
|
| HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
|
| ToBooleanStub::Types expected(condition()->to_boolean_types());
|
| - HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
|
| + HBranch* test = new(zone()) HBranch(value, expected, empty_true, empty_false);
|
| builder->current_block()->Finish(test);
|
|
|
| empty_true->Goto(if_true(), builder->function_state());
|
| @@ -3853,7 +3061,7 @@ void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
|
|
|
| void HOptimizedGraphBuilder::VisitArgument(Expression* expr) {
|
| CHECK_ALIVE(VisitForValue(expr));
|
| - Push(AddInstruction(new(zone()) HPushArgument(Pop())));
|
| + Push(Add<HPushArgument>(Pop()));
|
| }
|
|
|
|
|
| @@ -3919,8 +3127,7 @@ bool HOptimizedGraphBuilder::BuildGraph() {
|
| AddSimulate(BailoutId::Declarations());
|
|
|
| HValue* context = environment()->LookupContext();
|
| - AddInstruction(
|
| - new(zone()) HStackCheck(context, HStackCheck::kFunctionEntry));
|
| + Add<HStackCheck>(context, HStackCheck::kFunctionEntry);
|
|
|
| VisitStatements(current_info()->function()->body());
|
| if (HasStackOverflow()) return false;
|
| @@ -3944,6 +3151,9 @@ bool HOptimizedGraphBuilder::BuildGraph() {
|
| !type_info->matches_inlined_type_change_checksum(composite_checksum));
|
| type_info->set_inlined_type_change_checksum(composite_checksum);
|
|
|
| + // Perform any necessary OSR-specific cleanups or changes to the graph.
|
| + osr_->FinishGraph();
|
| +
|
| return true;
|
| }
|
|
|
| @@ -3974,7 +3184,7 @@ bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
|
| "Unsupported phi use of const variable"));
|
| return false;
|
| }
|
| - EliminateRedundantPhis();
|
| + Run<HRedundantPhiEliminationPhase>();
|
| if (!CheckArgumentsPhiUses()) {
|
| *bailout_reason = SmartArrayPointer<char>(StrDup(
|
| "Unsupported phi use of arguments"));
|
| @@ -3982,21 +3192,12 @@ bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
|
| }
|
|
|
| // Remove dead code and phis
|
| - if (FLAG_dead_code_elimination) {
|
| - DeadCodeElimination("H_Eliminate early dead code");
|
| - }
|
| + if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
|
| CollectPhis();
|
|
|
| - if (has_osr_loop_entry()) {
|
| - const ZoneList<HPhi*>* phis = osr_loop_entry()->phis();
|
| - for (int j = 0; j < phis->length(); j++) {
|
| - HPhi* phi = phis->at(j);
|
| - osr_values()->at(phi->merged_index())->set_incoming_value(phi);
|
| - }
|
| - }
|
| + if (has_osr()) osr()->FinishOsrValues();
|
|
|
| - HInferRepresentation rep(this);
|
| - rep.Analyze();
|
| + Run<HInferRepresentationPhase>();
|
|
|
| // Remove HSimulate instructions that have turned out not to be needed
|
| // after all by folding them into the following HSimulate.
|
| @@ -4004,37 +3205,34 @@ bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
|
| MergeRemovableSimulates();
|
|
|
| MarkDeoptimizeOnUndefined();
|
| - InsertRepresentationChanges();
|
| + Run<HRepresentationChangesPhase>();
|
|
|
| - InitializeInferredTypes();
|
| + Run<HInferTypesPhase>();
|
|
|
| // Must be performed before canonicalization to ensure that Canonicalize
|
| // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
|
| // zero.
|
| - if (FLAG_opt_safe_uint32_operations) ComputeSafeUint32Operations();
|
| + if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();
|
|
|
| if (FLAG_use_canonicalizing) Canonicalize();
|
|
|
| + if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
|
| +
|
| if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
|
|
|
| - if (FLAG_use_range) {
|
| - HRangeAnalysis rangeAnalysis(this);
|
| - rangeAnalysis.Analyze();
|
| - }
|
| + if (FLAG_use_range) Run<HRangeAnalysisPhase>();
|
| +
|
| ComputeMinusZeroChecks();
|
|
|
| // Eliminate redundant stack checks on backwards branches.
|
| - HStackCheckEliminator sce(this);
|
| - sce.Process();
|
| + Run<HStackCheckEliminationPhase>();
|
|
|
| if (FLAG_idefs) SetupInformativeDefinitions();
|
| if (FLAG_array_bounds_checks_elimination && !FLAG_idefs) {
|
| - EliminateRedundantBoundsChecks();
|
| + Run<HBoundsCheckEliminationPhase>();
|
| }
|
| if (FLAG_array_index_dehoisting) DehoistSimpleArrayIndexComputations();
|
| - if (FLAG_dead_code_elimination) {
|
| - DeadCodeElimination("H_Eliminate late dead code");
|
| - }
|
| + if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
|
|
|
| RestoreActualValues();
|
|
|
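The Run<...>() calls introduced in this hunk replace hand-rolled invocations with a uniform phase driver, so each optimization can live in its own phase class. A minimal sketch of that dispatch pattern with made-up types (the real HPhase machinery also handles timing, tracing, and zone bookkeeping):

    #include <cstdio>

    struct Graph;  // stand-in for HGraph

    // Every phase exposes the same shape: construct with the graph, then Run().
    struct RangeAnalysisPhase {
      explicit RangeAnalysisPhase(Graph* graph) : graph_(graph) {}
      void Run() { std::printf("range analysis\n"); }
      Graph* graph_;
    };

    struct Graph {
      template <class Phase>
      void Run() {
        Phase phase(this);
        phase.Run();
      }
    };

    int main() {
      Graph graph;
      graph.Run<RangeAnalysisPhase>();  // mirrors Run<HRangeAnalysisPhase>()
      return 0;
    }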
| @@ -4051,7 +3249,8 @@ void HGraph::SetupInformativeDefinitionsInBlock(HBasicBlock* block) {
|
| ASSERT(!phi->IsInformativeDefinition());
|
| }
|
|
|
| - for (HInstruction* i = block->first(); i != NULL; i = i->next()) {
|
| + for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
|
| + HInstruction* i = it.Current();
|
| i->AddInformativeDefinitions();
|
| i->SetFlag(HValue::kIDefsProcessingDone);
|
| i->UpdateRedefinedUsesWhileSettingUpInformativeDefinitions();
|
| @@ -4069,7 +3268,8 @@ void HGraph::SetupInformativeDefinitionsRecursively(HBasicBlock* block) {
|
| SetupInformativeDefinitionsRecursively(block->dominated_blocks()->at(i));
|
| }
|
|
|
| - for (HInstruction* i = block->first(); i != NULL; i = i->next()) {
|
| + for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
|
| + HInstruction* i = it.Current();
|
| if (i->IsBoundsCheck()) {
|
| HBoundsCheck* check = HBoundsCheck::cast(i);
|
| check->ApplyIndexChange();
|
| @@ -4084,363 +3284,6 @@ void HGraph::SetupInformativeDefinitions() {
|
| }
|
|
|
|
|
| -// We try to "factor up" HBoundsCheck instructions towards the root of the
|
| -// dominator tree.
|
| -// For now we handle checks where the index is like "exp + int32value".
|
| -// If in the dominator tree we check "exp + v1" and later (dominated)
|
| -// "exp + v2", then if v2 <= v1 we can safely remove the second check, and
|
| -// if v2 > v1 we can use v2 in the first check and again remove the second.
|
| -// To do so we keep a dictionary of all checks where the key is the pair
|
| -// "exp, length".
|
| -// The class BoundsCheckKey represents this key.
|
| -class BoundsCheckKey : public ZoneObject {
|
| - public:
|
| - HValue* IndexBase() const { return index_base_; }
|
| - HValue* Length() const { return length_; }
|
| -
|
| - uint32_t Hash() {
|
| - return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
|
| - }
|
| -
|
| - static BoundsCheckKey* Create(Zone* zone,
|
| - HBoundsCheck* check,
|
| - int32_t* offset) {
|
| - if (!check->index()->representation().IsSmiOrInteger32()) return NULL;
|
| -
|
| - HValue* index_base = NULL;
|
| - HConstant* constant = NULL;
|
| - bool is_sub = false;
|
| -
|
| - if (check->index()->IsAdd()) {
|
| - HAdd* index = HAdd::cast(check->index());
|
| - if (index->left()->IsConstant()) {
|
| - constant = HConstant::cast(index->left());
|
| - index_base = index->right();
|
| - } else if (index->right()->IsConstant()) {
|
| - constant = HConstant::cast(index->right());
|
| - index_base = index->left();
|
| - }
|
| - } else if (check->index()->IsSub()) {
|
| - HSub* index = HSub::cast(check->index());
|
| - is_sub = true;
|
| - if (index->left()->IsConstant()) {
|
| - constant = HConstant::cast(index->left());
|
| - index_base = index->right();
|
| - } else if (index->right()->IsConstant()) {
|
| - constant = HConstant::cast(index->right());
|
| - index_base = index->left();
|
| - }
|
| - }
|
| -
|
| - if (constant != NULL && constant->HasInteger32Value()) {
|
| - *offset = is_sub ? - constant->Integer32Value()
|
| - : constant->Integer32Value();
|
| - } else {
|
| - *offset = 0;
|
| - index_base = check->index();
|
| - }
|
| -
|
| - return new(zone) BoundsCheckKey(index_base, check->length());
|
| - }
|
| -
|
| - private:
|
| - BoundsCheckKey(HValue* index_base, HValue* length)
|
| - : index_base_(index_base),
|
| - length_(length) { }
|
| -
|
| - HValue* index_base_;
|
| - HValue* length_;
|
| -};
|
| -
|
| -
|
| -// Data about each HBoundsCheck that can be eliminated or moved.
|
| -// It is the "value" in the dictionary indexed by "base-index, length"
|
| -// (the key is BoundsCheckKey).
|
| -// We scan the code with a dominator tree traversal.
|
| -// Traversing the dominator tree we keep a stack (implemented as a singly
|
| -// linked list) of "data" for each basic block that contains a relevant check
|
| -// with the same key (the dictionary holds the head of the list).
|
| -// We also keep all the "data" created for a given basic block in a list, and
|
| -// use it to "clean up" the dictionary when backtracking in the dominator tree
|
| -// traversal.
|
| -// Doing this, each dictionary entry always points directly to the check
|
| -// that dominates the code being examined now.
|
| -// We also track the current "offset" of the index expression and use it to
|
| -// decide if any check is already "covered" (so it can be removed) or not.
|
| -class BoundsCheckBbData: public ZoneObject {
|
| - public:
|
| - BoundsCheckKey* Key() const { return key_; }
|
| - int32_t LowerOffset() const { return lower_offset_; }
|
| - int32_t UpperOffset() const { return upper_offset_; }
|
| - HBasicBlock* BasicBlock() const { return basic_block_; }
|
| - HBoundsCheck* LowerCheck() const { return lower_check_; }
|
| - HBoundsCheck* UpperCheck() const { return upper_check_; }
|
| - BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
|
| - BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }
|
| -
|
| - bool OffsetIsCovered(int32_t offset) const {
|
| - return offset >= LowerOffset() && offset <= UpperOffset();
|
| - }
|
| -
|
| - bool HasSingleCheck() { return lower_check_ == upper_check_; }
|
| -
|
| - // The goal of this method is to modify either upper_offset_ or
|
| -  // lower_offset_ so that new_offset is also covered (the covered
|
| - // range grows).
|
| - //
|
| - // The precondition is that new_check follows UpperCheck() and
|
| - // LowerCheck() in the same basic block, and that new_offset is not
|
| - // covered (otherwise we could simply remove new_check).
|
| - //
|
| - // If HasSingleCheck() is true then new_check is added as "second check"
|
| - // (either upper or lower; note that HasSingleCheck() becomes false).
|
| - // Otherwise one of the current checks is modified so that it also covers
|
| - // new_offset, and new_check is removed.
|
| - //
|
| - // If the check cannot be modified because the context is unknown it
|
| - // returns false, otherwise it returns true.
|
| - bool CoverCheck(HBoundsCheck* new_check,
|
| - int32_t new_offset) {
|
| - ASSERT(new_check->index()->representation().IsSmiOrInteger32());
|
| - bool keep_new_check = false;
|
| -
|
| - if (new_offset > upper_offset_) {
|
| - upper_offset_ = new_offset;
|
| - if (HasSingleCheck()) {
|
| - keep_new_check = true;
|
| - upper_check_ = new_check;
|
| - } else {
|
| - bool result = BuildOffsetAdd(upper_check_,
|
| - &added_upper_index_,
|
| - &added_upper_offset_,
|
| - Key()->IndexBase(),
|
| - new_check->index()->representation(),
|
| - new_offset);
|
| - if (!result) return false;
|
| - upper_check_->ReplaceAllUsesWith(upper_check_->index());
|
| - upper_check_->SetOperandAt(0, added_upper_index_);
|
| - }
|
| - } else if (new_offset < lower_offset_) {
|
| - lower_offset_ = new_offset;
|
| - if (HasSingleCheck()) {
|
| - keep_new_check = true;
|
| - lower_check_ = new_check;
|
| - } else {
|
| - bool result = BuildOffsetAdd(lower_check_,
|
| - &added_lower_index_,
|
| - &added_lower_offset_,
|
| - Key()->IndexBase(),
|
| - new_check->index()->representation(),
|
| - new_offset);
|
| - if (!result) return false;
|
| - lower_check_->ReplaceAllUsesWith(lower_check_->index());
|
| - lower_check_->SetOperandAt(0, added_lower_index_);
|
| - }
|
| - } else {
|
| - ASSERT(false);
|
| - }
|
| -
|
| - if (!keep_new_check) {
|
| - new_check->DeleteAndReplaceWith(new_check->ActualValue());
|
| - }
|
| -
|
| - return true;
|
| - }
|
| -
|
| - void RemoveZeroOperations() {
|
| - RemoveZeroAdd(&added_lower_index_, &added_lower_offset_);
|
| - RemoveZeroAdd(&added_upper_index_, &added_upper_offset_);
|
| - }
|
| -
|
| - BoundsCheckBbData(BoundsCheckKey* key,
|
| - int32_t lower_offset,
|
| - int32_t upper_offset,
|
| - HBasicBlock* bb,
|
| - HBoundsCheck* lower_check,
|
| - HBoundsCheck* upper_check,
|
| - BoundsCheckBbData* next_in_bb,
|
| - BoundsCheckBbData* father_in_dt)
|
| - : key_(key),
|
| - lower_offset_(lower_offset),
|
| - upper_offset_(upper_offset),
|
| - basic_block_(bb),
|
| - lower_check_(lower_check),
|
| - upper_check_(upper_check),
|
| - added_lower_index_(NULL),
|
| - added_lower_offset_(NULL),
|
| - added_upper_index_(NULL),
|
| - added_upper_offset_(NULL),
|
| - next_in_bb_(next_in_bb),
|
| - father_in_dt_(father_in_dt) { }
|
| -
|
| - private:
|
| - BoundsCheckKey* key_;
|
| - int32_t lower_offset_;
|
| - int32_t upper_offset_;
|
| - HBasicBlock* basic_block_;
|
| - HBoundsCheck* lower_check_;
|
| - HBoundsCheck* upper_check_;
|
| - HInstruction* added_lower_index_;
|
| - HConstant* added_lower_offset_;
|
| - HInstruction* added_upper_index_;
|
| - HConstant* added_upper_offset_;
|
| - BoundsCheckBbData* next_in_bb_;
|
| - BoundsCheckBbData* father_in_dt_;
|
| -
|
| - // Given an existing add instruction and a bounds check it tries to
|
| - // find the current context (either of the add or of the check index).
|
| - HValue* IndexContext(HInstruction* add, HBoundsCheck* check) {
|
| - if (add != NULL && add->IsAdd()) {
|
| - return HAdd::cast(add)->context();
|
| - }
|
| - if (check->index()->IsBinaryOperation()) {
|
| - return HBinaryOperation::cast(check->index())->context();
|
| - }
|
| - return NULL;
|
| - }
|
| -
|
| - // This function returns false if it cannot build the add because the
|
| - // current context cannot be determined.
|
| - bool BuildOffsetAdd(HBoundsCheck* check,
|
| - HInstruction** add,
|
| - HConstant** constant,
|
| - HValue* original_value,
|
| - Representation representation,
|
| - int32_t new_offset) {
|
| - HValue* index_context = IndexContext(*add, check);
|
| - if (index_context == NULL) return false;
|
| -
|
| - HConstant* new_constant = new(BasicBlock()->zone()) HConstant(
|
| - new_offset, representation);
|
| - if (*add == NULL) {
|
| - new_constant->InsertBefore(check);
|
| - (*add) = HAdd::New(
|
| - BasicBlock()->zone(), index_context, original_value, new_constant);
|
| - (*add)->AssumeRepresentation(representation);
|
| - (*add)->InsertBefore(check);
|
| - } else {
|
| - new_constant->InsertBefore(*add);
|
| - (*constant)->DeleteAndReplaceWith(new_constant);
|
| - }
|
| - *constant = new_constant;
|
| - return true;
|
| - }
|
| -
|
| - void RemoveZeroAdd(HInstruction** add, HConstant** constant) {
|
| - if (*add != NULL && (*add)->IsAdd() && (*constant)->Integer32Value() == 0) {
|
| - (*add)->DeleteAndReplaceWith(HAdd::cast(*add)->left());
|
| - (*constant)->DeleteAndReplaceWith(NULL);
|
| - }
|
| - }
|
| -};
|
| -
|
| -
|
| -static bool BoundsCheckKeyMatch(void* key1, void* key2) {
|
| - BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
|
| - BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
|
| - return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
|
| -}
|
| -
|
| -
|
| -class BoundsCheckTable : private ZoneHashMap {
|
| - public:
|
| - BoundsCheckBbData** LookupOrInsert(BoundsCheckKey* key, Zone* zone) {
|
| - return reinterpret_cast<BoundsCheckBbData**>(
|
| - &(Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value));
|
| - }
|
| -
|
| - void Insert(BoundsCheckKey* key, BoundsCheckBbData* data, Zone* zone) {
|
| - Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value = data;
|
| - }
|
| -
|
| - void Delete(BoundsCheckKey* key) {
|
| - Remove(key, key->Hash());
|
| - }
|
| -
|
| - explicit BoundsCheckTable(Zone* zone)
|
| - : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
|
| - ZoneAllocationPolicy(zone)) { }
|
| -};
|
| -
|
| -
|
| -// Eliminates checks in bb and recursively in the dominated blocks.
|
| -// Also replace the results of check instructions with the original value, if
|
| -// the result is used. This is safe now, since we don't do code motion after
|
| -// this point. It enables better register allocation since the value produced
|
| -// by check instructions is really a copy of the original value.
|
| -void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
|
| - BoundsCheckTable* table) {
|
| - BoundsCheckBbData* bb_data_list = NULL;
|
| -
|
| - for (HInstruction* i = bb->first(); i != NULL; i = i->next()) {
|
| - if (!i->IsBoundsCheck()) continue;
|
| -
|
| - HBoundsCheck* check = HBoundsCheck::cast(i);
|
| - int32_t offset;
|
| - BoundsCheckKey* key =
|
| - BoundsCheckKey::Create(zone(), check, &offset);
|
| - if (key == NULL) continue;
|
| - BoundsCheckBbData** data_p = table->LookupOrInsert(key, zone());
|
| - BoundsCheckBbData* data = *data_p;
|
| - if (data == NULL) {
|
| - bb_data_list = new(zone()) BoundsCheckBbData(key,
|
| - offset,
|
| - offset,
|
| - bb,
|
| - check,
|
| - check,
|
| - bb_data_list,
|
| - NULL);
|
| - *data_p = bb_data_list;
|
| - } else if (data->OffsetIsCovered(offset)) {
|
| - check->DeleteAndReplaceWith(check->ActualValue());
|
| - } else if (data->BasicBlock() != bb ||
|
| - !data->CoverCheck(check, offset)) {
|
| - // If the check is in the current BB we try to modify it by calling
|
| -      // "CoverCheck", but if that also fails we record the current offsets
|
| - // in a new data instance because from now on they are covered.
|
| - int32_t new_lower_offset = offset < data->LowerOffset()
|
| - ? offset
|
| - : data->LowerOffset();
|
| - int32_t new_upper_offset = offset > data->UpperOffset()
|
| - ? offset
|
| - : data->UpperOffset();
|
| - bb_data_list = new(zone()) BoundsCheckBbData(key,
|
| - new_lower_offset,
|
| - new_upper_offset,
|
| - bb,
|
| - data->LowerCheck(),
|
| - data->UpperCheck(),
|
| - bb_data_list,
|
| - data);
|
| - table->Insert(key, bb_data_list, zone());
|
| - }
|
| - }
|
| -
|
| - for (int i = 0; i < bb->dominated_blocks()->length(); ++i) {
|
| - EliminateRedundantBoundsChecks(bb->dominated_blocks()->at(i), table);
|
| - }
|
| -
|
| - for (BoundsCheckBbData* data = bb_data_list;
|
| - data != NULL;
|
| - data = data->NextInBasicBlock()) {
|
| - data->RemoveZeroOperations();
|
| - if (data->FatherInDominatorTree()) {
|
| - table->Insert(data->Key(), data->FatherInDominatorTree(), zone());
|
| - } else {
|
| - table->Delete(data->Key());
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -void HGraph::EliminateRedundantBoundsChecks() {
|
| - HPhase phase("H_Eliminate bounds checks", this);
|
| - BoundsCheckTable checks_table(zone());
|
| - EliminateRedundantBoundsChecks(entry_block(), &checks_table);
|
| -}
|
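The pass removed above keys bounds checks by (index expression, length) and records, per dominating check, the interval of constant offsets already covered; a dominated check whose offset falls inside that interval is redundant, and one outside it widens an existing check instead. A tiny standalone model of the covering rule (hypothetical types, not the V8 classes):

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Interval of constant offsets already checked for one (exp, length) key.
    struct CoveredRange {
      int32_t lower;
      int32_t upper;

      bool Covers(int32_t offset) const {
        return offset >= lower && offset <= upper;
      }
      // Widening one bound models what CoverCheck does for a new offset.
      void Extend(int32_t offset) {
        lower = std::min(lower, offset);
        upper = std::max(upper, offset);
      }
    };

    int main() {
      // Checks for a[i + 3], a[i + 1], a[i + 5] against the same length:
      CoveredRange range = {3, 3};  // the first check establishes [3, 3]
      assert(!range.Covers(1));     // a[i + 1] is not covered yet...
      range.Extend(1);              // ...so the lower check is widened
      range.Extend(5);              // a[i + 5] widens the upper check
      assert(range.Covers(4));      // anything in [1, 5] is now redundant
      return 0;
    }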
| -
|
| -
|
| static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
|
| HValue* index = array_operation->GetKey()->ActualValue();
|
| if (!index->representation().IsSmiOrInteger32()) return;
|
| @@ -4494,9 +3337,8 @@ static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
|
| void HGraph::DehoistSimpleArrayIndexComputations() {
|
| HPhase phase("H_Dehoist index computations", this);
|
| for (int i = 0; i < blocks()->length(); ++i) {
|
| - for (HInstruction* instr = blocks()->at(i)->first();
|
| - instr != NULL;
|
| - instr = instr->next()) {
|
| + for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
|
| + HInstruction* instr = it.Current();
|
| ArrayInstructionInterface* array_instruction = NULL;
|
| if (instr->IsLoadKeyed()) {
|
| HLoadKeyed* op = HLoadKeyed::cast(instr);
|
| @@ -4513,101 +3355,6 @@ void HGraph::DehoistSimpleArrayIndexComputations() {
|
| }
|
|
|
|
|
| -void HGraph::DeadCodeElimination(const char* phase_name) {
|
| - HPhase phase(phase_name, this);
|
| - MarkLiveInstructions();
|
| - RemoveDeadInstructions();
|
| -}
|
| -
|
| -
|
| -void HGraph::MarkLiveInstructions() {
|
| - ZoneList<HValue*> worklist(blocks_.length(), zone());
|
| -
|
| - // Mark initial root instructions for dead code elimination.
|
| - for (int i = 0; i < blocks()->length(); ++i) {
|
| - HBasicBlock* block = blocks()->at(i);
|
| - for (HInstruction* instr = block->first();
|
| - instr != NULL;
|
| - instr = instr->next()) {
|
| - if (instr->CannotBeEliminated()) MarkLive(NULL, instr, &worklist);
|
| - }
|
| - for (int j = 0; j < block->phis()->length(); j++) {
|
| - HPhi* phi = block->phis()->at(j);
|
| - if (phi->CannotBeEliminated()) MarkLive(NULL, phi, &worklist);
|
| - }
|
| - }
|
| -
|
| - // Transitively mark all inputs of live instructions live.
|
| - while (!worklist.is_empty()) {
|
| - HValue* instr = worklist.RemoveLast();
|
| - for (int i = 0; i < instr->OperandCount(); ++i) {
|
| - MarkLive(instr, instr->OperandAt(i), &worklist);
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -void HGraph::MarkLive(HValue* ref, HValue* instr, ZoneList<HValue*>* worklist) {
|
| - if (!instr->CheckFlag(HValue::kIsLive)) {
|
| - instr->SetFlag(HValue::kIsLive);
|
| - worklist->Add(instr, zone());
|
| -
|
| - if (FLAG_trace_dead_code_elimination) {
|
| - HeapStringAllocator allocator;
|
| - StringStream stream(&allocator);
|
| - if (ref != NULL) {
|
| - ref->PrintTo(&stream);
|
| - } else {
|
| - stream.Add("root ");
|
| - }
|
| - stream.Add(" -> ");
|
| - instr->PrintTo(&stream);
|
| - PrintF("[MarkLive %s]\n", *stream.ToCString());
|
| - }
|
| - }
|
| -}
|
| -
|
| -
|
| -void HGraph::RemoveDeadInstructions() {
|
| - ZoneList<HPhi*> dead_phis(blocks_.length(), zone());
|
| -
|
| - // Remove any instruction not marked kIsLive.
|
| - for (int i = 0; i < blocks()->length(); ++i) {
|
| - HBasicBlock* block = blocks()->at(i);
|
| - for (HInstruction* instr = block->first();
|
| - instr != NULL;
|
| - instr = instr->next()) {
|
| - if (!instr->CheckFlag(HValue::kIsLive)) {
|
| - // Instruction has not been marked live; assume it is dead and remove.
|
| - // TODO(titzer): we don't remove constants because some special ones
|
| - // might be used by later phases and are assumed to be in the graph
|
| - if (!instr->IsConstant()) instr->DeleteAndReplaceWith(NULL);
|
| - } else {
|
| - // Clear the liveness flag to leave the graph clean for the next DCE.
|
| - instr->ClearFlag(HValue::kIsLive);
|
| - }
|
| - }
|
| - // Collect phis that are dead and remove them in the next pass.
|
| - for (int j = 0; j < block->phis()->length(); j++) {
|
| - HPhi* phi = block->phis()->at(j);
|
| - if (!phi->CheckFlag(HValue::kIsLive)) {
|
| - dead_phis.Add(phi, zone());
|
| - } else {
|
| - phi->ClearFlag(HValue::kIsLive);
|
| - }
|
| - }
|
| - }
|
| -
|
| - // Process phis separately to avoid simultaneously mutating the phi list.
|
| - while (!dead_phis.is_empty()) {
|
| - HPhi* phi = dead_phis.RemoveLast();
|
| - HBasicBlock* block = phi->block();
|
| - phi->DeleteAndReplaceWith(NULL);
|
| - block->RecordDeletedPhi(phi->merged_index());
|
| - }
|
| -}
|
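The dead-code elimination removed here works from roots: instructions that cannot be eliminated (side effects, control flow) are live, liveness flows into their operands, and everything left unmarked is deleted. A compact standalone sketch of the marking step (hypothetical Instr type):

    #include <vector>

    struct Instr {
      bool is_root = false;  // models CannotBeEliminated()
      bool live = false;     // models HValue::kIsLive
      std::vector<Instr*> operands;
    };

    // Mark everything reachable through operands from the root instructions.
    void MarkLive(std::vector<Instr*>& all) {
      std::vector<Instr*> worklist;
      for (Instr* instr : all) {
        if (instr->is_root) {
          instr->live = true;
          worklist.push_back(instr);
        }
      }
      while (!worklist.empty()) {
        Instr* instr = worklist.back();
        worklist.pop_back();
        for (Instr* operand : instr->operands) {
          if (!operand->live) {
            operand->live = true;
            worklist.push_back(operand);
          }
        }
      }
      // A second pass would delete every instruction still marked dead.
    }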
| -
|
| -
|
| void HGraph::RestoreActualValues() {
|
| HPhase phase("H_Restore actual values", this);
|
|
|
| @@ -4621,9 +3368,8 @@ void HGraph::RestoreActualValues() {
|
| }
|
| #endif
|
|
|
| - for (HInstruction* instruction = block->first();
|
| - instruction != NULL;
|
| - instruction = instruction->next()) {
|
| + for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
|
| + HInstruction* instruction = it.Current();
|
| if (instruction->ActualValue() != instruction) {
|
| ASSERT(instruction->IsInformativeDefinition());
|
| if (instruction->IsPurelyInformativeDefinition()) {
|
| @@ -4643,17 +3389,6 @@ void HOptimizedGraphBuilder::PushAndAdd(HInstruction* instr) {
|
| }
|
|
|
|
|
| -void HOptimizedGraphBuilder::AddSoftDeoptimize() {
|
| - isolate()->counters()->soft_deopts_requested()->Increment();
|
| - if (FLAG_always_opt) return;
|
| - if (current_block()->IsDeoptimizing()) return;
|
| - AddInstruction(new(zone()) HSoftDeoptimize());
|
| - isolate()->counters()->soft_deopts_inserted()->Increment();
|
| - current_block()->MarkAsDeoptimizing();
|
| - graph()->set_has_soft_deoptimize(true);
|
| -}
|
| -
|
| -
|
| template <class Instruction>
|
| HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
|
| int count = call->argument_count();
|
| @@ -4663,16 +3398,15 @@ HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
|
| }
|
|
|
| while (!arguments.is_empty()) {
|
| - AddInstruction(new(zone()) HPushArgument(arguments.RemoveLast()));
|
| + Add<HPushArgument>(arguments.RemoveLast());
|
| }
|
| return call;
|
| }
|
|
|
|
|
| void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
|
| - HConstant* undefined_constant = new(zone()) HConstant(
|
| + HConstant* undefined_constant = Add<HConstant>(
|
| isolate()->factory()->undefined_value());
|
| - AddInstruction(undefined_constant);
|
| graph()->set_undefined_constant(undefined_constant);
|
|
|
| // Create an arguments object containing the initial parameters. Set the
|
| @@ -4681,7 +3415,7 @@ void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
|
| HArgumentsObject* arguments_object =
|
| new(zone()) HArgumentsObject(environment()->parameter_count(), zone());
|
| for (int i = 0; i < environment()->parameter_count(); ++i) {
|
| - HInstruction* parameter = AddInstruction(new(zone()) HParameter(i));
|
| + HInstruction* parameter = Add<HParameter>(i);
|
| arguments_object->AddArgument(parameter, zone());
|
| environment()->Bind(i, parameter);
|
| }
|
| @@ -4689,7 +3423,7 @@ void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
|
| graph()->SetArgumentsObject(arguments_object);
|
|
|
| // First special is HContext.
|
| - HInstruction* context = AddInstruction(new(zone()) HContext);
|
| + HInstruction* context = Add<HContext>();
|
| environment()->BindContext(context);
|
|
|
| // Initialize specials and locals to undefined.
|
| @@ -5005,10 +3739,10 @@ void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
|
| AddSoftDeoptimize();
|
| }
|
|
|
| - HCompareIDAndBranch* compare_ =
|
| - new(zone()) HCompareIDAndBranch(tag_value,
|
| - label_value,
|
| - Token::EQ_STRICT);
|
| + HCompareNumericAndBranch* compare_ =
|
| + new(zone()) HCompareNumericAndBranch(tag_value,
|
| + label_value,
|
| + Token::EQ_STRICT);
|
| compare_->set_observed_input_representation(
|
| Representation::Smi(), Representation::Smi());
|
| compare = compare_;
|
| @@ -5102,71 +3836,14 @@ void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
|
| }
|
|
|
|
|
| -bool HOptimizedGraphBuilder::HasOsrEntryAt(IterationStatement* statement) {
|
| - return statement->OsrEntryId() == current_info()->osr_ast_id();
|
| -}
|
| -
|
| -
|
| -bool HOptimizedGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
|
| - if (!HasOsrEntryAt(statement)) return false;
|
| -
|
| - HBasicBlock* non_osr_entry = graph()->CreateBasicBlock();
|
| - HBasicBlock* osr_entry = graph()->CreateBasicBlock();
|
| - HValue* true_value = graph()->GetConstantTrue();
|
| - HBranch* test = new(zone()) HBranch(true_value, non_osr_entry, osr_entry);
|
| - current_block()->Finish(test);
|
| -
|
| - HBasicBlock* loop_predecessor = graph()->CreateBasicBlock();
|
| - non_osr_entry->Goto(loop_predecessor);
|
| -
|
| - set_current_block(osr_entry);
|
| - osr_entry->set_osr_entry();
|
| - BailoutId osr_entry_id = statement->OsrEntryId();
|
| - int first_expression_index = environment()->first_expression_index();
|
| - int length = environment()->length();
|
| - ZoneList<HUnknownOSRValue*>* osr_values =
|
| - new(zone()) ZoneList<HUnknownOSRValue*>(length, zone());
|
| -
|
| - for (int i = 0; i < first_expression_index; ++i) {
|
| - HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
|
| - AddInstruction(osr_value);
|
| - environment()->Bind(i, osr_value);
|
| - osr_values->Add(osr_value, zone());
|
| - }
|
| -
|
| - if (first_expression_index != length) {
|
| - environment()->Drop(length - first_expression_index);
|
| - for (int i = first_expression_index; i < length; ++i) {
|
| - HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
|
| - AddInstruction(osr_value);
|
| - environment()->Push(osr_value);
|
| - osr_values->Add(osr_value, zone());
|
| - }
|
| - }
|
| -
|
| - graph()->set_osr_values(osr_values);
|
| -
|
| - AddSimulate(osr_entry_id);
|
| - AddInstruction(new(zone()) HOsrEntry(osr_entry_id));
|
| - HContext* context = new(zone()) HContext;
|
| - AddInstruction(context);
|
| - environment()->BindContext(context);
|
| - current_block()->Goto(loop_predecessor);
|
| - loop_predecessor->SetJoinId(statement->EntryId());
|
| - set_current_block(loop_predecessor);
|
| - return true;
|
| -}
|
| -
|
| -
|
| void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
|
| HBasicBlock* loop_entry,
|
| BreakAndContinueInfo* break_info) {
|
| BreakAndContinueScope push(break_info, this);
|
| AddSimulate(stmt->StackCheckId());
|
| HValue* context = environment()->LookupContext();
|
| - HStackCheck* stack_check =
|
| - new(zone()) HStackCheck(context, HStackCheck::kBackwardsBranch);
|
| - AddInstruction(stack_check);
|
| + HStackCheck* stack_check = Add<HStackCheck>(
|
| + context, HStackCheck::kBackwardsBranch);
|
| ASSERT(loop_entry->IsLoopHeader());
|
| loop_entry->loop_information()->set_stack_check(stack_check);
|
| CHECK_BAILOUT(Visit(stmt->body()));
|
| @@ -5178,11 +3855,7 @@ void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
|
| ASSERT(current_block() != NULL);
|
| ASSERT(current_block()->HasPredecessor());
|
| ASSERT(current_block() != NULL);
|
| - bool osr_entry = PreProcessOsrEntry(stmt);
|
| - HBasicBlock* loop_entry = CreateLoopHeaderBlock();
|
| - current_block()->Goto(loop_entry);
|
| - set_current_block(loop_entry);
|
| - if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
|
| + HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);
|
|
|
| BreakAndContinueInfo break_info(stmt);
|
| CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
|
| @@ -5221,12 +3894,7 @@ void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
|
| ASSERT(current_block() != NULL);
|
| ASSERT(current_block()->HasPredecessor());
|
| ASSERT(current_block() != NULL);
|
| - bool osr_entry = PreProcessOsrEntry(stmt);
|
| - HBasicBlock* loop_entry = CreateLoopHeaderBlock();
|
| - current_block()->Goto(loop_entry);
|
| - set_current_block(loop_entry);
|
| - if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
|
| -
|
| + HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);
|
|
|
| // If the condition is constant true, do not generate a branch.
|
| HBasicBlock* loop_successor = NULL;
|
| @@ -5268,11 +3936,7 @@ void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
|
| CHECK_ALIVE(Visit(stmt->init()));
|
| }
|
| ASSERT(current_block() != NULL);
|
| - bool osr_entry = PreProcessOsrEntry(stmt);
|
| - HBasicBlock* loop_entry = CreateLoopHeaderBlock();
|
| - current_block()->Goto(loop_entry);
|
| - set_current_block(loop_entry);
|
| - if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
|
| + HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);
|
|
|
| HBasicBlock* loop_successor = NULL;
|
| if (stmt->cond() != NULL) {
|
| @@ -5335,45 +3999,35 @@ void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
|
| CHECK_ALIVE(VisitForValue(stmt->enumerable()));
|
| HValue* enumerable = Top(); // Leave enumerable at the top.
|
|
|
| - HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
|
| - environment()->LookupContext(), enumerable));
|
| + HInstruction* map = Add<HForInPrepareMap>(
|
| + environment()->LookupContext(), enumerable);
|
| AddSimulate(stmt->PrepareId());
|
|
|
| - HInstruction* array = AddInstruction(
|
| - new(zone()) HForInCacheArray(
|
| - enumerable,
|
| - map,
|
| - DescriptorArray::kEnumCacheBridgeCacheIndex));
|
| + HInstruction* array = Add<HForInCacheArray>(
|
| + enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
|
|
|
| - HInstruction* enum_length = AddInstruction(new(zone()) HMapEnumLength(map));
|
| + HInstruction* enum_length = Add<HMapEnumLength>(map);
|
|
|
| - HInstruction* start_index = AddInstruction(new(zone()) HConstant(0));
|
| + HInstruction* start_index = Add<HConstant>(0);
|
|
|
| Push(map);
|
| Push(array);
|
| Push(enum_length);
|
| Push(start_index);
|
|
|
| - HInstruction* index_cache = AddInstruction(
|
| - new(zone()) HForInCacheArray(
|
| - enumerable,
|
| - map,
|
| - DescriptorArray::kEnumCacheBridgeIndicesCacheIndex));
|
| + HInstruction* index_cache = Add<HForInCacheArray>(
|
| + enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
|
| HForInCacheArray::cast(array)->set_index_cache(
|
| HForInCacheArray::cast(index_cache));
|
|
|
| - bool osr_entry = PreProcessOsrEntry(stmt);
|
| - HBasicBlock* loop_entry = CreateLoopHeaderBlock();
|
| - current_block()->Goto(loop_entry);
|
| - set_current_block(loop_entry);
|
| - if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
|
| + HBasicBlock* loop_entry = osr_->BuildPossibleOsrLoopEntry(stmt);
|
|
|
| HValue* index = environment()->ExpressionStackAt(0);
|
| HValue* limit = environment()->ExpressionStackAt(1);
|
|
|
| // Check that we still have more keys.
|
| - HCompareIDAndBranch* compare_index =
|
| - new(zone()) HCompareIDAndBranch(index, limit, Token::LT);
|
| + HCompareNumericAndBranch* compare_index =
|
| + new(zone()) HCompareNumericAndBranch(index, limit, Token::LT);
|
| compare_index->set_observed_input_representation(
|
| Representation::Smi(), Representation::Smi());
|
|
|
| @@ -5389,18 +4043,16 @@ void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
|
|
|
| set_current_block(loop_body);
|
|
|
| - HValue* key = AddInstruction(
|
| - new(zone()) HLoadKeyed(
|
| - environment()->ExpressionStackAt(2), // Enum cache.
|
| - environment()->ExpressionStackAt(0), // Iteration index.
|
| - environment()->ExpressionStackAt(0),
|
| - FAST_ELEMENTS));
|
| + HValue* key = Add<HLoadKeyed>(
|
| + environment()->ExpressionStackAt(2), // Enum cache.
|
| + environment()->ExpressionStackAt(0), // Iteration index.
|
| + environment()->ExpressionStackAt(0),
|
| + FAST_ELEMENTS);
|
|
|
| // Check if the expected map still matches that of the enumerable.
|
| // If not just deoptimize.
|
| - AddInstruction(new(zone()) HCheckMapValue(
|
| - environment()->ExpressionStackAt(4),
|
| - environment()->ExpressionStackAt(3)));
|
| + Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
|
| + environment()->ExpressionStackAt(3));
|
|
|
| Bind(each_var, key);
|
|
|
| @@ -5573,9 +4225,7 @@ HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
|
| HValue* context = environment()->LookupContext();
|
| int length = current_info()->scope()->ContextChainLength(var->scope());
|
| while (length-- > 0) {
|
| - HInstruction* context_instruction = new(zone()) HOuterContext(context);
|
| - AddInstruction(context_instruction);
|
| - context = context_instruction;
|
| + context = Add<HOuterContext>(context);
|
| }
|
| return context;
|
| }
|
| @@ -5613,9 +4263,20 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
|
| if (type == kUseCell) {
|
| Handle<GlobalObject> global(current_info()->global_object());
|
| Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
|
| - HLoadGlobalCell* instr =
|
| - new(zone()) HLoadGlobalCell(cell, lookup.GetPropertyDetails());
|
| - return ast_context()->ReturnInstruction(instr, expr->id());
|
| + if (cell->type()->IsConstant()) {
|
| + cell->AddDependentCompilationInfo(top_info());
|
| + Handle<Object> constant_object = cell->type()->AsConstant();
|
| + if (constant_object->IsConsString()) {
|
| + constant_object =
|
| + FlattenGetString(Handle<String>::cast(constant_object));
|
| + }
|
| + HConstant* constant = new(zone()) HConstant(constant_object);
|
| + return ast_context()->ReturnInstruction(constant, expr->id());
|
| + } else {
|
| + HLoadGlobalCell* instr =
|
| + new(zone()) HLoadGlobalCell(cell, lookup.GetPropertyDetails());
|
| + return ast_context()->ReturnInstruction(instr, expr->id());
|
| + }
|
| } else {
|
| HValue* context = environment()->LookupContext();
|
| HGlobalObject* global_object = new(zone()) HGlobalObject(context);
|
| @@ -5868,6 +4529,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
|
| literal = BuildFastLiteral(context,
|
| boilerplate_object,
|
| original_boilerplate_object,
|
| + Handle<Object>::null(),
|
| data_size,
|
| pointer_size,
|
| DONT_TRACK_ALLOCATION_SITE);
|
| @@ -5881,23 +4543,18 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
|
| flags |= expr->has_function()
|
| ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
|
|
|
| - AddInstruction(new(zone()) HPushArgument(AddInstruction(
|
| - new(zone()) HConstant(closure_literals))));
|
| - AddInstruction(new(zone()) HPushArgument(AddInstruction(
|
| - new(zone()) HConstant(literal_index))));
|
| - AddInstruction(new(zone()) HPushArgument(AddInstruction(
|
| - new(zone()) HConstant(constant_properties))));
|
| - AddInstruction(new(zone()) HPushArgument(AddInstruction(
|
| - new(zone()) HConstant(flags))));
|
| + Add<HPushArgument>(Add<HConstant>(closure_literals));
|
| + Add<HPushArgument>(Add<HConstant>(literal_index));
|
| + Add<HPushArgument>(Add<HConstant>(constant_properties));
|
| + Add<HPushArgument>(Add<HConstant>(flags));
|
|
|
| Runtime::FunctionId function_id =
|
| (expr->depth() > 1 || expr->may_store_doubles())
|
| ? Runtime::kCreateObjectLiteral : Runtime::kCreateObjectLiteralShallow;
|
| - literal = AddInstruction(
|
| - new(zone()) HCallRuntime(context,
|
| - isolate()->factory()->empty_string(),
|
| - Runtime::FunctionForId(function_id),
|
| - 4));
|
| + literal = Add<HCallRuntime>(context,
|
| + isolate()->factory()->empty_string(),
|
| + Runtime::FunctionForId(function_id),
|
| + 4);
|
| }
|
|
|
| // The object is expected in the bailout environment during computation
|
| @@ -5963,8 +4620,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
|
| // of the object. This makes sure that the original object won't
|
| // be used by other optimized code before it is transformed
|
| // (e.g. because of code motion).
|
| - HToFastProperties* result = new(zone()) HToFastProperties(Pop());
|
| - AddInstruction(result);
|
| + HToFastProperties* result = Add<HToFastProperties>(Pop());
|
| return ast_context()->ReturnValue(result);
|
| } else {
|
| return ast_context()->ReturnValue(Pop());
|
| @@ -5981,23 +4637,37 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
|
| HValue* context = environment()->LookupContext();
|
| HInstruction* literal;
|
|
|
| + Handle<AllocationSite> site;
|
| Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
|
| - Handle<Object> raw_boilerplate(literals->get(expr->literal_index()),
|
| - isolate());
|
| -
|
| - if (raw_boilerplate->IsUndefined()) {
|
| + bool uninitialized = false;
|
| + Handle<Object> literals_cell(literals->get(expr->literal_index()),
|
| + isolate());
|
| + Handle<Object> raw_boilerplate;
|
| + if (literals_cell->IsUndefined()) {
|
| + uninitialized = true;
|
| raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
|
| isolate(), literals, expr->constant_elements());
|
| if (raw_boilerplate.is_null()) {
|
| return Bailout("array boilerplate creation failed");
|
| }
|
| - literals->set(expr->literal_index(), *raw_boilerplate);
|
| +
|
| + site = isolate()->factory()->NewAllocationSite();
|
| + site->set_payload(*raw_boilerplate);
|
| + literals->set(expr->literal_index(), *site);
|
| +
|
| if (JSObject::cast(*raw_boilerplate)->elements()->map() ==
|
| isolate()->heap()->fixed_cow_array_map()) {
|
| isolate()->counters()->cow_arrays_created_runtime()->Increment();
|
| }
|
| + } else {
|
| + ASSERT(literals_cell->IsAllocationSite());
|
| + site = Handle<AllocationSite>::cast(literals_cell);
|
| + raw_boilerplate = Handle<Object>(site->payload(), isolate());
|
| }
|
|
|
| + ASSERT(!raw_boilerplate.is_null());
|
| + ASSERT(site->IsLiteralSite());
|
| +
|
| Handle<JSObject> original_boilerplate_object =
|
| Handle<JSObject>::cast(raw_boilerplate);
|
| ElementsKind boilerplate_elements_kind =
|
| @@ -6006,7 +4676,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
|
| // TODO(mvstanton): This heuristic is only a temporary solution. In the
|
| // end, we want to quit creating allocation site info after a certain number
|
| // of GCs for a call site.
|
| - AllocationSiteMode mode = AllocationSiteInfo::GetMode(
|
| + AllocationSiteMode mode = AllocationSite::GetMode(
|
| boilerplate_elements_kind);
|
|
|
| // Check whether to use fast or slow deep-copying for boilerplate.
|
| @@ -6026,6 +4696,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
|
| literal = BuildFastLiteral(context,
|
| boilerplate_object,
|
| original_boilerplate_object,
|
| + site,
|
| data_size,
|
| pointer_size,
|
| mode);
|
| @@ -6036,20 +4707,16 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
|
| Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
|
| int literal_index = expr->literal_index();
|
|
|
| - AddInstruction(new(zone()) HPushArgument(AddInstruction(
|
| - new(zone()) HConstant(literals))));
|
| - AddInstruction(new(zone()) HPushArgument(AddInstruction(
|
| - new(zone()) HConstant(literal_index))));
|
| - AddInstruction(new(zone()) HPushArgument(AddInstruction(
|
| - new(zone()) HConstant(constants))));
|
| + Add<HPushArgument>(Add<HConstant>(literals));
|
| + Add<HPushArgument>(Add<HConstant>(literal_index));
|
| + Add<HPushArgument>(Add<HConstant>(constants));
|
|
|
| Runtime::FunctionId function_id = (expr->depth() > 1)
|
| ? Runtime::kCreateArrayLiteral : Runtime::kCreateArrayLiteralShallow;
|
| - literal = AddInstruction(
|
| - new(zone()) HCallRuntime(context,
|
| - isolate()->factory()->empty_string(),
|
| - Runtime::FunctionForId(function_id),
|
| - 3));
|
| + literal = Add<HCallRuntime>(context,
|
| + isolate()->factory()->empty_string(),
|
| + Runtime::FunctionForId(function_id),
|
| + 3);
|
|
|
| // De-opt if elements kind changed from boilerplate_elements_kind.
|
| Handle<Map> map = Handle<Map>(original_boilerplate_object->map(),
|
| @@ -6061,7 +4728,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
|
| // of the property values and is the value of the entire expression.
|
| Push(literal);
|
| // The literal index is on the stack, too.
|
| - Push(AddInstruction(new(zone()) HConstant(expr->literal_index())));
|
| + Push(Add<HConstant>(expr->literal_index()));
|
|
|
| HInstruction* elements = NULL;
|
|
|
| @@ -6077,7 +4744,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
|
|
|
| elements = AddLoadElements(literal);
|
|
|
| - HValue* key = AddInstruction(new(zone()) HConstant(i));
|
| + HValue* key = Add<HConstant>(i);
|
|
|
| switch (boilerplate_elements_kind) {
|
| case FAST_SMI_ELEMENTS:
|
| @@ -6085,13 +4752,12 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
|
| case FAST_ELEMENTS:
|
| case FAST_HOLEY_ELEMENTS:
|
| case FAST_DOUBLE_ELEMENTS:
|
| - case FAST_HOLEY_DOUBLE_ELEMENTS:
|
| - AddInstruction(new(zone()) HStoreKeyed(
|
| - elements,
|
| - key,
|
| - value,
|
| - boilerplate_elements_kind));
|
| + case FAST_HOLEY_DOUBLE_ELEMENTS: {
|
| + HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
|
| + boilerplate_elements_kind);
|
| + instr->SetUninitialized(uninitialized);
|
| break;
|
| + }
|
| default:
|
| UNREACHABLE();
|
| break;
|
| @@ -6187,11 +4853,9 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
|
| ASSERT(proto->GetPrototype(isolate())->IsNull());
|
| }
|
| ASSERT(proto->IsJSObject());
|
| - AddInstruction(new(zone()) HCheckPrototypeMaps(
|
| - Handle<JSObject>(JSObject::cast(map->prototype())),
|
| - Handle<JSObject>(JSObject::cast(proto)),
|
| - zone(),
|
| - top_info()));
|
| + Add<HCheckPrototypeMaps>(Handle<JSObject>(JSObject::cast(map->prototype())),
|
| + Handle<JSObject>(JSObject::cast(proto)),
|
| + zone(), top_info());
|
| }
|
|
|
| HObjectAccess field_access = HObjectAccess::ForField(map, lookup, name);
|
| @@ -6203,11 +4867,10 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
|
| if (transition_to_field) {
|
| // The store requires a mutable HeapNumber to be allocated.
|
| NoObservableSideEffectsScope no_side_effects(this);
|
| - HInstruction* heap_number_size = AddInstruction(new(zone()) HConstant(
|
| - HeapNumber::kSize));
|
| - HInstruction* double_box = AddInstruction(new(zone()) HAllocate(
|
| + HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
|
| + HInstruction* double_box = Add<HAllocate>(
|
| environment()->LookupContext(), heap_number_size,
|
| - HType::HeapNumber(), HAllocate::CAN_ALLOCATE_IN_NEW_SPACE));
|
| + HType::HeapNumber(), HAllocate::CAN_ALLOCATE_IN_NEW_SPACE);
|
| AddStoreMapConstant(double_box, isolate()->factory()->heap_number_map());
|
| AddStore(double_box, HObjectAccess::ForHeapNumberValue(),
|
| value, Representation::Double());
|
| @@ -6257,8 +4920,8 @@ HInstruction* HOptimizedGraphBuilder::BuildCallSetter(
|
| Handle<JSFunction> setter,
|
| Handle<JSObject> holder) {
|
| AddCheckConstantFunction(holder, object, map);
|
| - AddInstruction(new(zone()) HPushArgument(object));
|
| - AddInstruction(new(zone()) HPushArgument(value));
|
| + Add<HPushArgument>(object);
|
| + Add<HPushArgument>(value);
|
| return new(zone()) HCallConstantFunction(setter, 2);
|
| }
|
|
|
| @@ -6437,7 +5100,7 @@ void HOptimizedGraphBuilder::HandlePolymorphicStoreNamedField(
|
| HBasicBlock* if_true = graph()->CreateBasicBlock();
|
| HBasicBlock* if_false = graph()->CreateBasicBlock();
|
| HCompareMap* compare =
|
| - new(zone()) HCompareMap(object, map, if_true, if_false);
|
| + new(zone()) HCompareMap(object, map, if_true, if_false);
|
| current_block()->Finish(compare);
|
|
|
| set_current_block(if_true);
|
| @@ -6538,25 +5201,32 @@ void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
|
| if (type == kUseCell) {
|
| Handle<GlobalObject> global(current_info()->global_object());
|
| Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
|
| + if (cell->type()->IsConstant()) {
|
| + IfBuilder builder(this);
|
| + HValue* constant = Add<HConstant>(cell->type()->AsConstant());
|
| + if (cell->type()->AsConstant()->IsNumber()) {
|
| + builder.If<HCompareNumericAndBranch>(value, constant, Token::EQ);
|
| + } else {
|
| + builder.If<HCompareObjectEqAndBranch>(value, constant);
|
| + }
|
| + builder.Then();
|
| + builder.Else();
|
| + AddSoftDeoptimize(MUST_EMIT_SOFT_DEOPT);
|
| + builder.End();
|
| + }
|
| HInstruction* instr =
|
| - new(zone()) HStoreGlobalCell(value, cell, lookup.GetPropertyDetails());
|
| + Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
|
| instr->set_position(position);
|
| - AddInstruction(instr);
|
| if (instr->HasObservableSideEffects()) {
|
| AddSimulate(ast_id, REMOVABLE_SIMULATE);
|
| }
|
| } else {
|
| HValue* context = environment()->LookupContext();
|
| - HGlobalObject* global_object = new(zone()) HGlobalObject(context);
|
| - AddInstruction(global_object);
|
| + HGlobalObject* global_object = Add<HGlobalObject>(context);
|
| HStoreGlobalGeneric* instr =
|
| - new(zone()) HStoreGlobalGeneric(context,
|
| - global_object,
|
| - var->name(),
|
| - value,
|
| - function_strict_mode_flag());
|
| + Add<HStoreGlobalGeneric>(context, global_object, var->name(),
|
| + value, function_strict_mode_flag());
|
| instr->set_position(position);
|
| - AddInstruction(instr);
|
| ASSERT(instr->HasObservableSideEffects());
|
| AddSimulate(ast_id, REMOVABLE_SIMULATE);
|
| }
|
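| The new code in the kUseCell branch above guards stores through a property
| cell whose recorded type is a single constant: the incoming value is compared
| against that constant (numerically for numbers, by object identity otherwise),
| and a forced soft deoptimization is emitted on the mismatch path so optimized
| code stops relying on a stale cell type. A standalone sketch of that control
| flow under invented names (the real guard uses IfBuilder and AddSoftDeoptimize):
|
| #include <cstdio>
|
| struct CellStub { bool has_constant_type; int constant; int payload; };
|
| // Invented stand-in for the bailout the graph builder emits.
| static void SoftDeoptimize() { std::printf("leave optimized code\n"); }
|
| static void StoreGlobalSketch(CellStub* cell, int value) {
|   if (cell->has_constant_type && value != cell->constant) {
|     SoftDeoptimize();  // mirrors AddSoftDeoptimize(MUST_EMIT_SOFT_DEOPT)
|     return;            // unoptimized code redoes the store generically
|   }
|   cell->payload = value;  // fast path: the plain cell store
| }
|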
| @@ -6592,8 +5262,8 @@ void HOptimizedGraphBuilder::BuildStoreNamed(Expression* expr,
|
| return;
|
| }
|
| Drop(2);
|
| - AddInstruction(new(zone()) HPushArgument(object));
|
| - AddInstruction(new(zone()) HPushArgument(value));
|
| + Add<HPushArgument>(object);
|
| + Add<HPushArgument>(value);
|
| instr = new(zone()) HCallConstantFunction(setter, 2);
|
| } else {
|
| Drop(2);
|
| @@ -6690,9 +5360,8 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
|
| }
|
|
|
| HValue* context = BuildContextChainWalk(var);
|
| - HStoreContextSlot* instr =
|
| - new(zone()) HStoreContextSlot(context, var->index(), mode, Top());
|
| - AddInstruction(instr);
|
| + HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
|
| + mode, Top());
|
| if (instr->HasObservableSideEffects()) {
|
| AddSimulate(expr->AssignmentId(), REMOVABLE_SIMULATE);
|
| }
|
| @@ -6822,7 +5491,7 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
|
| // We insert a use of the old value to detect unsupported uses of const
|
| // variables (e.g. initialization inside a loop).
|
| HValue* old_value = environment()->Lookup(var);
|
| - AddInstruction(new(zone()) HUseConst(old_value));
|
| + Add<HUseConst>(old_value);
|
| }
|
| } else if (var->mode() == CONST_HARMONY) {
|
| if (expr->op() != Token::INIT_CONST_HARMONY) {
|
| @@ -6903,9 +5572,8 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
|
| }
|
|
|
| HValue* context = BuildContextChainWalk(var);
|
| - HStoreContextSlot* instr = new(zone()) HStoreContextSlot(
|
| - context, var->index(), mode, Top());
|
| - AddInstruction(instr);
|
| + HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
|
| + mode, Top());
|
| if (instr->HasObservableSideEffects()) {
|
| AddSimulate(expr->AssignmentId(), REMOVABLE_SIMULATE);
|
| }
|
| @@ -6939,9 +5607,8 @@ void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
|
|
|
| HValue* context = environment()->LookupContext();
|
| HValue* value = environment()->Pop();
|
| - HThrow* instr = new(zone()) HThrow(context, value);
|
| + HThrow* instr = Add<HThrow>(context, value);
|
| instr->set_position(expr->position());
|
| - AddInstruction(instr);
|
| AddSimulate(expr->id());
|
| current_block()->FinishExit(new(zone()) HAbnormalExit);
|
| set_current_block(NULL);
|
| @@ -6987,7 +5654,7 @@ HInstruction* HOptimizedGraphBuilder::BuildCallGetter(
|
| Handle<JSFunction> getter,
|
| Handle<JSObject> holder) {
|
| AddCheckConstantFunction(holder, object, map);
|
| - AddInstruction(new(zone()) HPushArgument(object));
|
| + Add<HPushArgument>(object);
|
| return new(zone()) HCallConstantFunction(getter, 1);
|
| }
|
|
|
| @@ -7032,9 +5699,8 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
|
| Handle<JSObject> holder(lookup.holder());
|
| Handle<Map> holder_map(holder->map());
|
| AddCheckMap(object, map);
|
| - AddInstruction(new(zone()) HCheckPrototypeMaps(
|
| - prototype, holder, zone(), top_info()));
|
| - HValue* holder_value = AddInstruction(new(zone()) HConstant(holder));
|
| + Add<HCheckPrototypeMaps>(prototype, holder, zone(), top_info());
|
| + HValue* holder_value = Add<HConstant>(holder);
|
| return BuildLoadNamedField(holder_value,
|
| HObjectAccess::ForField(holder_map, &lookup, name),
|
| ComputeLoadStoreRepresentation(map, &lookup));
|
| @@ -7046,8 +5712,7 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedMonomorphic(
|
| Handle<JSObject> holder(lookup.holder());
|
| Handle<Map> holder_map(holder->map());
|
| AddCheckMap(object, map);
|
| - AddInstruction(new(zone()) HCheckPrototypeMaps(
|
| - prototype, holder, zone(), top_info()));
|
| + Add<HCheckPrototypeMaps>(prototype, holder, zone(), top_info());
|
| Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*holder_map));
|
| return new(zone()) HConstant(function);
|
| }
|
| @@ -7084,8 +5749,7 @@ HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
|
| isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
|
| Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
|
| Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
|
| - AddInstruction(new(zone()) HCheckPrototypeMaps(
|
| - prototype, object_prototype, zone(), top_info()));
|
| + Add<HCheckPrototypeMaps>(prototype, object_prototype, zone(), top_info());
|
| load_mode = ALLOW_RETURN_HOLE;
|
| graph()->MarkDependsOnEmptyArrayProtoElements();
|
| }
|
| @@ -7217,9 +5881,8 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
|
| map->elements_kind(),
|
| transition_target.at(i)->elements_kind()));
|
| HValue* context = environment()->LookupContext();
|
| - transition = new(zone()) HTransitionElementsKind(
|
| - context, object, map, transition_target.at(i));
|
| - AddInstruction(transition);
|
| + transition = Add<HTransitionElementsKind>(context, object, map,
|
| + transition_target.at(i));
|
| } else {
|
| type_todo[map->elements_kind()] = true;
|
| if (IsExternalArrayElementsKind(map->elements_kind())) {
|
| @@ -7252,8 +5915,7 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
|
| AddInstruction(HCheckInstanceType::NewIsSpecObject(object, zone()));
|
| HBasicBlock* join = graph()->CreateBasicBlock();
|
|
|
| - HInstruction* elements_kind_instr =
|
| - AddInstruction(new(zone()) HElementsKind(object));
|
| + HInstruction* elements_kind_instr = Add<HElementsKind>(object);
|
| HInstruction* elements = AddLoadElements(object, checkspec);
|
| HLoadExternalArrayPointer* external_elements = NULL;
|
| HInstruction* checked_key = NULL;
|
| @@ -7274,11 +5936,9 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
|
| LAST_ELEMENTS_KIND);
|
| if (elements_kind == FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
|
| && todo_external_array) {
|
| - HInstruction* length =
|
| - AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
|
| - checked_key = AddBoundsCheck(key, length);
|
| - external_elements = new(zone()) HLoadExternalArrayPointer(elements);
|
| - AddInstruction(external_elements);
|
| + HInstruction* length = AddLoadFixedArrayLength(elements);
|
| + checked_key = Add<HBoundsCheck>(key, length);
|
| + external_elements = Add<HLoadExternalArrayPointer>(elements);
|
| }
|
| if (type_todo[elements_kind]) {
|
| HBasicBlock* if_true = graph()->CreateBasicBlock();
|
| @@ -7319,7 +5979,7 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
|
| typecheck, Representation::Smi());
|
| length->set_type(HType::Smi());
|
|
|
| - checked_key = AddBoundsCheck(key, length);
|
| + checked_key = Add<HBoundsCheck>(key, length);
|
| access = AddInstruction(BuildFastElementAccess(
|
| elements, checked_key, val, elements_kind_branch,
|
| elements_kind, is_store, NEVER_RETURN_HOLE, STANDARD_STORE));
|
| @@ -7336,8 +5996,8 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
|
| if_jsarray->GotoNoSimulate(join);
|
|
|
| set_current_block(if_fastobject);
|
| - length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
|
| - checked_key = AddBoundsCheck(key, length);
|
| + length = AddLoadFixedArrayLength(elements);
|
| + checked_key = Add<HBoundsCheck>(key, length);
|
| access = AddInstruction(BuildFastElementAccess(
|
| elements, checked_key, val, elements_kind_branch,
|
| elements_kind, is_store, NEVER_RETURN_HOLE, STANDARD_STORE));
|
| @@ -7469,8 +6129,7 @@ bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
|
| if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false;
|
|
|
| if (function_state()->outer() == NULL) {
|
| - HInstruction* elements = AddInstruction(
|
| - new(zone()) HArgumentsElements(false));
|
| + HInstruction* elements = Add<HArgumentsElements>(false);
|
| result = new(zone()) HArgumentsLength(elements);
|
| } else {
|
| // Number of arguments without receiver.
|
| @@ -7485,11 +6144,9 @@ bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
|
| HValue* key = Pop();
|
| Drop(1); // Arguments object.
|
| if (function_state()->outer() == NULL) {
|
| - HInstruction* elements = AddInstruction(
|
| - new(zone()) HArgumentsElements(false));
|
| - HInstruction* length = AddInstruction(
|
| - new(zone()) HArgumentsLength(elements));
|
| - HInstruction* checked_key = AddBoundsCheck(key, length);
|
| + HInstruction* elements = Add<HArgumentsElements>(false);
|
| + HInstruction* length = Add<HArgumentsLength>(elements);
|
| + HInstruction* checked_key = Add<HBoundsCheck>(key, length);
|
| result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
|
| } else {
|
| EnsureArgumentsArePushedForAccess();
|
| @@ -7498,9 +6155,8 @@ bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
|
| HInstruction* elements = function_state()->arguments_elements();
|
| int argument_count = environment()->
|
| arguments_environment()->parameter_count() - 1;
|
| - HInstruction* length = AddInstruction(new(zone()) HConstant(
|
| - argument_count));
|
| - HInstruction* checked_key = AddBoundsCheck(key, length);
|
| + HInstruction* length = Add<HConstant>(argument_count);
|
| + HInstruction* checked_key = Add<HBoundsCheck>(key, length);
|
| result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
|
| }
|
| }
|
| @@ -7559,7 +6215,7 @@ void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
|
| if (LookupGetter(map, name, &getter, &holder)) {
|
| AddCheckConstantFunction(holder, Top(), map);
|
| if (FLAG_inline_accessors && TryInlineGetter(getter, expr)) return;
|
| - AddInstruction(new(zone()) HPushArgument(Pop()));
|
| + Add<HPushArgument>(Pop());
|
| instr = new(zone()) HCallConstantFunction(getter, 1);
|
| } else {
|
| instr = BuildLoadNamedMonomorphic(Pop(), name, expr, map);
|
| @@ -7601,8 +6257,7 @@ void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
|
| Handle<Map> receiver_map) {
|
| if (!holder.is_null()) {
|
| Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
|
| - AddInstruction(new(zone()) HCheckPrototypeMaps(
|
| - prototype, holder, zone(), top_info()));
|
| + Add<HCheckPrototypeMaps>(prototype, holder, zone(), top_info());
|
| }
|
| }
|
|
|
| @@ -7874,7 +6529,7 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
|
| return false;
|
| }
|
|
|
| -#if !defined(V8_TARGET_ARCH_IA32)
|
| +#if !V8_TARGET_ARCH_IA32
|
| // Target must be able to use caller's context.
|
| CompilationInfo* outer_info = current_info();
|
| if (target->context() != outer_info->closure()->context() ||
|
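| A small detail in the hunk above: the guard changes from
| #if !defined(V8_TARGET_ARCH_IA32) to #if !V8_TARGET_ARCH_IA32 (and the
| matching #ifdef further down becomes #if). The two spellings agree when the
| macro is undefined or defined to a non-zero value and diverge only when it is
| defined to 0, so the value-test form presumably reflects a convention of
| always defining such architecture macros to 0 or 1. A minimal illustration of
| the divergence, with a hypothetical build setting:
|
| #define V8_TARGET_ARCH_IA32 0  // hypothetical, chosen to expose the difference
|
| #if !defined(V8_TARGET_ARCH_IA32)
| #error "not reached: the macro is defined, even though its value is 0"
| #endif
|
| #if !V8_TARGET_ARCH_IA32
| // reached: the value test treats a macro defined to 0 like an absent one
| #endif
|
| int main() { return 0; }
|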
| @@ -8023,15 +6678,13 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
|
| undefined,
|
| function_state()->inlining_kind(),
|
| undefined_receiver);
|
| -#ifdef V8_TARGET_ARCH_IA32
|
| +#if V8_TARGET_ARCH_IA32
|
| // IA32 only, overwrite the caller's context in the deoptimization
|
| // environment with the correct one.
|
| //
|
| // TODO(kmillikin): implement the same inlining on other platforms so we
|
| // can remove the unsightly ifdefs in this function.
|
| - HConstant* context =
|
| - new(zone()) HConstant(Handle<Context>(target->context()));
|
| - AddInstruction(context);
|
| + HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
|
| inner_env->BindContext(context);
|
| #endif
|
|
|
| @@ -8045,25 +6698,19 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
|
| ASSERT(function->scope()->arguments()->IsStackAllocated());
|
| HEnvironment* arguments_env = inner_env->arguments_environment();
|
| int arguments_count = arguments_env->parameter_count();
|
| - arguments_object = new(zone()) HArgumentsObject(arguments_count, zone());
|
| + arguments_object = Add<HArgumentsObject>(arguments_count, zone());
|
| inner_env->Bind(function->scope()->arguments(), arguments_object);
|
| for (int i = 0; i < arguments_count; i++) {
|
| arguments_object->AddArgument(arguments_env->Lookup(i), zone());
|
| }
|
| - AddInstruction(arguments_object);
|
| }
|
|
|
| HEnterInlined* enter_inlined =
|
| - new(zone()) HEnterInlined(target,
|
| - arguments_count,
|
| - function,
|
| - function_state()->inlining_kind(),
|
| - function->scope()->arguments(),
|
| - arguments_object,
|
| - undefined_receiver,
|
| - zone());
|
| + Add<HEnterInlined>(target, arguments_count, function,
|
| + function_state()->inlining_kind(),
|
| + function->scope()->arguments(),
|
| + arguments_object, undefined_receiver, zone());
|
| function_state()->set_entry(enter_inlined);
|
| - AddInstruction(enter_inlined);
|
|
|
| VisitDeclarations(target_info.scope()->declarations());
|
| VisitStatements(function->body());
|
| @@ -8301,12 +6948,9 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
|
| HValue* string = Pop();
|
| HValue* context = environment()->LookupContext();
|
| ASSERT(!expr->holder().is_null());
|
| - AddInstruction(new(zone()) HCheckPrototypeMaps(
|
| - Call::GetPrototypeForPrimitiveCheck(STRING_CHECK,
|
| - expr->holder()->GetIsolate()),
|
| - expr->holder(),
|
| - zone(),
|
| - top_info()));
|
| + Add<HCheckPrototypeMaps>(Call::GetPrototypeForPrimitiveCheck(
|
| + STRING_CHECK, expr->holder()->GetIsolate()),
|
| + expr->holder(), zone(), top_info());
|
| HInstruction* char_code =
|
| BuildStringCharCodeAt(context, string, index);
|
| if (id == kStringCharCodeAt) {
|
| @@ -8397,8 +7041,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
|
| AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
|
| Drop(1); // Receiver.
|
| HValue* context = environment()->LookupContext();
|
| - HGlobalObject* global_object = new(zone()) HGlobalObject(context);
|
| - AddInstruction(global_object);
|
| + HGlobalObject* global_object = Add<HGlobalObject>(context);
|
| HRandom* result = new(zone()) HRandom(global_object);
|
| ast_context()->ReturnInstruction(result, expr->id());
|
| return true;
|
| @@ -8477,12 +7120,9 @@ bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
|
| HValue* receiver = Pop();
|
|
|
| if (function_state()->outer() == NULL) {
|
| - HInstruction* elements = AddInstruction(
|
| - new(zone()) HArgumentsElements(false));
|
| - HInstruction* length =
|
| - AddInstruction(new(zone()) HArgumentsLength(elements));
|
| - HValue* wrapped_receiver =
|
| - AddInstruction(new(zone()) HWrapReceiver(receiver, function));
|
| + HInstruction* elements = Add<HArgumentsElements>(false);
|
| + HInstruction* length = Add<HArgumentsLength>(elements);
|
| + HValue* wrapped_receiver = Add<HWrapReceiver>(receiver, function);
|
| HInstruction* result =
|
| new(zone()) HApplyArguments(function,
|
| wrapped_receiver,
|
| @@ -8707,14 +7347,12 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
|
|
|
| CHECK_ALIVE(VisitForValue(expr->expression()));
|
| HValue* function = Pop();
|
| - AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
|
| + Add<HCheckFunction>(function, expr->target());
|
|
|
| // Replace the global object with the global receiver.
|
| - HGlobalReceiver* global_receiver =
|
| - new(zone()) HGlobalReceiver(global_object);
|
| + HGlobalReceiver* global_receiver = Add<HGlobalReceiver>(global_object);
|
| // Index of the receiver from the top of the expression stack.
|
| const int receiver_index = argument_count - 1;
|
| - AddInstruction(global_receiver);
|
| ASSERT(environment()->ExpressionStackAt(receiver_index)->
|
| IsGlobalObject());
|
| environment()->SetExpressionStackAt(receiver_index, global_receiver);
|
| @@ -8745,8 +7383,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
|
| }
|
| } else {
|
| HValue* context = environment()->LookupContext();
|
| - HGlobalObject* receiver = new(zone()) HGlobalObject(context);
|
| - AddInstruction(receiver);
|
| + HGlobalObject* receiver = Add<HGlobalObject>(context);
|
| PushAndAdd(new(zone()) HPushArgument(receiver));
|
| CHECK_ALIVE(VisitArgumentList(expr->arguments()));
|
|
|
| @@ -8760,12 +7397,11 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
|
| CHECK_ALIVE(VisitForValue(expr->expression()));
|
| HValue* function = Top();
|
| HValue* context = environment()->LookupContext();
|
| - HGlobalObject* global = new(zone()) HGlobalObject(context);
|
| - AddInstruction(global);
|
| + HGlobalObject* global = Add<HGlobalObject>(context);
|
| HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
|
| PushAndAdd(receiver);
|
| CHECK_ALIVE(VisitExpressions(expr->arguments()));
|
| - AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
|
| + Add<HCheckFunction>(function, expr->target());
|
|
|
| if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function.
|
| if (FLAG_trace_inlining) {
|
| @@ -8791,10 +7427,8 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
|
| CHECK_ALIVE(VisitForValue(expr->expression()));
|
| HValue* function = Top();
|
| HValue* context = environment()->LookupContext();
|
| - HGlobalObject* global_object = new(zone()) HGlobalObject(context);
|
| - AddInstruction(global_object);
|
| - HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
|
| - AddInstruction(receiver);
|
| + HGlobalObject* global_object = Add<HGlobalObject>(context);
|
| + HGlobalReceiver* receiver = Add<HGlobalReceiver>(global_object);
|
| PushAndAdd(new(zone()) HPushArgument(receiver));
|
| CHECK_ALIVE(VisitArgumentList(expr->arguments()));
|
|
|
| @@ -8832,8 +7466,7 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
|
| HValue* function = Top();
|
| CHECK_ALIVE(VisitExpressions(expr->arguments()));
|
| Handle<JSFunction> constructor = expr->target();
|
| - HValue* check = AddInstruction(
|
| - new(zone()) HCheckFunction(function, constructor));
|
| + HValue* check = Add<HCheckFunction>(function, constructor);
|
|
|
| // Force completion of inobject slack tracking before generating
|
| // allocation code to finalize instance size.
|
| @@ -8842,10 +7475,9 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
|
| }
|
|
|
| // Replace the constructor function with a newly allocated receiver.
|
| - HInstruction* receiver = new(zone()) HAllocateObject(context, constructor);
|
| + HInstruction* receiver = Add<HAllocateObject>(context, constructor);
|
| // Index of the receiver from the top of the expression stack.
|
| const int receiver_index = argument_count - 1;
|
| - AddInstruction(receiver);
|
| ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
|
| environment()->SetExpressionStackAt(receiver_index, receiver);
|
|
|
| @@ -8873,9 +7505,9 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
|
| HCallNew* call;
|
| if (expr->target().is_identical_to(array_function)) {
|
| Handle<Cell> cell = expr->allocation_info_cell();
|
| - AddInstruction(new(zone()) HCheckFunction(constructor, array_function));
|
| + Add<HCheckFunction>(constructor, array_function);
|
| call = new(zone()) HCallNewArray(context, constructor, argument_count,
|
| - cell);
|
| + cell, expr->elements_kind());
|
| } else {
|
| call = new(zone()) HCallNew(context, constructor, argument_count);
|
| }
|
| @@ -9009,18 +7641,8 @@ void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
|
| void HOptimizedGraphBuilder::VisitSub(UnaryOperation* expr) {
|
| CHECK_ALIVE(VisitForValue(expr->expression()));
|
| HValue* value = Pop();
|
| - HValue* context = environment()->LookupContext();
|
| - HInstruction* instr =
|
| - HMul::New(zone(), context, value, graph()->GetConstantMinus1());
|
| Handle<Type> operand_type = expr->expression()->lower_type();
|
| - Representation rep = ToRepresentation(operand_type);
|
| - if (operand_type->Is(Type::None())) {
|
| - AddSoftDeoptimize();
|
| - }
|
| - if (instr->IsBinaryOperation()) {
|
| - HBinaryOperation::cast(instr)->set_observed_input_representation(1, rep);
|
| - HBinaryOperation::cast(instr)->set_observed_input_representation(2, rep);
|
| - }
|
| + HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::SUB);
|
| return ast_context()->ReturnInstruction(instr, expr->id());
|
| }
|
|
|
| @@ -9029,10 +7651,7 @@ void HOptimizedGraphBuilder::VisitBitNot(UnaryOperation* expr) {
|
| CHECK_ALIVE(VisitForValue(expr->expression()));
|
| HValue* value = Pop();
|
| Handle<Type> operand_type = expr->expression()->lower_type();
|
| - if (operand_type->Is(Type::None())) {
|
| - AddSoftDeoptimize();
|
| - }
|
| - HInstruction* instr = new(zone()) HBitNot(value);
|
| + HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::BIT_NOT);
|
| return ast_context()->ReturnInstruction(instr, expr->id());
|
| }
|
|
|
| @@ -9086,7 +7705,7 @@ HInstruction* HOptimizedGraphBuilder::BuildIncrement(
|
| CountOperation* expr) {
|
| // The input to the count operation is on top of the expression stack.
|
| TypeInfo info = expr->type();
|
| - Representation rep = ToRepresentation(info);
|
| + Representation rep = Representation::FromType(info);
|
| if (rep.IsNone() || rep.IsTagged()) {
|
| rep = Representation::Smi();
|
| }
|
| @@ -9096,12 +7715,11 @@ HInstruction* HOptimizedGraphBuilder::BuildIncrement(
|
| // actual HChange instruction we need is (sometimes) added in a later
|
| // phase, so it is not available now to be used as an input to HAdd and
|
| // as the return value.
|
| - HInstruction* number_input = new(zone()) HForceRepresentation(Pop(), rep);
|
| + HInstruction* number_input = Add<HForceRepresentation>(Pop(), rep);
|
| if (!rep.IsDouble()) {
|
| number_input->SetFlag(HInstruction::kFlexibleRepresentation);
|
| number_input->SetFlag(HInstruction::kCannotBeTagged);
|
| }
|
| - AddInstruction(number_input);
|
| Push(number_input);
|
| }
|
|
|
| @@ -9184,9 +7802,8 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
|
| HValue* context = BuildContextChainWalk(var);
|
| HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
|
| ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
|
| - HStoreContextSlot* instr =
|
| - new(zone()) HStoreContextSlot(context, var->index(), mode, after);
|
| - AddInstruction(instr);
|
| + HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
|
| + mode, after);
|
| if (instr->HasObservableSideEffects()) {
|
| AddSimulate(expr->AssignmentId(), REMOVABLE_SIMULATE);
|
| }
|
| @@ -9325,10 +7942,11 @@ HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
|
| AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
|
| HInstruction* length = HStringLength::New(zone(), string);
|
| AddInstruction(length);
|
| - HInstruction* checked_index = AddBoundsCheck(index, length);
|
| + HInstruction* checked_index = Add<HBoundsCheck>(index, length);
|
| return new(zone()) HStringCharCodeAt(context, string, checked_index);
|
| }
|
|
|
| +
|
| // Checks if the given shift amounts have the form: (sa) and (32 - sa).
|
| static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
|
| HValue* const32_minus_sa) {
|
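| The helper whose definition starts above is evidently used to replace a pair
| of shifts combined with a bitwise OR by a single rotate: when the two shift
| amounts have the shape sa and 32 - sa, the combined expression rotates the
| 32-bit value. A small self-contained illustration of that identity (assuming
| 0 < sa < 32, which keeps both shift amounts well defined):
|
| #include <cassert>
| #include <cstdint>
|
| // For 32-bit x and 0 < sa < 32, (x << sa) | (x >> (32 - sa)) is a rotation.
| static uint32_t RotateLeft(uint32_t x, uint32_t sa) {
|   assert(sa > 0 && sa < 32);
|   return (x << sa) | (x >> (32 - sa));
| }
|
| int main() {
|   assert(RotateLeft(0x80000001u, 1) == 0x00000003u);
|   return 0;
| }
|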
| @@ -9396,9 +8014,10 @@ HInstruction* HOptimizedGraphBuilder::BuildBinaryOperation(
|
| Handle<Type> right_type = expr->right()->lower_type();
|
| Handle<Type> result_type = expr->lower_type();
|
| Maybe<int> fixed_right_arg = expr->fixed_right_arg();
|
| - Representation left_rep = ToRepresentation(left_type);
|
| - Representation right_rep = ToRepresentation(right_type);
|
| - Representation result_rep = ToRepresentation(result_type);
|
| + Representation left_rep = Representation::FromType(left_type);
|
| + Representation right_rep = Representation::FromType(right_type);
|
| + Representation result_rep = Representation::FromType(result_type);
|
| +
|
| if (left_type->Is(Type::None())) {
|
| AddSoftDeoptimize();
|
| // TODO(rossberg): we should be able to get rid of non-continuous defaults.
|
| @@ -9559,8 +8178,8 @@ void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
|
| HBasicBlock* eval_right = graph()->CreateBasicBlock();
|
| ToBooleanStub::Types expected(expr->left()->to_boolean_types());
|
| HBranch* test = is_logical_and
|
| - ? new(zone()) HBranch(left_value, eval_right, empty_block, expected)
|
| - : new(zone()) HBranch(left_value, empty_block, eval_right, expected);
|
| + ? new(zone()) HBranch(left_value, expected, eval_right, empty_block)
|
| + : new(zone()) HBranch(left_value, expected, empty_block, eval_right);
|
| current_block()->Finish(test);
|
|
|
| set_current_block(eval_right);
|
| @@ -9626,26 +8245,6 @@ void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
|
| }
|
|
|
|
|
| -// TODO(rossberg): this should die eventually.
|
| -Representation HOptimizedGraphBuilder::ToRepresentation(TypeInfo info) {
|
| - if (info.IsUninitialized()) return Representation::None();
|
| - // TODO(verwaest): Return Smi rather than Integer32.
|
| - if (info.IsSmi()) return Representation::Integer32();
|
| - if (info.IsInteger32()) return Representation::Integer32();
|
| - if (info.IsDouble()) return Representation::Double();
|
| - if (info.IsNumber()) return Representation::Double();
|
| - return Representation::Tagged();
|
| -}
|
| -
|
| -
|
| -Representation HOptimizedGraphBuilder::ToRepresentation(Handle<Type> type) {
|
| - if (type->Is(Type::None())) return Representation::None();
|
| - if (type->Is(Type::Signed32())) return Representation::Integer32();
|
| - if (type->Is(Type::Number())) return Representation::Double();
|
| - return Representation::Tagged();
|
| -}
|
| -
|
| -
|
| void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
|
| HTypeof* typeof_expr,
|
| Handle<String> check) {
|
| @@ -9738,15 +8337,9 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
|
| Handle<Type> left_type = expr->left()->lower_type();
|
| Handle<Type> right_type = expr->right()->lower_type();
|
| Handle<Type> combined_type = expr->combined_type();
|
| - Representation combined_rep = ToRepresentation(combined_type);
|
| - Representation left_rep = ToRepresentation(left_type);
|
| - Representation right_rep = ToRepresentation(right_type);
|
| - // Check if this expression was ever executed according to type feedback.
|
| - // Note that for the special typeof/null/undefined cases we get unknown here.
|
| - if (combined_type->Is(Type::None())) {
|
| - AddSoftDeoptimize();
|
| - combined_type = left_type = right_type = handle(Type::Any(), isolate());
|
| - }
|
| + Representation combined_rep = Representation::FromType(combined_type);
|
| + Representation left_rep = Representation::FromType(left_type);
|
| + Representation right_rep = Representation::FromType(right_type);
|
|
|
| CHECK_ALIVE(VisitForValue(expr->left()));
|
| CHECK_ALIVE(VisitForValue(expr->right()));
|
| @@ -9807,17 +8400,29 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
|
| result->set_position(expr->position());
|
| return ast_context()->ReturnInstruction(result, expr->id());
|
| } else {
|
| - AddInstruction(new(zone()) HCheckFunction(right, target));
|
| + Add<HCheckFunction>(right, target);
|
| HInstanceOfKnownGlobal* result =
|
| new(zone()) HInstanceOfKnownGlobal(context, left, target);
|
| result->set_position(expr->position());
|
| return ast_context()->ReturnInstruction(result, expr->id());
|
| }
|
| +
|
| + // Code below assumes that we don't fall through.
|
| + UNREACHABLE();
|
| } else if (op == Token::IN) {
|
| HIn* result = new(zone()) HIn(context, left, right);
|
| result->set_position(expr->position());
|
| return ast_context()->ReturnInstruction(result, expr->id());
|
| - } else if (combined_type->Is(Type::Receiver())) {
|
| + }
|
| +
|
| + // Cases handled below depend on collected type feedback. They should
|
| + // soft deoptimize when there is no type feedback.
|
| + if (combined_type->Is(Type::None())) {
|
| + AddSoftDeoptimize();
|
| + combined_type = left_type = right_type = handle(Type::Any(), isolate());
|
| + }
|
| +
|
| + if (combined_type->Is(Type::Receiver())) {
|
| switch (op) {
|
| case Token::EQ:
|
| case Token::EQ_STRICT: {
|
| @@ -9863,12 +8468,12 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
|
| result->set_position(expr->position());
|
| return ast_context()->ReturnInstruction(result, expr->id());
|
| } else {
|
| - // TODO(verwaest): Remove once ToRepresentation properly returns Smi when
|
| - // the IC measures Smi.
|
| + // TODO(verwaest): Remove once Representation::FromType properly
|
| + // returns Smi when the IC measures Smi.
|
| if (left_type->Is(Type::Smi())) left_rep = Representation::Smi();
|
| if (right_type->Is(Type::Smi())) right_rep = Representation::Smi();
|
| - HCompareIDAndBranch* result =
|
| - new(zone()) HCompareIDAndBranch(left, right, op);
|
| + HCompareNumericAndBranch* result =
|
| + new(zone()) HCompareNumericAndBranch(left, right, op);
|
| result->set_observed_input_representation(left_rep, right_rep);
|
| result->set_position(expr->position());
|
| return ast_context()->ReturnControl(result, expr->id());
|
| @@ -9918,10 +8523,10 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
|
| HValue* context,
|
| Handle<JSObject> boilerplate_object,
|
| Handle<JSObject> original_boilerplate_object,
|
| + Handle<Object> allocation_site,
|
| int data_size,
|
| int pointer_size,
|
| AllocationSiteMode mode) {
|
| - Zone* zone = this->zone();
|
| NoObservableSideEffectsScope no_effects(this);
|
|
|
| HInstruction* target = NULL;
|
| @@ -9934,9 +8539,9 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
|
| HAllocate::Flags data_flags =
|
| static_cast<HAllocate::Flags>(HAllocate::DefaultFlags() |
|
| HAllocate::CAN_ALLOCATE_IN_OLD_DATA_SPACE);
|
| - HValue* size_in_bytes = AddInstruction(new(zone) HConstant(data_size));
|
| - data_target = AddInstruction(new(zone) HAllocate(
|
| - context, size_in_bytes, HType::JSObject(), data_flags));
|
| + HValue* size_in_bytes = Add<HConstant>(data_size);
|
| + data_target = Add<HAllocate>(context, size_in_bytes,
|
| + HType::JSObject(), data_flags);
|
| Handle<Map> free_space_map = isolate()->factory()->free_space_map();
|
| AddStoreMapConstant(data_target, free_space_map);
|
| HObjectAccess access =
|
| @@ -9946,21 +8551,19 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
|
| if (pointer_size != 0) {
|
| flags = static_cast<HAllocate::Flags>(
|
| flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
|
| - HValue* size_in_bytes = AddInstruction(new(zone) HConstant(pointer_size));
|
| - target = AddInstruction(new(zone) HAllocate(context,
|
| - size_in_bytes, HType::JSObject(), flags));
|
| + HValue* size_in_bytes = Add<HConstant>(pointer_size);
|
| + target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(), flags);
|
| }
|
| } else {
|
| - HValue* size_in_bytes =
|
| - AddInstruction(new(zone) HConstant(data_size + pointer_size));
|
| - target = AddInstruction(new(zone) HAllocate(context, size_in_bytes,
|
| - HType::JSObject(), flags));
|
| + HValue* size_in_bytes = Add<HConstant>(data_size + pointer_size);
|
| + target = Add<HAllocate>(context, size_in_bytes, HType::JSObject(), flags);
|
| }
|
|
|
| int offset = 0;
|
| int data_offset = 0;
|
| - BuildEmitDeepCopy(boilerplate_object, original_boilerplate_object, target,
|
| - &offset, data_target, &data_offset, mode);
|
| + BuildEmitDeepCopy(boilerplate_object, original_boilerplate_object,
|
| + allocation_site, target, &offset, data_target,
|
| + &data_offset, mode);
|
| return target;
|
| }
|
|
|
| @@ -9968,6 +8571,7 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
|
| void HOptimizedGraphBuilder::BuildEmitDeepCopy(
|
| Handle<JSObject> boilerplate_object,
|
| Handle<JSObject> original_boilerplate_object,
|
| + Handle<Object> allocation_site_object,
|
| HInstruction* target,
|
| int* offset,
|
| HInstruction* data_target,
|
| @@ -9975,6 +8579,22 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
|
| AllocationSiteMode mode) {
|
| Zone* zone = this->zone();
|
|
|
| + bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
|
| + boilerplate_object->map()->CanTrackAllocationSite();
|
| +
|
| + // If using allocation sites, then the payload on the site should already
|
| + // be filled in as a valid (boilerplate) array.
|
| + ASSERT(!create_allocation_site_info ||
|
| + AllocationSite::cast(*allocation_site_object)->IsLiteralSite());
|
| +
|
| + HInstruction* allocation_site = NULL;
|
| +
|
| + if (create_allocation_site_info) {
|
| + allocation_site = AddInstruction(new(zone) HConstant(
|
| + allocation_site_object, Representation::Tagged()));
|
| + }
|
| +
|
| + // Only elements backing stores for non-COW arrays need to be copied.
|
| Handle<FixedArrayBase> elements(boilerplate_object->elements());
|
| Handle<FixedArrayBase> original_elements(
|
| original_boilerplate_object->elements());
|
| @@ -10008,19 +8628,19 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy(
|
| }
|
|
|
| // Copy in-object properties.
|
| - HValue* object_properties =
|
| - AddInstruction(new(zone) HInnerAllocatedObject(target, object_offset));
|
| - BuildEmitInObjectProperties(boilerplate_object, original_boilerplate_object,
|
| - object_properties, target, offset, data_target, data_offset);
|
| + if (boilerplate_object->map()->NumberOfFields() != 0) {
|
| + HValue* object_properties =
|
| + Add<HInnerAllocatedObject>(target, object_offset);
|
| + BuildEmitInObjectProperties(boilerplate_object, original_boilerplate_object,
|
| + object_properties, target, offset, data_target, data_offset);
|
| + }
|
|
|
| // Create allocation site info.
|
| if (mode == TRACK_ALLOCATION_SITE &&
|
| boilerplate_object->map()->CanTrackAllocationSite()) {
|
| elements_offset += AllocationSiteInfo::kSize;
|
| *offset += AllocationSiteInfo::kSize;
|
| - HInstruction* original_boilerplate = AddInstruction(new(zone) HConstant(
|
| - original_boilerplate_object));
|
| - BuildCreateAllocationSiteInfo(target, JSArray::kSize, original_boilerplate);
|
| + BuildCreateAllocationSiteInfo(target, JSArray::kSize, allocation_site);
|
| }
|
| }
|
|
|
| @@ -10033,11 +8653,9 @@ HValue* HOptimizedGraphBuilder::BuildEmitObjectHeader(
|
| int elements_offset,
|
| int elements_size) {
|
| ASSERT(boilerplate_object->properties()->length() == 0);
|
| - Zone* zone = this->zone();
|
| HValue* result = NULL;
|
|
|
| - HValue* object_header =
|
| - AddInstruction(new(zone) HInnerAllocatedObject(target, object_offset));
|
| + HValue* object_header = Add<HInnerAllocatedObject>(target, object_offset);
|
| Handle<Map> boilerplate_object_map(boilerplate_object->map());
|
| AddStoreMapConstant(object_header, boilerplate_object_map);
|
|
|
| @@ -10045,14 +8663,12 @@ HValue* HOptimizedGraphBuilder::BuildEmitObjectHeader(
|
| if (elements_size == 0) {
|
| Handle<Object> elements_field =
|
| Handle<Object>(boilerplate_object->elements(), isolate());
|
| - elements = AddInstruction(new(zone) HConstant(elements_field));
|
| + elements = Add<HConstant>(elements_field);
|
| } else {
|
| if (data_target != NULL && boilerplate_object->HasFastDoubleElements()) {
|
| - elements = AddInstruction(new(zone) HInnerAllocatedObject(
|
| - data_target, elements_offset));
|
| + elements = Add<HInnerAllocatedObject>(data_target, elements_offset);
|
| } else {
|
| - elements = AddInstruction(new(zone) HInnerAllocatedObject(
|
| - target, elements_offset));
|
| + elements = Add<HInnerAllocatedObject>(target, elements_offset);
|
| }
|
| result = elements;
|
| }
|
| @@ -10061,8 +8677,7 @@ HValue* HOptimizedGraphBuilder::BuildEmitObjectHeader(
|
| Handle<Object> properties_field =
|
| Handle<Object>(boilerplate_object->properties(), isolate());
|
| ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
|
| - HInstruction* properties = AddInstruction(new(zone) HConstant(
|
| - properties_field));
|
| + HInstruction* properties = Add<HConstant>(properties_field);
|
| HObjectAccess access = HObjectAccess::ForPropertiesPointer();
|
| AddStore(object_header, access, properties);
|
|
|
| @@ -10071,7 +8686,7 @@ HValue* HOptimizedGraphBuilder::BuildEmitObjectHeader(
|
| Handle<JSArray>::cast(boilerplate_object);
|
| Handle<Object> length_field =
|
| Handle<Object>(boilerplate_array->length(), isolate());
|
| - HInstruction* length = AddInstruction(new(zone) HConstant(length_field));
|
| + HInstruction* length = Add<HConstant>(length_field);
|
|
|
| ASSERT(boilerplate_array->length()->IsSmi());
|
| Representation representation =
|
| @@ -10093,7 +8708,6 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
|
| int* offset,
|
| HInstruction* data_target,
|
| int* data_offset) {
|
| - Zone* zone = this->zone();
|
| Handle<DescriptorArray> descriptors(
|
| boilerplate_object->map()->instance_descriptors());
|
| int limit = boilerplate_object->map()->NumberOfOwnDescriptors();
|
| @@ -10120,28 +8734,26 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
|
| Handle<JSObject> original_value_object = Handle<JSObject>::cast(
|
| Handle<Object>(original_boilerplate_object->InObjectPropertyAt(index),
|
| isolate()));
|
| - HInstruction* value_instruction =
|
| - AddInstruction(new(zone) HInnerAllocatedObject(target, *offset));
|
| + HInstruction* value_instruction = Add<HInnerAllocatedObject>(target,
|
| + *offset);
|
|
|
| AddStore(object_properties, access, value_instruction);
|
| -
|
| - BuildEmitDeepCopy(value_object, original_value_object, target,
|
| - offset, data_target, data_offset, DONT_TRACK_ALLOCATION_SITE);
|
| + BuildEmitDeepCopy(value_object, original_value_object,
|
| + Handle<Object>::null(), target,
|
| + offset, data_target, data_offset,
|
| + DONT_TRACK_ALLOCATION_SITE);
|
| } else {
|
| Representation representation = details.representation();
|
| - HInstruction* value_instruction =
|
| - AddInstruction(new(zone) HConstant(value));
|
| + HInstruction* value_instruction = Add<HConstant>(value);
|
|
|
| if (representation.IsDouble()) {
|
| // Allocate a HeapNumber box and store the value into it.
|
| HInstruction* double_box;
|
| if (data_target != NULL) {
|
| - double_box = AddInstruction(new(zone) HInnerAllocatedObject(
|
| - data_target, *data_offset));
|
| + double_box = Add<HInnerAllocatedObject>(data_target, *data_offset);
|
| *data_offset += HeapNumber::kSize;
|
| } else {
|
| - double_box = AddInstruction(new(zone) HInnerAllocatedObject(
|
| - target, *offset));
|
| + double_box = Add<HInnerAllocatedObject>(target, *offset);
|
| *offset += HeapNumber::kSize;
|
| }
|
| AddStoreMapConstant(double_box,
|
| @@ -10156,8 +8768,8 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
|
| }
|
|
|
| int inobject_properties = boilerplate_object->map()->inobject_properties();
|
| - HInstruction* value_instruction = AddInstruction(new(zone)
|
| - HConstant(isolate()->factory()->one_pointer_filler_map()));
|
| + HInstruction* value_instruction =
|
| + Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
|
| for (int i = copied_fields; i < inobject_properties; i++) {
|
| ASSERT(boilerplate_object->IsJSObject());
|
| int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
|
| @@ -10176,11 +8788,8 @@ void HOptimizedGraphBuilder::BuildEmitElements(
|
| int* offset,
|
| HInstruction* data_target,
|
| int* data_offset) {
|
| - Zone* zone = this->zone();
|
| -
|
| int elements_length = elements->length();
|
| - HValue* object_elements_length =
|
| - AddInstruction(new(zone) HConstant(elements_length));
|
| + HValue* object_elements_length = Add<HConstant>(elements_length);
|
|
|
| BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
|
|
|
| @@ -10200,17 +8809,16 @@ void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
|
| Handle<FixedArrayBase> elements,
|
| ElementsKind kind,
|
| HValue* object_elements) {
|
| - Zone* zone = this->zone();
|
| - HInstruction* boilerplate_elements =
|
| - AddInstruction(new(zone) HConstant(elements));
|
| + HInstruction* boilerplate_elements = Add<HConstant>(elements);
|
| int elements_length = elements->length();
|
| for (int i = 0; i < elements_length; i++) {
|
| - HValue* key_constant = AddInstruction(new(zone) HConstant(i));
|
| + HValue* key_constant = Add<HConstant>(i);
|
| HInstruction* value_instruction =
|
| - AddInstruction(new(zone) HLoadKeyed(
|
| - boilerplate_elements, key_constant, NULL, kind, ALLOW_RETURN_HOLE));
|
| - HInstruction* store = AddInstruction(new(zone) HStoreKeyed(
|
| - object_elements, key_constant, value_instruction, kind));
|
| + Add<HLoadKeyed>(boilerplate_elements, key_constant,
|
| + static_cast<HValue*>(NULL), kind,
|
| + ALLOW_RETURN_HOLE);
|
| + HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
|
| + value_instruction, kind);
|
| store->SetFlag(HValue::kAllowUndefinedAsNaN);
|
| }
|
| }
|
| @@ -10225,37 +8833,36 @@ void HOptimizedGraphBuilder::BuildEmitFixedArray(
|
| int* offset,
|
| HInstruction* data_target,
|
| int* data_offset) {
|
| - Zone* zone = this->zone();
|
| - HInstruction* boilerplate_elements =
|
| - AddInstruction(new(zone) HConstant(elements));
|
| + HInstruction* boilerplate_elements = Add<HConstant>(elements);
|
| int elements_length = elements->length();
|
| Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
|
| Handle<FixedArray> original_fast_elements =
|
| Handle<FixedArray>::cast(original_elements);
|
| for (int i = 0; i < elements_length; i++) {
|
| Handle<Object> value(fast_elements->get(i), isolate());
|
| - HValue* key_constant = AddInstruction(new(zone) HConstant(i));
|
| + HValue* key_constant = Add<HConstant>(i);
|
| if (value->IsJSObject()) {
|
| Handle<JSObject> value_object = Handle<JSObject>::cast(value);
|
| Handle<JSObject> original_value_object = Handle<JSObject>::cast(
|
| Handle<Object>(original_fast_elements->get(i), isolate()));
|
| - HInstruction* value_instruction =
|
| - AddInstruction(new(zone) HInnerAllocatedObject(target, *offset));
|
| - AddInstruction(new(zone) HStoreKeyed(
|
| - object_elements, key_constant, value_instruction, kind));
|
| - BuildEmitDeepCopy(value_object, original_value_object, target,
|
| - offset, data_target, data_offset, DONT_TRACK_ALLOCATION_SITE);
|
| + HInstruction* value_instruction = Add<HInnerAllocatedObject>(target,
|
| + *offset);
|
| + Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
|
| + BuildEmitDeepCopy(value_object, original_value_object,
|
| + Handle<Object>::null(), target,
|
| + offset, data_target, data_offset,
|
| + DONT_TRACK_ALLOCATION_SITE);
|
| } else {
|
| HInstruction* value_instruction =
|
| - AddInstruction(new(zone) HLoadKeyed(
|
| - boilerplate_elements, key_constant, NULL, kind,
|
| - ALLOW_RETURN_HOLE));
|
| - AddInstruction(new(zone) HStoreKeyed(
|
| - object_elements, key_constant, value_instruction, kind));
|
| + Add<HLoadKeyed>(boilerplate_elements, key_constant,
|
| + static_cast<HValue*>(NULL), kind,
|
| + ALLOW_RETURN_HOLE);
|
| + Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
|
| }
|
| }
|
| }
|
|
|
| +
|
| void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
|
| ASSERT(!HasStackOverflow());
|
| ASSERT(current_block() != NULL);
|
| @@ -10276,9 +8883,7 @@ void HOptimizedGraphBuilder::VisitDeclarations(
|
| int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
|
| DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
|
| DeclareGlobalsLanguageMode::encode(current_info()->language_mode());
|
| - HInstruction* result = new(zone()) HDeclareGlobals(
|
| - environment()->LookupContext(), array, flags);
|
| - AddInstruction(result);
|
| + Add<HDeclareGlobals>(environment()->LookupContext(), array, flags);
|
| globals_.Clear();
|
| }
|
| }
|
| @@ -10308,9 +8913,8 @@ void HOptimizedGraphBuilder::VisitVariableDeclaration(
|
| if (hole_init) {
|
| HValue* value = graph()->GetConstantHole();
|
| HValue* context = environment()->LookupContext();
|
| - HStoreContextSlot* store = new(zone()) HStoreContextSlot(
|
| + HStoreContextSlot* store = Add<HStoreContextSlot>(
|
| context, variable->index(), HStoreContextSlot::kNoCheck, value);
|
| - AddInstruction(store);
|
| if (store->HasObservableSideEffects()) {
|
| AddSimulate(proxy->id(), REMOVABLE_SIMULATE);
|
| }
|
| @@ -10347,9 +8951,8 @@ void HOptimizedGraphBuilder::VisitFunctionDeclaration(
|
| CHECK_ALIVE(VisitForValue(declaration->fun()));
|
| HValue* value = Pop();
|
| HValue* context = environment()->LookupContext();
|
| - HStoreContextSlot* store = new(zone()) HStoreContextSlot(
|
| + HStoreContextSlot* store = Add<HStoreContextSlot>(
|
| context, variable->index(), HStoreContextSlot::kNoCheck, value);
|
| - AddInstruction(store);
|
| if (store->HasObservableSideEffects()) {
|
| AddSimulate(proxy->id(), REMOVABLE_SIMULATE);
|
| }
|
| @@ -10521,8 +9124,7 @@ void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
|
| // function is blacklisted by AstNode::IsInlineable.
|
| ASSERT(function_state()->outer() == NULL);
|
| ASSERT(call->arguments()->length() == 0);
|
| - HInstruction* elements = AddInstruction(
|
| - new(zone()) HArgumentsElements(false));
|
| + HInstruction* elements = Add<HArgumentsElements>(false);
|
| HArgumentsLength* result = new(zone()) HArgumentsLength(elements);
|
| return ast_context()->ReturnInstruction(result, call->id());
|
| }
|
| @@ -10536,10 +9138,9 @@ void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
|
| ASSERT(call->arguments()->length() == 1);
|
| CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
|
| HValue* index = Pop();
|
| - HInstruction* elements = AddInstruction(
|
| - new(zone()) HArgumentsElements(false));
|
| - HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
|
| - HInstruction* checked_index = AddBoundsCheck(index, length);
|
| + HInstruction* elements = Add<HArgumentsElements>(false);
|
| + HInstruction* length = Add<HArgumentsLength>(elements);
|
| + HInstruction* checked_index = Add<HBoundsCheck>(index, length);
|
| HAccessArgumentsAt* result =
|
| new(zone()) HAccessArgumentsAt(elements, length, checked_index);
|
| return ast_context()->ReturnInstruction(result, call->id());
|
| @@ -10703,8 +9304,7 @@ void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
|
| // Fast support for Math.random().
|
| void HOptimizedGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
|
| HValue* context = environment()->LookupContext();
|
| - HGlobalObject* global_object = new(zone()) HGlobalObject(context);
|
| - AddInstruction(global_object);
|
| + HGlobalObject* global_object = Add<HGlobalObject>(context);
|
| HRandom* result = new(zone()) HRandom(global_object);
|
| return ast_context()->ReturnInstruction(result, call->id());
|
| }
|
| @@ -10810,15 +9410,14 @@ void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
|
| current_block()->Finish(typecheck);
|
|
|
| set_current_block(if_jsfunction);
|
| - HInstruction* invoke_result = AddInstruction(
|
| - new(zone()) HInvokeFunction(context, function, arg_count));
|
| + HInstruction* invoke_result =
|
| + Add<HInvokeFunction>(context, function, arg_count);
|
| Drop(arg_count);
|
| Push(invoke_result);
|
| if_jsfunction->Goto(join);
|
|
|
| set_current_block(if_nonfunction);
|
| - HInstruction* call_result = AddInstruction(
|
| - new(zone()) HCallFunction(context, function, arg_count));
|
| + HInstruction* call_result = Add<HCallFunction>(context, function, arg_count);
|
| Drop(arg_count);
|
| Push(call_result);
|
| if_nonfunction->Goto(join);
|
| @@ -10931,6 +9530,13 @@ void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
|
| }
|
|
|
|
|
| +void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
|
| + CallRuntime* call) {
|
| + AddInstruction(new(zone()) HDebugBreak());
|
| + return ast_context()->ReturnValue(graph()->GetConstant0());
|
| +}
|
| +
|
| +
|
| #undef CHECK_BAILOUT
|
| #undef CHECK_ALIVE
|
|
|
| @@ -11341,8 +9947,8 @@ void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
|
|
|
| {
|
| Tag HIR_tag(this, "HIR");
|
| - HInstruction* instruction = current->first();
|
| - while (instruction != NULL) {
|
| + for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
|
| + HInstruction* instruction = it.Current();
|
| int bci = 0;
|
| int uses = instruction->UseCount();
|
| PrintIndent();
|
| @@ -11351,7 +9957,6 @@ void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
|
| trace_.Add(" ");
|
| instruction->PrintTo(&trace_);
|
| trace_.Add(" <|@\n");
|
| - instruction = instruction->next();
|
| }
|
| }
|