Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: src/hydrogen-instructions.cc

Issue 1405363003: Move Hydrogen and Lithium to src/crankshaft/ (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rebased Created 5 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/hydrogen-instructions.h ('k') | src/hydrogen-load-elimination.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/hydrogen-instructions.h"
6
7 #include "src/base/bits.h"
8 #include "src/double.h"
9 #include "src/elements.h"
10 #include "src/factory.h"
11 #include "src/hydrogen-infer-representation.h"
12
13 #if V8_TARGET_ARCH_IA32
14 #include "src/ia32/lithium-ia32.h" // NOLINT
15 #elif V8_TARGET_ARCH_X64
16 #include "src/x64/lithium-x64.h" // NOLINT
17 #elif V8_TARGET_ARCH_ARM64
18 #include "src/arm64/lithium-arm64.h" // NOLINT
19 #elif V8_TARGET_ARCH_ARM
20 #include "src/arm/lithium-arm.h" // NOLINT
21 #elif V8_TARGET_ARCH_PPC
22 #include "src/ppc/lithium-ppc.h" // NOLINT
23 #elif V8_TARGET_ARCH_MIPS
24 #include "src/mips/lithium-mips.h" // NOLINT
25 #elif V8_TARGET_ARCH_MIPS64
26 #include "src/mips64/lithium-mips64.h" // NOLINT
27 #elif V8_TARGET_ARCH_X87
28 #include "src/x87/lithium-x87.h" // NOLINT
29 #else
30 #error Unsupported target architecture.
31 #endif
32
33 #include "src/base/safe_math.h"
34
35 namespace v8 {
36 namespace internal {
37
// Generate CompileToLithium() for every concrete hydrogen instruction:
// each simply dispatches to the matching LChunkBuilder::Do<Type>() to
// build the Lithium (backend) equivalent.
#define DEFINE_COMPILE(type)                                        \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
    return builder->Do##type(this);                                 \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE
44
45
46 Isolate* HValue::isolate() const {
47 DCHECK(block() != NULL);
48 return block()->isolate();
49 }
50
51
52 void HValue::AssumeRepresentation(Representation r) {
53 if (CheckFlag(kFlexibleRepresentation)) {
54 ChangeRepresentation(r);
55 // The representation of the value is dictated by type feedback and
56 // will not be changed later.
57 ClearFlag(kFlexibleRepresentation);
58 }
59 }
60
61
62 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
63 DCHECK(CheckFlag(kFlexibleRepresentation));
64 Representation new_rep = RepresentationFromInputs();
65 UpdateRepresentation(new_rep, h_infer, "inputs");
66 new_rep = RepresentationFromUses();
67 UpdateRepresentation(new_rep, h_infer, "uses");
68 if (representation().IsSmi() && HasNonSmiUse()) {
69 UpdateRepresentation(
70 Representation::Integer32(), h_infer, "use requirements");
71 }
72 }
73
74
75 Representation HValue::RepresentationFromUses() {
76 if (HasNoUses()) return Representation::None();
77 Representation result = Representation::None();
78
79 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
80 HValue* use = it.value();
81 Representation rep = use->observed_input_representation(it.index());
82 result = result.generalize(rep);
83
84 if (FLAG_trace_representation) {
85 PrintF("#%d %s is used by #%d %s as %s%s\n",
86 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
87 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
88 }
89 }
90 if (IsPhi()) {
91 result = result.generalize(
92 HPhi::cast(this)->representation_from_indirect_uses());
93 }
94
95 // External representations are dealt with separately.
96 return result.IsExternal() ? Representation::None() : result;
97 }
98
99
100 void HValue::UpdateRepresentation(Representation new_rep,
101 HInferRepresentationPhase* h_infer,
102 const char* reason) {
103 Representation r = representation();
104 if (new_rep.is_more_general_than(r)) {
105 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
106 if (FLAG_trace_representation) {
107 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
108 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
109 }
110 ChangeRepresentation(new_rep);
111 AddDependantsToWorklist(h_infer);
112 }
113 }
114
115
116 void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
117 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
118 h_infer->AddToWorklist(it.value());
119 }
120 for (int i = 0; i < OperandCount(); ++i) {
121 h_infer->AddToWorklist(OperandAt(i));
122 }
123 }
124
125
126 static int32_t ConvertAndSetOverflow(Representation r,
127 int64_t result,
128 bool* overflow) {
129 if (r.IsSmi()) {
130 if (result > Smi::kMaxValue) {
131 *overflow = true;
132 return Smi::kMaxValue;
133 }
134 if (result < Smi::kMinValue) {
135 *overflow = true;
136 return Smi::kMinValue;
137 }
138 } else {
139 if (result > kMaxInt) {
140 *overflow = true;
141 return kMaxInt;
142 }
143 if (result < kMinInt) {
144 *overflow = true;
145 return kMinInt;
146 }
147 }
148 return static_cast<int32_t>(result);
149 }
150
151
152 static int32_t AddWithoutOverflow(Representation r,
153 int32_t a,
154 int32_t b,
155 bool* overflow) {
156 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
157 return ConvertAndSetOverflow(r, result, overflow);
158 }
159
160
161 static int32_t SubWithoutOverflow(Representation r,
162 int32_t a,
163 int32_t b,
164 bool* overflow) {
165 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
166 return ConvertAndSetOverflow(r, result, overflow);
167 }
168
169
170 static int32_t MulWithoutOverflow(const Representation& r,
171 int32_t a,
172 int32_t b,
173 bool* overflow) {
174 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
175 return ConvertAndSetOverflow(r, result, overflow);
176 }
177
178
179 int32_t Range::Mask() const {
180 if (lower_ == upper_) return lower_;
181 if (lower_ >= 0) {
182 int32_t res = 1;
183 while (res < upper_) {
184 res = (res << 1) | 1;
185 }
186 return res;
187 }
188 return 0xffffffff;
189 }
190
191
192 void Range::AddConstant(int32_t value) {
193 if (value == 0) return;
194 bool may_overflow = false; // Overflow is ignored here.
195 Representation r = Representation::Integer32();
196 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
197 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
198 #ifdef DEBUG
199 Verify();
200 #endif
201 }
202
203
204 void Range::Intersect(Range* other) {
205 upper_ = Min(upper_, other->upper_);
206 lower_ = Max(lower_, other->lower_);
207 bool b = CanBeMinusZero() && other->CanBeMinusZero();
208 set_can_be_minus_zero(b);
209 }
210
211
212 void Range::Union(Range* other) {
213 upper_ = Max(upper_, other->upper_);
214 lower_ = Min(lower_, other->lower_);
215 bool b = CanBeMinusZero() || other->CanBeMinusZero();
216 set_can_be_minus_zero(b);
217 }
218
219
220 void Range::CombinedMax(Range* other) {
221 upper_ = Max(upper_, other->upper_);
222 lower_ = Max(lower_, other->lower_);
223 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
224 }
225
226
227 void Range::CombinedMin(Range* other) {
228 upper_ = Min(upper_, other->upper_);
229 lower_ = Min(lower_, other->lower_);
230 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
231 }
232
233
234 void Range::Sar(int32_t value) {
235 int32_t bits = value & 0x1F;
236 lower_ = lower_ >> bits;
237 upper_ = upper_ >> bits;
238 set_can_be_minus_zero(false);
239 }
240
241
242 void Range::Shl(int32_t value) {
243 int32_t bits = value & 0x1F;
244 int old_lower = lower_;
245 int old_upper = upper_;
246 lower_ = lower_ << bits;
247 upper_ = upper_ << bits;
248 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
249 upper_ = kMaxInt;
250 lower_ = kMinInt;
251 }
252 set_can_be_minus_zero(false);
253 }
254
255
// Add |other|'s bounds to this range with saturation to |r|'s value
// range. Returns true when either bound may have overflowed.
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  // Saturation can invert the bounds; restore lower_ <= upper_.
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
266
267
// Subtract |other| from this range: the new lower bound subtracts the
// other's UPPER bound and vice versa (interval arithmetic). Returns
// true when either bound may have overflowed.
bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  // Saturation can invert the bounds; restore lower_ <= upper_.
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
278
279
280 void Range::KeepOrder() {
281 if (lower_ > upper_) {
282 int32_t tmp = lower_;
283 lower_ = upper_;
284 upper_ = tmp;
285 }
286 }
287
288
#ifdef DEBUG
// Debug-only invariant check: a range's bounds must be ordered.
void Range::Verify() const {
  DCHECK(lower_ <= upper_);
}
#endif
294
295
296 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
297 bool may_overflow = false;
298 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
299 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
300 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
301 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
302 lower_ = Min(Min(v1, v2), Min(v3, v4));
303 upper_ = Max(Max(v1, v2), Max(v3, v4));
304 #ifdef DEBUG
305 Verify();
306 #endif
307 return may_overflow;
308 }
309
310
311 bool HValue::IsDefinedAfter(HBasicBlock* other) const {
312 return block()->block_id() > other->block_id();
313 }
314
315
// Returns the next live node, lazily unlinking nodes whose value has
// been killed (see HValue::Kill(), which only marks values dead).
HUseListNode* HUseListNode::tail() {
  // Skip and remove dead items in the use list.
  while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
    tail_ = tail_->tail_;
  }
  return tail_;
}
323
324
325 bool HValue::CheckUsesForFlag(Flag f) const {
326 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
327 if (it.value()->IsSimulate()) continue;
328 if (!it.value()->CheckFlag(f)) return false;
329 }
330 return true;
331 }
332
333
334 bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
335 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
336 if (it.value()->IsSimulate()) continue;
337 if (!it.value()->CheckFlag(f)) {
338 *value = it.value();
339 return false;
340 }
341 }
342 return true;
343 }
344
345
346 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
347 bool return_value = false;
348 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
349 if (it.value()->IsSimulate()) continue;
350 if (!it.value()->CheckFlag(f)) return false;
351 return_value = true;
352 }
353 return return_value;
354 }
355
356
// Position the iterator on the first (live) use in the list.
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}
360
361
362 void HUseIterator::Advance() {
363 current_ = next_;
364 if (current_ != NULL) {
365 next_ = current_->tail();
366 value_ = current_->value();
367 index_ = current_->index();
368 }
369 }
370
371
372 int HValue::UseCount() const {
373 int count = 0;
374 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
375 return count;
376 }
377
378
// Unlink the use of this value at (value, index) from the use list and
// return the removed node, or NULL if no such use exists. The returned
// node may be recycled by the caller (see RegisterUse()).
HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      // Found it: splice the node out of the singly-linked list.
      if (previous == NULL) {
        use_list_ = current->tail();
      } else {
        previous->set_tail(current->tail());
      }
      break;
    }

    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    HUseListNode* temp =
        new(block()->zone())
        HUseListNode(current->value(), current->index(), NULL);
    current->Zap();
    current = temp;
  }
#endif
  return current;
}
408
409
// Structural equality used by GVN: same opcode, representation, type,
// flags, pairwise-identical operands, plus instruction-specific
// DataEquals().
bool HValue::Equals(HValue* other) {
  if (other->opcode() != opcode()) return false;
  if (!other->representation().Equals(representation())) return false;
  if (!other->type_.Equals(type_)) return false;
  if (other->flags() != flags()) return false;
  if (OperandCount() != other->OperandCount()) return false;
  for (int i = 0; i < OperandCount(); ++i) {
    if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
  }
  bool result = DataEquals(other);
  // Equal values must hash equally or GVN's value table breaks.
  DCHECK(!result || Hashcode() == other->Hashcode());
  return result;
}
423
424
425 intptr_t HValue::Hashcode() {
426 intptr_t result = opcode();
427 int count = OperandCount();
428 for (int i = 0; i < count; ++i) {
429 result = result * 19 + OperandAt(i)->id() + (result >> 7);
430 }
431 return result;
432 }
433
434
// Human-readable instruction name, used by tracing and printing.
const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
    default: return "";
  }
}
444
445
446 bool HValue::CanReplaceWithDummyUses() {
447 return FLAG_unreachable_code_elimination &&
448 !(block()->IsReachable() ||
449 IsBlockEntry() ||
450 IsControlInstruction() ||
451 IsArgumentsObject() ||
452 IsCapturedObject() ||
453 IsSimulate() ||
454 IsEnterInlined() ||
455 IsLeaveInlined());
456 }
457
458
459 bool HValue::IsInteger32Constant() {
460 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
461 }
462
463
464 int32_t HValue::GetInteger32Constant() {
465 return HConstant::cast(this)->Integer32Value();
466 }
467
468
469 bool HValue::EqualsInteger32Constant(int32_t value) {
470 return IsInteger32Constant() && GetInteger32Constant() == value;
471 }
472
473
// Set operand |index| to |value| while keeping use lists consistent:
// the use is (re)registered before the raw operand slot is written.
void HValue::SetOperandAt(int index, HValue* value) {
  RegisterUse(index, value);
  InternalSetOperandAt(index, value);
}
478
479
// Redirect all uses to |other| (if given), mark this value dead, and
// remove it from the graph.
void HValue::DeleteAndReplaceWith(HValue* other) {
  // We replace all uses first, so Delete can assert that there are none.
  if (other != NULL) ReplaceAllUsesWith(other);
  Kill();
  DeleteFromGraph();
}
486
487
// Move every use of this value over to |other|: each use's operand slot
// is rewritten and its use-list node is spliced onto other's list, so
// no allocation occurs.
void HValue::ReplaceAllUsesWith(HValue* other) {
  while (use_list_ != NULL) {
    HUseListNode* list_node = use_list_;
    HValue* value = list_node->value();
    DCHECK(!value->block()->IsStartBlock());
    value->InternalSetOperandAt(list_node->index(), other);
    // Pop the node off our list and push it onto other's list.
    use_list_ = list_node->tail();
    list_node->set_tail(other->use_list_);
    other->use_list_ = list_node;
  }
}
499
500
// Mark this value dead. Uses are not eagerly unlinked from operands:
// HUseListNode::tail() prunes dead entries lazily on later traversals.
void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip dead items, removing them lazily next time we traverse the list.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}
515
516
517 void HValue::SetBlock(HBasicBlock* block) {
518 DCHECK(block_ == NULL || block == NULL);
519 block_ = block;
520 if (id_ == kNoNumber && block != NULL) {
521 id_ = block->graph()->GetNextValueID(this);
522 }
523 }
524
525
// Stream a value by delegating to its virtual PrintTo().
std::ostream& operator<<(std::ostream& os, const HValue& v) {
  return v.PrintTo(os);
}
529
530
// Prints " type:<T>" for a value; tagged values whose static type is
// more specific than Tagged are suppressed here.
// NOTE(review): the negated Equals() in the suppression test reads
// inverted relative to that intent -- confirm against callers.
std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
  if (t.value->representation().IsTagged() &&
      !t.value->type().Equals(HType::Tagged()))
    return os;
  return os << " type:" << t.value->type();
}
537
538
// Prints the GVN "changes" flag set of a value as " changes[...]",
// using "*" when the value has all generic side effects.
std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
  GVNFlagSet changes_flags = c.value->ChangesFlags();
  if (changes_flags.IsEmpty()) return os;
  os << " changes[";
  if (changes_flags == c.value->AllSideEffectsFlagSet()) {
    os << "*";
  } else {
    bool add_comma = false;
// Emits one comma-separated name per contained flag.
#define PRINT_DO(Type)                   \
  if (changes_flags.Contains(k##Type)) { \
    if (add_comma) os << ",";            \
    add_comma = true;                    \
    os << #Type;                         \
  }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  return os << "]";
}
559
560
// True when a single JSObject map is known for this value.
bool HValue::HasMonomorphicJSObjectType() {
  return !GetMonomorphicJSObjectMap().is_null();
}
564
565
566 bool HValue::UpdateInferredType() {
567 HType type = CalculateInferredType();
568 bool result = (!type.Equals(type_));
569 type_ = type;
570 return result;
571 }
572
573
// Record that this value uses |new_value| as operand |index|, replacing
// any previous registration. The node removed from the old value's use
// list is recycled when possible to avoid allocation.
void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      // No recyclable node: allocate a fresh one at the list head.
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      // Reuse the node unlinked from the old value's list.
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}
593
594
// Push |r| onto this value's stack of ranges, keeping the previous one
// reachable so RemoveLastAddedRange() can restore it.
void HValue::AddNewRange(Range* r, Zone* zone) {
  // Lazily compute an initial range; fall back to an unconstrained one.
  if (!HasRange()) ComputeInitialRange(zone);
  if (!HasRange()) range_ = new(zone) Range();
  DCHECK(HasRange());
  r->StackUpon(range_);
  range_ = r;
}
602
603
// Pop the most recently pushed range, exposing the one beneath it.
void HValue::RemoveLastAddedRange() {
  DCHECK(HasRange());
  DCHECK(range_->next() != NULL);
  range_ = range_->next();
}
609
610
// Establish the value's first range via instruction-specific InferRange.
void HValue::ComputeInitialRange(Zone* zone) {
  DCHECK(!HasRange());
  range_ = InferRange(zone);
  DCHECK(HasRange());
}
616
617
// Print "<mnemonic> <operands> <changes> <type>" plus status markers.
std::ostream& HInstruction::PrintTo(std::ostream& os) const {  // NOLINT
  os << Mnemonic() << " ";
  PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
  if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
  if (CheckFlag(HValue::kIsDead)) os << " [dead]";
  return os;
}
625
626
627 std::ostream& HInstruction::PrintDataTo(std::ostream& os) const { // NOLINT
628 for (int i = 0; i < OperandCount(); ++i) {
629 if (i > 0) os << " ";
630 os << NameOf(OperandAt(i));
631 }
632 return os;
633 }
634
635
// Remove this instruction from its block's doubly-linked instruction
// list; control and block-entry instructions must never be unlinked.
void HInstruction::Unlink() {
  DCHECK(IsLinked());
  DCHECK(!IsControlInstruction());  // Must never move control instructions.
  DCHECK(!IsBlockEntry());  // Doesn't make sense to delete these.
  DCHECK(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    // Unlinking the tail updates the block's last pointer.
    DCHECK(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}
650
651
// Splice this (unlinked) instruction in directly before |next|, joining
// next's block and inheriting its source position when this instruction
// has none of its own.
void HInstruction::InsertBefore(HInstruction* next) {
  DCHECK(!IsLinked());
  DCHECK(!next->IsBlockEntry());
  DCHECK(!IsControlInstruction());
  DCHECK(!next->block()->IsStartBlock());
  DCHECK(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
  if (!has_position() && next->has_position()) {
    set_position(next->position());
  }
}
668
669
// Splice this (unlinked) instruction in directly after |previous|,
// with two adjustments: insertion into a finished start block is
// redirected to its successor (except for constants), and insertion
// after a side-effecting instruction skips over its simulate.
void HInstruction::InsertAfter(HInstruction* previous) {
  DCHECK(!IsLinked());
  DCHECK(!previous->IsControlInstruction());
  DCHECK(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after finishing
  // it.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    DCHECK(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    DCHECK(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  // Inserting at the tail updates the block's last pointer.
  if (block->last() == previous) {
    block->set_last(this);
  }
  // Inherit the predecessor's source position when we have none.
  if (!has_position() && previous->has_position()) {
    set_position(previous->position());
  }
}
705
706
707 bool HInstruction::Dominates(HInstruction* other) {
708 if (block() != other->block()) {
709 return block()->Dominates(other->block());
710 }
711 // Both instructions are in the same basic block. This instruction
712 // should precede the other one in order to dominate it.
713 for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
714 if (instr == other) {
715 return true;
716 }
717 }
718 return false;
719 }
720
721
#ifdef DEBUG
// Debug-only consistency check: operands dominate this instruction,
// side-effecting instructions are followed by a simulate, GVN-eligible
// instructions override DataEquals, and all uses are linked.
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        // Walk backwards through the block to find the operand.
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        DCHECK(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      DCHECK(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    DCHECK(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      DCHECK(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif
766
767
// Whether this instruction may trigger a deoptimization. The switch is
// exhaustive over concrete opcodes so adding an instruction without
// classifying it here is a compile-time error.
bool HInstruction::CanDeoptimize() {
  // TODO(titzer): make this a virtual method?
  switch (opcode()) {
    // Instructions that can never deoptimize.
    case HValue::kAbnormalExit:
    case HValue::kAccessArgumentsAt:
    case HValue::kAllocate:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBlockEntry:
    case HValue::kBoundsCheckBaseIndexInformation:
    case HValue::kCallFunction:
    case HValue::kCallNew:
    case HValue::kCallNewArray:
    case HValue::kCallStub:
    case HValue::kCapturedObject:
    case HValue::kClassOfTestAndBranch:
    case HValue::kCompareGeneric:
    case HValue::kCompareHoleAndBranch:
    case HValue::kCompareMap:
    case HValue::kCompareMinusZeroAndBranch:
    case HValue::kCompareNumericAndBranch:
    case HValue::kCompareObjectEqAndBranch:
    case HValue::kConstant:
    case HValue::kConstructDouble:
    case HValue::kContext:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDoubleBits:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForceRepresentation:
    case HValue::kGetCachedArrayIndex:
    case HValue::kGoto:
    case HValue::kHasCachedArrayIndexAndBranch:
    case HValue::kHasInstanceTypeAndBranch:
    case HValue::kInnerAllocatedObject:
    case HValue::kInstanceOf:
    case HValue::kIsConstructCallAndBranch:
    case HValue::kHasInPrototypeChainAndBranch:
    case HValue::kIsSmiAndBranch:
    case HValue::kIsStringAndBranch:
    case HValue::kIsUndetectableAndBranch:
    case HValue::kLeaveInlined:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadGlobalViaContext:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMapEnumLength:
    case HValue::kMathMinMax:
    case HValue::kParameter:
    case HValue::kPhi:
    case HValue::kPushArguments:
    case HValue::kRegExpLiteral:
    case HValue::kReturn:
    case HValue::kSeqStringGetChar:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreFrameContext:
    case HValue::kStoreGlobalViaContext:
    case HValue::kStoreKeyed:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kTypeofIsAndBranch:
    case HValue::kUnknownOSRValue:
    case HValue::kUseConst:
      return false;

    // Instructions that may deoptimize.
    case HValue::kAdd:
    case HValue::kAllocateBlockContext:
    case HValue::kApplyArguments:
    case HValue::kBitwise:
    case HValue::kBoundsCheck:
    case HValue::kBranch:
    case HValue::kCallJSFunction:
    case HValue::kCallRuntime:
    case HValue::kCallWithDescriptor:
    case HValue::kChange:
    case HValue::kCheckArrayBufferNotNeutered:
    case HValue::kCheckHeapObject:
    case HValue::kCheckInstanceType:
    case HValue::kCheckMapValue:
    case HValue::kCheckMaps:
    case HValue::kCheckSmi:
    case HValue::kCheckValue:
    case HValue::kClampToUint8:
    case HValue::kDateField:
    case HValue::kDeoptimize:
    case HValue::kDiv:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kInvokeFunction:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kMathFloorOfDiv:
    case HValue::kMaybeGrowElements:
    case HValue::kMod:
    case HValue::kMul:
    case HValue::kOsrEntry:
    case HValue::kPower:
    case HValue::kPrologue:
    case HValue::kRor:
    case HValue::kSar:
    case HValue::kSeqStringSetChar:
    case HValue::kShl:
    case HValue::kShr:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreContextSlot:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCompareAndBranch:
    case HValue::kSub:
    case HValue::kToFastProperties:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kWrapReceiver:
      return true;
  }
  UNREACHABLE();
  return true;
}
899
900
// Short value name: representation mnemonic followed by the value id.
std::ostream& operator<<(std::ostream& os, const NameOf& v) {
  return os << v.value->representation().Mnemonic() << v.value->id();
}
904
// Prints the single dummied-out operand.
std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}
908
909
// Prints the marker kind (bind/lookup) and the environment slot index.
std::ostream& HEnvironmentMarker::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
            << "]";
}
915
916
// Prints the single call operand and the argument count.
std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " #" << argument_count();
}
920
921
// Prints the callee and the argument count.
std::ostream& HCallJSFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(function()) << " #" << argument_count();
}
925
926
// Factory for HCallJSFunction. When the callee is a known constant
// function, record whether its code performs its own stack check so the
// call site can avoid emitting a redundant one.
// NOTE(review): |context| is unused here -- presumably kept for factory
// signature uniformity; confirm.
HCallJSFunction* HCallJSFunction::New(Isolate* isolate, Zone* zone,
                                      HValue* context, HValue* function,
                                      int argument_count) {
  bool has_stack_check = false;
  if (function->IsConstant()) {
    HConstant* fun_const = HConstant::cast(function);
    Handle<JSFunction> jsfun =
        Handle<JSFunction>::cast(fun_const->handle(isolate));
    // Full-codegen and optimized code contain their own stack checks.
    has_stack_check = !jsfun.is_null() &&
        (jsfun->code()->kind() == Code::FUNCTION ||
         jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
  }

  return new (zone) HCallJSFunction(function, argument_count, has_stack_check);
}
942
943
// Prints both call operands and the argument count.
std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(first()) << " " << NameOf(second()) << " #"
            << argument_count();
}
948
949
// Prints context, callee, and (when present) the feedback-vector slot.
std::ostream& HCallFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(context()) << " " << NameOf(function());
  if (HasVectorAndSlot()) {
    os << " (type-feedback-vector icslot " << slot().ToInt() << ")";
  }
  return os;
}
957
958
// If the checked index decomposes as ((base + offset) >> scale), fold
// that decomposition into this check by materializing the equivalent
// add/shift instructions right before it, then clear the stored
// decomposition state.
void HBoundsCheck::ApplyIndexChange() {
  if (skip_check()) return;

  DecompositionResult decomposition;
  bool index_is_decomposable = index()->TryDecompose(&decomposition);
  if (index_is_decomposable) {
    DCHECK(decomposition.base() == base());
    // Nothing to fold when the decomposition matches the current state.
    if (decomposition.offset() == offset() &&
        decomposition.scale() == scale()) return;
  } else {
    return;
  }

  ReplaceAllUsesWith(index());

  HValue* current_index = decomposition.base();
  int actual_offset = decomposition.offset() + offset();
  int actual_scale = decomposition.scale() + scale();

  HGraph* graph = block()->graph();
  Isolate* isolate = graph->isolate();
  Zone* zone = graph->zone();
  HValue* context = graph->GetInvalidContext();
  if (actual_offset != 0) {
    // index' = base + actual_offset.
    HConstant* add_offset =
        HConstant::New(isolate, zone, context, actual_offset);
    add_offset->InsertBefore(this);
    HInstruction* add =
        HAdd::New(isolate, zone, context, current_index, add_offset);
    add->InsertBefore(this);
    add->AssumeRepresentation(index()->representation());
    add->ClearFlag(kCanOverflow);
    current_index = add;
  }

  if (actual_scale != 0) {
    // index'' = index' >> actual_scale.
    HConstant* sar_scale = HConstant::New(isolate, zone, context, actual_scale);
    sar_scale->InsertBefore(this);
    HInstruction* sar =
        HSar::New(isolate, zone, context, current_index, sar_scale);
    sar->InsertBefore(this);
    sar->AssumeRepresentation(index()->representation());
    current_index = sar;
  }

  SetOperandAt(0, current_index);

  // The decomposition has been applied; reset it.
  base_ = NULL;
  offset_ = 0;
  scale_ = 0;
}
1010
1011
// Prints index and length, the base/offset/scale decomposition when
// present, and a marker for disabled checks.
std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(index()) << " " << NameOf(length());
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    os << " base: ((";
    // NOTE(review): printing NameOf(index()) when base() != index() looks
    // like it was meant to be NameOf(base()) -- confirm before changing.
    if (base() != index()) {
      os << NameOf(index());
    } else {
      os << "index";
    }
    os << " + " << offset() << ") >> " << scale() << ")";
  }
  if (skip_check()) os << " [DISABLED]";
  return os;
}
1026
1027
// Pick the representation for the bounds check from the actual index
// and length values, never widening past Integer32.
void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  HValue* actual_index = index()->ActualValue();
  HValue* actual_length = length()->ActualValue();
  Representation index_rep = actual_index->representation();
  Representation length_rep = actual_length->representation();
  // Tagged values statically known to be Smis may be treated as Smi.
  if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
    index_rep = Representation::Smi();
  }
  if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
    length_rep = Representation::Smi();
  }
  // Combine the two, capped at Integer32.
  Representation r = index_rep.generalize(length_rep);
  if (r.is_more_general_than(Representation::Integer32())) {
    r = Representation::Integer32();
  }
  UpdateRepresentation(r, h_infer, "boundscheck");
}
1046
1047
// A passed bounds check constrains its result to [0, length-1] (or
// [0, length] when equality is allowed), further intersected with the
// index's own range.
Range* HBoundsCheck::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && length()->HasRange()) {
    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
    int lower = 0;

    Range* result = new(zone) Range(lower, upper);
    if (index()->HasRange()) {
      result->Intersect(index()->range());
    }

    // In case of Smi representation, clamp result to Smi::kMaxValue.
    if (r.IsSmi()) result->ClampToSmi();
    return result;
  }
  // No length range available: fall back to the generic inference.
  return HValue::InferRange(zone);
}
1065
1066
// Prints the base index used by the associated bounds check.
std::ostream& HBoundsCheckBaseIndexInformation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  // TODO(svenpanne) This 2nd base_index() looks wrong...
  return os << "base: " << NameOf(base_index())
            << ", check: " << NameOf(base_index());
}
1073
1074
1075 std::ostream& HCallWithDescriptor::PrintDataTo(
1076 std::ostream& os) const { // NOLINT
1077 for (int i = 0; i < OperandCount(); i++) {
1078 os << NameOf(OperandAt(i)) << " ";
1079 }
1080 return os << "#" << argument_count();
1081 }
1082
1083
1084 std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const { // NOLINT
1085 os << ElementsKindToString(elements_kind()) << " ";
1086 return HBinaryCall::PrintDataTo(os);
1087 }
1088
1089
1090 std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const { // NOLINT
1091 os << function()->name << " ";
1092 if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
1093 return os << "#" << argument_count();
1094 }
1095
1096
1097 std::ostream& HClassOfTestAndBranch::PrintDataTo(
1098 std::ostream& os) const { // NOLINT
1099 return os << "class_of_test(" << NameOf(value()) << ", \""
1100 << class_name()->ToCString().get() << "\")";
1101 }
1102
1103
1104 std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const { // NOLINT
1105 return os << NameOf(receiver()) << " " << NameOf(function());
1106 }
1107
1108
1109 std::ostream& HAccessArgumentsAt::PrintDataTo(
1110 std::ostream& os) const { // NOLINT
1111 return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
1112 << NameOf(length());
1113 }
1114
1115
1116 std::ostream& HAllocateBlockContext::PrintDataTo(
1117 std::ostream& os) const { // NOLINT
1118 return os << NameOf(context()) << " " << NameOf(function());
1119 }
1120
1121
1122 std::ostream& HControlInstruction::PrintDataTo(
1123 std::ostream& os) const { // NOLINT
1124 os << " goto (";
1125 bool first_block = true;
1126 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1127 if (!first_block) os << ", ";
1128 os << *it.Current();
1129 first_block = false;
1130 }
1131 return os << ")";
1132 }
1133
1134
1135 std::ostream& HUnaryControlInstruction::PrintDataTo(
1136 std::ostream& os) const { // NOLINT
1137 os << NameOf(value());
1138 return HControlInstruction::PrintDataTo(os);
1139 }
1140
1141
1142 std::ostream& HReturn::PrintDataTo(std::ostream& os) const { // NOLINT
1143 return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
1144 << " values)";
1145 }
1146
1147
// Chooses the input representation HBranch expects, based on the ToBoolean
// type feedback seen so far. The checks are ordered from most to least
// constraining: any feedback class that requires a heap pointer forces
// Tagged; UNDEFINED combined with HEAP_NUMBER permits Double (undefined
// maps to NaN); HEAP_NUMBER alone gives Double; SMI alone gives Smi.
Representation HBranch::observed_input_representation(int index) {
  if (expected_input_types_.Contains(ToBooleanStub::NULL_TYPE) ||
      expected_input_types_.Contains(ToBooleanStub::SPEC_OBJECT) ||
      expected_input_types_.Contains(ToBooleanStub::STRING) ||
      expected_input_types_.Contains(ToBooleanStub::SYMBOL) ||
      expected_input_types_.Contains(ToBooleanStub::SIMD_VALUE)) {
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
    if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
    return Representation::Double();
  }
  if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
    return Representation::Smi();
  }
  // No feedback yet: leave the representation unconstrained.
  return Representation::None();
}
1170
1171
1172 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1173 HValue* value = this->value();
1174 if (value->EmitAtUses()) {
1175 DCHECK(value->IsConstant());
1176 DCHECK(!value->representation().IsDouble());
1177 *block = HConstant::cast(value)->BooleanValue()
1178 ? FirstSuccessor()
1179 : SecondSuccessor();
1180 return true;
1181 }
1182 *block = NULL;
1183 return false;
1184 }
1185
1186
1187 std::ostream& HBranch::PrintDataTo(std::ostream& os) const { // NOLINT
1188 return HUnaryControlInstruction::PrintDataTo(os) << " "
1189 << expected_input_types();
1190 }
1191
1192
1193 std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1194 os << NameOf(value()) << " (" << *map().handle() << ")";
1195 HControlInstruction::PrintDataTo(os);
1196 if (known_successor_index() == 0) {
1197 os << " [true]";
1198 } else if (known_successor_index() == 1) {
1199 os << " [false]";
1200 }
1201 return os;
1202 }
1203
1204
1205 const char* HUnaryMathOperation::OpName() const {
1206 switch (op()) {
1207 case kMathFloor:
1208 return "floor";
1209 case kMathFround:
1210 return "fround";
1211 case kMathRound:
1212 return "round";
1213 case kMathAbs:
1214 return "abs";
1215 case kMathLog:
1216 return "log";
1217 case kMathExp:
1218 return "exp";
1219 case kMathSqrt:
1220 return "sqrt";
1221 case kMathPowHalf:
1222 return "pow-half";
1223 case kMathClz32:
1224 return "clz32";
1225 default:
1226 UNREACHABLE();
1227 return NULL;
1228 }
1229 }
1230
1231
// Range inference for unary math operations. Only clz32 and abs have ranges
// tighter than the default representation-based range.
Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  // clz32 counts leading zeros of a 32-bit value: always in [0, 32].
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      // If the input can be zero, the minimum of |x| is 0; otherwise it is
      // the smaller of the two absolute bounds.
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}
1255
1256
1257 std::ostream& HUnaryMathOperation::PrintDataTo(
1258 std::ostream& os) const { // NOLINT
1259 return os << OpName() << " " << NameOf(value());
1260 }
1261
1262
1263 std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
1264 return os << NameOf(value());
1265 }
1266
1267
1268 std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
1269 std::ostream& os) const { // NOLINT
1270 os << NameOf(value());
1271 switch (from_) {
1272 case FIRST_JS_RECEIVER_TYPE:
1273 if (to_ == LAST_TYPE) os << " spec_object";
1274 break;
1275 case JS_REGEXP_TYPE:
1276 if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
1277 break;
1278 case JS_ARRAY_TYPE:
1279 if (to_ == JS_ARRAY_TYPE) os << " array";
1280 break;
1281 case JS_FUNCTION_TYPE:
1282 if (to_ == JS_FUNCTION_TYPE) os << " function";
1283 break;
1284 default:
1285 break;
1286 }
1287 return os;
1288 }
1289
1290
1291 std::ostream& HTypeofIsAndBranch::PrintDataTo(
1292 std::ostream& os) const { // NOLINT
1293 os << NameOf(value()) << " == " << type_literal()->ToCString().get();
1294 return HControlInstruction::PrintDataTo(os);
1295 }
1296
1297
namespace {

// Computes the string `typeof constant` evaluates to, using the canonical
// heap strings. Numbers -> "number", undetectable objects -> "undefined",
// strings -> "string"; remaining cases dispatch on instance type, with
// `typeof null` being "object" per the ECMAScript specification and SIMD
// values resolved to their type name via the value's map.
String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  if (constant->IsUndetectable()) return heap->undefined_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      // Oddballs are distinguished by identity: true/false, null, undefined.
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      if (unique.IsKnownGlobal(heap->null_value())) {
        return heap->object_string();
      }
      DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    case SIMD128_VALUE_TYPE: {
      // Match the value's map against each SIMD type's map to find the
      // corresponding type-name string.
      Unique<Map> map = constant->ObjectMap();
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  if (map.IsKnownGlobal(heap->type##_map())) {                \
    return heap->type##_string();                             \
  }
      SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
      UNREACHABLE();
      return nullptr;
    }
    default:
      if (constant->IsCallable()) return heap->function_string();
      return heap->object_string();
  }
}

}  // namespace
1338
1339
1340 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1341 if (FLAG_fold_constants && value()->IsConstant()) {
1342 HConstant* constant = HConstant::cast(value());
1343 String* type_string = TypeOfString(constant, isolate());
1344 bool same_type = type_literal_.IsKnownGlobal(type_string);
1345 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1346 return true;
1347 } else if (value()->representation().IsSpecialization()) {
1348 bool number_type =
1349 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1350 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1351 return true;
1352 }
1353 *block = NULL;
1354 return false;
1355 }
1356
1357
1358 std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const { // NOLINT
1359 return os << NameOf(value()) << " " << NameOf(map());
1360 }
1361
1362
// When the map operand is a constant, replace this dynamic map check with a
// static HCheckMaps inserted right after this instruction; otherwise keep
// the instruction unchanged.
HValue* HCheckMapValue::Canonicalize() {
  if (map()->IsConstant()) {
    HConstant* c_map = HConstant::cast(map());
    return HCheckMaps::CreateAndInsertAfter(
        block()->graph()->zone(), value(), c_map->MapValue(),
        c_map->HasStableMapValue(), this);
  }
  return this;
}
1372
1373
1374 std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1375 return os << NameOf(enumerable());
1376 }
1377
1378
1379 std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const { // NOLINT
1380 return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
1381 << "]";
1382 }
1383
1384
1385 std::ostream& HLoadFieldByIndex::PrintDataTo(
1386 std::ostream& os) const { // NOLINT
1387 return os << NameOf(object()) << " " << NameOf(index());
1388 }
1389
1390
1391 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1392 if (!l->EqualsInteger32Constant(~0)) return false;
1393 *negated = r;
1394 return true;
1395 }
1396
1397
1398 static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1399 if (!instr->IsBitwise()) return false;
1400 HBitwise* b = HBitwise::cast(instr);
1401 return (b->op() == Token::BIT_XOR) &&
1402 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1403 MatchLeftIsOnes(b->right(), b->left(), negated));
1404 }
1405
1406
1407 static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1408 HValue* negated;
1409 return MatchNegationViaXor(instr, &negated) &&
1410 MatchNegationViaXor(negated, arg);
1411 }
1412
1413
// Algebraic simplifications for int32 bitwise operations. The kUint32
// checks prevent dropping an operand whose uint32 interpretation differs
// from its int32 one.
HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}
1433
1434
// static
// Factory for the special add-of-external-and-tagged form; the DCHECK
// restricts this overload to exactly that ExternalAddType.
HInstruction* HAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength,
                        ExternalAddType external_add_type) {
  // For everything else, you should use the other factory method without
  // ExternalAddType.
  DCHECK_EQ(external_add_type, AddOfExternalAndTagged);
  return new (zone) HAdd(context, left, right, strength, external_add_type);
}
1444
1445
1446 Representation HAdd::RepresentationFromInputs() {
1447 Representation left_rep = left()->representation();
1448 if (left_rep.IsExternal()) {
1449 return Representation::External();
1450 }
1451 return HArithmeticBinaryOperation::RepresentationFromInputs();
1452 }
1453
1454
1455 Representation HAdd::RequiredInputRepresentation(int index) {
1456 if (index == 2) {
1457 Representation left_rep = left()->representation();
1458 if (left_rep.IsExternal()) {
1459 if (external_add_type_ == AddOfExternalAndTagged) {
1460 return Representation::Tagged();
1461 } else {
1462 return Representation::Integer32();
1463 }
1464 }
1465 }
1466 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
1467 }
1468
1469
1470 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1471 return arg1->representation().IsSpecialization() &&
1472 arg2->EqualsInteger32Constant(identity);
1473 }
1474
1475
// x + 0 simplifies to x, except for double representation where the result
// of -0 + 0 is +0, so dropping the add would change the sign of zero.
HValue* HAdd::Canonicalize() {
  // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
  if (IsIdentityOperation(left(), right(), 0) &&
      !left()->representation().IsDouble()) {  // Left could be -0.
    return left();
  }
  if (IsIdentityOperation(right(), left(), 0) &&
      !left()->representation().IsDouble()) {  // Right could be -0.
    return right();
  }
  return this;
}
1488
1489
1490 HValue* HSub::Canonicalize() {
1491 if (IsIdentityOperation(left(), right(), 0)) return left();
1492 return this;
1493 }
1494
1495
1496 HValue* HMul::Canonicalize() {
1497 if (IsIdentityOperation(left(), right(), 1)) return left();
1498 if (IsIdentityOperation(right(), left(), 1)) return right();
1499 return this;
1500 }
1501
1502
1503 bool HMul::MulMinusOne() {
1504 if (left()->EqualsInteger32Constant(-1) ||
1505 right()->EqualsInteger32Constant(-1)) {
1506 return true;
1507 }
1508
1509 return false;
1510 }
1511
1512
1513 HValue* HMod::Canonicalize() {
1514 return this;
1515 }
1516
1517
1518 HValue* HDiv::Canonicalize() {
1519 if (IsIdentityOperation(left(), right(), 1)) return left();
1520 return this;
1521 }
1522
1523
1524 HValue* HChange::Canonicalize() {
1525 return (from().Equals(to())) ? value() : this;
1526 }
1527
1528
1529 HValue* HWrapReceiver::Canonicalize() {
1530 if (HasNoUses()) return NULL;
1531 if (receiver()->type().IsJSObject()) {
1532 return receiver();
1533 }
1534 return this;
1535 }
1536
1537
1538 std::ostream& HTypeof::PrintDataTo(std::ostream& os) const { // NOLINT
1539 return os << NameOf(value());
1540 }
1541
1542
1543 HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
1544 HValue* context, HValue* value,
1545 Representation representation) {
1546 if (FLAG_fold_constants && value->IsConstant()) {
1547 HConstant* c = HConstant::cast(value);
1548 c = c->CopyToRepresentation(representation, zone);
1549 if (c != NULL) return c;
1550 }
1551 return new(zone) HForceRepresentation(value, representation);
1552 }
1553
1554
1555 std::ostream& HForceRepresentation::PrintDataTo(
1556 std::ostream& os) const { // NOLINT
1557 return os << representation().Mnemonic() << " " << NameOf(value());
1558 }
1559
1560
1561 std::ostream& HChange::PrintDataTo(std::ostream& os) const { // NOLINT
1562 HUnaryOperation::PrintDataTo(os);
1563 os << " " << from().Mnemonic() << " to " << to().Mnemonic();
1564
1565 if (CanTruncateToSmi()) os << " truncating-smi";
1566 if (CanTruncateToInt32()) os << " truncating-int32";
1567 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
1568 if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
1569 return os;
1570 }
1571
1572
// Simplifies round/floor operations:
// 1. round/floor of a value that is already an integer is the value itself
//    (possibly via a re-inserted representation change).
// 2. floor(a / b) over int32-representable operands becomes the dedicated
//    HMathFloorOfDiv instruction, with int32 conversions Prepend-ed as
//    needed.
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    // Look through an intervening representation change.
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      if (val->representation().Equals(representation())) return val;
      // Same integral value, different representation: insert a plain
      // change before this instruction and use it instead.
      return Prepend(new(block()->zone()) HChange(
          val, representation(), false, false));
    }
  }
  if (op() == kMathFloor && value()->IsDiv() && value()->HasOneUse()) {
    HDiv* hdiv = HDiv::cast(value());

    HValue* left = hdiv->left();
    if (left->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      // Feedback says the operand behaves like an int32: convert it.
      left = Prepend(new(block()->zone()) HChange(
          left, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32()) {
      // A value with an integer representation does not need to be transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32()) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      right = Prepend(new(block()->zone()) HChange(
          right, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    // Both operands are now int32: replace floor(div) with the combined
    // flooring-division instruction.
    return Prepend(HMathFloorOfDiv::New(
        block()->graph()->isolate(), block()->zone(), context(), left, right));
  }
  return this;
}
1621
1622
// Drops the instance-type check when the input's static type (or constant
// value, for internalized strings) already guarantees it would pass.
HValue* HCheckInstanceType::Canonicalize() {
  if ((check_ == IS_SPEC_OBJECT && value()->type().IsJSObject()) ||
      (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
      (check_ == IS_STRING && value()->type().IsString())) {
    return value();
  }

  // Internalized-string-ness is not tracked in HType, so it can only be
  // proven for constants.
  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}
1637
1638
// Returns via *first/*last the inclusive instance-type interval that the
// interval-style checks accept. Only valid for interval checks (see
// is_interval_check()); mask-style checks use GetCheckMaskAndTag instead.
void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  DCHECK(is_interval_check());
  switch (check_) {
    case IS_SPEC_OBJECT:
      *first = FIRST_SPEC_OBJECT_TYPE;
      *last = LAST_SPEC_OBJECT_TYPE;
      return;
    case IS_JS_ARRAY:
      *first = *last = JS_ARRAY_TYPE;
      return;
    case IS_JS_DATE:
      *first = *last = JS_DATE_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}
1657
1658
// Returns via *mask/*tag the instance-type bit test for the mask-style
// checks: the check passes when (instance_type & mask) == tag. Only valid
// for non-interval checks.
void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  DCHECK(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotStringMask | kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}
1674
1675
1676 std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const { // NOLINT
1677 os << NameOf(value()) << " [" << *maps()->at(0).handle();
1678 for (int i = 1; i < maps()->size(); ++i) {
1679 os << "," << *maps()->at(i).handle();
1680 }
1681 os << "]";
1682 if (IsStabilityCheck()) os << "(stability-check)";
1683 return os;
1684 }
1685
1686
// If the input is a constant whose (stable) map is among the checked maps,
// the explicit check can be downgraded in place to a pure stability check
// on just that map. Note this mutates the instruction rather than
// replacing it: it always returns `this`.
HValue* HCheckMaps::Canonicalize() {
  if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
    HConstant* c_value = HConstant::cast(value());
    if (c_value->HasObjectMap()) {
      for (int i = 0; i < maps()->size(); ++i) {
        if (c_value->ObjectMap() == maps()->at(i)) {
          // Narrow the map set to the single matching map.
          if (maps()->size() > 1) {
            set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
                maps()->at(i), block()->graph()->zone()));
          }
          MarkAsStabilityCheck();
          break;
        }
      }
    }
  }
  return this;
}
1705
1706
1707 std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const { // NOLINT
1708 return os << NameOf(value()) << " " << Brief(*object().handle());
1709 }
1710
1711
1712 HValue* HCheckValue::Canonicalize() {
1713 return (value()->IsConstant() &&
1714 HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
1715 }
1716
1717
1718 const char* HCheckInstanceType::GetCheckName() const {
1719 switch (check_) {
1720 case IS_SPEC_OBJECT: return "object";
1721 case IS_JS_ARRAY: return "array";
1722 case IS_JS_DATE:
1723 return "date";
1724 case IS_STRING: return "string";
1725 case IS_INTERNALIZED_STRING: return "internalized_string";
1726 }
1727 UNREACHABLE();
1728 return "";
1729 }
1730
1731
1732 std::ostream& HCheckInstanceType::PrintDataTo(
1733 std::ostream& os) const { // NOLINT
1734 os << GetCheckName() << " ";
1735 return HUnaryOperation::PrintDataTo(os);
1736 }
1737
1738
1739 std::ostream& HCallStub::PrintDataTo(std::ostream& os) const { // NOLINT
1740 os << CodeStub::MajorName(major_key_) << " ";
1741 return HUnaryCall::PrintDataTo(os);
1742 }
1743
1744
// Prints the environment slot category and index of this OSR value.
std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  const char* type = "expression";
  // Deliberately sequential (not else-if): later categories take precedence
  // when an index qualifies for more than one.
  if (environment_->is_local_index(index_)) type = "local";
  if (environment_->is_special_index(index_)) type = "special";
  if (environment_->is_parameter_index(index_)) type = "parameter";
  return os << type << " @ " << index_;
}
1752
1753
1754 std::ostream& HInstanceOf::PrintDataTo(std::ostream& os) const { // NOLINT
1755 return os << NameOf(left()) << " " << NameOf(right()) << " "
1756 << NameOf(context());
1757 }
1758
1759
// Default range inference: Smi-represented (or Smi-typed) values get the
// Smi range and can never be -0; everything else gets an unbounded range
// where -0 is possible unless all uses truncate to int32.
Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
    // TODO(jkummerow): The range cannot be minus zero when the upper type
    // bound is Integer32.
  }
  return result;
}
1773
1774
// Range inference for representation changes. Also exploits the range to
// strengthen the instruction: an int32->Smi/tagged change whose input fits
// the Smi range is retyped as Smi (no allocation needed), and the overflow
// flag is cleared when the conversion provably cannot overflow.
Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    // The result is always a Smi, so no new-space allocation can happen.
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  // -0 is only possible for non-integral targets with non-truncating uses.
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}
1804
1805
1806 Range* HConstant::InferRange(Zone* zone) {
1807 if (HasInteger32Value()) {
1808 Range* result = new(zone) Range(int32_value_, int32_value_);
1809 result->set_can_be_minus_zero(false);
1810 return result;
1811 }
1812 return HValue::InferRange(zone);
1813 }
1814
1815
1816 SourcePosition HPhi::position() const { return block()->first()->position(); }
1817
1818
// Range inference for phis: loop-header phis are unconstrained within their
// representation (tighter bounds come from induction-variable analysis);
// join phis take the union of their operand ranges.
Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}
1838
1839
// Range inference for int32/Smi addition. As a side effect, clears
// kCanOverflow when the operand ranges prove no overflow, or when all uses
// truncate anyway (wrap-around is then acceptable).
Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    // -0 + -0 is the only addition producing -0.
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1859
1860
// Range inference for int32/Smi subtraction; clears kCanOverflow when the
// operand ranges prove no overflow or when all uses truncate.
Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    // -0 - 0 is the only subtraction producing -0.
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1880
1881
// Range inference for int32/Smi multiplication; clears kCanOverflow when
// the ranges prove no overflow, or when all uses truncate -- except for
// multiplication by -1, which must keep the overflow check (see below).
Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b) ||
        (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
         (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
         MulMinusOne())) {
      // Truncated int multiplication is too precise and therefore not the
      // same as converting to Double and back.
      // Handle truncated integer multiplication by -1 special.
      ClearFlag(kCanOverflow);
    }
    // A product is -0 when exactly one factor is zero and the other
    // negative.
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1906
1907
// Range inference for int32 division. The result range itself stays
// unbounded; the value of this pass is clearing the overflow flag (only
// kMinInt / -1 overflows) and the div-by-zero flag when they cannot occur.
Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    // -0 arises from -0 / x or from 0 / negative.
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1928
1929
// Range inference for flooring division. Like HDiv::InferRange, mainly
// clears flags the back end uses to omit checks: min-int/negative/positive
// left-operand flags, overflow (only kMinInt / -1), and div-by-zero.
Range* HMathFloorOfDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt)) {
      ClearFlag(kLeftCanBeMinInt);
    }

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->CanBePositive()) {
      ClearFlag(HValue::kLeftCanBePositive);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1962
1963
// Returns the absolute value of its argument minus one, avoiding undefined
// behavior at kMinInt: for negative a, -(a + 1) == |a| - 1 and a + 1 never
// underflows, so the negation always fits in int32.
static int32_t AbsMinus1(int32_t a) {
  if (a < 0) return -(a + 1);
  return a - 1;
}
1967
1968
// Range inference for int32 modulus. Bounds the magnitude by |b| - 1, keeps
// the sign of the left operand, and clears the left-negative, overflow
// (only kMinInt % -1), and div-by-zero flags when they cannot occur.
Range* HMod::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand.
    int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
2001
2002
// Tries to recognize `phi` as an induction variable: a two-operand phi in a
// loop where ComputeIncrement finds a nonzero constant increment via one
// operand, the other operand being the initial value. Returns the
// descriptor on success, NULL otherwise.
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
  if (phi->block()->loop_information() == NULL) return NULL;
  if (phi->OperandCount() != 2) return NULL;
  int32_t candidate_increment;

  // Try operand 0 as the back-edge (incremented) input first...
  candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
  }

  // ...then the operands the other way around.
  candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
  }

  return NULL;
}
2022
2023
/*
 * This function tries to match the following patterns (and all the relevant
 * variants related to |, & and + being commutative):
 * base | constant_or_mask
 * base & constant_and_mask
 * (base + constant_offset) & constant_and_mask
 * (base - constant_offset) & constant_and_mask
 */
void InductionVariableData::DecomposeBitwise(
    HValue* value,
    BitwiseDecompositionResult* result) {
  HValue* base = IgnoreOsrValue(value);
  // Default: no decomposition, the result's base is the value itself.
  result->base = value;

  if (!base->representation().IsInteger32()) return;

  if (base->IsBitwise()) {
    bool allow_offset = false;
    int32_t mask = 0;

    // The mask must be a constant on either side of the bitwise op.
    HBitwise* bitwise = HBitwise::cast(base);
    if (bitwise->right()->IsInteger32Constant()) {
      mask = bitwise->right()->GetInteger32Constant();
      base = bitwise->left();
    } else if (bitwise->left()->IsInteger32Constant()) {
      mask = bitwise->left()->GetInteger32Constant();
      base = bitwise->right();
    } else {
      return;
    }
    // Only AND may look through an additional +/- constant offset.
    if (bitwise->op() == Token::BIT_AND) {
      result->and_mask = mask;
      allow_offset = true;
    } else if (bitwise->op() == Token::BIT_OR) {
      result->or_mask = mask;
    } else {
      return;
    }

    result->context = bitwise->context();

    if (allow_offset) {
      // Peel a constant offset off the base: (base + c) or (base - c).
      if (base->IsAdd()) {
        HAdd* add = HAdd::cast(base);
        if (add->right()->IsInteger32Constant()) {
          base = add->left();
        } else if (add->left()->IsInteger32Constant()) {
          base = add->right();
        }
      } else if (base->IsSub()) {
        HSub* sub = HSub::cast(base);
        if (sub->right()->IsInteger32Constant()) {
          base = sub->left();
        }
      }
    }

    result->base = base;
  }
}
2084
2085
// Records a bounds check guarded by this induction variable, grouped by the
// checked length value. Checks are only accepted when the limit is valid
// (dominates the check) and the check lies in a loop nested in the
// induction variable's loop.
void InductionVariableData::AddCheck(HBoundsCheck* check,
                                     int32_t upper_limit) {
  DCHECK(limit_validity() != NULL);
  if (limit_validity() != check->block() &&
      !limit_validity()->Dominates(check->block())) return;
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      check->block()->current_loop())) return;

  // Find the existing per-length group, or prepend a new one.
  ChecksRelatedToLength* length_checks = checks();
  while (length_checks != NULL) {
    if (length_checks->length() == check->length()) break;
    length_checks = length_checks->next();
  }
  if (length_checks == NULL) {
    length_checks = new(check->block()->zone())
        ChecksRelatedToLength(check->length(), checks());
    checks_ = length_checks;
  }

  length_checks->AddCheck(check, upper_limit);
}
2107
2108
2109 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
2110 if (checks() != NULL) {
2111 InductionVariableCheck* c = checks();
2112 HBasicBlock* current_block = c->check()->block();
2113 while (c != NULL && c->check()->block() == current_block) {
2114 c->set_upper_limit(current_upper_limit_);
2115 c = c->next();
2116 }
2117 }
2118 }
2119
2120
// Rewires the first bounds check of the current block to use a freshly
// (re)masked index "index_base <token> mask" so it can stand in for later
// checks with narrower masks.
void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
    Token::Value token,
    int32_t mask,
    HValue* index_base,
    HValue* context) {
  DCHECK(first_check_in_block() != NULL);
  HValue* previous_index = first_check_in_block()->index();
  DCHECK(context != NULL);

  Zone* zone = index_base->block()->graph()->zone();
  Isolate* isolate = index_base->block()->graph()->isolate();
  // Materialize the mask constant right before the reused masked index if
  // one already exists, otherwise before the first check itself.
  set_added_constant(HConstant::New(isolate, zone, context, mask));
  if (added_index() != NULL) {
    added_constant()->InsertBefore(added_index());
  } else {
    added_constant()->InsertBefore(first_check_in_block());
  }

  if (added_index() == NULL) {
    // First rewiring in this block: detach uses of the check from it and
    // build the masked index instruction.
    first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
    HInstruction* new_index = HBitwise::New(isolate, zone, context, token,
                                            index_base, added_constant());
    DCHECK(new_index->IsBitwise());
    // The synthesized bitwise op is pure and operates on int32 values.
    new_index->ClearAllSideEffects();
    new_index->AssumeRepresentation(Representation::Integer32());
    set_added_index(HBitwise::cast(new_index));
    added_index()->InsertBefore(first_check_in_block());
  }
  DCHECK(added_index()->op() == token);

  // Point the (new or reused) masked index at the current base and mask,
  // then make the first check consume it.
  added_index()->SetOperandAt(1, index_base);
  added_index()->SetOperandAt(2, added_constant());
  first_check_in_block()->SetOperandAt(0, added_index());
  if (previous_index->HasNoUses()) {
    previous_index->DeleteAndReplaceWith(NULL);
  }
}
2158
// Records a bounds check for this length, possibly marking it as skippable
// when the block's first check (with a suitably widened mask) covers it.
void InductionVariableData::ChecksRelatedToLength::AddCheck(
    HBoundsCheck* check,
    int32_t upper_limit) {
  BitwiseDecompositionResult decomposition;
  InductionVariableData::DecomposeBitwise(check->index(), &decomposition);

  if (first_check_in_block() == NULL ||
      first_check_in_block()->block() != check->block()) {
    // Entering a new block: finalize the previous block's checks and reset
    // the per-block state.
    CloseCurrentBlock();

    first_check_in_block_ = check;
    set_added_index(NULL);
    set_added_constant(NULL);
    current_and_mask_in_block_ = decomposition.and_mask;
    current_or_mask_in_block_ = decomposition.or_mask;
    current_upper_limit_ = upper_limit;

    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
    return;
  }

  if (upper_limit > current_upper_limit()) {
    current_upper_limit_ = upper_limit;
  }

  // AND-masked index: widen the block's mask if needed (only valid while no
  // OR mask is active) and let the first check cover this one.
  if (decomposition.and_mask != 0 &&
      current_or_mask_in_block() == 0) {
    if (current_and_mask_in_block() == 0 ||
        decomposition.and_mask > current_and_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_AND,
                                decomposition.and_mask,
                                decomposition.base,
                                decomposition.context);
      current_and_mask_in_block_ = decomposition.and_mask;
    }
    check->set_skip_check();
  }
  // OR-masked index: same idea, only valid while no AND mask is active.
  if (current_and_mask_in_block() == 0) {
    if (decomposition.or_mask > current_or_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_OR,
                                decomposition.or_mask,
                                decomposition.base,
                                decomposition.context);
      current_or_mask_in_block_ = decomposition.or_mask;
    }
    check->set_skip_check();
  }

  // Checks that could not be subsumed are recorded for later processing.
  if (!check->skip_check()) {
    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
  }
}
2215
2216
2217 /*
2218 * This method detects if phi is an induction variable, with phi_operand as
2219 * its "incremented" value (the other operand would be the "base" value).
2220 *
 * It checks if phi_operand has the form "phi + constant".
2222 * If yes, the constant is the increment that the induction variable gets at
2223 * every loop iteration.
2224 * Otherwise it returns 0.
2225 */
2226 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2227 HValue* phi_operand) {
2228 if (!phi_operand->representation().IsSmiOrInteger32()) return 0;
2229
2230 if (phi_operand->IsAdd()) {
2231 HAdd* operation = HAdd::cast(phi_operand);
2232 if (operation->left() == phi &&
2233 operation->right()->IsInteger32Constant()) {
2234 return operation->right()->GetInteger32Constant();
2235 } else if (operation->right() == phi &&
2236 operation->left()->IsInteger32Constant()) {
2237 return operation->left()->GetInteger32Constant();
2238 }
2239 } else if (phi_operand->IsSub()) {
2240 HSub* operation = HSub::cast(phi_operand);
2241 if (operation->left() == phi &&
2242 operation->right()->IsInteger32Constant()) {
2243 int constant = operation->right()->GetInteger32Constant();
2244 if (constant == kMinInt) return 0;
2245 return -constant;
2246 }
2247 }
2248
2249 return 0;
2250 }
2251
2252
2253 /*
2254 * Swaps the information in "update" with the one contained in "this".
2255 * The swapping is important because this method is used while doing a
2256 * dominator tree traversal, and "update" will retain the old data that
2257 * will be restored while backtracking.
2258 */
2259 void InductionVariableData::UpdateAdditionalLimit(
2260 InductionVariableLimitUpdate* update) {
2261 DCHECK(update->updated_variable == this);
2262 if (update->limit_is_upper) {
2263 swap(&additional_upper_limit_, &update->limit);
2264 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2265 } else {
2266 swap(&additional_lower_limit_, &update->limit);
2267 swap(&additional_lower_limit_is_included_, &update->limit_is_included);
2268 }
2269 }
2270
2271
2272 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2273 int32_t or_mask) {
2274 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
2275 const int32_t MAX_LIMIT = 1 << 30;
2276
2277 int32_t result = MAX_LIMIT;
2278
2279 if (limit() != NULL &&
2280 limit()->IsInteger32Constant()) {
2281 int32_t limit_value = limit()->GetInteger32Constant();
2282 if (!limit_included()) {
2283 limit_value--;
2284 }
2285 if (limit_value < result) result = limit_value;
2286 }
2287
2288 if (additional_upper_limit() != NULL &&
2289 additional_upper_limit()->IsInteger32Constant()) {
2290 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2291 if (!additional_upper_limit_is_included()) {
2292 limit_value--;
2293 }
2294 if (limit_value < result) result = limit_value;
2295 }
2296
2297 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2298 if (and_mask < result) result = and_mask;
2299 return result;
2300 }
2301
2302 // Add the effect of the or_mask.
2303 result |= or_mask;
2304
2305 return result >= MAX_LIMIT ? kNoLimit : result;
2306 }
2307
2308
2309 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2310 if (!v->IsPhi()) return v;
2311 HPhi* phi = HPhi::cast(v);
2312 if (phi->OperandCount() != 2) return v;
2313 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2314 return phi->OperandAt(1);
2315 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2316 return phi->OperandAt(0);
2317 } else {
2318 return v;
2319 }
2320 }
2321
2322
2323 InductionVariableData* InductionVariableData::GetInductionVariableData(
2324 HValue* v) {
2325 v = IgnoreOsrValue(v);
2326 if (v->IsPhi()) {
2327 return HPhi::cast(v)->induction_variable_data();
2328 }
2329 return NULL;
2330 }
2331
2332
2333 /*
2334 * Check if a conditional branch to "current_branch" with token "token" is
2335 * the branch that keeps the induction loop running (and, conversely, will
2336 * terminate it if the "other_branch" is taken).
2337 *
2338 * Three conditions must be met:
2339 * - "current_branch" must be in the induction loop.
2340 * - "other_branch" must be out of the induction loop.
2341 * - "token" and the induction increment must be "compatible": the token should
2342 * be a condition that keeps the execution inside the loop until the limit is
2343 * reached.
2344 */
2345 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2346 Token::Value token,
2347 HBasicBlock* current_branch,
2348 HBasicBlock* other_branch) {
2349 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2350 current_branch->current_loop())) {
2351 return false;
2352 }
2353
2354 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2355 other_branch->current_loop())) {
2356 return false;
2357 }
2358
2359 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2360 return true;
2361 }
2362 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2363 return true;
2364 }
2365 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
2366 return true;
2367 }
2368
2369 return false;
2370 }
2371
2372
// Extracts the induction-variable limit implied by entering "block" from a
// single predecessor that ends in a numeric comparison (leaves "result"
// untouched when no limit can be derived).
void InductionVariableData::ComputeLimitFromPredecessorBlock(
    HBasicBlock* block,
    LimitFromPredecessorBlock* result) {
  if (block->predecessors()->length() != 1) return;
  HBasicBlock* predecessor = block->predecessors()->at(0);
  HInstruction* end = predecessor->last();

  if (!end->IsCompareNumericAndBranch()) return;
  HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);

  Token::Value token = branch->token();
  if (!Token::IsArithmeticCompareOp(token)) return;

  HBasicBlock* other_target;
  if (block == branch->SuccessorAt(0)) {
    other_target = branch->SuccessorAt(1);
  } else {
    // Reaching "block" through the false successor means the comparison
    // failed, so the condition holding here is the negated token.
    other_target = branch->SuccessorAt(0);
    token = Token::NegateCompareOp(token);
    DCHECK(block == branch->SuccessorAt(1));
  }

  InductionVariableData* data;

  // Prefer "induction <token> limit"; otherwise try the operands swapped,
  // reversing the comparison to preserve its meaning.
  data = GetInductionVariableData(branch->left());
  HValue* limit = branch->right();
  if (data == NULL) {
    data = GetInductionVariableData(branch->right());
    token = Token::ReverseCompareOp(token);
    limit = branch->left();
  }

  if (data != NULL) {
    result->variable = data;
    result->token = token;
    result->limit = limit;
    result->other_target = other_target;
  }
}
2412
2413
2414 /*
2415 * Compute the limit that is imposed on an induction variable when entering
2416 * "block" (if any).
2417 * If the limit is the "proper" induction limit (the one that makes the loop
2418 * terminate when the induction variable reaches it) it is stored directly in
2419 * the induction variable data.
2420 * Otherwise the limit is written in "additional_limit" and the method
2421 * returns true.
2422 */
bool InductionVariableData::ComputeInductionVariableLimit(
    HBasicBlock* block,
    InductionVariableLimitUpdate* additional_limit) {
  LimitFromPredecessorBlock limit;
  ComputeLimitFromPredecessorBlock(block, &limit);
  if (!limit.LimitIsValid()) return false;

  if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
                                               block,
                                               limit.other_target)) {
    // This is the loop guard: store the proper limit and the exit edge
    // directly on the induction variable (nothing to report to the caller).
    limit.variable->limit_ = limit.limit;
    limit.variable->limit_included_ = limit.LimitIsIncluded();
    limit.variable->limit_validity_ = block;
    limit.variable->induction_exit_block_ = block->predecessors()->at(0);
    limit.variable->induction_exit_target_ = limit.other_target;
    return false;
  } else {
    // Not the loop guard: hand it back as an additional (scoped) limit.
    additional_limit->updated_variable = limit.variable;
    additional_limit->limit = limit.limit;
    additional_limit->limit_is_upper = limit.LimitIsUpper();
    additional_limit->limit_is_included = limit.LimitIsIncluded();
    return true;
  }
}
2447
2448
2449 Range* HMathMinMax::InferRange(Zone* zone) {
2450 if (representation().IsSmiOrInteger32()) {
2451 Range* a = left()->range();
2452 Range* b = right()->range();
2453 Range* res = a->Copy(zone);
2454 if (operation_ == kMathMax) {
2455 res->CombinedMax(b);
2456 } else {
2457 DCHECK(operation_ == kMathMin);
2458 res->CombinedMin(b);
2459 }
2460 return res;
2461 } else {
2462 return HValue::InferRange(zone);
2463 }
2464 }
2465
2466
2467 void HPushArguments::AddInput(HValue* value) {
2468 inputs_.Add(NULL, value->block()->zone());
2469 SetOperandAt(OperandCount() - 1, value);
2470 }
2471
2472
std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
  os << "[";
  for (int i = 0; i < OperandCount(); ++i) {
    os << " " << NameOf(OperandAt(i)) << " ";
  }
  // Note: the use count and representation mnemonic are deliberately
  // printed without a separator, e.g. "uses3t".
  return os << " uses" << UseCount()
            << representation_from_indirect_uses().Mnemonic() << " "
            << TypeOf(this) << "]";
}
2482
2483
2484 void HPhi::AddInput(HValue* value) {
2485 inputs_.Add(NULL, value->block()->zone());
2486 SetOperandAt(OperandCount() - 1, value);
2487 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2488 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2489 SetFlag(kIsArguments);
2490 }
2491 }
2492
2493
2494 bool HPhi::HasRealUses() {
2495 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2496 if (!it.value()->IsPhi()) return true;
2497 }
2498 return false;
2499 }
2500
2501
2502 HValue* HPhi::GetRedundantReplacement() {
2503 HValue* candidate = NULL;
2504 int count = OperandCount();
2505 int position = 0;
2506 while (position < count && candidate == NULL) {
2507 HValue* current = OperandAt(position++);
2508 if (current != this) candidate = current;
2509 }
2510 while (position < count) {
2511 HValue* current = OperandAt(position++);
2512 if (current != this && current != candidate) return NULL;
2513 }
2514 DCHECK(candidate != this);
2515 return candidate;
2516 }
2517
2518
void HPhi::DeleteFromGraph() {
  DCHECK(block() != NULL);
  // RemovePhi detaches this phi from its block; the second check verifies
  // the block pointer was reset.
  block()->RemovePhi(this);
  DCHECK(block() == NULL);
}
2524
2525
// Collects representation feedback from this phi's non-phi ("real") uses
// and seeds the truncation flags.
void HPhi::InitRealUses(int phi_id) {
  // Initialize real uses.
  phi_id_ = phi_id;
  // Compute a conservative approximation of truncating uses before inferring
  // representations. The proper, exact computation will be done later, when
  // inserting representation changes.
  SetFlag(kTruncatingToSmi);
  SetFlag(kTruncatingToInt32);
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* value = it.value();
    if (!value->IsPhi()) {
      // Only non-phi uses contribute observed-representation feedback.
      Representation rep = value->observed_input_representation(it.index());
      representation_from_non_phi_uses_ =
          representation_from_non_phi_uses().generalize(rep);
      if (rep.IsSmi() || rep.IsInteger32() || rep.IsDouble()) {
        has_type_feedback_from_uses_ = true;
      }

      if (FLAG_trace_representation) {
        PrintF("#%d Phi is used by real #%d %s as %s\n",
               id(), value->id(), value->Mnemonic(), rep.Mnemonic());
      }
      // Simulates do not count against the truncation flags.
      if (!value->IsSimulate()) {
        if (!value->CheckFlag(kTruncatingToSmi)) {
          ClearFlag(kTruncatingToSmi);
        }
        if (!value->CheckFlag(kTruncatingToInt32)) {
          ClearFlag(kTruncatingToInt32);
        }
      }
    }
  }
}
2559
2560
// Folds the other phi's non-phi-use feedback into this phi's indirect-use
// representation.
void HPhi::AddNonPhiUsesFrom(HPhi* other) {
  if (FLAG_trace_representation) {
    PrintF(
        "generalizing use representation '%s' of #%d Phi "
        "with uses of #%d Phi '%s'\n",
        representation_from_indirect_uses().Mnemonic(), id(), other->id(),
        other->representation_from_non_phi_uses().Mnemonic());
  }

  representation_from_indirect_uses_ =
      representation_from_indirect_uses().generalize(
          other->representation_from_non_phi_uses());
}
2574
2575
// Folds the given simulates into this one, consuming the list; the merged
// simulates are deleted from the graph.
void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
  while (!list->is_empty()) {
    HSimulate* from = list->RemoveLast();
    ZoneList<HValue*>* from_values = &from->values_;
    for (int i = 0; i < from_values->length(); ++i) {
      if (from->HasAssignedIndexAt(i)) {
        int index = from->GetAssignedIndexAt(i);
        // An assignment already present here shadows the merged one.
        if (HasValueForIndex(index)) continue;
        AddAssignedValue(index, from_values->at(i));
      } else {
        if (pop_count_ > 0) {
          // A pending pop in this simulate cancels a value pushed by the
          // merged one.
          pop_count_--;
        } else {
          AddPushedValue(from_values->at(i));
        }
      }
    }
    // Remaining pops of the merged simulate carry over.
    pop_count_ += from->pop_count_;
    from->DeleteAndReplaceWith(NULL);
  }
}
2597
2598
std::ostream& HSimulate::PrintDataTo(std::ostream& os) const { // NOLINT
  os << "id=" << ast_id().ToInt();
  if (pop_count_ > 0) os << " pop " << pop_count_;
  if (values_.length() > 0) {
    if (pop_count_ > 0) os << " /";
    // Values are printed from newest to oldest.
    for (int i = values_.length() - 1; i >= 0; --i) {
      if (HasAssignedIndexAt(i)) {
        os << " var[" << GetAssignedIndexAt(i) << "] = ";
      } else {
        os << " push ";
      }
      os << NameOf(values_[i]);
      if (i > 0) os << ",";
    }
  }
  return os;
}
2616
2617
2618 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2619 if (is_done_with_replay()) return;
2620 DCHECK(env != NULL);
2621 env->set_ast_id(ast_id());
2622 env->Drop(pop_count());
2623 for (int i = values()->length() - 1; i >= 0; --i) {
2624 HValue* value = values()->at(i);
2625 if (HasAssignedIndexAt(i)) {
2626 env->Bind(GetAssignedIndexAt(i), value);
2627 } else {
2628 env->Push(value);
2629 }
2630 }
2631 set_done_with_replay();
2632 }
2633
2634
2635 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2636 HCapturedObject* other) {
2637 for (int i = 0; i < values->length(); ++i) {
2638 HValue* value = values->at(i);
2639 if (value->IsCapturedObject()) {
2640 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2641 values->at(i) = other;
2642 } else {
2643 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2644 }
2645 }
2646 }
2647 }
2648
2649
2650 // Replay captured objects by replacing all captured objects with the
2651 // same capture id in the current and all outer environments.
2652 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2653 DCHECK(env != NULL);
2654 while (env != NULL) {
2655 ReplayEnvironmentNested(env->values(), this);
2656 env = env->outer();
2657 }
2658 }
2659
2660
std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
  // Print the capture id, then defer to the dematerialized-object printer.
  os << "#" << capture_id() << " ";
  return HDematerializedObject::PrintDataTo(os);
}
2665
2666
// Records a block that receives control when the inlined function returns.
void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
                                         Zone* zone) {
  DCHECK(return_target->IsInlineReturnTarget());
  return_targets_.Add(return_target, zone);
}
2672
2673
std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
  // Prints the debug name of the function being inlined.
  return os << function()->debug_name()->ToCString().get();
}
2677
2678
2679 static bool IsInteger32(double value) {
2680 if (value >= std::numeric_limits<int32_t>::min() &&
2681 value <= std::numeric_limits<int32_t>::max()) {
2682 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2683 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2684 }
2685 return false;
2686 }
2687
2688
// Constructs the special "hole NaN" double constant.
HConstant::HConstant(Special special)
    : HTemplateInstruction<0>(HType::TaggedNumber()),
      object_(Handle<Object>::null()),
      object_map_(Handle<Map>::null()),
      bit_field_(HasDoubleValueField::encode(true) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(0) {
  DCHECK_EQ(kHoleNaN, special);
  // Copy the hole NaN bit pattern verbatim; a double assignment could
  // presumably canonicalize the NaN and lose the payload.
  std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
  Initialize(Representation::Double());
}
2700
2701
// Constructs a constant from an arbitrary heap object or number, caching
// map- and number-derived properties in the bit field.
HConstant::HConstant(Handle<Object> object, Representation r)
    : HTemplateInstruction<0>(HType::FromValue(object)),
      object_(Unique<Object>::CreateUninitialized(object)),
      object_map_(Handle<Map>::null()),
      bit_field_(
          HasStableMapValueField::encode(false) |
          HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
          HasDoubleValueField::encode(false) |
          HasExternalReferenceValueField::encode(false) |
          IsNotInNewSpaceField::encode(true) |
          BooleanValueField::encode(object->BooleanValue()) |
          IsUndetectableField::encode(false) | IsCallableField::encode(false) |
          InstanceTypeField::encode(kUnknownInstanceType)) {
  if (object->IsHeapObject()) {
    Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
    Isolate* isolate = heap_object->GetIsolate();
    Handle<Map> map(heap_object->map(), isolate);
    bit_field_ = IsNotInNewSpaceField::update(
        bit_field_, !isolate->heap()->InNewSpace(*object));
    bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
    bit_field_ =
        IsUndetectableField::update(bit_field_, map->is_undetectable());
    bit_field_ = IsCallableField::update(bit_field_, map->is_callable());
    // Only stable maps are remembered, via an immovable unique handle.
    if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
    bit_field_ = HasStableMapValueField::update(
        bit_field_,
        HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
  }
  if (object->IsNumber()) {
    // Numbers additionally cache their int32/smi/double values.
    double n = object->Number();
    bool has_int32_value = IsInteger32(n);
    bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
    int32_value_ = DoubleToInt32(n);
    bit_field_ = HasSmiValueField::update(
        bit_field_, has_int32_value && Smi::IsValid(int32_value_));
    double_value_ = n;
    bit_field_ = HasDoubleValueField::update(bit_field_, true);
    // TODO(titzer): if this heap number is new space, tenure a new one.
  }

  Initialize(r);
}
2744
2745
// Raw copy constructor (used by CopyToRepresentation): every cached
// property is supplied explicitly by the caller.
HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
                     bool has_stable_map_value, Representation r, HType type,
                     bool is_not_in_new_space, bool boolean_value,
                     bool is_undetectable, InstanceType instance_type)
    : HTemplateInstruction<0>(type),
      object_(object),
      object_map_(object_map),
      bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
                 HasSmiValueField::encode(false) |
                 HasInt32ValueField::encode(false) |
                 HasDoubleValueField::encode(false) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(boolean_value) |
                 IsUndetectableField::encode(is_undetectable) |
                 InstanceTypeField::encode(instance_type)) {
  DCHECK(!object.handle().is_null());
  DCHECK(!type.IsTaggedNumber() || type.IsNone());
  Initialize(r);
}
2766
2767
// Constructs an int32 constant (the double value is cached as well).
HConstant::HConstant(int32_t integer_value, Representation r,
                     bool is_not_in_new_space, Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      bit_field_(HasStableMapValueField::encode(false) |
                 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
                 HasInt32ValueField::encode(true) |
                 HasDoubleValueField::encode(true) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(integer_value != 0) |
                 IsUndetectableField::encode(false) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(integer_value),
      double_value_(FastI2D(integer_value)) {
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = HasSmiValue() && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2790
2791
// Constructs a double constant (int32/smi values are cached when the
// double is int32-representable).
HConstant::HConstant(double double_value, Representation r,
                     bool is_not_in_new_space, Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      bit_field_(HasStableMapValueField::encode(false) |
                 HasInt32ValueField::encode(IsInteger32(double_value)) |
                 HasDoubleValueField::encode(true) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(double_value != 0 &&
                                           !std::isnan(double_value)) |
                 IsUndetectableField::encode(false) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(DoubleToInt32(double_value)),
      double_value_(double_value) {
  bit_field_ = HasSmiValueField::update(
      bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = HasSmiValue() && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2816
2817
// Constructs an external-reference constant (always External
// representation, no object handle).
HConstant::HConstant(ExternalReference reference)
    : HTemplateInstruction<0>(HType::Any()),
      object_(Unique<Object>(Handle<Object>::null())),
      object_map_(Handle<Map>::null()),
      bit_field_(
          HasStableMapValueField::encode(false) |
          HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
          HasDoubleValueField::encode(false) |
          HasExternalReferenceValueField::encode(true) |
          IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
          IsUndetectableField::encode(false) |
          InstanceTypeField::encode(kUnknownInstanceType)),
      external_reference_value_(reference) {
  Initialize(Representation::External());
}
2833
2834
// Finishes construction: picks a representation when none was requested and
// sets the GVN flag.
void HConstant::Initialize(Representation r) {
  if (r.IsNone()) {
    // Pick the most specific representation the cached values allow.
    if (HasSmiValue() && SmiValuesAre31Bits()) {
      r = Representation::Smi();
    } else if (HasInteger32Value()) {
      r = Representation::Integer32();
    } else if (HasDoubleValue()) {
      r = Representation::Double();
    } else if (HasExternalReferenceValue()) {
      r = Representation::External();
    } else {
      Handle<Object> object = object_.handle();
      if (object->IsJSObject()) {
        // Try to eagerly migrate JSObjects that have deprecated maps.
        Handle<JSObject> js_object = Handle<JSObject>::cast(object);
        if (js_object->map()->is_deprecated()) {
          JSObject::TryMigrateInstance(js_object);
        }
      }
      r = Representation::Tagged();
    }
  }
  if (r.IsSmi()) {
    // If we have an existing handle, zap it, because it might be a heap
    // number which we must not re-use when copying this HConstant to
    // Tagged representation later, because having Smi representation now
    // could cause heap object checks not to get emitted.
    object_ = Unique<Object>(Handle<Object>::null());
  }
  if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
    // If it's not a heap object, it can't be in new space.
    bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
  }
  set_representation(r);
  SetFlag(kUseGVN);
}
2871
2872
// Returns true when this constant refers to an object the GC never moves or
// collects; numeric constants (except special doubles) never qualify.
bool HConstant::ImmortalImmovable() const {
  if (HasInteger32Value()) {
    return false;
  }
  if (HasDoubleValue()) {
    // Only the special doubles (see IsSpecialDouble()) are immortal.
    if (IsSpecialDouble()) {
      return true;
    }
    return false;
  }
  if (HasExternalReferenceValue()) {
    return false;
  }

  DCHECK(!object_.handle().is_null());
  Heap* heap = isolate()->heap();
  DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
  DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
  // The macros below expand to one IsKnownGlobal comparison per immortal
  // immovable root, internalized string and string map.
  return
#define IMMORTAL_IMMOVABLE_ROOT(name) \
  object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
      IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
#undef IMMORTAL_IMMOVABLE_ROOT
#define INTERNALIZED_STRING(name, value) \
  object_.IsKnownGlobal(heap->name()) ||
      INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
#undef INTERNALIZED_STRING
#define STRING_TYPE(NAME, size, name, Name) \
  object_.IsKnownGlobal(heap->name##_map()) ||
      STRING_TYPE_LIST(STRING_TYPE)
#undef STRING_TYPE
      false;
}
2906
2907
2908 bool HConstant::EmitAtUses() {
2909 DCHECK(IsLinked());
2910 if (block()->graph()->has_osr() &&
2911 block()->graph()->IsStandardConstant(this)) {
2912 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2913 return true;
2914 }
2915 if (HasNoUses()) return true;
2916 if (IsCell()) return false;
2917 if (representation().IsDouble()) return false;
2918 if (representation().IsExternal()) return false;
2919 return true;
2920 }
2921
2922
// Creates a copy of this constant with the requested representation, or
// NULL when the cached values cannot support it.
HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
  if (r.IsSmi() && !HasSmiValue()) return NULL;
  if (r.IsInteger32() && !HasInteger32Value()) return NULL;
  if (r.IsDouble() && !HasDoubleValue()) return NULL;
  if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
  // Prefer the most specific cached value, in this order.
  if (HasInteger32Value()) {
    return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
  }
  if (HasDoubleValue()) {
    return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
  }
  if (HasExternalReferenceValue()) {
    return new(zone) HConstant(external_reference_value_);
  }
  DCHECK(!object_.handle().is_null());
  return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
                              type_, NotInNewSpace(), BooleanValue(),
                              IsUndetectable(), GetInstanceType());
}
2942
2943
2944 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2945 HConstant* res = NULL;
2946 if (HasInteger32Value()) {
2947 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2948 NotInNewSpace(), object_);
2949 } else if (HasDoubleValue()) {
2950 res = new (zone)
2951 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2952 NotInNewSpace(), object_);
2953 }
2954 return res != NULL ? Just(res) : Nothing<HConstant*>();
2955 }
2956
2957
2958 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
2959 Zone* zone) {
2960 HConstant* res = NULL;
2961 Handle<Object> handle = this->handle(isolate);
2962 if (handle->IsBoolean()) {
2963 res = handle->BooleanValue() ?
2964 new(zone) HConstant(1) : new(zone) HConstant(0);
2965 } else if (handle->IsUndefined()) {
2966 res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
2967 } else if (handle->IsNull()) {
2968 res = new(zone) HConstant(0);
2969 }
2970 return res != NULL ? Just(res) : Nothing<HConstant*>();
2971 }
2972
2973
std::ostream& HConstant::PrintDataTo(std::ostream& os) const { // NOLINT
  // Print the most specific cached value available.
  if (HasInteger32Value()) {
    os << int32_value_ << " ";
  } else if (HasDoubleValue()) {
    os << double_value_ << " ";
  } else if (HasExternalReferenceValue()) {
    os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
  } else {
    // The handle() method is silently and lazily mutating the object.
    Handle<Object> h = const_cast<HConstant*>(this)->handle(isolate());
    os << Brief(*h) << " ";
    if (HasStableMapValue()) os << "[stable-map] ";
    if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
  }
  if (!NotInNewSpace()) os << "[new space] ";
  return os;
}
2991
2992
std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
  os << NameOf(left()) << " " << NameOf(right());
  // "!" marks possible overflow; "-0?" marks a bailout on minus zero.
  if (CheckFlag(kCanOverflow)) os << " !";
  if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
  return os;
}
2999
3000
void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");

  // Smi is not allowed when some use requires a non-smi value.
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }

  // Prefer the observed output representation when one was recorded;
  // otherwise generalize with what the uses want.
  if (observed_output_representation_.IsNone()) {
    new_rep = RepresentationFromUses();
    UpdateRepresentation(new_rep, h_infer, "uses");
  } else {
    new_rep = RepresentationFromOutput();
    UpdateRepresentation(new_rep, h_infer, "output");
  }
}
3019
3020
// Computes the most general representation implied by the two operands.
Representation HBinaryOperation::RepresentationFromInputs() {
  // Determine the worst case of observed input representations and
  // the currently assumed output representation.
  Representation rep = representation();
  // Observed input representations are 1-based for binary operations.
  for (int i = 1; i <= 2; ++i) {
    rep = rep.generalize(observed_input_representation(i));
  }
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
  if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);

  return rep;
}
3037
3038
// Returns true when the observed (runtime-profiled) output representation
// may be safely disregarded: every use truncates the result to the current
// Smi/Integer32 representation anyway, so a wider output would be wasted.
bool HBinaryOperation::IgnoreObservedOutputRepresentation(
    Representation current_rep) {
  return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
          (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
         // Mul in Integer32 mode would be too precise.
         (!this->IsMul() || HMul::cast(this)->MulMinusOne());
}
3046
3047
// Picks the representation suggested by the observed output, unless it
// should be ignored (see IgnoreObservedOutputRepresentation).
Representation HBinaryOperation::RepresentationFromOutput() {
  Representation rep = representation();
  // Consider observed output representation, but ignore it if it's Double,
  // this instruction is not a division, and all its uses are truncating
  // to Integer32.
  if (observed_output_representation_.is_more_general_than(rep) &&
      !IgnoreObservedOutputRepresentation(rep)) {
    return observed_output_representation_;
  }
  // None means "no change requested" to the caller.
  return Representation::None();
}
3059
3060
3061 void HBinaryOperation::AssumeRepresentation(Representation r) {
3062 set_observed_input_representation(1, r);
3063 set_observed_input_representation(2, r);
3064 HValue::AssumeRepresentation(r);
3065 }
3066
3067
// Min/max take their representation solely from the inputs; the uses are
// deliberately not consulted.
void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  // Do not care about uses.
}
3074
3075
// Infers an integer range for a bitwise operation. XOR gets a dedicated
// bound derived from the highest bit that can differ between the operands;
// AND/OR are bounded via the ranges' bit masks. In all fall-through cases
// the result cannot be minus zero, since it is a bitwise integer result.
Range* HBitwise::InferRange(Zone* zone) {
  if (op() == Token::BIT_XOR) {
    if (left()->HasRange() && right()->HasRange()) {
      // The maximum value has the high bit, and all bits below, set:
      // (1 << high) - 1.
      // If the range can be negative, the minimum int is a negative number
      // with the high bit, and all bits below, unset:
      // -(1 << high).
      // If it cannot be negative, conservatively choose 0 as minimum int.
      int64_t left_upper = left()->range()->upper();
      int64_t left_lower = left()->range()->lower();
      int64_t right_upper = right()->range()->upper();
      int64_t right_lower = right()->range()->lower();

      // Fold negative bounds into the positive domain (~x maps the sign bit
      // pattern onto an equivalent non-negative bit pattern).
      if (left_upper < 0) left_upper = ~left_upper;
      if (left_lower < 0) left_lower = ~left_lower;
      if (right_upper < 0) right_upper = ~right_upper;
      if (right_lower < 0) right_lower = ~right_lower;

      int high = MostSignificantBit(
          static_cast<uint32_t>(
              left_upper | left_lower | right_upper | right_lower));

      // Use 64-bit arithmetic for the shift so high == 31 cannot overflow.
      int64_t limit = 1;
      limit <<= high;
      int32_t min = (left()->range()->CanBeNegative() ||
                     right()->range()->CanBeNegative())
                    ? static_cast<int32_t>(-limit) : 0;
      return new(zone) Range(min, static_cast<int32_t>(limit - 1));
    }
    Range* result = HValue::InferRange(zone);
    result->set_can_be_minus_zero(false);
    return result;
  }
  // AND/OR: combine the operand masks; a non-negative mask directly bounds
  // the result.
  const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
  int32_t left_mask = (left()->range() != NULL)
      ? left()->range()->Mask()
      : kDefaultMask;
  int32_t right_mask = (right()->range() != NULL)
      ? right()->range()->Mask()
      : kDefaultMask;
  int32_t result_mask = (op() == Token::BIT_AND)
      ? left_mask & right_mask
      : left_mask | right_mask;
  if (result_mask >= 0) return new(zone) Range(0, result_mask);

  Range* result = HValue::InferRange(zone);
  result->set_can_be_minus_zero(false);
  return result;
}
3126
3127
// Arithmetic right shift: when the shift count is a known constant, shift
// the left operand's range (or an unconstrained range) by that amount.
Range* HSar::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy(zone)
          : new(zone) Range();
      result->Sar(c->Integer32Value());
      return result;
    }
  }
  return HValue::InferRange(zone);
}
3141
3142
// Logical right shift: with a constant shift count, a possibly-negative
// input is bounded by the maximal unsigned value shifted down (only when
// the result is guaranteed to fit an int32, i.e. shift_count >= 1); a
// non-negative input behaves like an arithmetic shift of its range.
Range* HShr::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      // Shift counts are taken mod 32, matching the machine semantics.
      int shift_count = c->Integer32Value() & 0x1f;
      if (left()->range()->CanBeNegative()) {
        // Only compute bounds if the result always fits into an int32.
        return (shift_count >= 1)
            ? new(zone) Range(0,
                              static_cast<uint32_t>(0xffffffff) >> shift_count)
            : new(zone) Range();
      } else {
        // For positive inputs we can use the >> operator.
        Range* result = (left()->range() != NULL)
            ? left()->range()->Copy(zone)
            : new(zone) Range();
        result->Sar(c->Integer32Value());
        return result;
      }
    }
  }
  return HValue::InferRange(zone);
}
3166
3167
// Left shift: when the shift count is a known constant, shift the left
// operand's range (or an unconstrained range) accordingly.
Range* HShl::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy(zone)
          : new(zone) Range();
      result->Shl(c->Integer32Value());
      return result;
    }
  }
  return HValue::InferRange(zone);
}
3181
3182
// Derives the value range from the declared width of the loaded field
// (8/16-bit signed or unsigned), or from the string-length invariant when
// loading a string's length; otherwise falls back to the generic range.
Range* HLoadNamedField::InferRange(Zone* zone) {
  if (access().representation().IsInteger8()) {
    return new(zone) Range(kMinInt8, kMaxInt8);
  }
  if (access().representation().IsUInteger8()) {
    return new(zone) Range(kMinUInt8, kMaxUInt8);
  }
  if (access().representation().IsInteger16()) {
    return new(zone) Range(kMinInt16, kMaxInt16);
  }
  if (access().representation().IsUInteger16()) {
    return new(zone) Range(kMinUInt16, kMaxUInt16);
  }
  if (access().IsStringLength()) {
    return new(zone) Range(0, String::kMaxLength);
  }
  return HValue::InferRange(zone);
}
3201
3202
// Derives the value range from the element kind of the backing typed array
// (8/16-bit integer kinds); other kinds fall back to the generic range.
Range* HLoadKeyed::InferRange(Zone* zone) {
  switch (elements_kind()) {
    case INT8_ELEMENTS:
      return new(zone) Range(kMinInt8, kMaxInt8);
    case UINT8_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      return new(zone) Range(kMinUInt8, kMaxUInt8);
    case INT16_ELEMENTS:
      return new(zone) Range(kMinInt16, kMaxInt16);
    case UINT16_ELEMENTS:
      return new(zone) Range(kMinUInt16, kMaxUInt16);
    default:
      return HValue::InferRange(zone);
  }
}
3218
3219
// Prints the comparison token followed by the operands (via the base class).
std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << Token::Name(token()) << " ";
  return HBinaryOperation::PrintDataTo(os);
}
3224
3225
// Prints the comparison token followed by the successor blocks.
std::ostream& HStringCompareAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << Token::Name(token()) << " ";
  return HControlInstruction::PrintDataTo(os);
}
3231
3232
// Prints the comparison token and both operands, then the successor blocks.
std::ostream& HCompareNumericAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
  return HControlInstruction::PrintDataTo(os);
}
3238
3239
3240 std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
3241 std::ostream& os) const { // NOLINT
3242 os << NameOf(left()) << " " << NameOf(right());
3243 return HControlInstruction::PrintDataTo(os);
3244 }
3245
3246
// Tries to resolve the branch statically. Succeeds when a successor is
// already pinned, or when both operands are constants that can be folded.
// Returns true with *block set on success; otherwise *block is NULL.
bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
    *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
3260
3261
// Tries to resolve the "is string?" branch statically: via a pinned
// successor, via constant folding, or via the value's statically known
// type (string takes the first successor; types that can never be a
// string take the second).
bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->HasStringValue()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  if (value()->type().IsString()) {
    *block = FirstSuccessor();
    return true;
  }
  // These types are disjoint from string, so the branch is always false.
  if (value()->type().IsSmi() ||
      value()->type().IsNull() ||
      value()->type().IsBoolean() ||
      value()->type().IsUndefined() ||
      value()->type().IsJSObject()) {
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
3287
3288
3289 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3290 if (FLAG_fold_constants && value()->IsConstant()) {
3291 *block = HConstant::cast(value())->IsUndetectable()
3292 ? FirstSuccessor() : SecondSuccessor();
3293 return true;
3294 }
3295 *block = NULL;
3296 return false;
3297 }
3298
3299
// Statically resolves the branch when the tested value is a constant whose
// instance type can be checked against the [from_, to_] window.
bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    InstanceType type = HConstant::cast(value())->GetInstanceType();
    *block = (from_ <= type) && (type <= to_)
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
3310
3311
// The hole comparison simply adopts the representation of its input.
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}
3316
3317
// Resolves x <op> x statically for Smi/Integer32 inputs: reflexive
// operators (==, ===, <=, >=) always take the first successor, the others
// the second. (Only valid for non-NaN-capable representations.)
bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (left() == right() &&
      left()->representation().IsSmiOrInteger32()) {
    *block = (token() == Token::EQ ||
              token() == Token::EQ_STRICT ||
              token() == Token::LTE ||
              token() == Token::GTE)
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
3331
3332
// Statically resolves the minus-zero check for double constants, and for
// Smi/Integer32 inputs which can never encode -0.
bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    if (constant->HasDoubleValue()) {
      *block = IsMinusZero(constant->DoubleValue())
          ? FirstSuccessor() : SecondSuccessor();
      return true;
    }
  }
  if (value()->representation().IsSmiOrInteger32()) {
    // A Smi or Integer32 cannot contain minus zero.
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
3350
3351
// The minus-zero check simply adopts the representation of its input.
void HCompareMinusZeroAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}
3356
3357
3358 std::ostream& HGoto::PrintDataTo(std::ostream& os) const { // NOLINT
3359 return os << *SuccessorAt(0);
3360 }
3361
3362
// Chooses a representation for the numeric comparison from the observed
// and actual operand representations: Smi/Integer32 when everything fits,
// otherwise Double. For ordered relational comparisons in Double mode,
// undefined may safely be treated as NaN; equality comparisons must not
// do this and will deopt instead (see the comment below and v8:1434).
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  // NOTE: observed input indices are 0-based here, unlike the 1-based
  // accessors used by HBinaryOperation.
  Representation observed_left = observed_input_representation(0);
  Representation observed_right = observed_input_representation(1);

  Representation rep = Representation::None();
  rep = rep.generalize(observed_left);
  rep = rep.generalize(observed_right);
  if (rep.IsNone() || rep.IsSmiOrInteger32()) {
    if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
    if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
  } else {
    rep = Representation::Double();
  }

  if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==,
    // === and !=) have special handling of undefined, e.g. undefined ==
    // undefined is 'true'. Relational comparisons have a different semantic,
    // first calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
    // inputs are doubles caused 'undefined' to be converted to NaN. That's
    // compatible out-of-the box with ordered relational comparisons (<, >,
    // <=, >=). However, for equality comparisons (and for 'in' and
    // 'instanceof'), it is not consistent with the spec. For example, it
    // would cause undefined == undefined (should be true) to be evaluated as
    // NaN == NaN (false). Therefore, any comparisons other than ordered
    // relational comparisons must cause a deopt when one of their arguments
    // is undefined. See also v8:1434
    if (Token::IsOrderedRelationalCompareOp(token_) && !is_strong(strength())) {
      SetFlag(kAllowUndefinedAsNaN);
    }
  }
  ChangeRepresentation(rep);
}
3400
3401
3402 std::ostream& HParameter::PrintDataTo(std::ostream& os) const { // NOLINT
3403 return os << index();
3404 }
3405
3406
// Prints the object and accessed field, the map set guarding the load (if
// any), and the dependency value the load is ordered after (if any).
std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(object()) << access_;

  if (maps() != NULL) {
    os << " [" << *maps()->at(0).handle();
    for (int i = 1; i < maps()->size(); ++i) {
      os << "," << *maps()->at(i).handle();
    }
    os << "]";
  }

  if (HasDependency()) os << " " << NameOf(dependency());
  return os;
}
3421
3422
// Prints the receiver and the property name as "object.name".
std::ostream& HLoadNamedGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  Handle<String> n = Handle<String>::cast(name());
  return os << NameOf(object()) << "." << n->ToCString().get();
}
3428
3429
// Prints the elements backing store (including element kind for typed
// arrays), the key with any dehoisted base offset, the dependency value,
// and whether a hole check is required.
std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const {  // NOLINT
  if (!is_fixed_typed_array()) {
    os << NameOf(elements());
  } else {
    DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
  }

  os << "[" << NameOf(key());
  if (IsDehoisted()) os << " + " << base_offset();
  os << "]";

  if (HasDependency()) os << " " << NameOf(dependency());
  if (RequiresHoleCheck()) os << " check_hole";
  return os;
}
3447
3448
// Attempts to fold increase_by_value into the encoded base offset.
// Returns false (leaving the offset unchanged) if the addition would
// overflow uint32 or the new offset no longer fits the bit field.
bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually simply the size of the array header, except
  // with dehoisting adds an addition offset due to a array index key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize)
  uint32_t base_offset = BaseOffsetField::decode(bit_field_);
  // CheckedNumeric tracks overflow so the add can be validated before use.
  v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
  addition_result += increase_by_value;
  if (!addition_result.IsValid()) return false;
  base_offset = addition_result.ValueOrDie();
  if (!BaseOffsetField::is_valid(base_offset)) return false;
  bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
  return true;
}
3463
3464
// Determines whether every use of this load can cope with receiving the
// hole value directly, which lets the load skip its own hole check.
bool HLoadKeyed::UsesMustHandleHole() const {
  // Packed elements never contain holes.
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  // Typed arrays have no holes.
  if (IsFixedTypedArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == ALLOW_RETURN_HOLE) {
    // For double elements the hole is encoded as NaN, so returning it is
    // only fine if every use treats undefined/hole as NaN.
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }

  if (IsFastDoubleElementsKind(elements_kind())) {
    return false;
  }

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  // Conservative: only HChange uses are known to handle the hole.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }

  return true;
}
3497
3498
3499 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3500 return IsFastDoubleElementsKind(elements_kind()) &&
3501 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
3502 }
3503
3504
// Decides whether this load must perform a hole check: only holey,
// non-typed-array element kinds can contain holes, and the check can be
// elided when holes are converted to undefined or when all uses handle
// the hole themselves.
bool HLoadKeyed::RequiresHoleCheck() const {
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsFixedTypedArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == CONVERT_HOLE_TO_UNDEFINED) {
    return false;
  }

  return !UsesMustHandleHole();
}
3520
3521
// Prints the generic keyed load as "object[key]".
std::ostream& HLoadKeyedGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(object()) << "[" << NameOf(key()) << "]";
}
3526
3527
// Canonicalization hook: rewrites a generic keyed load whose key comes
// from a for-in enumeration cache into a direct field load by index,
// guarded by a map check. Inserts the map check and index load before this
// instruction, then replaces this instruction via Prepend. Returns the
// replacement (or this instruction unchanged when the pattern is absent).
HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property name generated
  // by for-in statement as a key and rewrite them into fast property load
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      // The rewrite is only valid when the cache was built for exactly
      // this receiver.
      if (names_cache->enumerable() == object()) {
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        // Guard against map changes since the cache was populated.
        HCheckMapValue* map_check = HCheckMapValue::New(
            block()->graph()->isolate(), block()->graph()->zone(),
            block()->graph()->GetInvalidContext(), object(),
            names_cache->map());
        // Load the field index from the parallel index cache using the
        // same key as the name load.
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->isolate(), block()->graph()->zone(),
            block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
            key_load->key(), key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        return Prepend(new(block()->zone()) HLoadFieldByIndex(
            object(), index));
      }
    }
  }

  return this;
}
3559
3560
// Prints the generic named store as "object.name = value".
std::ostream& HStoreNamedGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  Handle<String> n = Handle<String>::cast(name());
  return os << NameOf(object()) << "." << n->ToCString().get() << " = "
            << NameOf(value());
}
3567
3568
// Prints the context depth and slot being stored plus the stored value.
std::ostream& HStoreGlobalViaContext::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << " depth:" << depth() << " slot:" << slot_index() << " = "
            << NameOf(value());
}
3574
3575
// Prints the store target, value, and annotations for write barriers and
// map transitions.
std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(object()) << access_ << " = " << NameOf(value());
  if (NeedsWriteBarrier()) os << " (write-barrier)";
  if (has_transition()) os << " (transition map " << *transition_map() << ")";
  return os;
}
3582
3583
// Prints the elements backing store (including element kind for typed
// arrays), the key with any dehoisted base offset, and the stored value.
std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const {  // NOLINT
  if (!is_fixed_typed_array()) {
    os << NameOf(elements());
  } else {
    DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
  }

  os << "[" << NameOf(key());
  if (IsDehoisted()) os << " + " << base_offset();
  return os << "] = " << NameOf(value());
}
3597
3598
3599 std::ostream& HStoreKeyedGeneric::PrintDataTo(
3600 std::ostream& os) const { // NOLINT
3601 return os << NameOf(object()) << "[" << NameOf(key())
3602 << "] = " << NameOf(value());
3603 }
3604
3605
// Prints the object, the source and target maps with their element kinds,
// and whether the transition is a simple in-place map change.
std::ostream& HTransitionElementsKind::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(object());
  ElementsKind from_kind = original_map().handle()->elements_kind();
  ElementsKind to_kind = transitioned_map().handle()->elements_kind();
  os << " " << *original_map().handle() << " ["
     << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
     << *transitioned_map().handle() << " ["
     << ElementsAccessor::ForKind(to_kind)->name() << "]";
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
  return os;
}
3618
3619
// Prints the name of the global being loaded.
std::ostream& HLoadGlobalGeneric::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << name()->ToCString().get() << " ";
}
3624
3625
// Prints the context depth and slot being read.
std::ostream& HLoadGlobalViaContext::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << "depth:" << depth() << " slot:" << slot_index();
}
3630
3631
// Prints the base allocation and the offset of the inner object within it.
std::ostream& HInnerAllocatedObject::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(base_object()) << " offset ";
  return offset()->PrintTo(os);
}
3637
3638
3639 std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const { // NOLINT
3640 return os << NameOf(value()) << "[" << slot_index() << "]";
3641 }
3642
3643
// Prints the context slot store as "context[slot] = value".
std::ostream& HStoreContextSlot::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(context()) << "[" << slot_index()
            << "] = " << NameOf(value());
}
3649
3650
3651 // Implementation of type inference and type conversions. Calculates
3652 // the inferred type of this instruction based on the input operands.
3653
// Default type inference: an ordinary value keeps its statically assigned
// type; subclasses override to combine operand types.
HType HValue::CalculateInferredType() {
  return type_;
}
3657
3658
// A phi's type is the join (Combine) of all its operands' types; an
// operand-less phi is conservatively Tagged.
HType HPhi::CalculateInferredType() {
  if (OperandCount() == 0) return HType::Tagged();
  HType result = OperandAt(0)->type();
  for (int i = 1; i < OperandCount(); ++i) {
    HType current = OperandAt(i)->type();
    result = result.Combine(current);
  }
  return result;
}
3668
3669
3670 HType HChange::CalculateInferredType() {
3671 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3672 return type();
3673 }
3674
3675
// Computes the representation implied by the operand. Floor/round with
// flexible backend support defer entirely to the uses by returning None.
Representation HUnaryMathOperation::RepresentationFromInputs() {
  if (SupportsFlexibleFloorAndRound() &&
      (op_ == kMathFloor || op_ == kMathRound)) {
    // Floor and Round always take a double input. The integral result can
    // be used as an integer or a double. Infer the representation from the
    // uses.
    return Representation::None();
  }
  Representation rep = representation();
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
  Representation input_rep = value()->representation();
  if (!input_rep.IsTagged()) {
    rep = rep.generalize(input_rep);
  }
  return rep;
}
3692
3693
// Allocation folding: attempts to merge this allocation into a dominating
// HAllocate by growing the dominator's size and replacing this instruction
// with an HInnerAllocatedObject pointing into the dominator's reservation.
// Returns true if the fold succeeded (this instruction is then deleted and
// replaced), false when any precondition fails. Every bailout is traced
// under --trace-allocation-folding.
bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
                                          HValue* dominator) {
  DCHECK(side_effect == kNewSpacePromotion);
  Zone* zone = block()->zone();
  Isolate* isolate = block()->isolate();
  if (!FLAG_use_allocation_folding) return false;

  // Try to fold allocations together with their dominating allocations.
  if (!dominator->IsAllocate()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s)\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  // Check whether we are folding within the same block for local folding.
  if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  HAllocate* dominator_allocate = HAllocate::cast(dominator);
  HValue* dominator_size = dominator_allocate->size();
  HValue* current_size = size();

  // TODO(hpayer): Add support for non-constant allocation in dominator.
  if (!dominator_size->IsInteger32Constant()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), "
             "dynamic allocation size in dominator\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }


  // Both allocations must target the same space.
  if (!IsFoldable(dominator_allocate)) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
             Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  if (!has_size_upper_bound()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), "
             "can't estimate total allocation size\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  if (!current_size->IsInteger32Constant()) {
    // If it's not constant then it is a size_in_bytes calculation graph
    // like this: (const_header_size + const_element_size * size).
    DCHECK(current_size->IsInstruction());

    // The dynamic size computation must be available at the dominator's
    // position, since it will feed the dominator's new size.
    HInstruction* current_instr = HInstruction::cast(current_size);
    if (!current_instr->Dominates(dominator_allocate)) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size "
               "value does not dominate target allocation\n",
            id(), Mnemonic(), dominator_allocate->id(),
            dominator_allocate->Mnemonic());
      }
      return false;
    }
  }

  DCHECK(
      (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
      (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));

  // First update the size of the dominator allocate instruction.
  dominator_size = dominator_allocate->size();
  int32_t original_object_size =
      HConstant::cast(dominator_size)->GetInteger32Constant();
  int32_t dominator_size_constant = original_object_size;

  // Account for potential alignment padding between the two objects.
  if (MustAllocateDoubleAligned()) {
    if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
      dominator_size_constant += kDoubleSize / 2;
    }
  }

  int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
  int32_t new_dominator_size = dominator_size_constant + current_size_max_value;

  // Since we clear the first word after folded memory, we cannot use the
  // whole Page::kMaxRegularHeapObjectSize memory.
  if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
          id(), Mnemonic(), dominator_allocate->id(),
          dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return false;
  }

  HInstruction* new_dominator_size_value;

  if (current_size->IsInteger32Constant()) {
    // Fully constant: materialize the combined size directly.
    new_dominator_size_value = HConstant::CreateAndInsertBefore(
        isolate, zone, context(), new_dominator_size, Representation::None(),
        dominator_allocate);
  } else {
    // Dynamic: emit (dominator constant size + current dynamic size).
    HValue* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
        isolate, zone, context(), dominator_size_constant,
        Representation::Integer32(), dominator_allocate);

    // Add old and new size together and insert.
    current_size->ChangeRepresentation(Representation::Integer32());

    new_dominator_size_value = HAdd::New(
        isolate, zone, context(), new_dominator_size_constant, current_size);
    new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
    new_dominator_size_value->ChangeRepresentation(Representation::Integer32());

    new_dominator_size_value->InsertBefore(dominator_allocate);
  }

  dominator_allocate->UpdateSize(new_dominator_size_value);

  // The dominator inherits this allocation's alignment requirement.
  if (MustAllocateDoubleAligned()) {
    if (!dominator_allocate->MustAllocateDoubleAligned()) {
      dominator_allocate->MakeDoubleAligned();
    }
  }

  bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
#ifdef VERIFY_HEAP
  keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
#endif

  if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
    dominator_allocate->MakePrefillWithFiller();
  } else {
    // TODO(hpayer): This is a short-term hack to make allocation mementos
    // work again in new space.
    dominator_allocate->ClearNextMapWord(original_object_size);
  }

  dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());

  // After that replace the dominated allocate instruction.
  HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
      isolate, zone, context(), dominator_size_constant, Representation::None(),
      this);

  HInstruction* dominated_allocate_instr = HInnerAllocatedObject::New(
      isolate, zone, context(), dominator_allocate, inner_offset, type());
  dominated_allocate_instr->InsertBefore(this);
  DeleteAndReplaceWith(dominated_allocate_instr);
  if (FLAG_trace_allocation_folding) {
    PrintF("#%d (%s) folded into #%d (%s)\n",
        id(), Mnemonic(), dominator_allocate->id(),
        dominator_allocate->Mnemonic());
  }
  return true;
}
3859
3860
// Grows the size recorded in the existing free-space filler object by
// free_space_size, replacing the stored size constant in place.
void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
  DCHECK(filler_free_space_size_ != NULL);
  Zone* zone = block()->zone();
  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
      block()->isolate(), zone, context(),
      filler_free_space_size_->value()->GetInteger32Constant() +
      free_space_size,
      Representation::Smi(), filler_free_space_size_);
  filler_free_space_size_->UpdateValue(new_free_space_size);
}
3874
3875
// Emits instructions that format the unused tail of the dominating folded
// allocation as a FreeSpace object (map + size), so the heap remains
// iterable. Remembers the size store so UpdateFreeSpaceFiller can grow it.
void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
  DCHECK(filler_free_space_size_ == NULL);
  Isolate* isolate = block()->isolate();
  Zone* zone = block()->zone();
  // The filler starts right after the dominating allocation's payload.
  HInstruction* free_space_instr =
      HInnerAllocatedObject::New(isolate, zone, context(), dominating_allocate_,
                                 dominating_allocate_->size(), type());
  free_space_instr->InsertBefore(this);
  HConstant* filler_map = HConstant::CreateAndInsertAfter(
      zone, Unique<Map>::CreateImmovable(isolate->factory()->free_space_map()),
      true, free_space_instr);
  HInstruction* store_map =
      HStoreNamedField::New(isolate, zone, context(), free_space_instr,
                            HObjectAccess::ForMap(), filler_map);
  store_map->SetFlag(HValue::kHasNoObservableSideEffects);
  store_map->InsertAfter(filler_map);

  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* filler_size =
      HConstant::CreateAndInsertAfter(isolate, zone, context(), free_space_size,
                                      Representation::Smi(), store_map);
  // Must force Smi representation for x64 (see comment above).
  HObjectAccess access = HObjectAccess::ForMapAndOffset(
      isolate->factory()->free_space_map(), FreeSpace::kSizeOffset,
      Representation::Smi());
  HStoreNamedField* store_size = HStoreNamedField::New(
      isolate, zone, context(), free_space_instr, access, filler_size);
  store_size->SetFlag(HValue::kHasNoObservableSideEffects);
  store_size->InsertAfter(filler_size);
  filler_free_space_size_ = store_size;
}
3909
3910
// If required, emits a store of constant 0 to the word at the given offset
// (the map slot of the next object after this allocation), so the heap
// does not misinterpret stale data there as a map pointer.
void HAllocate::ClearNextMapWord(int offset) {
  if (MustClearNextMapWord()) {
    Zone* zone = block()->zone();
    HObjectAccess access =
        HObjectAccess::ForObservableJSObjectOffset(offset);
    HStoreNamedField* clear_next_map =
        HStoreNamedField::New(block()->isolate(), zone, context(), this, access,
                              block()->graph()->GetConstant0());
    clear_next_map->ClearAllSideEffects();
    clear_next_map->InsertAfter(this);
  }
}
3923
3924
// Prints the allocation size and flag letters: N = new space, P = old
// space, A = double aligned, F = prefill with filler.
std::ostream& HAllocate::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(size()) << " (";
  if (IsNewSpaceAllocation()) os << "N";
  if (IsOldSpaceAllocation()) os << "P";
  if (MustAllocateDoubleAligned()) os << "A";
  if (MustPrefillWithFiller()) os << "F";
  return os << ")";
}
3933
3934
bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually simply the size of the array header, except
  // when dehoisting adds an additional offset due to an array index key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize). Checked arithmetic rejects
  // (returns false on) any increase that would overflow uint32.
  v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
  addition_result += increase_by_value;
  if (!addition_result.IsValid()) return false;
  base_offset_ = addition_result.ValueOrDie();
  return true;
}
3946
3947
3948 bool HStoreKeyed::NeedsCanonicalization() {
3949 switch (value()->opcode()) {
3950 case kLoadKeyed: {
3951 ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
3952 return IsFixedFloatElementsKind(load_kind);
3953 }
3954 case kChange: {
3955 Representation from = HChange::cast(value())->from();
3956 return from.IsTagged() || from.IsHeapObject();
3957 }
3958 case kLoadNamedField:
3959 case kPhi: {
3960 // Better safe than sorry...
3961 return true;
3962 }
3963 default:
3964 return false;
3965 }
3966 }
3967
3968
// Constant-folding helpers: wrap a C++ value in a fresh HConstant, truncated
// to int32 or widened to double respectively. #undef'd after the last user.
#define H_CONSTANT_INT(val) \
HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
HConstant::New(isolate, zone, context, static_cast<double>(val))

// Defines HInstr::New for a simple binary arithmetic instruction. When
// constant folding is enabled and both operands are constant numbers, the
// result is computed on doubles and emitted as an int32 constant when it is
// exactly representable as one, otherwise as a double constant. Failing
// that, a fresh instruction is allocated in `zone`.
#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op)                      \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,    \
                            HValue* left, HValue* right, Strength strength) { \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {   \
      HConstant* c_left = HConstant::cast(left);                              \
      HConstant* c_right = HConstant::cast(right);                            \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {          \
        double double_res = c_left->DoubleValue() op c_right->DoubleValue();  \
        if (IsInt32Double(double_res)) {                                      \
          return H_CONSTANT_INT(double_res);                                  \
        }                                                                     \
        return H_CONSTANT_DOUBLE(double_res);                                 \
      }                                                                       \
    }                                                                         \
    return new (zone) HInstr(context, left, right, strength);                 \
  }


DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
3997
3998
3999 HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
4000 HValue* left, HValue* right,
4001 PretenureFlag pretenure_flag,
4002 StringAddFlags flags,
4003 Handle<AllocationSite> allocation_site) {
4004 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4005 HConstant* c_right = HConstant::cast(right);
4006 HConstant* c_left = HConstant::cast(left);
4007 if (c_left->HasStringValue() && c_right->HasStringValue()) {
4008 Handle<String> left_string = c_left->StringValue();
4009 Handle<String> right_string = c_right->StringValue();
4010 // Prevent possible exception by invalid string length.
4011 if (left_string->length() + right_string->length() < String::kMaxLength) {
4012 MaybeHandle<String> concat = isolate->factory()->NewConsString(
4013 c_left->StringValue(), c_right->StringValue());
4014 return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
4015 }
4016 }
4017 }
4018 return new (zone)
4019 HStringAdd(context, left, right, pretenure_flag, flags, allocation_site);
4020 }
4021
4022
4023 std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const { // NOLINT
4024 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4025 os << "_CheckBoth";
4026 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
4027 os << "_CheckLeft";
4028 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
4029 os << "_CheckRight";
4030 }
4031 HBinaryOperation::PrintDataTo(os);
4032 os << " (";
4033 if (pretenure_flag() == NOT_TENURED)
4034 os << "N";
4035 else if (pretenure_flag() == TENURED)
4036 os << "D";
4037 return os << ")";
4038 }
4039
4040
4041 HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
4042 HValue* context, HValue* char_code) {
4043 if (FLAG_fold_constants && char_code->IsConstant()) {
4044 HConstant* c_code = HConstant::cast(char_code);
4045 if (c_code->HasNumberValue()) {
4046 if (std::isfinite(c_code->DoubleValue())) {
4047 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
4048 return HConstant::New(
4049 isolate, zone, context,
4050 isolate->factory()->LookupSingleCharacterStringFromCode(code));
4051 }
4052 return HConstant::New(isolate, zone, context,
4053 isolate->factory()->empty_string());
4054 }
4055 }
4056 return new(zone) HStringCharFromCode(context, char_code);
4057 }
4058
4059
// Constant-folds unary math builtins on a constant numeric input. The
// do/while(false) wrapper gives the guard checks a single bail-out target:
// any `break` falls through to allocating a regular instruction.
HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* value,
                                       BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathExp:
          // exp(+inf) = +inf, exp(-inf) = 0.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          // log/sqrt of -inf is NaN, of +inf is +inf.
          return H_CONSTANT_DOUBLE(
              (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
        case kMathPowHalf:
        case kMathAbs:
          // Both map -inf to +inf and leave +inf unchanged.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFround:
        case kMathFloor:
          // Rounding leaves infinities unchanged.
          return H_CONSTANT_DOUBLE(d);
        case kMathClz32:
          // Folds to 32 leading zeros for an infinite input.
          return H_CONSTANT_INT(32);
        default:
          UNREACHABLE();
          break;
      }
    }
    // Finite, non-NaN input: fold using the same helpers the generated code
    // would use (fast_exp/fast_sqrt/power_double_double).
    switch (op) {
      case kMathExp:
        return H_CONSTANT_DOUBLE(fast_exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(std::log(d));
      case kMathSqrt:
        return H_CONSTANT_DOUBLE(fast_sqrt(d));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        // Adding 0.0 normalizes -0.0 to +0.0.
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(Floor(d + 0.5));
      case kMathFround:
        // Round-trip through float to get single-precision rounding.
        return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(Floor(d));
      case kMathClz32: {
        uint32_t i = DoubleToUint32(d);
        return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
      }
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}
4127
4128
// Chooses between an int32 and a double output representation for floor and
// round; every other operation defers to the generic use-based inference.
Representation HUnaryMathOperation::RepresentationFromUses() {
  if (op_ != kMathFloor && op_ != kMathRound) {
    return HValue::RepresentationFromUses();
  }

  // The instruction can have an int32 or double output. Prefer a double
  // representation if there are double uses.
  bool use_double = false;

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    int use_index = it.index();
    Representation rep_observed = use->observed_input_representation(use_index);
    Representation rep_required = use->RequiredInputRepresentation(use_index);
    use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
    if (use_double && !FLAG_trace_representation) {
      // Having seen one double is enough.
      break;
    }
    // With tracing enabled, keep iterating so that every use gets logged.
    if (FLAG_trace_representation) {
      if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
        PrintF("#%d %s is used by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_observed.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      } else {
        PrintF("#%d %s is required by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_required.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      }
    }
  }
  return use_double ? Representation::Double() : Representation::Integer32();
}
4164
4165
// Constant-folds x ** y when both operands are constant numbers.
HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
                          HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double result = power_helper(c_left->DoubleValue(),
                                   c_right->DoubleValue());
      // Canonicalize any NaN produced by power_helper to the quiet NaN
      // before embedding it in the graph.
      return H_CONSTANT_DOUBLE(std::isnan(result)
                               ? std::numeric_limits<double>::quiet_NaN()
                               : result);
    }
  }
  return new(zone) HPower(left, right);
}
4181
4182
// Constant-folds Math.min/Math.max over two constant numbers, including the
// sign-sensitive treatment of -0 vs +0 and the NaN-propagation case.
HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
                               HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0: min prefers the negatively signed operand.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0: max prefers the positively signed operand.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}
4214
4215
// Constant-folds the modulus of two constant int32 values. A zero result
// from a negative dividend is emitted as the double -0.0, matching JS where
// the result of % takes the sign of the dividend.
HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right, Strength strength) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      // kMinInt % -1 would overflow the C++ % operator; fold it directly.
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
      // Division by zero is not folded; the instruction handles it.
    }
  }
  return new (zone) HMod(context, left, right, strength);
}
4238
4239
4240 HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
4241 HValue* left, HValue* right, Strength strength) {
4242 // If left and right are constant values, try to return a constant value.
4243 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4244 HConstant* c_left = HConstant::cast(left);
4245 HConstant* c_right = HConstant::cast(right);
4246 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4247 if (c_right->DoubleValue() != 0) {
4248 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
4249 if (IsInt32Double(double_res)) {
4250 return H_CONSTANT_INT(double_res);
4251 }
4252 return H_CONSTANT_DOUBLE(double_res);
4253 } else {
4254 int sign = Double(c_left->DoubleValue()).Sign() *
4255 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
4256 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
4257 }
4258 }
4259 }
4260 return new (zone) HDiv(context, left, right, strength);
4261 }
4262
4263
4264 HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
4265 Token::Value op, HValue* left, HValue* right,
4266 Strength strength) {
4267 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4268 HConstant* c_left = HConstant::cast(left);
4269 HConstant* c_right = HConstant::cast(right);
4270 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4271 int32_t result;
4272 int32_t v_left = c_left->NumberValueAsInteger32();
4273 int32_t v_right = c_right->NumberValueAsInteger32();
4274 switch (op) {
4275 case Token::BIT_XOR:
4276 result = v_left ^ v_right;
4277 break;
4278 case Token::BIT_AND:
4279 result = v_left & v_right;
4280 break;
4281 case Token::BIT_OR:
4282 result = v_left | v_right;
4283 break;
4284 default:
4285 result = 0; // Please the compiler.
4286 UNREACHABLE();
4287 }
4288 return H_CONSTANT_INT(result);
4289 }
4290 }
4291 return new (zone) HBitwise(context, op, left, right, strength);
4292 }
4293
4294
// Defines HInstr::New for a shift instruction: when constant folding is
// enabled and both operands are constant numbers, evaluates `result` (an
// expression over c_left/c_right) and emits it as an int32 constant.
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result)                           \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,   \
                            HValue* left, HValue* right, Strength strength) { \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {  \
      HConstant* c_left = HConstant::cast(left);                             \
      HConstant* c_right = HConstant::cast(right);                           \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {         \
        return H_CONSTANT_INT(result);                                       \
      }                                                                      \
    }                                                                        \
    return new (zone) HInstr(context, left, right, strength);                \
  }


// Shift counts are masked to 5 bits, as in JS.
// NOTE(review): HShl left-shifts a possibly negative int32, which is
// technically undefined behavior in C++; this relies on two's-complement
// behavior of the supported compilers -- confirm.
DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR
4315
4316
4317 HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
4318 HValue* left, HValue* right, Strength strength) {
4319 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4320 HConstant* c_left = HConstant::cast(left);
4321 HConstant* c_right = HConstant::cast(right);
4322 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4323 int32_t left_val = c_left->NumberValueAsInteger32();
4324 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
4325 if ((right_val == 0) && (left_val < 0)) {
4326 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
4327 }
4328 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
4329 }
4330 }
4331 return new (zone) HShr(context, left, right, strength);
4332 }
4333
4334
// Constant-folds a character load from a constant string at a constant index
// into an int32 constant holding the character code.
HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
                                     HValue* context, String::Encoding encoding,
                                     HValue* string, HValue* index) {
  if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
      Handle<String> s = c_string->StringValue();
      int32_t i = c_index->Integer32Value();
      // Callers are responsible for the index being in bounds.
      DCHECK_LE(0, i);
      DCHECK_LT(i, s->length());
      return H_CONSTANT_INT(s->Get(i));
    }
  }
  return new(zone) HSeqStringGetChar(encoding, string, index);
}
4351
4352
4353 #undef H_CONSTANT_INT
4354 #undef H_CONSTANT_DOUBLE
4355
4356
// Prefixes the generic binary-operation output with the token name of this
// bitwise operation.
std::ostream& HBitwise::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << Token::Name(op_) << " ";
  return HBitwiseBinaryOperation::PrintDataTo(os);
}
4361
4362
void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  // Only rewrite when every operand is a constant.
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      // Already an integer; nothing to do.
      continue;
    } else if (operand->HasDoubleValue()) {
      // Replace the double with its int32 truncation, inserted right after
      // the original constant so it dominates this phi's predecessor edge.
      HConstant* integer_input = HConstant::New(
          graph->isolate(), graph->zone(), graph->GetInvalidContext(),
          DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      // true -> 1, false -> 0.
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      // Remaining immortal immovable constants truncate to 0.
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}
4397
4398
// Updates this phi's representation from three sources in turn: observed
// uses, operand representations, and hard use requirements. Each call may
// generalize the current representation.
void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}
4408
4409
4410 Representation HPhi::RepresentationFromInputs() {
4411 Representation r = representation();
4412 for (int i = 0; i < OperandCount(); ++i) {
4413 // Ignore conservative Tagged assumption of parameters if we have
4414 // reason to believe that it's too conservative.
4415 if (has_type_feedback_from_uses() && OperandAt(i)->IsParameter()) {
4416 continue;
4417 }
4418
4419 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
4420 }
4421 return r;
4422 }
4423
4424
// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore the use requirement from never run code
    if (it.value()->block()->IsUnreachable()) continue;

    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    // The first concrete requirement becomes the candidate.
    if (rep.IsNone()) {
      rep = use_rep;
      continue;
    }
    if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
    // A Smi/Integer32 mix generalizes to Integer32.
    if (rep.generalize(use_rep).IsInteger32()) {
      rep = Representation::Integer32();
      continue;
    }
    // Conflicting requirements: no agreement.
    return Representation::None();
  }
  return rep;
}
4449
4450
4451 bool HValue::HasNonSmiUse() {
4452 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4453 // We check for observed_input_representation elsewhere.
4454 Representation use_rep =
4455 it.value()->RequiredInputRepresentation(it.index());
4456 if (!use_rep.IsNone() &&
4457 !use_rep.IsSmi() &&
4458 !use_rep.IsTagged()) {
4459 return true;
4460 }
4461 }
4462 return false;
4463 }
4464
4465
4466 // Node-specific verification code is only included in debug mode.
4467 #ifdef DEBUG
4468
// Checks that each operand comes from the corresponding predecessor edge:
// its defining block is the predecessor itself or a dominator of it.
void HPhi::Verify() {
  DCHECK(OperandCount() == block()->predecessors()->length());
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    HBasicBlock* defining_block = value->block();
    HBasicBlock* predecessor_block = block()->predecessors()->at(i);
    DCHECK(defining_block == predecessor_block ||
           defining_block->Dominates(predecessor_block));
  }
}
4479
4480
// A simulate must carry an AST id unless it immediately precedes an inlined
// function entry.
void HSimulate::Verify() {
  HInstruction::Verify();
  DCHECK(HasAstId() || next()->IsEnterInlined());
}
4485
4486
// Heap-object checks are pure guards; nothing may consume their result.
void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}
4491
4492
// Value checks are pure guards; nothing may consume their result.
void HCheckValue::Verify() {
  HInstruction::Verify();
  DCHECK(HasNoUses());
}
4497
4498 #endif
4499
4500
// Access to a slot inside a FixedArray header. The length slot gets its
// dedicated portion; other header offsets are plain in-object accesses.
HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
  DCHECK(offset >= 0);
  DCHECK(offset < FixedArray::kHeaderSize);
  if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
  return HObjectAccess(kInobject, offset);
}
4507
4508
4509 HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
4510 Representation representation) {
4511 DCHECK(offset >= 0);
4512 Portion portion = kInobject;
4513
4514 if (offset == JSObject::kElementsOffset) {
4515 portion = kElementsPointer;
4516 } else if (offset == JSObject::kMapOffset) {
4517 portion = kMaps;
4518 }
4519 bool existing_inobject_property = true;
4520 if (!map.is_null()) {
4521 existing_inobject_property = (offset <
4522 map->instance_size() - map->unused_property_fields() * kPointerSize);
4523 }
4524 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
4525 false, existing_inobject_property);
4526 }
4527
4528
4529 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
4530 switch (offset) {
4531 case AllocationSite::kTransitionInfoOffset:
4532 return HObjectAccess(kInobject, offset, Representation::Tagged());
4533 case AllocationSite::kNestedSiteOffset:
4534 return HObjectAccess(kInobject, offset, Representation::Tagged());
4535 case AllocationSite::kPretenureDataOffset:
4536 return HObjectAccess(kInobject, offset, Representation::Smi());
4537 case AllocationSite::kPretenureCreateCountOffset:
4538 return HObjectAccess(kInobject, offset, Representation::Smi());
4539 case AllocationSite::kDependentCodeOffset:
4540 return HObjectAccess(kInobject, offset, Representation::Tagged());
4541 case AllocationSite::kWeakNextOffset:
4542 return HObjectAccess(kInobject, offset, Representation::Tagged());
4543 default:
4544 UNREACHABLE();
4545 }
4546 return HObjectAccess(kInobject, offset);
4547 }
4548
4549
// Access to slot `index` of a Context, addressed past the header.
HObjectAccess HObjectAccess::ForContextSlot(int index) {
  DCHECK(index >= 0);
  Portion portion = kInobject;
  int offset = Context::kHeaderSize + index * kPointerSize;
  DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
  return HObjectAccess(portion, offset, Representation::Tagged());
}
4557
4558
// Access to entry `index` of a ScriptContextTable.
HObjectAccess HObjectAccess::ForScriptContext(int index) {
  DCHECK(index >= 0);
  Portion portion = kInobject;
  int offset = ScriptContextTable::GetContextOffset(index);
  return HObjectAccess(portion, offset, Representation::Tagged());
}
4565
4566
4567 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
4568 DCHECK(offset >= 0);
4569 Portion portion = kInobject;
4570
4571 if (offset == JSObject::kElementsOffset) {
4572 portion = kElementsPointer;
4573 } else if (offset == JSArray::kLengthOffset) {
4574 portion = kArrayLengths;
4575 } else if (offset == JSObject::kMapOffset) {
4576 portion = kMaps;
4577 }
4578 return HObjectAccess(portion, offset);
4579 }
4580
4581
// Access to an out-of-object (backing store) slot at `offset`.
HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  DCHECK(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation,
                       Handle<String>::null(), false, false);
}
4588
4589
4590 HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
4591 Representation representation,
4592 Handle<Name> name) {
4593 if (index < 0) {
4594 // Negative property indices are in-object properties, indexed
4595 // from the end of the fixed part of the object.
4596 int offset = (index * kPointerSize) + map->instance_size();
4597 return HObjectAccess(kInobject, offset, representation, name, false, true);
4598 } else {
4599 // Non-negative property indices are in the properties array.
4600 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
4601 return HObjectAccess(kBackingStore, offset, representation, name,
4602 false, false);
4603 }
4604 }
4605
4606
4607 void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
4608 // set the appropriate GVN flags for a given load or store instruction
4609 if (access_type == STORE) {
4610 // track dominating allocations in order to eliminate write barriers
4611 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
4612 instr->SetFlag(HValue::kTrackSideEffectDominators);
4613 } else {
4614 // try to GVN loads, but don't hoist above map changes
4615 instr->SetFlag(HValue::kUseGVN);
4616 instr->SetDependsOnFlag(::v8::internal::kMaps);
4617 }
4618
4619 switch (portion()) {
4620 case kArrayLengths:
4621 if (access_type == STORE) {
4622 instr->SetChangesFlag(::v8::internal::kArrayLengths);
4623 } else {
4624 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
4625 }
4626 break;
4627 case kStringLengths:
4628 if (access_type == STORE) {
4629 instr->SetChangesFlag(::v8::internal::kStringLengths);
4630 } else {
4631 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
4632 }
4633 break;
4634 case kInobject:
4635 if (access_type == STORE) {
4636 instr->SetChangesFlag(::v8::internal::kInobjectFields);
4637 } else {
4638 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
4639 }
4640 break;
4641 case kDouble:
4642 if (access_type == STORE) {
4643 instr->SetChangesFlag(::v8::internal::kDoubleFields);
4644 } else {
4645 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
4646 }
4647 break;
4648 case kBackingStore:
4649 if (access_type == STORE) {
4650 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
4651 } else {
4652 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
4653 }
4654 break;
4655 case kElementsPointer:
4656 if (access_type == STORE) {
4657 instr->SetChangesFlag(::v8::internal::kElementsPointer);
4658 } else {
4659 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
4660 }
4661 break;
4662 case kMaps:
4663 if (access_type == STORE) {
4664 instr->SetChangesFlag(::v8::internal::kMaps);
4665 } else {
4666 instr->SetDependsOnFlag(::v8::internal::kMaps);
4667 }
4668 break;
4669 case kExternalMemory:
4670 if (access_type == STORE) {
4671 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4672 } else {
4673 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4674 }
4675 break;
4676 }
4677 }
4678
4679
// Streams a human-readable description of the access -- a portion marker
// (e.g. "%map", "[in-object]") optionally preceded by the property name,
// followed by "@offset".
std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
  os << ".";

  switch (access.portion()) {
    case HObjectAccess::kArrayLengths:
    case HObjectAccess::kStringLengths:
      os << "%length";
      break;
    case HObjectAccess::kElementsPointer:
      os << "%elements";
      break;
    case HObjectAccess::kMaps:
      os << "%map";
      break;
    case HObjectAccess::kDouble:  // fall through
    case HObjectAccess::kInobject:
      if (!access.name().is_null()) {
        os << Handle<String>::cast(access.name())->ToCString().get();
      }
      os << "[in-object]";
      break;
    case HObjectAccess::kBackingStore:
      if (!access.name().is_null()) {
        os << Handle<String>::cast(access.name())->ToCString().get();
      }
      os << "[backing-store]";
      break;
    case HObjectAccess::kExternalMemory:
      os << "[external-memory]";
      break;
  }

  return os << "@" << access.offset();
}
4714
4715 } // namespace internal
4716 } // namespace v8
OLDNEW
« no previous file with comments | « src/hydrogen-instructions.h ('k') | src/hydrogen-load-elimination.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698