Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-ia32.cc

Issue 6529055: [Isolates] Merge crankshaft (r5922 from bleeding_edge). (Closed)
Patch Set: Win32 port, created 9 years, 10 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "ia32/lithium-ia32.h"
29 #include "ia32/lithium-codegen-ia32.h"
30
31 namespace v8 {
32 namespace internal {
33
34 #define DEFINE_COMPILE(type) \
35 void L##type::CompileToNative(LCodeGen* generator) { \
36 generator->Do##type(this); \
37 }
38 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
39 #undef DEFINE_COMPILE
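// [Illustrative note, not part of this change] Assuming LGoto appears in
// LITHIUM_CONCRETE_INSTRUCTION_LIST, DEFINE_COMPILE(Goto) expands roughly to
//
//   void LGoto::CompileToNative(LCodeGen* generator) {
//     generator->DoGoto(this);
//   }
//
// i.e. every concrete Lithium instruction forwards code generation to the
// matching Do* method on LCodeGen.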
40
41 LOsrEntry::LOsrEntry() {
42 for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
43 register_spills_[i] = NULL;
44 }
45 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
46 double_register_spills_[i] = NULL;
47 }
48 }
49
50
51 void LOsrEntry::MarkSpilledRegister(int allocation_index,
52 LOperand* spill_operand) {
53 ASSERT(spill_operand->IsStackSlot());
54 ASSERT(register_spills_[allocation_index] == NULL);
55 register_spills_[allocation_index] = spill_operand;
56 }
57
58
59 void LOsrEntry::MarkSpilledDoubleRegister(int allocation_index,
60 LOperand* spill_operand) {
61 ASSERT(spill_operand->IsDoubleStackSlot());
62 ASSERT(double_register_spills_[allocation_index] == NULL);
63 double_register_spills_[allocation_index] = spill_operand;
64 }
65
66
67 void LInstruction::PrintTo(StringStream* stream) const {
68 stream->Add("%s ", this->Mnemonic());
69 if (HasResult()) {
70 result()->PrintTo(stream);
71 stream->Add(" ");
72 }
73 PrintDataTo(stream);
74
75 if (HasEnvironment()) {
76 stream->Add(" ");
77 environment()->PrintTo(stream);
78 }
79
80 if (HasPointerMap()) {
81 stream->Add(" ");
82 pointer_map()->PrintTo(stream);
83 }
84 }
85
86
87 void LLabel::PrintDataTo(StringStream* stream) const {
88 LGap::PrintDataTo(stream);
89 LLabel* rep = replacement();
90 if (rep != NULL) {
91 stream->Add(" Dead block replaced with B%d", rep->block_id());
92 }
93 }
94
95
96 bool LParallelMove::IsRedundant() const {
97 for (int i = 0; i < move_operands_.length(); ++i) {
98 if (!move_operands_[i].IsRedundant()) return false;
99 }
100 return true;
101 }
102
103
104 void LParallelMove::PrintDataTo(StringStream* stream) const {
105 for (int i = move_operands_.length() - 1; i >= 0; --i) {
106 if (!move_operands_[i].IsEliminated()) {
107 LOperand* from = move_operands_[i].from();
108 LOperand* to = move_operands_[i].to();
109 if (from->Equals(to)) {
110 to->PrintTo(stream);
111 } else {
112 to->PrintTo(stream);
113 stream->Add(" = ");
114 from->PrintTo(stream);
115 }
116 stream->Add("; ");
117 }
118 }
119 }
120
121
122 bool LGap::IsRedundant() const {
123 for (int i = 0; i < 4; i++) {
124 if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
125 return false;
126 }
127 }
128
129 return true;
130 }
131
132
133 void LGap::PrintDataTo(StringStream* stream) const {
134 for (int i = 0; i < 4; i++) {
135 stream->Add("(");
136 if (parallel_moves_[i] != NULL) {
137 parallel_moves_[i]->PrintDataTo(stream);
138 }
139 stream->Add(") ");
140 }
141 }
142
143
144 const char* LArithmeticD::Mnemonic() const {
145 switch (op()) {
146 case Token::ADD: return "add-d";
147 case Token::SUB: return "sub-d";
148 case Token::MUL: return "mul-d";
149 case Token::DIV: return "div-d";
150 case Token::MOD: return "mod-d";
151 default:
152 UNREACHABLE();
153 return NULL;
154 }
155 }
156
157
158 const char* LArithmeticT::Mnemonic() const {
159 switch (op()) {
160 case Token::ADD: return "add-t";
161 case Token::SUB: return "sub-t";
162 case Token::MUL: return "mul-t";
163 case Token::MOD: return "mod-t";
164 case Token::DIV: return "div-t";
165 default:
166 UNREACHABLE();
167 return NULL;
168 }
169 }
170
171
172
173 void LBinaryOperation::PrintDataTo(StringStream* stream) const {
174 stream->Add("= ");
175 left()->PrintTo(stream);
176 stream->Add(" ");
177 right()->PrintTo(stream);
178 }
179
180
181 void LGoto::PrintDataTo(StringStream* stream) const {
182 stream->Add("B%d", block_id());
183 }
184
185
186 void LBranch::PrintDataTo(StringStream* stream) const {
187 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
188 input()->PrintTo(stream);
189 }
190
191
192 void LCmpIDAndBranch::PrintDataTo(StringStream* stream) const {
193 stream->Add("if ");
194 left()->PrintTo(stream);
195 stream->Add(" %s ", Token::String(op()));
196 right()->PrintTo(stream);
197 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
198 }
199
200
201 void LIsNullAndBranch::PrintDataTo(StringStream* stream) const {
202 stream->Add("if ");
203 input()->PrintTo(stream);
204 stream->Add(is_strict() ? " === null" : " == null");
205 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
206 }
207
208
209 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) const {
210 stream->Add("if is_smi(");
211 input()->PrintTo(stream);
212 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
213 }
214
215
216 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) const {
217 stream->Add("if has_instance_type(");
218 input()->PrintTo(stream);
219 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
220 }
221
222
223 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) const {
224 stream->Add("if has_cached_array_index(");
225 input()->PrintTo(stream);
226 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
227 }
228
229
230 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) const {
231 stream->Add("if class_of_test(");
232 input()->PrintTo(stream);
233 stream->Add(", \"%o\") then B%d else B%d",
234 *hydrogen()->class_name(),
235 true_block_id(),
236 false_block_id());
237 }
238
239
240 void LTypeofIs::PrintDataTo(StringStream* stream) const {
241 input()->PrintTo(stream);
242 stream->Add(" == \"%s\"", *hydrogen()->type_literal()->ToCString());
243 }
244
245
246 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) const {
247 stream->Add("if typeof ");
248 input()->PrintTo(stream);
249 stream->Add(" == \"%s\" then B%d else B%d",
250 *hydrogen()->type_literal()->ToCString(),
251 true_block_id(), false_block_id());
252 }
253
254
255 void LCallConstantFunction::PrintDataTo(StringStream* stream) const {
256 stream->Add("#%d / ", arity());
257 }
258
259
260 void LUnaryMathOperation::PrintDataTo(StringStream* stream) const {
261 stream->Add("/%s ", hydrogen()->OpName());
262 input()->PrintTo(stream);
263 }
264
265
266 void LCallKeyed::PrintDataTo(StringStream* stream) const {
267 stream->Add("[ecx] #%d / ", arity());
268 }
269
270
271 void LCallNamed::PrintDataTo(StringStream* stream) const {
272 SmartPointer<char> name_string = name()->ToCString();
273 stream->Add("%s #%d / ", *name_string, arity());
274 }
275
276
277 void LCallGlobal::PrintDataTo(StringStream* stream) const {
278 SmartPointer<char> name_string = name()->ToCString();
279 stream->Add("%s #%d / ", *name_string, arity());
280 }
281
282
283 void LCallKnownGlobal::PrintDataTo(StringStream* stream) const {
284 stream->Add("#%d / ", arity());
285 }
286
287
288 void LCallNew::PrintDataTo(StringStream* stream) const {
289 LUnaryOperation::PrintDataTo(stream);
290 stream->Add(" #%d / ", arity());
291 }
292
293
294 void LClassOfTest::PrintDataTo(StringStream* stream) const {
295 stream->Add("= class_of_test(");
296 input()->PrintTo(stream);
297 stream->Add(", \"%o\")", *hydrogen()->class_name());
298 }
299
300
301 void LUnaryOperation::PrintDataTo(StringStream* stream) const {
302 stream->Add("= ");
303 input()->PrintTo(stream);
304 }
305
306
307 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) const {
308 arguments()->PrintTo(stream);
309
310 stream->Add(" length ");
311 length()->PrintTo(stream);
312
313 stream->Add(" index ");
314 index()->PrintTo(stream);
315 }
316
317
318 LChunk::LChunk(HGraph* graph)
319 : spill_slot_count_(0),
320 graph_(graph),
321 instructions_(32),
322 pointer_maps_(8),
323 inlined_closures_(1) {
324 }
325
326
327 void LChunk::Verify() const {
328 // TODO(twuerthinger): Implement verification for chunk.
329 }
330
331
332 int LChunk::GetNextSpillIndex(bool is_double) {
333 // Skip a slot for a double-width slot.
334 if (is_double) spill_slot_count_++;
335 return spill_slot_count_++;
336 }
337
338
339 LOperand* LChunk::GetNextSpillSlot(bool is_double) {
340 int index = GetNextSpillIndex(is_double);
341 if (is_double) {
342 return LDoubleStackSlot::Create(index);
343 } else {
344 return LStackSlot::Create(index);
345 }
346 }
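// [Illustrative note, not part of this change] Worked example, assuming
// spill_slot_count_ starts at 0: a double-width request skips index 0 and
// returns index 1 (the count becomes 2), so the double presumably occupies
// the slot pair 0/1; a following single-width request would then get index 2.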
347
348
349 void LChunk::MarkEmptyBlocks() {
350 HPhase phase("Mark empty blocks", this);
351 for (int i = 0; i < graph()->blocks()->length(); ++i) {
352 HBasicBlock* block = graph()->blocks()->at(i);
353 int first = block->first_instruction_index();
354 int last = block->last_instruction_index();
355 LInstruction* first_instr = instructions()->at(first);
356 LInstruction* last_instr = instructions()->at(last);
357
358 LLabel* label = LLabel::cast(first_instr);
359 if (last_instr->IsGoto()) {
360 LGoto* goto_instr = LGoto::cast(last_instr);
361 if (!goto_instr->include_stack_check() &&
362 label->IsRedundant() &&
363 !label->is_loop_header()) {
364 bool can_eliminate = true;
365 for (int i = first + 1; i < last && can_eliminate; ++i) {
366 LInstruction* cur = instructions()->at(i);
367 if (cur->IsGap()) {
368 LGap* gap = LGap::cast(cur);
369 if (!gap->IsRedundant()) {
370 can_eliminate = false;
371 }
372 } else {
373 can_eliminate = false;
374 }
375 }
376
377 if (can_eliminate) {
378 label->set_replacement(GetLabel(goto_instr->block_id()));
379 }
380 }
381 }
382 }
383 }
384
385
386 void LStoreNamed::PrintDataTo(StringStream* stream) const {
387 object()->PrintTo(stream);
388 stream->Add(".");
389 stream->Add(*String::cast(*name())->ToCString());
390 stream->Add(" <- ");
391 value()->PrintTo(stream);
392 }
393
394
395 void LStoreKeyed::PrintDataTo(StringStream* stream) const {
396 object()->PrintTo(stream);
397 stream->Add("[");
398 key()->PrintTo(stream);
399 stream->Add("] <- ");
400 value()->PrintTo(stream);
401 }
402
403
404 int LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
405 LGap* gap = new LGap(block);
406 int index = -1;
407 if (instr->IsControl()) {
408 instructions_.Add(gap);
409 index = instructions_.length();
410 instructions_.Add(instr);
411 } else {
412 index = instructions_.length();
413 instructions_.Add(instr);
414 instructions_.Add(gap);
415 }
416 if (instr->HasPointerMap()) {
417 pointer_maps_.Add(instr->pointer_map());
418 instr->pointer_map()->set_lithium_position(index);
419 }
420 return index;
421 }
422
423
424 LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
425 return LConstantOperand::Create(constant->id());
426 }
427
428
429 int LChunk::GetParameterStackSlot(int index) const {
430 // The receiver is at index 0, the first parameter at index 1, so we
431 // shift all parameter indexes down by the number of parameters, and
432 // make sure they end up negative so they are distinguishable from
433 // spill slots.
434 int result = index - graph()->info()->scope()->num_parameters() - 1;
435 ASSERT(result < 0);
436 return result;
437 }
438
439 // The stack offset of a parameter relative to ebp in the arguments stub.
440 int LChunk::ParameterAt(int index) {
441 ASSERT(-1 <= index); // -1 is the receiver.
442 return (1 + graph()->info()->scope()->num_parameters() - index) *
443 kPointerSize;
444 }
445
446
447 LGap* LChunk::GetGapAt(int index) const {
448 return LGap::cast(instructions_[index]);
449 }
450
451
452 bool LChunk::IsGapAt(int index) const {
453 return instructions_[index]->IsGap();
454 }
455
456
457 int LChunk::NearestGapPos(int index) const {
458 while (!IsGapAt(index)) index--;
459 return index;
460 }
461
462
463 int LChunk::NearestNextGapPos(int index) const {
464 while (!IsGapAt(index)) index++;
465 return index;
466 }
467
468
469 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
470 GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
471 }
472
473
474 class LGapNode: public ZoneObject {
475 public:
476 explicit LGapNode(LOperand* operand)
477 : operand_(operand), resolved_(false), visited_id_(-1) { }
478
479 LOperand* operand() const { return operand_; }
480 bool IsResolved() const { return !IsAssigned() || resolved_; }
481 void MarkResolved() {
482 ASSERT(!IsResolved());
483 resolved_ = true;
484 }
485 int visited_id() const { return visited_id_; }
486 void set_visited_id(int id) {
487 ASSERT(id > visited_id_);
488 visited_id_ = id;
489 }
490
491 bool IsAssigned() const { return assigned_from_.is_set(); }
492 LGapNode* assigned_from() const { return assigned_from_.get(); }
493 void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }
494
495 private:
496 LOperand* operand_;
497 SetOncePointer<LGapNode> assigned_from_;
498 bool resolved_;
499 int visited_id_;
500 };
501
502
503 LGapResolver::LGapResolver(const ZoneList<LMoveOperands>* moves,
504 LOperand* marker_operand)
505 : nodes_(4),
506 identified_cycles_(4),
507 result_(4),
508 marker_operand_(marker_operand),
509 next_visited_id_(0) {
510 for (int i = 0; i < moves->length(); ++i) {
511 LMoveOperands move = moves->at(i);
512 if (!move.IsRedundant()) RegisterMove(move);
513 }
514 }
515
516
517 const ZoneList<LMoveOperands>* LGapResolver::ResolveInReverseOrder() {
518 for (int i = 0; i < identified_cycles_.length(); ++i) {
519 ResolveCycle(identified_cycles_[i]);
520 }
521
522 int unresolved_nodes;
523 do {
524 unresolved_nodes = 0;
525 for (int j = 0; j < nodes_.length(); j++) {
526 LGapNode* node = nodes_[j];
527 if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
528 AddResultMove(node->assigned_from(), node);
529 node->MarkResolved();
530 }
531 if (!node->IsResolved()) ++unresolved_nodes;
532 }
533 } while (unresolved_nodes > 0);
534 return &result_;
535 }
536
537
538 void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
539 AddResultMove(from->operand(), to->operand());
540 }
541
542
543 void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
544 result_.Add(LMoveOperands(from, to));
545 }
546
547
548 void LGapResolver::ResolveCycle(LGapNode* start) {
549 ZoneList<LOperand*> circle_operands(8);
550 circle_operands.Add(marker_operand_);
551 LGapNode* cur = start;
552 do {
553 cur->MarkResolved();
554 circle_operands.Add(cur->operand());
555 cur = cur->assigned_from();
556 } while (cur != start);
557 circle_operands.Add(marker_operand_);
558
559 for (int i = circle_operands.length() - 1; i > 0; --i) {
560 LOperand* from = circle_operands[i];
561 LOperand* to = circle_operands[i - 1];
562 AddResultMove(from, to);
563 }
564 }
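// [Illustrative note, not part of this change] For a simple swap cycle such
// as {eax -> ebx, ebx -> eax}, the resolver appends the moves
// marker -> eax, eax -> ebx, ebx -> marker to result_; replayed in reverse
// order (as the name ResolveInReverseOrder suggests) this saves ebx into the
// marker operand, copies eax into ebx, and restores the old ebx into eax,
// breaking the cycle with a single scratch location.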
565
566
567 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
568 ASSERT(a != b);
569 LGapNode* cur = a;
570 while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
571 cur->set_visited_id(visited_id);
572 cur = cur->assigned_from();
573 }
574
575 return cur == b;
576 }
577
578
579 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
580 ASSERT(a != b);
581 return CanReach(a, b, next_visited_id_++);
582 }
583
584
585 void LGapResolver::RegisterMove(LMoveOperands move) {
586 if (move.from()->IsConstantOperand()) {
587 // Constant moves should be last in the machine code. Therefore add them
588 // first to the result set.
589 AddResultMove(move.from(), move.to());
590 } else {
591 LGapNode* from = LookupNode(move.from());
592 LGapNode* to = LookupNode(move.to());
593 if (to->IsAssigned() && to->assigned_from() == from) {
594 move.Eliminate();
595 return;
596 }
597 ASSERT(!to->IsAssigned());
598 if (CanReach(from, to)) {
599 // This introduces a cycle. Save it.
600 identified_cycles_.Add(from);
601 }
602 to->set_assigned_from(from);
603 }
604 }
605
606
607 LGapNode* LGapResolver::LookupNode(LOperand* operand) {
608 for (int i = 0; i < nodes_.length(); ++i) {
609 if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
610 }
611
612 // No node found => create a new one.
613 LGapNode* result = new LGapNode(operand);
614 nodes_.Add(result);
615 return result;
616 }
617
618
619 Handle<Object> LChunk::LookupLiteral(LConstantOperand* operand) const {
620 return HConstant::cast(graph_->LookupValue(operand->index()))->handle();
621 }
622
623
624 Representation LChunk::LookupLiteralRepresentation(
625 LConstantOperand* operand) const {
626 return graph_->LookupValue(operand->index())->representation();
627 }
628
629
630 LChunk* LChunkBuilder::Build() {
631 ASSERT(is_unused());
632 chunk_ = new LChunk(graph());
633 HPhase phase("Building chunk", chunk_);
634 status_ = BUILDING;
635 const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
636 for (int i = 0; i < blocks->length(); i++) {
637 HBasicBlock* next = NULL;
638 if (i < blocks->length() - 1) next = blocks->at(i + 1);
639 DoBasicBlock(blocks->at(i), next);
640 if (is_aborted()) return NULL;
641 }
642 status_ = DONE;
643 return chunk_;
644 }
645
646
647 void LChunkBuilder::Abort(const char* format, ...) {
648 if (FLAG_trace_bailout) {
649 SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
650 PrintF("Aborting LChunk building in @\"%s\": ", *debug_name);
651 va_list arguments;
652 va_start(arguments, format);
653 OS::VPrint(format, arguments);
654 va_end(arguments);
655 PrintF("\n");
656 }
657 status_ = ABORTED;
658 }
659
660
661 LRegister* LChunkBuilder::ToOperand(Register reg) {
662 return LRegister::Create(Register::ToAllocationIndex(reg));
663 }
664
665
666 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
667 return new LUnallocated(LUnallocated::FIXED_REGISTER,
668 Register::ToAllocationIndex(reg));
669 }
670
671
672 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
673 return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
674 XMMRegister::ToAllocationIndex(reg));
675 }
676
677
678 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
679 return Use(value, ToUnallocated(fixed_register));
680 }
681
682
683 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
684 return Use(value, ToUnallocated(reg));
685 }
686
687
688 LOperand* LChunkBuilder::UseRegister(HValue* value) {
689 return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
690 }
691
692
693 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
694 return Use(value,
695 new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
696 LUnallocated::USED_AT_START));
697 }
698
699
700 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
701 return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
702 }
703
704
705 LOperand* LChunkBuilder::Use(HValue* value) {
706 return Use(value, new LUnallocated(LUnallocated::NONE));
707 }
708
709
710 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
711 return Use(value, new LUnallocated(LUnallocated::NONE,
712 LUnallocated::USED_AT_START));
713 }
714
715
716 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
717 return value->IsConstant()
718 ? chunk_->DefineConstantOperand(HConstant::cast(value))
719 : Use(value);
720 }
721
722
723 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
724 return value->IsConstant()
725 ? chunk_->DefineConstantOperand(HConstant::cast(value))
726 : UseAtStart(value);
727 }
728
729
730 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
731 return value->IsConstant()
732 ? chunk_->DefineConstantOperand(HConstant::cast(value))
733 : UseRegister(value);
734 }
735
736
737 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
738 return value->IsConstant()
739 ? chunk_->DefineConstantOperand(HConstant::cast(value))
740 : UseRegisterAtStart(value);
741 }
742
743
744 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
745 if (value->EmitAtUses()) {
746 HInstruction* instr = HInstruction::cast(value);
747 VisitInstruction(instr);
748 }
749 allocator_->RecordUse(value, operand);
750 return operand;
751 }
752
753
754 LInstruction* LChunkBuilder::Define(LInstruction* instr) {
755 return Define(instr, new LUnallocated(LUnallocated::NONE));
756 }
757
758
759 LInstruction* LChunkBuilder::DefineAsRegister(LInstruction* instr) {
760 return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
761 }
762
763
764 LInstruction* LChunkBuilder::DefineAsSpilled(LInstruction* instr, int index) {
765 return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
766 }
767
768
769 LInstruction* LChunkBuilder::DefineSameAsAny(LInstruction* instr) {
770 return Define(instr, new LUnallocated(LUnallocated::SAME_AS_ANY_INPUT));
771 }
772
773
774 LInstruction* LChunkBuilder::DefineSameAsFirst(LInstruction* instr) {
775 return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
776 }
777
778
779 LInstruction* LChunkBuilder::DefineFixed(LInstruction* instr, Register reg) {
780 return Define(instr, ToUnallocated(reg));
781 }
782
783
784 LInstruction* LChunkBuilder::DefineFixedDouble(LInstruction* instr,
785 XMMRegister reg) {
786 return Define(instr, ToUnallocated(reg));
787 }
788
789
790 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
791 HEnvironment* hydrogen_env = current_block_->last_environment();
792 instr->set_environment(CreateEnvironment(hydrogen_env));
793 return instr;
794 }
795
796
797 LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment(
798 LInstruction* instr, int ast_id) {
799 ASSERT(instructions_pending_deoptimization_environment_ == NULL);
800 ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
801 instructions_pending_deoptimization_environment_ = instr;
802 pending_deoptimization_ast_id_ = ast_id;
803 return instr;
804 }
805
806
807 void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() {
808 instructions_pending_deoptimization_environment_ = NULL;
809 pending_deoptimization_ast_id_ = AstNode::kNoNumber;
810 }
811
812
813 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
814 HInstruction* hinstr,
815 CanDeoptimize can_deoptimize) {
816 allocator_->MarkAsCall();
817 instr = AssignPointerMap(instr);
818
819 if (hinstr->HasSideEffects()) {
820 ASSERT(hinstr->next()->IsSimulate());
821 HSimulate* sim = HSimulate::cast(hinstr->next());
822 instr = SetInstructionPendingDeoptimizationEnvironment(
823 instr, sim->ast_id());
824 }
825
826 // If the instruction does not have side effects, lazy deoptimization
827 // after the call will try to deoptimize to the point before the call.
828 // Thus we still need to attach an environment to this call even if
829 // the call sequence cannot deoptimize eagerly.
830 bool needs_environment =
831 (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) || !hinstr->HasSideEffects();
832 if (needs_environment && !instr->HasEnvironment()) {
833 instr = AssignEnvironment(instr);
834 }
835
836 return instr;
837 }
838
839
840 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
841 ASSERT(!instr->HasPointerMap());
842 instr->set_pointer_map(new LPointerMap(position_));
843 return instr;
844 }
845
846
847 LInstruction* LChunkBuilder::Define(LInstruction* instr, LUnallocated* result) {
848 allocator_->RecordDefinition(current_instruction_, result);
849 instr->set_result(result);
850 return instr;
851 }
852
853
854 LOperand* LChunkBuilder::Temp() {
855 LUnallocated* operand = new LUnallocated(LUnallocated::NONE);
856 allocator_->RecordTemporary(operand);
857 return operand;
858 }
859
860
861 LUnallocated* LChunkBuilder::TempRegister() {
862 LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
863 allocator_->RecordTemporary(operand);
864 return operand;
865 }
866
867
868 LOperand* LChunkBuilder::FixedTemp(Register reg) {
869 LUnallocated* operand = ToUnallocated(reg);
870 allocator_->RecordTemporary(operand);
871 return operand;
872 }
873
874
875 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
876 LUnallocated* operand = ToUnallocated(reg);
877 allocator_->RecordTemporary(operand);
878 return operand;
879 }
880
881
882 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
883 HBasicBlock* deopt_predecessor = instr->block()->deopt_predecessor();
884 if (deopt_predecessor != NULL &&
885 deopt_predecessor->inverted()) {
886 HEnvironment* env = current_block_->last_environment();
887 HValue* value = env->Pop();
888 ASSERT(value->IsConstant());
889 Handle<Object> obj = HConstant::cast(value)->handle();
890 ASSERT(*obj == *FACTORY->true_value() || *obj == *FACTORY->false_value());
891 env->Push(*obj == *FACTORY->true_value()
892 ? current_block_->graph()->GetConstantFalse()
893 : current_block_->graph()->GetConstantTrue());
894 }
895
896 return new LLabel(instr->block());
897 }
898
899
900 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
901 return AssignEnvironment(new LDeoptimize);
902 }
903
904
905 LInstruction* LChunkBuilder::DoBit(Token::Value op,
906 HBitwiseBinaryOperation* instr) {
907 ASSERT(instr->representation().IsInteger32());
908 ASSERT(instr->left()->representation().IsInteger32());
909 ASSERT(instr->right()->representation().IsInteger32());
910
911 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
912 LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
913 return DefineSameAsFirst(new LBitI(op, left, right));
914 }
915
916
917 LInstruction* LChunkBuilder::DoShift(Token::Value op,
918 HBitwiseBinaryOperation* instr) {
919 ASSERT(instr->representation().IsInteger32());
920 ASSERT(instr->OperandAt(0)->representation().IsInteger32());
921 ASSERT(instr->OperandAt(1)->representation().IsInteger32());
922 LOperand* left = UseRegisterAtStart(instr->OperandAt(0));
923
924 HValue* right_value = instr->OperandAt(1);
925 LOperand* right = NULL;
926 int constant_value = 0;
927 if (right_value->IsConstant()) {
928 HConstant* constant = HConstant::cast(right_value);
929 right = chunk_->DefineConstantOperand(constant);
930 constant_value = constant->Integer32Value() & 0x1f;
931 } else {
932 right = UseFixed(right_value, ecx);
933 }
934
935 // Shift operations can only deoptimize if we do a logical shift
936 // by 0 and the result cannot be truncated to int32.
937 bool can_deopt = (op == Token::SHR && constant_value == 0);
938 if (can_deopt) {
939 bool can_truncate = true;
940 for (int i = 0; i < instr->uses()->length(); i++) {
941 if (!instr->uses()->at(i)->CheckFlag(HValue::kTruncatingToInt32)) {
942 can_truncate = false;
943 break;
944 }
945 }
946 can_deopt = !can_truncate;
947 }
948
949 LInstruction* result =
950 DefineSameAsFirst(new LShiftI(op, left, right, can_deopt));
951 if (can_deopt) AssignEnvironment(result);
952 return result;
953 }
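// [Illustrative note, not part of this change] The SHR-by-constant-zero case
// matters because a logical shift by 0 still reinterprets the value as
// unsigned in JavaScript: (-1 >>> 0) is 4294967295, which does not fit in a
// signed 32-bit integer, so the result can only stay in an int32
// representation if every use truncates it back to int32; otherwise the
// instruction must be able to deoptimize.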
954
955
956 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
957 HArithmeticBinaryOperation* instr) {
958 ASSERT(instr->representation().IsDouble());
959 ASSERT(instr->left()->representation().IsDouble());
960 ASSERT(instr->right()->representation().IsDouble());
961 LOperand* left = UseRegisterAtStart(instr->left());
962 LOperand* right = UseRegisterAtStart(instr->right());
963 LArithmeticD* result = new LArithmeticD(op, left, right);
964 return DefineSameAsFirst(result);
965 }
966
967
968 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
969 HArithmeticBinaryOperation* instr) {
970 ASSERT(op == Token::ADD ||
971 op == Token::DIV ||
972 op == Token::MOD ||
973 op == Token::MUL ||
974 op == Token::SUB);
975 HValue* left = instr->left();
976 HValue* right = instr->right();
977 ASSERT(left->representation().IsTagged());
978 ASSERT(right->representation().IsTagged());
979 LOperand* left_operand = UseFixed(left, edx);
980 LOperand* right_operand = UseFixed(right, eax);
981 LInstruction* result = new LArithmeticT(op, left_operand, right_operand);
982 return MarkAsCall(DefineFixed(result, eax), instr);
983 }
984
985 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
986 ASSERT(is_building());
987 current_block_ = block;
988 next_block_ = next_block;
989 if (block->IsStartBlock()) {
990 block->UpdateEnvironment(graph_->start_environment());
991 argument_count_ = 0;
992 } else if (block->predecessors()->length() == 1) {
993 // We have a single predecessor => copy environment and outgoing
994 // argument count from the predecessor.
995 ASSERT(block->phis()->length() == 0);
996 HBasicBlock* pred = block->predecessors()->at(0);
997 HEnvironment* last_environment = pred->last_environment();
998 ASSERT(last_environment != NULL);
999 // Only copy the environment if it is used again later.
1000 if (pred->end()->SecondSuccessor() == NULL) {
1001 ASSERT(pred->end()->FirstSuccessor() == block);
1002 } else {
1003 if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
1004 pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
1005 last_environment = last_environment->Copy();
1006 }
1007 }
1008 block->UpdateEnvironment(last_environment);
1009 ASSERT(pred->argument_count() >= 0);
1010 argument_count_ = pred->argument_count();
1011 } else {
1012 // We are at a state join => process phis.
1013 HBasicBlock* pred = block->predecessors()->at(0);
1014 // No need to copy the environment; it cannot be used later.
1015 HEnvironment* last_environment = pred->last_environment();
1016 for (int i = 0; i < block->phis()->length(); ++i) {
1017 HPhi* phi = block->phis()->at(i);
1018 last_environment->SetValueAt(phi->merged_index(), phi);
1019 }
1020 for (int i = 0; i < block->deleted_phis()->length(); ++i) {
1021 last_environment->SetValueAt(block->deleted_phis()->at(i),
1022 graph_->GetConstantUndefined());
1023 }
1024 block->UpdateEnvironment(last_environment);
1025 // Pick up the outgoing argument count of one of the predecessors.
1026 argument_count_ = pred->argument_count();
1027 }
1028 HInstruction* current = block->first();
1029 int start = chunk_->instructions()->length();
1030 while (current != NULL && !is_aborted()) {
1031 if (FLAG_trace_environment) {
1032 PrintF("Process instruction %d\n", current->id());
1033 }
1034 // Code for constants in registers is generated lazily.
1035 if (!current->EmitAtUses()) {
1036 VisitInstruction(current);
1037 }
1038 current = current->next();
1039 }
1040 int end = chunk_->instructions()->length() - 1;
1041 if (end >= start) {
1042 block->set_first_instruction_index(start);
1043 block->set_last_instruction_index(end);
1044 }
1045 block->set_argument_count(argument_count_);
1046 next_block_ = NULL;
1047 current_block_ = NULL;
1048 }
1049
1050
1051 void LChunkBuilder::VisitInstruction(HInstruction* current) {
1052 HInstruction* old_current = current_instruction_;
1053 current_instruction_ = current;
1054 allocator_->BeginInstruction();
1055 if (current->has_position()) position_ = current->position();
1056 LInstruction* instr = current->CompileToLithium(this);
1057
1058 if (instr != NULL) {
1059 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
1060 instr = AssignPointerMap(instr);
1061 }
1062 if (FLAG_stress_environments && !instr->HasEnvironment()) {
1063 instr = AssignEnvironment(instr);
1064 }
1065 if (current->IsBranch()) {
1066 instr->set_hydrogen_value(HBranch::cast(current)->value());
1067 } else {
1068 instr->set_hydrogen_value(current);
1069 }
1070
1071 int index = chunk_->AddInstruction(instr, current_block_);
1072 allocator_->SummarizeInstruction(index);
1073 } else {
1074 // This instruction should be omitted.
1075 allocator_->OmitInstruction();
1076 }
1077 current_instruction_ = old_current;
1078 }
1079
1080
1081 void LEnvironment::WriteTranslation(LCodeGen* cgen,
1082 Translation* translation) const {
1083 if (this == NULL) return;
1084
1085 // The translation includes one command per value in the environment.
1086 int translation_size = values()->length();
1087 // The output frame height does not include the parameters.
1088 int height = translation_size - parameter_count();
1089
1090 outer()->WriteTranslation(cgen, translation);
1091 int closure_id = cgen->DefineDeoptimizationLiteral(closure());
1092 translation->BeginFrame(ast_id(), closure_id, height);
1093 for (int i = 0; i < translation_size; ++i) {
1094 LOperand* value = values()->at(i);
1095 // spilled_registers_ and spilled_double_registers_ are either
1096 // both NULL or both set.
1097 if (spilled_registers_ != NULL && value != NULL) {
1098 if (value->IsRegister() &&
1099 spilled_registers_[value->index()] != NULL) {
1100 translation->MarkDuplicate();
1101 cgen->AddToTranslation(translation,
1102 spilled_registers_[value->index()],
1103 HasTaggedValueAt(i));
1104 } else if (value->IsDoubleRegister() &&
1105 spilled_double_registers_[value->index()] != NULL) {
1106 translation->MarkDuplicate();
1107 cgen->AddToTranslation(translation,
1108 spilled_double_registers_[value->index()],
1109 false);
1110 }
1111 }
1112
1113 cgen->AddToTranslation(translation, value, HasTaggedValueAt(i));
1114 }
1115 }
1116
1117
1118 void LEnvironment::PrintTo(StringStream* stream) const {
1119 stream->Add("[id=%d|", ast_id());
1120 stream->Add("[parameters=%d|", parameter_count());
1121 stream->Add("[arguments_stack_height=%d|", arguments_stack_height());
1122 for (int i = 0; i < values_.length(); ++i) {
1123 if (i != 0) stream->Add(";");
1124 if (values_[i] == NULL) {
1125 stream->Add("[hole]");
1126 } else {
1127 values_[i]->PrintTo(stream);
1128 }
1129 }
1130 stream->Add("]");
1131 }
1132
1133
1134 LEnvironment* LChunkBuilder::CreateEnvironment(HEnvironment* hydrogen_env) {
1135 if (hydrogen_env == NULL) return NULL;
1136
1137 LEnvironment* outer = CreateEnvironment(hydrogen_env->outer());
1138 int ast_id = hydrogen_env->ast_id();
1139 ASSERT(ast_id != AstNode::kNoNumber);
1140 int value_count = hydrogen_env->values()->length();
1141 LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
1142 ast_id,
1143 hydrogen_env->parameter_count(),
1144 argument_count_,
1145 value_count,
1146 outer);
1147 int argument_index = 0;
1148 for (int i = 0; i < value_count; ++i) {
1149 HValue* value = hydrogen_env->values()->at(i);
1150 LOperand* op = NULL;
1151 if (value->IsArgumentsObject()) {
1152 op = NULL;
1153 } else if (value->IsPushArgument()) {
1154 op = new LArgument(argument_index++);
1155 } else {
1156 op = UseOrConstant(value);
1157 if (op->IsUnallocated()) {
1158 LUnallocated* unalloc = LUnallocated::cast(op);
1159 unalloc->set_policy(LUnallocated::ANY);
1160 }
1161 }
1162 result->AddValue(op, value->representation());
1163 }
1164
1165 return result;
1166 }
1167
1168
1169 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
1170 LInstruction* result = new LGoto(instr->FirstSuccessor()->block_id(),
1171 instr->include_stack_check());
1172 if (instr->include_stack_check()) result = AssignPointerMap(result);
1173 return result;
1174 }
1175
1176
1177 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
1178 HValue* v = instr->value();
1179 HBasicBlock* first = instr->FirstSuccessor();
1180 HBasicBlock* second = instr->SecondSuccessor();
1181 ASSERT(first != NULL && second != NULL);
1182 int first_id = first->block_id();
1183 int second_id = second->block_id();
1184
1185 if (v->EmitAtUses()) {
1186 if (v->IsClassOfTest()) {
1187 HClassOfTest* compare = HClassOfTest::cast(v);
1188 ASSERT(compare->value()->representation().IsTagged());
1189
1190 return new LClassOfTestAndBranch(UseTempRegister(compare->value()),
1191 TempRegister(),
1192 TempRegister(),
1193 first_id,
1194 second_id);
1195 } else if (v->IsCompare()) {
1196 HCompare* compare = HCompare::cast(v);
1197 Token::Value op = compare->token();
1198 HValue* left = compare->left();
1199 HValue* right = compare->right();
1200 if (left->representation().IsInteger32()) {
1201 ASSERT(right->representation().IsInteger32());
1202 return new LCmpIDAndBranch(op,
1203 UseRegisterAtStart(left),
1204 UseOrConstantAtStart(right),
1205 first_id,
1206 second_id,
1207 false);
1208 } else if (left->representation().IsDouble()) {
1209 ASSERT(right->representation().IsDouble());
1210 return new LCmpIDAndBranch(op,
1211 UseRegisterAtStart(left),
1212 UseRegisterAtStart(right),
1213 first_id,
1214 second_id,
1215 true);
1216 } else {
1217 ASSERT(left->representation().IsTagged());
1218 ASSERT(right->representation().IsTagged());
1219 bool reversed = op == Token::GT || op == Token::LTE;
1220 LOperand* left_operand = UseFixed(left, reversed ? eax : edx);
1221 LOperand* right_operand = UseFixed(right, reversed ? edx : eax);
1222 LInstruction* result = new LCmpTAndBranch(left_operand,
1223 right_operand,
1224 first_id,
1225 second_id);
1226 return MarkAsCall(result, instr);
1227 }
1228 } else if (v->IsIsSmi()) {
1229 HIsSmi* compare = HIsSmi::cast(v);
1230 ASSERT(compare->value()->representation().IsTagged());
1231
1232 return new LIsSmiAndBranch(Use(compare->value()),
1233 first_id,
1234 second_id);
1235 } else if (v->IsHasInstanceType()) {
1236 HHasInstanceType* compare = HHasInstanceType::cast(v);
1237 ASSERT(compare->value()->representation().IsTagged());
1238
1239 return new LHasInstanceTypeAndBranch(UseRegisterAtStart(compare->value()),
1240 TempRegister(),
1241 first_id,
1242 second_id);
1243 } else if (v->IsHasCachedArrayIndex()) {
1244 HHasCachedArrayIndex* compare = HHasCachedArrayIndex::cast(v);
1245 ASSERT(compare->value()->representation().IsTagged());
1246
1247 return new LHasCachedArrayIndexAndBranch(
1248 UseRegisterAtStart(compare->value()), first_id, second_id);
1249 } else if (v->IsIsNull()) {
1250 HIsNull* compare = HIsNull::cast(v);
1251 ASSERT(compare->value()->representation().IsTagged());
1252
1253 // We only need a temp register for non-strict compare.
1254 LOperand* temp = compare->is_strict() ? NULL : TempRegister();
1255 return new LIsNullAndBranch(UseRegisterAtStart(compare->value()),
1256 compare->is_strict(),
1257 temp,
1258 first_id,
1259 second_id);
1260 } else if (v->IsCompareJSObjectEq()) {
1261 HCompareJSObjectEq* compare = HCompareJSObjectEq::cast(v);
1262 return new LCmpJSObjectEqAndBranch(UseRegisterAtStart(compare->left()),
1263 UseRegisterAtStart(compare->right()),
1264 first_id,
1265 second_id);
1266 } else if (v->IsInstanceOf()) {
1267 HInstanceOf* instance_of = HInstanceOf::cast(v);
1268 LInstruction* result =
1269 new LInstanceOfAndBranch(Use(instance_of->left()),
1270 Use(instance_of->right()),
1271 first_id,
1272 second_id);
1273 return MarkAsCall(result, instr);
1274 } else if (v->IsTypeofIs()) {
1275 HTypeofIs* typeof_is = HTypeofIs::cast(v);
1276 return new LTypeofIsAndBranch(UseTempRegister(typeof_is->value()),
1277 first_id,
1278 second_id);
1279 } else {
1280 if (v->IsConstant()) {
1281 if (HConstant::cast(v)->handle()->IsTrue()) {
1282 return new LGoto(first_id);
1283 } else if (HConstant::cast(v)->handle()->IsFalse()) {
1284 return new LGoto(second_id);
1285 }
1286 }
1287 Abort("Undefined compare before branch");
1288 return NULL;
1289 }
1290 }
1291 return new LBranch(UseRegisterAtStart(v), first_id, second_id);
1292 }
1293
1294
1295 LInstruction* LChunkBuilder::DoCompareMapAndBranch(
1296 HCompareMapAndBranch* instr) {
1297 ASSERT(instr->value()->representation().IsTagged());
1298 LOperand* value = UseRegisterAtStart(instr->value());
1299 HBasicBlock* first = instr->FirstSuccessor();
1300 HBasicBlock* second = instr->SecondSuccessor();
1301 return new LCmpMapAndBranch(value,
1302 instr->map(),
1303 first->block_id(),
1304 second->block_id());
1305 }
1306
1307
1308 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
1309 return DefineAsRegister(new LArgumentsLength(Use(length->value())));
1310 }
1311
1312
1313 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
1314 return DefineAsRegister(new LArgumentsElements);
1315 }
1316
1317
1318 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
1319 LInstruction* result =
1320 new LInstanceOf(Use(instr->left()), Use(instr->right()));
1321 return MarkAsCall(DefineFixed(result, eax), instr);
1322 }
1323
1324
1325 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1326 LOperand* function = UseFixed(instr->function(), edi);
1327 LOperand* receiver = UseFixed(instr->receiver(), eax);
1328 LOperand* length = UseRegisterAtStart(instr->length());
1329 LOperand* elements = UseRegisterAtStart(instr->elements());
1330 LInstruction* result = new LApplyArguments(function,
1331 receiver,
1332 length,
1333 elements);
1334 return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
1335 }
1336
1337
1338 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1339 ++argument_count_;
1340 LOperand* argument = Use(instr->argument());
1341 return new LPushArgument(argument);
1342 }
1343
1344
1345 LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
1346 return DefineAsRegister(new LGlobalObject);
1347 }
1348
1349
1350 LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
1351 return DefineAsRegister(new LGlobalReceiver);
1352 }
1353
1354
1355 LInstruction* LChunkBuilder::DoCallConstantFunction(
1356 HCallConstantFunction* instr) {
1357 argument_count_ -= instr->argument_count();
1358 return MarkAsCall(DefineFixed(new LCallConstantFunction, eax), instr);
1359 }
1360
1361
1362 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1363 MathFunctionId op = instr->op();
1364 LOperand* input = UseRegisterAtStart(instr->value());
1365 LInstruction* result = new LUnaryMathOperation(input);
1366 switch (op) {
1367 case kMathAbs:
1368 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1369 case kMathFloor:
1370 return AssignEnvironment(DefineAsRegister(result));
1371 case kMathRound:
1372 return AssignEnvironment(DefineAsRegister(result));
1373 case kMathSqrt:
1374 return DefineSameAsFirst(result);
1375 default:
1376 UNREACHABLE();
1377 return NULL;
1378 }
1379 }
1380
1381
1382 LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
1383 ASSERT(instr->key()->representation().IsTagged());
1384 argument_count_ -= instr->argument_count();
1385 UseFixed(instr->key(), ecx);
1386 return MarkAsCall(DefineFixed(new LCallKeyed, eax), instr);
1387 }
1388
1389
1390 LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
1391 argument_count_ -= instr->argument_count();
1392 return MarkAsCall(DefineFixed(new LCallNamed, eax), instr);
1393 }
1394
1395
1396 LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
1397 argument_count_ -= instr->argument_count();
1398 return MarkAsCall(DefineFixed(new LCallGlobal, eax), instr);
1399 }
1400
1401
1402 LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
1403 argument_count_ -= instr->argument_count();
1404 return MarkAsCall(DefineFixed(new LCallKnownGlobal, eax), instr);
1405 }
1406
1407
1408 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1409 LOperand* constructor = UseFixed(instr->constructor(), edi);
1410 argument_count_ -= instr->argument_count();
1411 LInstruction* result = new LCallNew(constructor);
1412 return MarkAsCall(DefineFixed(result, eax), instr);
1413 }
1414
1415
1416 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1417 argument_count_ -= instr->argument_count();
1418 return MarkAsCall(DefineFixed(new LCallFunction, eax), instr);
1419 }
1420
1421
1422 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1423 argument_count_ -= instr->argument_count();
1424 return MarkAsCall(DefineFixed(new LCallRuntime, eax), instr);
1425 }
1426
1427
1428 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1429 return DoShift(Token::SHR, instr);
1430 }
1431
1432
1433 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1434 return DoShift(Token::SAR, instr);
1435 }
1436
1437
1438 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1439 return DoShift(Token::SHL, instr);
1440 }
1441
1442
1443 LInstruction* LChunkBuilder::DoBitAnd(HBitAnd* instr) {
1444 return DoBit(Token::BIT_AND, instr);
1445 }
1446
1447
1448 LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
1449 ASSERT(instr->value()->representation().IsInteger32());
1450 ASSERT(instr->representation().IsInteger32());
1451 return DefineSameAsFirst(new LBitNotI(UseRegisterAtStart(instr->value())));
1452 }
1453
1454
1455 LInstruction* LChunkBuilder::DoBitOr(HBitOr* instr) {
1456 return DoBit(Token::BIT_OR, instr);
1457 }
1458
1459
1460 LInstruction* LChunkBuilder::DoBitXor(HBitXor* instr) {
1461 return DoBit(Token::BIT_XOR, instr);
1462 }
1463
1464
1465 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1466 if (instr->representation().IsDouble()) {
1467 return DoArithmeticD(Token::DIV, instr);
1468 } else if (instr->representation().IsInteger32()) {
1469 // The temporary operand is necessary to ensure that right is not allocated
1470 // into edx.
1471 FixedTemp(edx);
1472 LOperand* value = UseFixed(instr->left(), eax);
1473 LOperand* divisor = UseRegister(instr->right());
1474 return AssignEnvironment(DefineFixed(new LDivI(value, divisor), eax));
1475 } else {
1476 ASSERT(instr->representation().IsTagged());
1477 return DoArithmeticT(Token::DIV, instr);
1478 }
1479 }
1480
1481
1482 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1483 if (instr->representation().IsInteger32()) {
1484 ASSERT(instr->left()->representation().IsInteger32());
1485 ASSERT(instr->right()->representation().IsInteger32());
1486 // The temporary operand is necessary to ensure that right is not allocated
1487 // into edx.
1488 FixedTemp(edx);
1489 LOperand* value = UseFixed(instr->left(), eax);
1490 LOperand* divisor = UseRegister(instr->right());
1491 LInstruction* result = DefineFixed(new LModI(value, divisor), edx);
1492 if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1493 instr->CheckFlag(HValue::kCanBeDivByZero)) {
1494 result = AssignEnvironment(result);
1495 }
1496 return result;
1497 } else if (instr->representation().IsTagged()) {
1498 return DoArithmeticT(Token::MOD, instr);
1499 } else {
1500 ASSERT(instr->representation().IsDouble());
1501 // We call a C function for double modulo. It can't trigger a GC.
1502 // We need to use a fixed result register for the call.
1503 // TODO(fschneider): Allow any register as input registers.
1504 LOperand* left = UseFixedDouble(instr->left(), xmm1);
1505 LOperand* right = UseFixedDouble(instr->right(), xmm2);
1506 LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
1507 return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
1508 }
1509 }
1510
1511
1512 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1513 if (instr->representation().IsInteger32()) {
1514 ASSERT(instr->left()->representation().IsInteger32());
1515 ASSERT(instr->right()->representation().IsInteger32());
1516 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1517 LOperand* right = UseOrConstant(instr->MostConstantOperand());
1518 LOperand* temp = NULL;
1519 if (instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1520 temp = TempRegister();
1521 }
1522 LMulI* mul = new LMulI(left, right, temp);
1523 return AssignEnvironment(DefineSameAsFirst(mul));
1524 } else if (instr->representation().IsDouble()) {
1525 return DoArithmeticD(Token::MUL, instr);
1526 } else {
1527 ASSERT(instr->representation().IsTagged());
1528 return DoArithmeticT(Token::MUL, instr);
1529 }
1530 }
1531
1532
1533 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1534 if (instr->representation().IsInteger32()) {
1535 ASSERT(instr->left()->representation().IsInteger32());
1536 ASSERT(instr->right()->representation().IsInteger32());
1537 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1538 LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1539 LSubI* sub = new LSubI(left, right);
1540 LInstruction* result = DefineSameAsFirst(sub);
1541 if (instr->CheckFlag(HValue::kCanOverflow)) {
1542 result = AssignEnvironment(result);
1543 }
1544 return result;
1545 } else if (instr->representation().IsDouble()) {
1546 return DoArithmeticD(Token::SUB, instr);
1547 } else {
1548 ASSERT(instr->representation().IsTagged());
1549 return DoArithmeticT(Token::SUB, instr);
1550 }
1551 }
1552
1553
1554 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1555 if (instr->representation().IsInteger32()) {
1556 ASSERT(instr->left()->representation().IsInteger32());
1557 ASSERT(instr->right()->representation().IsInteger32());
1558 LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
1559 LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
1560 LAddI* add = new LAddI(left, right);
1561 LInstruction* result = DefineSameAsFirst(add);
1562 if (instr->CheckFlag(HValue::kCanOverflow)) {
1563 result = AssignEnvironment(result);
1564 }
1565 return result;
1566 } else if (instr->representation().IsDouble()) {
1567 return DoArithmeticD(Token::ADD, instr);
1568 } else {
1569 ASSERT(instr->representation().IsTagged());
1570 return DoArithmeticT(Token::ADD, instr);
1571 }
1572 }
1573
1574
1575 LInstruction* LChunkBuilder::DoCompare(HCompare* instr) {
1576 Token::Value op = instr->token();
1577 if (instr->left()->representation().IsInteger32()) {
1578 ASSERT(instr->right()->representation().IsInteger32());
1579 LOperand* left = UseRegisterAtStart(instr->left());
1580 LOperand* right = UseOrConstantAtStart(instr->right());
1581 return DefineAsRegister(new LCmpID(op, left, right, false));
1582 } else if (instr->left()->representation().IsDouble()) {
1583 ASSERT(instr->right()->representation().IsDouble());
1584 LOperand* left = UseRegisterAtStart(instr->left());
1585 LOperand* right = UseRegisterAtStart(instr->right());
1586 return DefineAsRegister(new LCmpID(op, left, right, true));
1587 } else {
1588 bool reversed = (op == Token::GT || op == Token::LTE);
1589 LOperand* left = UseFixed(instr->left(), reversed ? eax : edx);
1590 LOperand* right = UseFixed(instr->right(), reversed ? edx : eax);
1591 LInstruction* result = new LCmpT(left, right);
1592 return MarkAsCall(DefineFixed(result, eax), instr);
1593 }
1594 }
1595
1596
1597 LInstruction* LChunkBuilder::DoCompareJSObjectEq(
1598 HCompareJSObjectEq* instr) {
1599 LOperand* left = UseRegisterAtStart(instr->left());
1600 LOperand* right = UseRegisterAtStart(instr->right());
1601 LInstruction* result = new LCmpJSObjectEq(left, right);
1602 return DefineAsRegister(result);
1603 }
1604
1605
1606 LInstruction* LChunkBuilder::DoIsNull(HIsNull* instr) {
1607 ASSERT(instr->value()->representation().IsTagged());
1608 LOperand* value = UseRegisterAtStart(instr->value());
1609
1610 return DefineAsRegister(new LIsNull(value,
1611 instr->is_strict()));
1612 }
1613
1614
1615 LInstruction* LChunkBuilder::DoIsSmi(HIsSmi* instr) {
1616 ASSERT(instr->value()->representation().IsTagged());
1617 LOperand* value = UseAtStart(instr->value());
1618
1619 return DefineAsRegister(new LIsSmi(value));
1620 }
1621
1622
1623 LInstruction* LChunkBuilder::DoHasInstanceType(HHasInstanceType* instr) {
1624 ASSERT(instr->value()->representation().IsTagged());
1625 LOperand* value = UseRegisterAtStart(instr->value());
1626
1627 return DefineAsRegister(new LHasInstanceType(value));
1628 }
1629
1630
1631 LInstruction* LChunkBuilder::DoHasCachedArrayIndex(
1632 HHasCachedArrayIndex* instr) {
1633 ASSERT(instr->value()->representation().IsTagged());
1634 LOperand* value = UseRegister(instr->value());
1635
1636 return DefineAsRegister(new LHasCachedArrayIndex(value));
1637 }
1638
1639
1640 LInstruction* LChunkBuilder::DoClassOfTest(HClassOfTest* instr) {
1641 ASSERT(instr->value()->representation().IsTagged());
1642 LOperand* value = UseTempRegister(instr->value());
1643
1644 return DefineSameAsFirst(new LClassOfTest(value, TempRegister()));
1645 }
1646
1647
1648 LInstruction* LChunkBuilder::DoArrayLength(HArrayLength* instr) {
1649 LOperand* array = NULL;
1650 LOperand* temporary = NULL;
1651
1652 if (instr->value()->IsLoadElements()) {
1653 array = UseRegisterAtStart(instr->value());
1654 } else {
1655 array = UseRegister(instr->value());
1656 temporary = TempRegister();
1657 }
1658
1659 LInstruction* result = new LArrayLength(array, temporary);
1660 return AssignEnvironment(DefineAsRegister(result));
1661 }
1662
1663
1664 LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
1665 LOperand* object = UseRegister(instr->value());
1666 LInstruction* result = new LValueOf(object, TempRegister());
1667 return AssignEnvironment(DefineSameAsFirst(result));
1668 }
1669
1670
1671 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1672 return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
1673 Use(instr->length())));
1674 }
1675
1676
1677 LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
1678 LOperand* value = UseFixed(instr->value(), eax);
1679 return MarkAsCall(new LThrow(value), instr);
1680 }
1681
1682
1683 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1684 Representation from = instr->from();
1685 Representation to = instr->to();
1686 if (from.IsTagged()) {
1687 if (to.IsDouble()) {
1688 LOperand* value = UseRegister(instr->value());
1689 LInstruction* res = new LNumberUntagD(value);
1690 return AssignEnvironment(DefineAsRegister(res));
1691 } else {
1692 ASSERT(to.IsInteger32());
1693 LOperand* value = UseRegister(instr->value());
1694 bool needs_check = !instr->value()->type().IsSmi();
1695 if (needs_check) {
1696 CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
1697 LOperand* xmm_temp =
1698 (instr->CanTruncateToInt32() && !cpu_features->IsSupported(SSE3))
1699 ? NULL
1700 : FixedTemp(xmm1);
1701 LInstruction* res = new LTaggedToI(value, xmm_temp);
1702 return AssignEnvironment(DefineSameAsFirst(res));
1703 } else {
1704 return DefineSameAsFirst(new LSmiUntag(value, needs_check));
1705 }
1706 }
1707 } else if (from.IsDouble()) {
1708 if (to.IsTagged()) {
1709 LOperand* value = UseRegister(instr->value());
1710 LOperand* temp = TempRegister();
1711
1712 // Make sure that temp and result_temp are different registers.
1713 LUnallocated* result_temp = TempRegister();
1714 LInstruction* result = new LNumberTagD(value, temp);
1715 return AssignPointerMap(Define(result, result_temp));
1716 } else {
1717 ASSERT(to.IsInteger32());
1718 LOperand* value = UseRegister(instr->value());
1719 return AssignEnvironment(DefineAsRegister(new LDoubleToI(value)));
1720 }
1721 } else if (from.IsInteger32()) {
1722 if (to.IsTagged()) {
1723 HValue* val = instr->value();
1724 LOperand* value = UseRegister(val);
1725 if (val->HasRange() && val->range()->IsInSmiRange()) {
1726 return DefineSameAsFirst(new LSmiTag(value));
1727 } else {
1728 LInstruction* result = new LNumberTagI(value);
1729 return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
1730 }
1731 } else {
1732 ASSERT(to.IsDouble());
1733 return DefineAsRegister(new LInteger32ToDouble(Use(instr->value())));
1734 }
1735 }
1736 UNREACHABLE();
1737 return NULL;
1738 }
1739
1740
1741 LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
1742 LOperand* value = UseRegisterAtStart(instr->value());
1743 return AssignEnvironment(new LCheckSmi(value, zero));
1744 }
1745
1746
1747 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1748 LOperand* value = UseRegisterAtStart(instr->value());
1749 LOperand* temp = TempRegister();
1750 LInstruction* result = new LCheckInstanceType(value, temp);
1751 return AssignEnvironment(result);
1752 }
1753
1754
1755 LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
1756 LOperand* temp = TempRegister();
1757 LInstruction* result =
1758 new LCheckPrototypeMaps(temp,
1759 instr->holder(),
1760 instr->receiver_map());
1761 return AssignEnvironment(result);
1762 }
1763
1764
1765 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1766 LOperand* value = UseRegisterAtStart(instr->value());
1767 return AssignEnvironment(new LCheckSmi(value, not_zero));
1768 }
1769
1770
1771 LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
1772 LOperand* value = UseRegisterAtStart(instr->value());
1773 return AssignEnvironment(new LCheckFunction(value));
1774 }
1775
1776
1777 LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
1778 LOperand* value = UseRegisterAtStart(instr->value());
1779 LInstruction* result = new LCheckMap(value);
1780 return AssignEnvironment(result);
1781 }
1782
1783
1784 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
1785 return new LReturn(UseFixed(instr->value(), eax));
1786 }
1787
1788
1789 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
1790 Representation r = instr->representation();
1791 if (r.IsInteger32()) {
1792 int32_t value = instr->Integer32Value();
1793 return DefineAsRegister(new LConstantI(value));
1794 } else if (r.IsDouble()) {
1795 double value = instr->DoubleValue();
1796 return DefineAsRegister(new LConstantD(value));
1797 } else if (r.IsTagged()) {
1798 return DefineAsRegister(new LConstantT(instr->handle()));
1799 } else {
1800 Abort("unsupported constant of type double");
1801 return NULL;
1802 }
1803 }
1804
1805
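// A global load only needs an environment when the hole value has to be
// checked, i.e. when reading the cell may trigger a deoptimization.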
1806 LInstruction* LChunkBuilder::DoLoadGlobal(HLoadGlobal* instr) {
1807 LInstruction* result = new LLoadGlobal;
1808 return instr->check_hole_value()
1809 ? AssignEnvironment(DefineAsRegister(result))
1810 : DefineAsRegister(result);
1811 }
1812
1813
1814 LInstruction* LChunkBuilder::DoStoreGlobal(HStoreGlobal* instr) {
1815 return new LStoreGlobal(UseRegisterAtStart(instr->value()));
1816 }
1817
1818
1819 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
1820 return DefineAsRegister(
1821 new LLoadNamedField(UseRegisterAtStart(instr->object())));
1822 }
1823
1824
1825 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
1826 LOperand* object = UseFixed(instr->object(), eax);
1827 LInstruction* result = DefineFixed(new LLoadNamedGeneric(object), eax);
1828 return MarkAsCall(result, instr);
1829 }
1830
1831
1832 LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
1833 LOperand* input = UseRegisterAtStart(instr->value());
1834 return DefineSameAsFirst(new LLoadElements(input));
1835 }
1836
1837
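// Fast-element loads always get an environment, presumably so they can
// deoptimize when the loaded value is the hole; double results additionally
// need the temp register described in the comment below.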
1838 LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
1839 HLoadKeyedFastElement* instr) {
1840 Representation r = instr->representation();
1841 LOperand* obj = UseRegisterAtStart(instr->object());
1842 ASSERT(instr->key()->representation().IsInteger32());
1843 LOperand* key = UseRegisterAtStart(instr->key());
1844 LOperand* load_result = NULL;
1845 // Double needs an extra temp register, because the result is converted
1846 // from a heap number to a double register.
1847 if (r.IsDouble()) load_result = TempRegister();
1848 LInstruction* result = new LLoadKeyedFastElement(obj,
1849 key,
1850 load_result);
1851 if (r.IsDouble()) {
1852 result = DefineAsRegister(result);
1853 } else {
1854 result = DefineSameAsFirst(result);
1855 }
1856 return AssignEnvironment(result);
1857 }
1858
1859
1860 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
1861 LOperand* object = UseFixed(instr->object(), edx);
1862 LOperand* key = UseFixed(instr->key(), eax);
1863
1864 LInstruction* result =
1865 DefineFixed(new LLoadKeyedGeneric(object, key), eax);
1866 return MarkAsCall(result, instr);
1867 }
1868
1869
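// The object is always forced into a temp register, and when a write barrier
// is needed the value and key are as well, because the write-barrier code
// clobbers the registers it is given.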
1870 LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
1871 HStoreKeyedFastElement* instr) {
1872 bool needs_write_barrier = instr->NeedsWriteBarrier();
1873 ASSERT(instr->value()->representation().IsTagged());
1874 ASSERT(instr->object()->representation().IsTagged());
1875 ASSERT(instr->key()->representation().IsInteger32());
1876
1877 LOperand* obj = UseTempRegister(instr->object());
1878 LOperand* val = needs_write_barrier
1879 ? UseTempRegister(instr->value())
1880 : UseRegisterAtStart(instr->value());
1881 LOperand* key = needs_write_barrier
1882 ? UseTempRegister(instr->key())
1883 : UseRegisterOrConstantAtStart(instr->key());
1884
1885 return AssignEnvironment(new LStoreKeyedFastElement(obj, key, val));
1886 }
1887
1888
1889 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
1890 LOperand* obj = UseFixed(instr->object(), edx);
1891 LOperand* key = UseFixed(instr->key(), ecx);
1892 LOperand* val = UseFixed(instr->value(), eax);
1893
1894 ASSERT(instr->object()->representation().IsTagged());
1895 ASSERT(instr->key()->representation().IsTagged());
1896 ASSERT(instr->value()->representation().IsTagged());
1897
1898 return MarkAsCall(new LStoreKeyedGeneric(obj, key, val), instr);
1899 }
1900
1901
1902 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
1903 bool needs_write_barrier = !instr->value()->type().IsSmi();
1904
1905 LOperand* obj = needs_write_barrier
1906 ? UseTempRegister(instr->object())
1907 : UseRegisterAtStart(instr->object());
1908
1909 LOperand* val = needs_write_barrier
1910 ? UseTempRegister(instr->value())
1911 : UseRegister(instr->value());
1912
1913 // We only need a scratch register if we have a write barrier or we
1914 // have a store into the properties array (not an in-object property).
1915 LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
1916 ? TempRegister() : NULL;
1917
1918 return new LStoreNamedField(obj,
1919 instr->name(),
1920 val,
1921 instr->is_in_object(),
1922 instr->offset(),
1923 temp,
1924 needs_write_barrier,
1925 instr->transition());
1926 }
1927
1928
1929 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
1930 LOperand* obj = UseFixed(instr->object(), edx);
1931 LOperand* val = UseFixed(instr->value(), eax);
1932
1933 LInstruction* result = new LStoreNamedGeneric(obj, instr->name(), val);
1934 return MarkAsCall(result, instr);
1935 }
1936
1937
1938 LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
1939 return MarkAsCall(DefineFixed(new LArrayLiteral, eax), instr);
1940 }
1941
1942
1943 LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
1944 return MarkAsCall(DefineFixed(new LObjectLiteral, eax), instr);
1945 }
1946
1947
1948 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
1949 return MarkAsCall(DefineFixed(new LRegExpLiteral, eax), instr);
1950 }
1951
1952
1953 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
1954 return MarkAsCall(DefineFixed(new LFunctionLiteral, eax), instr);
1955 }
1956
1957
1958 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
1959 LInstruction* result = new LDeleteProperty(Use(instr->object()),
1960 UseOrConstant(instr->key()));
1961 return MarkAsCall(DefineFixed(result, eax), instr);
1962 }
1963
1964
1965 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
1966 allocator_->MarkAsOsrEntry();
1967 current_block_->last_environment()->set_ast_id(instr->ast_id());
1968 return AssignEnvironment(new LOsrEntry);
1969 }
1970
1971
1972 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
1973 int spill_index = chunk()->GetParameterStackSlot(instr->index());
1974 return DefineAsSpilled(new LParameter, spill_index);
1975 }
1976
1977
1978 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
1979 int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
1980 return DefineAsSpilled(new LUnknownOSRValue, spill_index);
1981 }
1982
1983
1984 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
1985 argument_count_ -= instr->argument_count();
1986 return MarkAsCall(DefineFixed(new LCallStub, eax), instr);
1987 }
1988
1989
1990 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
1991 // There are no real uses of the arguments object; any real use causes a
1992 // bail-out earlier, so no code needs to be generated here.
1993 return NULL;
1994 }
1995
1996
1997 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
1998 LOperand* arguments = UseRegister(instr->arguments());
1999 LOperand* length = UseTempRegister(instr->length());
2000 LOperand* index = Use(instr->index());
2001 LInstruction* result = new LAccessArgumentsAt(arguments, length, index);
2002 return DefineAsRegister(AssignEnvironment(result));
2003 }
2004
2005
2006 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2007 LInstruction* result = new LTypeof(Use(instr->value()));
2008 return MarkAsCall(DefineFixed(result, eax), instr);
2009 }
2010
2011
2012 LInstruction* LChunkBuilder::DoTypeofIs(HTypeofIs* instr) {
2013 return DefineSameAsFirst(new LTypeofIs(UseRegister(instr->value())));
2014 }
2015
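// DoSimulate rebuilds the abstract environment at a simulation point: it
// drops popped values, rebinds explicitly assigned slots, pushes the rest
// and tags the environment with the simulate's AST id. If an earlier
// instruction is still waiting for a deoptimization environment, a lazy
// bailout is emitted here to capture it.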
2016 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2017 HEnvironment* env = current_block_->last_environment();
2018 ASSERT(env != NULL);
2019
2020 env->set_ast_id(instr->ast_id());
2021
2022 env->Drop(instr->pop_count());
2023 for (int i = 0; i < instr->values()->length(); ++i) {
2024 HValue* value = instr->values()->at(i);
2025 if (instr->HasAssignedIndexAt(i)) {
2026 env->Bind(instr->GetAssignedIndexAt(i), value);
2027 } else {
2028 env->Push(value);
2029 }
2030 }
2031
2032 if (FLAG_trace_environment) {
2033 PrintF("Reconstructed environment ast_id=%d, instr_id=%d\n",
2034 instr->ast_id(),
2035 instr->id());
2036 env->PrintToStd();
2037 }
2038 ASSERT(env->values()->length() == instr->environment_height());
2039
2040 // If there is an instruction with a pending deoptimization environment,
2041 // create a lazy bailout instruction to capture the environment.
2042 if (pending_deoptimization_ast_id_ == instr->ast_id()) {
2043 LInstruction* result = new LLazyBailout;
2044 result = AssignEnvironment(result);
2045 instructions_pending_deoptimization_environment_->
2046 set_deoptimization_environment(result->environment());
2047 ClearInstructionPendingDeoptimizationEnvironment();
2048 return result;
2049 }
2050
2051 return NULL;
2052 }
2053
2054
2055 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2056 return MarkAsCall(new LStackCheck, instr);
2057 }
2058
2059
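// Entering an inlined function: the callee's environment is derived from the
// caller's last environment, and the inlined closure is recorded on the
// chunk (it is referenced later, e.g. by the deoptimization data).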
2060 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2061 HEnvironment* outer = current_block_->last_environment();
2062 HConstant* undefined = graph()->GetConstantUndefined();
2063 HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2064 instr->function(),
2065 false,
2066 undefined);
2067 current_block_->UpdateEnvironment(inner);
2068 chunk_->AddInlinedClosure(instr->closure());
2069 return NULL;
2070 }
2071
2072
2073 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2074 HEnvironment* outer = current_block_->last_environment()->outer();
2075 current_block_->UpdateEnvironment(outer);
2076 return NULL;
2077 }
2078
2079
2080 void LPointerMap::RecordPointer(LOperand* op) {
2081 // Do not record arguments as pointers; argument slots have negative indices.
2082 if (op->IsStackSlot() && op->index() < 0) return;
2083 ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
2084 pointer_operands_.Add(op);
2085 }
2086
2087
2088 void LPointerMap::PrintTo(StringStream* stream) const {
2089 stream->Add("{");
2090 for (int i = 0; i < pointer_operands_.length(); ++i) {
2091 if (i != 0) stream->Add(";");
2092 pointer_operands_[i]->PrintTo(stream);
2093 }
2094 stream->Add("} @%d", position());
2095 }
2096
2097 } } // namespace v8::internal