| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-selector.h" | 5 #include "src/compiler/instruction-selector.h" |
| 6 | 6 |
| 7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
| 8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
| 9 #include "src/compiler/node-properties-inl.h" | 9 #include "src/compiler/node-properties-inl.h" |
| 10 | 10 |
| (...skipping 325 matching lines...) | |
| 336 ASSERT(deopt_node == NULL); | 336 ASSERT(deopt_node == NULL); |
| 337 } | 337 } |
| 338 | 338 |
| 339 ASSERT(input_count == (buffer->fixed_count + buffer->pushed_count)); | 339 ASSERT(input_count == (buffer->fixed_count + buffer->pushed_count)); |
| 340 } | 340 } |
| 341 | 341 |
| 342 | 342 |
| 343 void InstructionSelector::VisitBlock(BasicBlock* block) { | 343 void InstructionSelector::VisitBlock(BasicBlock* block) { |
| 344 ASSERT_EQ(NULL, current_block_); | 344 ASSERT_EQ(NULL, current_block_); |
| 345 current_block_ = block; | 345 current_block_ = block; |
| 346 size_t current_block_end = instructions_.size(); | 346 int current_block_end = static_cast<int>(instructions_.size()); |
| 347 | 347 |
| 348 // Generate code for the block control "top down", but schedule the code | 348 // Generate code for the block control "top down", but schedule the code |
| 349 // "bottom up". | 349 // "bottom up". |
| 350 VisitControl(block); | 350 VisitControl(block); |
| 351 std::reverse(instructions_.begin() + current_block_end, instructions_.end()); | 351 std::reverse(instructions_.begin() + current_block_end, instructions_.end()); |
| 352 | 352 |
| 353 // Visit code in reverse control flow order, because architecture-specific | 353 // Visit code in reverse control flow order, because architecture-specific |
| 354 // matching may cover more than one node at a time. | 354 // matching may cover more than one node at a time. |
| 355 for (BasicBlock::reverse_iterator i = block->rbegin(); i != block->rend(); | 355 for (BasicBlock::reverse_iterator i = block->rbegin(); i != block->rend(); |
| 356 ++i) { | 356 ++i) { |
| 357 Node* node = *i; | 357 Node* node = *i; |
| 358 if (!IsUsed(node)) continue; | 358 if (!IsUsed(node)) continue; |
| 359 // Generate code for this node "top down", but schedule the code "bottom | 359 // Generate code for this node "top down", but schedule the code "bottom |
| 360 // up". | 360 // up". |
| 361 size_t current_node_end = instructions_.size(); | 361 size_t current_node_end = instructions_.size(); |
| 362 VisitNode(node); | 362 VisitNode(node); |
| 363 std::reverse(instructions_.begin() + current_node_end, instructions_.end()); | 363 std::reverse(instructions_.begin() + current_node_end, instructions_.end()); |
| 364 } | 364 } |
| 365 | 365 |
| 366 // We're done with the block. | 366 // We're done with the block. |
| 367 // TODO(bmeurer): We should not mutate the schedule. | 367 // TODO(bmeurer): We should not mutate the schedule. |
| 368 block->code_end_ = current_block_end; | 368 block->code_end_ = current_block_end; |
| 369 block->code_start_ = instructions_.size(); | 369 block->code_start_ = static_cast<int>(instructions_.size()); |
| 370 | 370 |
| 371 current_block_ = NULL; | 371 current_block_ = NULL; |
| 372 } | 372 } |
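Reviewer note: `VisitBlock` above is the core of the "generate top down, schedule bottom up" scheme. Instructions for the block's control and for each visited node are appended to `instructions_` in emission order, and the freshly appended sub-range is then reversed in place so the buffer ends up in schedule order. The sketch below is a minimal standalone illustration of that sub-range reversal; the names (`instructions`, `block_begin`, the string "opcodes") are illustrative stand-ins, not V8's actual types.

```cpp
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Sketch: append "instructions" for one block in top-down (emission) order,
// then reverse just the sub-range that belongs to this block so the buffer
// ends up in bottom-up (schedule) order, mirroring VisitBlock above.
int main() {
  std::vector<std::string> instructions;  // stands in for instructions_

  size_t block_begin = instructions.size();  // start of this block's range
  // Emission order: control first, then the covered nodes (top down).
  instructions.push_back("branch");
  instructions.push_back("compare");
  instructions.push_back("load");

  // Schedule order is the reverse of emission order, for this block only.
  std::reverse(instructions.begin() + block_begin, instructions.end());

  for (const std::string& instr : instructions) std::cout << instr << "\n";
  // Prints: load, compare, branch - i.e. bottom-up schedule order.
  return 0;
}
```

The actual change in this hunk is only the narrowing of `current_block_end` from `size_t` to `int` with an explicit `static_cast`, presumably so it matches the `int`-typed `code_start_`/`code_end_` fields it is stored into on `BasicBlock`.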
| 373 | 373 |
| 374 | 374 |
| 375 static inline void CheckNoPhis(const BasicBlock* block) { | 375 static inline void CheckNoPhis(const BasicBlock* block) { |
| 376 #ifdef DEBUG | 376 #ifdef DEBUG |
| 377 // Branch targets should not have phis. | 377 // Branch targets should not have phis. |
| 378 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) { | 378 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) { |
| 379 const Node* node = *i; | 379 const Node* node = *i; |
| (...skipping 484 matching lines...) | |
| 864 FrameStateDescriptor descriptor = OpParameter<FrameStateDescriptor>(state); | 864 FrameStateDescriptor descriptor = OpParameter<FrameStateDescriptor>(state); |
| 865 // TODO(jarin) We should also add an instruction input for every input to | 865 // TODO(jarin) We should also add an instruction input for every input to |
| 866 // the framestate node (and recurse for the inlined framestates). | 866 // the framestate node (and recurse for the inlined framestates). |
| 867 int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor); | 867 int deoptimization_id = sequence()->AddDeoptimizationEntry(descriptor); |
| 868 Emit(kArchDeoptimize | MiscField::encode(deoptimization_id), NULL); | 868 Emit(kArchDeoptimize | MiscField::encode(deoptimization_id), NULL); |
| 869 } | 869 } |
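Reviewer note: `VisitDeoptimize` packs the deoptimization entry's id into the opcode word by OR-ing `MiscField::encode(deoptimization_id)` onto `kArchDeoptimize`. Below is a minimal sketch of that shift-and-mask bit-field encoding; the `BitFieldSketch`/`MiscFieldSketch` definitions and the chosen bit layout are illustrative assumptions, not V8's actual declarations.

```cpp
#include <cstdint>
#include <iostream>

// Illustrative stand-in for a BitField-style encoder: packs a small integer
// into a contiguous range of bits of a 32-bit opcode word.
template <typename T, int kShift, int kSize>
struct BitFieldSketch {
  static constexpr uint32_t kMask = ((1u << kSize) - 1u) << kShift;
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) << kShift) & kMask;
  }
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
};

// Hypothetical layout: low bits hold the architecture opcode, higher bits
// hold "misc" data such as a deoptimization id.
enum ArchOpcodeSketch : uint32_t { kArchDeoptimizeSketch = 7 };
using MiscFieldSketch = BitFieldSketch<int, 8, 16>;

int main() {
  int deoptimization_id = 42;
  uint32_t instruction_code =
      kArchDeoptimizeSketch | MiscFieldSketch::encode(deoptimization_id);
  std::cout << "opcode bits: " << (instruction_code & 0xffu) << "\n";
  std::cout << "deopt id:    " << MiscFieldSketch::decode(instruction_code)
            << "\n";
  return 0;
}
```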
| 870 | 870 |
| 871 } // namespace compiler | 871 } // namespace compiler |
| 872 } // namespace internal | 872 } // namespace internal |
| 873 } // namespace v8 | 873 } // namespace v8 |