Chromium Code Reviews

Unified Diff: src/compiler/register-allocator.cc

Issue 694473002: [turbofan] optimize hot loop in ResolveControlFlow (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 1 month ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/linkage.h"
 #include "src/compiler/pipeline-statistics.h"
 #include "src/compiler/register-allocator.h"
 #include "src/macro-assembler.h"  // TODO(dcarney): remove this.
 #include "src/string-stream.h"

(...skipping 182 matching lines...)
 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
   // We cannot spill a live range that has a use requiring a register
   // at the current or the immediate next position.
   UsePosition* use_pos = NextRegisterPosition(pos);
   if (use_pos == NULL) return true;
   return use_pos->pos().Value() >
          pos.NextInstruction().InstructionEnd().Value();
 }


-InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
+InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) const {
   InstructionOperand* op = NULL;
   if (HasRegisterAssigned()) {
     DCHECK(!IsSpilled());
     switch (Kind()) {
       case GENERAL_REGISTERS:
         op = RegisterOperand::Create(assigned_register(), zone);
         break;
       case DOUBLE_REGISTERS:
         op = DoubleRegisterOperand::Create(assigned_register(), zone);
         break;
(...skipping 951 matching lines...)
     PhaseScope phase_scope(stats, "resolve control flow");
     ResolveControlFlow();
   }
   frame()->SetAllocatedRegisters(assigned_registers_);
   frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
   return true;
 }


 void RegisterAllocator::MeetRegisterConstraints() {
-  for (int i = 0; i < code()->InstructionBlockCount(); ++i) {
-    MeetRegisterConstraints(
-        code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
+  for (auto block : code()->instruction_blocks()) {
+    MeetRegisterConstraints(block);
     if (!AllocationOk()) return;
   }
 }


 void RegisterAllocator::ResolvePhis() {
   // Process the blocks in reverse order.
-  for (int i = code()->InstructionBlockCount() - 1; i >= 0; --i) {
-    ResolvePhis(code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(i)));
+  for (auto i = code()->instruction_blocks().rbegin();
+       i != code()->instruction_blocks().rend(); ++i) {
+    ResolvePhis(*i);
   }
 }

-
-void RegisterAllocator::ResolveControlFlow(LiveRange* range,
-                                           const InstructionBlock* block,
-                                           const InstructionBlock* pred) {
-  LifetimePosition pred_end =
-      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
-  LifetimePosition cur_start =
-      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
-  LiveRange* pred_cover = NULL;
-  LiveRange* cur_cover = NULL;
-  LiveRange* cur_range = range;
-  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
-    if (cur_range->CanCover(cur_start)) {
-      DCHECK(cur_cover == NULL);
-      cur_cover = cur_range;
-    }
-    if (cur_range->CanCover(pred_end)) {
-      DCHECK(pred_cover == NULL);
-      pred_cover = cur_range;
-    }
-    cur_range = cur_range->next();
-  }
-
-  if (cur_cover->IsSpilled()) return;
-  DCHECK(pred_cover != NULL && cur_cover != NULL);
-  if (pred_cover != cur_cover) {
-    InstructionOperand* pred_op =
-        pred_cover->CreateAssignedOperand(code_zone());
-    InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
-    if (!pred_op->Equals(cur_op)) {
-      GapInstruction* gap = NULL;
-      if (block->PredecessorCount() == 1) {
-        gap = code()->GapAt(block->first_instruction_index());
-      } else {
-        DCHECK(pred->SuccessorCount() == 1);
-        gap = GetLastGap(pred);
-
-        Instruction* branch = InstructionAt(pred->last_instruction_index());
-        DCHECK(!branch->HasPointerMap());
-        USE(branch);
-      }
-      gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
-          ->AddMove(pred_op, cur_op, code_zone());
-    }
-  }
-}
-

 ParallelMove* RegisterAllocator::GetConnectingParallelMove(
     LifetimePosition pos) {
   int index = pos.InstructionIndex();
   if (code()->IsGapAt(index)) {
     GapInstruction* gap = code()->GapAt(index);
     return gap->GetOrCreateParallelMove(
         pos.IsInstructionStart() ? GapInstruction::START : GapInstruction::END,
         code_zone());
   }
(...skipping 46 matching lines...)
 }


 bool RegisterAllocator::CanEagerlyResolveControlFlow(
     const InstructionBlock* block) const {
   if (block->PredecessorCount() != 1) return false;
   return block->predecessors()[0].IsNext(block->rpo_number());
 }


+namespace {
+
+class LiveRangeBound {
+ public:
+  explicit LiveRangeBound(const LiveRange* range)
+      : range_(range), start_(range->Start()), end_(range->End()) {
+    DCHECK(!range->IsEmpty());
+  }
+
+  bool CanCover(LifetimePosition position) {
+    return start_.Value() <= position.Value() &&
+           position.Value() < end_.Value();
+  }
+
+  const LiveRange* const range_;
+  const LifetimePosition start_;
+  const LifetimePosition end_;
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeBound);
+};
+
+
+struct FindResult {
+  const LiveRange* cur_cover_;
+  const LiveRange* pred_cover_;
+};
+
+
+class LiveRangeBoundArray {
+ public:
+  LiveRangeBoundArray() : length_(0), start_(nullptr) {}
+
+  bool ShouldInitialize() { return start_ == NULL; }
+
+  void Initialize(Zone* zone, const LiveRange* const range) {
+    size_t length = 0;
+    for (const LiveRange* i = range; i != NULL; i = i->next()) length++;
+    start_ = zone->NewArray<LiveRangeBound>(static_cast<int>(length));
+    length_ = length;
+    LiveRangeBound* curr = start_;
+    for (const LiveRange* i = range; i != NULL; i = i->next(), ++curr) {
+      new (curr) LiveRangeBound(i);
+    }
+  }
+
+  LiveRangeBound* Find(const LifetimePosition position) {
Jarin 2014/11/04 10:42:08 Could not we use std::lower_bound for this?
dcarney 2014/11/05 09:02:02 Benchmarking says no; I refactored the current code
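
A minimal sketch, not part of this patch, of the std::lower_bound variant Jarin suggests. It assumes <algorithm> is included and relies on the same property the hand-rolled search below relies on: the bounds built by Initialize() are sorted and non-overlapping. The FindWithLowerBound name and the comparator are hypothetical:

  // Hypothetical alternative to the hand-rolled binary search below: the
  // covering bound is the first one whose end lies strictly after |position|.
  LiveRangeBound* FindWithLowerBound(LifetimePosition position) {
    LiveRangeBound* bound = std::lower_bound(
        start_, start_ + length_, position,
        [](const LiveRangeBound& b, LifetimePosition pos) {
          return b.end_.Value() <= pos.Value();
        });
    DCHECK(bound != start_ + length_);  // Some bound must cover |position|.
    DCHECK(bound->CanCover(position));
    return bound;
  }

As dcarney notes, benchmarking favored the explicit loop, so the patch keeps the manual search that follows.
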
+    size_t left_index = 0;
+    size_t right_index = length_ - 1;
+    while (true) {
+      size_t current_index = left_index + (right_index - left_index) / 2;
+      LiveRangeBound* bound = &start_[current_index];
+      if (bound->CanCover(position)) return bound;
+      if (position.Value() < bound->start_.Value()) {
+        DCHECK(right_index != current_index);
Jarin 2014/11/04 10:42:08 How about DCHECK(right_index > current_index)?
dcarney 2014/11/05 09:02:02 Done.
+        right_index = current_index;
+      } else {
+        DCHECK(position.Value() >= bound->end_.Value());
+        if (left_index == current_index) {
+          DCHECK(right_index == left_index + 1);
+          left_index = right_index;
Jarin 2014/11/04 10:42:09 I think this degenerate case would not appear if y
dcarney 2014/11/05 09:02:02 Done.
+        } else {
+          left_index = current_index;
+        }
+      }
+    }
+  }
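
For illustration only, and not what this patch set contains: a sketch of how the loop above might look once both review comments are addressed, i.e. with the stronger DCHECK and with the left_index == current_index special case removed by stepping left_index past the probed element:

  // Hypothetical simplified search: the covering bound always stays inside
  // [left_index, right_index], so advancing left_index to current_index + 1
  // keeps the invariant and removes the degenerate case.
  LiveRangeBound* Find(const LifetimePosition position) {
    size_t left_index = 0;
    size_t right_index = length_ - 1;
    while (true) {
      size_t current_index = left_index + (right_index - left_index) / 2;
      LiveRangeBound* bound = &start_[current_index];
      if (bound->CanCover(position)) return bound;
      if (position.Value() < bound->start_.Value()) {
        DCHECK(right_index > current_index);
        right_index = current_index;
      } else {
        DCHECK(position.Value() >= bound->end_.Value());
        left_index = current_index + 1;
      }
    }
  }
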
+
+  void Find(const InstructionBlock* block, const InstructionBlock* pred,
+            FindResult* result) {
+    const LifetimePosition pred_end =
+        LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
+    LiveRangeBound* bound = Find(pred_end);
+    result->pred_cover_ = bound->range_;
+    const LifetimePosition cur_start = LifetimePosition::FromInstructionIndex(
+        block->first_instruction_index());
+    // Common case.
+    if (bound->CanCover(cur_start)) {
+      result->cur_cover_ = bound->range_;
+      return;
+    }
+    result->cur_cover_ = Find(cur_start)->range_;
+    DCHECK(result->pred_cover_ != NULL && result->cur_cover_ != NULL);
+  }
+
+ private:
+  size_t length_;
+  LiveRangeBound* start_;
+
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeBoundArray);
+};
+
+
+class LiveRangeFinder {
+ public:
+  explicit LiveRangeFinder(const RegisterAllocator& allocator)
+      : allocator_(allocator),
+        bounds_length_(allocator.live_ranges().length()),
+        bounds_(
+            allocator.zone()->NewArray<LiveRangeBoundArray>(bounds_length_)) {
+    for (int i = 0; i < bounds_length_; ++i) {
+      new (&bounds_[i]) LiveRangeBoundArray();
+    }
+  }
+
+  bool SetCurrent(int operand_index) {
Jarin 2014/11/04 10:42:09 Maybe this could return the LiveRangeBound pointer
dcarney 2014/11/05 09:02:02 Done.
+    current_ = NULL;
+    // TODO(dcarney): can this happen at this point?
Jarin 2014/11/04 10:42:09 Yeah, just DCHECK here that the things that should
dcarney 2014/11/05 09:02:02 Done.
+    if (operand_index >= bounds_length_) return false;
+    const LiveRange* range = allocator_.live_ranges()[operand_index];
+    // TODO(dcarney): can this happen at this point?
+    if (range == NULL || range->IsEmpty()) return false;
+    LiveRangeBoundArray* array = &bounds_[operand_index];
+    if (array->ShouldInitialize()) {
+      array->Initialize(allocator_.zone(), range);
+    }
+    current_ = array;
+    return true;
+  }
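
A rough sketch, again not this patch set, of the shape the two comments above point toward. One reading of the truncated suggestion is that SetCurrent() could return the bound array (or NULL) instead of a bool, with the TODO conditions turned into DCHECKs; the ArrayFor name is hypothetical:

  // Hypothetical replacement for SetCurrent(): hand back the (lazily
  // initialized) array for the operand and assert the preconditions
  // instead of testing them.
  LiveRangeBoundArray* ArrayFor(int operand_index) {
    DCHECK(operand_index < bounds_length_);
    const LiveRange* range = allocator_.live_ranges()[operand_index];
    DCHECK(range != NULL && !range->IsEmpty());
    LiveRangeBoundArray* array = &bounds_[operand_index];
    if (array->ShouldInitialize()) {
      array->Initialize(allocator_.zone(), range);
    }
    return array;
  }
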
+
+  void Find(const InstructionBlock* block, const InstructionBlock* pred,
+            FindResult* result) {
+    current_->Find(block, pred, result);
+  }
+
+ private:
+  const RegisterAllocator& allocator_;
+  const int bounds_length_;
+  LiveRangeBoundArray* const bounds_;
+  LiveRangeBoundArray* current_;
+
+  DISALLOW_COPY_AND_ASSIGN(LiveRangeFinder);
+};
+
+}  // namespace
+
+
 void RegisterAllocator::ResolveControlFlow() {
-  for (int block_id = 1; block_id < code()->InstructionBlockCount();
-       ++block_id) {
-    const InstructionBlock* block =
-        code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
+  // Lazily linearize live ranges in memory for fast lookup.
+  LiveRangeFinder finder(*this);
+  for (auto block : code()->instruction_blocks()) {
     if (CanEagerlyResolveControlFlow(block)) continue;
     BitVector* live = live_in_sets_[block->rpo_number().ToInt()];
     BitVector::Iterator iterator(live);
     while (!iterator.Done()) {
-      int operand_index = iterator.Current();
+      if (!finder.SetCurrent(iterator.Current())) continue;
       for (auto pred : block->predecessors()) {
-        const InstructionBlock* cur = code()->InstructionBlockAt(pred);
-        LiveRange* cur_range = LiveRangeFor(operand_index);
-        ResolveControlFlow(cur_range, block, cur);
+        FindResult result;
+        const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
+        finder.Find(block, pred_block, &result);
+        if (result.cur_cover_ == result.pred_cover_ ||
+            result.cur_cover_->IsSpilled())
+          continue;
+        ResolveControlFlow(block, result.cur_cover_, pred_block,
+                           result.pred_cover_);
       }
       iterator.Advance();
     }
   }
 }


+void RegisterAllocator::ResolveControlFlow(const InstructionBlock* block,
+                                           const LiveRange* cur_cover,
+                                           const InstructionBlock* pred,
+                                           const LiveRange* pred_cover) {
+  InstructionOperand* pred_op = pred_cover->CreateAssignedOperand(code_zone());
+  InstructionOperand* cur_op = cur_cover->CreateAssignedOperand(code_zone());
+  if (!pred_op->Equals(cur_op)) {
+    GapInstruction* gap = NULL;
+    if (block->PredecessorCount() == 1) {
+      gap = code()->GapAt(block->first_instruction_index());
+    } else {
+      DCHECK(pred->SuccessorCount() == 1);
+      gap = GetLastGap(pred);
+
+      Instruction* branch = InstructionAt(pred->last_instruction_index());
+      DCHECK(!branch->HasPointerMap());
+      USE(branch);
+    }
+    gap->GetOrCreateParallelMove(GapInstruction::START, code_zone())
+        ->AddMove(pred_op, cur_op, code_zone());
+  }
+}
+
+
 void RegisterAllocator::BuildLiveRanges() {
   InitializeLivenessAnalysis();
   // Process the blocks in reverse order.
   for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
        --block_id) {
     const InstructionBlock* block =
         code()->InstructionBlockAt(BasicBlock::RpoNumber::FromInt(block_id));
     BitVector* live = ComputeLiveOut(block);
     // Initially consider all live_out values live for the entire block. We
     // will shorten these intervals if necessary.
(...skipping 884 matching lines...)
   } else {
     DCHECK(range->Kind() == GENERAL_REGISTERS);
     assigned_registers_->Add(reg);
   }
   range->set_assigned_register(reg, code_zone());
 }

 }
 }
 }  // namespace v8::internal::compiler