Chromium Code Reviews

Unified Diff: src/compiler/register-allocator.cc

Issue 1080953006: Revert of Introducing the LLVM greedy register allocator. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 8 months ago
(Lines removed by this revert are prefixed with '-', lines added are prefixed with '+'; unchanged context has no prefix.)
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/linkage.h"
#include "src/compiler/register-allocator.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                             \
  do {                                         \
    if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
  } while (false)
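Note: the do { ... } while (false) wrapper is the standard trick for making a statement-like macro; the expansion is a single statement, so TRACE(...) composes safely with if/else. A minimal standalone illustration, using a hypothetical LOG macro and flag_trace flag standing in for TRACE and FLAG_trace_alloc:

#include <cstdio>

static bool flag_trace = true;  // stand-in for FLAG_trace_alloc

// Same shape as TRACE above; hypothetical name to avoid clashing.
#define LOG(...)                              \
  do {                                        \
    if (flag_trace) std::printf(__VA_ARGS__); \
  } while (false)

int main() {
  // Because LOG(...) expands to a single statement, it nests safely in
  // an if/else. With a bare `if (flag_trace) printf(...)` expansion, the
  // semicolon after LOG(...) would orphan the else and fail to compile.
  if (flag_trace)
    LOG("tracing enabled\n");
  else
    std::printf("tracing disabled\n");
  return 0;
}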

-
namespace {

inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}
(...skipping 1428 matching lines...)
}


void LiveRangeBuilder::Verify() const {
  for (auto current : data()->live_ranges()) {
    if (current != nullptr) current->Verify();
  }
}


-RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
-                                     RegisterKind kind)
-    : data_(data),
-      mode_(kind),
-      num_registers_(GetRegisterCount(data->config(), kind)) {}
+RegisterAllocator::RegisterAllocator(RegisterAllocationData* data)
+    : data_(data) {}


LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
                                           LifetimePosition pos) {
  DCHECK(!range->IsFixed());
  TRACE("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if splitting occurred at the end
(...skipping 96 matching lines...)
  auto first = range->TopLevel();
  if (first->HasNoSpillType()) {
    data()->AssignSpillRangeToLiveRange(first);
  }
  range->MakeSpilled();
}


LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
                                         RegisterKind kind, Zone* local_zone)
-    : RegisterAllocator(data, kind),
+    : RegisterAllocator(data),
+      mode_(kind),
+      num_registers_(GetRegisterCount(data->config(), kind)),
      unhandled_live_ranges_(local_zone),
      active_live_ranges_(local_zone),
      inactive_live_ranges_(local_zone) {
  unhandled_live_ranges().reserve(
      static_cast<size_t>(code()->VirtualRegisterCount() * 2));
  active_live_ranges().reserve(8);
  inactive_live_ranges().reserve(8);
  // TryAllocateFreeReg and AllocateBlockedReg assume this
  // when allocating local arrays.
  DCHECK(RegisterConfiguration::kMaxDoubleRegisters >=
         this->data()->config()->num_general_registers());
}


void LinearScanAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges().empty());
  DCHECK(active_live_ranges().empty());
  DCHECK(inactive_live_ranges().empty());

  for (auto range : data()->live_ranges()) {
    if (range == nullptr) continue;
-    if (range->Kind() == mode()) {
+    if (range->Kind() == mode_) {
      AddToUnhandledUnsorted(range);
    }
  }
  SortUnhandled();
  DCHECK(UnhandledIsSorted());

-  auto& fixed_ranges = GetFixedRegisters(data(), mode());
+  auto& fixed_ranges = GetFixedRegisters(data(), mode_);
  for (auto current : fixed_ranges) {
    if (current != nullptr) {
-      DCHECK_EQ(mode(), current->Kind());
+      DCHECK_EQ(mode_, current->Kind());
      AddToInactive(current);
    }
  }

  while (!unhandled_live_ranges().empty()) {
    DCHECK(UnhandledIsSorted());
    auto current = unhandled_live_ranges().back();
    unhandled_live_ranges().pop_back();
    DCHECK(UnhandledIsSorted());
    auto position = current->Start();
(...skipping 52 matching lines...)
    bool result = TryAllocateFreeReg(current);
    if (!result) AllocateBlockedReg(current);
    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }
}
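For readers skimming the diff: this is the classic linear-scan event loop. Ranges are processed in order of start position, active ranges whose lifetime has ended return their registers, and the next range either takes a free register or triggers the split/spill logic. A minimal standalone sketch of that loop, with hypothetical simplified types rather than the V8 classes:

#include <algorithm>
#include <cstdio>
#include <vector>

// Hypothetical, simplified live range: [start, end) wanting one register.
struct Range {
  int start;
  int end;
  int reg;  // -1 means unassigned/spilled
};

int main() {
  const int kNumRegs = 2;
  std::vector<Range> unhandled = {
      {0, 4, -1}, {1, 3, -1}, {2, 6, -1}, {5, 7, -1}};
  // Linear scan visits ranges in order of increasing start position.
  std::sort(unhandled.begin(), unhandled.end(),
            [](const Range& a, const Range& b) { return a.start < b.start; });

  std::vector<Range*> active;
  std::vector<bool> reg_free(kNumRegs, true);
  for (Range& cur : unhandled) {
    // Retire active ranges that ended before the current range starts,
    // returning their registers to the free pool.
    for (auto it = active.begin(); it != active.end();) {
      if ((*it)->end <= cur.start) {
        reg_free[(*it)->reg] = true;
        it = active.erase(it);
      } else {
        ++it;
      }
    }
    // Take any free register; a real allocator would split or spill here
    // (TryAllocateFreeReg / AllocateBlockedReg in the V8 code above).
    for (int r = 0; r < kNumRegs; ++r) {
      if (reg_free[r]) {
        cur.reg = r;
        reg_free[r] = false;
        active.push_back(&cur);
        break;
      }
    }
    if (cur.reg >= 0) {
      std::printf("[%d,%d) -> r%d\n", cur.start, cur.end, cur.reg);
    } else {
      std::printf("[%d,%d) -> spilled\n", cur.start, cur.end);
    }
  }
  return 0;
}

With two registers, the sample ranges [0,4), [1,3), [2,6), [5,7) come out as r0, r1, spilled, r0: the third range finds no free register while the first two are still live.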


const char* LinearScanAllocator::RegisterName(int allocation_index) const {
-  if (mode() == GENERAL_REGISTERS) {
+  if (mode_ == GENERAL_REGISTERS) {
    return data()->config()->general_register_name(allocation_index);
  } else {
    return data()->config()->double_register_name(allocation_index);
  }
}


void LinearScanAllocator::AddToActive(LiveRange* range) {
  TRACE("Add live range %d to active\n", range->id());
  active_live_ranges().push_back(range);
(...skipping 409 matching lines...)
    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it to unhandled as a whole.
    AddToUnhandledSorted(second_part);
  }
}


-class CoallescedLiveRanges : public ZoneObject {
- public:
-  explicit CoallescedLiveRanges(Zone* zone) : storage_(zone) {}
-
-  LiveRange* Find(UseInterval* query) {
-    ZoneSplayTree<Config>::Locator locator;
-
-    if (storage_.Find(GetKey(query), &locator)) {
-      return locator.value();
-    }
-    return nullptr;
-  }
-
-  // TODO(mtrofin): Change to void returning if we do not care if the interval
-  // was previously added.
-  bool Insert(LiveRange* range) {
-    auto* interval = range->first_interval();
-    while (interval != nullptr) {
-      if (!Insert(interval, range)) return false;
-      interval = interval->next();
-    }
-    return true;
-  }
-
-  bool Remove(LiveRange* range) {
-    bool ret = false;
-    auto* segment = range->first_interval();
-    while (segment != nullptr) {
-      ret |= Remove(segment);
-      segment = segment->next();
-    }
-    return ret;
-  }
-
-  bool IsEmpty() { return storage_.is_empty(); }
-
- private:
-  struct Config {
-    typedef std::pair<int, int> Key;
-    typedef LiveRange* Value;
-    static const Key kNoKey;
-    static Value NoValue() { return nullptr; }
-    static int Compare(const Key& a, const Key& b) {
-      if (a.second <= b.first) return -1;
-      if (a.first >= b.second) return 1;
-      return 0;
-    }
-  };
-
-  Config::Key GetKey(UseInterval* interval) {
-    if (interval == nullptr) return std::make_pair(0, 0);
-    return std::make_pair(interval->start().Value(), interval->end().Value());
-  }
-
-  // TODO(mtrofin): Change to void returning if we do not care if the interval
-  // was previously added.
-  bool Insert(UseInterval* interval, LiveRange* range) {
-    ZoneSplayTree<Config>::Locator locator;
-    bool ret = storage_.Insert(GetKey(interval), &locator);
-    if (ret) locator.set_value(range);
-    return ret;
-  }
-
-  bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); }
-
-  ZoneSplayTree<Config> storage_;
-  DISALLOW_COPY_AND_ASSIGN(CoallescedLiveRanges);
-};
-
-
-const std::pair<int, int> CoallescedLiveRanges::Config::kNoKey =
-    std::make_pair<int, int>(0, 0);
-
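Config::Compare above returns 0 whenever two [start, end) keys overlap, so the splay tree doubles as an interval index: Find returns whichever stored live range intersects the query interval. This only yields a consistent ordering because the intervals stored per register are mutually disjoint, which is exactly the allocation invariant. A minimal standalone illustration of the same comparator trick using std::map (hypothetical names, not V8 code):

#include <cstdio>
#include <map>
#include <utility>

// Key is a half-open interval [start, end). Two keys compare "equal"
// exactly when they overlap, so find() locates any overlapping entry.
// This is only a valid ordering if stored keys are pairwise disjoint.
struct OverlapLess {
  bool operator()(const std::pair<int, int>& a,
                  const std::pair<int, int>& b) const {
    return a.second <= b.first;  // true iff a lies entirely before b
  }
};

int main() {
  std::map<std::pair<int, int>, const char*, OverlapLess> index;
  index.emplace(std::make_pair(0, 4), "range A");
  index.emplace(std::make_pair(6, 9), "range B");

  // [3, 5) overlaps [0, 4), so the lookup finds "range A".
  auto it = index.find(std::make_pair(3, 5));
  std::printf("query [3,5) -> %s\n", it == index.end() ? "none" : it->second);

  // [4, 6) falls in the gap between the stored intervals: no match.
  it = index.find(std::make_pair(4, 6));
  std::printf("query [4,6) -> %s\n", it == index.end() ? "none" : it->second);
  return 0;
}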
-
-GreedyAllocator::GreedyAllocator(RegisterAllocationData* data,
-                                 RegisterKind kind, Zone* local_zone)
-    : RegisterAllocator(data, kind),
-      allocations_(local_zone),
-      queue_(local_zone) {}
-
-
-unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) {
-  auto interval = range->first_interval();
-  if (interval == nullptr) return 0;
-
-  unsigned size = 0;
-  while (interval != nullptr) {
-    size += (interval->end().Value() - interval->start().Value());
-    interval = interval->next();
-  }
-
-  return size;
-}
-
-
-void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) {
-  allocations_[reg_id]->Insert(range);
-  if (range->HasRegisterAssigned()) {
-    DCHECK_EQ(reg_id, range->assigned_register());
-    return;
-  }
-  range->set_assigned_register(reg_id);
-}
-
-
-float GreedyAllocator::CalculateSpillWeight(LiveRange* range) {
-  if (range->IsFixed()) return std::numeric_limits<float>::max();
-
-  if (range->FirstHint() != nullptr && range->FirstHint()->IsRegister()) {
-    return std::numeric_limits<float>::max();
-  }
-
-  unsigned use_count = 0;
-  auto* pos = range->first_pos();
-  while (pos != nullptr) {
-    use_count++;
-    pos = pos->next();
-  }
-
-  // GetLiveRangeSize is DCHECK-ed to not be 0
-  unsigned range_size = GetLiveRangeSize(range);
-  DCHECK_NE(0, range_size);
-
-  return static_cast<float>(use_count) / static_cast<float>(range_size);
-}
-
-
-float GreedyAllocator::CalculateMaxSpillWeight(
-    const ZoneSet<LiveRange*>& ranges) {
-  float max = 0.0;
-  for (auto* r : ranges) {
-    max = std::max(max, CalculateSpillWeight(r));
-  }
-  return max;
-}
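The weight is uses per unit of lifetime, so short, heavily used ranges are the costliest to spill, while fixed and register-hinted ranges are made effectively unspillable via float max. As a worked example (positions hypothetical): a range covering [10, 30) (size 20) with 4 uses gets weight 4 / 20 = 0.2, while a range covering [10, 14) (size 4) with 2 uses gets 2 / 4 = 0.5 and would therefore keep its register in a conflict between the two.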
-
-
-void GreedyAllocator::Evict(LiveRange* range) {
-  bool removed = allocations_[range->assigned_register()]->Remove(range);
-  CHECK(removed);
-}
-
-
-bool GreedyAllocator::TryAllocatePhysicalRegister(
-    unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) {
-  auto* segment = range->first_interval();
-
-  auto* alloc_info = allocations_[reg_id];
-  while (segment != nullptr) {
-    if (auto* existing = alloc_info->Find(segment)) {
-      DCHECK(existing->HasRegisterAssigned());
-      conflicting->insert(existing);
-    }
-    segment = segment->next();
-  }
-  if (!conflicting->empty()) return false;
-  // No conflicts means we can safely allocate this register to this range.
-  AssignRangeToRegister(reg_id, range);
-  return true;
-}
-
-bool GreedyAllocator::TryAllocate(LiveRange* current,
-                                  ZoneSet<LiveRange*>* conflicting) {
-  if (current->HasSpillOperand()) {
-    Spill(current);
-    return true;
-  }
-  if (current->IsFixed()) {
-    return TryAllocatePhysicalRegister(current->assigned_register(), current,
-                                       conflicting);
-  }
-
-  if (current->HasRegisterAssigned()) {
-    int reg_id = current->assigned_register();
-    return TryAllocatePhysicalRegister(reg_id, current, conflicting);
-  }
-
-  for (unsigned candidate_reg = 0; candidate_reg < allocations_.size();
-       candidate_reg++) {
-    if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) {
-      conflicting->clear();
-      return true;
-    }
-  }
-  return false;
-}
-
-LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range,
-                                              LifetimePosition start,
-                                              LifetimePosition until,
-                                              LifetimePosition end) {
-  CHECK(start.Value() < end.Value());
-  auto second_part = SplitRangeAt(range, start);
-
-  if (second_part->Start().Value() < end.Value()) {
-    // The split result intersects with [start, end[.
-    // Split it at a position between ]start+1, end[, spill the middle part,
-    // and put the rest to unhandled.
-    auto third_part_end = end.PrevStart().End();
-    if (IsBlockBoundary(code(), end.Start())) {
-      third_part_end = end.Start();
-    }
-    auto third_part = SplitBetween(
-        second_part, Max(second_part->Start().End(), until), third_part_end);
-
-    DCHECK(third_part != second_part);
-
-    Spill(second_part);
-    return third_part;
-  } else {
-    // The split result does not intersect with [start, end[.
-    // Nothing to spill. Just return it for re-processing.
-    return second_part;
-  }
-}
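The effect of SpillBetweenUntil is a three-way split: the piece before start keeps its current assignment, the middle piece (from start to a point no earlier than until and no later than end) is spilled, and the tail is returned to the caller for re-queueing. Roughly, for a range spanning [0, 30) with start = 10, until = 10, end = 20 (positions hypothetical): [0, 10) is left alone, [10, 20) is spilled, and [20, 30) comes back for another allocation attempt.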
-
-
-void GreedyAllocator::Enqueue(LiveRange* range) {
-  if (range == nullptr || range->IsEmpty()) return;
-  unsigned size = GetLiveRangeSize(range);
-  queue_.push(std::make_pair(size, range));
-}
-
-
-// TODO(mtrofin): consolidate with the identical code segment in
-// LinearScanAllocator::AllocateRegisters.
-bool GreedyAllocator::HandleSpillOperands(LiveRange* range) {
-  auto position = range->Start();
-  TRACE("Processing interval %d start=%d\n", range->id(), position.Value());
-
-  if (!range->HasNoSpillType()) {
-    TRACE("Live range %d already has a spill operand\n", range->id());
-    auto next_pos = position;
-    if (next_pos.IsGapPosition()) {
-      next_pos = next_pos.NextStart();
-    }
-    auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
-    // If the range already has a spill operand and it doesn't need a
-    // register immediately, split it and spill the first part of the range.
-    if (pos == nullptr) {
-      Spill(range);
-      return true;
-    } else if (pos->pos().Value() > range->Start().NextStart().Value()) {
-      // Do not spill the live range eagerly if the use position that can
-      // benefit from the register is too close to the start of the range.
-      auto* reminder = SpillBetweenUntil(range, position, position, pos->pos());
-      Enqueue(reminder);
-      return true;
-    }
-  }
-  return false;
-  // TODO(mtrofin): Do we need this?
-  // return (TryReuseSpillForPhi(range));
-}
-
-
-void GreedyAllocator::AllocateRegisters() {
-  for (auto range : data()->live_ranges()) {
-    if (range == nullptr) continue;
-    if (range->Kind() == mode()) {
-      DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
-      TRACE("Enqueueing live range %d to priority queue\n", range->id());
-      Enqueue(range);
-    }
-  }
-
-  allocations_.resize(num_registers());
-  auto* zone = data()->allocation_zone();
-  for (int i = 0; i < num_registers(); i++) {
-    allocations_[i] = new (zone) CoallescedLiveRanges(zone);
-  }
-
-  for (auto* current : GetFixedRegisters(data(), mode())) {
-    if (current != nullptr) {
-      DCHECK_EQ(mode(), current->Kind());
-      int reg_nr = current->assigned_register();
-      bool inserted = allocations_[reg_nr]->Insert(current);
-      CHECK(inserted);
-    }
-  }
-
-  while (!queue_.empty()) {
-    auto current_pair = queue_.top();
-    queue_.pop();
-    auto current = current_pair.second;
-    if (HandleSpillOperands(current)) continue;
-    ZoneSet<LiveRange*> conflicting(zone);
-    if (!TryAllocate(current, &conflicting)) {
-      DCHECK(!conflicting.empty());
-      float this_max = CalculateSpillWeight(current);
-      float max_conflicting = CalculateMaxSpillWeight(conflicting);
-      if (max_conflicting < this_max) {
-        for (auto* conflict : conflicting) {
-          Evict(conflict);
-          Enqueue(conflict);
-        }
-        conflicting.clear();
-        bool allocated = TryAllocate(current, &conflicting);
-        CHECK(allocated);
-        DCHECK(conflicting.empty());
-      } else {
-        DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start()));
-        bool allocated = AllocateBlockedRange(current, conflicting);
-        CHECK(allocated);
-      }
-    }
-  }
-
-  BitVector* assigned_registers = new (zone) BitVector(num_registers(), zone);
-  for (size_t i = 0; i < allocations_.size(); ++i) {
-    if (!allocations_[i]->IsEmpty()) {
-      assigned_registers->Add(static_cast<int>(i));
-    }
-  }
-  data()->frame()->SetAllocatedRegisters(assigned_registers);
-}
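The main loop is best-first: ranges leave the priority queue largest-first (Enqueue keys on GetLiveRangeSize), and on conflict the allocator compares spill weights: if every conflicting range is cheaper than the current one, they are evicted and re-queued; otherwise the current range is split and partially spilled via AllocateBlockedRange. A compact standalone sketch of just that decision, with hypothetical simplified types:

#include <algorithm>
#include <cstdio>
#include <vector>

// Hypothetical, simplified model of the evict-or-spill decision above:
// each range carries a precomputed spill weight (uses per unit length);
// a higher weight means the range is costlier to spill.
struct Range {
  int id;
  float spill_weight;
};

int main() {
  // The current candidate conflicts with two already-allocated ranges.
  Range current{7, 0.8f};
  std::vector<Range> conflicting = {{3, 0.2f}, {5, 0.5f}};

  float max_conflicting = 0.0f;
  for (const Range& r : conflicting) {
    max_conflicting = std::max(max_conflicting, r.spill_weight);
  }

  if (max_conflicting < current.spill_weight) {
    // Every occupant is cheaper to spill: evict them, re-queue them for
    // another attempt, and give the register to the current range.
    for (const Range& r : conflicting) {
      std::printf("evict and re-enqueue range %d\n", r.id);
    }
    std::printf("assign register to range %d\n", current.id);
  } else {
    // Some occupant is at least as costly: split/spill the current range
    // instead (AllocateBlockedRange in the code above).
    std::printf("spill (part of) range %d\n", current.id);
  }
  return 0;
}

Here the current range (weight 0.8) outweighs both occupants (0.2 and 0.5), so they are evicted and re-queued.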
-
-
-bool GreedyAllocator::AllocateBlockedRange(
-    LiveRange* current, const ZoneSet<LiveRange*>& conflicts) {
-  auto register_use = current->NextRegisterPosition(current->Start());
-  if (register_use == nullptr) {
-    // There is no use in the current live range that requires a register.
-    // We can just spill it.
-    Spill(current);
-    return true;
-  }
-
-  auto second_part = SplitRangeAt(current, register_use->pos());
-  Spill(second_part);
-
-  return true;
-}
-
-
OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}


void OperandAssigner::AssignSpillSlots() {
  auto& spill_ranges = data()->spill_ranges();
  // Merge disjoint spill ranges
  for (size_t i = 0; i < spill_ranges.size(); i++) {
    auto range = spill_ranges[i];
    if (range->IsEmpty()) continue;
    for (size_t j = i + 1; j < spill_ranges.size(); j++) {
(...skipping 393 matching lines...)
      moves = it->first.first;
    }
    // Gather all MoveOperands for a single ParallelMove.
    auto move = new (code_zone()) MoveOperands(it->first.second, it->second);
    auto eliminate = moves->PrepareInsertAfter(move);
    to_insert.push_back(move);
    if (eliminate != nullptr) to_eliminate.push_back(eliminate);
  }
}

-
}  // namespace compiler
}  // namespace internal
}  // namespace v8