OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/base/adapters.h" | 5 #include "src/base/adapters.h" |
6 #include "src/compiler/linkage.h" | 6 #include "src/compiler/linkage.h" |
7 #include "src/compiler/register-allocator.h" | 7 #include "src/compiler/register-allocator.h" |
8 #include "src/string-stream.h" | 8 #include "src/string-stream.h" |
9 | 9 |
10 namespace v8 { | 10 namespace v8 { |
11 namespace internal { | 11 namespace internal { |
12 namespace compiler { | 12 namespace compiler { |
13 | 13 |
14 #define TRACE(...) \ | 14 #define TRACE(...) \ |
15 do { \ | 15 do { \ |
16 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ | 16 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ |
17 } while (false) | 17 } while (false) |
18 | 18 |
19 | |
19 namespace { | 20 namespace { |
20 | 21 |
21 inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { | 22 inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) { |
22 return a.Value() < b.Value() ? a : b; | 23 return a.Value() < b.Value() ? a : b; |
23 } | 24 } |
24 | 25 |
25 | 26 |
26 inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { | 27 inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) { |
27 return a.Value() > b.Value() ? a : b; | 28 return a.Value() > b.Value() ? a : b; |
28 } | 29 } |
(...skipping 1428 matching lines...)
1457 } | 1458 } |
1458 | 1459 |
1459 | 1460 |
1460 void LiveRangeBuilder::Verify() const { | 1461 void LiveRangeBuilder::Verify() const { |
1461 for (auto current : data()->live_ranges()) { | 1462 for (auto current : data()->live_ranges()) { |
1462 if (current != nullptr) current->Verify(); | 1463 if (current != nullptr) current->Verify(); |
1463 } | 1464 } |
1464 } | 1465 } |
1465 | 1466 |
1466 | 1467 |
1467 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data) | 1468 RegisterAllocator::RegisterAllocator(RegisterAllocationData* data, |
1468 : data_(data) {} | 1469 RegisterKind kind) |
1470 : data_(data), | |
1471 mode_(kind), | |
1472 num_registers_(GetRegisterCount(data->config(), kind)) {} | |
1469 | 1473 |
1470 | 1474 |
1471 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, | 1475 LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range, |
1472 LifetimePosition pos) { | 1476 LifetimePosition pos) { |
1473 DCHECK(!range->IsFixed()); | 1477 DCHECK(!range->IsFixed()); |
1474 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); | 1478 TRACE("Splitting live range %d at %d\n", range->id(), pos.Value()); |
1475 | 1479 |
1476 if (pos.Value() <= range->Start().Value()) return range; | 1480 if (pos.Value() <= range->Start().Value()) return range; |
1477 | 1481 |
1478 // We can't properly connect live ranges if splitting occurred at the end | 1482 // We can't properly connect live ranges if splitting occurred at the end |
(...skipping 96 matching lines...)
1575 auto first = range->TopLevel(); | 1579 auto first = range->TopLevel(); |
1576 if (first->HasNoSpillType()) { | 1580 if (first->HasNoSpillType()) { |
1577 data()->AssignSpillRangeToLiveRange(first); | 1581 data()->AssignSpillRangeToLiveRange(first); |
1578 } | 1582 } |
1579 range->MakeSpilled(); | 1583 range->MakeSpilled(); |
1580 } | 1584 } |
1581 | 1585 |
1582 | 1586 |
1583 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, | 1587 LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data, |
1584 RegisterKind kind, Zone* local_zone) | 1588 RegisterKind kind, Zone* local_zone) |
1585 : RegisterAllocator(data), | 1589 : RegisterAllocator(data, kind), |
1586 mode_(kind), | |
1587 num_registers_(GetRegisterCount(data->config(), kind)), | |
1588 unhandled_live_ranges_(local_zone), | 1590 unhandled_live_ranges_(local_zone), |
1589 active_live_ranges_(local_zone), | 1591 active_live_ranges_(local_zone), |
1590 inactive_live_ranges_(local_zone) { | 1592 inactive_live_ranges_(local_zone) { |
1591 unhandled_live_ranges().reserve( | 1593 unhandled_live_ranges().reserve( |
1592 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); | 1594 static_cast<size_t>(code()->VirtualRegisterCount() * 2)); |
1593 active_live_ranges().reserve(8); | 1595 active_live_ranges().reserve(8); |
1594 inactive_live_ranges().reserve(8); | 1596 inactive_live_ranges().reserve(8); |
1595 // TryAllocateFreeReg and AllocateBlockedReg assume this | 1597 // TryAllocateFreeReg and AllocateBlockedReg assume this |
1596 // when allocating local arrays. | 1598 // when allocating local arrays. |
1597 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= | 1599 DCHECK(RegisterConfiguration::kMaxDoubleRegisters >= |
1598 this->data()->config()->num_general_registers()); | 1600 this->data()->config()->num_general_registers()); |
1599 } | 1601 } |
1600 | 1602 |
1601 | 1603 |
1602 void LinearScanAllocator::AllocateRegisters() { | 1604 void LinearScanAllocator::AllocateRegisters() { |
1603 DCHECK(unhandled_live_ranges().empty()); | 1605 DCHECK(unhandled_live_ranges().empty()); |
1604 DCHECK(active_live_ranges().empty()); | 1606 DCHECK(active_live_ranges().empty()); |
1605 DCHECK(inactive_live_ranges().empty()); | 1607 DCHECK(inactive_live_ranges().empty()); |
1606 | 1608 |
1607 for (auto range : data()->live_ranges()) { | 1609 for (auto range : data()->live_ranges()) { |
1608 if (range == nullptr) continue; | 1610 if (range == nullptr) continue; |
1609 if (range->Kind() == mode_) { | 1611 if (range->Kind() == mode()) { |
1610 AddToUnhandledUnsorted(range); | 1612 AddToUnhandledUnsorted(range); |
1611 } | 1613 } |
1612 } | 1614 } |
1613 SortUnhandled(); | 1615 SortUnhandled(); |
1614 DCHECK(UnhandledIsSorted()); | 1616 DCHECK(UnhandledIsSorted()); |
1615 | 1617 |
1616 auto& fixed_ranges = GetFixedRegisters(data(), mode_); | 1618 auto& fixed_ranges = GetFixedRegisters(data(), mode()); |
1617 for (auto current : fixed_ranges) { | 1619 for (auto current : fixed_ranges) { |
1618 if (current != nullptr) { | 1620 if (current != nullptr) { |
1619 DCHECK_EQ(mode_, current->Kind()); | 1621 DCHECK_EQ(mode(), current->Kind()); |
1620 AddToInactive(current); | 1622 AddToInactive(current); |
1621 } | 1623 } |
1622 } | 1624 } |
1623 | 1625 |
1624 while (!unhandled_live_ranges().empty()) { | 1626 while (!unhandled_live_ranges().empty()) { |
1625 DCHECK(UnhandledIsSorted()); | 1627 DCHECK(UnhandledIsSorted()); |
1626 auto current = unhandled_live_ranges().back(); | 1628 auto current = unhandled_live_ranges().back(); |
1627 unhandled_live_ranges().pop_back(); | 1629 unhandled_live_ranges().pop_back(); |
1628 DCHECK(UnhandledIsSorted()); | 1630 DCHECK(UnhandledIsSorted()); |
1629 auto position = current->Start(); | 1631 auto position = current->Start(); |
(...skipping 52 matching lines...)
1682 bool result = TryAllocateFreeReg(current); | 1684 bool result = TryAllocateFreeReg(current); |
1683 if (!result) AllocateBlockedReg(current); | 1685 if (!result) AllocateBlockedReg(current); |
1684 if (current->HasRegisterAssigned()) { | 1686 if (current->HasRegisterAssigned()) { |
1685 AddToActive(current); | 1687 AddToActive(current); |
1686 } | 1688 } |
1687 } | 1689 } |
1688 } | 1690 } |
1689 | 1691 |
1690 | 1692 |
1691 const char* LinearScanAllocator::RegisterName(int allocation_index) const { | 1693 const char* LinearScanAllocator::RegisterName(int allocation_index) const { |
1692 if (mode_ == GENERAL_REGISTERS) { | 1694 if (mode() == GENERAL_REGISTERS) { |
1693 return data()->config()->general_register_name(allocation_index); | 1695 return data()->config()->general_register_name(allocation_index); |
1694 } else { | 1696 } else { |
1695 return data()->config()->double_register_name(allocation_index); | 1697 return data()->config()->double_register_name(allocation_index); |
1696 } | 1698 } |
1697 } | 1699 } |
1698 | 1700 |
1699 | 1701 |
1700 void LinearScanAllocator::AddToActive(LiveRange* range) { | 1702 void LinearScanAllocator::AddToActive(LiveRange* range) { |
1701 TRACE("Add live range %d to active\n", range->id()); | 1703 TRACE("Add live range %d to active\n", range->id()); |
1702 active_live_ranges().push_back(range); | 1704 active_live_ranges().push_back(range); |
(...skipping 409 matching lines...)
2112 Spill(second_part); | 2114 Spill(second_part); |
2113 AddToUnhandledSorted(third_part); | 2115 AddToUnhandledSorted(third_part); |
2114 } else { | 2116 } else { |
2115 // The split result does not intersect with [start, end[. | 2117 // The split result does not intersect with [start, end[. |
2116 // Nothing to spill. Just put it to unhandled as whole. | 2118 // Nothing to spill. Just put it to unhandled as whole. |
2117 AddToUnhandledSorted(second_part); | 2119 AddToUnhandledSorted(second_part); |
2118 } | 2120 } |
2119 } | 2121 } |
2120 | 2122 |
2121 | 2123 |
2124 class CoallescedLiveRanges : public ZoneObject { | |
2125 public: | |
2126 explicit CoallescedLiveRanges(Zone* zone) : storage_(zone) {} | |
2127 | |
2128 LiveRange* Find(UseInterval* query) { | |
2129 ZoneSplayTree<Config>::Locator locator; | |
2130 | |
2131 if (storage_.Find(GetKey(query), &locator)) { | |
2132 return locator.value(); | |
2133 } | |
2134 return nullptr; | |
2135 } | |
2136 | |
2137 // TODO(mtrofin): Change to return void if we do not care whether the | |
2138 // interval was previously added. | |
2139 bool Insert(LiveRange* range) { | |
2140 auto* interval = range->first_interval(); | |
2141 while (interval != nullptr) { | |
2142 if (!Insert(interval, range)) return false; | |
2143 interval = interval->next(); | |
2144 } | |
2145 return true; | |
2146 } | |
2147 | |
2148 bool Remove(LiveRange* range) { | |
2149 bool ret = false; | |
2150 auto* segment = range->first_interval(); | |
2151 while (segment != nullptr) { | |
2152 ret |= Remove(segment); | |
2153 segment = segment->next(); | |
2154 } | |
2155 return ret; | |
2156 } | |
2157 | |
2158 bool IsEmpty() { return storage_.is_empty(); } | |
2159 | |
2160 private: | |
2161 struct Config { | |
2162 typedef std::pair<int, int> Key; | |
2163 typedef LiveRange* Value; | |
2164 static const Key kNoKey; | |
2165 static Value NoValue() { return nullptr; } | |
2166 static int Compare(const Key& a, const Key& b) { | |
2167 if (a.second <= b.first) return -1; | |
2168 if (a.first >= b.second) return 1; | |
2169 return 0; | |
2170 } | |
2171 }; | |
2172 | |
2173 Config::Key GetKey(UseInterval* interval) { | |
2174 if (interval == nullptr) return std::make_pair(0, 0); | |
2175 return std::make_pair(interval->start().Value(), interval->end().Value()); | |
2176 } | |
2177 | |
2178 // TODO(mtrofin): Change to return void if we do not care whether the | |
2179 // interval was previously added. | |
2180 bool Insert(UseInterval* interval, LiveRange* range) { | |
2181 ZoneSplayTree<Config>::Locator locator; | |
2182 bool ret = storage_.Insert(GetKey(interval), &locator); | |
2183 if (ret) locator.set_value(range); | |
2184 return ret; | |
2185 } | |
2186 | |
2187 bool Remove(UseInterval* key) { return storage_.Remove(GetKey(key)); } | |
2188 | |
2189 ZoneSplayTree<Config> storage_; | |
2190 DISALLOW_COPY_AND_ASSIGN(CoallescedLiveRanges); | |
2191 }; | |
2192 | |
2193 | |
2194 const std::pair<int, int> CoallescedLiveRanges::Config::kNoKey = {0, 0}; | |
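An aside on the Config above: Compare() reports two [start, end) keys as "equal" exactly when they overlap, so the splay tree doubles as an interval-conflict lookup. That is only a consistent ordering because the tree never stores two overlapping intervals (Insert() refuses a key that compares equal to an existing one). A minimal standalone sketch of the same trick, with std::map standing in for V8's ZoneSplayTree; OverlapLess and the stored values are illustrative, not part of this CL:

    #include <cassert>
    #include <map>
    #include <utility>

    // Sketch, not V8 code: orders [start, end) intervals; overlapping
    // intervals compare "equal", so find() doubles as a conflict query
    // over a set of pairwise-disjoint intervals.
    struct OverlapLess {
      bool operator()(const std::pair<int, int>& a,
                      const std::pair<int, int>& b) const {
        return a.second <= b.first;  // a ends before b starts
      }
    };

    int main() {
      std::map<std::pair<int, int>, int, OverlapLess> ranges;  // key -> id
      ranges[{10, 20}] = 1;
      ranges[{30, 40}] = 2;
      assert(ranges.find({15, 35}) != ranges.end());  // overlaps an entry
      assert(ranges.find({20, 30}) == ranges.end());  // fits in the gap
      return 0;
    }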
2195 | |
dcarney 2015/04/23 12:41:49
two spaces here, and between pretty much everything
2196 GreedyAllocator::GreedyAllocator(RegisterAllocationData* data, | |
2197 RegisterKind kind, Zone* local_zone) | |
2198 : RegisterAllocator(data, kind), | |
2199 allocations_(local_zone), | |
2200 queue_(local_zone) {} | |
2201 | |
2202 | |
2203 unsigned GreedyAllocator::GetLiveRangeSize(LiveRange* range) { | |
2204 auto interval = range->first_interval(); | |
2205 if (interval == nullptr) return 0; | |
2206 | |
2207 unsigned size = 0; | |
2208 while (interval != nullptr) { | |
2209 size += (interval->end().Value() - interval->start().Value()); | |
2210 interval = interval->next(); | |
2211 } | |
2212 | |
2213 return size; | |
2214 } | |
2215 | |
2216 | |
2217 void GreedyAllocator::AssignRangeToRegister(int reg_id, LiveRange* range) { | |
2218 allocations_[reg_id]->Insert(range); | |
2219 if (range->HasRegisterAssigned()) { | |
2220 DCHECK_EQ(reg_id, range->assigned_register()); | |
2221 return; | |
2222 } | |
2223 range->set_assigned_register(reg_id); | |
2224 } | |
2225 | |
2226 | |
2227 float GreedyAllocator::CalculateSpillWeight(LiveRange* range) { | |
2228 if (range->IsFixed()) return std::numeric_limits<float>::max(); | |
2229 | |
2230 if (range->FirstHint() != nullptr && range->FirstHint()->IsRegister()) { | |
2231 return std::numeric_limits<float>::max(); | |
2232 } | |
2233 | |
2234 unsigned use_count = 0; | |
2235 auto* pos = range->first_pos(); | |
2236 while (pos != nullptr) { | |
2237 use_count++; | |
2238 pos = pos->next(); | |
2239 } | |
2240 | |
2241 // The result of GetLiveRangeSize is DCHECK-ed below to be non-zero. | |
2242 unsigned range_size = GetLiveRangeSize(range); | |
2243 DCHECK_NE(0, range_size); | |
2244 | |
2245 return static_cast<float>(use_count) / static_cast<float>(range_size); | |
2246 } | |
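To make the weight concrete: a non-fixed, unhinted range covering positions [10, 30) (size 20) with four use positions gets weight 4 / 20 = 0.2, while an equally long range with ten uses gets 0.5 and is the costlier one to evict; fixed ranges and ranges whose first hint is a register report the float maximum, so a weight comparison never evicts them.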
2247 | |
2248 | |
2249 float GreedyAllocator::CalculateMaxSpillWeight( | |
2250 const ZoneSet<LiveRange*>& ranges) { | |
2251 float max = 0.0; | |
2252 for (auto* r : ranges) { | |
2253 max = std::max(max, CalculateSpillWeight(r)); | |
2254 } | |
2255 return max; | |
2256 } | |
2257 | |
2258 | |
2259 void GreedyAllocator::Evict(LiveRange* range) { | |
2260 bool removed = allocations_[range->assigned_register()]->Remove(range); | |
2261 CHECK(removed); | |
2262 } | |
2263 | |
2264 | |
2265 bool GreedyAllocator::TryAllocatePhysicalRegister( | |
2266 unsigned reg_id, LiveRange* range, ZoneSet<LiveRange*>* conflicting) { | |
2267 auto* segment = range->first_interval(); | |
2268 | |
2269 auto* alloc_info = allocations_[reg_id]; | |
2270 while (segment != nullptr) { | |
2271 if (auto* existing = alloc_info->Find(segment)) { | |
2272 DCHECK(existing->HasRegisterAssigned()); | |
2273 conflicting->insert(existing); | |
2274 } | |
2275 segment = segment->next(); | |
2276 } | |
2277 if (!conflicting->empty()) return false; | |
2278 // No conflicts means we can safely allocate this register to this range. | |
2279 AssignRangeToRegister(reg_id, range); | |
2280 return true; | |
2281 } | |
2282 | |
dcarney 2015/04/23 12:41:49
two spaces
2283 bool GreedyAllocator::TryAllocate(LiveRange* current, | |
2284 ZoneSet<LiveRange*>* conflicting) { | |
2285 if (current->HasSpillOperand()) { | |
2286 Spill(current); | |
2287 return true; | |
2288 } | |
2289 if (current->IsFixed()) { | |
2290 return TryAllocatePhysicalRegister(current->assigned_register(), current, | |
2291 conflicting); | |
2292 } | |
2293 | |
2294 if (current->HasRegisterAssigned()) { | |
2295 int reg_id = current->assigned_register(); | |
2296 return TryAllocatePhysicalRegister(reg_id, current, conflicting); | |
2297 } | |
2298 | |
2299 for (unsigned candidate_reg = 0; candidate_reg < allocations_.size(); | |
2300 candidate_reg++) { | |
2301 if (TryAllocatePhysicalRegister(candidate_reg, current, conflicting)) { | |
2302 conflicting->clear(); | |
2303 return true; | |
2304 } | |
2305 } | |
2306 return false; | |
2307 } | |
2308 | |
dcarney 2015/04/23 12:41:49
two spaces
2309 LiveRange* GreedyAllocator::SpillBetweenUntil(LiveRange* range, | |
2310 LifetimePosition start, | |
2311 LifetimePosition until, | |
2312 LifetimePosition end) { | |
2313 CHECK(start.Value() < end.Value()); | |
2314 auto second_part = SplitRangeAt(range, start); | |
2315 | |
2316 if (second_part->Start().Value() < end.Value()) { | |
2317 // The split result intersects with [start, end[. | |
2318 // Split it at a position within ]start+1, end[, spill the middle part | |
2319 // and return the rest for re-processing. | |
2320 auto third_part_end = end.PrevStart().End(); | |
2321 if (IsBlockBoundary(code(), end.Start())) { | |
2322 third_part_end = end.Start(); | |
2323 } | |
2324 auto third_part = SplitBetween( | |
2325 second_part, Max(second_part->Start().End(), until), third_part_end); | |
2326 | |
2327 DCHECK(third_part != second_part); | |
2328 | |
2329 Spill(second_part); | |
2330 return third_part; | |
2331 } else { | |
2332 // The split result does not intersect with [start, end[. | |
2333 // Nothing to spill. Just return it for re-processing. | |
2334 return second_part; | |
2335 } | |
2336 } | |
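In other words, the range is cut at start; if the resulting tail still reaches into [start, end[, it is cut a second time no earlier than until, the middle piece is spilled, and only the final piece is handed back for another allocation attempt. The only difference from linear scan's SpillBetween is that the tail is returned (and re-enqueued by the caller) rather than pushed onto the unhandled list.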
2337 | |
2338 | |
2339 void GreedyAllocator::Enqueue(LiveRange* range) { | |
2340 if (range == nullptr || range->IsEmpty()) return; | |
2341 unsigned size = GetLiveRangeSize(range); | |
2342 queue_.push(std::make_pair(size, range)); | |
2343 } | |
2344 | |
2345 | |
2346 // TODO(mtrofin): consolidate with identical code segment in | |
2347 // LinearScanAllocator::AllocateRegisters | |
dcarney 2015/04/23 12:41:49
remove this comment. we want as little consolidation as possible
2348 bool GreedyAllocator::HandleSpillOperands(LiveRange* range) { | |
2349 auto position = range->Start(); | |
2350 TRACE("Processing interval %d start=%d\n", range->id(), position.Value()); | |
2351 | |
2352 if (!range->HasNoSpillType()) { | |
2353 TRACE("Live range %d already has a spill operand\n", range->id()); | |
2354 auto next_pos = position; | |
2355 if (next_pos.IsGapPosition()) { | |
2356 next_pos = next_pos.NextStart(); | |
2357 } | |
2358 auto pos = range->NextUsePositionRegisterIsBeneficial(next_pos); | |
2359 // If the range already has a spill operand and it doesn't need a | |
2360 // register immediately, split it and spill the first part of the range. | |
2361 if (pos == nullptr) { | |
2362 Spill(range); | |
2363 return true; | |
2364 } else if (pos->pos().Value() > range->Start().NextStart().Value()) { | |
2365 // Do not spill the live range eagerly if the use position that can | |
2366 // benefit from the register is too close to the start of the live range. | |
2367 auto* remainder = SpillBetweenUntil(range, position, position, pos->pos()); | |
2368 Enqueue(remainder); | |
2369 return true; | |
2370 } | |
2371 } | |
2372 return false; | |
2373 // TODO(mtrofin): Do we need this? | |
2374 // return (TryReuseSpillForPhi(range)); | |
2375 } | |
2376 | |
2377 | |
2378 void GreedyAllocator::AllocateRegisters() { | |
2379 for (auto range : data()->live_ranges()) { | |
2380 if (range == nullptr) continue; | |
2381 if (range->Kind() == mode()) { | |
2382 DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled()); | |
2383 TRACE("Enqueueing live range %d to priority queue \n", range->id()); | |
2384 Enqueue(range); | |
2385 } | |
2386 } | |
2387 | |
2388 allocations_.resize(num_registers()); | |
2389 auto* zone = data()->allocation_zone(); | |
2390 for (int i = 0; i < num_registers(); i++) { | |
2391 allocations_[i] = new (zone) CoallescedLiveRanges(zone); | |
dcarney 2015/04/23 12:41:49
these should go in the local zone, which you have
2392 } | |
2393 | |
2394 for (auto* current : GetFixedRegisters(data(), mode())) { | |
2395 if (current != nullptr) { | |
2396 DCHECK_EQ(mode(), current->Kind()); | |
2397 int reg_nr = current->assigned_register(); | |
2398 bool inserted = allocations_[reg_nr]->Insert(current); | |
2399 CHECK(inserted); | |
2400 } | |
2401 } | |
2402 | |
2403 while (!queue_.empty()) { | |
2404 auto current_pair = queue_.top(); | |
2405 queue_.pop(); | |
2406 auto current = current_pair.second; | |
2407 if (HandleSpillOperands(current)) continue; | |
2408 ZoneSet<LiveRange*> conflicting(zone); | |
2409 if (!TryAllocate(current, &conflicting)) { | |
2410 DCHECK(!conflicting.empty()); | |
2411 float this_max = CalculateSpillWeight(current); | |
2412 float max_conflicting = CalculateMaxSpillWeight(conflicting); | |
2413 if (max_conflicting < this_max) { | |
2414 for (auto* conflict : conflicting) { | |
2415 Evict(conflict); | |
2416 Enqueue(conflict); | |
2417 } | |
2418 conflicting.clear(); | |
2419 bool allocated = TryAllocate(current, &conflicting); | |
2420 CHECK(allocated); | |
2421 DCHECK(conflicting.empty()); | |
2422 } else { | |
2423 DCHECK(!current->IsFixed() || current->CanBeSpilled(current->Start())); | |
2424 bool allocated = AllocateBlockedRange(current, conflicting); | |
2425 CHECK(allocated); | |
2426 } | |
2427 } | |
2428 } | |
2429 | |
2430 BitVector* assigned_registers = new (zone) BitVector(num_registers(), zone); | |
dcarney 2015/04/23 12:41:49
this is actually a bug, the assigned_registers should be allocated in the code zone, not the allocation zone
2431 for (size_t i = 0; i < allocations_.size(); ++i) { | |
2432 if (!allocations_[i]->IsEmpty()) { | |
2433 assigned_registers->Add(static_cast<int>(i)); | |
2434 } | |
2435 } | |
2436 data()->frame()->SetAllocatedRegisters(assigned_registers); | |
dcarney 2015/04/23 12:41:49
remove
2437 } | |
2438 | |
2439 | |
2440 bool GreedyAllocator::AllocateBlockedRange( | |
2441 LiveRange* current, const ZoneSet<LiveRange*>& conflicts) { | |
2442 auto register_use = current->NextRegisterPosition(current->Start()); | |
2443 if (register_use == nullptr) { | |
2444 // There is no use in the current live range that requires a register. | |
2445 // We can just spill it. | |
2446 Spill(current); | |
2447 return true; | |
2448 } | |
2449 | |
2450 auto second_part = SplitRangeAt(current, register_use->pos()); | |
2451 Spill(second_part); | |
2452 | |
2453 return true; | |
2454 } | |
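The eviction rule driving AllocateRegisters above reduces to a single comparison: the current range displaces its conflicts only when it is strictly heavier than every one of them; otherwise it is the one split at its first register use and partially spilled. A condensed sketch of that decision (ShouldEvict and Range are assumed names, not the CL's API):

    #include <algorithm>
    #include <set>

    // Sketch only: stands in for LiveRange plus CalculateSpillWeight().
    struct Range { float weight; };

    // True when `current` should evict all of `conflicts` and take the
    // register; ties favor the ranges already holding the register.
    bool ShouldEvict(const Range& current, const std::set<Range*>& conflicts) {
      float max_conflicting = 0.0f;
      for (Range* r : conflicts)
        max_conflicting = std::max(max_conflicting, r->weight);
      return max_conflicting < current.weight;
    }

In the CL this corresponds to the CalculateMaxSpillWeight(conflicting) < CalculateSpillWeight(current) test guarding the Evict/Enqueue loop.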
2455 | |
2456 | |
2122 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} | 2457 OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {} |
2123 | 2458 |
2124 | 2459 |
2125 void OperandAssigner::AssignSpillSlots() { | 2460 void OperandAssigner::AssignSpillSlots() { |
2126 auto& spill_ranges = data()->spill_ranges(); | 2461 auto& spill_ranges = data()->spill_ranges(); |
2127 // Merge disjoint spill ranges | 2462 // Merge disjoint spill ranges |
2128 for (size_t i = 0; i < spill_ranges.size(); i++) { | 2463 for (size_t i = 0; i < spill_ranges.size(); i++) { |
2129 auto range = spill_ranges[i]; | 2464 auto range = spill_ranges[i]; |
2130 if (range->IsEmpty()) continue; | 2465 if (range->IsEmpty()) continue; |
2131 for (size_t j = i + 1; j < spill_ranges.size(); j++) { | 2466 for (size_t j = i + 1; j < spill_ranges.size(); j++) { |
(...skipping 393 matching lines...)
2525 moves = it->first.first; | 2860 moves = it->first.first; |
2526 } | 2861 } |
2527 // Gather all MoveOperands for a single ParallelMove. | 2862 // Gather all MoveOperands for a single ParallelMove. |
2528 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); | 2863 auto move = new (code_zone()) MoveOperands(it->first.second, it->second); |
2529 auto eliminate = moves->PrepareInsertAfter(move); | 2864 auto eliminate = moves->PrepareInsertAfter(move); |
2530 to_insert.push_back(move); | 2865 to_insert.push_back(move); |
2531 if (eliminate != nullptr) to_eliminate.push_back(eliminate); | 2866 if (eliminate != nullptr) to_eliminate.push_back(eliminate); |
2532 } | 2867 } |
2533 } | 2868 } |
2534 | 2869 |
2870 | |
2535 } // namespace compiler | 2871 } // namespace compiler |
2536 } // namespace internal | 2872 } // namespace internal |
2537 } // namespace v8 | 2873 } // namespace v8 |