Chromium Code Reviews

Side by Side Diff: src/compiler/register-allocator.h

Issue 1311983002: [turbofan] Separate LiveRange and TopLevelLiveRange concepts (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 3 months ago
OLD | NEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_REGISTER_ALLOCATOR_H_ 5 #ifndef V8_REGISTER_ALLOCATOR_H_
6 #define V8_REGISTER_ALLOCATOR_H_ 6 #define V8_REGISTER_ALLOCATOR_H_
7 7
8 #include "src/compiler/instruction.h" 8 #include "src/compiler/instruction.h"
9 #include "src/ostreams.h" 9 #include "src/ostreams.h"
10 #include "src/zone-containers.h" 10 #include "src/zone-containers.h"
(...skipping 256 matching lines...)
267 UsePosition* next_; 267 UsePosition* next_;
268 LifetimePosition const pos_; 268 LifetimePosition const pos_;
269 uint32_t flags_; 269 uint32_t flags_;
270 270
271 DISALLOW_COPY_AND_ASSIGN(UsePosition); 271 DISALLOW_COPY_AND_ASSIGN(UsePosition);
272 }; 272 };
273 273
274 274
275 class SpillRange; 275 class SpillRange;
276 class RegisterAllocationData; 276 class RegisterAllocationData;
277 class TopLevelLiveRange;
277 278
278 // Representation of SSA values' live ranges as a collection of (continuous) 279 // Representation of SSA values' live ranges as a collection of (continuous)
279 // intervals over the instruction ordering. 280 // intervals over the instruction ordering.
280 class LiveRange final : public ZoneObject { 281 class LiveRange : public ZoneObject {
281 public: 282 public:
282 explicit LiveRange(int id, MachineType machine_type);
283
284 UseInterval* first_interval() const { return first_interval_; } 283 UseInterval* first_interval() const { return first_interval_; }
285 UsePosition* first_pos() const { return first_pos_; } 284 UsePosition* first_pos() const { return first_pos_; }
286 LiveRange* parent() const { return parent_; } 285 TopLevelLiveRange* TopLevel() { return top_level_; }
287 LiveRange* TopLevel() { return (parent_ == nullptr) ? this : parent_; } 286 const TopLevelLiveRange* TopLevel() const { return top_level_; }
288 const LiveRange* TopLevel() const { 287
289 return (parent_ == nullptr) ? this : parent_; 288 bool IsTopLevel() const;
290 } 289
291 LiveRange* next() const { return next_; } 290 LiveRange* next() const { return next_; }
292 bool IsChild() const { return parent() != nullptr; } 291
293 int id() const { return id_; } 292 int relative_id() const { return relative_id_; }
294 bool IsFixed() const { return id_ < 0; } 293
295 bool IsEmpty() const { return first_interval() == nullptr; } 294 bool IsEmpty() const { return first_interval() == nullptr; }
295
296 InstructionOperand GetAssignedOperand() const; 296 InstructionOperand GetAssignedOperand() const;
297 int spill_start_index() const { return spill_start_index_; }
298 297
299 MachineType machine_type() const { return MachineTypeField::decode(bits_); } 298 MachineType machine_type() const { return MachineTypeField::decode(bits_); }
300 299
301 int assigned_register() const { return AssignedRegisterField::decode(bits_); } 300 int assigned_register() const { return AssignedRegisterField::decode(bits_); }
302 bool HasRegisterAssigned() const { 301 bool HasRegisterAssigned() const {
303 return assigned_register() != kUnassignedRegister; 302 return assigned_register() != kUnassignedRegister;
304 } 303 }
305 void set_assigned_register(int reg); 304 void set_assigned_register(int reg);
306 void UnsetAssignedRegister(); 305 void UnsetAssignedRegister();
307 306
308 bool spilled() const { return SpilledField::decode(bits_); } 307 bool spilled() const { return SpilledField::decode(bits_); }
309 void Spill(); 308 void Spill();
310 309
311 RegisterKind kind() const; 310 RegisterKind kind() const;
312 311
313 // Correct only for parent.
314 bool is_phi() const { return IsPhiField::decode(bits_); }
315 void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }
316
317 // Correct only for parent.
318 bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
319 void set_is_non_loop_phi(bool value) {
320 bits_ = IsNonLoopPhiField::update(bits_, value);
321 }
322
323 // Relevant only for parent.
324 bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
325 void set_has_slot_use(bool value) {
326 bits_ = HasSlotUseField::update(bits_, value);
327 }
328
329 // Returns use position in this live range that follows both start 312 // Returns use position in this live range that follows both start
330 // and last processed use position. 313 // and last processed use position.
331 UsePosition* NextUsePosition(LifetimePosition start) const; 314 UsePosition* NextUsePosition(LifetimePosition start) const;
332 315
333 // Returns use position for which register is required in this live 316 // Returns use position for which register is required in this live
334 // range and which follows both start and last processed use position 317 // range and which follows both start and last processed use position
335 UsePosition* NextRegisterPosition(LifetimePosition start) const; 318 UsePosition* NextRegisterPosition(LifetimePosition start) const;
336 319
337 // Returns the first use position requiring stack slot, or nullptr. 320 // Returns the first use position requiring stack slot, or nullptr.
338 UsePosition* NextSlotPosition(LifetimePosition start) const; 321 UsePosition* NextSlotPosition(LifetimePosition start) const;
339 322
340 // Returns use position for which register is beneficial in this live 323 // Returns use position for which register is beneficial in this live
341 // range and which follows both start and last processed use position 324 // range and which follows both start and last processed use position
342 UsePosition* NextUsePositionRegisterIsBeneficial( 325 UsePosition* NextUsePositionRegisterIsBeneficial(
343 LifetimePosition start) const; 326 LifetimePosition start) const;
344 327
345 // Returns use position for which register is beneficial in this live 328 // Returns use position for which register is beneficial in this live
346 // range and which precedes start. 329 // range and which precedes start.
347 UsePosition* PreviousUsePositionRegisterIsBeneficial( 330 UsePosition* PreviousUsePositionRegisterIsBeneficial(
348 LifetimePosition start) const; 331 LifetimePosition start) const;
349 332
350 // Can this live range be spilled at this position. 333 // Can this live range be spilled at this position.
351 bool CanBeSpilled(LifetimePosition pos) const; 334 bool CanBeSpilled(LifetimePosition pos) const;
352 335
353 // Split this live range at the given position which must follow the start of 336 // Splitting primitive used by both splitting and splintering members.
354 // the range. 337 // Performs the split, but does not link the resulting ranges.
338 // The given position must follow the start of the range.
355 // All uses following the given position will be moved from this 339 // All uses following the given position will be moved from this
356 // live range to the result live range. 340 // live range to the result live range.
357 void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone); 341 // The current range will terminate at position, while result will start from
358 void Splinter(LifetimePosition start, LifetimePosition end, LiveRange* result, 342 // position.
359 Zone* zone); 343 void DetachAt(LifetimePosition position, LiveRange* result, Zone* zone);
360 void Merge(LiveRange* other, RegisterAllocationData* data); 344
345 // Detaches at position, and then links the resulting ranges. Returns the
346 // child, which starts at position.
347 LiveRange* SplitAt(LifetimePosition position, Zone* zone);
361 348
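A minimal standalone sketch of the split-at-position idea behind DetachAt/SplitAt above (simplified types, not part of this patch): the current range keeps everything before the split position, and a linked child takes everything from that position onward.

#include <memory>
#include <utility>
#include <vector>

struct SplitInterval { int start, end; };  // [start, end)

struct SimpleRange {
  std::vector<SplitInterval> intervals;  // sorted and disjoint
  std::unique_ptr<SimpleRange> next;     // child produced by the split

  // Ends this range at |pos| and links a child owning everything from |pos| on.
  SimpleRange* SplitAt(int pos) {
    std::unique_ptr<SimpleRange> child(new SimpleRange());
    std::vector<SplitInterval> kept;
    for (const SplitInterval& i : intervals) {
      if (i.end <= pos) {
        kept.push_back(i);                        // wholly before the split
      } else if (i.start >= pos) {
        child->intervals.push_back(i);            // wholly after the split
      } else {
        kept.push_back({i.start, pos});           // straddles the split: cut in two
        child->intervals.push_back({pos, i.end});
      }
    }
    intervals = std::move(kept);
    next = std::move(child);
    return next.get();
  }
};

The real DetachAt additionally moves the use positions past the split into the result and leaves linking to the caller, which is what lets Splinter reuse it without creating a parent/child edge.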
362 // Returns nullptr when no register is hinted, otherwise sets register_index. 349 // Returns nullptr when no register is hinted, otherwise sets register_index.
363 UsePosition* FirstHintPosition(int* register_index) const; 350 UsePosition* FirstHintPosition(int* register_index) const;
364 UsePosition* FirstHintPosition() const { 351 UsePosition* FirstHintPosition() const {
365 int register_index; 352 int register_index;
366 return FirstHintPosition(&register_index); 353 return FirstHintPosition(&register_index);
367 } 354 }
368 355
369 UsePosition* current_hint_position() const { 356 UsePosition* current_hint_position() const {
370 DCHECK(current_hint_position_ == FirstHintPosition()); 357 DCHECK(current_hint_position_ == FirstHintPosition());
371 return current_hint_position_; 358 return current_hint_position_;
372 } 359 }
373 360
374 LifetimePosition Start() const { 361 LifetimePosition Start() const {
375 DCHECK(!IsEmpty()); 362 DCHECK(!IsEmpty());
376 return first_interval()->start(); 363 return first_interval()->start();
377 } 364 }
378 365
379 LifetimePosition End() const { 366 LifetimePosition End() const {
380 DCHECK(!IsEmpty()); 367 DCHECK(!IsEmpty());
381 return last_interval_->end(); 368 return last_interval_->end();
382 } 369 }
383 370
371 bool ShouldBeAllocatedBefore(const LiveRange* other) const;
372 bool CanCover(LifetimePosition position) const;
373 bool Covers(LifetimePosition position) const;
374 LifetimePosition FirstIntersection(LiveRange* other) const;
375
376 void Verify() const;
377
378 void ConvertUsesToOperand(const InstructionOperand& op,
379 const InstructionOperand& spill_op);
380 void SetUseHints(int register_index);
381 void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
382
383 // Used solely by the Greedy Allocator:
384 unsigned GetSize();
385 float weight() const { return weight_; }
386 void set_weight(float weight) { weight_ = weight; }
387
388 static const int kInvalidSize = -1;
389 static const float kInvalidWeight;
390 static const float kMaxWeight;
391
392 private:
393 friend class TopLevelLiveRange;
394 explicit LiveRange(int relative_id, MachineType machine_type,
395 TopLevelLiveRange* top_level);
396
397 void AppendAsChild(TopLevelLiveRange* other);
398 void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);
399
400 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
401
402 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
403 void AdvanceLastProcessedMarker(UseInterval* to_start_of,
404 LifetimePosition but_not_past) const;
405
406 typedef BitField<bool, 0, 1> SpilledField;
407 typedef BitField<int32_t, 6, 6> AssignedRegisterField;
408 typedef BitField<MachineType, 12, 15> MachineTypeField;
409
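The three typedefs above pack independent fields into the single bits_ word. A minimal stand-in for v8's BitField<type, shift, size> helper (illustration only, assuming just the encode/decode/update operations the surrounding code calls):

#include <cstdint>

template <class T, int shift, int size>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << size) - 1) << shift;
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) << shift) & kMask;
  }
  static T decode(uint32_t bits) { return static_cast<T>((bits & kMask) >> shift); }
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~kMask) | encode(value);
  }
};

// Layout used by LiveRange: bit 0 = spilled, bits 6..11 = assigned register,
// bits 12..26 = machine type.
using SpilledSketch = BitFieldSketch<bool, 0, 1>;
using AssignedRegisterSketch = BitFieldSketch<int32_t, 6, 6>;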
410 int relative_id_;
411 uint32_t bits_;
412 UseInterval* last_interval_;
413 UseInterval* first_interval_;
414 UsePosition* first_pos_;
415 TopLevelLiveRange* top_level_;
416 LiveRange* next_;
417 // This is used as a cache, it doesn't affect correctness.
418 mutable UseInterval* current_interval_;
419 // This is used as a cache, it doesn't affect correctness.
420 mutable UsePosition* last_processed_use_;
421 // This is used as a cache, it's invalid outside of BuildLiveRanges.
422 mutable UsePosition* current_hint_position_;
423
424 // greedy: the number of LifetimePositions covered by this range. Used to
425 // prioritize selecting live ranges for register assignment, as well as
426 // in weight calculations.
427 int size_;
428
429 // greedy: a metric for resolving conflicts between ranges with an assigned
430 // register and ranges that intersect them and need a register.
431 float weight_;
432
433 DISALLOW_COPY_AND_ASSIGN(LiveRange);
434 };
435
436
437 class TopLevelLiveRange final : public LiveRange {
438 public:
439 explicit TopLevelLiveRange(int vreg, MachineType machine_type);
440 int spill_start_index() const { return spill_start_index_; }
441
442 bool IsFixed() const { return vreg_ < 0; }
443
444 bool is_phi() const { return IsPhiField::decode(bits_); }
445 void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }
446
447 bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
448 void set_is_non_loop_phi(bool value) {
449 bits_ = IsNonLoopPhiField::update(bits_, value);
450 }
451
452 bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
453 void set_has_slot_use(bool value) {
454 bits_ = HasSlotUseField::update(bits_, value);
455 }
456
457 // Add a new interval or a new use position to this live range.
458 void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
459 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
460 void AddUsePosition(UsePosition* pos);
461
462 // Shorten the most recently added interval by setting a new start.
463 void ShortenTo(LifetimePosition start);
464
465 // Detaches between start and end, and attributes the resulting range to
466 // result.
467 // The current range is pointed to as "splintered_from". No parent/child
468 // relationship is established between this and result.
469 void Splinter(LifetimePosition start, LifetimePosition end,
470 TopLevelLiveRange* result, Zone* zone);
471
472 // Assuming other was splintered from this range, embeds other and its
473 // children as part of the children sequence of this range.
474 void Merge(TopLevelLiveRange* other, RegisterAllocationData* data);
475
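A standalone sketch of the splintering idea (simplified interval type, not the v8 classes): intervals inside [start, end) move to the splinter, and partial overlaps leave their remainders behind in the host. Merge later embeds the splinter's pieces back as children of the original range.

#include <algorithm>
#include <vector>

struct Piece { int start, end; };  // [start, end)

// Moves the parts of |host| that fall inside [start, end) into |splinter|.
void SplinterInto(std::vector<Piece>* host, int start, int end,
                  std::vector<Piece>* splinter) {
  std::vector<Piece> kept;
  for (const Piece& p : *host) {
    if (p.end <= start || p.start >= end) {
      kept.push_back(p);  // entirely outside the splintered window
      continue;
    }
    if (p.start < start) kept.push_back({p.start, start});  // left remainder
    splinter->push_back({std::max(p.start, start), std::min(p.end, end)});
    if (p.end > end) kept.push_back({end, p.end});           // right remainder
  }
  *host = kept;
}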
476 // Spill range management.
477 void SetSpillRange(SpillRange* spill_range);
384 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange }; 478 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
479 void set_spill_type(SpillType value) {
480 bits_ = SpillTypeField::update(bits_, value);
481 }
385 SpillType spill_type() const { return SpillTypeField::decode(bits_); } 482 SpillType spill_type() const { return SpillTypeField::decode(bits_); }
386 InstructionOperand* GetSpillOperand() const { 483 InstructionOperand* GetSpillOperand() const {
387 DCHECK(spill_type() == SpillType::kSpillOperand); 484 DCHECK(spill_type() == SpillType::kSpillOperand);
388 return spill_operand_; 485 return spill_operand_;
389 } 486 }
390 487
391 SpillRange* GetAllocatedSpillRange() const { 488 SpillRange* GetAllocatedSpillRange() const {
392 DCHECK(spill_type() != SpillType::kSpillOperand); 489 DCHECK(spill_type() != SpillType::kSpillOperand);
393 return spill_range_; 490 return spill_range_;
394 } 491 }
395 492
396 SpillRange* GetSpillRange() const { 493 SpillRange* GetSpillRange() const {
397 DCHECK(spill_type() == SpillType::kSpillRange); 494 DCHECK(spill_type() == SpillType::kSpillRange);
398 return spill_range_; 495 return spill_range_;
399 } 496 }
400 bool HasNoSpillType() const { 497 bool HasNoSpillType() const {
401 return spill_type() == SpillType::kNoSpillType; 498 return spill_type() == SpillType::kNoSpillType;
402 } 499 }
403 bool HasSpillOperand() const { 500 bool HasSpillOperand() const {
404 return spill_type() == SpillType::kSpillOperand; 501 return spill_type() == SpillType::kSpillOperand;
405 } 502 }
406 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; } 503 bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }
407 bool MayRequireSpillRange() const {
408 DCHECK(!IsChild() && !IsSplinter());
409 return !HasSpillOperand() && spill_range_ == nullptr;
410 }
411 504
412 AllocatedOperand GetSpillRangeOperand() const; 505 AllocatedOperand GetSpillRangeOperand() const;
413 506
414 void SpillAtDefinition(Zone* zone, int gap_index, 507 void SpillAtDefinition(Zone* zone, int gap_index,
415 InstructionOperand* operand); 508 InstructionOperand* operand);
416 void SetSpillOperand(InstructionOperand* operand); 509 void SetSpillOperand(InstructionOperand* operand);
417 void SetSpillRange(SpillRange* spill_range); 510 void SetSpillStartIndex(int start) {
511 spill_start_index_ = Min(start, spill_start_index_);
512 }
513
514 void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);
418 void CommitSpillsAtDefinition(InstructionSequence* sequence, 515 void CommitSpillsAtDefinition(InstructionSequence* sequence,
419 const InstructionOperand& operand, 516 const InstructionOperand& operand,
420 bool might_be_duplicated); 517 bool might_be_duplicated);
421 // This must be applied on top level ranges. 518
422 // If all the children of this range are spilled in deferred blocks, and if 519 // If all the children of this range are spilled in deferred blocks, and if
423 // for any non-spilled child with a use position requiring a slot, that range 520 // for any non-spilled child with a use position requiring a slot, that range
424 // is contained in a deferred block, mark the range as 521 // is contained in a deferred block, mark the range as
425 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition, 522 // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
426 // and instead let the LiveRangeConnector perform the spills within the 523 // and instead let the LiveRangeConnector perform the spills within the
427 // deferred blocks. If so, we insert here spills for non-spilled ranges 524 // deferred blocks. If so, we insert here spills for non-spilled ranges
428 // with slot use positions. 525 // with slot use positions.
429 bool TryCommitSpillInDeferredBlock(InstructionSequence* code, 526 bool TryCommitSpillInDeferredBlock(InstructionSequence* code,
430 const InstructionOperand& spill_operand); 527 const InstructionOperand& spill_operand);
431 528
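One possible reading of the comment above, written as a standalone predicate over hypothetical per-child summaries (ChildInfo and its fields are illustrative, not v8 types):

#include <vector>

struct ChildInfo {
  bool spilled;
  bool has_slot_use;
  bool in_deferred_block;
};

bool SpilledOnlyInDeferredBlocks(const std::vector<ChildInfo>& children) {
  for (const ChildInfo& c : children) {
    // Every spilled child must be spilled inside a deferred block, and a
    // non-spilled child that still needs a stack slot must be deferred too.
    if (c.spilled && !c.in_deferred_block) return false;
    if (!c.spilled && c.has_slot_use && !c.in_deferred_block) return false;
  }
  return true;
}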
432 void SetSpillStartIndex(int start) { 529 TopLevelLiveRange* splintered_from() const { return splintered_from_; }
433 spill_start_index_ = Min(start, spill_start_index_); 530 bool IsSplinter() const { return splintered_from_ != nullptr; }
531 bool MayRequireSpillRange() const {
532 DCHECK(!IsSplinter());
533 return !HasSpillOperand() && spill_range_ == nullptr;
434 } 534 }
535 void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
536 int vreg() const { return vreg_; }
435 537
436 bool ShouldBeAllocatedBefore(const LiveRange* other) const; 538 int GetNextChildId() { return ++last_child_id_; }
437 bool CanCover(LifetimePosition position) const; 539 bool IsSpilledOnlyInDeferredBlocks() const {
438 bool Covers(LifetimePosition position) const; 540 return spilled_in_deferred_blocks_;
439 LifetimePosition FirstIntersection(LiveRange* other) const; 541 }
440
441 // Add a new interval or a new use position to this live range.
442 void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
443 void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
444 void AddUsePosition(UsePosition* pos);
445
446 // Shorten the most recently added interval by setting a new start.
447 void ShortenTo(LifetimePosition start);
448
449 void Verify() const;
450
451 void ConvertUsesToOperand(const InstructionOperand& op,
452 const InstructionOperand& spill_op);
453 void SetUseHints(int register_index);
454 void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
455 542
456 struct SpillAtDefinitionList; 543 struct SpillAtDefinitionList;
457 544
458 SpillAtDefinitionList* spills_at_definition() const { 545 SpillAtDefinitionList* spills_at_definition() const {
459 return spills_at_definition_; 546 return spills_at_definition_;
460 } 547 }
461 548
462 // Used solely by the Greedy Allocator:
463 unsigned GetSize();
464 float weight() const { return weight_; }
465 void set_weight(float weight) { weight_ = weight; }
466
467 bool IsSpilledOnlyInDeferredBlocks() const {
468 return spilled_in_deferred_block_;
469 }
470
471 static const int kInvalidSize = -1;
472 static const float kInvalidWeight;
473 static const float kMaxWeight;
474
475 LiveRange* splintered_from() const {
476 DCHECK(!IsChild());
477 return splintered_from_;
478 }
479 bool IsSplinter() const {
480 DCHECK(!IsChild());
481 return splintered_from_ != nullptr;
482 }
483
484 void set_spill_type(SpillType value) {
485 bits_ = SpillTypeField::update(bits_, value);
486 }
487
488 private: 549 private:
489 void AppendChild(LiveRange* other);
490 void UpdateParentForAllChildren(LiveRange* new_parent);
491 void UpdateSpillRangePostMerge(LiveRange* merged);
492
493 void SetSplinteredFrom(LiveRange* splinter_parent);
494
495
496 void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }
497
498 UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
499 void AdvanceLastProcessedMarker(UseInterval* to_start_of,
500 LifetimePosition but_not_past) const;
501
502 LiveRange* GetLastChild();
503
504 typedef BitField<bool, 0, 1> SpilledField;
505 typedef BitField<bool, 1, 1> HasSlotUseField; 550 typedef BitField<bool, 1, 1> HasSlotUseField;
506 typedef BitField<bool, 2, 1> IsPhiField; 551 typedef BitField<bool, 2, 1> IsPhiField;
507 typedef BitField<bool, 3, 1> IsNonLoopPhiField; 552 typedef BitField<bool, 3, 1> IsNonLoopPhiField;
508 typedef BitField<SpillType, 4, 2> SpillTypeField; 553 typedef BitField<SpillType, 4, 2> SpillTypeField;
509 typedef BitField<int32_t, 6, 6> AssignedRegisterField;
510 typedef BitField<MachineType, 12, 15> MachineTypeField;
511 554
512 int id_; 555 LiveRange* GetLastChild();
513 int spill_start_index_; 556
514 uint32_t bits_; 557 int vreg_;
515 UseInterval* last_interval_; 558 int last_child_id_;
516 UseInterval* first_interval_; 559 TopLevelLiveRange* splintered_from_;
517 UsePosition* first_pos_;
518 LiveRange* parent_;
519 LiveRange* next_;
520 LiveRange* splintered_from_;
521 union { 560 union {
522 // Correct value determined by spill_type() 561 // Correct value determined by spill_type()
523 InstructionOperand* spill_operand_; 562 InstructionOperand* spill_operand_;
524 SpillRange* spill_range_; 563 SpillRange* spill_range_;
525 }; 564 };
526 SpillAtDefinitionList* spills_at_definition_; 565 SpillAtDefinitionList* spills_at_definition_;
527 // This is used as a cache, it doesn't affect correctness.
528 mutable UseInterval* current_interval_;
529 // This is used as a cache, it doesn't affect correctness.
530 mutable UsePosition* last_processed_use_;
531 // This is used as a cache, it's invalid outside of BuildLiveRanges.
532 mutable UsePosition* current_hint_position_;
533
534 // greedy: the number of LifetimePositions covered by this range. Used to
535 // prioritize selecting live ranges for register assignment, as well as
536 // in weight calculations.
537 int size_;
538
539 // greedy: a metric for resolving conflicts between ranges with an assigned
540 // register and ranges that intersect them and need a register.
541 float weight_;
542
543 // TODO(mtrofin): generalize spilling after definition, currently specialized 566 // TODO(mtrofin): generalize spilling after definition, currently specialized
544 // just for spill in a single deferred block. 567 // just for spill in a single deferred block.
545 bool spilled_in_deferred_block_; 568 bool spilled_in_deferred_blocks_;
546 DISALLOW_COPY_AND_ASSIGN(LiveRange); 569 int spill_start_index_;
570
571 DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
547 }; 572 };
548 573
549 574
550 struct PrintableLiveRange { 575 struct PrintableLiveRange {
551 const RegisterConfiguration* register_configuration_; 576 const RegisterConfiguration* register_configuration_;
552 const LiveRange* range_; 577 const LiveRange* range_;
553 }; 578 };
554 579
555 580
556 std::ostream& operator<<(std::ostream& os, 581 std::ostream& operator<<(std::ostream& os,
557 const PrintableLiveRange& printable_range); 582 const PrintableLiveRange& printable_range);
558 583
559 584
560 class SpillRange final : public ZoneObject { 585 class SpillRange final : public ZoneObject {
561 public: 586 public:
562 static const int kUnassignedSlot = -1; 587 static const int kUnassignedSlot = -1;
563 SpillRange(LiveRange* range, Zone* zone); 588 SpillRange(TopLevelLiveRange* range, Zone* zone);
564 589
565 UseInterval* interval() const { return use_interval_; } 590 UseInterval* interval() const { return use_interval_; }
566 // Currently, only 4 or 8 byte slots are supported. 591 // Currently, only 4 or 8 byte slots are supported.
567 int ByteWidth() const; 592 int ByteWidth() const;
568 bool IsEmpty() const { return live_ranges_.empty(); } 593 bool IsEmpty() const { return live_ranges_.empty(); }
569 bool TryMerge(SpillRange* other); 594 bool TryMerge(SpillRange* other);
570 595
571 void set_assigned_slot(int index) { 596 void set_assigned_slot(int index) {
572 DCHECK_EQ(kUnassignedSlot, assigned_slot_); 597 DCHECK_EQ(kUnassignedSlot, assigned_slot_);
573 assigned_slot_ = index; 598 assigned_slot_ = index;
574 } 599 }
575 int assigned_slot() { 600 int assigned_slot() {
576 DCHECK_NE(kUnassignedSlot, assigned_slot_); 601 DCHECK_NE(kUnassignedSlot, assigned_slot_);
577 return assigned_slot_; 602 return assigned_slot_;
578 } 603 }
579 const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } 604 const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
580 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } 605 return live_ranges_;
606 }
607 ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
581 int byte_width() const { return byte_width_; } 608 int byte_width() const { return byte_width_; }
582 RegisterKind kind() const { return kind_; } 609 RegisterKind kind() const { return kind_; }
583 610
584 private: 611 private:
585 LifetimePosition End() const { return end_position_; } 612 LifetimePosition End() const { return end_position_; }
586 bool IsIntersectingWith(SpillRange* other) const; 613 bool IsIntersectingWith(SpillRange* other) const;
587 // Merge intervals, making sure the use intervals are sorted 614 // Merge intervals, making sure the use intervals are sorted
588 void MergeDisjointIntervals(UseInterval* other); 615 void MergeDisjointIntervals(UseInterval* other);
589 616
590 ZoneVector<LiveRange*> live_ranges_; 617 ZoneVector<TopLevelLiveRange*> live_ranges_;
591 UseInterval* use_interval_; 618 UseInterval* use_interval_;
592 LifetimePosition end_position_; 619 LifetimePosition end_position_;
593 int assigned_slot_; 620 int assigned_slot_;
594 int byte_width_; 621 int byte_width_;
595 RegisterKind kind_; 622 RegisterKind kind_;
596 623
597 DISALLOW_COPY_AND_ASSIGN(SpillRange); 624 DISALLOW_COPY_AND_ASSIGN(SpillRange);
598 }; 625 };
599 626
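A standalone sketch of merging two already-sorted lists of disjoint intervals, in the spirit of MergeDisjointIntervals (simplified Span type rather than the UseInterval chain):

#include <cstddef>
#include <vector>

struct Span { int start, end; };  // [start, end); disjoint within each input

std::vector<Span> MergeDisjoint(const std::vector<Span>& a,
                                const std::vector<Span>& b) {
  std::vector<Span> out;
  std::size_t i = 0, j = 0;
  while (i < a.size() || j < b.size()) {
    // Take whichever remaining interval starts first.
    if (j == b.size() || (i < a.size() && a[i].start <= b[j].start)) {
      out.push_back(a[i++]);
    } else {
      out.push_back(b[j++]);
    }
  }
  return out;
}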
600 627
(...skipping 29 matching lines...)
630 ReferenceMap* map; 657 ReferenceMap* map;
631 InstructionOperand* operand; 658 InstructionOperand* operand;
632 }; 659 };
633 typedef ZoneVector<DelayedReference> DelayedReferences; 660 typedef ZoneVector<DelayedReference> DelayedReferences;
634 661
635 RegisterAllocationData(const RegisterConfiguration* config, 662 RegisterAllocationData(const RegisterConfiguration* config,
636 Zone* allocation_zone, Frame* frame, 663 Zone* allocation_zone, Frame* frame,
637 InstructionSequence* code, 664 InstructionSequence* code,
638 const char* debug_name = nullptr); 665 const char* debug_name = nullptr);
639 666
640 const ZoneVector<LiveRange*>& live_ranges() const { return live_ranges_; } 667 const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
641 ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; } 668 return live_ranges_;
642 const ZoneVector<LiveRange*>& fixed_live_ranges() const { 669 }
670 ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
671 const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
643 return fixed_live_ranges_; 672 return fixed_live_ranges_;
644 } 673 }
645 ZoneVector<LiveRange*>& fixed_live_ranges() { return fixed_live_ranges_; } 674 ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
646 ZoneVector<LiveRange*>& fixed_double_live_ranges() { 675 return fixed_live_ranges_;
676 }
677 ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
647 return fixed_double_live_ranges_; 678 return fixed_double_live_ranges_;
648 } 679 }
649 const ZoneVector<LiveRange*>& fixed_double_live_ranges() const { 680 const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
650 return fixed_double_live_ranges_; 681 return fixed_double_live_ranges_;
651 } 682 }
652 ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; } 683 ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
653 ZoneSet<SpillRange*>& spill_ranges() { return spill_ranges_; } 684 ZoneSet<SpillRange*>& spill_ranges() { return spill_ranges_; }
654 DelayedReferences& delayed_references() { return delayed_references_; } 685 DelayedReferences& delayed_references() { return delayed_references_; }
655 InstructionSequence* code() const { return code_; } 686 InstructionSequence* code() const { return code_; }
656 // This zone is for datastructures only needed during register allocation 687 // This zone is for datastructures only needed during register allocation
657 // phases. 688 // phases.
658 Zone* allocation_zone() const { return allocation_zone_; } 689 Zone* allocation_zone() const { return allocation_zone_; }
659 // This zone is for InstructionOperands and moves that live beyond register 690 // This zone is for InstructionOperands and moves that live beyond register
660 // allocation. 691 // allocation.
661 Zone* code_zone() const { return code()->zone(); } 692 Zone* code_zone() const { return code()->zone(); }
662 Frame* frame() const { return frame_; } 693 Frame* frame() const { return frame_; }
663 const char* debug_name() const { return debug_name_; } 694 const char* debug_name() const { return debug_name_; }
664 const RegisterConfiguration* config() const { return config_; } 695 const RegisterConfiguration* config() const { return config_; }
665 696
666 MachineType MachineTypeFor(int virtual_register); 697 MachineType MachineTypeFor(int virtual_register);
667 698
668 LiveRange* LiveRangeFor(int index); 699 TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
669 // Creates a new live range. 700 // Creates a new live range.
670 LiveRange* NewLiveRange(int index, MachineType machine_type); 701 TopLevelLiveRange* NewLiveRange(int index, MachineType machine_type);
671 LiveRange* NextLiveRange(MachineType machine_type); 702 TopLevelLiveRange* NextLiveRange(MachineType machine_type);
672 LiveRange* NewChildRangeFor(LiveRange* range);
673 703
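A standalone sketch of the get-or-create pattern suggested by GetOrCreateLiveRangeFor, assuming (purely for illustration) a table indexed by virtual register; RangeStub is hypothetical and zone ownership is elided:

#include <vector>

struct RangeStub { int vreg; };

RangeStub* GetOrCreate(std::vector<RangeStub*>* ranges, int vreg) {
  if (vreg >= static_cast<int>(ranges->size())) {
    ranges->resize(vreg + 1, nullptr);  // grow the table on demand
  }
  if ((*ranges)[vreg] == nullptr) {
    (*ranges)[vreg] = new RangeStub{vreg};  // first request creates the range
  }
  return (*ranges)[vreg];
}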
674 SpillRange* AssignSpillRangeToLiveRange(LiveRange* range); 704 SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
675 SpillRange* CreateSpillRangeForLiveRange(LiveRange* range); 705 SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);
676 706
677 MoveOperands* AddGapMove(int index, Instruction::GapPosition position, 707 MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
678 const InstructionOperand& from, 708 const InstructionOperand& from,
679 const InstructionOperand& to); 709 const InstructionOperand& to);
680 710
681 bool IsReference(int virtual_register) const { 711 bool IsReference(TopLevelLiveRange* top_range) const {
682 return code()->IsReference(virtual_register); 712 return code()->IsReference(top_range->vreg());
683 } 713 }
684 714
685 bool ExistsUseWithoutDefinition(); 715 bool ExistsUseWithoutDefinition();
686 716
687 void MarkAllocated(RegisterKind kind, int index); 717 void MarkAllocated(RegisterKind kind, int index);
688 718
689 PhiMapValue* InitializePhiMap(const InstructionBlock* block, 719 PhiMapValue* InitializePhiMap(const InstructionBlock* block,
690 PhiInstruction* phi); 720 PhiInstruction* phi);
721 PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
691 PhiMapValue* GetPhiMapValueFor(int virtual_register); 722 PhiMapValue* GetPhiMapValueFor(int virtual_register);
692 bool IsBlockBoundary(LifetimePosition pos) const; 723 bool IsBlockBoundary(LifetimePosition pos) const;
693 724
694 void Print(const InstructionSequence* instructionSequence); 725 void Print(const InstructionSequence* instructionSequence);
695 void Print(const Instruction* instruction); 726 void Print(const Instruction* instruction);
696 void Print(const LiveRange* range, bool with_children = false); 727 void Print(const LiveRange* range, bool with_children = false);
697 void Print(const InstructionOperand& op); 728 void Print(const InstructionOperand& op);
698 void Print(const MoveOperands* move); 729 void Print(const MoveOperands* move);
699 void Print(const SpillRange* spill_range); 730 void Print(const SpillRange* spill_range);
700 731
701 private: 732 private:
733 int GetNextLiveRangeId();
734
702 Zone* const allocation_zone_; 735 Zone* const allocation_zone_;
703 Frame* const frame_; 736 Frame* const frame_;
704 InstructionSequence* const code_; 737 InstructionSequence* const code_;
705 const char* const debug_name_; 738 const char* const debug_name_;
706 const RegisterConfiguration* const config_; 739 const RegisterConfiguration* const config_;
707 PhiMap phi_map_; 740 PhiMap phi_map_;
708 ZoneVector<BitVector*> live_in_sets_; 741 ZoneVector<BitVector*> live_in_sets_;
709 ZoneVector<LiveRange*> live_ranges_; 742 ZoneVector<TopLevelLiveRange*> live_ranges_;
710 ZoneVector<LiveRange*> fixed_live_ranges_; 743 ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
711 ZoneVector<LiveRange*> fixed_double_live_ranges_; 744 ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
712 ZoneSet<SpillRange*> spill_ranges_; 745 ZoneSet<SpillRange*> spill_ranges_;
713 DelayedReferences delayed_references_; 746 DelayedReferences delayed_references_;
714 BitVector* assigned_registers_; 747 BitVector* assigned_registers_;
715 BitVector* assigned_double_registers_; 748 BitVector* assigned_double_registers_;
716 int virtual_register_count_; 749 int virtual_register_count_;
717 750
718 DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData); 751 DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
719 }; 752 };
720 753
721 754
722 class ConstraintBuilder final : public ZoneObject { 755 class ConstraintBuilder final : public ZoneObject {
723 public: 756 public:
724 explicit ConstraintBuilder(RegisterAllocationData* data); 757 explicit ConstraintBuilder(RegisterAllocationData* data);
725 758
726 // Phase 1 : insert moves to account for fixed register operands. 759 // Phase 1 : insert moves to account for fixed register operands.
727 void MeetRegisterConstraints(); 760 void MeetRegisterConstraints();
728 761
729 // Phase 2: deconstruct SSA by inserting moves in successors and the headers 762 // Phase 2: deconstruct SSA by inserting moves in successors and the headers
730 // of blocks containing phis. 763 // of blocks containing phis.
731 void ResolvePhis(); 764 void ResolvePhis();
732 765
733 private: 766 private:
734 RegisterAllocationData* data() const { return data_; } 767 RegisterAllocationData* data() const { return data_; }
735 InstructionSequence* code() const { return data()->code(); } 768 InstructionSequence* code() const { return data()->code(); }
736 Zone* allocation_zone() const { return data()->allocation_zone(); } 769 Zone* allocation_zone() const { return data()->allocation_zone(); }
737 770
738 Instruction* InstructionAt(int index) { return code()->InstructionAt(index); }
739 bool IsReference(int virtual_register) const {
740 return data()->IsReference(virtual_register);
741 }
742 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); }
743
744 InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos, 771 InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
745 bool is_tagged); 772 bool is_tagged);
746 void MeetRegisterConstraints(const InstructionBlock* block); 773 void MeetRegisterConstraints(const InstructionBlock* block);
747 void MeetConstraintsBefore(int index); 774 void MeetConstraintsBefore(int index);
748 void MeetConstraintsAfter(int index); 775 void MeetConstraintsAfter(int index);
749 void MeetRegisterConstraintsForLastInstructionInBlock( 776 void MeetRegisterConstraintsForLastInstructionInBlock(
750 const InstructionBlock* block); 777 const InstructionBlock* block);
751 void ResolvePhis(const InstructionBlock* block); 778 void ResolvePhis(const InstructionBlock* block);
752 779
753 RegisterAllocationData* const data_; 780 RegisterAllocationData* const data_;
(...skipping 14 matching lines...)
768 private: 795 private:
769 RegisterAllocationData* data() const { return data_; } 796 RegisterAllocationData* data() const { return data_; }
770 InstructionSequence* code() const { return data()->code(); } 797 InstructionSequence* code() const { return data()->code(); }
771 Zone* allocation_zone() const { return data()->allocation_zone(); } 798 Zone* allocation_zone() const { return data()->allocation_zone(); }
772 Zone* code_zone() const { return code()->zone(); } 799 Zone* code_zone() const { return code()->zone(); }
773 const RegisterConfiguration* config() const { return data()->config(); } 800 const RegisterConfiguration* config() const { return data()->config(); }
774 ZoneVector<BitVector*>& live_in_sets() const { 801 ZoneVector<BitVector*>& live_in_sets() const {
775 return data()->live_in_sets(); 802 return data()->live_in_sets();
776 } 803 }
777 804
778 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); }
779
780 void Verify() const; 805 void Verify() const;
781 806
782 // Liveness analysis support. 807 // Liveness analysis support.
783 void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out); 808 void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
784 void ProcessInstructions(const InstructionBlock* block, BitVector* live); 809 void ProcessInstructions(const InstructionBlock* block, BitVector* live);
785 void ProcessPhis(const InstructionBlock* block, BitVector* live); 810 void ProcessPhis(const InstructionBlock* block, BitVector* live);
786 void ProcessLoopHeader(const InstructionBlock* block, BitVector* live); 811 void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);
787 812
788 static int FixedLiveRangeID(int index) { return -index - 1; } 813 static int FixedLiveRangeID(int index) { return -index - 1; }
789 int FixedDoubleLiveRangeID(int index); 814 int FixedDoubleLiveRangeID(int index);
790 LiveRange* FixedLiveRangeFor(int index); 815 TopLevelLiveRange* FixedLiveRangeFor(int index);
791 LiveRange* FixedDoubleLiveRangeFor(int index); 816 TopLevelLiveRange* FixedDoubleLiveRangeFor(int index);
792 817
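A worked example of the FixedLiveRangeID encoding above: fixed registers map to negative ids, which is exactly what TopLevelLiveRange::IsFixed() (vreg_ < 0) keys off.

constexpr int FixedId(int index) { return -index - 1; }
static_assert(FixedId(0) == -1, "register 0 gets live range id -1");
static_assert(FixedId(3) == -4, "ids stay negative, so vreg_ < 0 means fixed");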
793 void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos); 818 void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
794 void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos); 819 void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);
795 820
796 UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand, 821 UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
797 void* hint, UsePositionHintType hint_type); 822 void* hint, UsePositionHintType hint_type);
798 UsePosition* NewUsePosition(LifetimePosition pos) { 823 UsePosition* NewUsePosition(LifetimePosition pos) {
799 return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone); 824 return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
800 } 825 }
801 LiveRange* LiveRangeFor(InstructionOperand* operand); 826 TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
802 // Helper methods for building intervals. 827 // Helper methods for building intervals.
803 UsePosition* Define(LifetimePosition position, InstructionOperand* operand, 828 UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
804 void* hint, UsePositionHintType hint_type); 829 void* hint, UsePositionHintType hint_type);
805 void Define(LifetimePosition position, InstructionOperand* operand) { 830 void Define(LifetimePosition position, InstructionOperand* operand) {
806 Define(position, operand, nullptr, UsePositionHintType::kNone); 831 Define(position, operand, nullptr, UsePositionHintType::kNone);
807 } 832 }
808 UsePosition* Use(LifetimePosition block_start, LifetimePosition position, 833 UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
809 InstructionOperand* operand, void* hint, 834 InstructionOperand* operand, void* hint,
810 UsePositionHintType hint_type); 835 UsePositionHintType hint_type);
811 void Use(LifetimePosition block_start, LifetimePosition position, 836 void Use(LifetimePosition block_start, LifetimePosition position,
(...skipping 13 matching lines...)
825 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind); 850 explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);
826 851
827 protected: 852 protected:
828 RegisterAllocationData* data() const { return data_; } 853 RegisterAllocationData* data() const { return data_; }
829 InstructionSequence* code() const { return data()->code(); } 854 InstructionSequence* code() const { return data()->code(); }
830 RegisterKind mode() const { return mode_; } 855 RegisterKind mode() const { return mode_; }
831 int num_registers() const { return num_registers_; } 856 int num_registers() const { return num_registers_; }
832 857
833 Zone* allocation_zone() const { return data()->allocation_zone(); } 858 Zone* allocation_zone() const { return data()->allocation_zone(); }
834 859
835 LiveRange* LiveRangeFor(int index) { return data()->LiveRangeFor(index); }
836
837 // Split the given range at the given position. 860 // Split the given range at the given position.
838 // If range starts at or after the given position then the 861 // If range starts at or after the given position then the
839 // original range is returned. 862 // original range is returned.
840 // Otherwise returns the live range that starts at pos and contains 863 // Otherwise returns the live range that starts at pos and contains
841 // all uses from the original range that follow pos. Uses at pos will 864 // all uses from the original range that follow pos. Uses at pos will
842 // still be owned by the original range after splitting. 865 // still be owned by the original range after splitting.
843 LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos); 866 LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
844 867
845 // Split the given range in a position from the interval [start, end]. 868 // Split the given range in a position from the interval [start, end].
846 LiveRange* SplitBetween(LiveRange* range, LifetimePosition start, 869 LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
847 LifetimePosition end); 870 LifetimePosition end);
848 871
849 // Find a lifetime position in the interval [start, end] which 872 // Find a lifetime position in the interval [start, end] which
850 // is optimal for splitting: it is either header of the outermost 873 // is optimal for splitting: it is either header of the outermost
851 // loop covered by this interval or the latest possible position. 874 // loop covered by this interval or the latest possible position.
852 LifetimePosition FindOptimalSplitPos(LifetimePosition start, 875 LifetimePosition FindOptimalSplitPos(LifetimePosition start,
853 LifetimePosition end); 876 LifetimePosition end);
854 877
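A standalone sketch of the split-position heuristic described above, with a simplified loop model (LoopHeader is illustrative, not a v8 type): prefer the header of the outermost loop inside [start, end], otherwise the latest possible position.

#include <climits>
#include <vector>

struct LoopHeader { int position; int depth; };  // smaller depth = more outer

int OptimalSplitPos(int start, int end, const std::vector<LoopHeader>& headers) {
  int best_pos = end;  // fall back to the latest possible position
  int best_depth = INT_MAX;
  for (const LoopHeader& h : headers) {
    if (h.position < start || h.position > end) continue;  // outside [start, end]
    if (h.depth < best_depth) {
      best_depth = h.depth;
      best_pos = h.position;  // outermost header seen so far
    }
  }
  return best_pos;
}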
855 void Spill(LiveRange* range); 878 void Spill(LiveRange* range);
856 879
857 // If we are trying to spill a range inside the loop try to 880 // If we are trying to spill a range inside the loop try to
858 // hoist spill position out to the point just before the loop. 881 // hoist spill position out to the point just before the loop.
859 LifetimePosition FindOptimalSpillingPos(LiveRange* range, 882 LifetimePosition FindOptimalSpillingPos(LiveRange* range,
860 LifetimePosition pos); 883 LifetimePosition pos);
861 884
862 const ZoneVector<LiveRange*>& GetFixedRegisters() const; 885 const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
863 const char* RegisterName(int allocation_index) const; 886 const char* RegisterName(int allocation_index) const;
864 887
865 private: 888 private:
866 RegisterAllocationData* const data_; 889 RegisterAllocationData* const data_;
867 const RegisterKind mode_; 890 const RegisterKind mode_;
868 const int num_registers_; 891 const int num_registers_;
869 892
870 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator); 893 DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
871 }; 894 };
872 895
(...skipping 23 matching lines...)
896 void AddToUnhandledSorted(LiveRange* range); 919 void AddToUnhandledSorted(LiveRange* range);
897 void AddToUnhandledUnsorted(LiveRange* range); 920 void AddToUnhandledUnsorted(LiveRange* range);
898 void SortUnhandled(); 921 void SortUnhandled();
899 bool UnhandledIsSorted(); 922 bool UnhandledIsSorted();
900 void ActiveToHandled(LiveRange* range); 923 void ActiveToHandled(LiveRange* range);
901 void ActiveToInactive(LiveRange* range); 924 void ActiveToInactive(LiveRange* range);
902 void InactiveToHandled(LiveRange* range); 925 void InactiveToHandled(LiveRange* range);
903 void InactiveToActive(LiveRange* range); 926 void InactiveToActive(LiveRange* range);
904 927
905 // Helper methods for allocating registers. 928 // Helper methods for allocating registers.
906 bool TryReuseSpillForPhi(LiveRange* range); 929 bool TryReuseSpillForPhi(TopLevelLiveRange* range);
907 bool TryAllocateFreeReg(LiveRange* range); 930 bool TryAllocateFreeReg(LiveRange* range);
908 void AllocateBlockedReg(LiveRange* range); 931 void AllocateBlockedReg(LiveRange* range);
909 932
910 // Spill the given life range after position pos. 933 // Spill the given life range after position pos.
911 void SpillAfter(LiveRange* range, LifetimePosition pos); 934 void SpillAfter(LiveRange* range, LifetimePosition pos);
912 935
913 // Spill the given life range after position [start] and up to position [end]. 936 // Spill the given life range after position [start] and up to position [end].
914 void SpillBetween(LiveRange* range, LifetimePosition start, 937 void SpillBetween(LiveRange* range, LifetimePosition start,
915 LifetimePosition end); 938 LifetimePosition end);
916 939
(...skipping 103 matching lines...)
1020 RegisterAllocationData* const data_; 1043 RegisterAllocationData* const data_;
1021 1044
1022 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector); 1045 DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
1023 }; 1046 };
1024 1047
1025 } // namespace compiler 1048 } // namespace compiler
1026 } // namespace internal 1049 } // namespace internal
1027 } // namespace v8 1050 } // namespace v8
1028 1051
1029 #endif // V8_REGISTER_ALLOCATOR_H_ 1052 #endif // V8_REGISTER_ALLOCATOR_H_