// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_REGISTER_ALLOCATOR_H_
#define V8_COMPILER_REGISTER_ALLOCATOR_H_

#include "src/base/bits.h"
#include "src/base/compiler-specific.h"
#include "src/compiler/instruction.h"
#include "src/globals.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//   [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//   [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
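// Concretely, with kHalfStep == 2 and kStep == 4 (defined below), the four
// positions for instruction index i are encoded as consecutive integers:
//
//   gap START         = 4 * i
//   gap END           = 4 * i + 1
//   instruction START = 4 * i + 2
//   instruction END   = 4 * i + 3
//
// so even values are START positions and values with bit 1 cleared are gap
// positions.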
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

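  // Returns true if there exists a gap position strictly between pos1 and
  // pos2 (the arguments may be given in either order).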
  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK_LE(kHalfStep, value_);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  static_assert(base::bits::IsPowerOfTwo(kHalfStep),
                "Code relies on kStep and kHalfStep being a power of two");

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);


// Representation of the non-empty interval [start,end[.
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with other, return the smallest position
  // that belongs to both of them.
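  // For example, intervals [4, 12) and [8, 20) intersect at position 8,
  // while [4, 8) and [8, 20) do not intersect and Invalid() is returned.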
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  DISALLOW_COPY_AND_ASSIGN(UseInterval);
};

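// How a use position constrains the location of the value at that point:
// any register or spill slot, additionally allowing a constant, a register
// specifically, or a spill slot specifically.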
enum class UsePositionType : uint8_t {
  kRegisterOrSlot,
  kRegisterOrSlotOrConstant,
  kRequiresRegister,
  kRequiresSlot
};

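// Where a register hint for a use position may come from: nowhere, another
// instruction operand, another use position, a phi, or a hint that has not
// been resolved yet.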
enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};

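// Sentinel register code meaning "no register assigned". It is one past the
// largest valid general register code, so it can never collide with a real
// assignment.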
static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;

static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
              "kUnassignedRegister too small");

// Representation of a use position.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};


class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeGroup;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
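// A live range owns two singly linked lists, both ordered by position: its
// use intervals (reachable from first_interval()) and its use positions
// (reachable from first_pos()). Children split off the same virtual register
// are chained through next(). An illustrative traversal (sketch only):
//
//   for (UseInterval* i = range->first_interval(); i != nullptr;
//        i = i->next()) { ... }
//   for (UsePosition* p = range->first_pos(); p != nullptr; p = p->next()) {
//     ...
//   }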
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the lifetime position for which a register is beneficial in this
  // live range and which follows both start and the last processed use
  // position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start from
  // position.
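  // For example, detaching a range that covers [8, 24) at position 16 leaves
  // this range covering [8, 16) and result covering [16, 24), with the uses
  // that follow 16 moved over to result.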
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};


class LiveRangeGroup final : public ZoneObject {
 public:
  explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
  ZoneVector<LiveRange*>& ranges() { return ranges_; }
  const ZoneVector<LiveRange*>& ranges() const { return ranges_; }

  int assigned_register() const { return assigned_register_; }
  void set_assigned_register(int reg) { assigned_register_ = reg; }

 private:
  ZoneVector<LiveRange*> ranges_;
  int assigned_register_;
  DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
};

class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_EQ(SpillType::kSpillRange, spill_type());
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // every non-spilled child with a use position requiring a slot is itself
  // contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks. That way we avoid spilling at definition
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, spills for non-spilled ranges with slot use
  // positions are inserted here.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};


struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);


class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};

class RegisterAllocationData final : public ZoneObject {
 public:
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};

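// Register allocation proceeds in the numbered phases referenced by the
// classes below:
//   Phase 1: ConstraintBuilder::MeetRegisterConstraints
//   Phase 2: ConstraintBuilder::ResolvePhis
//   Phase 3: LiveRangeBuilder::BuildLiveRanges
//   Phase 4: LinearScanAllocator::AllocateRegisters
//   Phase 5: OperandAssigner::AssignSpillSlots
//   Phase 6: OperandAssigner::CommitAssignment
//   Phase 7: ReferenceMapPopulator::PopulateReferenceMaps
//   Phase 8: LiveRangeConnector::ConnectRanges
//   Phase 9: LiveRangeConnector::ResolveControlFlow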
class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};


class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};


class RegisterAllocator : public ZoneObject {
 public:
  RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  const int* allocatable_register_codes() const {
    return allocatable_register_codes_;
  }
  // Returns true iff we must check float register aliasing.
  bool check_fp_aliasing() const { return check_fp_aliasing_; }

  // TODO(mtrofin): explain why splitting in gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand();

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);

  // If we are trying to spill a range inside a loop, try to hoist the spill
  // position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;
  bool check_fp_aliasing_;

 private:
  bool no_combining_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};


class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
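  // Bookkeeping lists in the style of the classic linear-scan algorithm:
  // unhandled ranges have not been processed yet, active ranges hold a
  // register and are live at the current position, and inactive ranges hold
  // a register but have a lifetime hole at the current position.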
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current);
  void AllocateBlockedReg(LiveRange* range);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. Range is guaranteed to be spilled at least until position [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};


class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};


class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};


class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};


class LiveRangeBoundArray;
// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_REGISTER_ALLOCATOR_H_