1 /*
2  * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4  *
5  * This code is free software; you can redistribute it and/or modify it
6  * under the terms of the GNU General Public License version 2 only, as
7  * published by the Free Software Foundation.
8  *
9  * This code is distributed in the hope that it will be useful, but WITHOUT
10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12  * version 2 for more details (a copy is included in the LICENSE file that
13  * accompanied this code).
14  *
15  * You should have received a copy of the GNU General Public License version
16  * 2 along with this work; if not, write to the Free Software Foundation,
17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18  *
19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20  * or visit www.oracle.com if you need additional information or have any
21  * questions.
22  */
23 
24 
25 package org.graalvm.compiler.lir.alloc.lsra;
26 
27 import static jdk.vm.ci.code.ValueUtil.asRegister;
28 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
29 import static jdk.vm.ci.code.ValueUtil.isRegister;
30 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
31 import static org.graalvm.compiler.lir.LIRValueUtil.asVariable;
32 import static org.graalvm.compiler.lir.LIRValueUtil.isVariable;
33 import static org.graalvm.compiler.lir.debug.LIRGenerationDebugContext.getSourceForOperandFromDebugContext;
34 
35 import java.util.ArrayDeque;
36 import java.util.ArrayList;
37 import java.util.BitSet;
38 import java.util.EnumSet;
39 
40 import jdk.internal.vm.compiler.collections.EconomicSet;
41 import jdk.internal.vm.compiler.collections.Equivalence;
42 import org.graalvm.compiler.core.common.LIRKind;
43 import org.graalvm.compiler.core.common.PermanentBailoutException;
44 import org.graalvm.compiler.core.common.alloc.ComputeBlockOrder;
45 import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
46 import org.graalvm.compiler.core.common.util.BitMap2D;
47 import org.graalvm.compiler.debug.Assertions;
48 import org.graalvm.compiler.debug.DebugContext;
49 import org.graalvm.compiler.debug.GraalError;
50 import org.graalvm.compiler.debug.Indent;
51 import org.graalvm.compiler.lir.InstructionValueConsumer;
52 import org.graalvm.compiler.lir.LIRInstruction;
53 import org.graalvm.compiler.lir.LIRInstruction.OperandFlag;
54 import org.graalvm.compiler.lir.LIRInstruction.OperandMode;
55 import org.graalvm.compiler.lir.StandardOp.LoadConstantOp;
56 import org.graalvm.compiler.lir.StandardOp.ValueMoveOp;
57 import org.graalvm.compiler.lir.ValueConsumer;
58 import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterPriority;
59 import org.graalvm.compiler.lir.alloc.lsra.Interval.SpillState;
60 import org.graalvm.compiler.lir.alloc.lsra.LinearScan.BlockData;
61 import org.graalvm.compiler.lir.gen.LIRGenerationResult;
62 import org.graalvm.compiler.lir.phases.AllocationPhase.AllocationContext;
63 
64 import jdk.vm.ci.code.Register;
65 import jdk.vm.ci.code.RegisterArray;
66 import jdk.vm.ci.code.StackSlot;
67 import jdk.vm.ci.code.TargetDescription;
68 import jdk.vm.ci.meta.AllocatableValue;
69 import jdk.vm.ci.meta.Constant;
70 import jdk.vm.ci.meta.JavaConstant;
71 import jdk.vm.ci.meta.Value;
72 import jdk.vm.ci.meta.ValueKind;
73 
74 public class LinearScanLifetimeAnalysisPhase extends LinearScanAllocationPhase {
75 
76     protected final LinearScan allocator;
77     protected final DebugContext debug;
78 
79     /**
80      * @param linearScan
81      */
LinearScanLifetimeAnalysisPhase(LinearScan linearScan)82     protected LinearScanLifetimeAnalysisPhase(LinearScan linearScan) {
83         allocator = linearScan;
84         debug = allocator.getDebug();
85     }
86 
    @Override
    protected void run(TargetDescription target, LIRGenerationResult lirGenRes, AllocationContext context) {
        // Assign even ids to all LIR instructions and build the id -> instruction mapping.
        numberInstructions();
        debug.dump(DebugContext.VERBOSE_LEVEL, lirGenRes.getLIR(), "Before register allocation");
        // Per-block liveGen/liveKill, then a backward fixpoint for liveIn/liveOut.
        computeLocalLiveSets();
        computeGlobalLiveSets();
        // Create the lifetime intervals; extra checks only when detailed assertions are enabled.
        buildIntervals(Assertions.detailedAssertionsEnabled(allocator.getOptions()));
    }
95 
    /**
     * Bit set for each variable that is contained in each loop. Populated by
     * {@link #computeLocalLiveSets()} whenever an operand is used or defined inside a loop.
     */
    private BitMap2D intervalInLoop;

    /**
     * Returns whether the operand numbered {@code interval} was seen (used or defined) inside the
     * loop with index {@code loop}.
     */
    boolean isIntervalInLoop(int interval, int loop) {
        return intervalInLoop.at(interval, loop);
    }
104 
    /**
     * Numbers all instructions in all blocks. The numbering follows the
     * {@linkplain ComputeBlockOrder linear scan order}.
     *
     * Ids advance by two per instruction, so every instruction gets an even id and odd positions
     * remain unused by instructions.
     */
    protected void numberInstructions() {

        allocator.initIntervals();

        // Eagerly create an interval for every variable that is written (temp or output).
        ValueConsumer setVariableConsumer = (value, mode, flags) -> {
            if (isVariable(value)) {
                allocator.getOrCreateInterval(asVariable(value));
            }
        };

        // Assign IDs to LIR nodes and build a mapping, lirOps, from ID to LIRInstruction node.
        int numInstructions = 0;
        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
            numInstructions += allocator.getLIR().getLIRforBlock(block).size();
        }

        // initialize with correct length
        allocator.initOpIdMaps(numInstructions);

        // index counts instructions (dense); opId is index * 2 (see the final assertion).
        int opId = 0;
        int index = 0;
        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
            allocator.initBlockData(block);

            ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);

            int numInst = instructions.size();
            for (int j = 0; j < numInst; j++) {
                LIRInstruction op = instructions.get(j);
                op.setId(opId);

                allocator.putOpIdMaps(index, op, block);
                assert allocator.instructionForId(opId) == op : "must match";

                op.visitEachTemp(setVariableConsumer);
                op.visitEachOutput(setVariableConsumer);

                index++;
                opId += 2; // numbering of lirOps by two
            }
        }
        assert index == numInstructions : "must match";
        assert (index << 1) == opId : "must match: " + (index << 1);
    }
153 
    /**
     * Computes local live sets (i.e. {@link BlockData#liveGen} and {@link BlockData#liveKill})
     * separately for each block.
     *
     * An operand is in liveGen if it is read before any write in the block; it is in liveKill if
     * the block writes it. The scratch sets are reused across blocks and trimmed copies are stored
     * per block, which keeps peak memory low; an {@link OutOfMemoryError} here is converted into a
     * permanent bailout.
     */
    @SuppressWarnings("try")
    void computeLocalLiveSets() {
        int liveSize = allocator.liveSetSize();

        intervalInLoop = new BitMap2D(allocator.operandSize(), allocator.numLoops());

        try {
            final BitSet liveGenScratch = new BitSet(liveSize);
            final BitSet liveKillScratch = new BitSet(liveSize);
            // iterate all blocks
            for (final AbstractBlockBase<?> block : allocator.sortedBlocks()) {
                try (Indent indent = debug.logAndIndent("compute local live sets for block %s", block)) {

                    liveGenScratch.clear();
                    liveKillScratch.clear();

                    ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
                    int numInst = instructions.size();

                    // A use generates liveness only if the operand was not already killed
                    // (written) earlier in this block.
                    ValueConsumer useConsumer = (operand, mode, flags) -> {
                        if (isVariable(operand)) {
                            int operandNum = allocator.operandNumber(operand);
                            if (!liveKillScratch.get(operandNum)) {
                                liveGenScratch.set(operandNum);
                                if (debug.isLogEnabled()) {
                                    debug.log("liveGen for operand %d(%s)", operandNum, operand);
                                }
                            }
                            if (block.getLoop() != null) {
                                intervalInLoop.setBit(operandNum, block.getLoop().getIndex());
                            }
                        }

                        if (allocator.detailedAsserts) {
                            verifyInput(block, liveKillScratch, operand);
                        }
                    };
                    // State (debug info) uses are treated like reads, but also cover registers.
                    ValueConsumer stateConsumer = (operand, mode, flags) -> {
                        if (LinearScan.isVariableOrRegister(operand)) {
                            int operandNum = allocator.operandNumber(operand);
                            if (!liveKillScratch.get(operandNum)) {
                                liveGenScratch.set(operandNum);
                                if (debug.isLogEnabled()) {
                                    debug.log("liveGen in state for operand %d(%s)", operandNum, operand);
                                }
                            }
                        }
                    };
                    // A definition kills the operand for the remainder of the block.
                    ValueConsumer defConsumer = (operand, mode, flags) -> {
                        if (isVariable(operand)) {
                            int varNum = allocator.operandNumber(operand);
                            liveKillScratch.set(varNum);
                            if (debug.isLogEnabled()) {
                                debug.log("liveKill for operand %d(%s)", varNum, operand);
                            }
                            if (block.getLoop() != null) {
                                intervalInLoop.setBit(varNum, block.getLoop().getIndex());
                            }
                        }

                        if (allocator.detailedAsserts) {
                            /*
                             * Fixed intervals are never live at block boundaries, so they need not
                             * be processed in live sets. Process them only in debug mode so that
                             * this can be checked
                             */
                            verifyTemp(liveKillScratch, operand);
                        }
                    };

                    // iterate all instructions of the block
                    // (forward order matters: uses must be seen before later kills)
                    for (int j = 0; j < numInst; j++) {
                        final LIRInstruction op = instructions.get(j);

                        try (Indent indent2 = debug.logAndIndent("handle op %d: %s", op.id(), op)) {
                            op.visitEachInput(useConsumer);
                            op.visitEachAlive(useConsumer);
                            /*
                             * Add uses of live locals from interpreter's point of view for proper
                             * debug information generation.
                             */
                            op.visitEachState(stateConsumer);
                            op.visitEachTemp(defConsumer);
                            op.visitEachOutput(defConsumer);
                        }
                    } // end of instruction iteration

                    BlockData blockSets = allocator.getBlockData(block);
                    blockSets.liveGen = trimClone(liveGenScratch);
                    blockSets.liveKill = trimClone(liveKillScratch);
                    // sticky size, will get non-sticky in computeGlobalLiveSets
                    blockSets.liveIn = new BitSet(0);
                    blockSets.liveOut = new BitSet(0);

                    if (debug.isLogEnabled()) {
                        debug.log("liveGen  B%d %s", block.getId(), blockSets.liveGen);
                        debug.log("liveKill B%d %s", block.getId(), blockSets.liveKill);
                    }

                }
            } // end of block iteration
        } catch (OutOfMemoryError oom) {
            throw new PermanentBailoutException(oom, "Out-of-memory during live set allocation of size %d", liveSize);
        }
    }
263 
verifyTemp(BitSet liveKill, Value operand)264     private void verifyTemp(BitSet liveKill, Value operand) {
265         /*
266          * Fixed intervals are never live at block boundaries, so they need not be processed in live
267          * sets. Process them only in debug mode so that this can be checked
268          */
269         if (isRegister(operand)) {
270             if (allocator.isProcessed(operand)) {
271                 liveKill.set(allocator.operandNumber(operand));
272             }
273         }
274     }
275 
verifyInput(AbstractBlockBase<?> block, BitSet liveKill, Value operand)276     private void verifyInput(AbstractBlockBase<?> block, BitSet liveKill, Value operand) {
277         /*
278          * Fixed intervals are never live at block boundaries, so they need not be processed in live
279          * sets. This is checked by these assertions to be sure about it. The entry block may have
280          * incoming values in registers, which is ok.
281          */
282         if (isRegister(operand) && block != allocator.getLIR().getControlFlowGraph().getStartBlock()) {
283             if (allocator.isProcessed(operand)) {
284                 assert liveKill.get(allocator.operandNumber(operand)) : "using fixed register " + asRegister(operand) + " that is not defined in this block " + block;
285             }
286         }
287     }
288 
289     /**
290      * Performs a backward dataflow analysis to compute global live sets (i.e.
291      * {@link BlockData#liveIn} and {@link BlockData#liveOut}) for each block.
292      */
293     @SuppressWarnings("try")
computeGlobalLiveSets()294     protected void computeGlobalLiveSets() {
295         try (Indent indent = debug.logAndIndent("compute global live sets")) {
296             int numBlocks = allocator.blockCount();
297             boolean changeOccurred;
298             boolean changeOccurredInBlock;
299             int iterationCount = 0;
300             BitSet scratch = new BitSet(allocator.liveSetSize()); // scratch set for calculations
301 
302             /*
303              * Perform a backward dataflow analysis to compute liveOut and liveIn for each block.
304              * The loop is executed until a fixpoint is reached (no changes in an iteration).
305              */
306             do {
307                 changeOccurred = false;
308 
309                 try (Indent indent2 = debug.logAndIndent("new iteration %d", iterationCount)) {
310 
311                     // iterate all blocks in reverse order
312                     for (int i = numBlocks - 1; i >= 0; i--) {
313                         AbstractBlockBase<?> block = allocator.blockAt(i);
314                         BlockData blockSets = allocator.getBlockData(block);
315 
316                         changeOccurredInBlock = false;
317 
318                         /*
319                          * liveOut(block) is the union of liveIn(sux), for successors sux of block.
320                          */
321                         int n = block.getSuccessorCount();
322                         if (n > 0) {
323                             scratch.clear();
324                             // block has successors
325                             if (n > 0) {
326                                 for (AbstractBlockBase<?> successor : block.getSuccessors()) {
327                                     scratch.or(allocator.getBlockData(successor).liveIn);
328                                 }
329                             }
330 
331                             if (!blockSets.liveOut.equals(scratch)) {
332                                 blockSets.liveOut = trimClone(scratch);
333 
334                                 changeOccurred = true;
335                                 changeOccurredInBlock = true;
336                             }
337                         }
338 
339                         if (iterationCount == 0 || changeOccurredInBlock) {
340                             /*
341                              * liveIn(block) is the union of liveGen(block) with (liveOut(block) &
342                              * !liveKill(block)).
343                              *
344                              * Note: liveIn has to be computed only in first iteration or if liveOut
345                              * has changed!
346                              *
347                              * Note: liveIn set can only grow, never shrink. No need to clear it.
348                              */
349                             BitSet liveIn = blockSets.liveIn;
350                             /*
351                              * BitSet#or will call BitSet#ensureSize (since the bit set is of length
352                              * 0 initially) and set sticky to false
353                              */
354                             liveIn.or(blockSets.liveOut);
355                             liveIn.andNot(blockSets.liveKill);
356                             liveIn.or(blockSets.liveGen);
357 
358                             liveIn.clone(); // trimToSize()
359 
360                             if (debug.isLogEnabled()) {
361                                 debug.log("block %d: livein = %s,  liveout = %s", block.getId(), liveIn, blockSets.liveOut);
362                             }
363                         }
364                     }
365                     iterationCount++;
366 
367                     if (changeOccurred && iterationCount > 50) {
368                         /*
369                          * Very unlikely, should never happen: If it happens we cannot guarantee it
370                          * won't happen again.
371                          */
372                         throw new PermanentBailoutException("too many iterations in computeGlobalLiveSets");
373                     }
374                 }
375             } while (changeOccurred);
376 
377             if (Assertions.detailedAssertionsEnabled(allocator.getOptions())) {
378                 verifyLiveness();
379             }
380 
381             // check that the liveIn set of the first block is empty
382             AbstractBlockBase<?> startBlock = allocator.getLIR().getControlFlowGraph().getStartBlock();
383             if (allocator.getBlockData(startBlock).liveIn.cardinality() != 0) {
384                 if (Assertions.detailedAssertionsEnabled(allocator.getOptions())) {
385                     reportFailure(numBlocks);
386                 }
387                 // bailout if this occurs in product mode.
388                 throw new GraalError("liveIn set of first block must be empty: " + allocator.getBlockData(startBlock).liveIn);
389             }
390         }
391     }
392 
393     /**
394      * Creates a trimmed copy a bit set.
395      *
396      * {@link BitSet#clone()} cannot be used since it will not {@linkplain BitSet#trimToSize trim}
397      * the array if the bit set is {@linkplain BitSet#sizeIsSticky sticky}.
398      */
399     @SuppressWarnings("javadoc")
trimClone(BitSet set)400     private static BitSet trimClone(BitSet set) {
401         BitSet trimmedSet = new BitSet(0); // zero-length words array, sticky
402         trimmedSet.or(set); // words size ensured to be words-in-use of set,
403                             // also makes it non-sticky
404         return trimmedSet;
405     }
406 
    /**
     * Logs detailed diagnostics when liveness analysis failed, i.e. when some variable is live
     * into the start block (used before it is defined). For each offending variable this prints
     * the blocks that use it, the blocks that define it, and — via a forward flood fill from the
     * defining blocks — the usages that are not dominated by any definition.
     */
    @SuppressWarnings("try")
    protected void reportFailure(int numBlocks) {
        try (DebugContext.Scope s = debug.forceLog()) {
            try (Indent indent = debug.logAndIndent("report failure")) {

                BitSet startBlockLiveIn = allocator.getBlockData(allocator.getLIR().getControlFlowGraph().getStartBlock()).liveIn;
                try (Indent indent2 = debug.logAndIndent("Error: liveIn set of first block must be empty (when this fails, variables are used before they are defined):")) {
                    // Iterate every set bit of the offending liveIn set.
                    for (int operandNum = startBlockLiveIn.nextSetBit(0); operandNum >= 0; operandNum = startBlockLiveIn.nextSetBit(operandNum + 1)) {
                        Interval interval = allocator.intervalFor(operandNum);
                        if (interval != null) {
                            Value operand = interval.operand;
                            debug.log("var %d; operand=%s; node=%s", operandNum, operand, getSourceForOperandFromDebugContext(debug, operand));
                        } else {
                            debug.log("var %d; missing operand", operandNum);
                        }
                    }
                }

                // print some additional information to simplify debugging
                for (int operandNum = startBlockLiveIn.nextSetBit(0); operandNum >= 0; operandNum = startBlockLiveIn.nextSetBit(operandNum + 1)) {
                    Interval interval = allocator.intervalFor(operandNum);
                    Value operand = null;
                    Object valueForOperandFromDebugContext = null;
                    if (interval != null) {
                        operand = interval.operand;
                        valueForOperandFromDebugContext = getSourceForOperandFromDebugContext(debug, operand);
                    }
                    try (Indent indent2 = debug.logAndIndent("---- Detailed information for var %d; operand=%s; node=%s ----", operandNum, operand, valueForOperandFromDebugContext)) {

                        // Collect the blocks that use (liveGen) and define (liveKill) the operand.
                        ArrayDeque<AbstractBlockBase<?>> definedIn = new ArrayDeque<>();
                        EconomicSet<AbstractBlockBase<?>> usedIn = EconomicSet.create(Equivalence.IDENTITY);
                        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
                            if (allocator.getBlockData(block).liveGen.get(operandNum)) {
                                usedIn.add(block);
                                try (Indent indent3 = debug.logAndIndent("used in block B%d", block.getId())) {
                                    for (LIRInstruction ins : allocator.getLIR().getLIRforBlock(block)) {
                                        try (Indent indent4 = debug.logAndIndent("%d: %s", ins.id(), ins)) {
                                            ins.forEachState((liveStateOperand, mode, flags) -> {
                                                debug.log("operand=%s", liveStateOperand);
                                                return liveStateOperand;
                                            });
                                        }
                                    }
                                }
                            }
                            if (allocator.getBlockData(block).liveKill.get(operandNum)) {
                                definedIn.add(block);
                                try (Indent indent3 = debug.logAndIndent("defined in block B%d", block.getId())) {
                                    for (LIRInstruction ins : allocator.getLIR().getLIRforBlock(block)) {
                                        debug.log("%d: %s", ins.id(), ins);
                                    }
                                }
                            }
                        }

                        int[] hitCount = new int[numBlocks];

                        /*
                         * Forward flood fill from the defining blocks: a successor is reached once
                         * all its predecessors were reached (loop headers are handled specially).
                         * Any "used in" block that survives was not covered by a definition.
                         */
                        while (!definedIn.isEmpty()) {
                            AbstractBlockBase<?> block = definedIn.removeFirst();
                            usedIn.remove(block);
                            for (AbstractBlockBase<?> successor : block.getSuccessors()) {
                                if (successor.isLoopHeader()) {
                                    if (!block.isLoopEnd()) {
                                        definedIn.add(successor);
                                    }
                                } else {
                                    if (++hitCount[successor.getId()] == successor.getPredecessorCount()) {
                                        definedIn.add(successor);
                                    }
                                }
                            }
                        }
                        try (Indent indent3 = debug.logAndIndent("**** offending usages are in: ")) {
                            for (AbstractBlockBase<?> block : usedIn) {
                                debug.log("B%d", block.getId());
                            }
                        }
                    }
                }
            }
        } catch (Throwable e) {
            throw debug.handle(e);
        }
    }
491 
verifyLiveness()492     protected void verifyLiveness() {
493         /*
494          * Check that fixed intervals are not live at block boundaries (live set must be empty at
495          * fixed intervals).
496          */
497         for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
498             for (int j = 0; j <= allocator.maxRegisterNumber(); j++) {
499                 assert !allocator.getBlockData(block).liveIn.get(j) : "liveIn  set of fixed register must be empty";
500                 assert !allocator.getBlockData(block).liveOut.get(j) : "liveOut set of fixed register must be empty";
501                 assert !allocator.getBlockData(block).liveGen.get(j) : "liveGen set of fixed register must be empty";
502             }
503         }
504     }
505 
addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts)506     protected void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts) {
507         if (!allocator.isProcessed(operand)) {
508             return;
509         }
510 
511         Interval interval = allocator.getOrCreateInterval(operand);
512         if (!kind.equals(LIRKind.Illegal)) {
513             interval.setKind(kind);
514         }
515 
516         interval.addRange(from, to);
517 
518         // Register use position at even instruction id.
519         interval.addUsePos(to & ~1, registerPriority, detailedAsserts);
520 
521         if (debug.isLogEnabled()) {
522             debug.log("add use: %s, from %d to %d (%s)", interval, from, to, registerPriority.name());
523         }
524     }
525 
addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts)526     protected void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts) {
527         if (!allocator.isProcessed(operand)) {
528             return;
529         }
530 
531         Interval interval = allocator.getOrCreateInterval(operand);
532         if (!kind.equals(LIRKind.Illegal)) {
533             interval.setKind(kind);
534         }
535 
536         interval.addRange(tempPos, tempPos + 1);
537         interval.addUsePos(tempPos, registerPriority, detailedAsserts);
538         interval.addMaterializationValue(null);
539 
540         if (debug.isLogEnabled()) {
541             debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
542         }
543     }
544 
    /**
     * Records a definition of {@code operand} at the position of {@code op}. Normally this shrinks
     * the start of the range that an earlier (downstream) use opened; if no use follows, a
     * one-position "vacuous" range is created for the dead value.
     *
     * @param operand the defined value; ignored if not handled by this allocator
     * @param op the defining instruction; its id is the definition position
     * @param registerPriority how strongly the definition needs a register
     * @param kind the kind to assign to the interval (unless {@link LIRKind#Illegal})
     * @param detailedAsserts whether expensive assertions are enabled
     */
    protected void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority, ValueKind<?> kind, boolean detailedAsserts) {
        if (!allocator.isProcessed(operand)) {
            return;
        }
        int defPos = op.id();

        Interval interval = allocator.getOrCreateInterval(operand);
        if (!kind.equals(LIRKind.Illegal)) {
            interval.setKind(kind);
        }

        Range r = interval.first();
        if (r.from <= defPos) {
            /*
             * Update the starting point (when a range is first created for a use, its start is the
             * beginning of the current block until a def is encountered).
             */
            r.from = defPos;
            interval.addUsePos(defPos, registerPriority, detailedAsserts);

        } else {
            /*
             * Dead value - make vacuous interval also add register priority for dead intervals
             */
            interval.addRange(defPos, defPos + 1);
            interval.addUsePos(defPos, registerPriority, detailedAsserts);
            if (debug.isLogEnabled()) {
                debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
            }
        }

        // Track where a spill store could be placed for this definition.
        changeSpillDefinitionPos(op, operand, interval, defPos);
        if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
            // detection of method-parameters and roundfp-results
            interval.setSpillState(SpillState.StartInMemory);
        }
        interval.addMaterializationValue(getMaterializedValue(op, operand, interval));

        if (debug.isLogEnabled()) {
            debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
        }
    }
587 
588     /**
589      * Optimizes moves related to incoming stack based arguments. The interval for the destination
590      * of such moves is assigned the stack slot (which is in the caller's frame) as its spill slot.
591      */
handleMethodArguments(LIRInstruction op)592     protected void handleMethodArguments(LIRInstruction op) {
593         if (ValueMoveOp.isValueMoveOp(op)) {
594             ValueMoveOp move = ValueMoveOp.asValueMoveOp(op);
595             if (optimizeMethodArgument(move.getInput())) {
596                 StackSlot slot = asStackSlot(move.getInput());
597                 if (Assertions.detailedAssertionsEnabled(allocator.getOptions())) {
598                     assert op.id() > 0 : "invalid id";
599                     assert allocator.blockForId(op.id()).getPredecessorCount() == 0 : "move from stack must be in first block";
600                     assert isVariable(move.getResult()) : "result of move must be a variable";
601 
602                     if (debug.isLogEnabled()) {
603                         debug.log("found move from stack slot %s to %s", slot, move.getResult());
604                     }
605                 }
606 
607                 Interval interval = allocator.intervalFor(move.getResult());
608                 interval.setSpillSlot(slot);
609                 interval.assignLocation(slot);
610             }
611         }
612     }
613 
    /**
     * Connects the intervals of a hinted operand pair so the allocator can try to give both the
     * same location. Hints always point from the definition to the use: when {@code hintAtDef} is
     * true, {@code targetValue} is the defined value and receives the hint; otherwise the hinted
     * register receives it.
     */
    protected void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
        if (flags.contains(OperandFlag.HINT) && LinearScan.isVariableOrRegister(targetValue)) {

            op.forEachRegisterHint(targetValue, mode, (registerHint, valueMode, valueFlags) -> {
                if (LinearScan.isVariableOrRegister(registerHint)) {
                    Interval from = allocator.getOrCreateInterval((AllocatableValue) registerHint);
                    Interval to = allocator.getOrCreateInterval((AllocatableValue) targetValue);

                    /* hints always point from def to use */
                    if (hintAtDef) {
                        to.setLocationHint(from);
                    } else {
                        from.setLocationHint(to);
                    }
                    if (debug.isLogEnabled()) {
                        debug.log("operation at opId %d: added hint from interval %d to %d", op.id(), from.operandNumber, to.operandNumber);
                    }

                    // Returning non-null stops/records the processed hint for this operand.
                    return registerHint;
                }
                return null;
            });
        }
    }
638 
639     /**
640      * Eliminates moves from register to stack if the stack slot is known to be correct.
641      *
642      * @param op
643      * @param operand
644      */
changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, Interval interval, int defPos)645     protected void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, Interval interval, int defPos) {
646         assert interval.isSplitParent() : "can only be called for split parents";
647 
648         switch (interval.spillState()) {
649             case NoDefinitionFound:
650                 assert interval.spillDefinitionPos() == -1 : "must no be set before";
651                 interval.setSpillDefinitionPos(defPos);
652                 interval.setSpillState(SpillState.NoSpillStore);
653                 break;
654 
655             case NoSpillStore:
656                 assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
657                 if (defPos < interval.spillDefinitionPos() - 2) {
658                     // second definition found, so no spill optimization possible for this interval
659                     interval.setSpillState(SpillState.NoOptimization);
660                 } else {
661                     // two consecutive definitions (because of two-operand LIR form)
662                     assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
663                 }
664                 break;
665 
666             case NoOptimization:
667                 // nothing to do
668                 break;
669 
670             default:
671                 throw GraalError.shouldNotReachHere("other states not allowed at this time");
672         }
673     }
674 
optimizeMethodArgument(Value value)675     private static boolean optimizeMethodArgument(Value value) {
676         /*
677          * Object method arguments that are passed on the stack are currently not optimized because
678          * this requires that the runtime visits method arguments during stack walking.
679          */
680         return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && LIRKind.isValue(value);
681     }
682 
683     /**
684      * Determines the register priority for an instruction's output/result operand.
685      */
registerPriorityOfOutputOperand(LIRInstruction op)686     protected RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
687         if (ValueMoveOp.isValueMoveOp(op)) {
688             ValueMoveOp move = ValueMoveOp.asValueMoveOp(op);
689             if (optimizeMethodArgument(move.getInput())) {
690                 return RegisterPriority.None;
691             }
692         }
693 
694         // all other operands require a register
695         return RegisterPriority.MustHaveRegister;
696     }
697 
698     /**
699      * Determines the priority which with an instruction's input operand will be allocated a
700      * register.
701      */
registerPriorityOfInputOperand(EnumSet<OperandFlag> flags)702     protected static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
703         if (flags.contains(OperandFlag.STACK)) {
704             return RegisterPriority.ShouldHaveRegister;
705         }
706         // all other operands require a register
707         return RegisterPriority.MustHaveRegister;
708     }
709 
    /**
     * Builds the lifetime intervals for all operands by iterating blocks and their instructions in
     * reverse order, so each interval's definition is processed after all of its uses. Uses live
     * at a block's end are extended across the whole block, loop-end blocks get special use
     * positions, and every fixed (register) interval finally receives the range [0, 1].
     */
    @SuppressWarnings("try")
    protected void buildIntervals(boolean detailedAsserts) {

        try (Indent indent = debug.logAndIndent("build intervals")) {
            // Definition: the defined operand starts (or re-starts) an interval at this op.
            InstructionValueConsumer outputConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op), operand.getValueKind(), detailedAsserts);
                    addRegisterHint(op, operand, mode, flags, true);
                }
            };

            // Temp: the operand is clobbered by the op, so it occupies a register only at the op
            // itself and must not alias any input.
            InstructionValueConsumer tempConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister, operand.getValueKind(), detailedAsserts);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            };

            // Alive: the operand must survive past the op (use ends at opId + 1), unlike a plain
            // input which may share its register with the result.
            InstructionValueConsumer aliveConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    int opId = op.id();
                    int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, p, operand.getValueKind(), detailedAsserts);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            };

            // Input: an ordinary use ending at the op itself.
            InstructionValueConsumer inputConsumer = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    int opId = op.id();
                    int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
                    RegisterPriority p = registerPriorityOfInputOperand(flags);
                    addUse((AllocatableValue) operand, blockFrom, opId, p, operand.getValueKind(), detailedAsserts);
                    addRegisterHint(op, operand, mode, flags, false);
                }
            };

            // State (debug info): keep the value alive past the op but with priority None, so it
            // may live on the stack.
            InstructionValueConsumer stateProc = (op, operand, mode, flags) -> {
                if (LinearScan.isVariableOrRegister(operand)) {
                    int opId = op.id();
                    int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
                    addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None, operand.getValueKind(), detailedAsserts);
                }
            };

            // create a list with all caller-save registers (cpu, fpu, xmm)
            RegisterArray callerSaveRegs = allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();

            // iterate all blocks in reverse order
            for (int i = allocator.blockCount() - 1; i >= 0; i--) {

                AbstractBlockBase<?> block = allocator.blockAt(i);
                try (Indent indent2 = debug.logAndIndent("handle block %d", block.getId())) {

                    ArrayList<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
                    final int blockFrom = allocator.getFirstLirInstructionId(block);
                    int blockTo = allocator.getLastLirInstructionId(block);

                    assert blockFrom == instructions.get(0).id();
                    assert blockTo == instructions.get(instructions.size() - 1).id();

                    // Update intervals for operands live at the end of this block;
                    BitSet live = allocator.getBlockData(block).liveOut;
                    for (int operandNum = live.nextSetBit(0); operandNum >= 0; operandNum = live.nextSetBit(operandNum + 1)) {
                        assert live.get(operandNum) : "should not stop here otherwise";
                        AllocatableValue operand = allocator.intervalFor(operandNum).operand;
                        if (debug.isLogEnabled()) {
                            debug.log("live in %d: %s", operandNum, operand);
                        }

                        // Extend the interval across the entire block (blockTo + 2 covers the
                        // block's last position); the use priority None allows it to stay on the
                        // stack.
                        addUse(operand, blockFrom, blockTo + 2, RegisterPriority.None, LIRKind.Illegal, detailedAsserts);

                        /*
                         * Add special use positions for loop-end blocks when the interval is used
                         * anywhere inside this loop. It's possible that the block was part of a
                         * non-natural loop, so it might have an invalid loop index.
                         */
                        if (block.isLoopEnd() && block.getLoop() != null && isIntervalInLoop(operandNum, block.getLoop().getIndex())) {
                            allocator.intervalFor(operandNum).addUsePos(blockTo + 1, RegisterPriority.LiveAtLoopEnd, detailedAsserts);
                        }
                    }

                    /*
                     * Iterate all instructions of the block in reverse order. definitions of
                     * intervals are processed before uses.
                     */
                    for (int j = instructions.size() - 1; j >= 0; j--) {
                        final LIRInstruction op = instructions.get(j);
                        final int opId = op.id();

                        try (Indent indent3 = debug.logAndIndent("handle inst %d: %s", opId, op)) {

                            // add a temp range for each register if operation destroys
                            // caller-save registers
                            if (op.destroysCallerSavedRegisters()) {
                                for (Register r : callerSaveRegs) {
                                    if (allocator.attributes(r).isAllocatable()) {
                                        addTemp(r.asValue(), opId, RegisterPriority.None, LIRKind.Illegal, detailedAsserts);
                                    }
                                }
                                if (debug.isLogEnabled()) {
                                    debug.log("operation destroys all caller-save registers");
                                }
                            }

                            // Visit in def -> temp -> alive -> input order so that, within one op,
                            // definitions are recorded before uses (reverse-order invariant).
                            op.visitEachOutput(outputConsumer);
                            op.visitEachTemp(tempConsumer);
                            op.visitEachAlive(aliveConsumer);
                            op.visitEachInput(inputConsumer);

                            /*
                             * Add uses of live locals from interpreter's point of view for proper
                             * debug information generation. Treat these operands as temp values (if
                             * the live range is extended to a call site, the value would be in a
                             * register at the call otherwise).
                             */
                            op.visitEachState(stateProc);

                            // special steps for some instructions (especially moves)
                            handleMethodArguments(op);

                        }

                    } // end of instruction iteration
                }
            } // end of block iteration

            /*
             * Add the range [0, 1] to all fixed intervals. the register allocator need not handle
             * unhandled fixed intervals.
             */
            for (Interval interval : allocator.intervals()) {
                if (interval != null && isRegister(interval.operand)) {
                    interval.addRange(0, 1);
                }
            }
        }
    }
849 
850     /**
851      * Returns a value for a interval definition, which can be used for re-materialization.
852      *
853      * @param op An instruction which defines a value
854      * @param operand The destination operand of the instruction
855      * @param interval The interval for this defined value.
856      * @return Returns the value which is moved to the instruction and which can be reused at all
857      *         reload-locations in case the interval of this instruction is spilled. Currently this
858      *         can only be a {@link JavaConstant}.
859      */
getMaterializedValue(LIRInstruction op, Value operand, Interval interval)860     protected Constant getMaterializedValue(LIRInstruction op, Value operand, Interval interval) {
861         if (LoadConstantOp.isLoadConstantOp(op)) {
862             LoadConstantOp move = LoadConstantOp.asLoadConstantOp(op);
863 
864             if (!allocator.neverSpillConstants()) {
865                 /*
866                  * Check if the interval has any uses which would accept an stack location (priority
867                  * == ShouldHaveRegister). Rematerialization of such intervals can result in a
868                  * degradation, because rematerialization always inserts a constant load, even if
869                  * the value is not needed in a register.
870                  */
871                 Interval.UsePosList usePosList = interval.usePosList();
872                 int numUsePos = usePosList.size();
873                 for (int useIdx = 0; useIdx < numUsePos; useIdx++) {
874                     Interval.RegisterPriority priority = usePosList.registerPriority(useIdx);
875                     if (priority == Interval.RegisterPriority.ShouldHaveRegister) {
876                         return null;
877                     }
878                 }
879             }
880             return move.getConstant();
881         }
882         return null;
883     }
884 }
885