1 /* 2 * Copyright (c) 2014, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 */


package org.graalvm.compiler.lir.alloc.lsra;

import static jdk.vm.ci.code.ValueUtil.asRegister;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static org.graalvm.compiler.lir.LIRValueUtil.isStackSlotValue;

import org.graalvm.compiler.core.common.cfg.AbstractBlockBase;
import org.graalvm.compiler.debug.DebugContext;
import org.graalvm.compiler.debug.Indent;
import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterBinding;
import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterBindingLists;
import org.graalvm.compiler.lir.alloc.lsra.Interval.RegisterPriority;
import org.graalvm.compiler.lir.alloc.lsra.Interval.State;
import org.graalvm.compiler.options.Option;
import org.graalvm.compiler.options.OptionKey;
import org.graalvm.compiler.options.OptionType;

import jdk.vm.ci.code.Register;
import jdk.vm.ci.meta.AllocatableValue;

/**
 * A {@link LinearScanWalker} that performs an additional optimization pass over the sorted
 * blocks before the regular allocation walk: for a block with exactly one predecessor, it tries
 * to split a live interval at the block boundary and re-assign the split part to the location
 * the value had at the end of the predecessor, so the two locations agree across the edge
 * (presumably to avoid a resolution move there — the move insertion itself happens elsewhere).
 */
public class OptimizingLinearScanWalker extends LinearScanWalker {

    /** Debug options controlling this walker's optimization. */
    public static class Options {
        // @formatter:off
        @Option(help = "Enable LSRA optimization", type = OptionType.Debug)
        public static final OptionKey<Boolean> LSRAOptimization = new OptionKey<>(false);
        @Option(help = "LSRA optimization: Only split but do not reassign", type = OptionType.Debug)
        public static final OptionKey<Boolean> LSRAOptSplitOnly = new OptionKey<>(false);
        // @formatter:on
    }

    OptimizingLinearScanWalker(LinearScan allocator, Interval unhandledFixedFirst, Interval unhandledAnyFirst) {
        super(allocator, unhandledFixedFirst, unhandledAnyFirst);
    }

    /**
     * Handles an interval that was assigned a spill slot. Materializable intervals are skipped
     * (they must not live on the stack); all others are re-added to the unhandled
     * {@link RegisterBinding#Stack} list so the walker processes them again.
     *
     * @param interval an interval with a non-null location
     */
    @SuppressWarnings("try")
    @Override
    protected void handleSpillSlot(Interval interval) {
        assert interval.location() != null : "interval not assigned " + interval;
        if (interval.canMaterialize()) {
            assert !isStackSlotValue(interval.location()) : "interval can materialize but assigned to a stack slot " + interval;
            return;
        }
        assert isStackSlotValue(interval.location()) : "interval not assigned to a stack slot " + interval;
        DebugContext debug = allocator.getDebug();
        try (DebugContext.Scope s1 = debug.scope("LSRAOptimization")) {
            debug.log("adding stack to unhandled list %s", interval);
            unhandledLists.addToListSortedByStartAndUsePositions(RegisterBinding.Stack, interval);
        }
    }

    /** Debug helper: logs every interval in the given binding list. Currently unused. */
    @SuppressWarnings("unused")
    private static void printRegisterBindingList(DebugContext debug, RegisterBindingLists list, RegisterBinding binding) {
        for (Interval interval = list.get(binding); !interval.isEndMarker(); interval = interval.next) {
            debug.log("%s", interval);
        }
    }

    /**
     * Runs the block-boundary optimization over all sorted blocks, then performs the normal
     * linear-scan walk via {@code super.walk()}.
     */
    @SuppressWarnings("try")
    @Override
    void walk() {
        try (DebugContext.Scope s = allocator.getDebug().scope("OptimizingLinearScanWalker")) {
            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
                optimizeBlock(block);
            }
        }
        super.walk();
    }

    /**
     * Attempts the split-and-reassign optimization for every active interval at the start of
     * {@code block}. Only blocks with a single predecessor are considered. Because a successful
     * {@link #optimize} call mutates the active lists, iteration restarts from scratch
     * (via {@code break loop} and the {@code changed} flag) after every change.
     */
    @SuppressWarnings("try")
    private void optimizeBlock(AbstractBlockBase<?> block) {
        if (block.getPredecessorCount() == 1) {
            int nextBlock = allocator.getFirstLirInstructionId(block);
            DebugContext debug = allocator.getDebug();
            try (DebugContext.Scope s1 = debug.scope("LSRAOptimization")) {
                debug.log("next block: %s (%d)", block, nextBlock);
            }
            try (Indent indent0 = debug.indent()) {
                // Advance the walker state to the first instruction of this block.
                walkTo(nextBlock);

                try (DebugContext.Scope s1 = debug.scope("LSRAOptimization")) {
                    boolean changed = true;
                    // we need to do this because the active lists might change
                    loop: while (changed) {
                        changed = false;
                        try (Indent indent1 = debug.logAndIndent("Active intervals: (block %s [%d])", block, nextBlock)) {
                            for (Interval active = activeLists.get(RegisterBinding.Any); !active.isEndMarker(); active = active.next) {
                                debug.log("active (any): %s", active);
                                if (optimize(nextBlock, block, active, RegisterBinding.Any)) {
                                    changed = true;
                                    break loop;
                                }
                            }
                            for (Interval active = activeLists.get(RegisterBinding.Stack); !active.isEndMarker(); active = active.next) {
                                debug.log("active (stack): %s", active);
                                if (optimize(nextBlock, block, active, RegisterBinding.Stack)) {
                                    changed = true;
                                    break loop;
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Tries to make {@code currentInterval}'s location at the start of {@code currentBlock}
     * equal to the location the same variable had at the end of the (single) predecessor, by
     * splitting the interval at {@code currentPos} and assigning the split part to the
     * predecessor's location (or a register via {@link #splitRegisterInterval}).
     *
     * @param currentPos id of the first LIR instruction of {@code currentBlock}
     * @param currentBlock a block with exactly one predecessor
     * @param currentInterval an active interval covering {@code currentPos}
     * @param binding the active list {@code currentInterval} currently lives in
     * @return {@code true} iff the interval was split (the active lists changed and the caller
     *         must restart its iteration)
     */
    @SuppressWarnings("try")
    private boolean optimize(int currentPos, AbstractBlockBase<?> currentBlock, Interval currentInterval, RegisterBinding binding) {
        // BEGIN initialize and sanity checks
        assert currentBlock != null : "block must not be null";
        assert currentInterval != null : "interval must not be null";

        assert currentBlock.getPredecessorCount() == 1 : "more than one predecessors -> optimization not possible";

        if (!currentInterval.isSplitChild()) {
            // interval is not a split child -> no need for optimization
            return false;
        }

        if (currentInterval.from() == currentPos) {
            // the interval starts at the current position so no need for splitting
            return false;
        }

        // get current location
        AllocatableValue currentLocation = currentInterval.location();
        assert currentLocation != null : "active intervals must have a location assigned!";

        // get predecessor stuff
        AbstractBlockBase<?> predecessorBlock = currentBlock.getPredecessors()[0];
        int predEndId = allocator.getLastLirInstructionId(predecessorBlock);
        Interval predecessorInterval = currentInterval.getIntervalCoveringOpId(predEndId);
        assert predecessorInterval != null : "variable not live at the end of the only predecessor! " + predecessorBlock + " -> " + currentBlock + " interval: " + currentInterval;
        AllocatableValue predecessorLocation = predecessorInterval.location();
        assert predecessorLocation != null : "handled intervals must have a location assigned!";

        // END initialize and sanity checks

        if (currentLocation.equals(predecessorLocation)) {
            // locations are already equal -> nothing to optimize
            return false;
        }

        if (!isStackSlotValue(predecessorLocation) && !isRegister(predecessorLocation)) {
            assert predecessorInterval.canMaterialize();
            // value is materialized -> no need for optimization
            return false;
        }

        assert isStackSlotValue(currentLocation) || isRegister(currentLocation) : "current location not a register or stack slot " + currentLocation;

        DebugContext debug = allocator.getDebug();
        try (Indent indent = debug.logAndIndent("location differs: %s vs. %s", predecessorLocation, currentLocation)) {
            // split current interval at current position
            debug.log("splitting at position %d", currentPos);

            assert allocator.isBlockBegin(currentPos) && ((currentPos & 1) == 0) : "split pos must be even when on block boundary";

            Interval splitPart = currentInterval.split(currentPos, allocator);
            // the left part (currentInterval) ends before currentPos, so it is no longer active
            activeLists.remove(binding, currentInterval);

            assert splitPart.from() >= currentPosition : "cannot append new interval before current walk position";

            // the currentSplitChild is needed later when moves are inserted for reloading
            assert splitPart.currentSplitChild() == currentInterval : "overwriting wrong currentSplitChild";
            splitPart.makeCurrentSplitChild();

            if (debug.isLogEnabled()) {
                debug.log("left interval  : %s", currentInterval.logString(allocator));
                debug.log("right interval : %s", splitPart.logString(allocator));
            }

            if (Options.LSRAOptSplitOnly.getValue(allocator.getOptions())) {
                // just add the split interval to the unhandled list
                unhandledLists.addToListSortedByStartAndUsePositions(RegisterBinding.Any, splitPart);
            } else {
                if (isRegister(predecessorLocation)) {
                    splitRegisterInterval(splitPart, asRegister(predecessorLocation));
                } else {
                    assert isStackSlotValue(predecessorLocation);
                    debug.log("assigning interval %s to %s", splitPart, predecessorLocation);
                    splitPart.assignLocation(predecessorLocation);
                    // activate interval
                    activeLists.addToListSortedByCurrentFromPositions(RegisterBinding.Stack, splitPart);
                    splitPart.state = State.Active;

                    splitStackInterval(splitPart);
                }
            }
        }
        return true;
    }

    /**
     * Assigns {@code reg} to {@code interval}, splitting {@code interval} if the register is
     * not free for its whole lifetime and splitting/spilling all intersecting intervals that
     * currently occupy the register. The interval is activated afterwards.
     */
    @SuppressWarnings("try")
    private void splitRegisterInterval(Interval interval, Register reg) {
        // collect current usage of registers
        initVarsForAlloc(interval);
        initUseLists(false);
        spillExcludeActiveFixed();
        // spillBlockUnhandledFixed(cur);
        assert unhandledLists.get(RegisterBinding.Fixed).isEndMarker() : "must not have unhandled fixed intervals because all fixed intervals have a use at position 0";
        spillBlockInactiveFixed(interval);
        spillCollectActiveAny(RegisterPriority.LiveAtLoopEnd);
        spillCollectInactiveAny(interval);

        DebugContext debug = allocator.getDebug();
        if (debug.isLogEnabled()) {
            try (Indent indent2 = debug.logAndIndent("state of registers:")) {
                for (Register register : availableRegs) {
                    int i = register.number;
                    try (Indent indent3 = debug.logAndIndent("reg %d: usePos: %d, blockPos: %d, intervals: ", i, usePos[i], blockPos[i])) {
                        for (int j = 0; j < spillIntervals[i].size(); j++) {
                            debug.log("%d ", spillIntervals[i].get(j).operandNumber);
                        }
                    }
                }
            }
        }

        // the register must be free at least until this position
        boolean needSplit = blockPos[reg.number] <= interval.to();

        int splitPos = blockPos[reg.number];

        assert splitPos > 0 : "invalid splitPos";
        assert needSplit || splitPos > interval.from() : "splitting interval at from";

        debug.log("assigning interval %s to %s", interval, reg);
        interval.assignLocation(reg.asValue(interval.kind()));
        if (needSplit) {
            // register not available for full interval : so split it
            splitWhenPartialRegisterAvailable(interval, splitPos);
        }

        // perform splitting and spilling for all affected intervals
        splitAndSpillIntersectingIntervals(reg);

        // activate interval
        activeLists.addToListSortedByCurrentFromPositions(RegisterBinding.Any, interval);
        interval.state = State.Active;

    }
}