/*
 * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package org.graalvm.compiler.lir.aarch64;

import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
import static jdk.vm.ci.code.ValueUtil.asRegister;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.HINT;
import static org.graalvm.compiler.lir.LIRInstruction.OperandFlag.REG;

import java.util.function.Function;

import org.graalvm.compiler.asm.Label;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ConditionFlag;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ExtendType;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
import org.graalvm.compiler.code.CompilationResult.JumpTable;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.NumUtil;
import org.graalvm.compiler.core.common.calc.Condition;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.lir.ConstantValue;
import org.graalvm.compiler.lir.LIRInstructionClass;
import org.graalvm.compiler.lir.LabelRef;
import org.graalvm.compiler.lir.Opcode;
import org.graalvm.compiler.lir.StandardOp;
import org.graalvm.compiler.lir.SwitchStrategy;
import org.graalvm.compiler.lir.SwitchStrategy.BaseSwitchClosure;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.asm.CompilationResultBuilder;

import jdk.vm.ci.aarch64.AArch64Kind;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.Value;

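/**
 * AArch64-specific LIR instructions for control flow: conditional branches, conditional moves and
 * the two switch lowerings (compare-based strategy switch and jump-table switch).
 */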
public class AArch64ControlFlow {

    /**
     * Compares an integer register to 0 and branches if the condition is true. The condition may
     * only be equal or not equal.
     */
    // TODO (das) where do we need this?
    // public static class CompareAndBranchOp extends AArch64LIRInstruction implements
    // StandardOp.BranchOp {
    // private final ConditionFlag condition;
    // private final LabelRef destination;
    // @Use({REG}) private Value x;
    //
    // public CompareAndBranchOp(Condition condition, LabelRef destination, Value x) {
    // assert condition == Condition.EQ || condition == Condition.NE;
    // assert ARMv8.isGpKind(x.getKind());
    // this.condition = condition == Condition.EQ ? ConditionFlag.EQ : ConditionFlag.NE;
    // this.destination = destination;
    // this.x = x;
    // }
    //
    // @Override
    // public void emitCode(CompilationResultBuilder crb, ARMv8MacroAssembler masm) {
    // int size = ARMv8.bitsize(x.getKind());
    // if (condition == ConditionFlag.EQ) {
    // masm.cbz(size, asRegister(x), destination.label());
    // } else {
    // masm.cbnz(size, asRegister(x), destination.label());
    // }
    // }
    // }

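    /**
     * Conditional branch with a true and a false destination. If one destination is the successor
     * edge of the current block it is reached by falling through, so at most one conditional
     * branch and one unconditional jump are emitted.
     */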
    public static class BranchOp extends AArch64BlockEndOp implements StandardOp.BranchOp {
        public static final LIRInstructionClass<BranchOp> TYPE = LIRInstructionClass.create(BranchOp.class);

        private final AArch64Assembler.ConditionFlag condition;
        private final LabelRef trueDestination;
        private final LabelRef falseDestination;

        private final double trueDestinationProbability;

        public BranchOp(AArch64Assembler.ConditionFlag condition, LabelRef trueDestination, LabelRef falseDestination, double trueDestinationProbability) {
            super(TYPE);
            this.condition = condition;
            this.trueDestination = trueDestination;
            this.falseDestination = falseDestination;
            this.trueDestinationProbability = trueDestinationProbability;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            /*
             * Explanation: Depending on what the successor edge is, we can use the fall-through to
             * optimize the generated code. If neither is a successor edge, use the branch
             * probability to try to take the conditional jump as often as possible to avoid
             * executing two instructions instead of one.
             */
            if (crb.isSuccessorEdge(trueDestination)) {
                masm.branchConditionally(condition.negate(), falseDestination.label());
            } else if (crb.isSuccessorEdge(falseDestination)) {
                masm.branchConditionally(condition, trueDestination.label());
            } else if (trueDestinationProbability < 0.5) {
                masm.branchConditionally(condition.negate(), falseDestination.label());
                masm.jmp(trueDestination.label());
            } else {
                masm.branchConditionally(condition, trueDestination.label());
                masm.jmp(falseDestination.label());
            }
        }

    }

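    /**
     * Conditional move: {@code result = condition ? trueValue : falseValue}. Emitted via
     * {@code masm.cmov} for integer kinds and {@code masm.fcmov} for floating-point kinds; all
     * three values must have the same platform kind.
     */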
    @Opcode("CMOVE")
    public static class CondMoveOp extends AArch64LIRInstruction {
        public static final LIRInstructionClass<CondMoveOp> TYPE = LIRInstructionClass.create(CondMoveOp.class);

        @Def protected Value result;
        @Use protected Value trueValue;
        @Use protected Value falseValue;
        private final AArch64Assembler.ConditionFlag condition;

        public CondMoveOp(Variable result, AArch64Assembler.ConditionFlag condition, Value trueValue, Value falseValue) {
            super(TYPE);
            assert trueValue.getPlatformKind() == falseValue.getPlatformKind() && trueValue.getPlatformKind() == result.getPlatformKind();
            this.result = result;
            this.condition = condition;
            this.trueValue = trueValue;
            this.falseValue = falseValue;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            AArch64Kind kind = (AArch64Kind) trueValue.getPlatformKind();
            int size = kind.getSizeInBytes() * Byte.SIZE;
            if (kind.isInteger()) {
                masm.cmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
            } else {
                masm.fcmov(size, asRegister(result), asRegister(trueValue), asRegister(falseValue), condition);
            }
        }
    }

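    /**
     * Switch emitted as a sequence of compares and conditional branches, driven by the given
     * {@link SwitchStrategy}. Keys may be int, long or object constants; the scratch register is
     * used when a key cannot be encoded as a comparison immediate.
     */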
    public static class StrategySwitchOp extends AArch64BlockEndOp implements StandardOp.BlockEndOp {
        public static final LIRInstructionClass<StrategySwitchOp> TYPE = LIRInstructionClass.create(StrategySwitchOp.class);

        private final Constant[] keyConstants;
        protected final SwitchStrategy strategy;
        private final Function<Condition, ConditionFlag> converter;
        private final LabelRef[] keyTargets;
        private final LabelRef defaultTarget;
        @Alive protected Value key;
        // TODO (das) This could be optimized: We only need the scratch register in case of a
        // datapatch, or too large immediates.
        @Temp protected Value scratch;

        public StrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
                        Function<Condition, ConditionFlag> converter) {
            this(TYPE, strategy, keyTargets, defaultTarget, key, scratch, converter);
        }

        protected StrategySwitchOp(LIRInstructionClass<? extends StrategySwitchOp> c, SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch,
                        Function<Condition, ConditionFlag> converter) {
            super(c);
            this.strategy = strategy;
            this.converter = converter;
            this.keyConstants = strategy.getKeyConstants();
            this.keyTargets = keyTargets;
            this.defaultTarget = defaultTarget;
            this.key = key;
            this.scratch = scratch;
            assert keyConstants.length == keyTargets.length;
            assert keyConstants.length == strategy.keyProbabilities.length;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            strategy.run(new SwitchClosure(asRegister(key), crb, masm));
        }

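        /**
         * Closure invoked by the {@link SwitchStrategy}: for each key it emits the comparison
         * against the key register and the conditional jump to the matching target.
         */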
        public class SwitchClosure extends BaseSwitchClosure {

            protected final Register keyRegister;
            protected final CompilationResultBuilder crb;
            protected final AArch64MacroAssembler masm;

            protected SwitchClosure(Register keyRegister, CompilationResultBuilder crb, AArch64MacroAssembler masm) {
                super(crb, masm, keyTargets, defaultTarget);
                this.keyRegister = keyRegister;
                this.crb = crb;
                this.masm = masm;
            }

            protected void emitComparison(Constant c) {
                JavaConstant jc = (JavaConstant) c;
                ConstantValue constVal = new ConstantValue(LIRKind.value(key.getPlatformKind()), c);
                switch (jc.getJavaKind()) {
                    case Int:
                        long lc = jc.asLong();
                        assert NumUtil.isInt(lc);
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    case Long:
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    case Object:
                        emitCompare(crb, masm, key, scratch, constVal);
                        break;
                    default:
                        throw new GraalError("switch only supported for int, long and object");
                }
            }

            @Override
            protected void conditionalJump(int index, Condition condition, Label target) {
                emitComparison(keyConstants[index]);
                masm.branchConditionally(converter.apply(condition), target);
            }
        }
    }

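    /**
     * Switch emitted as a jump table: {@code index - lowKey} selects one of a contiguous block of
     * unconditional branches (one 4-byte instruction per key); out-of-range indices branch to the
     * default target.
     */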
    public static final class TableSwitchOp extends AArch64BlockEndOp {
        public static final LIRInstructionClass<TableSwitchOp> TYPE = LIRInstructionClass.create(TableSwitchOp.class);
        private final int lowKey;
        private final LabelRef defaultTarget;
        private final LabelRef[] targets;
        @Use protected Value index;
        @Temp({REG, HINT}) protected Value idxScratch;
        @Temp protected Value scratch;

        public TableSwitchOp(final int lowKey, final LabelRef defaultTarget, final LabelRef[] targets, Value index, Variable scratch, Variable idxScratch) {
            super(TYPE);
            this.lowKey = lowKey;
            this.defaultTarget = defaultTarget;
            this.targets = targets;
            this.index = index;
            this.scratch = scratch;
            this.idxScratch = idxScratch;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
            Register indexReg = asRegister(index, AArch64Kind.DWORD);
            Register idxScratchReg = asRegister(idxScratch, AArch64Kind.DWORD);
            Register scratchReg = asRegister(scratch, AArch64Kind.QWORD);

            // Compare index against jump table bounds
            int highKey = lowKey + targets.length - 1;
            masm.sub(32, idxScratchReg, indexReg, lowKey);
            masm.cmp(32, idxScratchReg, highKey - lowKey);

            // Jump to default target if index is not within the jump table
            if (defaultTarget != null) {
                masm.branchConditionally(ConditionFlag.HI, defaultTarget.label());
            }

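            // Load the address of the jump table and add the zero-extended index scaled by 4
            // (each table entry is a single 4-byte branch instruction), then jump through it.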
            Label jumpTable = new Label();
            masm.adr(scratchReg, jumpTable);
            masm.add(64, scratchReg, scratchReg, idxScratchReg, ExtendType.UXTW, 2);
            masm.jmp(scratchReg);
            masm.bind(jumpTable);
            // emit jump table entries
            for (LabelRef target : targets) {
                masm.jmp(target.label());
            }
            JumpTable jt = new JumpTable(jumpTable.position(), lowKey, highKey - 1, 4);
            crb.compilationResult.addAnnotation(jt);
        }
    }

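    /**
     * Compares the switch key against a constant, using the immediate form of the compare when the
     * constant fits and otherwise materializing it into the scratch register first.
     */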
    private static void emitCompare(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value key, Value scratchValue, ConstantValue c) {
        long imm = c.getJavaConstant().asLong();
        final int size = key.getPlatformKind().getSizeInBytes() * Byte.SIZE;
        if (AArch64MacroAssembler.isComparisonImmediate(imm)) {
            masm.cmp(size, asRegister(key), (int) imm);
        } else {
            AArch64Move.move(crb, masm, asAllocatableValue(scratchValue), c);
            masm.cmp(size, asRegister(key), asRegister(scratchValue));
        }
    }

}