1 //===-- ARMExpandPseudoInsts.cpp - Expand pseudo instructions -------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file contains a pass that expands pseudo instructions into target
10 // instructions to allow proper scheduling, if-conversion, and other late
11 // optimizations. This pass should be run after register allocation but before
12 // the post-regalloc scheduling pass.
13 //
14 //===----------------------------------------------------------------------===//
15
16 #include "ARM.h"
17 #include "ARMBaseInstrInfo.h"
18 #include "ARMBaseRegisterInfo.h"
19 #include "ARMConstantPoolValue.h"
20 #include "ARMMachineFunctionInfo.h"
21 #include "ARMSubtarget.h"
22 #include "MCTargetDesc/ARMAddressingModes.h"
23 #include "llvm/CodeGen/LivePhysRegs.h"
24 #include "llvm/CodeGen/MachineFrameInfo.h"
25 #include "llvm/CodeGen/MachineFunctionPass.h"
26 #include "llvm/Support/Debug.h"
27
28 using namespace llvm;
29
30 #define DEBUG_TYPE "arm-pseudo"
31
32 static cl::opt<bool>
33 VerifyARMPseudo("verify-arm-pseudo-expand", cl::Hidden,
34 cl::desc("Verify machine code after expanding ARM pseudos"));
35
36 #define ARM_EXPAND_PSEUDO_NAME "ARM pseudo instruction expansion pass"
37
38 namespace {
39 class ARMExpandPseudo : public MachineFunctionPass {
40 public:
41 static char ID;
42 ARMExpandPseudo() : MachineFunctionPass(ID) {}
43
44 const ARMBaseInstrInfo *TII;
45 const TargetRegisterInfo *TRI;
46 const ARMSubtarget *STI;
47 ARMFunctionInfo *AFI;
48
49 bool runOnMachineFunction(MachineFunction &Fn) override;
50
51 MachineFunctionProperties getRequiredProperties() const override {
52 return MachineFunctionProperties().set(
53 MachineFunctionProperties::Property::NoVRegs);
54 }
55
56 StringRef getPassName() const override {
57 return ARM_EXPAND_PSEUDO_NAME;
58 }
59
60 private:
61 void TransferImpOps(MachineInstr &OldMI,
62 MachineInstrBuilder &UseMI, MachineInstrBuilder &DefMI);
63 bool ExpandMI(MachineBasicBlock &MBB,
64 MachineBasicBlock::iterator MBBI,
65 MachineBasicBlock::iterator &NextMBBI);
66 bool ExpandMBB(MachineBasicBlock &MBB);
67 void ExpandVLD(MachineBasicBlock::iterator &MBBI);
68 void ExpandVST(MachineBasicBlock::iterator &MBBI);
69 void ExpandLaneOp(MachineBasicBlock::iterator &MBBI);
70 void ExpandVTBL(MachineBasicBlock::iterator &MBBI,
71 unsigned Opc, bool IsExt);
72 void ExpandMOV32BitImm(MachineBasicBlock &MBB,
73 MachineBasicBlock::iterator &MBBI);
74 void CMSEClearGPRegs(MachineBasicBlock &MBB,
75 MachineBasicBlock::iterator MBBI, const DebugLoc &DL,
76 const SmallVectorImpl<unsigned> &ClearRegs,
77 unsigned ClobberReg);
78 MachineBasicBlock &CMSEClearFPRegs(MachineBasicBlock &MBB,
79 MachineBasicBlock::iterator MBBI);
80 MachineBasicBlock &CMSEClearFPRegsV8(MachineBasicBlock &MBB,
81 MachineBasicBlock::iterator MBBI,
82 const BitVector &ClearRegs);
83 MachineBasicBlock &CMSEClearFPRegsV81(MachineBasicBlock &MBB,
84 MachineBasicBlock::iterator MBBI,
85 const BitVector &ClearRegs);
86 void CMSESaveClearFPRegs(MachineBasicBlock &MBB,
87 MachineBasicBlock::iterator MBBI, DebugLoc &DL,
88 const LivePhysRegs &LiveRegs,
89 SmallVectorImpl<unsigned> &AvailableRegs);
90 void CMSESaveClearFPRegsV8(MachineBasicBlock &MBB,
91 MachineBasicBlock::iterator MBBI, DebugLoc &DL,
92 const LivePhysRegs &LiveRegs,
93 SmallVectorImpl<unsigned> &ScratchRegs);
94 void CMSESaveClearFPRegsV81(MachineBasicBlock &MBB,
95 MachineBasicBlock::iterator MBBI, DebugLoc &DL,
96 const LivePhysRegs &LiveRegs);
97 void CMSERestoreFPRegs(MachineBasicBlock &MBB,
98 MachineBasicBlock::iterator MBBI, DebugLoc &DL,
99 SmallVectorImpl<unsigned> &AvailableRegs);
100 void CMSERestoreFPRegsV8(MachineBasicBlock &MBB,
101 MachineBasicBlock::iterator MBBI, DebugLoc &DL,
102 SmallVectorImpl<unsigned> &AvailableRegs);
103 void CMSERestoreFPRegsV81(MachineBasicBlock &MBB,
104 MachineBasicBlock::iterator MBBI, DebugLoc &DL,
105 SmallVectorImpl<unsigned> &AvailableRegs);
106 bool ExpandCMP_SWAP(MachineBasicBlock &MBB,
107 MachineBasicBlock::iterator MBBI, unsigned LdrexOp,
108 unsigned StrexOp, unsigned UxtOp,
109 MachineBasicBlock::iterator &NextMBBI);
110
111 bool ExpandCMP_SWAP_64(MachineBasicBlock &MBB,
112 MachineBasicBlock::iterator MBBI,
113 MachineBasicBlock::iterator &NextMBBI);
114 };
115 char ARMExpandPseudo::ID = 0;
116 }
117
118 INITIALIZE_PASS(ARMExpandPseudo, DEBUG_TYPE, ARM_EXPAND_PSEUDO_NAME, false,
119 false)
120
121 /// TransferImpOps - Transfer implicit operands on the pseudo instruction to
122 /// the instructions created from the expansion.
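/// The implicit operands are those appended past the fixed operands described
/// by the MCInstrDesc; uses are attached to UseMI and defs to DefMI.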
123 void ARMExpandPseudo::TransferImpOps(MachineInstr &OldMI,
124 MachineInstrBuilder &UseMI,
125 MachineInstrBuilder &DefMI) {
126 const MCInstrDesc &Desc = OldMI.getDesc();
127 for (unsigned i = Desc.getNumOperands(), e = OldMI.getNumOperands();
128 i != e; ++i) {
129 const MachineOperand &MO = OldMI.getOperand(i);
130 assert(MO.isReg() && MO.getReg());
131 if (MO.isUse())
132 UseMI.add(MO);
133 else
134 DefMI.add(MO);
135 }
136 }
137
138 namespace {
139 // Constants for register spacing in NEON load/store instructions.
140 // For quad-register load-lane and store-lane pseudo instructions, the
141 // spacing is initially assumed to be EvenDblSpc, and that is changed to
142 // OddDblSpc depending on the lane number operand.
143 enum NEONRegSpacing {
144 SingleSpc,
145 SingleLowSpc, // Single spacing, low registers, three and four vectors.
146 SingleHighQSpc, // Single spacing, high registers, four vectors.
147 SingleHighTSpc, // Single spacing, high registers, three vectors.
148 EvenDblSpc,
149 OddDblSpc
150 };
151
152 // Entries for the NEON load/store information table. The table is sorted by
153 // PseudoOpc for fast binary-search lookups.
154 struct NEONLdStTableEntry {
155 uint16_t PseudoOpc;
156 uint16_t RealOpc;
157 bool IsLoad;
158 bool isUpdating;
159 bool hasWritebackOperand;
160 uint8_t RegSpacing; // One of type NEONRegSpacing
161 uint8_t NumRegs; // D registers loaded or stored
162 uint8_t RegElts; // elements per D register; used for lane ops
163 // FIXME: Temporary flag to denote whether the real instruction takes
164 // a single register (like the encoding) or all of the registers in
165 // the list (like the asm syntax and the isel DAG). When all definitions
166 // are converted to take only the single encoded register, this will
167 // go away.
168 bool copyAllListRegs;
169
170 // Comparison methods for binary search of the table.
171 bool operator<(const NEONLdStTableEntry &TE) const {
172 return PseudoOpc < TE.PseudoOpc;
173 }
174 friend bool operator<(const NEONLdStTableEntry &TE, unsigned PseudoOpc) {
175 return TE.PseudoOpc < PseudoOpc;
176 }
177 friend bool LLVM_ATTRIBUTE_UNUSED operator<(unsigned PseudoOpc,
178 const NEONLdStTableEntry &TE) {
179 return PseudoOpc < TE.PseudoOpc;
180 }
181 };
182 }
183
184 static const NEONLdStTableEntry NEONLdStTable[] = {
185 { ARM::VLD1LNq16Pseudo, ARM::VLD1LNd16, true, false, false, EvenDblSpc, 1, 4 ,true},
186 { ARM::VLD1LNq16Pseudo_UPD, ARM::VLD1LNd16_UPD, true, true, true, EvenDblSpc, 1, 4 ,true},
187 { ARM::VLD1LNq32Pseudo, ARM::VLD1LNd32, true, false, false, EvenDblSpc, 1, 2 ,true},
188 { ARM::VLD1LNq32Pseudo_UPD, ARM::VLD1LNd32_UPD, true, true, true, EvenDblSpc, 1, 2 ,true},
189 { ARM::VLD1LNq8Pseudo, ARM::VLD1LNd8, true, false, false, EvenDblSpc, 1, 8 ,true},
190 { ARM::VLD1LNq8Pseudo_UPD, ARM::VLD1LNd8_UPD, true, true, true, EvenDblSpc, 1, 8 ,true},
191
192 { ARM::VLD1d16QPseudo, ARM::VLD1d16Q, true, false, false, SingleSpc, 4, 4 ,false},
193 { ARM::VLD1d16QPseudoWB_fixed, ARM::VLD1d16Qwb_fixed, true, true, false, SingleSpc, 4, 4 ,false},
194 { ARM::VLD1d16QPseudoWB_register, ARM::VLD1d16Qwb_register, true, true, true, SingleSpc, 4, 4 ,false},
195 { ARM::VLD1d16TPseudo, ARM::VLD1d16T, true, false, false, SingleSpc, 3, 4 ,false},
196 { ARM::VLD1d16TPseudoWB_fixed, ARM::VLD1d16Twb_fixed, true, true, false, SingleSpc, 3, 4 ,false},
197 { ARM::VLD1d16TPseudoWB_register, ARM::VLD1d16Twb_register, true, true, true, SingleSpc, 3, 4 ,false},
198
199 { ARM::VLD1d32QPseudo, ARM::VLD1d32Q, true, false, false, SingleSpc, 4, 2 ,false},
200 { ARM::VLD1d32QPseudoWB_fixed, ARM::VLD1d32Qwb_fixed, true, true, false, SingleSpc, 4, 2 ,false},
201 { ARM::VLD1d32QPseudoWB_register, ARM::VLD1d32Qwb_register, true, true, true, SingleSpc, 4, 2 ,false},
202 { ARM::VLD1d32TPseudo, ARM::VLD1d32T, true, false, false, SingleSpc, 3, 2 ,false},
203 { ARM::VLD1d32TPseudoWB_fixed, ARM::VLD1d32Twb_fixed, true, true, false, SingleSpc, 3, 2 ,false},
204 { ARM::VLD1d32TPseudoWB_register, ARM::VLD1d32Twb_register, true, true, true, SingleSpc, 3, 2 ,false},
205
206 { ARM::VLD1d64QPseudo, ARM::VLD1d64Q, true, false, false, SingleSpc, 4, 1 ,false},
207 { ARM::VLD1d64QPseudoWB_fixed, ARM::VLD1d64Qwb_fixed, true, true, false, SingleSpc, 4, 1 ,false},
208 { ARM::VLD1d64QPseudoWB_register, ARM::VLD1d64Qwb_register, true, true, true, SingleSpc, 4, 1 ,false},
209 { ARM::VLD1d64TPseudo, ARM::VLD1d64T, true, false, false, SingleSpc, 3, 1 ,false},
210 { ARM::VLD1d64TPseudoWB_fixed, ARM::VLD1d64Twb_fixed, true, true, false, SingleSpc, 3, 1 ,false},
211 { ARM::VLD1d64TPseudoWB_register, ARM::VLD1d64Twb_register, true, true, true, SingleSpc, 3, 1 ,false},
212
213 { ARM::VLD1d8QPseudo, ARM::VLD1d8Q, true, false, false, SingleSpc, 4, 8 ,false},
214 { ARM::VLD1d8QPseudoWB_fixed, ARM::VLD1d8Qwb_fixed, true, true, false, SingleSpc, 4, 8 ,false},
215 { ARM::VLD1d8QPseudoWB_register, ARM::VLD1d8Qwb_register, true, true, true, SingleSpc, 4, 8 ,false},
216 { ARM::VLD1d8TPseudo, ARM::VLD1d8T, true, false, false, SingleSpc, 3, 8 ,false},
217 { ARM::VLD1d8TPseudoWB_fixed, ARM::VLD1d8Twb_fixed, true, true, false, SingleSpc, 3, 8 ,false},
218 { ARM::VLD1d8TPseudoWB_register, ARM::VLD1d8Twb_register, true, true, true, SingleSpc, 3, 8 ,false},
219
220 { ARM::VLD1q16HighQPseudo, ARM::VLD1d16Q, true, false, false, SingleHighQSpc, 4, 4 ,false},
221 { ARM::VLD1q16HighQPseudo_UPD, ARM::VLD1d16Qwb_fixed, true, true, true, SingleHighQSpc, 4, 4 ,false},
222 { ARM::VLD1q16HighTPseudo, ARM::VLD1d16T, true, false, false, SingleHighTSpc, 3, 4 ,false},
223 { ARM::VLD1q16HighTPseudo_UPD, ARM::VLD1d16Twb_fixed, true, true, true, SingleHighTSpc, 3, 4 ,false},
224 { ARM::VLD1q16LowQPseudo_UPD, ARM::VLD1d16Qwb_fixed, true, true, true, SingleLowSpc, 4, 4 ,false},
225 { ARM::VLD1q16LowTPseudo_UPD, ARM::VLD1d16Twb_fixed, true, true, true, SingleLowSpc, 3, 4 ,false},
226
227 { ARM::VLD1q32HighQPseudo, ARM::VLD1d32Q, true, false, false, SingleHighQSpc, 4, 2 ,false},
228 { ARM::VLD1q32HighQPseudo_UPD, ARM::VLD1d32Qwb_fixed, true, true, true, SingleHighQSpc, 4, 2 ,false},
229 { ARM::VLD1q32HighTPseudo, ARM::VLD1d32T, true, false, false, SingleHighTSpc, 3, 2 ,false},
230 { ARM::VLD1q32HighTPseudo_UPD, ARM::VLD1d32Twb_fixed, true, true, true, SingleHighTSpc, 3, 2 ,false},
231 { ARM::VLD1q32LowQPseudo_UPD, ARM::VLD1d32Qwb_fixed, true, true, true, SingleLowSpc, 4, 2 ,false},
232 { ARM::VLD1q32LowTPseudo_UPD, ARM::VLD1d32Twb_fixed, true, true, true, SingleLowSpc, 3, 2 ,false},
233
234 { ARM::VLD1q64HighQPseudo, ARM::VLD1d64Q, true, false, false, SingleHighQSpc, 4, 1 ,false},
235 { ARM::VLD1q64HighQPseudo_UPD, ARM::VLD1d64Qwb_fixed, true, true, true, SingleHighQSpc, 4, 1 ,false},
236 { ARM::VLD1q64HighTPseudo, ARM::VLD1d64T, true, false, false, SingleHighTSpc, 3, 1 ,false},
237 { ARM::VLD1q64HighTPseudo_UPD, ARM::VLD1d64Twb_fixed, true, true, true, SingleHighTSpc, 3, 1 ,false},
238 { ARM::VLD1q64LowQPseudo_UPD, ARM::VLD1d64Qwb_fixed, true, true, true, SingleLowSpc, 4, 1 ,false},
239 { ARM::VLD1q64LowTPseudo_UPD, ARM::VLD1d64Twb_fixed, true, true, true, SingleLowSpc, 3, 1 ,false},
240
241 { ARM::VLD1q8HighQPseudo, ARM::VLD1d8Q, true, false, false, SingleHighQSpc, 4, 8 ,false},
242 { ARM::VLD1q8HighQPseudo_UPD, ARM::VLD1d8Qwb_fixed, true, true, true, SingleHighQSpc, 4, 8 ,false},
243 { ARM::VLD1q8HighTPseudo, ARM::VLD1d8T, true, false, false, SingleHighTSpc, 3, 8 ,false},
244 { ARM::VLD1q8HighTPseudo_UPD, ARM::VLD1d8Twb_fixed, true, true, true, SingleHighTSpc, 3, 8 ,false},
245 { ARM::VLD1q8LowQPseudo_UPD, ARM::VLD1d8Qwb_fixed, true, true, true, SingleLowSpc, 4, 8 ,false},
246 { ARM::VLD1q8LowTPseudo_UPD, ARM::VLD1d8Twb_fixed, true, true, true, SingleLowSpc, 3, 8 ,false},
247
248 { ARM::VLD2DUPq16EvenPseudo, ARM::VLD2DUPd16x2, true, false, false, EvenDblSpc, 2, 4 ,false},
249 { ARM::VLD2DUPq16OddPseudo, ARM::VLD2DUPd16x2, true, false, false, OddDblSpc, 2, 4 ,false},
250 { ARM::VLD2DUPq16OddPseudoWB_fixed, ARM::VLD2DUPd16x2wb_fixed, true, true, false, OddDblSpc, 2, 4 ,false},
251 { ARM::VLD2DUPq16OddPseudoWB_register, ARM::VLD2DUPd16x2wb_register, true, true, true, OddDblSpc, 2, 4 ,false},
252 { ARM::VLD2DUPq32EvenPseudo, ARM::VLD2DUPd32x2, true, false, false, EvenDblSpc, 2, 2 ,false},
253 { ARM::VLD2DUPq32OddPseudo, ARM::VLD2DUPd32x2, true, false, false, OddDblSpc, 2, 2 ,false},
254 { ARM::VLD2DUPq32OddPseudoWB_fixed, ARM::VLD2DUPd32x2wb_fixed, true, true, false, OddDblSpc, 2, 2 ,false},
255 { ARM::VLD2DUPq32OddPseudoWB_register, ARM::VLD2DUPd32x2wb_register, true, true, true, OddDblSpc, 2, 2 ,false},
256 { ARM::VLD2DUPq8EvenPseudo, ARM::VLD2DUPd8x2, true, false, false, EvenDblSpc, 2, 8 ,false},
257 { ARM::VLD2DUPq8OddPseudo, ARM::VLD2DUPd8x2, true, false, false, OddDblSpc, 2, 8 ,false},
258 { ARM::VLD2DUPq8OddPseudoWB_fixed, ARM::VLD2DUPd8x2wb_fixed, true, true, false, OddDblSpc, 2, 8 ,false},
259 { ARM::VLD2DUPq8OddPseudoWB_register, ARM::VLD2DUPd8x2wb_register, true, true, true, OddDblSpc, 2, 8 ,false},
260
261 { ARM::VLD2LNd16Pseudo, ARM::VLD2LNd16, true, false, false, SingleSpc, 2, 4 ,true},
262 { ARM::VLD2LNd16Pseudo_UPD, ARM::VLD2LNd16_UPD, true, true, true, SingleSpc, 2, 4 ,true},
263 { ARM::VLD2LNd32Pseudo, ARM::VLD2LNd32, true, false, false, SingleSpc, 2, 2 ,true},
264 { ARM::VLD2LNd32Pseudo_UPD, ARM::VLD2LNd32_UPD, true, true, true, SingleSpc, 2, 2 ,true},
265 { ARM::VLD2LNd8Pseudo, ARM::VLD2LNd8, true, false, false, SingleSpc, 2, 8 ,true},
266 { ARM::VLD2LNd8Pseudo_UPD, ARM::VLD2LNd8_UPD, true, true, true, SingleSpc, 2, 8 ,true},
267 { ARM::VLD2LNq16Pseudo, ARM::VLD2LNq16, true, false, false, EvenDblSpc, 2, 4 ,true},
268 { ARM::VLD2LNq16Pseudo_UPD, ARM::VLD2LNq16_UPD, true, true, true, EvenDblSpc, 2, 4 ,true},
269 { ARM::VLD2LNq32Pseudo, ARM::VLD2LNq32, true, false, false, EvenDblSpc, 2, 2 ,true},
270 { ARM::VLD2LNq32Pseudo_UPD, ARM::VLD2LNq32_UPD, true, true, true, EvenDblSpc, 2, 2 ,true},
271
272 { ARM::VLD2q16Pseudo, ARM::VLD2q16, true, false, false, SingleSpc, 4, 4 ,false},
273 { ARM::VLD2q16PseudoWB_fixed, ARM::VLD2q16wb_fixed, true, true, false, SingleSpc, 4, 4 ,false},
274 { ARM::VLD2q16PseudoWB_register, ARM::VLD2q16wb_register, true, true, true, SingleSpc, 4, 4 ,false},
275 { ARM::VLD2q32Pseudo, ARM::VLD2q32, true, false, false, SingleSpc, 4, 2 ,false},
276 { ARM::VLD2q32PseudoWB_fixed, ARM::VLD2q32wb_fixed, true, true, false, SingleSpc, 4, 2 ,false},
277 { ARM::VLD2q32PseudoWB_register, ARM::VLD2q32wb_register, true, true, true, SingleSpc, 4, 2 ,false},
278 { ARM::VLD2q8Pseudo, ARM::VLD2q8, true, false, false, SingleSpc, 4, 8 ,false},
279 { ARM::VLD2q8PseudoWB_fixed, ARM::VLD2q8wb_fixed, true, true, false, SingleSpc, 4, 8 ,false},
280 { ARM::VLD2q8PseudoWB_register, ARM::VLD2q8wb_register, true, true, true, SingleSpc, 4, 8 ,false},
281
282 { ARM::VLD3DUPd16Pseudo, ARM::VLD3DUPd16, true, false, false, SingleSpc, 3, 4,true},
283 { ARM::VLD3DUPd16Pseudo_UPD, ARM::VLD3DUPd16_UPD, true, true, true, SingleSpc, 3, 4,true},
284 { ARM::VLD3DUPd32Pseudo, ARM::VLD3DUPd32, true, false, false, SingleSpc, 3, 2,true},
285 { ARM::VLD3DUPd32Pseudo_UPD, ARM::VLD3DUPd32_UPD, true, true, true, SingleSpc, 3, 2,true},
286 { ARM::VLD3DUPd8Pseudo, ARM::VLD3DUPd8, true, false, false, SingleSpc, 3, 8,true},
287 { ARM::VLD3DUPd8Pseudo_UPD, ARM::VLD3DUPd8_UPD, true, true, true, SingleSpc, 3, 8,true},
288 { ARM::VLD3DUPq16EvenPseudo, ARM::VLD3DUPq16, true, false, false, EvenDblSpc, 3, 4 ,true},
289 { ARM::VLD3DUPq16OddPseudo, ARM::VLD3DUPq16, true, false, false, OddDblSpc, 3, 4 ,true},
290 { ARM::VLD3DUPq16OddPseudo_UPD, ARM::VLD3DUPq16_UPD, true, true, true, OddDblSpc, 3, 4 ,true},
291 { ARM::VLD3DUPq32EvenPseudo, ARM::VLD3DUPq32, true, false, false, EvenDblSpc, 3, 2 ,true},
292 { ARM::VLD3DUPq32OddPseudo, ARM::VLD3DUPq32, true, false, false, OddDblSpc, 3, 2 ,true},
293 { ARM::VLD3DUPq32OddPseudo_UPD, ARM::VLD3DUPq32_UPD, true, true, true, OddDblSpc, 3, 2 ,true},
294 { ARM::VLD3DUPq8EvenPseudo, ARM::VLD3DUPq8, true, false, false, EvenDblSpc, 3, 8 ,true},
295 { ARM::VLD3DUPq8OddPseudo, ARM::VLD3DUPq8, true, false, false, OddDblSpc, 3, 8 ,true},
296 { ARM::VLD3DUPq8OddPseudo_UPD, ARM::VLD3DUPq8_UPD, true, true, true, OddDblSpc, 3, 8 ,true},
297
298 { ARM::VLD3LNd16Pseudo, ARM::VLD3LNd16, true, false, false, SingleSpc, 3, 4 ,true},
299 { ARM::VLD3LNd16Pseudo_UPD, ARM::VLD3LNd16_UPD, true, true, true, SingleSpc, 3, 4 ,true},
300 { ARM::VLD3LNd32Pseudo, ARM::VLD3LNd32, true, false, false, SingleSpc, 3, 2 ,true},
301 { ARM::VLD3LNd32Pseudo_UPD, ARM::VLD3LNd32_UPD, true, true, true, SingleSpc, 3, 2 ,true},
302 { ARM::VLD3LNd8Pseudo, ARM::VLD3LNd8, true, false, false, SingleSpc, 3, 8 ,true},
303 { ARM::VLD3LNd8Pseudo_UPD, ARM::VLD3LNd8_UPD, true, true, true, SingleSpc, 3, 8 ,true},
304 { ARM::VLD3LNq16Pseudo, ARM::VLD3LNq16, true, false, false, EvenDblSpc, 3, 4 ,true},
305 { ARM::VLD3LNq16Pseudo_UPD, ARM::VLD3LNq16_UPD, true, true, true, EvenDblSpc, 3, 4 ,true},
306 { ARM::VLD3LNq32Pseudo, ARM::VLD3LNq32, true, false, false, EvenDblSpc, 3, 2 ,true},
307 { ARM::VLD3LNq32Pseudo_UPD, ARM::VLD3LNq32_UPD, true, true, true, EvenDblSpc, 3, 2 ,true},
308
309 { ARM::VLD3d16Pseudo, ARM::VLD3d16, true, false, false, SingleSpc, 3, 4 ,true},
310 { ARM::VLD3d16Pseudo_UPD, ARM::VLD3d16_UPD, true, true, true, SingleSpc, 3, 4 ,true},
311 { ARM::VLD3d32Pseudo, ARM::VLD3d32, true, false, false, SingleSpc, 3, 2 ,true},
312 { ARM::VLD3d32Pseudo_UPD, ARM::VLD3d32_UPD, true, true, true, SingleSpc, 3, 2 ,true},
313 { ARM::VLD3d8Pseudo, ARM::VLD3d8, true, false, false, SingleSpc, 3, 8 ,true},
314 { ARM::VLD3d8Pseudo_UPD, ARM::VLD3d8_UPD, true, true, true, SingleSpc, 3, 8 ,true},
315
316 { ARM::VLD3q16Pseudo_UPD, ARM::VLD3q16_UPD, true, true, true, EvenDblSpc, 3, 4 ,true},
317 { ARM::VLD3q16oddPseudo, ARM::VLD3q16, true, false, false, OddDblSpc, 3, 4 ,true},
318 { ARM::VLD3q16oddPseudo_UPD, ARM::VLD3q16_UPD, true, true, true, OddDblSpc, 3, 4 ,true},
319 { ARM::VLD3q32Pseudo_UPD, ARM::VLD3q32_UPD, true, true, true, EvenDblSpc, 3, 2 ,true},
320 { ARM::VLD3q32oddPseudo, ARM::VLD3q32, true, false, false, OddDblSpc, 3, 2 ,true},
321 { ARM::VLD3q32oddPseudo_UPD, ARM::VLD3q32_UPD, true, true, true, OddDblSpc, 3, 2 ,true},
322 { ARM::VLD3q8Pseudo_UPD, ARM::VLD3q8_UPD, true, true, true, EvenDblSpc, 3, 8 ,true},
323 { ARM::VLD3q8oddPseudo, ARM::VLD3q8, true, false, false, OddDblSpc, 3, 8 ,true},
324 { ARM::VLD3q8oddPseudo_UPD, ARM::VLD3q8_UPD, true, true, true, OddDblSpc, 3, 8 ,true},
325
326 { ARM::VLD4DUPd16Pseudo, ARM::VLD4DUPd16, true, false, false, SingleSpc, 4, 4,true},
327 { ARM::VLD4DUPd16Pseudo_UPD, ARM::VLD4DUPd16_UPD, true, true, true, SingleSpc, 4, 4,true},
328 { ARM::VLD4DUPd32Pseudo, ARM::VLD4DUPd32, true, false, false, SingleSpc, 4, 2,true},
329 { ARM::VLD4DUPd32Pseudo_UPD, ARM::VLD4DUPd32_UPD, true, true, true, SingleSpc, 4, 2,true},
330 { ARM::VLD4DUPd8Pseudo, ARM::VLD4DUPd8, true, false, false, SingleSpc, 4, 8,true},
331 { ARM::VLD4DUPd8Pseudo_UPD, ARM::VLD4DUPd8_UPD, true, true, true, SingleSpc, 4, 8,true},
332 { ARM::VLD4DUPq16EvenPseudo, ARM::VLD4DUPq16, true, false, false, EvenDblSpc, 4, 4 ,true},
333 { ARM::VLD4DUPq16OddPseudo, ARM::VLD4DUPq16, true, false, false, OddDblSpc, 4, 4 ,true},
334 { ARM::VLD4DUPq16OddPseudo_UPD, ARM::VLD4DUPq16_UPD, true, true, true, OddDblSpc, 4, 4 ,true},
335 { ARM::VLD4DUPq32EvenPseudo, ARM::VLD4DUPq32, true, false, false, EvenDblSpc, 4, 2 ,true},
336 { ARM::VLD4DUPq32OddPseudo, ARM::VLD4DUPq32, true, false, false, OddDblSpc, 4, 2 ,true},
337 { ARM::VLD4DUPq32OddPseudo_UPD, ARM::VLD4DUPq32_UPD, true, true, true, OddDblSpc, 4, 2 ,true},
338 { ARM::VLD4DUPq8EvenPseudo, ARM::VLD4DUPq8, true, false, false, EvenDblSpc, 4, 8 ,true},
339 { ARM::VLD4DUPq8OddPseudo, ARM::VLD4DUPq8, true, false, false, OddDblSpc, 4, 8 ,true},
340 { ARM::VLD4DUPq8OddPseudo_UPD, ARM::VLD4DUPq8_UPD, true, true, true, OddDblSpc, 4, 8 ,true},
341
342 { ARM::VLD4LNd16Pseudo, ARM::VLD4LNd16, true, false, false, SingleSpc, 4, 4 ,true},
343 { ARM::VLD4LNd16Pseudo_UPD, ARM::VLD4LNd16_UPD, true, true, true, SingleSpc, 4, 4 ,true},
344 { ARM::VLD4LNd32Pseudo, ARM::VLD4LNd32, true, false, false, SingleSpc, 4, 2 ,true},
345 { ARM::VLD4LNd32Pseudo_UPD, ARM::VLD4LNd32_UPD, true, true, true, SingleSpc, 4, 2 ,true},
346 { ARM::VLD4LNd8Pseudo, ARM::VLD4LNd8, true, false, false, SingleSpc, 4, 8 ,true},
347 { ARM::VLD4LNd8Pseudo_UPD, ARM::VLD4LNd8_UPD, true, true, true, SingleSpc, 4, 8 ,true},
348 { ARM::VLD4LNq16Pseudo, ARM::VLD4LNq16, true, false, false, EvenDblSpc, 4, 4 ,true},
349 { ARM::VLD4LNq16Pseudo_UPD, ARM::VLD4LNq16_UPD, true, true, true, EvenDblSpc, 4, 4 ,true},
350 { ARM::VLD4LNq32Pseudo, ARM::VLD4LNq32, true, false, false, EvenDblSpc, 4, 2 ,true},
351 { ARM::VLD4LNq32Pseudo_UPD, ARM::VLD4LNq32_UPD, true, true, true, EvenDblSpc, 4, 2 ,true},
352
353 { ARM::VLD4d16Pseudo, ARM::VLD4d16, true, false, false, SingleSpc, 4, 4 ,true},
354 { ARM::VLD4d16Pseudo_UPD, ARM::VLD4d16_UPD, true, true, true, SingleSpc, 4, 4 ,true},
355 { ARM::VLD4d32Pseudo, ARM::VLD4d32, true, false, false, SingleSpc, 4, 2 ,true},
356 { ARM::VLD4d32Pseudo_UPD, ARM::VLD4d32_UPD, true, true, true, SingleSpc, 4, 2 ,true},
357 { ARM::VLD4d8Pseudo, ARM::VLD4d8, true, false, false, SingleSpc, 4, 8 ,true},
358 { ARM::VLD4d8Pseudo_UPD, ARM::VLD4d8_UPD, true, true, true, SingleSpc, 4, 8 ,true},
359
360 { ARM::VLD4q16Pseudo_UPD, ARM::VLD4q16_UPD, true, true, true, EvenDblSpc, 4, 4 ,true},
361 { ARM::VLD4q16oddPseudo, ARM::VLD4q16, true, false, false, OddDblSpc, 4, 4 ,true},
362 { ARM::VLD4q16oddPseudo_UPD, ARM::VLD4q16_UPD, true, true, true, OddDblSpc, 4, 4 ,true},
363 { ARM::VLD4q32Pseudo_UPD, ARM::VLD4q32_UPD, true, true, true, EvenDblSpc, 4, 2 ,true},
364 { ARM::VLD4q32oddPseudo, ARM::VLD4q32, true, false, false, OddDblSpc, 4, 2 ,true},
365 { ARM::VLD4q32oddPseudo_UPD, ARM::VLD4q32_UPD, true, true, true, OddDblSpc, 4, 2 ,true},
366 { ARM::VLD4q8Pseudo_UPD, ARM::VLD4q8_UPD, true, true, true, EvenDblSpc, 4, 8 ,true},
367 { ARM::VLD4q8oddPseudo, ARM::VLD4q8, true, false, false, OddDblSpc, 4, 8 ,true},
368 { ARM::VLD4q8oddPseudo_UPD, ARM::VLD4q8_UPD, true, true, true, OddDblSpc, 4, 8 ,true},
369
370 { ARM::VST1LNq16Pseudo, ARM::VST1LNd16, false, false, false, EvenDblSpc, 1, 4 ,true},
371 { ARM::VST1LNq16Pseudo_UPD, ARM::VST1LNd16_UPD, false, true, true, EvenDblSpc, 1, 4 ,true},
372 { ARM::VST1LNq32Pseudo, ARM::VST1LNd32, false, false, false, EvenDblSpc, 1, 2 ,true},
373 { ARM::VST1LNq32Pseudo_UPD, ARM::VST1LNd32_UPD, false, true, true, EvenDblSpc, 1, 2 ,true},
374 { ARM::VST1LNq8Pseudo, ARM::VST1LNd8, false, false, false, EvenDblSpc, 1, 8 ,true},
375 { ARM::VST1LNq8Pseudo_UPD, ARM::VST1LNd8_UPD, false, true, true, EvenDblSpc, 1, 8 ,true},
376
377 { ARM::VST1d16QPseudo, ARM::VST1d16Q, false, false, false, SingleSpc, 4, 4 ,false},
378 { ARM::VST1d16QPseudoWB_fixed, ARM::VST1d16Qwb_fixed, false, true, false, SingleSpc, 4, 4 ,false},
379 { ARM::VST1d16QPseudoWB_register, ARM::VST1d16Qwb_register, false, true, true, SingleSpc, 4, 4 ,false},
380 { ARM::VST1d16TPseudo, ARM::VST1d16T, false, false, false, SingleSpc, 3, 4 ,false},
381 { ARM::VST1d16TPseudoWB_fixed, ARM::VST1d16Twb_fixed, false, true, false, SingleSpc, 3, 4 ,false},
382 { ARM::VST1d16TPseudoWB_register, ARM::VST1d16Twb_register, false, true, true, SingleSpc, 3, 4 ,false},
383
384 { ARM::VST1d32QPseudo, ARM::VST1d32Q, false, false, false, SingleSpc, 4, 2 ,false},
385 { ARM::VST1d32QPseudoWB_fixed, ARM::VST1d32Qwb_fixed, false, true, false, SingleSpc, 4, 2 ,false},
386 { ARM::VST1d32QPseudoWB_register, ARM::VST1d32Qwb_register, false, true, true, SingleSpc, 4, 2 ,false},
387 { ARM::VST1d32TPseudo, ARM::VST1d32T, false, false, false, SingleSpc, 3, 2 ,false},
388 { ARM::VST1d32TPseudoWB_fixed, ARM::VST1d32Twb_fixed, false, true, false, SingleSpc, 3, 2 ,false},
389 { ARM::VST1d32TPseudoWB_register, ARM::VST1d32Twb_register, false, true, true, SingleSpc, 3, 2 ,false},
390
391 { ARM::VST1d64QPseudo, ARM::VST1d64Q, false, false, false, SingleSpc, 4, 1 ,false},
392 { ARM::VST1d64QPseudoWB_fixed, ARM::VST1d64Qwb_fixed, false, true, false, SingleSpc, 4, 1 ,false},
393 { ARM::VST1d64QPseudoWB_register, ARM::VST1d64Qwb_register, false, true, true, SingleSpc, 4, 1 ,false},
394 { ARM::VST1d64TPseudo, ARM::VST1d64T, false, false, false, SingleSpc, 3, 1 ,false},
395 { ARM::VST1d64TPseudoWB_fixed, ARM::VST1d64Twb_fixed, false, true, false, SingleSpc, 3, 1 ,false},
396 { ARM::VST1d64TPseudoWB_register, ARM::VST1d64Twb_register, false, true, true, SingleSpc, 3, 1 ,false},
397
398 { ARM::VST1d8QPseudo, ARM::VST1d8Q, false, false, false, SingleSpc, 4, 8 ,false},
399 { ARM::VST1d8QPseudoWB_fixed, ARM::VST1d8Qwb_fixed, false, true, false, SingleSpc, 4, 8 ,false},
400 { ARM::VST1d8QPseudoWB_register, ARM::VST1d8Qwb_register, false, true, true, SingleSpc, 4, 8 ,false},
401 { ARM::VST1d8TPseudo, ARM::VST1d8T, false, false, false, SingleSpc, 3, 8 ,false},
402 { ARM::VST1d8TPseudoWB_fixed, ARM::VST1d8Twb_fixed, false, true, false, SingleSpc, 3, 8 ,false},
403 { ARM::VST1d8TPseudoWB_register, ARM::VST1d8Twb_register, false, true, true, SingleSpc, 3, 8 ,false},
404
405 { ARM::VST1q16HighQPseudo, ARM::VST1d16Q, false, false, false, SingleHighQSpc, 4, 4 ,false},
406 { ARM::VST1q16HighQPseudo_UPD, ARM::VST1d16Qwb_fixed, false, true, true, SingleHighQSpc, 4, 8 ,false},
407 { ARM::VST1q16HighTPseudo, ARM::VST1d16T, false, false, false, SingleHighTSpc, 3, 4 ,false},
408 { ARM::VST1q16HighTPseudo_UPD, ARM::VST1d16Twb_fixed, false, true, true, SingleHighTSpc, 3, 4 ,false},
409 { ARM::VST1q16LowQPseudo_UPD, ARM::VST1d16Qwb_fixed, false, true, true, SingleLowSpc, 4, 4 ,false},
410 { ARM::VST1q16LowTPseudo_UPD, ARM::VST1d16Twb_fixed, false, true, true, SingleLowSpc, 3, 4 ,false},
411
412 { ARM::VST1q32HighQPseudo, ARM::VST1d32Q, false, false, false, SingleHighQSpc, 4, 2 ,false},
413 { ARM::VST1q32HighQPseudo_UPD, ARM::VST1d32Qwb_fixed, false, true, true, SingleHighQSpc, 4, 8 ,false},
414 { ARM::VST1q32HighTPseudo, ARM::VST1d32T, false, false, false, SingleHighTSpc, 3, 2 ,false},
415 { ARM::VST1q32HighTPseudo_UPD, ARM::VST1d32Twb_fixed, false, true, true, SingleHighTSpc, 3, 2 ,false},
416 { ARM::VST1q32LowQPseudo_UPD, ARM::VST1d32Qwb_fixed, false, true, true, SingleLowSpc, 4, 2 ,false},
417 { ARM::VST1q32LowTPseudo_UPD, ARM::VST1d32Twb_fixed, false, true, true, SingleLowSpc, 3, 2 ,false},
418
419 { ARM::VST1q64HighQPseudo, ARM::VST1d64Q, false, false, false, SingleHighQSpc, 4, 1 ,false},
420 { ARM::VST1q64HighQPseudo_UPD, ARM::VST1d64Qwb_fixed, false, true, true, SingleHighQSpc, 4, 8 ,false},
421 { ARM::VST1q64HighTPseudo, ARM::VST1d64T, false, false, false, SingleHighTSpc, 3, 1 ,false},
422 { ARM::VST1q64HighTPseudo_UPD, ARM::VST1d64Twb_fixed, false, true, true, SingleHighTSpc, 3, 1 ,false},
423 { ARM::VST1q64LowQPseudo_UPD, ARM::VST1d64Qwb_fixed, false, true, true, SingleLowSpc, 4, 1 ,false},
424 { ARM::VST1q64LowTPseudo_UPD, ARM::VST1d64Twb_fixed, false, true, true, SingleLowSpc, 3, 1 ,false},
425
426 { ARM::VST1q8HighQPseudo, ARM::VST1d8Q, false, false, false, SingleHighQSpc, 4, 8 ,false},
427 { ARM::VST1q8HighQPseudo_UPD, ARM::VST1d8Qwb_fixed, false, true, true, SingleHighQSpc, 4, 8 ,false},
428 { ARM::VST1q8HighTPseudo, ARM::VST1d8T, false, false, false, SingleHighTSpc, 3, 8 ,false},
429 { ARM::VST1q8HighTPseudo_UPD, ARM::VST1d8Twb_fixed, false, true, true, SingleHighTSpc, 3, 8 ,false},
430 { ARM::VST1q8LowQPseudo_UPD, ARM::VST1d8Qwb_fixed, false, true, true, SingleLowSpc, 4, 8 ,false},
431 { ARM::VST1q8LowTPseudo_UPD, ARM::VST1d8Twb_fixed, false, true, true, SingleLowSpc, 3, 8 ,false},
432
433 { ARM::VST2LNd16Pseudo, ARM::VST2LNd16, false, false, false, SingleSpc, 2, 4 ,true},
434 { ARM::VST2LNd16Pseudo_UPD, ARM::VST2LNd16_UPD, false, true, true, SingleSpc, 2, 4 ,true},
435 { ARM::VST2LNd32Pseudo, ARM::VST2LNd32, false, false, false, SingleSpc, 2, 2 ,true},
436 { ARM::VST2LNd32Pseudo_UPD, ARM::VST2LNd32_UPD, false, true, true, SingleSpc, 2, 2 ,true},
437 { ARM::VST2LNd8Pseudo, ARM::VST2LNd8, false, false, false, SingleSpc, 2, 8 ,true},
438 { ARM::VST2LNd8Pseudo_UPD, ARM::VST2LNd8_UPD, false, true, true, SingleSpc, 2, 8 ,true},
439 { ARM::VST2LNq16Pseudo, ARM::VST2LNq16, false, false, false, EvenDblSpc, 2, 4,true},
440 { ARM::VST2LNq16Pseudo_UPD, ARM::VST2LNq16_UPD, false, true, true, EvenDblSpc, 2, 4,true},
441 { ARM::VST2LNq32Pseudo, ARM::VST2LNq32, false, false, false, EvenDblSpc, 2, 2,true},
442 { ARM::VST2LNq32Pseudo_UPD, ARM::VST2LNq32_UPD, false, true, true, EvenDblSpc, 2, 2,true},
443
444 { ARM::VST2q16Pseudo, ARM::VST2q16, false, false, false, SingleSpc, 4, 4 ,false},
445 { ARM::VST2q16PseudoWB_fixed, ARM::VST2q16wb_fixed, false, true, false, SingleSpc, 4, 4 ,false},
446 { ARM::VST2q16PseudoWB_register, ARM::VST2q16wb_register, false, true, true, SingleSpc, 4, 4 ,false},
447 { ARM::VST2q32Pseudo, ARM::VST2q32, false, false, false, SingleSpc, 4, 2 ,false},
448 { ARM::VST2q32PseudoWB_fixed, ARM::VST2q32wb_fixed, false, true, false, SingleSpc, 4, 2 ,false},
449 { ARM::VST2q32PseudoWB_register, ARM::VST2q32wb_register, false, true, true, SingleSpc, 4, 2 ,false},
450 { ARM::VST2q8Pseudo, ARM::VST2q8, false, false, false, SingleSpc, 4, 8 ,false},
451 { ARM::VST2q8PseudoWB_fixed, ARM::VST2q8wb_fixed, false, true, false, SingleSpc, 4, 8 ,false},
452 { ARM::VST2q8PseudoWB_register, ARM::VST2q8wb_register, false, true, true, SingleSpc, 4, 8 ,false},
453
454 { ARM::VST3LNd16Pseudo, ARM::VST3LNd16, false, false, false, SingleSpc, 3, 4 ,true},
455 { ARM::VST3LNd16Pseudo_UPD, ARM::VST3LNd16_UPD, false, true, true, SingleSpc, 3, 4 ,true},
456 { ARM::VST3LNd32Pseudo, ARM::VST3LNd32, false, false, false, SingleSpc, 3, 2 ,true},
457 { ARM::VST3LNd32Pseudo_UPD, ARM::VST3LNd32_UPD, false, true, true, SingleSpc, 3, 2 ,true},
458 { ARM::VST3LNd8Pseudo, ARM::VST3LNd8, false, false, false, SingleSpc, 3, 8 ,true},
459 { ARM::VST3LNd8Pseudo_UPD, ARM::VST3LNd8_UPD, false, true, true, SingleSpc, 3, 8 ,true},
460 { ARM::VST3LNq16Pseudo, ARM::VST3LNq16, false, false, false, EvenDblSpc, 3, 4,true},
461 { ARM::VST3LNq16Pseudo_UPD, ARM::VST3LNq16_UPD, false, true, true, EvenDblSpc, 3, 4,true},
462 { ARM::VST3LNq32Pseudo, ARM::VST3LNq32, false, false, false, EvenDblSpc, 3, 2,true},
463 { ARM::VST3LNq32Pseudo_UPD, ARM::VST3LNq32_UPD, false, true, true, EvenDblSpc, 3, 2,true},
464
465 { ARM::VST3d16Pseudo, ARM::VST3d16, false, false, false, SingleSpc, 3, 4 ,true},
466 { ARM::VST3d16Pseudo_UPD, ARM::VST3d16_UPD, false, true, true, SingleSpc, 3, 4 ,true},
467 { ARM::VST3d32Pseudo, ARM::VST3d32, false, false, false, SingleSpc, 3, 2 ,true},
468 { ARM::VST3d32Pseudo_UPD, ARM::VST3d32_UPD, false, true, true, SingleSpc, 3, 2 ,true},
469 { ARM::VST3d8Pseudo, ARM::VST3d8, false, false, false, SingleSpc, 3, 8 ,true},
470 { ARM::VST3d8Pseudo_UPD, ARM::VST3d8_UPD, false, true, true, SingleSpc, 3, 8 ,true},
471
472 { ARM::VST3q16Pseudo_UPD, ARM::VST3q16_UPD, false, true, true, EvenDblSpc, 3, 4 ,true},
473 { ARM::VST3q16oddPseudo, ARM::VST3q16, false, false, false, OddDblSpc, 3, 4 ,true},
474 { ARM::VST3q16oddPseudo_UPD, ARM::VST3q16_UPD, false, true, true, OddDblSpc, 3, 4 ,true},
475 { ARM::VST3q32Pseudo_UPD, ARM::VST3q32_UPD, false, true, true, EvenDblSpc, 3, 2 ,true},
476 { ARM::VST3q32oddPseudo, ARM::VST3q32, false, false, false, OddDblSpc, 3, 2 ,true},
477 { ARM::VST3q32oddPseudo_UPD, ARM::VST3q32_UPD, false, true, true, OddDblSpc, 3, 2 ,true},
478 { ARM::VST3q8Pseudo_UPD, ARM::VST3q8_UPD, false, true, true, EvenDblSpc, 3, 8 ,true},
479 { ARM::VST3q8oddPseudo, ARM::VST3q8, false, false, false, OddDblSpc, 3, 8 ,true},
480 { ARM::VST3q8oddPseudo_UPD, ARM::VST3q8_UPD, false, true, true, OddDblSpc, 3, 8 ,true},
481
482 { ARM::VST4LNd16Pseudo, ARM::VST4LNd16, false, false, false, SingleSpc, 4, 4 ,true},
483 { ARM::VST4LNd16Pseudo_UPD, ARM::VST4LNd16_UPD, false, true, true, SingleSpc, 4, 4 ,true},
484 { ARM::VST4LNd32Pseudo, ARM::VST4LNd32, false, false, false, SingleSpc, 4, 2 ,true},
485 { ARM::VST4LNd32Pseudo_UPD, ARM::VST4LNd32_UPD, false, true, true, SingleSpc, 4, 2 ,true},
486 { ARM::VST4LNd8Pseudo, ARM::VST4LNd8, false, false, false, SingleSpc, 4, 8 ,true},
487 { ARM::VST4LNd8Pseudo_UPD, ARM::VST4LNd8_UPD, false, true, true, SingleSpc, 4, 8 ,true},
488 { ARM::VST4LNq16Pseudo, ARM::VST4LNq16, false, false, false, EvenDblSpc, 4, 4,true},
489 { ARM::VST4LNq16Pseudo_UPD, ARM::VST4LNq16_UPD, false, true, true, EvenDblSpc, 4, 4,true},
490 { ARM::VST4LNq32Pseudo, ARM::VST4LNq32, false, false, false, EvenDblSpc, 4, 2,true},
491 { ARM::VST4LNq32Pseudo_UPD, ARM::VST4LNq32_UPD, false, true, true, EvenDblSpc, 4, 2,true},
492
493 { ARM::VST4d16Pseudo, ARM::VST4d16, false, false, false, SingleSpc, 4, 4 ,true},
494 { ARM::VST4d16Pseudo_UPD, ARM::VST4d16_UPD, false, true, true, SingleSpc, 4, 4 ,true},
495 { ARM::VST4d32Pseudo, ARM::VST4d32, false, false, false, SingleSpc, 4, 2 ,true},
496 { ARM::VST4d32Pseudo_UPD, ARM::VST4d32_UPD, false, true, true, SingleSpc, 4, 2 ,true},
497 { ARM::VST4d8Pseudo, ARM::VST4d8, false, false, false, SingleSpc, 4, 8 ,true},
498 { ARM::VST4d8Pseudo_UPD, ARM::VST4d8_UPD, false, true, true, SingleSpc, 4, 8 ,true},
499
500 { ARM::VST4q16Pseudo_UPD, ARM::VST4q16_UPD, false, true, true, EvenDblSpc, 4, 4 ,true},
501 { ARM::VST4q16oddPseudo, ARM::VST4q16, false, false, false, OddDblSpc, 4, 4 ,true},
502 { ARM::VST4q16oddPseudo_UPD, ARM::VST4q16_UPD, false, true, true, OddDblSpc, 4, 4 ,true},
503 { ARM::VST4q32Pseudo_UPD, ARM::VST4q32_UPD, false, true, true, EvenDblSpc, 4, 2 ,true},
504 { ARM::VST4q32oddPseudo, ARM::VST4q32, false, false, false, OddDblSpc, 4, 2 ,true},
505 { ARM::VST4q32oddPseudo_UPD, ARM::VST4q32_UPD, false, true, true, OddDblSpc, 4, 2 ,true},
506 { ARM::VST4q8Pseudo_UPD, ARM::VST4q8_UPD, false, true, true, EvenDblSpc, 4, 8 ,true},
507 { ARM::VST4q8oddPseudo, ARM::VST4q8, false, false, false, OddDblSpc, 4, 8 ,true},
508 { ARM::VST4q8oddPseudo_UPD, ARM::VST4q8_UPD, false, true, true, OddDblSpc, 4, 8 ,true}
509 };
510
511 /// LookupNEONLdSt - Search the NEONLdStTable for information about a NEON
512 /// load or store pseudo instruction.
513 static const NEONLdStTableEntry *LookupNEONLdSt(unsigned Opcode) {
514 #ifndef NDEBUG
515 // Make sure the table is sorted.
516 static std::atomic<bool> TableChecked(false);
517 if (!TableChecked.load(std::memory_order_relaxed)) {
518 assert(llvm::is_sorted(NEONLdStTable) && "NEONLdStTable is not sorted!");
519 TableChecked.store(true, std::memory_order_relaxed);
520 }
521 #endif
522
523 auto I = llvm::lower_bound(NEONLdStTable, Opcode);
524 if (I != std::end(NEONLdStTable) && I->PseudoOpc == Opcode)
525 return I;
526 return nullptr;
527 }
528
529 /// GetDSubRegs - Get 4 D subregisters of a Q, QQ, or QQQQ register,
530 /// corresponding to the specified register spacing. Not all of the results
531 /// are necessarily valid, e.g., a Q register only has 2 D subregisters.
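/// With double spacing, EvenDblSpc selects dsub_0/2/4/6 and OddDblSpc selects
/// dsub_1/3/5/7, i.e. every other D subregister of the super-register.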
532 static void GetDSubRegs(unsigned Reg, NEONRegSpacing RegSpc,
533 const TargetRegisterInfo *TRI, unsigned &D0,
534 unsigned &D1, unsigned &D2, unsigned &D3) {
535 if (RegSpc == SingleSpc || RegSpc == SingleLowSpc) {
536 D0 = TRI->getSubReg(Reg, ARM::dsub_0);
537 D1 = TRI->getSubReg(Reg, ARM::dsub_1);
538 D2 = TRI->getSubReg(Reg, ARM::dsub_2);
539 D3 = TRI->getSubReg(Reg, ARM::dsub_3);
540 } else if (RegSpc == SingleHighQSpc) {
541 D0 = TRI->getSubReg(Reg, ARM::dsub_4);
542 D1 = TRI->getSubReg(Reg, ARM::dsub_5);
543 D2 = TRI->getSubReg(Reg, ARM::dsub_6);
544 D3 = TRI->getSubReg(Reg, ARM::dsub_7);
545 } else if (RegSpc == SingleHighTSpc) {
546 D0 = TRI->getSubReg(Reg, ARM::dsub_3);
547 D1 = TRI->getSubReg(Reg, ARM::dsub_4);
548 D2 = TRI->getSubReg(Reg, ARM::dsub_5);
549 D3 = TRI->getSubReg(Reg, ARM::dsub_6);
550 } else if (RegSpc == EvenDblSpc) {
551 D0 = TRI->getSubReg(Reg, ARM::dsub_0);
552 D1 = TRI->getSubReg(Reg, ARM::dsub_2);
553 D2 = TRI->getSubReg(Reg, ARM::dsub_4);
554 D3 = TRI->getSubReg(Reg, ARM::dsub_6);
555 } else {
556 assert(RegSpc == OddDblSpc && "unknown register spacing");
557 D0 = TRI->getSubReg(Reg, ARM::dsub_1);
558 D1 = TRI->getSubReg(Reg, ARM::dsub_3);
559 D2 = TRI->getSubReg(Reg, ARM::dsub_5);
560 D3 = TRI->getSubReg(Reg, ARM::dsub_7);
561 }
562 }
563
564 /// ExpandVLD - Translate VLD pseudo instructions with Q, QQ or QQQQ register
565 /// operands to real VLD instructions with D register operands.
566 void ARMExpandPseudo::ExpandVLD(MachineBasicBlock::iterator &MBBI) {
567 MachineInstr &MI = *MBBI;
568 MachineBasicBlock &MBB = *MI.getParent();
569 LLVM_DEBUG(dbgs() << "Expanding: "; MI.dump());
570
571 const NEONLdStTableEntry *TableEntry = LookupNEONLdSt(MI.getOpcode());
572 assert(TableEntry && TableEntry->IsLoad && "NEONLdStTable lookup failed");
573 NEONRegSpacing RegSpc = (NEONRegSpacing)TableEntry->RegSpacing;
574 unsigned NumRegs = TableEntry->NumRegs;
575
576 MachineInstrBuilder MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(),
577 TII->get(TableEntry->RealOpc));
578 unsigned OpIdx = 0;
579
580 bool DstIsDead = MI.getOperand(OpIdx).isDead();
581 Register DstReg = MI.getOperand(OpIdx++).getReg();
582
583 bool IsVLD2DUP = TableEntry->RealOpc == ARM::VLD2DUPd8x2 ||
584 TableEntry->RealOpc == ARM::VLD2DUPd16x2 ||
585 TableEntry->RealOpc == ARM::VLD2DUPd32x2 ||
586 TableEntry->RealOpc == ARM::VLD2DUPd8x2wb_fixed ||
587 TableEntry->RealOpc == ARM::VLD2DUPd16x2wb_fixed ||
588 TableEntry->RealOpc == ARM::VLD2DUPd32x2wb_fixed ||
589 TableEntry->RealOpc == ARM::VLD2DUPd8x2wb_register ||
590 TableEntry->RealOpc == ARM::VLD2DUPd16x2wb_register ||
591 TableEntry->RealOpc == ARM::VLD2DUPd32x2wb_register;
592
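// The real VLD2DUPd*x2 instructions write a double-spaced pair of D registers
// rather than the full register list, so select the even or odd D half of the
// destination and use the matching DPairSpc super-register.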
593 if (IsVLD2DUP) {
594 unsigned SubRegIndex;
595 if (RegSpc == EvenDblSpc) {
596 SubRegIndex = ARM::dsub_0;
597 } else {
598 assert(RegSpc == OddDblSpc && "Unexpected spacing!");
599 SubRegIndex = ARM::dsub_1;
600 }
601 Register SubReg = TRI->getSubReg(DstReg, SubRegIndex);
602 unsigned DstRegPair = TRI->getMatchingSuperReg(SubReg, ARM::dsub_0,
603 &ARM::DPairSpcRegClass);
604 MIB.addReg(DstRegPair, RegState::Define | getDeadRegState(DstIsDead));
605 } else {
606 unsigned D0, D1, D2, D3;
607 GetDSubRegs(DstReg, RegSpc, TRI, D0, D1, D2, D3);
608 MIB.addReg(D0, RegState::Define | getDeadRegState(DstIsDead));
609 if (NumRegs > 1 && TableEntry->copyAllListRegs)
610 MIB.addReg(D1, RegState::Define | getDeadRegState(DstIsDead));
611 if (NumRegs > 2 && TableEntry->copyAllListRegs)
612 MIB.addReg(D2, RegState::Define | getDeadRegState(DstIsDead));
613 if (NumRegs > 3 && TableEntry->copyAllListRegs)
614 MIB.addReg(D3, RegState::Define | getDeadRegState(DstIsDead));
615 }
616
617 if (TableEntry->isUpdating)
618 MIB.add(MI.getOperand(OpIdx++));
619
620 // Copy the addrmode6 operands.
621 MIB.add(MI.getOperand(OpIdx++));
622 MIB.add(MI.getOperand(OpIdx++));
623
624 // Copy the am6offset operand.
625 if (TableEntry->hasWritebackOperand) {
626 // TODO: The writing-back pseudo instructions we translate here are all
627 // defined to take am6offset nodes that are capable of representing both fixed
628 // and register forms. Some real instructions, however, do not rely on
629 // am6offset and have separate definitions for such forms. When this is the
630 // case, fixed forms do not take any offset nodes, so here we skip them for
631 // such instructions. Once all real and pseudo writing-back instructions are
632 // rewritten without use of am6offset nodes, this code will go away.
633 const MachineOperand &AM6Offset = MI.getOperand(OpIdx++);
634 if (TableEntry->RealOpc == ARM::VLD1d8Qwb_fixed ||
635 TableEntry->RealOpc == ARM::VLD1d16Qwb_fixed ||
636 TableEntry->RealOpc == ARM::VLD1d32Qwb_fixed ||
637 TableEntry->RealOpc == ARM::VLD1d64Qwb_fixed ||
638 TableEntry->RealOpc == ARM::VLD1d8Twb_fixed ||
639 TableEntry->RealOpc == ARM::VLD1d16Twb_fixed ||
640 TableEntry->RealOpc == ARM::VLD1d32Twb_fixed ||
641 TableEntry->RealOpc == ARM::VLD1d64Twb_fixed ||
642 TableEntry->RealOpc == ARM::VLD2DUPd8x2wb_fixed ||
643 TableEntry->RealOpc == ARM::VLD2DUPd16x2wb_fixed ||
644 TableEntry->RealOpc == ARM::VLD2DUPd32x2wb_fixed) {
645 assert(AM6Offset.getReg() == 0 &&
646 "A fixed writing-back pseudo instruction provides an offset "
647 "register!");
648 } else {
649 MIB.add(AM6Offset);
650 }
651 }
652
653 // For an instruction writing double-spaced subregs, the pseudo instruction
654 // has an extra operand that is a use of the super-register. Record the
655 // operand index and skip over it.
656 unsigned SrcOpIdx = 0;
657 if (!IsVLD2DUP) {
658 if (RegSpc == EvenDblSpc || RegSpc == OddDblSpc ||
659 RegSpc == SingleLowSpc || RegSpc == SingleHighQSpc ||
660 RegSpc == SingleHighTSpc)
661 SrcOpIdx = OpIdx++;
662 }
663
664 // Copy the predicate operands.
665 MIB.add(MI.getOperand(OpIdx++));
666 MIB.add(MI.getOperand(OpIdx++));
667
668 // Copy the super-register source operand used for double-spaced subregs over
669 // to the new instruction as an implicit operand.
670 if (SrcOpIdx != 0) {
671 MachineOperand MO = MI.getOperand(SrcOpIdx);
672 MO.setImplicit(true);
673 MIB.add(MO);
674 }
675 // Add an implicit def for the super-register.
676 MIB.addReg(DstReg, RegState::ImplicitDefine | getDeadRegState(DstIsDead));
677 TransferImpOps(MI, MIB, MIB);
678
679 // Transfer memoperands.
680 MIB.cloneMemRefs(MI);
681 MI.eraseFromParent();
682 LLVM_DEBUG(dbgs() << "To: "; MIB.getInstr()->dump(););
683 }
684
685 /// ExpandVST - Translate VST pseudo instructions with Q, QQ or QQQQ register
686 /// operands to real VST instructions with D register operands.
687 void ARMExpandPseudo::ExpandVST(MachineBasicBlock::iterator &MBBI) {
688 MachineInstr &MI = *MBBI;
689 MachineBasicBlock &MBB = *MI.getParent();
690 LLVM_DEBUG(dbgs() << "Expanding: "; MI.dump());
691
692 const NEONLdStTableEntry *TableEntry = LookupNEONLdSt(MI.getOpcode());
693 assert(TableEntry && !TableEntry->IsLoad && "NEONLdStTable lookup failed");
694 NEONRegSpacing RegSpc = (NEONRegSpacing)TableEntry->RegSpacing;
695 unsigned NumRegs = TableEntry->NumRegs;
696
697 MachineInstrBuilder MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(),
698 TII->get(TableEntry->RealOpc));
699 unsigned OpIdx = 0;
700 if (TableEntry->isUpdating)
701 MIB.add(MI.getOperand(OpIdx++));
702
703 // Copy the addrmode6 operands.
704 MIB.add(MI.getOperand(OpIdx++));
705 MIB.add(MI.getOperand(OpIdx++));
706
707 if (TableEntry->hasWritebackOperand) {
708 // TODO: The writing-back pseudo instructions we translate here are all
709 // defined to take am6offset nodes that are capable of representing both fixed
710 // and register forms. Some real instructions, however, do not rely on
711 // am6offset and have separate definitions for such forms. When this is the
712 // case, fixed forms do not take any offset nodes, so here we skip them for
713 // such instructions. Once all real and pseudo writing-back instructions are
714 // rewritten without use of am6offset nodes, this code will go away.
715 const MachineOperand &AM6Offset = MI.getOperand(OpIdx++);
716 if (TableEntry->RealOpc == ARM::VST1d8Qwb_fixed ||
717 TableEntry->RealOpc == ARM::VST1d16Qwb_fixed ||
718 TableEntry->RealOpc == ARM::VST1d32Qwb_fixed ||
719 TableEntry->RealOpc == ARM::VST1d64Qwb_fixed ||
720 TableEntry->RealOpc == ARM::VST1d8Twb_fixed ||
721 TableEntry->RealOpc == ARM::VST1d16Twb_fixed ||
722 TableEntry->RealOpc == ARM::VST1d32Twb_fixed ||
723 TableEntry->RealOpc == ARM::VST1d64Twb_fixed) {
724 assert(AM6Offset.getReg() == 0 &&
725 "A fixed writing-back pseudo instruction provides an offset "
726 "register!");
727 } else {
728 MIB.add(AM6Offset);
729 }
730 }
731
732 bool SrcIsKill = MI.getOperand(OpIdx).isKill();
733 bool SrcIsUndef = MI.getOperand(OpIdx).isUndef();
734 Register SrcReg = MI.getOperand(OpIdx++).getReg();
735 unsigned D0, D1, D2, D3;
736 GetDSubRegs(SrcReg, RegSpc, TRI, D0, D1, D2, D3);
737 MIB.addReg(D0, getUndefRegState(SrcIsUndef));
738 if (NumRegs > 1 && TableEntry->copyAllListRegs)
739 MIB.addReg(D1, getUndefRegState(SrcIsUndef));
740 if (NumRegs > 2 && TableEntry->copyAllListRegs)
741 MIB.addReg(D2, getUndefRegState(SrcIsUndef));
742 if (NumRegs > 3 && TableEntry->copyAllListRegs)
743 MIB.addReg(D3, getUndefRegState(SrcIsUndef));
744
745 // Copy the predicate operands.
746 MIB.add(MI.getOperand(OpIdx++));
747 MIB.add(MI.getOperand(OpIdx++));
748
749 if (SrcIsKill && !SrcIsUndef) // Add an implicit kill for the super-reg.
750 MIB->addRegisterKilled(SrcReg, TRI, true);
751 else if (!SrcIsUndef)
752 MIB.addReg(SrcReg, RegState::Implicit); // Add implicit uses for src reg.
753 TransferImpOps(MI, MIB, MIB);
754
755 // Transfer memoperands.
756 MIB.cloneMemRefs(MI);
757 MI.eraseFromParent();
758 LLVM_DEBUG(dbgs() << "To: "; MIB.getInstr()->dump(););
759 }
760
761 /// ExpandLaneOp - Translate VLD*LN and VST*LN instructions with Q, QQ or QQQQ
762 /// register operands to real instructions with D register operands.
763 void ARMExpandPseudo::ExpandLaneOp(MachineBasicBlock::iterator &MBBI) {
764 MachineInstr &MI = *MBBI;
765 MachineBasicBlock &MBB = *MI.getParent();
766 LLVM_DEBUG(dbgs() << "Expanding: "; MI.dump());
767
768 const NEONLdStTableEntry *TableEntry = LookupNEONLdSt(MI.getOpcode());
769 assert(TableEntry && "NEONLdStTable lookup failed");
770 NEONRegSpacing RegSpc = (NEONRegSpacing)TableEntry->RegSpacing;
771 unsigned NumRegs = TableEntry->NumRegs;
772 unsigned RegElts = TableEntry->RegElts;
773
774 MachineInstrBuilder MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(),
775 TII->get(TableEntry->RealOpc));
776 unsigned OpIdx = 0;
777 // The lane operand is always the 3rd from last operand, before the 2
778 // predicate operands.
779 unsigned Lane = MI.getOperand(MI.getDesc().getNumOperands() - 3).getImm();
780
781 // Adjust the lane and spacing as needed for Q registers.
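// For example, a lane index of 5 on a Q register of 16-bit elements
// (RegElts == 4) becomes lane 1 of the odd-numbered D subregister.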
782 assert(RegSpc != OddDblSpc && "unexpected register spacing for VLD/VST-lane");
783 if (RegSpc == EvenDblSpc && Lane >= RegElts) {
784 RegSpc = OddDblSpc;
785 Lane -= RegElts;
786 }
787 assert(Lane < RegElts && "out of range lane for VLD/VST-lane");
788
789 unsigned D0 = 0, D1 = 0, D2 = 0, D3 = 0;
790 unsigned DstReg = 0;
791 bool DstIsDead = false;
792 if (TableEntry->IsLoad) {
793 DstIsDead = MI.getOperand(OpIdx).isDead();
794 DstReg = MI.getOperand(OpIdx++).getReg();
795 GetDSubRegs(DstReg, RegSpc, TRI, D0, D1, D2, D3);
796 MIB.addReg(D0, RegState::Define | getDeadRegState(DstIsDead));
797 if (NumRegs > 1)
798 MIB.addReg(D1, RegState::Define | getDeadRegState(DstIsDead));
799 if (NumRegs > 2)
800 MIB.addReg(D2, RegState::Define | getDeadRegState(DstIsDead));
801 if (NumRegs > 3)
802 MIB.addReg(D3, RegState::Define | getDeadRegState(DstIsDead));
803 }
804
805 if (TableEntry->isUpdating)
806 MIB.add(MI.getOperand(OpIdx++));
807
808 // Copy the addrmode6 operands.
809 MIB.add(MI.getOperand(OpIdx++));
810 MIB.add(MI.getOperand(OpIdx++));
811 // Copy the am6offset operand.
812 if (TableEntry->hasWritebackOperand)
813 MIB.add(MI.getOperand(OpIdx++));
814
815 // Grab the super-register source.
816 MachineOperand MO = MI.getOperand(OpIdx++);
817 if (!TableEntry->IsLoad)
818 GetDSubRegs(MO.getReg(), RegSpc, TRI, D0, D1, D2, D3);
819
820 // Add the subregs as sources of the new instruction.
821 unsigned SrcFlags = (getUndefRegState(MO.isUndef()) |
822 getKillRegState(MO.isKill()));
823 MIB.addReg(D0, SrcFlags);
824 if (NumRegs > 1)
825 MIB.addReg(D1, SrcFlags);
826 if (NumRegs > 2)
827 MIB.addReg(D2, SrcFlags);
828 if (NumRegs > 3)
829 MIB.addReg(D3, SrcFlags);
830
831 // Add the lane number operand.
832 MIB.addImm(Lane);
833 OpIdx += 1;
834
835 // Copy the predicate operands.
836 MIB.add(MI.getOperand(OpIdx++));
837 MIB.add(MI.getOperand(OpIdx++));
838
839 // Copy the super-register source to be an implicit source.
840 MO.setImplicit(true);
841 MIB.add(MO);
842 if (TableEntry->IsLoad)
843 // Add an implicit def for the super-register.
844 MIB.addReg(DstReg, RegState::ImplicitDefine | getDeadRegState(DstIsDead));
845 TransferImpOps(MI, MIB, MIB);
846 // Transfer memoperands.
847 MIB.cloneMemRefs(MI);
848 MI.eraseFromParent();
849 }
850
851 /// ExpandVTBL - Translate VTBL and VTBX pseudo instructions with Q or QQ
852 /// register operands to real instructions with D register operands.
853 void ARMExpandPseudo::ExpandVTBL(MachineBasicBlock::iterator &MBBI,
854 unsigned Opc, bool IsExt) {
855 MachineInstr &MI = *MBBI;
856 MachineBasicBlock &MBB = *MI.getParent();
857 LLVM_DEBUG(dbgs() << "Expanding: "; MI.dump());
858
859 MachineInstrBuilder MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(Opc));
860 unsigned OpIdx = 0;
861
862 // Transfer the destination register operand.
863 MIB.add(MI.getOperand(OpIdx++));
864 if (IsExt) {
865 MachineOperand VdSrc(MI.getOperand(OpIdx++));
866 MIB.add(VdSrc);
867 }
868
869 bool SrcIsKill = MI.getOperand(OpIdx).isKill();
870 Register SrcReg = MI.getOperand(OpIdx++).getReg();
871 unsigned D0, D1, D2, D3;
872 GetDSubRegs(SrcReg, SingleSpc, TRI, D0, D1, D2, D3);
873 MIB.addReg(D0);
874
875 // Copy the other source register operand.
876 MachineOperand VmSrc(MI.getOperand(OpIdx++));
877 MIB.add(VmSrc);
878
879 // Copy the predicate operands.
880 MIB.add(MI.getOperand(OpIdx++));
881 MIB.add(MI.getOperand(OpIdx++));
882
883 // Add an implicit kill and use for the super-reg.
884 MIB.addReg(SrcReg, RegState::Implicit | getKillRegState(SrcIsKill));
885 TransferImpOps(MI, MIB, MIB);
886 MI.eraseFromParent();
887 LLVM_DEBUG(dbgs() << "To: "; MIB.getInstr()->dump(););
888 }
889
890 static bool IsAnAddressOperand(const MachineOperand &MO) {
891 // This check is overly conservative. Unless we are certain that the machine
892 // operand is not a symbol reference, we return that it is a symbol reference.
893 // This is important as the mov/movt pair may not be split up on Windows.
894 switch (MO.getType()) {
895 case MachineOperand::MO_Register:
896 case MachineOperand::MO_Immediate:
897 case MachineOperand::MO_CImmediate:
898 case MachineOperand::MO_FPImmediate:
899 case MachineOperand::MO_ShuffleMask:
900 return false;
901 case MachineOperand::MO_MachineBasicBlock:
902 return true;
903 case MachineOperand::MO_FrameIndex:
904 return false;
905 case MachineOperand::MO_ConstantPoolIndex:
906 case MachineOperand::MO_TargetIndex:
907 case MachineOperand::MO_JumpTableIndex:
908 case MachineOperand::MO_ExternalSymbol:
909 case MachineOperand::MO_GlobalAddress:
910 case MachineOperand::MO_BlockAddress:
911 return true;
912 case MachineOperand::MO_RegisterMask:
913 case MachineOperand::MO_RegisterLiveOut:
914 return false;
915 case MachineOperand::MO_Metadata:
916 case MachineOperand::MO_MCSymbol:
917 return true;
918 case MachineOperand::MO_CFIIndex:
919 return false;
920 case MachineOperand::MO_IntrinsicID:
921 case MachineOperand::MO_Predicate:
922 llvm_unreachable("should not exist post-isel");
923 }
924 llvm_unreachable("unhandled machine operand type");
925 }
926
927 static MachineOperand makeImplicit(const MachineOperand &MO) {
928 MachineOperand NewMO = MO;
929 NewMO.setImplicit();
930 return NewMO;
931 }
932
933 void ARMExpandPseudo::ExpandMOV32BitImm(MachineBasicBlock &MBB,
934 MachineBasicBlock::iterator &MBBI) {
935 MachineInstr &MI = *MBBI;
936 unsigned Opcode = MI.getOpcode();
937 Register PredReg;
938 ARMCC::CondCodes Pred = getInstrPredicate(MI, PredReg);
939 Register DstReg = MI.getOperand(0).getReg();
940 bool DstIsDead = MI.getOperand(0).isDead();
941 bool isCC = Opcode == ARM::MOVCCi32imm || Opcode == ARM::t2MOVCCi32imm;
942 const MachineOperand &MO = MI.getOperand(isCC ? 2 : 1);
943 bool RequiresBundling = STI->isTargetWindows() && IsAnAddressOperand(MO);
944 MachineInstrBuilder LO16, HI16;
945 LLVM_DEBUG(dbgs() << "Expanding: "; MI.dump());
946
947 if (!STI->hasV6T2Ops() &&
948 (Opcode == ARM::MOVi32imm || Opcode == ARM::MOVCCi32imm)) {
949 // FIXME Windows CE supports older ARM CPUs
950 assert(!STI->isTargetWindows() && "Windows on ARM requires ARMv7+");
951
952 assert (MO.isImm() && "MOVi32imm w/ non-immediate source operand!");
953 unsigned ImmVal = (unsigned)MO.getImm();
954 unsigned SOImmValV1 = 0, SOImmValV2 = 0;
955
956 if (ARM_AM::isSOImmTwoPartVal(ImmVal)) { // Expand into a movi + orr.
957 LO16 = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::MOVi), DstReg);
958 HI16 = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::ORRri))
959 .addReg(DstReg, RegState::Define | getDeadRegState(DstIsDead))
960 .addReg(DstReg);
961 SOImmValV1 = ARM_AM::getSOImmTwoPartFirst(ImmVal);
962 SOImmValV2 = ARM_AM::getSOImmTwoPartSecond(ImmVal);
963 } else { // Expand into a mvn + sub.
964 LO16 = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::MVNi), DstReg);
965 HI16 = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::SUBri))
966 .addReg(DstReg, RegState::Define | getDeadRegState(DstIsDead))
967 .addReg(DstReg);
968 SOImmValV1 = ARM_AM::getSOImmTwoPartFirst(-ImmVal);
969 SOImmValV2 = ARM_AM::getSOImmTwoPartSecond(-ImmVal);
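// -ImmVal == SOImmValV1 + SOImmValV2. MVN materializes the complement of its
// immediate, so flip the first part: MVN of ~(-SOImmValV1) yields -SOImmValV1,
// and the following SUB of SOImmValV2 produces -(SOImmValV1 + SOImmValV2),
// i.e. the original ImmVal.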
970 SOImmValV1 = ~(-SOImmValV1);
971 }
972
973 unsigned MIFlags = MI.getFlags();
974 LO16 = LO16.addImm(SOImmValV1);
975 HI16 = HI16.addImm(SOImmValV2);
976 LO16.cloneMemRefs(MI);
977 HI16.cloneMemRefs(MI);
978 LO16.setMIFlags(MIFlags);
979 HI16.setMIFlags(MIFlags);
980 LO16.addImm(Pred).addReg(PredReg).add(condCodeOp());
981 HI16.addImm(Pred).addReg(PredReg).add(condCodeOp());
982 if (isCC)
983 LO16.add(makeImplicit(MI.getOperand(1)));
984 TransferImpOps(MI, LO16, HI16);
985 MI.eraseFromParent();
986 return;
987 }
988
989 unsigned LO16Opc = 0;
990 unsigned HI16Opc = 0;
991 unsigned MIFlags = MI.getFlags();
992 if (Opcode == ARM::t2MOVi32imm || Opcode == ARM::t2MOVCCi32imm) {
993 LO16Opc = ARM::t2MOVi16;
994 HI16Opc = ARM::t2MOVTi16;
995 } else {
996 LO16Opc = ARM::MOVi16;
997 HI16Opc = ARM::MOVTi16;
998 }
999
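// Materialize the value as a MOVW of the low half followed by a MOVT of the
// high half, e.g. 0x12345678 becomes MOVW #0x5678; MOVT #0x1234. Symbolic
// operands are split using the MO_LO16/MO_HI16 target flags instead.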
1000 LO16 = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(LO16Opc), DstReg);
1001 HI16 = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(HI16Opc))
1002 .addReg(DstReg, RegState::Define | getDeadRegState(DstIsDead))
1003 .addReg(DstReg);
1004
1005 LO16.setMIFlags(MIFlags);
1006 HI16.setMIFlags(MIFlags);
1007
1008 switch (MO.getType()) {
1009 case MachineOperand::MO_Immediate: {
1010 unsigned Imm = MO.getImm();
1011 unsigned Lo16 = Imm & 0xffff;
1012 unsigned Hi16 = (Imm >> 16) & 0xffff;
1013 LO16 = LO16.addImm(Lo16);
1014 HI16 = HI16.addImm(Hi16);
1015 break;
1016 }
1017 case MachineOperand::MO_ExternalSymbol: {
1018 const char *ES = MO.getSymbolName();
1019 unsigned TF = MO.getTargetFlags();
1020 LO16 = LO16.addExternalSymbol(ES, TF | ARMII::MO_LO16);
1021 HI16 = HI16.addExternalSymbol(ES, TF | ARMII::MO_HI16);
1022 break;
1023 }
1024 default: {
1025 const GlobalValue *GV = MO.getGlobal();
1026 unsigned TF = MO.getTargetFlags();
1027 LO16 = LO16.addGlobalAddress(GV, MO.getOffset(), TF | ARMII::MO_LO16);
1028 HI16 = HI16.addGlobalAddress(GV, MO.getOffset(), TF | ARMII::MO_HI16);
1029 break;
1030 }
1031 }
1032
1033 LO16.cloneMemRefs(MI);
1034 HI16.cloneMemRefs(MI);
1035 LO16.addImm(Pred).addReg(PredReg);
1036 HI16.addImm(Pred).addReg(PredReg);
1037
1038 if (RequiresBundling)
1039 finalizeBundle(MBB, LO16->getIterator(), MBBI->getIterator());
1040
1041 if (isCC)
1042 LO16.add(makeImplicit(MI.getOperand(1)));
1043 TransferImpOps(MI, LO16, HI16);
1044 MI.eraseFromParent();
1045 LLVM_DEBUG(dbgs() << "To: "; LO16.getInstr()->dump(););
1046 LLVM_DEBUG(dbgs() << "And: "; HI16.getInstr()->dump(););
1047 }
1048
1049 // The size of the area accessed by VLSTM/VLLDM: S0-S31 (128 bytes), FPSCR,
1050 // and 4 more bytes (VPR or padding), 136 bytes in total.
1051 static const int CMSE_FP_SAVE_SIZE = 136;
1052
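// Compute which of `Regs` are not used as operands of MI; those are the ones
// that must be cleared. std::set_difference requires both ranges to be sorted:
// OpRegs is sorted below, and `Regs` is expected to be in ascending order.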
1053 static void determineGPRegsToClear(const MachineInstr &MI,
1054 const std::initializer_list<unsigned> &Regs,
1055 SmallVectorImpl<unsigned> &ClearRegs) {
1056 SmallVector<unsigned, 4> OpRegs;
1057 for (const MachineOperand &Op : MI.operands()) {
1058 if (!Op.isReg() || !Op.isUse())
1059 continue;
1060 OpRegs.push_back(Op.getReg());
1061 }
1062 llvm::sort(OpRegs);
1063
1064 std::set_difference(Regs.begin(), Regs.end(), OpRegs.begin(), OpRegs.end(),
1065 std::back_inserter(ClearRegs));
1066 }
1067
1068 void ARMExpandPseudo::CMSEClearGPRegs(
1069 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
1070 const DebugLoc &DL, const SmallVectorImpl<unsigned> &ClearRegs,
1071 unsigned ClobberReg) {
1072
1073 if (STI->hasV8_1MMainlineOps()) {
1074 // Clear the registers using the CLRM instruction.
1075 MachineInstrBuilder CLRM =
1076 BuildMI(MBB, MBBI, DL, TII->get(ARM::t2CLRM)).add(predOps(ARMCC::AL));
1077 for (unsigned R : ClearRegs)
1078 CLRM.addReg(R, RegState::Define);
1079 CLRM.addReg(ARM::APSR, RegState::Define);
1080 CLRM.addReg(ARM::CPSR, RegState::Define | RegState::Implicit);
1081 } else {
    // Clear the registers and flags by copying ClobberReg into them.
    // (v8-M Baseline can't clear a high register in a single instruction.)
1084 for (unsigned Reg : ClearRegs) {
1085 if (Reg == ClobberReg)
1086 continue;
1087 BuildMI(MBB, MBBI, DL, TII->get(ARM::tMOVr), Reg)
1088 .addReg(ClobberReg)
1089 .add(predOps(ARMCC::AL));
1090 }
1091
1092 BuildMI(MBB, MBBI, DL, TII->get(ARM::t2MSR_M))
1093 .addImm(STI->hasDSP() ? 0xc00 : 0x800)
1094 .addReg(ClobberReg)
1095 .add(predOps(ARMCC::AL));
1096 }
1097 }
1098
// Find which FP registers need to be cleared. The parameter `ClearRegs` is
// initialised with all elements set to true, and this function resets the
// bits that correspond to register uses. Returns true if any floating point
// register is defined, false otherwise.
1103 static bool determineFPRegsToClear(const MachineInstr &MI,
1104 BitVector &ClearRegs) {
1105 bool DefFP = false;
1106 for (const MachineOperand &Op : MI.operands()) {
1107 if (!Op.isReg())
1108 continue;
1109
1110 unsigned Reg = Op.getReg();
1111 if (Op.isDef()) {
1112 if ((Reg >= ARM::Q0 && Reg <= ARM::Q7) ||
1113 (Reg >= ARM::D0 && Reg <= ARM::D15) ||
1114 (Reg >= ARM::S0 && Reg <= ARM::S31))
1115 DefFP = true;
1116 continue;
1117 }
1118
1119 if (Reg >= ARM::Q0 && Reg <= ARM::Q7) {
1120 int R = Reg - ARM::Q0;
1121 ClearRegs.reset(R * 4, (R + 1) * 4);
1122 } else if (Reg >= ARM::D0 && Reg <= ARM::D15) {
1123 int R = Reg - ARM::D0;
1124 ClearRegs.reset(R * 2, (R + 1) * 2);
1125 } else if (Reg >= ARM::S0 && Reg <= ARM::S31) {
1126 ClearRegs[Reg - ARM::S0] = false;
1127 }
1128 }
1129 return DefFP;
1130 }
1131
1132 MachineBasicBlock &
1133 ARMExpandPseudo::CMSEClearFPRegs(MachineBasicBlock &MBB,
1134 MachineBasicBlock::iterator MBBI) {
1135 BitVector ClearRegs(16, true);
1136 (void)determineFPRegsToClear(*MBBI, ClearRegs);
1137
1138 if (STI->hasV8_1MMainlineOps())
1139 return CMSEClearFPRegsV81(MBB, MBBI, ClearRegs);
1140 else
1141 return CMSEClearFPRegsV8(MBB, MBBI, ClearRegs);
1142 }
1143
// Clear the FP registers for v8.0-M by copying the contents of LR into them.
// Uses R12 as a scratch register.
1146 MachineBasicBlock &
1147 ARMExpandPseudo::CMSEClearFPRegsV8(MachineBasicBlock &MBB,
1148 MachineBasicBlock::iterator MBBI,
1149 const BitVector &ClearRegs) {
1150 if (!STI->hasFPRegs())
1151 return MBB;
1152
1153 auto &RetI = *MBBI;
1154 const DebugLoc &DL = RetI.getDebugLoc();
1155
1156 // If optimising for minimum size, clear FP registers unconditionally.
1157 // Otherwise, check the CONTROL.SFPA (Secure Floating-Point Active) bit and
1158 // don't clear them if they belong to the non-secure state.
1159 MachineBasicBlock *ClearBB, *DoneBB;
1160 if (STI->hasMinSize()) {
1161 ClearBB = DoneBB = &MBB;
1162 } else {
1163 MachineFunction *MF = MBB.getParent();
1164 ClearBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1165 DoneBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1166
1167 MF->insert(++MBB.getIterator(), ClearBB);
1168 MF->insert(++ClearBB->getIterator(), DoneBB);
1169
1170 DoneBB->splice(DoneBB->end(), &MBB, MBBI, MBB.end());
1171 DoneBB->transferSuccessors(&MBB);
1172 MBB.addSuccessor(ClearBB);
1173 MBB.addSuccessor(DoneBB);
1174 ClearBB->addSuccessor(DoneBB);
1175
    // The new basic blocks need as live-ins the registers used for the
    // return value, as well as LR, which is used to clear the registers.
1178 for (const MachineOperand &Op : RetI.operands()) {
1179 if (!Op.isReg())
1180 continue;
1181 Register Reg = Op.getReg();
1182 if (Reg == ARM::NoRegister || Reg == ARM::LR)
1183 continue;
1184 assert(Register::isPhysicalRegister(Reg) && "Unallocated register");
1185 ClearBB->addLiveIn(Reg);
1186 DoneBB->addLiveIn(Reg);
1187 }
1188 ClearBB->addLiveIn(ARM::LR);
1189 DoneBB->addLiveIn(ARM::LR);
1190
1191 // Read the CONTROL register.
1192 BuildMI(MBB, MBB.end(), DL, TII->get(ARM::t2MRS_M), ARM::R12)
1193 .addImm(20)
1194 .add(predOps(ARMCC::AL));
1195 // Check bit 3 (SFPA).
1196 BuildMI(MBB, MBB.end(), DL, TII->get(ARM::t2TSTri))
1197 .addReg(ARM::R12)
1198 .addImm(8)
1199 .add(predOps(ARMCC::AL));
1200 // If SFPA is clear, jump over ClearBB to DoneBB.
1201 BuildMI(MBB, MBB.end(), DL, TII->get(ARM::tBcc))
1202 .addMBB(DoneBB)
1203 .addImm(ARMCC::EQ)
1204 .addReg(ARM::CPSR, RegState::Kill);
1205 }
1206
1207 // Emit the clearing sequence
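  // Illustrative shape of the emitted code (which registers are touched
  // depends on ClearRegs):
  //   vmov dN, lr, lr     ; both halves of the D register need clearing
  //   vmov sN, lr         ; only one half needs clearing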
1208 for (unsigned D = 0; D < 8; D++) {
1209 // Attempt to clear as double
1210 if (ClearRegs[D * 2 + 0] && ClearRegs[D * 2 + 1]) {
1211 unsigned Reg = ARM::D0 + D;
1212 BuildMI(ClearBB, DL, TII->get(ARM::VMOVDRR), Reg)
1213 .addReg(ARM::LR)
1214 .addReg(ARM::LR)
1215 .add(predOps(ARMCC::AL));
1216 } else {
1217 // Clear first part as single
1218 if (ClearRegs[D * 2 + 0]) {
1219 unsigned Reg = ARM::S0 + D * 2;
1220 BuildMI(ClearBB, DL, TII->get(ARM::VMOVSR), Reg)
1221 .addReg(ARM::LR)
1222 .add(predOps(ARMCC::AL));
1223 }
1224 // Clear second part as single
1225 if (ClearRegs[D * 2 + 1]) {
1226 unsigned Reg = ARM::S0 + D * 2 + 1;
1227 BuildMI(ClearBB, DL, TII->get(ARM::VMOVSR), Reg)
1228 .addReg(ARM::LR)
1229 .add(predOps(ARMCC::AL));
1230 }
1231 }
1232 }
1233
1234 // Clear FPSCR bits 0-4, 7, 28-31
1235 // The other bits are program global according to the AAPCS
1236 BuildMI(ClearBB, DL, TII->get(ARM::VMRS), ARM::R12)
1237 .add(predOps(ARMCC::AL));
1238 BuildMI(ClearBB, DL, TII->get(ARM::t2BICri), ARM::R12)
1239 .addReg(ARM::R12)
1240 .addImm(0x0000009F)
1241 .add(predOps(ARMCC::AL))
1242 .add(condCodeOp());
1243 BuildMI(ClearBB, DL, TII->get(ARM::t2BICri), ARM::R12)
1244 .addReg(ARM::R12)
1245 .addImm(0xF0000000)
1246 .add(predOps(ARMCC::AL))
1247 .add(condCodeOp());
1248 BuildMI(ClearBB, DL, TII->get(ARM::VMSR))
1249 .addReg(ARM::R12)
1250 .add(predOps(ARMCC::AL));
1251
1252 return *DoneBB;
1253 }
1254
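// Clear the FP registers for v8.1-M Mainline, using VSCCLRM so that the
// clearing also covers VPR and does not need a scratch register.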
1255 MachineBasicBlock &
1256 ARMExpandPseudo::CMSEClearFPRegsV81(MachineBasicBlock &MBB,
1257 MachineBasicBlock::iterator MBBI,
1258 const BitVector &ClearRegs) {
1259 auto &RetI = *MBBI;
1260
1261 // Emit a sequence of VSCCLRM <sreglist> instructions, one instruction for
1262 // each contiguous sequence of S-registers.
1263 int Start = -1, End = -1;
1264 for (int S = 0, E = ClearRegs.size(); S != E; ++S) {
1265 if (ClearRegs[S] && S == End + 1) {
1266 End = S; // extend range
1267 continue;
1268 }
1269 // Emit current range.
1270 if (Start < End) {
1271 MachineInstrBuilder VSCCLRM =
1272 BuildMI(MBB, MBBI, RetI.getDebugLoc(), TII->get(ARM::VSCCLRMS))
1273 .add(predOps(ARMCC::AL));
1274 while (++Start <= End)
1275 VSCCLRM.addReg(ARM::S0 + Start, RegState::Define);
1276 VSCCLRM.addReg(ARM::VPR, RegState::Define);
1277 }
1278 Start = End = S;
1279 }
1280 // Emit last range.
1281 if (Start < End) {
1282 MachineInstrBuilder VSCCLRM =
1283 BuildMI(MBB, MBBI, RetI.getDebugLoc(), TII->get(ARM::VSCCLRMS))
1284 .add(predOps(ARMCC::AL));
1285 while (++Start <= End)
1286 VSCCLRM.addReg(ARM::S0 + Start, RegState::Define);
1287 VSCCLRM.addReg(ARM::VPR, RegState::Define);
1288 }
1289
1290 return MBB;
1291 }
1292
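// Save and clear the FP registers and FP context before a transition to
// non-secure code, dispatching on whether the v8.1-M Mainline instructions
// are available.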
1293 void ARMExpandPseudo::CMSESaveClearFPRegs(
1294 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, DebugLoc &DL,
1295 const LivePhysRegs &LiveRegs, SmallVectorImpl<unsigned> &ScratchRegs) {
1296 if (STI->hasV8_1MMainlineOps())
1297 CMSESaveClearFPRegsV81(MBB, MBBI, DL, LiveRegs);
1298 else
1299 CMSESaveClearFPRegsV8(MBB, MBBI, DL, LiveRegs, ScratchRegs);
1300 }
1301
1302 // Save and clear FP registers if present
1303 void ARMExpandPseudo::CMSESaveClearFPRegsV8(
1304 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, DebugLoc &DL,
1305 const LivePhysRegs &LiveRegs, SmallVectorImpl<unsigned> &ScratchRegs) {
1306 if (!STI->hasFPRegs())
1307 return;
1308
1309 // Store an available register for FPSCR clearing
1310 assert(!ScratchRegs.empty());
1311 unsigned SpareReg = ScratchRegs.front();
1312
1313 // save space on stack for VLSTM
1314 BuildMI(MBB, MBBI, DL, TII->get(ARM::tSUBspi), ARM::SP)
1315 .addReg(ARM::SP)
1316 .addImm(CMSE_FP_SAVE_SIZE >> 2)
1317 .add(predOps(ARMCC::AL));
1318
1319 // Use ScratchRegs to store the fp regs
1320 std::vector<std::tuple<unsigned, unsigned, unsigned>> ClearedFPRegs;
1321 std::vector<unsigned> NonclearedFPRegs;
1322 for (const MachineOperand &Op : MBBI->operands()) {
1323 if (Op.isReg() && Op.isUse()) {
1324 unsigned Reg = Op.getReg();
1325 assert(!ARM::DPRRegClass.contains(Reg) ||
1326 ARM::DPR_VFP2RegClass.contains(Reg));
1327 assert(!ARM::QPRRegClass.contains(Reg));
1328 if (ARM::DPR_VFP2RegClass.contains(Reg)) {
1329 if (ScratchRegs.size() >= 2) {
1330 unsigned SaveReg2 = ScratchRegs.pop_back_val();
1331 unsigned SaveReg1 = ScratchRegs.pop_back_val();
1332 ClearedFPRegs.emplace_back(Reg, SaveReg1, SaveReg2);
1333
1334 // Save the fp register to the normal registers
1335 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVRRD))
1336 .addReg(SaveReg1, RegState::Define)
1337 .addReg(SaveReg2, RegState::Define)
1338 .addReg(Reg)
1339 .add(predOps(ARMCC::AL));
1340 } else {
1341 NonclearedFPRegs.push_back(Reg);
1342 }
1343 } else if (ARM::SPRRegClass.contains(Reg)) {
1344 if (ScratchRegs.size() >= 1) {
1345 unsigned SaveReg = ScratchRegs.pop_back_val();
1346 ClearedFPRegs.emplace_back(Reg, SaveReg, 0);
1347
1348 // Save the fp register to the normal registers
1349 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVRS), SaveReg)
1350 .addReg(Reg)
1351 .add(predOps(ARMCC::AL));
1352 } else {
1353 NonclearedFPRegs.push_back(Reg);
1354 }
1355 }
1356 }
1357 }
1358
1359 bool passesFPReg = (!NonclearedFPRegs.empty() || !ClearedFPRegs.empty());
1360
1361 // Lazy store all fp registers to the stack
1362 MachineInstrBuilder VLSTM = BuildMI(MBB, MBBI, DL, TII->get(ARM::VLSTM))
1363 .addReg(ARM::SP)
1364 .add(predOps(ARMCC::AL));
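  // Model the FP state that the lazy store may read: VPR, FPSCR, FPSCR_NZCV
  // and Q0-Q7 are added as implicit uses, marked undef when they are not live
  // at this point.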
1365 for (auto R : {ARM::VPR, ARM::FPSCR, ARM::FPSCR_NZCV, ARM::Q0, ARM::Q1,
1366 ARM::Q2, ARM::Q3, ARM::Q4, ARM::Q5, ARM::Q6, ARM::Q7})
1367 VLSTM.addReg(R, RegState::Implicit |
1368 (LiveRegs.contains(R) ? 0 : RegState::Undef));
1369
1370 // Restore all arguments
1371 for (const auto &Regs : ClearedFPRegs) {
1372 unsigned Reg, SaveReg1, SaveReg2;
1373 std::tie(Reg, SaveReg1, SaveReg2) = Regs;
1374 if (ARM::DPR_VFP2RegClass.contains(Reg))
1375 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVDRR), Reg)
1376 .addReg(SaveReg1)
1377 .addReg(SaveReg2)
1378 .add(predOps(ARMCC::AL));
1379 else if (ARM::SPRRegClass.contains(Reg))
1380 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVSR), Reg)
1381 .addReg(SaveReg1)
1382 .add(predOps(ARMCC::AL));
1383 }
1384
1385 for (unsigned Reg : NonclearedFPRegs) {
1386 if (ARM::DPR_VFP2RegClass.contains(Reg)) {
1387 if (STI->isLittle()) {
1388 BuildMI(MBB, MBBI, DL, TII->get(ARM::VLDRD), Reg)
1389 .addReg(ARM::SP)
1390 .addImm((Reg - ARM::D0) * 2)
1391 .add(predOps(ARMCC::AL));
1392 } else {
        // For big-endian targets we need to load the two subregisters of Reg
        // manually because VLDRD would load them in the wrong order.
1395 unsigned SReg0 = TRI->getSubReg(Reg, ARM::ssub_0);
1396 BuildMI(MBB, MBBI, DL, TII->get(ARM::VLDRS), SReg0)
1397 .addReg(ARM::SP)
1398 .addImm((Reg - ARM::D0) * 2)
1399 .add(predOps(ARMCC::AL));
1400 BuildMI(MBB, MBBI, DL, TII->get(ARM::VLDRS), SReg0 + 1)
1401 .addReg(ARM::SP)
1402 .addImm((Reg - ARM::D0) * 2 + 1)
1403 .add(predOps(ARMCC::AL));
1404 }
1405 } else if (ARM::SPRRegClass.contains(Reg)) {
1406 BuildMI(MBB, MBBI, DL, TII->get(ARM::VLDRS), Reg)
1407 .addReg(ARM::SP)
1408 .addImm(Reg - ARM::S0)
1409 .add(predOps(ARMCC::AL));
1410 }
1411 }
  // Restore FPSCR from the stack and clear bits 0-4, 7, 28-31.
  // The other bits are program global according to the AAPCS.
1414 if (passesFPReg) {
1415 BuildMI(MBB, MBBI, DL, TII->get(ARM::t2LDRi8), SpareReg)
1416 .addReg(ARM::SP)
1417 .addImm(0x40)
1418 .add(predOps(ARMCC::AL));
1419 BuildMI(MBB, MBBI, DL, TII->get(ARM::t2BICri), SpareReg)
1420 .addReg(SpareReg)
1421 .addImm(0x0000009F)
1422 .add(predOps(ARMCC::AL))
1423 .add(condCodeOp());
1424 BuildMI(MBB, MBBI, DL, TII->get(ARM::t2BICri), SpareReg)
1425 .addReg(SpareReg)
1426 .addImm(0xF0000000)
1427 .add(predOps(ARMCC::AL))
1428 .add(condCodeOp());
1429 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMSR))
1430 .addReg(SpareReg)
1431 .add(predOps(ARMCC::AL));
    // The ldr must happen after a floating point instruction. To prevent the
    // post-RA scheduler from reordering the instructions, we create a bundle.
1434 finalizeBundle(MBB, VLSTM->getIterator(), MBBI->getIterator());
1435 }
1436 }
1437
1438 void ARMExpandPseudo::CMSESaveClearFPRegsV81(MachineBasicBlock &MBB,
1439 MachineBasicBlock::iterator MBBI,
1440 DebugLoc &DL,
1441 const LivePhysRegs &LiveRegs) {
1442 BitVector ClearRegs(32, true);
1443 bool DefFP = determineFPRegsToClear(*MBBI, ClearRegs);
1444
  // If the instruction does not write to an FP register and no elements were
1446 // removed from the set, then no FP registers were used to pass
1447 // arguments/returns.
1448 if (!DefFP && ClearRegs.count() == ClearRegs.size()) {
1449 // save space on stack for VLSTM
1450 BuildMI(MBB, MBBI, DL, TII->get(ARM::tSUBspi), ARM::SP)
1451 .addReg(ARM::SP)
1452 .addImm(CMSE_FP_SAVE_SIZE >> 2)
1453 .add(predOps(ARMCC::AL));
1454
1455 // Lazy store all FP registers to the stack
1456 MachineInstrBuilder VLSTM = BuildMI(MBB, MBBI, DL, TII->get(ARM::VLSTM))
1457 .addReg(ARM::SP)
1458 .add(predOps(ARMCC::AL));
1459 for (auto R : {ARM::VPR, ARM::FPSCR, ARM::FPSCR_NZCV, ARM::Q0, ARM::Q1,
1460 ARM::Q2, ARM::Q3, ARM::Q4, ARM::Q5, ARM::Q6, ARM::Q7})
1461 VLSTM.addReg(R, RegState::Implicit |
1462 (LiveRegs.contains(R) ? 0 : RegState::Undef));
1463 } else {
1464 // Push all the callee-saved registers (s16-s31).
1465 MachineInstrBuilder VPUSH =
1466 BuildMI(MBB, MBBI, DL, TII->get(ARM::VSTMSDB_UPD), ARM::SP)
1467 .addReg(ARM::SP)
1468 .add(predOps(ARMCC::AL));
1469 for (int Reg = ARM::S16; Reg <= ARM::S31; ++Reg)
1470 VPUSH.addReg(Reg);
1471
1472 // Clear FP registers with a VSCCLRM.
1473 (void)CMSEClearFPRegsV81(MBB, MBBI, ClearRegs);
1474
1475 // Save floating-point context.
1476 BuildMI(MBB, MBBI, DL, TII->get(ARM::VSTR_FPCXTS_pre), ARM::SP)
1477 .addReg(ARM::SP)
1478 .addImm(-8)
1479 .add(predOps(ARMCC::AL));
1480 }
1481 }
1482
1483 // Restore FP registers if present
1484 void ARMExpandPseudo::CMSERestoreFPRegs(
1485 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, DebugLoc &DL,
1486 SmallVectorImpl<unsigned> &AvailableRegs) {
1487 if (STI->hasV8_1MMainlineOps())
1488 CMSERestoreFPRegsV81(MBB, MBBI, DL, AvailableRegs);
1489 else
1490 CMSERestoreFPRegsV8(MBB, MBBI, DL, AvailableRegs);
1491 }
1492
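// Restore the FP registers for v8.0-M. Any FP registers defined by the return
// value are first copied into spare GPRs (or, failing that, stored into the
// corresponding VLSTM stack slots), then VLLDM reloads the lazily saved
// state, and finally the saved values are moved back into the FP registers.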
1493 void ARMExpandPseudo::CMSERestoreFPRegsV8(
1494 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, DebugLoc &DL,
1495 SmallVectorImpl<unsigned> &AvailableRegs) {
1496 if (!STI->hasFPRegs())
1497 return;
1498
1499 // Use AvailableRegs to store the fp regs
1500 std::vector<std::tuple<unsigned, unsigned, unsigned>> ClearedFPRegs;
1501 std::vector<unsigned> NonclearedFPRegs;
1502 for (const MachineOperand &Op : MBBI->operands()) {
1503 if (Op.isReg() && Op.isDef()) {
1504 unsigned Reg = Op.getReg();
1505 assert(!ARM::DPRRegClass.contains(Reg) ||
1506 ARM::DPR_VFP2RegClass.contains(Reg));
1507 assert(!ARM::QPRRegClass.contains(Reg));
1508 if (ARM::DPR_VFP2RegClass.contains(Reg)) {
1509 if (AvailableRegs.size() >= 2) {
1510 unsigned SaveReg2 = AvailableRegs.pop_back_val();
1511 unsigned SaveReg1 = AvailableRegs.pop_back_val();
1512 ClearedFPRegs.emplace_back(Reg, SaveReg1, SaveReg2);
1513
1514 // Save the fp register to the normal registers
1515 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVRRD))
1516 .addReg(SaveReg1, RegState::Define)
1517 .addReg(SaveReg2, RegState::Define)
1518 .addReg(Reg)
1519 .add(predOps(ARMCC::AL));
1520 } else {
1521 NonclearedFPRegs.push_back(Reg);
1522 }
1523 } else if (ARM::SPRRegClass.contains(Reg)) {
1524 if (AvailableRegs.size() >= 1) {
1525 unsigned SaveReg = AvailableRegs.pop_back_val();
1526 ClearedFPRegs.emplace_back(Reg, SaveReg, 0);
1527
1528 // Save the fp register to the normal registers
1529 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVRS), SaveReg)
1530 .addReg(Reg)
1531 .add(predOps(ARMCC::AL));
1532 } else {
1533 NonclearedFPRegs.push_back(Reg);
1534 }
1535 }
1536 }
1537 }
1538
  // Push onto the stack the FP regs that cannot be restored via normal
  // registers.
1540 for (unsigned Reg : NonclearedFPRegs) {
1541 if (ARM::DPR_VFP2RegClass.contains(Reg))
1542 BuildMI(MBB, MBBI, DL, TII->get(ARM::VSTRD), Reg)
1543 .addReg(ARM::SP)
1544 .addImm((Reg - ARM::D0) * 2)
1545 .add(predOps(ARMCC::AL));
1546 else if (ARM::SPRRegClass.contains(Reg))
1547 BuildMI(MBB, MBBI, DL, TII->get(ARM::VSTRS), Reg)
1548 .addReg(ARM::SP)
1549 .addImm(Reg - ARM::S0)
1550 .add(predOps(ARMCC::AL));
1551 }
1552
1553 // Lazy load fp regs from stack
1554 BuildMI(MBB, MBBI, DL, TII->get(ARM::VLLDM))
1555 .addReg(ARM::SP)
1556 .add(predOps(ARMCC::AL));
1557
1558 // Restore all FP registers via normal registers
1559 for (const auto &Regs : ClearedFPRegs) {
1560 unsigned Reg, SaveReg1, SaveReg2;
1561 std::tie(Reg, SaveReg1, SaveReg2) = Regs;
1562 if (ARM::DPR_VFP2RegClass.contains(Reg))
1563 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVDRR), Reg)
1564 .addReg(SaveReg1)
1565 .addReg(SaveReg2)
1566 .add(predOps(ARMCC::AL));
1567 else if (ARM::SPRRegClass.contains(Reg))
1568 BuildMI(MBB, MBBI, DL, TII->get(ARM::VMOVSR), Reg)
1569 .addReg(SaveReg1)
1570 .add(predOps(ARMCC::AL));
1571 }
1572
1573 // Pop the stack space
1574 BuildMI(MBB, MBBI, DL, TII->get(ARM::tADDspi), ARM::SP)
1575 .addReg(ARM::SP)
1576 .addImm(CMSE_FP_SAVE_SIZE >> 2)
1577 .add(predOps(ARMCC::AL));
1578 }
1579
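// Return true if MI defines or uses any floating-point register (S, D or Q).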
1580 static bool definesOrUsesFPReg(const MachineInstr &MI) {
1581 for (const MachineOperand &Op : MI.operands()) {
1582 if (!Op.isReg())
1583 continue;
1584 unsigned Reg = Op.getReg();
1585 if ((Reg >= ARM::Q0 && Reg <= ARM::Q7) ||
1586 (Reg >= ARM::D0 && Reg <= ARM::D15) ||
1587 (Reg >= ARM::S0 && Reg <= ARM::S31))
1588 return true;
1589 }
1590 return false;
1591 }
1592
1593 void ARMExpandPseudo::CMSERestoreFPRegsV81(
1594 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, DebugLoc &DL,
1595 SmallVectorImpl<unsigned> &AvailableRegs) {
1596 if (!definesOrUsesFPReg(*MBBI)) {
1597 // Load FP registers from stack.
1598 BuildMI(MBB, MBBI, DL, TII->get(ARM::VLLDM))
1599 .addReg(ARM::SP)
1600 .add(predOps(ARMCC::AL));
1601
1602 // Pop the stack space
1603 BuildMI(MBB, MBBI, DL, TII->get(ARM::tADDspi), ARM::SP)
1604 .addReg(ARM::SP)
1605 .addImm(CMSE_FP_SAVE_SIZE >> 2)
1606 .add(predOps(ARMCC::AL));
1607 } else {
1608 // Restore the floating point context.
1609 BuildMI(MBB, MBBI, MBBI->getDebugLoc(), TII->get(ARM::VLDR_FPCXTS_post),
1610 ARM::SP)
1611 .addReg(ARM::SP)
1612 .addImm(8)
1613 .add(predOps(ARMCC::AL));
1614
1615 // Pop all the callee-saved registers (s16-s31).
1616 MachineInstrBuilder VPOP =
1617 BuildMI(MBB, MBBI, DL, TII->get(ARM::VLDMSIA_UPD), ARM::SP)
1618 .addReg(ARM::SP)
1619 .add(predOps(ARMCC::AL));
1620 for (int Reg = ARM::S16; Reg <= ARM::S31; ++Reg)
1621 VPOP.addReg(Reg, RegState::Define);
1622 }
1623 }
1624
1625 /// Expand a CMP_SWAP pseudo-inst to an ldrex/strex loop as simply as
1626 /// possible. This only gets used at -O0 so we don't care about efficiency of
1627 /// the generated code.
1628 bool ARMExpandPseudo::ExpandCMP_SWAP(MachineBasicBlock &MBB,
1629 MachineBasicBlock::iterator MBBI,
1630 unsigned LdrexOp, unsigned StrexOp,
1631 unsigned UxtOp,
1632 MachineBasicBlock::iterator &NextMBBI) {
1633 bool IsThumb = STI->isThumb();
1634 MachineInstr &MI = *MBBI;
1635 DebugLoc DL = MI.getDebugLoc();
1636 const MachineOperand &Dest = MI.getOperand(0);
1637 Register TempReg = MI.getOperand(1).getReg();
  // Duplicating undef operands into 2 instructions does not guarantee the same
  // value on both; undef operands are not expected here (see the assert below).
1640 assert(!MI.getOperand(2).isUndef() && "cannot handle undef");
1641 Register AddrReg = MI.getOperand(2).getReg();
1642 Register DesiredReg = MI.getOperand(3).getReg();
1643 Register NewReg = MI.getOperand(4).getReg();
1644
1645 if (IsThumb) {
1646 assert(STI->hasV8MBaselineOps() &&
1647 "CMP_SWAP not expected to be custom expanded for Thumb1");
1648 assert((UxtOp == 0 || UxtOp == ARM::tUXTB || UxtOp == ARM::tUXTH) &&
1649 "ARMv8-M.baseline does not have t2UXTB/t2UXTH");
1650 assert((UxtOp == 0 || ARM::tGPRRegClass.contains(DesiredReg)) &&
1651 "DesiredReg used for UXT op must be tGPR");
1652 }
1653
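  // The expansion introduces a small loop: the original block falls through
  // to LoadCmpBB; LoadCmpBB branches to DoneBB if the comparison fails,
  // otherwise it falls through to StoreBB; StoreBB branches back to LoadCmpBB
  // if the store-exclusive fails, otherwise it falls through to DoneBB.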
1654 MachineFunction *MF = MBB.getParent();
1655 auto LoadCmpBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1656 auto StoreBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1657 auto DoneBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1658
1659 MF->insert(++MBB.getIterator(), LoadCmpBB);
1660 MF->insert(++LoadCmpBB->getIterator(), StoreBB);
1661 MF->insert(++StoreBB->getIterator(), DoneBB);
1662
1663 if (UxtOp) {
1664 MachineInstrBuilder MIB =
1665 BuildMI(MBB, MBBI, DL, TII->get(UxtOp), DesiredReg)
1666 .addReg(DesiredReg, RegState::Kill);
1667 if (!IsThumb)
1668 MIB.addImm(0);
1669 MIB.add(predOps(ARMCC::AL));
1670 }
1671
1672 // .Lloadcmp:
1673 // ldrex rDest, [rAddr]
1674 // cmp rDest, rDesired
1675 // bne .Ldone
1676
1677 MachineInstrBuilder MIB;
1678 MIB = BuildMI(LoadCmpBB, DL, TII->get(LdrexOp), Dest.getReg());
1679 MIB.addReg(AddrReg);
1680 if (LdrexOp == ARM::t2LDREX)
1681 MIB.addImm(0); // a 32-bit Thumb ldrex (only) allows an offset.
1682 MIB.add(predOps(ARMCC::AL));
1683
1684 unsigned CMPrr = IsThumb ? ARM::tCMPhir : ARM::CMPrr;
1685 BuildMI(LoadCmpBB, DL, TII->get(CMPrr))
1686 .addReg(Dest.getReg(), getKillRegState(Dest.isDead()))
1687 .addReg(DesiredReg)
1688 .add(predOps(ARMCC::AL));
1689 unsigned Bcc = IsThumb ? ARM::tBcc : ARM::Bcc;
1690 BuildMI(LoadCmpBB, DL, TII->get(Bcc))
1691 .addMBB(DoneBB)
1692 .addImm(ARMCC::NE)
1693 .addReg(ARM::CPSR, RegState::Kill);
1694 LoadCmpBB->addSuccessor(DoneBB);
1695 LoadCmpBB->addSuccessor(StoreBB);
1696
1697 // .Lstore:
1698 // strex rTempReg, rNew, [rAddr]
1699 // cmp rTempReg, #0
1700 // bne .Lloadcmp
1701 MIB = BuildMI(StoreBB, DL, TII->get(StrexOp), TempReg)
1702 .addReg(NewReg)
1703 .addReg(AddrReg);
1704 if (StrexOp == ARM::t2STREX)
1705 MIB.addImm(0); // a 32-bit Thumb strex (only) allows an offset.
1706 MIB.add(predOps(ARMCC::AL));
1707
1708 unsigned CMPri = IsThumb ? ARM::t2CMPri : ARM::CMPri;
1709 BuildMI(StoreBB, DL, TII->get(CMPri))
1710 .addReg(TempReg, RegState::Kill)
1711 .addImm(0)
1712 .add(predOps(ARMCC::AL));
1713 BuildMI(StoreBB, DL, TII->get(Bcc))
1714 .addMBB(LoadCmpBB)
1715 .addImm(ARMCC::NE)
1716 .addReg(ARM::CPSR, RegState::Kill);
1717 StoreBB->addSuccessor(LoadCmpBB);
1718 StoreBB->addSuccessor(DoneBB);
1719
1720 DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
1721 DoneBB->transferSuccessors(&MBB);
1722
1723 MBB.addSuccessor(LoadCmpBB);
1724
1725 NextMBBI = MBB.end();
1726 MI.eraseFromParent();
1727
1728 // Recompute livein lists.
1729 LivePhysRegs LiveRegs;
1730 computeAndAddLiveIns(LiveRegs, *DoneBB);
1731 computeAndAddLiveIns(LiveRegs, *StoreBB);
1732 computeAndAddLiveIns(LiveRegs, *LoadCmpBB);
1733 // Do an extra pass around the loop to get loop carried registers right.
1734 StoreBB->clearLiveIns();
1735 computeAndAddLiveIns(LiveRegs, *StoreBB);
1736 LoadCmpBB->clearLiveIns();
1737 computeAndAddLiveIns(LiveRegs, *LoadCmpBB);
1738
1739 return true;
1740 }
1741
/// ARM's ldrexd/strexd take a consecutive register pair (represented as a
/// single GPRPair register); Thumb's take two separate registers, so we need
/// to extract the subregs from the pair.
1745 static void addExclusiveRegPair(MachineInstrBuilder &MIB, MachineOperand &Reg,
1746 unsigned Flags, bool IsThumb,
1747 const TargetRegisterInfo *TRI) {
1748 if (IsThumb) {
1749 Register RegLo = TRI->getSubReg(Reg.getReg(), ARM::gsub_0);
1750 Register RegHi = TRI->getSubReg(Reg.getReg(), ARM::gsub_1);
1751 MIB.addReg(RegLo, Flags);
1752 MIB.addReg(RegHi, Flags);
1753 } else
1754 MIB.addReg(Reg.getReg(), Flags);
1755 }
1756
1757 /// Expand a 64-bit CMP_SWAP to an ldrexd/strexd loop.
1758 bool ARMExpandPseudo::ExpandCMP_SWAP_64(MachineBasicBlock &MBB,
1759 MachineBasicBlock::iterator MBBI,
1760 MachineBasicBlock::iterator &NextMBBI) {
1761 bool IsThumb = STI->isThumb();
1762 MachineInstr &MI = *MBBI;
1763 DebugLoc DL = MI.getDebugLoc();
1764 MachineOperand &Dest = MI.getOperand(0);
1765 Register TempReg = MI.getOperand(1).getReg();
  // Duplicating undef operands into 2 instructions does not guarantee the same
  // value on both; undef operands are not expected here (see the assert below).
1768 assert(!MI.getOperand(2).isUndef() && "cannot handle undef");
1769 Register AddrReg = MI.getOperand(2).getReg();
1770 Register DesiredReg = MI.getOperand(3).getReg();
1771 MachineOperand New = MI.getOperand(4);
1772 New.setIsKill(false);
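  // The New value is re-read on every iteration of the store loop, so clear
  // any kill flag coming from the pseudo.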
1773
1774 Register DestLo = TRI->getSubReg(Dest.getReg(), ARM::gsub_0);
1775 Register DestHi = TRI->getSubReg(Dest.getReg(), ARM::gsub_1);
1776 Register DesiredLo = TRI->getSubReg(DesiredReg, ARM::gsub_0);
1777 Register DesiredHi = TRI->getSubReg(DesiredReg, ARM::gsub_1);
1778
1779 MachineFunction *MF = MBB.getParent();
1780 auto LoadCmpBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1781 auto StoreBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1782 auto DoneBB = MF->CreateMachineBasicBlock(MBB.getBasicBlock());
1783
1784 MF->insert(++MBB.getIterator(), LoadCmpBB);
1785 MF->insert(++LoadCmpBB->getIterator(), StoreBB);
1786 MF->insert(++StoreBB->getIterator(), DoneBB);
1787
1788 // .Lloadcmp:
1789 // ldrexd rDestLo, rDestHi, [rAddr]
1790 // cmp rDestLo, rDesiredLo
  //   cmpeq rDestHi, rDesiredHi
1792 // bne .Ldone
1793 unsigned LDREXD = IsThumb ? ARM::t2LDREXD : ARM::LDREXD;
1794 MachineInstrBuilder MIB;
1795 MIB = BuildMI(LoadCmpBB, DL, TII->get(LDREXD));
1796 addExclusiveRegPair(MIB, Dest, RegState::Define, IsThumb, TRI);
1797 MIB.addReg(AddrReg).add(predOps(ARMCC::AL));
1798
1799 unsigned CMPrr = IsThumb ? ARM::tCMPhir : ARM::CMPrr;
1800 BuildMI(LoadCmpBB, DL, TII->get(CMPrr))
1801 .addReg(DestLo, getKillRegState(Dest.isDead()))
1802 .addReg(DesiredLo)
1803 .add(predOps(ARMCC::AL));
1804
1805 BuildMI(LoadCmpBB, DL, TII->get(CMPrr))
1806 .addReg(DestHi, getKillRegState(Dest.isDead()))
1807 .addReg(DesiredHi)
1808 .addImm(ARMCC::EQ).addReg(ARM::CPSR, RegState::Kill);
1809
1810 unsigned Bcc = IsThumb ? ARM::tBcc : ARM::Bcc;
1811 BuildMI(LoadCmpBB, DL, TII->get(Bcc))
1812 .addMBB(DoneBB)
1813 .addImm(ARMCC::NE)
1814 .addReg(ARM::CPSR, RegState::Kill);
1815 LoadCmpBB->addSuccessor(DoneBB);
1816 LoadCmpBB->addSuccessor(StoreBB);
1817
1818 // .Lstore:
1819 // strexd rTempReg, rNewLo, rNewHi, [rAddr]
1820 // cmp rTempReg, #0
1821 // bne .Lloadcmp
1822 unsigned STREXD = IsThumb ? ARM::t2STREXD : ARM::STREXD;
1823 MIB = BuildMI(StoreBB, DL, TII->get(STREXD), TempReg);
1824 unsigned Flags = getKillRegState(New.isDead());
1825 addExclusiveRegPair(MIB, New, Flags, IsThumb, TRI);
1826 MIB.addReg(AddrReg).add(predOps(ARMCC::AL));
1827
1828 unsigned CMPri = IsThumb ? ARM::t2CMPri : ARM::CMPri;
1829 BuildMI(StoreBB, DL, TII->get(CMPri))
1830 .addReg(TempReg, RegState::Kill)
1831 .addImm(0)
1832 .add(predOps(ARMCC::AL));
1833 BuildMI(StoreBB, DL, TII->get(Bcc))
1834 .addMBB(LoadCmpBB)
1835 .addImm(ARMCC::NE)
1836 .addReg(ARM::CPSR, RegState::Kill);
1837 StoreBB->addSuccessor(LoadCmpBB);
1838 StoreBB->addSuccessor(DoneBB);
1839
1840 DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
1841 DoneBB->transferSuccessors(&MBB);
1842
1843 MBB.addSuccessor(LoadCmpBB);
1844
1845 NextMBBI = MBB.end();
1846 MI.eraseFromParent();
1847
1848 // Recompute livein lists.
1849 LivePhysRegs LiveRegs;
1850 computeAndAddLiveIns(LiveRegs, *DoneBB);
1851 computeAndAddLiveIns(LiveRegs, *StoreBB);
1852 computeAndAddLiveIns(LiveRegs, *LoadCmpBB);
1853 // Do an extra pass around the loop to get loop carried registers right.
1854 StoreBB->clearLiveIns();
1855 computeAndAddLiveIns(LiveRegs, *StoreBB);
1856 LoadCmpBB->clearLiveIns();
1857 computeAndAddLiveIns(LiveRegs, *LoadCmpBB);
1858
1859 return true;
1860 }
1861
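// Push the GPR callee-saved registers (r4-r11) before a call to a non-secure
// function. Registers that are neither live nor the jump target are pushed
// as undef. On Thumb1 the high registers have to be shuffled through low
// registers, since tPUSH cannot push the high registers directly.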
1862 static void CMSEPushCalleeSaves(const TargetInstrInfo &TII,
1863 MachineBasicBlock &MBB,
1864 MachineBasicBlock::iterator MBBI, int JumpReg,
1865 const LivePhysRegs &LiveRegs, bool Thumb1Only) {
1866 const DebugLoc &DL = MBBI->getDebugLoc();
1867 if (Thumb1Only) { // push Lo and Hi regs separately
1868 MachineInstrBuilder PushMIB =
1869 BuildMI(MBB, MBBI, DL, TII.get(ARM::tPUSH)).add(predOps(ARMCC::AL));
1870 for (int Reg = ARM::R4; Reg < ARM::R8; ++Reg) {
1871 PushMIB.addReg(
1872 Reg, Reg == JumpReg || LiveRegs.contains(Reg) ? 0 : RegState::Undef);
1873 }
1874
    // Thumb1 can only tPUSH low regs, so we copy the high regs to the low
    // regs that we just saved and push the low regs again, taking care not
    // to clobber JumpReg. If JumpReg is one of the low registers, push first
    // the values of r9-r11, and then r8. That leaves them ordered in memory,
    // and allows us to later pop them with a single instruction.
1880 // FIXME: Could also use any of r0-r3 that are free (including in the
1881 // first PUSH above).
1882 for (int LoReg = ARM::R7, HiReg = ARM::R11; LoReg >= ARM::R4; --LoReg) {
1883 if (JumpReg == LoReg)
1884 continue;
1885 BuildMI(MBB, MBBI, DL, TII.get(ARM::tMOVr), LoReg)
1886 .addReg(HiReg, LiveRegs.contains(HiReg) ? 0 : RegState::Undef)
1887 .add(predOps(ARMCC::AL));
1888 --HiReg;
1889 }
1890 MachineInstrBuilder PushMIB2 =
1891 BuildMI(MBB, MBBI, DL, TII.get(ARM::tPUSH)).add(predOps(ARMCC::AL));
1892 for (int Reg = ARM::R4; Reg < ARM::R8; ++Reg) {
1893 if (Reg == JumpReg)
1894 continue;
1895 PushMIB2.addReg(Reg, RegState::Kill);
1896 }
1897
1898 // If we couldn't use a low register for temporary storage (because it was
1899 // the JumpReg), use r4 or r5, whichever is not JumpReg. It has already been
1900 // saved.
1901 if (JumpReg >= ARM::R4 && JumpReg <= ARM::R7) {
1902 int LoReg = JumpReg == ARM::R4 ? ARM::R5 : ARM::R4;
1903 BuildMI(MBB, MBBI, DL, TII.get(ARM::tMOVr), LoReg)
1904 .addReg(ARM::R8, LiveRegs.contains(ARM::R8) ? 0 : RegState::Undef)
1905 .add(predOps(ARMCC::AL));
1906 BuildMI(MBB, MBBI, DL, TII.get(ARM::tPUSH))
1907 .add(predOps(ARMCC::AL))
1908 .addReg(LoReg, RegState::Kill);
1909 }
1910 } else { // push Lo and Hi registers with a single instruction
1911 MachineInstrBuilder PushMIB =
1912 BuildMI(MBB, MBBI, DL, TII.get(ARM::t2STMDB_UPD), ARM::SP)
1913 .addReg(ARM::SP)
1914 .add(predOps(ARMCC::AL));
1915 for (int Reg = ARM::R4; Reg < ARM::R12; ++Reg) {
1916 PushMIB.addReg(
1917 Reg, Reg == JumpReg || LiveRegs.contains(Reg) ? 0 : RegState::Undef);
1918 }
1919 }
1920 }
1921
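// Pop the GPR callee-saved registers (r4-r11) after a call to a non-secure
// function, mirroring CMSEPushCalleeSaves: on Thumb1 the high registers are
// popped into r4-r7 first and then moved into r8-r11.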
1922 static void CMSEPopCalleeSaves(const TargetInstrInfo &TII,
1923 MachineBasicBlock &MBB,
1924 MachineBasicBlock::iterator MBBI, int JumpReg,
1925 bool Thumb1Only) {
1926 const DebugLoc &DL = MBBI->getDebugLoc();
1927 if (Thumb1Only) {
1928 MachineInstrBuilder PopMIB =
1929 BuildMI(MBB, MBBI, DL, TII.get(ARM::tPOP)).add(predOps(ARMCC::AL));
1930 for (int R = 0; R < 4; ++R) {
1931 PopMIB.addReg(ARM::R4 + R, RegState::Define);
1932 BuildMI(MBB, MBBI, DL, TII.get(ARM::tMOVr), ARM::R8 + R)
1933 .addReg(ARM::R4 + R, RegState::Kill)
1934 .add(predOps(ARMCC::AL));
1935 }
1936 MachineInstrBuilder PopMIB2 =
1937 BuildMI(MBB, MBBI, DL, TII.get(ARM::tPOP)).add(predOps(ARMCC::AL));
1938 for (int R = 0; R < 4; ++R)
1939 PopMIB2.addReg(ARM::R4 + R, RegState::Define);
1940 } else { // pop Lo and Hi registers with a single instruction
1941 MachineInstrBuilder PopMIB =
1942 BuildMI(MBB, MBBI, DL, TII.get(ARM::t2LDMIA_UPD), ARM::SP)
1943 .addReg(ARM::SP)
1944 .add(predOps(ARMCC::AL));
1945 for (int Reg = ARM::R4; Reg < ARM::R12; ++Reg)
1946 PopMIB.addReg(Reg, RegState::Define);
1947 }
1948 }
1949
1950 bool ARMExpandPseudo::ExpandMI(MachineBasicBlock &MBB,
1951 MachineBasicBlock::iterator MBBI,
1952 MachineBasicBlock::iterator &NextMBBI) {
1953 MachineInstr &MI = *MBBI;
1954 unsigned Opcode = MI.getOpcode();
1955 switch (Opcode) {
1956 default:
1957 return false;
1958
1959 case ARM::VBSPd:
1960 case ARM::VBSPq: {
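    // VBSP is a pseudo for the NEON bitwise select. It is expanded to VBIT,
    // VBIF or VBSL depending on which source operand is tied to the
    // destination; if none is, the first input is first copied into the
    // destination with a VORR move.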
1961 Register DstReg = MI.getOperand(0).getReg();
1962 if (DstReg == MI.getOperand(3).getReg()) {
1963 // Expand to VBIT
1964 unsigned NewOpc = Opcode == ARM::VBSPd ? ARM::VBITd : ARM::VBITq;
1965 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc))
1966 .add(MI.getOperand(0))
1967 .add(MI.getOperand(3))
1968 .add(MI.getOperand(2))
1969 .add(MI.getOperand(1))
1970 .addImm(MI.getOperand(4).getImm())
1971 .add(MI.getOperand(5));
1972 } else if (DstReg == MI.getOperand(2).getReg()) {
1973 // Expand to VBIF
1974 unsigned NewOpc = Opcode == ARM::VBSPd ? ARM::VBIFd : ARM::VBIFq;
1975 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc))
1976 .add(MI.getOperand(0))
1977 .add(MI.getOperand(2))
1978 .add(MI.getOperand(3))
1979 .add(MI.getOperand(1))
1980 .addImm(MI.getOperand(4).getImm())
1981 .add(MI.getOperand(5));
1982 } else {
1983 // Expand to VBSL
1984 unsigned NewOpc = Opcode == ARM::VBSPd ? ARM::VBSLd : ARM::VBSLq;
1985 if (DstReg == MI.getOperand(1).getReg()) {
1986 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc))
1987 .add(MI.getOperand(0))
1988 .add(MI.getOperand(1))
1989 .add(MI.getOperand(2))
1990 .add(MI.getOperand(3))
1991 .addImm(MI.getOperand(4).getImm())
1992 .add(MI.getOperand(5));
1993 } else {
1994 // Use move to satisfy constraints
1995 unsigned MoveOpc = Opcode == ARM::VBSPd ? ARM::VORRd : ARM::VORRq;
1996 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(MoveOpc))
1997 .addReg(DstReg,
1998 RegState::Define |
1999 getRenamableRegState(MI.getOperand(0).isRenamable()))
2000 .add(MI.getOperand(1))
2001 .add(MI.getOperand(1))
2002 .addImm(MI.getOperand(4).getImm())
2003 .add(MI.getOperand(5));
2004 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc))
2005 .add(MI.getOperand(0))
2006 .addReg(DstReg,
2007 RegState::Kill |
2008 getRenamableRegState(MI.getOperand(0).isRenamable()))
2009 .add(MI.getOperand(2))
2010 .add(MI.getOperand(3))
2011 .addImm(MI.getOperand(4).getImm())
2012 .add(MI.getOperand(5));
2013 }
2014 }
2015 MI.eraseFromParent();
2016 return true;
2017 }
2018
2019 case ARM::TCRETURNdi:
2020 case ARM::TCRETURNri: {
2021 MachineBasicBlock::iterator MBBI = MBB.getLastNonDebugInstr();
2022 assert(MBBI->isReturn() &&
2023 "Can only insert epilog into returning blocks");
2024 unsigned RetOpcode = MBBI->getOpcode();
2025 DebugLoc dl = MBBI->getDebugLoc();
2026 const ARMBaseInstrInfo &TII = *static_cast<const ARMBaseInstrInfo *>(
2027 MBB.getParent()->getSubtarget().getInstrInfo());
2028
2029 // Tail call return: adjust the stack pointer and jump to callee.
2030 MBBI = MBB.getLastNonDebugInstr();
2031 MachineOperand &JumpTarget = MBBI->getOperand(0);
2032
2033 // Jump to label or value in register.
2034 if (RetOpcode == ARM::TCRETURNdi) {
2035 unsigned TCOpcode =
2036 STI->isThumb()
2037 ? (STI->isTargetMachO() ? ARM::tTAILJMPd : ARM::tTAILJMPdND)
2038 : ARM::TAILJMPd;
2039 MachineInstrBuilder MIB = BuildMI(MBB, MBBI, dl, TII.get(TCOpcode));
2040 if (JumpTarget.isGlobal())
2041 MIB.addGlobalAddress(JumpTarget.getGlobal(), JumpTarget.getOffset(),
2042 JumpTarget.getTargetFlags());
2043 else {
2044 assert(JumpTarget.isSymbol());
2045 MIB.addExternalSymbol(JumpTarget.getSymbolName(),
2046 JumpTarget.getTargetFlags());
2047 }
2048
2049 // Add the default predicate in Thumb mode.
2050 if (STI->isThumb())
2051 MIB.add(predOps(ARMCC::AL));
2052 } else if (RetOpcode == ARM::TCRETURNri) {
2053 unsigned Opcode =
2054 STI->isThumb() ? ARM::tTAILJMPr
2055 : (STI->hasV4TOps() ? ARM::TAILJMPr : ARM::TAILJMPr4);
2056 BuildMI(MBB, MBBI, dl,
2057 TII.get(Opcode))
2058 .addReg(JumpTarget.getReg(), RegState::Kill);
2059 }
2060
2061 auto NewMI = std::prev(MBBI);
2062 for (unsigned i = 2, e = MBBI->getNumOperands(); i != e; ++i)
2063 NewMI->addOperand(MBBI->getOperand(i));
2064
2066 // Update call site info and delete the pseudo instruction TCRETURN.
2067 if (MI.isCandidateForCallSiteEntry())
2068 MI.getMF()->moveCallSiteInfo(&MI, &*NewMI);
2069 MBB.erase(MBBI);
2070
2071 MBBI = NewMI;
2072 return true;
2073 }
2074 case ARM::tBXNS_RET: {
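    // Return from a CMSE entry function: clear the FP state and any
    // general-purpose registers that do not carry the return value, then
    // branch to non-secure state with BXNS.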
2075 MachineBasicBlock &AfterBB = CMSEClearFPRegs(MBB, MBBI);
2076
2077 if (STI->hasV8_1MMainlineOps()) {
2078 // Restore the non-secure floating point context.
2079 BuildMI(MBB, MBBI, MBBI->getDebugLoc(),
2080 TII->get(ARM::VLDR_FPCXTNS_post), ARM::SP)
2081 .addReg(ARM::SP)
2082 .addImm(4)
2083 .add(predOps(ARMCC::AL));
2084 }
2085
    // Clear all GPRs that are not used by the return instruction.
2087 assert(llvm::all_of(MBBI->operands(), [](const MachineOperand &Op) {
2088 return !Op.isReg() || Op.getReg() != ARM::R12;
2089 }));
2090 SmallVector<unsigned, 5> ClearRegs;
2091 determineGPRegsToClear(
2092 *MBBI, {ARM::R0, ARM::R1, ARM::R2, ARM::R3, ARM::R12}, ClearRegs);
2093 CMSEClearGPRegs(AfterBB, AfterBB.end(), MBBI->getDebugLoc(), ClearRegs,
2094 ARM::LR);
2095
2096 MachineInstrBuilder NewMI =
2097 BuildMI(AfterBB, AfterBB.end(), MBBI->getDebugLoc(),
2098 TII->get(ARM::tBXNS))
2099 .addReg(ARM::LR)
2100 .add(predOps(ARMCC::AL));
2101 for (const MachineOperand &Op : MI.operands())
2102 NewMI->addOperand(Op);
2103 MI.eraseFromParent();
2104 return true;
2105 }
2106 case ARM::tBLXNS_CALL: {
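    // Call to a non-secure function: save and clear everything that may hold
    // secure state (callee-saved GPRs, FP registers, flags), clear the LSB of
    // the target address, emit BLXNS, and restore the saved state after the
    // call returns.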
2107 DebugLoc DL = MBBI->getDebugLoc();
2108 unsigned JumpReg = MBBI->getOperand(0).getReg();
2109
2110 // Figure out which registers are live at the point immediately before the
2111 // call. When we indiscriminately push a set of registers, the live
2112 // registers are added as ordinary use operands, whereas dead registers
2113 // are "undef".
2114 LivePhysRegs LiveRegs(*TRI);
2115 LiveRegs.addLiveOuts(MBB);
2116 for (const MachineInstr &MI : make_range(MBB.rbegin(), MBBI.getReverse()))
2117 LiveRegs.stepBackward(MI);
2118 LiveRegs.stepBackward(*MBBI);
2119
2120 CMSEPushCalleeSaves(*TII, MBB, MBBI, JumpReg, LiveRegs,
2121 AFI->isThumb1OnlyFunction());
2122
2123 SmallVector<unsigned, 16> ClearRegs;
2124 determineGPRegsToClear(*MBBI,
2125 {ARM::R0, ARM::R1, ARM::R2, ARM::R3, ARM::R4,
2126 ARM::R5, ARM::R6, ARM::R7, ARM::R8, ARM::R9,
2127 ARM::R10, ARM::R11, ARM::R12},
2128 ClearRegs);
2129 auto OriginalClearRegs = ClearRegs;
2130
2131 // Get the first cleared register as a scratch (to use later with tBIC).
2132 // We need to use the first so we can ensure it is a low register.
2133 unsigned ScratchReg = ClearRegs.front();
2134
2135 // Clear LSB of JumpReg
2136 if (AFI->isThumb2Function()) {
2137 BuildMI(MBB, MBBI, DL, TII->get(ARM::t2BICri), JumpReg)
2138 .addReg(JumpReg)
2139 .addImm(1)
2140 .add(predOps(ARMCC::AL))
2141 .add(condCodeOp());
2142 } else {
      // We need to use an extra register to cope with v8-M Baseline;
      // since we have saved all of the registers, we are OK to trash a
      // non-argument register here.
2146 BuildMI(MBB, MBBI, DL, TII->get(ARM::tMOVi8), ScratchReg)
2147 .add(condCodeOp())
2148 .addImm(1)
2149 .add(predOps(ARMCC::AL));
2150 BuildMI(MBB, MBBI, DL, TII->get(ARM::tBIC), JumpReg)
2151 .addReg(ARM::CPSR, RegState::Define)
2152 .addReg(JumpReg)
2153 .addReg(ScratchReg)
2154 .add(predOps(ARMCC::AL));
2155 }
2156
2157 CMSESaveClearFPRegs(MBB, MBBI, DL, LiveRegs,
2158 ClearRegs); // save+clear FP regs with ClearRegs
2159 CMSEClearGPRegs(MBB, MBBI, DL, ClearRegs, JumpReg);
2160
2161 const MachineInstrBuilder NewCall =
2162 BuildMI(MBB, MBBI, DL, TII->get(ARM::tBLXNSr))
2163 .add(predOps(ARMCC::AL))
2164 .addReg(JumpReg, RegState::Kill);
2165
2166 for (int I = 1, E = MI.getNumOperands(); I != E; ++I)
2167 NewCall->addOperand(MI.getOperand(I));
2168 if (MI.isCandidateForCallSiteEntry())
2169 MI.getMF()->moveCallSiteInfo(&MI, NewCall.getInstr());
2170
2171 CMSERestoreFPRegs(MBB, MBBI, DL, OriginalClearRegs); // restore FP registers
2172
2173 CMSEPopCalleeSaves(*TII, MBB, MBBI, JumpReg, AFI->isThumb1OnlyFunction());
2174
2175 MI.eraseFromParent();
2176 return true;
2177 }
2178 case ARM::VMOVHcc:
2179 case ARM::VMOVScc:
2180 case ARM::VMOVDcc: {
2181 unsigned newOpc = Opcode != ARM::VMOVDcc ? ARM::VMOVS : ARM::VMOVD;
2182 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(newOpc),
2183 MI.getOperand(1).getReg())
2184 .add(MI.getOperand(2))
2185 .addImm(MI.getOperand(3).getImm()) // 'pred'
2186 .add(MI.getOperand(4))
2187 .add(makeImplicit(MI.getOperand(1)));
2188
2189 MI.eraseFromParent();
2190 return true;
2191 }
2192 case ARM::t2MOVCCr:
2193 case ARM::MOVCCr: {
2194 unsigned Opc = AFI->isThumbFunction() ? ARM::t2MOVr : ARM::MOVr;
2195 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(Opc),
2196 MI.getOperand(1).getReg())
2197 .add(MI.getOperand(2))
2198 .addImm(MI.getOperand(3).getImm()) // 'pred'
2199 .add(MI.getOperand(4))
2200 .add(condCodeOp()) // 's' bit
2201 .add(makeImplicit(MI.getOperand(1)));
2202
2203 MI.eraseFromParent();
2204 return true;
2205 }
2206 case ARM::MOVCCsi: {
2207 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::MOVsi),
2208 (MI.getOperand(1).getReg()))
2209 .add(MI.getOperand(2))
2210 .addImm(MI.getOperand(3).getImm())
2211 .addImm(MI.getOperand(4).getImm()) // 'pred'
2212 .add(MI.getOperand(5))
2213 .add(condCodeOp()) // 's' bit
2214 .add(makeImplicit(MI.getOperand(1)));
2215
2216 MI.eraseFromParent();
2217 return true;
2218 }
2219 case ARM::MOVCCsr: {
2220 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::MOVsr),
2221 (MI.getOperand(1).getReg()))
2222 .add(MI.getOperand(2))
2223 .add(MI.getOperand(3))
2224 .addImm(MI.getOperand(4).getImm())
2225 .addImm(MI.getOperand(5).getImm()) // 'pred'
2226 .add(MI.getOperand(6))
2227 .add(condCodeOp()) // 's' bit
2228 .add(makeImplicit(MI.getOperand(1)));
2229
2230 MI.eraseFromParent();
2231 return true;
2232 }
2233 case ARM::t2MOVCCi16:
2234 case ARM::MOVCCi16: {
2235 unsigned NewOpc = AFI->isThumbFunction() ? ARM::t2MOVi16 : ARM::MOVi16;
2236 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc),
2237 MI.getOperand(1).getReg())
2238 .addImm(MI.getOperand(2).getImm())
2239 .addImm(MI.getOperand(3).getImm()) // 'pred'
2240 .add(MI.getOperand(4))
2241 .add(makeImplicit(MI.getOperand(1)));
2242 MI.eraseFromParent();
2243 return true;
2244 }
2245 case ARM::t2MOVCCi:
2246 case ARM::MOVCCi: {
2247 unsigned Opc = AFI->isThumbFunction() ? ARM::t2MOVi : ARM::MOVi;
2248 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(Opc),
2249 MI.getOperand(1).getReg())
2250 .addImm(MI.getOperand(2).getImm())
2251 .addImm(MI.getOperand(3).getImm()) // 'pred'
2252 .add(MI.getOperand(4))
2253 .add(condCodeOp()) // 's' bit
2254 .add(makeImplicit(MI.getOperand(1)));
2255
2256 MI.eraseFromParent();
2257 return true;
2258 }
2259 case ARM::t2MVNCCi:
2260 case ARM::MVNCCi: {
2261 unsigned Opc = AFI->isThumbFunction() ? ARM::t2MVNi : ARM::MVNi;
2262 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(Opc),
2263 MI.getOperand(1).getReg())
2264 .addImm(MI.getOperand(2).getImm())
2265 .addImm(MI.getOperand(3).getImm()) // 'pred'
2266 .add(MI.getOperand(4))
2267 .add(condCodeOp()) // 's' bit
2268 .add(makeImplicit(MI.getOperand(1)));
2269
2270 MI.eraseFromParent();
2271 return true;
2272 }
2273 case ARM::t2MOVCClsl:
2274 case ARM::t2MOVCClsr:
2275 case ARM::t2MOVCCasr:
2276 case ARM::t2MOVCCror: {
2277 unsigned NewOpc;
2278 switch (Opcode) {
2279 case ARM::t2MOVCClsl: NewOpc = ARM::t2LSLri; break;
2280 case ARM::t2MOVCClsr: NewOpc = ARM::t2LSRri; break;
2281 case ARM::t2MOVCCasr: NewOpc = ARM::t2ASRri; break;
2282 case ARM::t2MOVCCror: NewOpc = ARM::t2RORri; break;
    default: llvm_unreachable("unexpected conditional move");
2284 }
2285 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc),
2286 MI.getOperand(1).getReg())
2287 .add(MI.getOperand(2))
2288 .addImm(MI.getOperand(3).getImm())
2289 .addImm(MI.getOperand(4).getImm()) // 'pred'
2290 .add(MI.getOperand(5))
2291 .add(condCodeOp()) // 's' bit
2292 .add(makeImplicit(MI.getOperand(1)));
2293 MI.eraseFromParent();
2294 return true;
2295 }
2296 case ARM::Int_eh_sjlj_dispatchsetup: {
2297 MachineFunction &MF = *MI.getParent()->getParent();
2298 const ARMBaseInstrInfo *AII =
2299 static_cast<const ARMBaseInstrInfo*>(TII);
2300 const ARMBaseRegisterInfo &RI = AII->getRegisterInfo();
2301 // For functions using a base pointer, we rematerialize it (via the frame
2302 // pointer) here since eh.sjlj.setjmp and eh.sjlj.longjmp don't do it
2303 // for us. Otherwise, expand to nothing.
2304 if (RI.hasBasePointer(MF)) {
2305 int32_t NumBytes = AFI->getFramePtrSpillOffset();
2306 Register FramePtr = RI.getFrameRegister(MF);
2307 assert(MF.getSubtarget().getFrameLowering()->hasFP(MF) &&
2308 "base pointer without frame pointer?");
2309
2310 if (AFI->isThumb2Function()) {
2311 emitT2RegPlusImmediate(MBB, MBBI, MI.getDebugLoc(), ARM::R6,
2312 FramePtr, -NumBytes, ARMCC::AL, 0, *TII);
2313 } else if (AFI->isThumbFunction()) {
2314 emitThumbRegPlusImmediate(MBB, MBBI, MI.getDebugLoc(), ARM::R6,
2315 FramePtr, -NumBytes, *TII, RI);
2316 } else {
2317 emitARMRegPlusImmediate(MBB, MBBI, MI.getDebugLoc(), ARM::R6,
2318 FramePtr, -NumBytes, ARMCC::AL, 0,
2319 *TII);
2320 }
2321 // If there's dynamic realignment, adjust for it.
2322 if (RI.hasStackRealignment(MF)) {
2323 MachineFrameInfo &MFI = MF.getFrameInfo();
2324 Align MaxAlign = MFI.getMaxAlign();
2325 assert (!AFI->isThumb1OnlyFunction());
2326 // Emit bic r6, r6, MaxAlign
2327 assert(MaxAlign <= Align(256) &&
2328 "The BIC instruction cannot encode "
2329 "immediates larger than 256 with all lower "
2330 "bits set.");
2331 unsigned bicOpc = AFI->isThumbFunction() ?
2332 ARM::t2BICri : ARM::BICri;
2333 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(bicOpc), ARM::R6)
2334 .addReg(ARM::R6, RegState::Kill)
2335 .addImm(MaxAlign.value() - 1)
2336 .add(predOps(ARMCC::AL))
2337 .add(condCodeOp());
2338 }
2339 }
2340 MI.eraseFromParent();
2341 return true;
2342 }
2343
2344 case ARM::MOVsrl_flag:
2345 case ARM::MOVsra_flag: {
    // These are just fancy MOVs (flag-setting shifted MOV) instructions.
2347 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::MOVsi),
2348 MI.getOperand(0).getReg())
2349 .add(MI.getOperand(1))
2350 .addImm(ARM_AM::getSORegOpc(
2351 (Opcode == ARM::MOVsrl_flag ? ARM_AM::lsr : ARM_AM::asr), 1))
2352 .add(predOps(ARMCC::AL))
2353 .addReg(ARM::CPSR, RegState::Define);
2354 MI.eraseFromParent();
2355 return true;
2356 }
2357 case ARM::RRX: {
    // This encodes as "MOVs Rd, Rm, rrx".
2359 MachineInstrBuilder MIB =
2360 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::MOVsi),
2361 MI.getOperand(0).getReg())
2362 .add(MI.getOperand(1))
2363 .addImm(ARM_AM::getSORegOpc(ARM_AM::rrx, 0))
2364 .add(predOps(ARMCC::AL))
2365 .add(condCodeOp());
2366 TransferImpOps(MI, MIB, MIB);
2367 MI.eraseFromParent();
2368 return true;
2369 }
2370 case ARM::tTPsoft:
2371 case ARM::TPsoft: {
2372 const bool Thumb = Opcode == ARM::tTPsoft;
2373
2374 MachineInstrBuilder MIB;
2375 MachineFunction *MF = MBB.getParent();
2376 if (STI->genLongCalls()) {
2377 MachineConstantPool *MCP = MF->getConstantPool();
2378 unsigned PCLabelID = AFI->createPICLabelUId();
2379 MachineConstantPoolValue *CPV =
2380 ARMConstantPoolSymbol::Create(MF->getFunction().getContext(),
2381 "__aeabi_read_tp", PCLabelID, 0);
2382 Register Reg = MI.getOperand(0).getReg();
2383 MIB =
2384 BuildMI(MBB, MBBI, MI.getDebugLoc(),
2385 TII->get(Thumb ? ARM::tLDRpci : ARM::LDRi12), Reg)
2386 .addConstantPoolIndex(MCP->getConstantPoolIndex(CPV, Align(4)));
2387 if (!Thumb)
2388 MIB.addImm(0);
2389 MIB.add(predOps(ARMCC::AL));
2390
2391 MIB =
2392 BuildMI(MBB, MBBI, MI.getDebugLoc(),
2393 TII->get(Thumb ? gettBLXrOpcode(*MF) : getBLXOpcode(*MF)));
2394 if (Thumb)
2395 MIB.add(predOps(ARMCC::AL));
2396 MIB.addReg(Reg, RegState::Kill);
2397 } else {
2398 MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(),
2399 TII->get(Thumb ? ARM::tBL : ARM::BL));
2400 if (Thumb)
2401 MIB.add(predOps(ARMCC::AL));
2402 MIB.addExternalSymbol("__aeabi_read_tp", 0);
2403 }
2404
2405 MIB.cloneMemRefs(MI);
2406 TransferImpOps(MI, MIB, MIB);
2407 // Update the call site info.
2408 if (MI.isCandidateForCallSiteEntry())
2409 MF->moveCallSiteInfo(&MI, &*MIB);
2410 MI.eraseFromParent();
2411 return true;
2412 }
2413 case ARM::tLDRpci_pic:
2414 case ARM::t2LDRpci_pic: {
2415 unsigned NewLdOpc = (Opcode == ARM::tLDRpci_pic)
2416 ? ARM::tLDRpci : ARM::t2LDRpci;
2417 Register DstReg = MI.getOperand(0).getReg();
2418 bool DstIsDead = MI.getOperand(0).isDead();
2419 MachineInstrBuilder MIB1 =
2420 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewLdOpc), DstReg)
2421 .add(MI.getOperand(1))
2422 .add(predOps(ARMCC::AL));
2423 MIB1.cloneMemRefs(MI);
2424 MachineInstrBuilder MIB2 =
2425 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::tPICADD))
2426 .addReg(DstReg, RegState::Define | getDeadRegState(DstIsDead))
2427 .addReg(DstReg)
2428 .add(MI.getOperand(2));
2429 TransferImpOps(MI, MIB1, MIB2);
2430 MI.eraseFromParent();
2431 return true;
2432 }
2433
2434 case ARM::LDRLIT_ga_abs:
2435 case ARM::LDRLIT_ga_pcrel:
2436 case ARM::LDRLIT_ga_pcrel_ldr:
2437 case ARM::tLDRLIT_ga_abs:
2438 case ARM::tLDRLIT_ga_pcrel: {
2439 Register DstReg = MI.getOperand(0).getReg();
2440 bool DstIsDead = MI.getOperand(0).isDead();
2441 const MachineOperand &MO1 = MI.getOperand(1);
2442 auto Flags = MO1.getTargetFlags();
2443 const GlobalValue *GV = MO1.getGlobal();
2444 bool IsARM =
2445 Opcode != ARM::tLDRLIT_ga_pcrel && Opcode != ARM::tLDRLIT_ga_abs;
2446 bool IsPIC =
2447 Opcode != ARM::LDRLIT_ga_abs && Opcode != ARM::tLDRLIT_ga_abs;
2448 unsigned LDRLITOpc = IsARM ? ARM::LDRi12 : ARM::tLDRpci;
2449 unsigned PICAddOpc =
2450 IsARM
2451 ? (Opcode == ARM::LDRLIT_ga_pcrel_ldr ? ARM::PICLDR : ARM::PICADD)
2452 : ARM::tPICADD;
2453
2454 // We need a new const-pool entry to load from.
2455 MachineConstantPool *MCP = MBB.getParent()->getConstantPool();
2456 unsigned ARMPCLabelIndex = 0;
2457 MachineConstantPoolValue *CPV;
2458
2459 if (IsPIC) {
2460 unsigned PCAdj = IsARM ? 8 : 4;
2461 auto Modifier = (Flags & ARMII::MO_GOT)
2462 ? ARMCP::GOT_PREL
2463 : ARMCP::no_modifier;
2464 ARMPCLabelIndex = AFI->createPICLabelUId();
2465 CPV = ARMConstantPoolConstant::Create(
2466 GV, ARMPCLabelIndex, ARMCP::CPValue, PCAdj, Modifier,
2467 /*AddCurrentAddr*/ Modifier == ARMCP::GOT_PREL);
2468 } else
2469 CPV = ARMConstantPoolConstant::Create(GV, ARMCP::no_modifier);
2470
2471 MachineInstrBuilder MIB =
2472 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(LDRLITOpc), DstReg)
2473 .addConstantPoolIndex(MCP->getConstantPoolIndex(CPV, Align(4)));
2474 if (IsARM)
2475 MIB.addImm(0);
2476 MIB.add(predOps(ARMCC::AL));
2477
2478 if (IsPIC) {
2479 MachineInstrBuilder MIB =
2480 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(PICAddOpc))
2481 .addReg(DstReg, RegState::Define | getDeadRegState(DstIsDead))
2482 .addReg(DstReg)
2483 .addImm(ARMPCLabelIndex);
2484
2485 if (IsARM)
2486 MIB.add(predOps(ARMCC::AL));
2487 }
2488
2489 MI.eraseFromParent();
2490 return true;
2491 }
2492 case ARM::MOV_ga_pcrel:
2493 case ARM::MOV_ga_pcrel_ldr:
2494 case ARM::t2MOV_ga_pcrel: {
    // Expand into movw + movt. Also "add pc" / ldr [pc] in PIC mode.
2496 unsigned LabelId = AFI->createPICLabelUId();
2497 Register DstReg = MI.getOperand(0).getReg();
2498 bool DstIsDead = MI.getOperand(0).isDead();
2499 const MachineOperand &MO1 = MI.getOperand(1);
2500 const GlobalValue *GV = MO1.getGlobal();
2501 unsigned TF = MO1.getTargetFlags();
2502 bool isARM = Opcode != ARM::t2MOV_ga_pcrel;
2503 unsigned LO16Opc = isARM ? ARM::MOVi16_ga_pcrel : ARM::t2MOVi16_ga_pcrel;
2504 unsigned HI16Opc = isARM ? ARM::MOVTi16_ga_pcrel :ARM::t2MOVTi16_ga_pcrel;
2505 unsigned LO16TF = TF | ARMII::MO_LO16;
2506 unsigned HI16TF = TF | ARMII::MO_HI16;
2507 unsigned PICAddOpc = isARM
2508 ? (Opcode == ARM::MOV_ga_pcrel_ldr ? ARM::PICLDR : ARM::PICADD)
2509 : ARM::tPICADD;
2510 MachineInstrBuilder MIB1 = BuildMI(MBB, MBBI, MI.getDebugLoc(),
2511 TII->get(LO16Opc), DstReg)
2512 .addGlobalAddress(GV, MO1.getOffset(), TF | LO16TF)
2513 .addImm(LabelId);
2514
2515 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(HI16Opc), DstReg)
2516 .addReg(DstReg)
2517 .addGlobalAddress(GV, MO1.getOffset(), TF | HI16TF)
2518 .addImm(LabelId);
2519
2520 MachineInstrBuilder MIB3 = BuildMI(MBB, MBBI, MI.getDebugLoc(),
2521 TII->get(PICAddOpc))
2522 .addReg(DstReg, RegState::Define | getDeadRegState(DstIsDead))
2523 .addReg(DstReg).addImm(LabelId);
2524 if (isARM) {
2525 MIB3.add(predOps(ARMCC::AL));
2526 if (Opcode == ARM::MOV_ga_pcrel_ldr)
2527 MIB3.cloneMemRefs(MI);
2528 }
2529 TransferImpOps(MI, MIB1, MIB3);
2530 MI.eraseFromParent();
2531 return true;
2532 }
2533
2534 case ARM::MOVi32imm:
2535 case ARM::MOVCCi32imm:
2536 case ARM::t2MOVi32imm:
2537 case ARM::t2MOVCCi32imm:
2538 ExpandMOV32BitImm(MBB, MBBI);
2539 return true;
2540
2541 case ARM::SUBS_PC_LR: {
2542 MachineInstrBuilder MIB =
2543 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::SUBri), ARM::PC)
2544 .addReg(ARM::LR)
2545 .add(MI.getOperand(0))
2546 .add(MI.getOperand(1))
2547 .add(MI.getOperand(2))
2548 .addReg(ARM::CPSR, RegState::Undef);
2549 TransferImpOps(MI, MIB, MIB);
2550 MI.eraseFromParent();
2551 return true;
2552 }
2553 case ARM::VLDMQIA: {
2554 unsigned NewOpc = ARM::VLDMDIA;
2555 MachineInstrBuilder MIB =
2556 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc));
2557 unsigned OpIdx = 0;
2558
2559 // Grab the Q register destination.
2560 bool DstIsDead = MI.getOperand(OpIdx).isDead();
2561 Register DstReg = MI.getOperand(OpIdx++).getReg();
2562
2563 // Copy the source register.
2564 MIB.add(MI.getOperand(OpIdx++));
2565
2566 // Copy the predicate operands.
2567 MIB.add(MI.getOperand(OpIdx++));
2568 MIB.add(MI.getOperand(OpIdx++));
2569
2570 // Add the destination operands (D subregs).
2571 Register D0 = TRI->getSubReg(DstReg, ARM::dsub_0);
2572 Register D1 = TRI->getSubReg(DstReg, ARM::dsub_1);
2573 MIB.addReg(D0, RegState::Define | getDeadRegState(DstIsDead))
2574 .addReg(D1, RegState::Define | getDeadRegState(DstIsDead));
2575
2576 // Add an implicit def for the super-register.
2577 MIB.addReg(DstReg, RegState::ImplicitDefine | getDeadRegState(DstIsDead));
2578 TransferImpOps(MI, MIB, MIB);
2579 MIB.cloneMemRefs(MI);
2580 MI.eraseFromParent();
2581 return true;
2582 }
2583
2584 case ARM::VSTMQIA: {
2585 unsigned NewOpc = ARM::VSTMDIA;
2586 MachineInstrBuilder MIB =
2587 BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(NewOpc));
2588 unsigned OpIdx = 0;
2589
2590 // Grab the Q register source.
2591 bool SrcIsKill = MI.getOperand(OpIdx).isKill();
2592 Register SrcReg = MI.getOperand(OpIdx++).getReg();
2593
2594 // Copy the destination register.
2595 MachineOperand Dst(MI.getOperand(OpIdx++));
2596 MIB.add(Dst);
2597
2598 // Copy the predicate operands.
2599 MIB.add(MI.getOperand(OpIdx++));
2600 MIB.add(MI.getOperand(OpIdx++));
2601
2602 // Add the source operands (D subregs).
2603 Register D0 = TRI->getSubReg(SrcReg, ARM::dsub_0);
2604 Register D1 = TRI->getSubReg(SrcReg, ARM::dsub_1);
2605 MIB.addReg(D0, SrcIsKill ? RegState::Kill : 0)
2606 .addReg(D1, SrcIsKill ? RegState::Kill : 0);
2607
2608 if (SrcIsKill) // Add an implicit kill for the Q register.
2609 MIB->addRegisterKilled(SrcReg, TRI, true);
2610
2611 TransferImpOps(MI, MIB, MIB);
2612 MIB.cloneMemRefs(MI);
2613 MI.eraseFromParent();
2614 return true;
2615 }
2616
    case ARM::VLD2q8Pseudo:
    case ARM::VLD2q16Pseudo:
    case ARM::VLD2q32Pseudo:
    case ARM::VLD2q8PseudoWB_fixed:
    case ARM::VLD2q16PseudoWB_fixed:
    case ARM::VLD2q32PseudoWB_fixed:
    case ARM::VLD2q8PseudoWB_register:
    case ARM::VLD2q16PseudoWB_register:
    case ARM::VLD2q32PseudoWB_register:
    case ARM::VLD3d8Pseudo:
    case ARM::VLD3d16Pseudo:
    case ARM::VLD3d32Pseudo:
    case ARM::VLD1d8TPseudo:
    case ARM::VLD1d8TPseudoWB_fixed:
    case ARM::VLD1d8TPseudoWB_register:
    case ARM::VLD1d16TPseudo:
    case ARM::VLD1d16TPseudoWB_fixed:
    case ARM::VLD1d16TPseudoWB_register:
    case ARM::VLD1d32TPseudo:
    case ARM::VLD1d32TPseudoWB_fixed:
    case ARM::VLD1d32TPseudoWB_register:
    case ARM::VLD1d64TPseudo:
    case ARM::VLD1d64TPseudoWB_fixed:
    case ARM::VLD1d64TPseudoWB_register:
    case ARM::VLD3d8Pseudo_UPD:
    case ARM::VLD3d16Pseudo_UPD:
    case ARM::VLD3d32Pseudo_UPD:
    case ARM::VLD3q8Pseudo_UPD:
    case ARM::VLD3q16Pseudo_UPD:
    case ARM::VLD3q32Pseudo_UPD:
    case ARM::VLD3q8oddPseudo:
    case ARM::VLD3q16oddPseudo:
    case ARM::VLD3q32oddPseudo:
    case ARM::VLD3q8oddPseudo_UPD:
    case ARM::VLD3q16oddPseudo_UPD:
    case ARM::VLD3q32oddPseudo_UPD:
    case ARM::VLD4d8Pseudo:
    case ARM::VLD4d16Pseudo:
    case ARM::VLD4d32Pseudo:
    case ARM::VLD1d8QPseudo:
    case ARM::VLD1d8QPseudoWB_fixed:
    case ARM::VLD1d8QPseudoWB_register:
    case ARM::VLD1d16QPseudo:
    case ARM::VLD1d16QPseudoWB_fixed:
    case ARM::VLD1d16QPseudoWB_register:
    case ARM::VLD1d32QPseudo:
    case ARM::VLD1d32QPseudoWB_fixed:
    case ARM::VLD1d32QPseudoWB_register:
    case ARM::VLD1d64QPseudo:
    case ARM::VLD1d64QPseudoWB_fixed:
    case ARM::VLD1d64QPseudoWB_register:
    case ARM::VLD1q8HighQPseudo:
    case ARM::VLD1q8HighQPseudo_UPD:
    case ARM::VLD1q8LowQPseudo_UPD:
    case ARM::VLD1q8HighTPseudo:
    case ARM::VLD1q8HighTPseudo_UPD:
    case ARM::VLD1q8LowTPseudo_UPD:
    case ARM::VLD1q16HighQPseudo:
    case ARM::VLD1q16HighQPseudo_UPD:
    case ARM::VLD1q16LowQPseudo_UPD:
    case ARM::VLD1q16HighTPseudo:
    case ARM::VLD1q16HighTPseudo_UPD:
    case ARM::VLD1q16LowTPseudo_UPD:
    case ARM::VLD1q32HighQPseudo:
    case ARM::VLD1q32HighQPseudo_UPD:
    case ARM::VLD1q32LowQPseudo_UPD:
    case ARM::VLD1q32HighTPseudo:
    case ARM::VLD1q32HighTPseudo_UPD:
    case ARM::VLD1q32LowTPseudo_UPD:
    case ARM::VLD1q64HighQPseudo:
    case ARM::VLD1q64HighQPseudo_UPD:
    case ARM::VLD1q64LowQPseudo_UPD:
    case ARM::VLD1q64HighTPseudo:
    case ARM::VLD1q64HighTPseudo_UPD:
    case ARM::VLD1q64LowTPseudo_UPD:
    case ARM::VLD4d8Pseudo_UPD:
    case ARM::VLD4d16Pseudo_UPD:
    case ARM::VLD4d32Pseudo_UPD:
    case ARM::VLD4q8Pseudo_UPD:
    case ARM::VLD4q16Pseudo_UPD:
    case ARM::VLD4q32Pseudo_UPD:
    case ARM::VLD4q8oddPseudo:
    case ARM::VLD4q16oddPseudo:
    case ARM::VLD4q32oddPseudo:
    case ARM::VLD4q8oddPseudo_UPD:
    case ARM::VLD4q16oddPseudo_UPD:
    case ARM::VLD4q32oddPseudo_UPD:
    case ARM::VLD3DUPd8Pseudo:
    case ARM::VLD3DUPd16Pseudo:
    case ARM::VLD3DUPd32Pseudo:
    case ARM::VLD3DUPd8Pseudo_UPD:
    case ARM::VLD3DUPd16Pseudo_UPD:
    case ARM::VLD3DUPd32Pseudo_UPD:
    case ARM::VLD4DUPd8Pseudo:
    case ARM::VLD4DUPd16Pseudo:
    case ARM::VLD4DUPd32Pseudo:
    case ARM::VLD4DUPd8Pseudo_UPD:
    case ARM::VLD4DUPd16Pseudo_UPD:
    case ARM::VLD4DUPd32Pseudo_UPD:
    case ARM::VLD2DUPq8EvenPseudo:
    case ARM::VLD2DUPq8OddPseudo:
    case ARM::VLD2DUPq16EvenPseudo:
    case ARM::VLD2DUPq16OddPseudo:
    case ARM::VLD2DUPq32EvenPseudo:
    case ARM::VLD2DUPq32OddPseudo:
    case ARM::VLD2DUPq8OddPseudoWB_fixed:
    case ARM::VLD2DUPq8OddPseudoWB_register:
    case ARM::VLD2DUPq16OddPseudoWB_fixed:
    case ARM::VLD2DUPq16OddPseudoWB_register:
    case ARM::VLD2DUPq32OddPseudoWB_fixed:
    case ARM::VLD2DUPq32OddPseudoWB_register:
    case ARM::VLD3DUPq8EvenPseudo:
    case ARM::VLD3DUPq8OddPseudo:
    case ARM::VLD3DUPq16EvenPseudo:
    case ARM::VLD3DUPq16OddPseudo:
    case ARM::VLD3DUPq32EvenPseudo:
    case ARM::VLD3DUPq32OddPseudo:
    case ARM::VLD3DUPq8OddPseudo_UPD:
    case ARM::VLD3DUPq16OddPseudo_UPD:
    case ARM::VLD3DUPq32OddPseudo_UPD:
    case ARM::VLD4DUPq8EvenPseudo:
    case ARM::VLD4DUPq8OddPseudo:
    case ARM::VLD4DUPq16EvenPseudo:
    case ARM::VLD4DUPq16OddPseudo:
    case ARM::VLD4DUPq32EvenPseudo:
    case ARM::VLD4DUPq32OddPseudo:
    case ARM::VLD4DUPq8OddPseudo_UPD:
    case ARM::VLD4DUPq16OddPseudo_UPD:
    case ARM::VLD4DUPq32OddPseudo_UPD:
      ExpandVLD(MBBI);
      return true;

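    // NEON multi-register VSTn store pseudos are rewritten to the real store
    // instructions in ExpandVST().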
    case ARM::VST2q8Pseudo:
    case ARM::VST2q16Pseudo:
    case ARM::VST2q32Pseudo:
    case ARM::VST2q8PseudoWB_fixed:
    case ARM::VST2q16PseudoWB_fixed:
    case ARM::VST2q32PseudoWB_fixed:
    case ARM::VST2q8PseudoWB_register:
    case ARM::VST2q16PseudoWB_register:
    case ARM::VST2q32PseudoWB_register:
    case ARM::VST3d8Pseudo:
    case ARM::VST3d16Pseudo:
    case ARM::VST3d32Pseudo:
    case ARM::VST1d8TPseudo:
    case ARM::VST1d8TPseudoWB_fixed:
    case ARM::VST1d8TPseudoWB_register:
    case ARM::VST1d16TPseudo:
    case ARM::VST1d16TPseudoWB_fixed:
    case ARM::VST1d16TPseudoWB_register:
    case ARM::VST1d32TPseudo:
    case ARM::VST1d32TPseudoWB_fixed:
    case ARM::VST1d32TPseudoWB_register:
    case ARM::VST1d64TPseudo:
    case ARM::VST1d64TPseudoWB_fixed:
    case ARM::VST1d64TPseudoWB_register:
    case ARM::VST3d8Pseudo_UPD:
    case ARM::VST3d16Pseudo_UPD:
    case ARM::VST3d32Pseudo_UPD:
    case ARM::VST3q8Pseudo_UPD:
    case ARM::VST3q16Pseudo_UPD:
    case ARM::VST3q32Pseudo_UPD:
    case ARM::VST3q8oddPseudo:
    case ARM::VST3q16oddPseudo:
    case ARM::VST3q32oddPseudo:
    case ARM::VST3q8oddPseudo_UPD:
    case ARM::VST3q16oddPseudo_UPD:
    case ARM::VST3q32oddPseudo_UPD:
    case ARM::VST4d8Pseudo:
    case ARM::VST4d16Pseudo:
    case ARM::VST4d32Pseudo:
    case ARM::VST1d8QPseudo:
    case ARM::VST1d8QPseudoWB_fixed:
    case ARM::VST1d8QPseudoWB_register:
    case ARM::VST1d16QPseudo:
    case ARM::VST1d16QPseudoWB_fixed:
    case ARM::VST1d16QPseudoWB_register:
    case ARM::VST1d32QPseudo:
    case ARM::VST1d32QPseudoWB_fixed:
    case ARM::VST1d32QPseudoWB_register:
    case ARM::VST1d64QPseudo:
    case ARM::VST1d64QPseudoWB_fixed:
    case ARM::VST1d64QPseudoWB_register:
    case ARM::VST4d8Pseudo_UPD:
    case ARM::VST4d16Pseudo_UPD:
    case ARM::VST4d32Pseudo_UPD:
    case ARM::VST1q8HighQPseudo:
    case ARM::VST1q8LowQPseudo_UPD:
    case ARM::VST1q8HighTPseudo:
    case ARM::VST1q8LowTPseudo_UPD:
    case ARM::VST1q16HighQPseudo:
    case ARM::VST1q16LowQPseudo_UPD:
    case ARM::VST1q16HighTPseudo:
    case ARM::VST1q16LowTPseudo_UPD:
    case ARM::VST1q32HighQPseudo:
    case ARM::VST1q32LowQPseudo_UPD:
    case ARM::VST1q32HighTPseudo:
    case ARM::VST1q32LowTPseudo_UPD:
    case ARM::VST1q64HighQPseudo:
    case ARM::VST1q64LowQPseudo_UPD:
    case ARM::VST1q64HighTPseudo:
    case ARM::VST1q64LowTPseudo_UPD:
    case ARM::VST1q8HighTPseudo_UPD:
    case ARM::VST1q16HighTPseudo_UPD:
    case ARM::VST1q32HighTPseudo_UPD:
    case ARM::VST1q64HighTPseudo_UPD:
    case ARM::VST1q8HighQPseudo_UPD:
    case ARM::VST1q16HighQPseudo_UPD:
    case ARM::VST1q32HighQPseudo_UPD:
    case ARM::VST1q64HighQPseudo_UPD:
    case ARM::VST4q8Pseudo_UPD:
    case ARM::VST4q16Pseudo_UPD:
    case ARM::VST4q32Pseudo_UPD:
    case ARM::VST4q8oddPseudo:
    case ARM::VST4q16oddPseudo:
    case ARM::VST4q32oddPseudo:
    case ARM::VST4q8oddPseudo_UPD:
    case ARM::VST4q16oddPseudo_UPD:
    case ARM::VST4q32oddPseudo_UPD:
      ExpandVST(MBBI);
      return true;

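    // NEON single-lane load/store pseudos are rewritten to the real lane
    // instructions in ExpandLaneOp().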
    case ARM::VLD1LNq8Pseudo:
    case ARM::VLD1LNq16Pseudo:
    case ARM::VLD1LNq32Pseudo:
    case ARM::VLD1LNq8Pseudo_UPD:
    case ARM::VLD1LNq16Pseudo_UPD:
    case ARM::VLD1LNq32Pseudo_UPD:
    case ARM::VLD2LNd8Pseudo:
    case ARM::VLD2LNd16Pseudo:
    case ARM::VLD2LNd32Pseudo:
    case ARM::VLD2LNq16Pseudo:
    case ARM::VLD2LNq32Pseudo:
    case ARM::VLD2LNd8Pseudo_UPD:
    case ARM::VLD2LNd16Pseudo_UPD:
    case ARM::VLD2LNd32Pseudo_UPD:
    case ARM::VLD2LNq16Pseudo_UPD:
    case ARM::VLD2LNq32Pseudo_UPD:
    case ARM::VLD3LNd8Pseudo:
    case ARM::VLD3LNd16Pseudo:
    case ARM::VLD3LNd32Pseudo:
    case ARM::VLD3LNq16Pseudo:
    case ARM::VLD3LNq32Pseudo:
    case ARM::VLD3LNd8Pseudo_UPD:
    case ARM::VLD3LNd16Pseudo_UPD:
    case ARM::VLD3LNd32Pseudo_UPD:
    case ARM::VLD3LNq16Pseudo_UPD:
    case ARM::VLD3LNq32Pseudo_UPD:
    case ARM::VLD4LNd8Pseudo:
    case ARM::VLD4LNd16Pseudo:
    case ARM::VLD4LNd32Pseudo:
    case ARM::VLD4LNq16Pseudo:
    case ARM::VLD4LNq32Pseudo:
    case ARM::VLD4LNd8Pseudo_UPD:
    case ARM::VLD4LNd16Pseudo_UPD:
    case ARM::VLD4LNd32Pseudo_UPD:
    case ARM::VLD4LNq16Pseudo_UPD:
    case ARM::VLD4LNq32Pseudo_UPD:
    case ARM::VST1LNq8Pseudo:
    case ARM::VST1LNq16Pseudo:
    case ARM::VST1LNq32Pseudo:
    case ARM::VST1LNq8Pseudo_UPD:
    case ARM::VST1LNq16Pseudo_UPD:
    case ARM::VST1LNq32Pseudo_UPD:
    case ARM::VST2LNd8Pseudo:
    case ARM::VST2LNd16Pseudo:
    case ARM::VST2LNd32Pseudo:
    case ARM::VST2LNq16Pseudo:
    case ARM::VST2LNq32Pseudo:
    case ARM::VST2LNd8Pseudo_UPD:
    case ARM::VST2LNd16Pseudo_UPD:
    case ARM::VST2LNd32Pseudo_UPD:
    case ARM::VST2LNq16Pseudo_UPD:
    case ARM::VST2LNq32Pseudo_UPD:
    case ARM::VST3LNd8Pseudo:
    case ARM::VST3LNd16Pseudo:
    case ARM::VST3LNd32Pseudo:
    case ARM::VST3LNq16Pseudo:
    case ARM::VST3LNq32Pseudo:
    case ARM::VST3LNd8Pseudo_UPD:
    case ARM::VST3LNd16Pseudo_UPD:
    case ARM::VST3LNd32Pseudo_UPD:
    case ARM::VST3LNq16Pseudo_UPD:
    case ARM::VST3LNq32Pseudo_UPD:
    case ARM::VST4LNd8Pseudo:
    case ARM::VST4LNd16Pseudo:
    case ARM::VST4LNd32Pseudo:
    case ARM::VST4LNq16Pseudo:
    case ARM::VST4LNq32Pseudo:
    case ARM::VST4LNd8Pseudo_UPD:
    case ARM::VST4LNd16Pseudo_UPD:
    case ARM::VST4LNd32Pseudo_UPD:
    case ARM::VST4LNq16Pseudo_UPD:
    case ARM::VST4LNq32Pseudo_UPD:
      ExpandLaneOp(MBBI);
      return true;

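    // Multi-register table-lookup pseudos become the corresponding VTBL/VTBX
    // instructions.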
    case ARM::VTBL3Pseudo: ExpandVTBL(MBBI, ARM::VTBL3, false); return true;
    case ARM::VTBL4Pseudo: ExpandVTBL(MBBI, ARM::VTBL4, false); return true;
    case ARM::VTBX3Pseudo: ExpandVTBL(MBBI, ARM::VTBX3, true); return true;
    case ARM::VTBX4Pseudo: ExpandVTBL(MBBI, ARM::VTBX4, true); return true;

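    // Atomic compare-and-swap pseudos are expanded using the exclusive
    // load/store opcodes passed to ExpandCMP_SWAP().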
    case ARM::tCMP_SWAP_8:
      assert(STI->isThumb());
      return ExpandCMP_SWAP(MBB, MBBI, ARM::t2LDREXB, ARM::t2STREXB, ARM::tUXTB,
                            NextMBBI);
    case ARM::tCMP_SWAP_16:
      assert(STI->isThumb());
      return ExpandCMP_SWAP(MBB, MBBI, ARM::t2LDREXH, ARM::t2STREXH, ARM::tUXTH,
                            NextMBBI);

    case ARM::CMP_SWAP_8:
      assert(!STI->isThumb());
      return ExpandCMP_SWAP(MBB, MBBI, ARM::LDREXB, ARM::STREXB, ARM::UXTB,
                            NextMBBI);
    case ARM::CMP_SWAP_16:
      assert(!STI->isThumb());
      return ExpandCMP_SWAP(MBB, MBBI, ARM::LDREXH, ARM::STREXH, ARM::UXTH,
                            NextMBBI);
    case ARM::CMP_SWAP_32:
      if (STI->isThumb())
        return ExpandCMP_SWAP(MBB, MBBI, ARM::t2LDREX, ARM::t2STREX, 0,
                              NextMBBI);
      else
        return ExpandCMP_SWAP(MBB, MBBI, ARM::LDREX, ARM::STREX, 0, NextMBBI);

    case ARM::CMP_SWAP_64:
      return ExpandCMP_SWAP_64(MBB, MBBI, NextMBBI);

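    // Profiling call pseudos: save LR on the stack, then emit the call.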
    case ARM::tBL_PUSHLR:
    case ARM::BL_PUSHLR: {
      const bool Thumb = Opcode == ARM::tBL_PUSHLR;
      Register Reg = MI.getOperand(0).getReg();
      assert(Reg == ARM::LR && "expect LR register!");
      MachineInstrBuilder MIB;
      if (Thumb) {
        // push {lr}
        BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::tPUSH))
            .add(predOps(ARMCC::AL))
            .addReg(Reg);

        // bl __gnu_mcount_nc
        MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::tBL));
      } else {
        // stmdb sp!, {lr}
        BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::STMDB_UPD))
            .addReg(ARM::SP, RegState::Define)
            .addReg(ARM::SP)
            .add(predOps(ARMCC::AL))
            .addReg(Reg);

        // bl __gnu_mcount_nc
        MIB = BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(ARM::BL));
      }
      MIB.cloneMemRefs(MI);
      for (unsigned i = 1; i < MI.getNumOperands(); ++i)
        MIB.add(MI.getOperand(i));
      MI.eraseFromParent();
      return true;
    }
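    // 64-bit load/store pseudos on a GPR pair become LDRD/STRD on the pair's
    // two GPR subregisters.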
    case ARM::LOADDUAL:
    case ARM::STOREDUAL: {
      Register PairReg = MI.getOperand(0).getReg();

      MachineInstrBuilder MIB =
          BuildMI(MBB, MBBI, MI.getDebugLoc(),
                  TII->get(Opcode == ARM::LOADDUAL ? ARM::LDRD : ARM::STRD))
              .addReg(TRI->getSubReg(PairReg, ARM::gsub_0),
                      Opcode == ARM::LOADDUAL ? RegState::Define : 0)
              .addReg(TRI->getSubReg(PairReg, ARM::gsub_1),
                      Opcode == ARM::LOADDUAL ? RegState::Define : 0);
      for (unsigned i = 1; i < MI.getNumOperands(); ++i)
        MIB.add(MI.getOperand(i));
      MIB.add(predOps(ARMCC::AL));
      MIB.cloneMemRefs(MI);
      MI.eraseFromParent();
      return true;
    }
  }
}

bool ARMExpandPseudo::ExpandMBB(MachineBasicBlock &MBB) {
  bool Modified = false;

  MachineBasicBlock::iterator MBBI = MBB.begin(), E = MBB.end();
  while (MBBI != E) {
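    // ExpandMI may erase MBBI and may update NMBBI when it inserts new
    // blocks, so compute the next iterator before expanding.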
    MachineBasicBlock::iterator NMBBI = std::next(MBBI);
    Modified |= ExpandMI(MBB, MBBI, NMBBI);
    MBBI = NMBBI;
  }

  return Modified;
}

bool ARMExpandPseudo::runOnMachineFunction(MachineFunction &MF) {
  STI = &MF.getSubtarget<ARMSubtarget>();
  TII = STI->getInstrInfo();
  TRI = STI->getRegisterInfo();
  AFI = MF.getInfo<ARMFunctionInfo>();

  LLVM_DEBUG(dbgs() << "********** ARM EXPAND PSEUDO INSTRUCTIONS **********\n"
                    << "********** Function: " << MF.getName() << '\n');

  bool Modified = false;
  for (MachineBasicBlock &MBB : MF)
    Modified |= ExpandMBB(MBB);
  if (VerifyARMPseudo)
    MF.verify(this, "After expanding ARM pseudo instructions.");

  LLVM_DEBUG(dbgs() << "***************************************************\n");
  return Modified;
}

/// createARMExpandPseudoPass - returns an instance of the pseudo instruction
/// expansion pass.
FunctionPass *llvm::createARMExpandPseudoPass() {
  return new ARMExpandPseudo();
}