// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file encapsulates some of the odd characteristics of the ARM64
// instruction set, to minimize its interaction with the core of the
// assembler.

package arch

import (
	"cmd/internal/obj"
	"cmd/internal/obj/arm64"
	"errors"
)

// arm64LS maps the single-letter load/store instruction suffixes
// (e.g. the ".P" in "MOVD.P") to the assembler's addressing-mode bits:
// "P" selects C_XPOST (post-indexed), "W" selects C_XPRE (pre-indexed).
var arm64LS = map[string]uint8{
	"P": arm64.C_XPOST,
	"W": arm64.C_XPRE,
}

// arm64Jump is the set of ARM64 mnemonics whose operand is a branch
// target (unconditional/conditional branches, calls, and the
// compare/test-and-branch forms).
var arm64Jump = map[string]bool{
	"B":     true,
	"BL":    true,
	"BEQ":   true,
	"BNE":   true,
	"BCS":   true,
	"BHS":   true,
	"BCC":   true,
	"BLO":   true,
	"BMI":   true,
	"BPL":   true,
	"BVS":   true,
	"BVC":   true,
	"BHI":   true,
	"BLS":   true,
	"BGE":   true,
	"BLT":   true,
	"BGT":   true,
	"BLE":   true,
	"CALL":  true,
	"CBZ":   true,
	"CBZW":  true,
	"CBNZ":  true,
	"CBNZW": true,
	"JMP":   true,
	"TBNZ":  true,
	"TBZ":   true,
}

// jumpArm64 reports whether the given mnemonic is a jump instruction
// (i.e. takes a branch-target operand).
func jumpArm64(word string) bool {
	return arm64Jump[word]
}

// IsARM64CMP reports whether the op (as defined by an arm64.A* constant) is
// one of the comparison instructions that require special handling.
func IsARM64CMP(op obj.As) bool {
	switch op {
	case arm64.ACMN, arm64.ACMP, arm64.ATST,
		arm64.ACMNW, arm64.ACMPW, arm64.ATSTW,
		arm64.AFCMPS, arm64.AFCMPD,
		arm64.AFCMPES, arm64.AFCMPED:
		return true
	}
	return false
}

// IsARM64STLXR reports whether the op (as defined by an arm64.A*
// constant) is one of the STLXR-like instructions that require special
// handling.
71func IsARM64STLXR(op obj.As) bool { 72 switch op { 73 case arm64.ASTLXRB, arm64.ASTLXRH, arm64.ASTLXRW, arm64.ASTLXR, 74 arm64.ASTXRB, arm64.ASTXRH, arm64.ASTXRW, arm64.ASTXR, 75 arm64.ASTXP, arm64.ASTXPW, arm64.ASTLXP, arm64.ASTLXPW: 76 return true 77 } 78 // LDADDx/SWPx/CASx atomic instructions 79 if arm64.IsAtomicInstruction(op) { 80 return true 81 } 82 return false 83} 84 85// IsARM64TBL reports whether the op (as defined by an arm64.A* 86// constant) is one of the TBL-like instructions and one of its 87// inputs does not fit into prog.Reg, so require special handling. 88func IsARM64TBL(op obj.As) bool { 89 switch op { 90 case arm64.AVTBL, arm64.AVMOVQ: 91 return true 92 } 93 return false 94} 95 96// IsARM64CASP reports whether the op (as defined by an arm64.A* 97// constant) is one of the CASP-like instructions, and its 2nd 98// destination is a register pair that require special handling. 99func IsARM64CASP(op obj.As) bool { 100 switch op { 101 case arm64.ACASPD, arm64.ACASPW: 102 return true 103 } 104 return false 105} 106 107// ARM64Suffix handles the special suffix for the ARM64. 108// It returns a boolean to indicate success; failure means 109// cond was unrecognized. 110func ARM64Suffix(prog *obj.Prog, cond string) bool { 111 if cond == "" { 112 return true 113 } 114 bits, ok := parseARM64Suffix(cond) 115 if !ok { 116 return false 117 } 118 prog.Scond = bits 119 return true 120} 121 122// parseARM64Suffix parses the suffix attached to an ARM64 instruction. 123// The input is a single string consisting of period-separated condition 124// codes, such as ".P.W". An initial period is ignored. 
125func parseARM64Suffix(cond string) (uint8, bool) { 126 if cond == "" { 127 return 0, true 128 } 129 return parseARMCondition(cond, arm64LS, nil) 130} 131 132func arm64RegisterNumber(name string, n int16) (int16, bool) { 133 switch name { 134 case "F": 135 if 0 <= n && n <= 31 { 136 return arm64.REG_F0 + n, true 137 } 138 case "R": 139 if 0 <= n && n <= 30 { // not 31 140 return arm64.REG_R0 + n, true 141 } 142 case "V": 143 if 0 <= n && n <= 31 { 144 return arm64.REG_V0 + n, true 145 } 146 } 147 return 0, false 148} 149 150// ARM64RegisterShift constructs an ARM64 register with shift operation. 151func ARM64RegisterShift(reg, op, count int16) (int64, error) { 152 // the base register of shift operations must be general register. 153 if reg > arm64.REG_R31 || reg < arm64.REG_R0 { 154 return 0, errors.New("invalid register for shift operation") 155 } 156 return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil 157} 158 159// ARM64RegisterExtension constructs an ARM64 register with extension or arrangement. 160func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error { 161 Rnum := (reg & 31) + int16(num<<5) 162 if isAmount { 163 if num < 0 || num > 7 { 164 return errors.New("index shift amount is out of range") 165 } 166 } 167 if reg <= arm64.REG_R31 && reg >= arm64.REG_R0 { 168 if !isAmount { 169 return errors.New("invalid register extension") 170 } 171 switch ext { 172 case "UXTB": 173 if a.Type == obj.TYPE_MEM { 174 return errors.New("invalid shift for the register offset addressing mode") 175 } 176 a.Reg = arm64.REG_UXTB + Rnum 177 case "UXTH": 178 if a.Type == obj.TYPE_MEM { 179 return errors.New("invalid shift for the register offset addressing mode") 180 } 181 a.Reg = arm64.REG_UXTH + Rnum 182 case "UXTW": 183 // effective address of memory is a base register value and an offset register value. 
184 if a.Type == obj.TYPE_MEM { 185 a.Index = arm64.REG_UXTW + Rnum 186 } else { 187 a.Reg = arm64.REG_UXTW + Rnum 188 } 189 case "UXTX": 190 if a.Type == obj.TYPE_MEM { 191 return errors.New("invalid shift for the register offset addressing mode") 192 } 193 a.Reg = arm64.REG_UXTX + Rnum 194 case "SXTB": 195 if a.Type == obj.TYPE_MEM { 196 return errors.New("invalid shift for the register offset addressing mode") 197 } 198 a.Reg = arm64.REG_SXTB + Rnum 199 case "SXTH": 200 if a.Type == obj.TYPE_MEM { 201 return errors.New("invalid shift for the register offset addressing mode") 202 } 203 a.Reg = arm64.REG_SXTH + Rnum 204 case "SXTW": 205 if a.Type == obj.TYPE_MEM { 206 a.Index = arm64.REG_SXTW + Rnum 207 } else { 208 a.Reg = arm64.REG_SXTW + Rnum 209 } 210 case "SXTX": 211 if a.Type == obj.TYPE_MEM { 212 a.Index = arm64.REG_SXTX + Rnum 213 } else { 214 a.Reg = arm64.REG_SXTX + Rnum 215 } 216 case "LSL": 217 a.Index = arm64.REG_LSL + Rnum 218 default: 219 return errors.New("unsupported general register extension type: " + ext) 220 221 } 222 } else if reg <= arm64.REG_V31 && reg >= arm64.REG_V0 { 223 switch ext { 224 case "B8": 225 if isIndex { 226 return errors.New("invalid register extension") 227 } 228 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5) 229 case "B16": 230 if isIndex { 231 return errors.New("invalid register extension") 232 } 233 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5) 234 case "H4": 235 if isIndex { 236 return errors.New("invalid register extension") 237 } 238 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5) 239 case "H8": 240 if isIndex { 241 return errors.New("invalid register extension") 242 } 243 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5) 244 case "S2": 245 if isIndex { 246 return errors.New("invalid register extension") 247 } 248 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5) 249 case "S4": 250 if isIndex { 251 return errors.New("invalid 
register extension") 252 } 253 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5) 254 case "D1": 255 if isIndex { 256 return errors.New("invalid register extension") 257 } 258 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5) 259 case "D2": 260 if isIndex { 261 return errors.New("invalid register extension") 262 } 263 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5) 264 case "Q1": 265 if isIndex { 266 return errors.New("invalid register extension") 267 } 268 a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5) 269 case "B": 270 if !isIndex { 271 return nil 272 } 273 a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5) 274 a.Index = num 275 case "H": 276 if !isIndex { 277 return nil 278 } 279 a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5) 280 a.Index = num 281 case "S": 282 if !isIndex { 283 return nil 284 } 285 a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5) 286 a.Index = num 287 case "D": 288 if !isIndex { 289 return nil 290 } 291 a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5) 292 a.Index = num 293 default: 294 return errors.New("unsupported simd register extension type: " + ext) 295 } 296 } else { 297 return errors.New("invalid register and extension combination") 298 } 299 return nil 300} 301 302// ARM64RegisterArrangement constructs an ARM64 vector register arrangement. 
func ARM64RegisterArrangement(reg int16, name, arng string) (int64, error) {
	// Guard len(name) first: indexing name[0] on an empty string would
	// panic instead of reporting the error.
	if len(name) == 0 || name[0] != 'V' {
		return 0, errors.New("expect V0 through V31; found: " + name)
	}
	if reg < 0 {
		return 0, errors.New("invalid register number: " + name)
	}
	// curQ is the Q (quad) bit, curSize the element-size field.
	var curQ, curSize uint16
	switch arng {
	case "B8":
		curSize, curQ = 0, 0
	case "B16":
		curSize, curQ = 0, 1
	case "H4":
		curSize, curQ = 1, 0
	case "H8":
		curSize, curQ = 1, 1
	case "S2":
		curSize, curQ = 2, 0
	case "S4":
		curSize, curQ = 2, 1
	case "D1":
		curSize, curQ = 3, 0
	case "D2":
		curSize, curQ = 3, 1
	default:
		return 0, errors.New("invalid arrangement in ARM64 register list")
	}
	// Q bit goes in bit 30, the size field in bits 11:10. In Go, & and
	// << share one precedence level, so parenthesize explicitly rather
	// than relying on left-associativity of "curQ & 1 << 30".
	return (int64(curQ&1) << 30) | (int64(curSize&3) << 10), nil
}

// ARM64RegisterListOffset generates offset encoding according to AArch64 specification.
// firstReg occupies the low bits, the register count selects a 4-bit
// opcode-like field at bit 12, and the arrangement bits are OR-ed in.
func ARM64RegisterListOffset(firstReg, regCnt int, arrangement int64) (int64, error) {
	offset := int64(firstReg)
	switch regCnt {
	case 1:
		offset |= 0x7 << 12
	case 2:
		offset |= 0xa << 12
	case 3:
		offset |= 0x6 << 12
	case 4:
		offset |= 0x2 << 12
	default:
		return 0, errors.New("invalid register numbers in ARM64 register list")
	}
	offset |= arrangement
	// arm64 uses the 60th bit to differentiate from other archs
	// For more details, refer to: obj/arm64/list7.go
	offset |= 1 << 60
	return offset, nil
}