;; Unspec definitions.
;; Copyright (C) 2012-2022 Free Software Foundation, Inc.
;; Contributed by ARM Ltd.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published
;; by the Free Software Foundation; either version 3, or (at your
;; option) any later version.

;; GCC is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
;; License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; UNSPEC Usage:
;; Note: sin and cos are no longer used.
;; Unspec enumerators for Neon and MVE are defined near the end of this file.
;; Unspec enumerators for iwmmxt2 are defined in iwmmxt2.md.
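
;; As a purely illustrative sketch of how an "unspec" enumerator from this
;; file is consumed elsewhere in the backend: a define_insn wraps its operands
;; in an (unspec ...) so the optimizers treat the operation as opaque.  The
;; insn name, condition and template below are illustrative only, not a
;; definition made by this file:
;;
;;   (define_insn "rbit_sketch"
;;     [(set (match_operand:SI 0 "s_register_operand" "=r")
;;           (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")]
;;                      UNSPEC_RBIT))]
;;     "TARGET_32BIT && arm_arch_thumb2"
;;     "rbit%?\t%0, %1"
;;   )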

(define_c_enum "unspec" [
  UNSPEC_PUSH_MULT      ; `push multiple' operation:
                        ;   operand 0 is the first register,
                        ;   subsequent registers are in parallel (use ...)
                        ;   expressions.
  UNSPEC_PIC_SYM        ; A symbol that has been treated properly for pic
                        ; usage, that is, we will add the pic_register
                        ; value to it before trying to dereference it.
  UNSPEC_PIC_BASE       ; Add PC and all but the last operand together;
                        ; the last operand is the number of a PIC_LABEL
                        ; that points at the containing instruction.
  UNSPEC_PRLG_STK       ; A special barrier that prevents frame accesses
                        ; being scheduled before the stack adjustment insn.
  UNSPEC_REGISTER_USE   ; As USE insns are not meaningful after reload,
                        ; this unspec is used to prevent the deletion of
                        ; instructions setting registers for EH handling
                        ; and stack frame generation.  Operand 0 is the
                        ; register to "use".
  UNSPEC_CHECK_ARCH     ; Set CCs to indicate 26-bit or 32-bit mode.
  UNSPEC_WSHUFH         ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
  UNSPEC_WACC           ; Used by the intrinsic form of the iWMMXt WACC instruction.
  UNSPEC_TMOVMSK        ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
  UNSPEC_WSAD           ; Used by the intrinsic form of the iWMMXt WSAD instruction.
  UNSPEC_WSADZ          ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
  UNSPEC_WMACS          ; Used by the intrinsic form of the iWMMXt WMACS instruction.
  UNSPEC_WMACU          ; Used by the intrinsic form of the iWMMXt WMACU instruction.
  UNSPEC_WMACSZ         ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
  UNSPEC_WMACUZ         ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
  UNSPEC_CLRDI          ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
  UNSPEC_WALIGNI        ; Used by the intrinsic form of the iWMMXt WALIGN instruction.
  UNSPEC_TLS            ; A symbol that has been treated properly for TLS usage.
  UNSPEC_PIC_LABEL      ; A label used for PIC access that does not appear in the
                        ; instruction stream.
  UNSPEC_PIC_OFFSET     ; A symbolic 12-bit OFFSET that has been treated
                        ; correctly for PIC usage.
  UNSPEC_GOTSYM_OFF     ; The offset of the start of the GOT from a
                        ; given symbolic address.
  UNSPEC_THUMB1_CASESI  ; A Thumb1 compressed dispatch-table call.
  UNSPEC_RBIT           ; rbit operation.
  UNSPEC_SYMBOL_OFFSET  ; The offset of the start of the symbol from
                        ; another symbolic address.
  UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
  UNSPEC_UNALIGNED_LOAD ; Used to represent ldr/ldrh instructions that access
                        ; unaligned locations, on architectures which support
                        ; that.
  UNSPEC_UNALIGNED_STORE ; Same for str/strh.
  UNSPEC_PIC_UNIFIED    ; Create a common pic addressing form.
  UNSPEC_Q_SET          ; Represent setting the Q bit.
  UNSPEC_GE_SET         ; Represent setting the GE bits.
  UNSPEC_APSR_READ      ; Represent reading the APSR.

  UNSPEC_LL             ; Represent an unpaired load-register-exclusive.
  UNSPEC_VRINTZ         ; Represent a float to integral float rounding
                        ; towards zero.
  UNSPEC_VRINTP         ; Represent a float to integral float rounding
                        ; towards +Inf.
  UNSPEC_VRINTM         ; Represent a float to integral float rounding
                        ; towards -Inf.
  UNSPEC_VRINTR         ; Represent a float to integral float rounding
                        ; FPSCR rounding mode.
  UNSPEC_VRINTX         ; Represent a float to integral float rounding
                        ; FPSCR rounding mode and signal inexactness.
  UNSPEC_VRINTA         ; Represent a float to integral float rounding
                        ; towards nearest, ties away from zero.
  UNSPEC_PROBE_STACK    ; Probe stack memory reference.
  UNSPEC_NONSECURE_MEM  ; Represent non-secure memory in ARMv8-M with
                        ; the security extension.
  UNSPEC_SP_SET         ; Represent the setting of the stack protector's canary.
  UNSPEC_SP_TEST        ; Represent the testing of the stack protector's canary
                        ; against the guard.
  UNSPEC_PIC_RESTORE    ; Used to restore the FDPIC register.

  UNSPEC_SXTAB16        ; Represent the SXTAB16 operation.
  UNSPEC_UXTAB16        ; Represent the UXTAB16 operation.
  UNSPEC_SXTB16         ; Represent the SXTB16 operation.
  UNSPEC_UXTB16         ; Represent the UXTB16 operation.
  UNSPEC_QADD8          ; Represent the QADD8 operation.
  UNSPEC_QSUB8          ; Represent the QSUB8 operation.
  UNSPEC_SHADD8         ; Represent the SHADD8 operation.
  UNSPEC_SHSUB8         ; Represent the SHSUB8 operation.
  UNSPEC_UHADD8         ; Represent the UHADD8 operation.
  UNSPEC_UHSUB8         ; Represent the UHSUB8 operation.
  UNSPEC_UQADD8         ; Represent the UQADD8 operation.
  UNSPEC_UQSUB8         ; Represent the UQSUB8 operation.
  UNSPEC_QADD16         ; Represent the QADD16 operation.
  UNSPEC_QASX           ; Represent the QASX operation.
  UNSPEC_QSAX           ; Represent the QSAX operation.
  UNSPEC_QSUB16         ; Represent the QSUB16 operation.
  UNSPEC_SHADD16        ; Represent the SHADD16 operation.
  UNSPEC_SHASX          ; Represent the SHASX operation.
  UNSPEC_SHSAX          ; Represent the SHSAX operation.
  UNSPEC_SHSUB16        ; Represent the SHSUB16 operation.
  UNSPEC_UHADD16        ; Represent the UHADD16 operation.
  UNSPEC_UHASX          ; Represent the UHASX operation.
  UNSPEC_UHSAX          ; Represent the UHSAX operation.
  UNSPEC_UHSUB16        ; Represent the UHSUB16 operation.
  UNSPEC_UQADD16        ; Represent the UQADD16 operation.
  UNSPEC_UQASX          ; Represent the UQASX operation.
  UNSPEC_UQSAX          ; Represent the UQSAX operation.
  UNSPEC_UQSUB16        ; Represent the UQSUB16 operation.
  UNSPEC_SMUSD          ; Represent the SMUSD operation.
  UNSPEC_SMUSDX         ; Represent the SMUSDX operation.
  UNSPEC_USAD8          ; Represent the USAD8 operation.
  UNSPEC_USADA8         ; Represent the USADA8 operation.
  UNSPEC_SMLALD         ; Represent the SMLALD operation.
  UNSPEC_SMLALDX        ; Represent the SMLALDX operation.
  UNSPEC_SMLSLD         ; Represent the SMLSLD operation.
  UNSPEC_SMLSLDX        ; Represent the SMLSLDX operation.
  UNSPEC_SMLAWB         ; Represent the SMLAWB operation.
  UNSPEC_SMLAWT         ; Represent the SMLAWT operation.
  UNSPEC_SEL            ; Represent the SEL operation.
  UNSPEC_SADD8          ; Represent the SADD8 operation.
  UNSPEC_SSUB8          ; Represent the SSUB8 operation.
  UNSPEC_UADD8          ; Represent the UADD8 operation.
  UNSPEC_USUB8          ; Represent the USUB8 operation.
  UNSPEC_SADD16         ; Represent the SADD16 operation.
  UNSPEC_SASX           ; Represent the SASX operation.
  UNSPEC_SSAX           ; Represent the SSAX operation.
  UNSPEC_SSUB16         ; Represent the SSUB16 operation.
  UNSPEC_UADD16         ; Represent the UADD16 operation.
  UNSPEC_UASX           ; Represent the UASX operation.
  UNSPEC_USAX           ; Represent the USAX operation.
  UNSPEC_USUB16         ; Represent the USUB16 operation.
  UNSPEC_SMLAD          ; Represent the SMLAD operation.
  UNSPEC_SMLADX         ; Represent the SMLADX operation.
  UNSPEC_SMLSD          ; Represent the SMLSD operation.
  UNSPEC_SMLSDX         ; Represent the SMLSDX operation.
  UNSPEC_SMUAD          ; Represent the SMUAD operation.
  UNSPEC_SMUADX         ; Represent the SMUADX operation.
  UNSPEC_SSAT16         ; Represent the SSAT16 operation.
  UNSPEC_USAT16         ; Represent the USAT16 operation.
  UNSPEC_CDE            ; Custom Datapath Extension instruction.
  UNSPEC_CDEA           ; Custom Datapath Extension instruction.
  UNSPEC_VCDE           ; Custom Datapath Extension instruction.
  UNSPEC_VCDEA          ; Custom Datapath Extension instruction.
  UNSPEC_DLS            ; Used for DLS (Do Loop Start), an Armv8.1-M Mainline instruction.
])

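;; The DSP/media enumerators above are likewise wrapped by define_insn
;; patterns elsewhere in the backend.  A minimal sketch of a two-input form;
;; the insn name, condition and template are assumptions for illustration:
;;
;;   (define_insn "usad8_sketch"
;;     [(set (match_operand:SI 0 "s_register_operand" "=r")
;;           (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
;;                       (match_operand:SI 2 "s_register_operand" "r")]
;;                      UNSPEC_USAD8))]
;;     "TARGET_INT_SIMD"
;;     "usad8%?\t%0, %1, %2"
;;   )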

(define_c_enum "unspec" [
  UNSPEC_WADDC          ; Used by the intrinsic form of the iWMMXt WADDC instruction.
  UNSPEC_WABS           ; Used by the intrinsic form of the iWMMXt WABS instruction.
  UNSPEC_WQMULWMR       ; Used by the intrinsic form of the iWMMXt WQMULWMR instruction.
  UNSPEC_WQMULMR        ; Used by the intrinsic form of the iWMMXt WQMULMR instruction.
  UNSPEC_WQMULWM        ; Used by the intrinsic form of the iWMMXt WQMULWM instruction.
  UNSPEC_WQMULM         ; Used by the intrinsic form of the iWMMXt WQMULM instruction.
  UNSPEC_WQMIAxyn       ; Used by the intrinsic form of the iWMMXt WMIAxyn instruction.
  UNSPEC_WQMIAxy        ; Used by the intrinsic form of the iWMMXt WMIAxy instruction.
  UNSPEC_TANDC          ; Used by the intrinsic form of the iWMMXt TANDC instruction.
  UNSPEC_TORC           ; Used by the intrinsic form of the iWMMXt TORC instruction.
  UNSPEC_TORVSC         ; Used by the intrinsic form of the iWMMXt TORVSC instruction.
  UNSPEC_TEXTRC         ; Used by the intrinsic form of the iWMMXt TEXTRC instruction.
  UNSPEC_GET_FPSCR_NZCVQC       ; Represent fetch of FPSCR_nzcvqc content.
])


;; UNSPEC_VOLATILE Usage:

(define_c_enum "unspecv" [
  VUNSPEC_BLOCKAGE      ; `blockage' insn to prevent scheduling across an
                        ;   insn in the code.
  VUNSPEC_EPILOGUE      ; `epilogue' insn, used to represent any part of the
                        ;   instruction epilogue sequence that isn't expanded
                        ;   into normal RTL.  Used for both normal and sibcall
                        ;   epilogues.
  VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
                        ;   modes from Arm to Thumb.
  VUNSPEC_ALIGN         ; `align' insn.  Used at the head of a minipool table
                        ;   for inlined constants.
  VUNSPEC_POOL_END      ; `end-of-table'.  Used to mark the end of a minipool
                        ;   table.
  VUNSPEC_POOL_1        ; `pool-entry(1)'.  An entry in the constant pool for
                        ;   an 8-bit object.
  VUNSPEC_POOL_2        ; `pool-entry(2)'.  An entry in the constant pool for
                        ;   a 16-bit object.
  VUNSPEC_POOL_4        ; `pool-entry(4)'.  An entry in the constant pool for
                        ;   a 32-bit object.
  VUNSPEC_POOL_8        ; `pool-entry(8)'.  An entry in the constant pool for
                        ;   a 64-bit object.
  VUNSPEC_POOL_16       ; `pool-entry(16)'.  An entry in the constant pool for
                        ;   a 128-bit object.
  VUNSPEC_TMRC          ; Used by the iWMMXt TMRC instruction.
  VUNSPEC_TMCR          ; Used by the iWMMXt TMCR instruction.
  VUNSPEC_ALIGN8        ; 8-byte alignment version of VUNSPEC_ALIGN.
  VUNSPEC_WCMP_EQ       ; Used by the iWMMXt WCMPEQ instructions.
  VUNSPEC_WCMP_GTU      ; Used by the iWMMXt WCMPGTU instructions.
  VUNSPEC_WCMP_GT       ; Used by the iWMMXt WCMPGT instructions.
  VUNSPEC_EH_RETURN     ; Used to override the return address for exception
                        ; handling.
  VUNSPEC_ATOMIC_CAS    ; Represent an atomic compare swap.
  VUNSPEC_ATOMIC_XCHG   ; Represent an atomic exchange.
  VUNSPEC_ATOMIC_OP     ; Represent an atomic operation.
  VUNSPEC_LL            ; Represent a load-register-exclusive.
  VUNSPEC_LDRD_ATOMIC   ; Represent an LDRD used as an atomic DImode load.
  VUNSPEC_SC            ; Represent a store-register-exclusive.
  VUNSPEC_LAX           ; Represent a load-register-acquire-exclusive.
  VUNSPEC_SLX           ; Represent a store-register-release-exclusive.
  VUNSPEC_LDA           ; Represent a load-register-acquire.
  VUNSPEC_STL           ; Represent a store-register-release.
  VUNSPEC_GET_FPSCR     ; Represent fetch of FPSCR content.
  VUNSPEC_SET_FPSCR     ; Represent assign of FPSCR content.
  VUNSPEC_SET_FPSCR_NZCVQC      ; Represent assign of FPSCR_nzcvqc content.
  VUNSPEC_PROBE_STACK_RANGE ; Represent stack range probing.
  VUNSPEC_CDP           ; Represent the coprocessor cdp instruction.
  VUNSPEC_CDP2          ; Represent the coprocessor cdp2 instruction.
  VUNSPEC_LDC           ; Represent the coprocessor ldc instruction.
  VUNSPEC_LDC2          ; Represent the coprocessor ldc2 instruction.
  VUNSPEC_LDCL          ; Represent the coprocessor ldcl instruction.
  VUNSPEC_LDC2L         ; Represent the coprocessor ldc2l instruction.
  VUNSPEC_STC           ; Represent the coprocessor stc instruction.
  VUNSPEC_STC2          ; Represent the coprocessor stc2 instruction.
  VUNSPEC_STCL          ; Represent the coprocessor stcl instruction.
  VUNSPEC_STC2L         ; Represent the coprocessor stc2l instruction.
  VUNSPEC_MCR           ; Represent the coprocessor mcr instruction.
  VUNSPEC_MCR2          ; Represent the coprocessor mcr2 instruction.
  VUNSPEC_MRC           ; Represent the coprocessor mrc instruction.
  VUNSPEC_MRC2          ; Represent the coprocessor mrc2 instruction.
  VUNSPEC_MCRR          ; Represent the coprocessor mcrr instruction.
  VUNSPEC_MCRR2         ; Represent the coprocessor mcrr2 instruction.
  VUNSPEC_MRRC          ; Represent the coprocessor mrrc instruction.
  VUNSPEC_MRRC2         ; Represent the coprocessor mrrc2 instruction.
  VUNSPEC_SPECULATION_BARRIER ; Represent an unconditional speculation barrier.
  VUNSPEC_APSR_WRITE    ; Represent writing the APSR.
  VUNSPEC_VSTR_VLDR     ; Represent the vstr/vldr instruction.
  VUNSPEC_CLRM_APSR     ; Represent the clearing of APSR with the clrm
                        ; instruction.
  VUNSPEC_VSCCLRM_VPR   ; Represent the clearing of VPR with the vscclrm
                        ; instruction.
  VUNSPEC_VLSTM         ; Represent the lazy store multiple with the vlstm
                        ; instruction.
  VUNSPEC_VLLDM         ; Represent the lazy load multiple with the vlldm
                        ; instruction.
])

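;; Unlike plain unspecs, unspec_volatile expressions also act as barriers to
;; most optimizations.  A minimal sketch of how an enumerator such as
;; VUNSPEC_BLOCKAGE is typically used (the actual blockage pattern lives in
;; arm.md; the name and attributes shown here are assumptions):
;;
;;   (define_insn "blockage_sketch"
;;     [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
;;     ""
;;     ""
;;     [(set_attr "length" "0")
;;      (set_attr "type" "block")]
;;   )
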
;; Enumerators for NEON unspecs.
(define_c_enum "unspec" [
  UNSPEC_ASHIFT_SIGNED
  UNSPEC_ASHIFT_UNSIGNED
  UNSPEC_CRC32B
  UNSPEC_CRC32H
  UNSPEC_CRC32W
  UNSPEC_CRC32CB
  UNSPEC_CRC32CH
  UNSPEC_CRC32CW
  UNSPEC_AESD
  UNSPEC_AESE
  UNSPEC_AESIMC
  UNSPEC_AESMC
  UNSPEC_AES_PROTECT
  UNSPEC_SHA1C
  UNSPEC_SHA1M
  UNSPEC_SHA1P
  UNSPEC_SHA1H
  UNSPEC_SHA1SU0
  UNSPEC_SHA1SU1
  UNSPEC_SHA256H
  UNSPEC_SHA256H2
  UNSPEC_SHA256SU0
  UNSPEC_SHA256SU1
  UNSPEC_VMULLP64
  UNSPEC_LOAD_COUNT
  UNSPEC_VABAL_S
  UNSPEC_VABAL_U
  UNSPEC_VABD_F
  UNSPEC_VABD_S
  UNSPEC_VABD_U
  UNSPEC_VABDL_S
  UNSPEC_VABDL_U
  UNSPEC_VADD
  UNSPEC_VADDHN
  UNSPEC_VRADDHN
  UNSPEC_VADDL_S
  UNSPEC_VADDL_U
  UNSPEC_VADDW_S
  UNSPEC_VADDW_U
  UNSPEC_VBSL
  UNSPEC_VCAGE
  UNSPEC_VCAGT
  UNSPEC_VCALE
  UNSPEC_VCALT
  UNSPEC_VCEQ
  UNSPEC_VCGE
  UNSPEC_VCGEU
  UNSPEC_VCGT
  UNSPEC_VCGTU
  UNSPEC_VCLS
  UNSPEC_VCONCAT
  UNSPEC_VCVT
  UNSPEC_VCVT_S
  UNSPEC_VCVT_U
  UNSPEC_VCVT_S_N
  UNSPEC_VCVT_U_N
  UNSPEC_VCVT_HF_S_N
  UNSPEC_VCVT_HF_U_N
  UNSPEC_VCVT_SI_S_N
  UNSPEC_VCVT_SI_U_N
  UNSPEC_VCVTH_S
  UNSPEC_VCVTH_U
  UNSPEC_VCVTA_S
  UNSPEC_VCVTA_U
  UNSPEC_VCVTM_S
  UNSPEC_VCVTM_U
  UNSPEC_VCVTN_S
  UNSPEC_VCVTN_U
  UNSPEC_VCVTP_S
  UNSPEC_VCVTP_U
  UNSPEC_VEXT
  UNSPEC_VHADD_S
  UNSPEC_VHADD_U
  UNSPEC_VRHADD_S
  UNSPEC_VRHADD_U
  UNSPEC_VHSUB_S
  UNSPEC_VHSUB_U
  UNSPEC_VLD1
  UNSPEC_VLD1_LANE
  UNSPEC_VLD2
  UNSPEC_VLD2_DUP
  UNSPEC_VLD2_LANE
  UNSPEC_VLD3
  UNSPEC_VLD3A
  UNSPEC_VLD3B
  UNSPEC_VLD3_DUP
  UNSPEC_VLD3_LANE
  UNSPEC_VLD4
  UNSPEC_VLD4A
  UNSPEC_VLD4B
  UNSPEC_VLD4_DUP
  UNSPEC_VLD4_LANE
  UNSPEC_VMAX
  UNSPEC_VMAX_U
  UNSPEC_VMAXNM
  UNSPEC_VMIN
  UNSPEC_VMIN_U
  UNSPEC_VMINNM
  UNSPEC_VMLA
  UNSPEC_VMLA_LANE
  UNSPEC_VMLAL_S
  UNSPEC_VMLAL_U
  UNSPEC_VMLAL_S_LANE
  UNSPEC_VMLAL_U_LANE
  UNSPEC_VMLS
  UNSPEC_VMLS_LANE
  UNSPEC_VMLSL_S
  UNSPEC_VMLSL_U
  UNSPEC_VMLSL_S_LANE
  UNSPEC_VMLSL_U_LANE
  UNSPEC_VMLSL_LANE
  UNSPEC_VFMA_LANE
  UNSPEC_VFMS_LANE
  UNSPEC_VMOVL_S
  UNSPEC_VMOVL_U
  UNSPEC_VMOVN
  UNSPEC_VMUL
  UNSPEC_VMULL_P
  UNSPEC_VMULL_S
  UNSPEC_VMULL_U
  UNSPEC_VMUL_LANE
  UNSPEC_VMULL_S_LANE
  UNSPEC_VMULL_U_LANE
  UNSPEC_VPADAL_S
  UNSPEC_VPADAL_U
  UNSPEC_VPADD
  UNSPEC_VPADDL_S
  UNSPEC_VPADDL_U
  UNSPEC_VPMAX
  UNSPEC_VPMAX_U
  UNSPEC_VPMIN
  UNSPEC_VPMIN_U
  UNSPEC_VPSMAX
  UNSPEC_VPSMIN
  UNSPEC_VPUMAX
  UNSPEC_VPUMIN
  UNSPEC_VQABS
  UNSPEC_VQADD_S
  UNSPEC_VQADD_U
  UNSPEC_VQDMLAL
  UNSPEC_VQDMLAL_LANE
  UNSPEC_VQDMLSL
  UNSPEC_VQDMLSL_LANE
  UNSPEC_VQDMULH
  UNSPEC_VQDMULH_LANE
  UNSPEC_VQRDMULH
  UNSPEC_VQRDMULH_LANE
  UNSPEC_VQDMULL
  UNSPEC_VQDMULL_LANE
  UNSPEC_VQMOVN_S
  UNSPEC_VQMOVN_U
  UNSPEC_VQMOVUN
  UNSPEC_VQNEG
  UNSPEC_VQSHL_S
  UNSPEC_VQSHL_U
  UNSPEC_VQRSHL_S
  UNSPEC_VQRSHL_U
  UNSPEC_VQSHL_S_N
  UNSPEC_VQSHL_U_N
  UNSPEC_VQSHLU_N
  UNSPEC_VQSHRN_S_N
  UNSPEC_VQSHRN_U_N
  UNSPEC_VQRSHRN_S_N
  UNSPEC_VQRSHRN_U_N
  UNSPEC_VQSHRUN_N
  UNSPEC_VQRSHRUN_N
  UNSPEC_VQSUB_S
  UNSPEC_VQSUB_U
  UNSPEC_VRECPE
  UNSPEC_VRECPS
  UNSPEC_VREV16
  UNSPEC_VREV32
  UNSPEC_VREV64
  UNSPEC_VRSQRTE
  UNSPEC_VRSQRTS
  UNSPEC_VSHL_S
  UNSPEC_VSHL_U
  UNSPEC_VRSHL_S
  UNSPEC_VRSHL_U
  UNSPEC_VSHLL_S_N
  UNSPEC_VSHLL_U_N
  UNSPEC_VSHL_N
  UNSPEC_VSHR_S_N
  UNSPEC_VSHR_U_N
  UNSPEC_VRSHR_S_N
  UNSPEC_VRSHR_U_N
  UNSPEC_VSHRN_N
  UNSPEC_VRSHRN_N
  UNSPEC_VSLI
  UNSPEC_VSRA_S_N
  UNSPEC_VSRA_U_N
  UNSPEC_VRSRA_S_N
  UNSPEC_VRSRA_U_N
  UNSPEC_VSRI
  UNSPEC_VST1
  UNSPEC_VST1_LANE
  UNSPEC_VST2
  UNSPEC_VST2_LANE
  UNSPEC_VST3
  UNSPEC_VST3A
  UNSPEC_VST3B
  UNSPEC_VST3_LANE
  UNSPEC_VST4
  UNSPEC_VST4A
  UNSPEC_VST4B
  UNSPEC_VST4_LANE
  UNSPEC_VSTRUCTDUMMY
  UNSPEC_VSUB
  UNSPEC_VSUBHN
  UNSPEC_VRSUBHN
  UNSPEC_VSUBL_S
  UNSPEC_VSUBL_U
  UNSPEC_VSUBW_S
  UNSPEC_VSUBW_U
  UNSPEC_VTBL
  UNSPEC_VTBX
  UNSPEC_VTRN1
  UNSPEC_VTRN2
  UNSPEC_VTST
  UNSPEC_VUZP1
  UNSPEC_VUZP2
  UNSPEC_VZIP1
  UNSPEC_VZIP2
  UNSPEC_MISALIGNED_ACCESS
  UNSPEC_VCLE
  UNSPEC_VCLT
  UNSPEC_NVRINTZ
  UNSPEC_NVRINTP
  UNSPEC_NVRINTM
  UNSPEC_NVRINTX
  UNSPEC_NVRINTA
  UNSPEC_NVRINTN
  UNSPEC_VQRDMLAH
  UNSPEC_VQRDMLSH
  UNSPEC_VRND
  UNSPEC_VRNDA
  UNSPEC_VRNDI
  UNSPEC_VRNDM
  UNSPEC_VRNDN
  UNSPEC_VRNDP
  UNSPEC_VRNDX
  UNSPEC_DOT_S
  UNSPEC_DOT_U
  UNSPEC_DOT_US
  UNSPEC_DOT_SU
  UNSPEC_VFML_LO
  UNSPEC_VFML_HI
  UNSPEC_VCADD90
  UNSPEC_VCADD270
  UNSPEC_VCMLA
  UNSPEC_VCMLA90
  UNSPEC_VCMLA180
  UNSPEC_VCMLA270
  UNSPEC_VCMLA_CONJ
  UNSPEC_VCMLA180_CONJ
  UNSPEC_VCMUL
  UNSPEC_VCMUL90
  UNSPEC_VCMUL180
  UNSPEC_VCMUL270
  UNSPEC_VCMUL_CONJ
  UNSPEC_MATMUL_S
  UNSPEC_MATMUL_U
  UNSPEC_MATMUL_US
  UNSPEC_BFCVT
  UNSPEC_BFCVT_HIGH
  UNSPEC_BFMMLA
  UNSPEC_BFMAB
  UNSPEC_BFMAT
])

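;; The Neon enumerators above carry no comments; each follows the mnemonic of
;; the instruction it represents.  A vector unspec is used just like the
;; scalar ones, only with vector modes.  A purely illustrative sketch (the
;; real patterns in neon.md use mode iterators; the name, mode and template
;; here are assumptions):
;;
;;   (define_insn "vrhadd_sketch"
;;     [(set (match_operand:V8QI 0 "s_register_operand" "=w")
;;           (unspec:V8QI [(match_operand:V8QI 1 "s_register_operand" "w")
;;                         (match_operand:V8QI 2 "s_register_operand" "w")]
;;                        UNSPEC_VRHADD_S))]
;;     "TARGET_NEON"
;;     "vrhadd.s8\t%P0, %P1, %P2"
;;   )
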
;; Enumerators for MVE unspecs.
(define_c_enum "unspec" [
  VST4Q
  VRNDXQ_F
  VRNDQ_F
  VRNDPQ_F
  VRNDNQ_F
  VRNDMQ_F
  VRNDAQ_F
  VREV64Q_F
  VDUPQ_N_F
  VREV32Q_F
  VCVTTQ_F32_F16
  VCVTBQ_F32_F16
  VCVTQ_TO_F_S
  VQNEGQ_S
  VCVTQ_TO_F_U
  VREV16Q_S
  VREV16Q_U
  VADDLVQ_S
  VMVNQ_N_S
  VMVNQ_N_U
  VCVTAQ_S
  VCVTAQ_U
  VREV64Q_S
  VREV64Q_U
  VQABSQ_S
  VDUPQ_N_U
  VDUPQ_N_S
  VCLSQ_S
  VADDVQ_S
  VADDVQ_U
  VREV32Q_U
  VREV32Q_S
  VMOVLTQ_U
  VMOVLTQ_S
  VMOVLBQ_S
  VMOVLBQ_U
  VCVTQ_FROM_F_S
  VCVTQ_FROM_F_U
  VCVTPQ_S
  VCVTPQ_U
  VCVTNQ_S
  VCVTNQ_U
  VCVTMQ_S
  VCVTMQ_U
  VADDLVQ_U
  VCTP8Q
  VCTP16Q
  VCTP32Q
  VCTP64Q
  VPNOT
  VCREATEQ_F
  VCVTQ_N_TO_F_S
  VCVTQ_N_TO_F_U
  VBRSRQ_N_F
  VSUBQ_N_F
  VCREATEQ_U
  VCREATEQ_S
  VSHRQ_N_S
  VSHRQ_N_U
  VCVTQ_N_FROM_F_S
  VCVTQ_N_FROM_F_U
  VADDLVQ_P_S
  VADDLVQ_P_U
  VSHLQ_S
  VSHLQ_U
  VABDQ_S
  VADDQ_N_S
  VADDVAQ_S
  VADDVQ_P_S
  VBRSRQ_N_S
  VHADDQ_S
  VHADDQ_N_S
  VHSUBQ_S
  VHSUBQ_N_S
  VMAXQ_S
  VMAXVQ_S
  VMINQ_S
  VMINVQ_S
  VMLADAVQ_S
  VMULHQ_S
  VMULLBQ_INT_S
  VMULLTQ_INT_S
  VMULQ_S
  VMULQ_N_S
  VQADDQ_S
  VQADDQ_N_S
  VQRSHLQ_S
  VQRSHLQ_N_S
  VQSHLQ_S
  VQSHLQ_N_S
  VQSHLQ_R_S
  VQSUBQ_S
  VQSUBQ_N_S
  VRHADDQ_S
  VRMULHQ_S
  VRSHLQ_S
  VRSHLQ_N_S
  VRSHRQ_N_S
  VSHLQ_N_S
  VSHLQ_R_S
  VSUBQ_S
  VSUBQ_N_S
  VABDQ_U
  VADDQ_N_U
  VADDVAQ_U
  VADDVQ_P_U
  VBRSRQ_N_U
  VHADDQ_U
  VHADDQ_N_U
  VHSUBQ_U
  VHSUBQ_N_U
  VMAXQ_U
  VMAXVQ_U
  VMINQ_U
  VMINVQ_U
  VMLADAVQ_U
  VMULHQ_U
  VMULLBQ_INT_U
  VMULLTQ_INT_U
  VMULQ_U
  VMULQ_N_U
  VQADDQ_U
  VQADDQ_N_U
  VQRSHLQ_U
  VQRSHLQ_N_U
  VQSHLQ_U
  VQSHLQ_N_U
  VQSHLQ_R_U
  VQSUBQ_U
  VQSUBQ_N_U
  VRHADDQ_U
  VRMULHQ_U
  VRSHLQ_U
  VRSHLQ_N_U
  VRSHRQ_N_U
  VSHLQ_N_U
  VSHLQ_R_U
  VSUBQ_U
  VSUBQ_N_U
  VHCADDQ_ROT270_S
  VHCADDQ_ROT90_S
  VMAXAQ_S
  VMAXAVQ_S
  VMINAQ_S
  VMINAVQ_S
  VMLADAVXQ_S
  VMLSDAVQ_S
  VMLSDAVXQ_S
  VQDMULHQ_N_S
  VQDMULHQ_S
  VQRDMULHQ_N_S
  VQRDMULHQ_S
  VQSHLUQ_N_S
  VABDQ_M_S
  VABDQ_M_U
  VABDQ_F
  VADDQ_N_F
  VMAXNMAQ_F
  VMAXNMAVQ_F
  VMAXNMQ_F
  VMAXNMVQ_F
  VMINNMAQ_F
  VMINNMAVQ_F
  VMINNMQ_F
  VMINNMVQ_F
  VMULQ_F
  VMULQ_N_F
  VSUBQ_F
  VADDLVAQ_U
  VADDLVAQ_S
  VBICQ_N_U
  VBICQ_N_S
  VCTP8Q_M
  VCTP16Q_M
  VCTP32Q_M
  VCTP64Q_M
  VCVTBQ_F16_F32
  VCVTTQ_F16_F32
  VMLALDAVQ_U
  VMLALDAVXQ_U
  VMLALDAVXQ_S
  VMLALDAVQ_S
  VMLSLDAVQ_S
  VMLSLDAVXQ_S
  VMOVNBQ_U
  VMOVNBQ_S
  VMOVNTQ_U
  VMOVNTQ_S
  VORRQ_N_S
  VORRQ_N_U
  VQDMULLBQ_N_S
  VQDMULLBQ_S
  VQDMULLTQ_N_S
  VQDMULLTQ_S
  VQMOVNBQ_U
  VQMOVNBQ_S
  VQMOVUNBQ_S
  VQMOVUNTQ_S
  VRMLALDAVHXQ_S
  VRMLSLDAVHQ_S
  VRMLSLDAVHXQ_S
  VSHLLBQ_S
  VSHLLBQ_U
  VSHLLTQ_U
  VSHLLTQ_S
  VQMOVNTQ_U
  VQMOVNTQ_S
  VSHLLBQ_N_S
  VSHLLBQ_N_U
  VSHLLTQ_N_U
  VSHLLTQ_N_S
  VRMLALDAVHQ_U
  VRMLALDAVHQ_S
  VMULLTQ_POLY_P
  VMULLBQ_POLY_P
  VBICQ_M_N_S
  VBICQ_M_N_U
  VCMPEQQ_M_F
  VCVTAQ_M_S
  VCVTAQ_M_U
  VCVTQ_M_TO_F_S
  VCVTQ_M_TO_F_U
  VQRSHRNBQ_N_U
  VQRSHRNBQ_N_S
  VQRSHRUNBQ_N_S
  VRMLALDAVHAQ_S
  VABAVQ_S
  VABAVQ_U
  VSHLCQ_S
  VSHLCQ_U
  VRMLALDAVHAQ_U
  VABSQ_M_S
  VADDVAQ_P_S
  VADDVAQ_P_U
  VCLSQ_M_S
  VCLZQ_M_S
  VCLZQ_M_U
  VCMPCSQ_M_N_U
  VCMPCSQ_M_U
  VCMPEQQ_M_N_S
  VCMPEQQ_M_N_U
  VCMPEQQ_M_S
  VCMPEQQ_M_U
  VCMPGEQ_M_N_S
  VCMPGEQ_M_S
  VCMPGTQ_M_N_S
  VCMPGTQ_M_S
  VCMPHIQ_M_N_U
  VCMPHIQ_M_U
  VCMPLEQ_M_N_S
  VCMPLEQ_M_S
  VCMPLTQ_M_N_S
  VCMPLTQ_M_S
  VCMPNEQ_M_N_S
  VCMPNEQ_M_N_U
  VCMPNEQ_M_S
  VCMPNEQ_M_U
  VDUPQ_M_N_S
  VDUPQ_M_N_U
  VDWDUPQ_N_U
  VDWDUPQ_WB_U
  VIWDUPQ_N_U
  VIWDUPQ_WB_U
  VMAXAQ_M_S
  VMAXAVQ_P_S
  VMAXVQ_P_S
  VMAXVQ_P_U
  VMINAQ_M_S
  VMINAVQ_P_S
  VMINVQ_P_S
  VMINVQ_P_U
  VMLADAVAQ_S
  VMLADAVAQ_U
  VMLADAVQ_P_S
  VMLADAVQ_P_U
  VMLADAVXQ_P_S
  VMLAQ_N_S
  VMLAQ_N_U
  VMLASQ_N_S
  VMLASQ_N_U
  VMLSDAVQ_P_S
  VMLSDAVXQ_P_S
  VMVNQ_M_S
  VMVNQ_M_U
  VNEGQ_M_S
  VPSELQ_S
  VPSELQ_U
  VQABSQ_M_S
  VQDMLAHQ_N_S
  VQDMLASHQ_N_S
  VQNEGQ_M_S
  VQRDMLADHQ_S
  VQRDMLADHXQ_S
  VQRDMLAHQ_N_S
  VQRDMLASHQ_N_S
  VQRDMLSDHQ_S
  VQRDMLSDHXQ_S
  VQRSHLQ_M_N_S
  VQRSHLQ_M_N_U
  VQSHLQ_M_R_S
  VQSHLQ_M_R_U
  VREV64Q_M_S
  VREV64Q_M_U
  VRSHLQ_M_N_S
  VRSHLQ_M_N_U
  VSHLQ_M_R_S
  VSHLQ_M_R_U
  VSLIQ_N_S
  VSLIQ_N_U
  VSRIQ_N_S
  VSRIQ_N_U
  VQDMLSDHXQ_S
  VQDMLSDHQ_S
  VQDMLADHXQ_S
  VQDMLADHQ_S
  VMLSDAVAXQ_S
  VMLSDAVAQ_S
  VMLADAVAXQ_S
  VCMPGEQ_M_F
  VCMPGTQ_M_N_F
  VMLSLDAVQ_P_S
  VRMLALDAVHAXQ_S
  VMLSLDAVXQ_P_S
  VFMAQ_F
  VMLSLDAVAQ_S
  VQSHRUNBQ_N_S
  VQRSHRUNTQ_N_S
  VMINNMAQ_M_F
  VFMASQ_N_F
  VDUPQ_M_N_F
  VCMPGTQ_M_F
  VCMPLTQ_M_F
  VRMLSLDAVHQ_P_S
  VQSHRUNTQ_N_S
  VABSQ_M_F
  VMAXNMAVQ_P_F
  VFMAQ_N_F
  VRMLSLDAVHXQ_P_S
  VREV32Q_M_F
  VRMLSLDAVHAQ_S
  VRMLSLDAVHAXQ_S
  VCMPLTQ_M_N_F
  VCMPNEQ_M_F
  VRNDAQ_M_F
  VRNDPQ_M_F
  VADDLVAQ_P_S
  VQMOVUNBQ_M_S
  VCMPLEQ_M_F
  VMLSLDAVAXQ_S
  VRNDXQ_M_F
  VFMSQ_F
  VMINNMVQ_P_F
  VMAXNMVQ_P_F
  VPSELQ_F
  VQMOVUNTQ_M_S
  VREV64Q_M_F
  VNEGQ_M_F
  VRNDMQ_M_F
  VCMPLEQ_M_N_F
  VCMPGEQ_M_N_F
  VRNDNQ_M_F
  VMINNMAVQ_P_F
  VCMPNEQ_M_N_F
  VRMLALDAVHQ_P_S
  VRMLALDAVHXQ_P_S
  VCMPEQQ_M_N_F
  VMAXNMAQ_M_F
  VRNDQ_M_F
  VMLALDAVQ_P_U
  VMLALDAVQ_P_S
  VQMOVNBQ_M_S
  VQMOVNBQ_M_U
  VMOVLTQ_M_U
  VMOVLTQ_M_S
  VMOVNBQ_M_U
  VMOVNBQ_M_S
  VRSHRNTQ_N_U
  VRSHRNTQ_N_S
  VORRQ_M_N_S
  VORRQ_M_N_U
  VREV32Q_M_S
  VREV32Q_M_U
  VQRSHRNTQ_N_U
  VQRSHRNTQ_N_S
  VMOVNTQ_M_U
  VMOVNTQ_M_S
  VMOVLBQ_M_U
  VMOVLBQ_M_S
  VMLALDAVAQ_S
  VMLALDAVAQ_U
  VQSHRNBQ_N_U
  VQSHRNBQ_N_S
  VSHRNBQ_N_U
  VSHRNBQ_N_S
  VRSHRNBQ_N_S
  VRSHRNBQ_N_U
  VMLALDAVXQ_P_U
  VMLALDAVXQ_P_S
  VQMOVNTQ_M_U
  VQMOVNTQ_M_S
  VMVNQ_M_N_U
  VMVNQ_M_N_S
  VQSHRNTQ_N_U
  VQSHRNTQ_N_S
  VMLALDAVAXQ_S
  VMLALDAVAXQ_U
  VSHRNTQ_N_S
  VSHRNTQ_N_U
  VCVTBQ_M_F16_F32
  VCVTBQ_M_F32_F16
  VCVTTQ_M_F16_F32
  VCVTTQ_M_F32_F16
  VCVTMQ_M_S
  VCVTMQ_M_U
  VCVTNQ_M_S
  VCVTPQ_M_S
  VCVTPQ_M_U
  VCVTQ_M_N_FROM_F_S
  VCVTNQ_M_U
  VREV16Q_M_S
  VREV16Q_M_U
  VREV32Q_M
  VCVTQ_M_FROM_F_U
  VCVTQ_M_FROM_F_S
  VRMLALDAVHQ_P_U
  VADDLVAQ_P_U
  VCVTQ_M_N_FROM_F_U
  VQSHLUQ_M_N_S
  VABAVQ_P_S
  VABAVQ_P_U
  VSHLQ_M_S
  VSHLQ_M_U
  VSRIQ_M_N_S
  VSRIQ_M_N_U
  VSUBQ_M_U
  VSUBQ_M_S
  VCVTQ_M_N_TO_F_U
  VCVTQ_M_N_TO_F_S
  VQADDQ_M_U
  VQADDQ_M_S
  VRSHRQ_M_N_S
  VSUBQ_M_N_S
  VSUBQ_M_N_U
  VBRSRQ_M_N_S
  VSUBQ_M_N_F
  VBICQ_M_F
  VHADDQ_M_U
  VBICQ_M_U
  VBICQ_M_S
  VMULQ_M_N_U
  VHADDQ_M_S
  VORNQ_M_F
  VMLAQ_M_N_S
  VQSUBQ_M_U
  VQSUBQ_M_S
  VMLAQ_M_N_U
  VQSUBQ_M_N_U
  VQSUBQ_M_N_S
  VMULLTQ_INT_M_S
  VMULLTQ_INT_M_U
  VMULQ_M_N_S
  VMULQ_M_N_F
  VMLASQ_M_N_U
  VMLASQ_M_N_S
  VMAXQ_M_U
  VQRDMLAHQ_M_N_U
  VCADDQ_ROT270_M_F
  VCADDQ_ROT270_M_U
  VCADDQ_ROT270_M_S
  VQRSHLQ_M_S
  VMULQ_M_F
  VRHADDQ_M_U
  VSHRQ_M_N_U
  VRHADDQ_M_S
  VMULQ_M_S
  VMULQ_M_U
  VQDMLASHQ_M_N_S
  VQRDMLASHQ_M_N_S
  VRSHLQ_M_S
  VRSHLQ_M_U
  VRSHRQ_M_N_U
  VADDQ_M_N_F
  VADDQ_M_N_S
  VADDQ_M_N_U
  VQRDMLASHQ_M_N_U
  VMAXQ_M_S
  VQRDMLAHQ_M_N_S
  VORRQ_M_S
  VORRQ_M_U
  VORRQ_M_F
  VQRSHLQ_M_U
  VRMULHQ_M_U
  VRMULHQ_M_S
  VMINQ_M_S
  VMINQ_M_U
  VANDQ_M_F
  VANDQ_M_U
  VANDQ_M_S
  VHSUBQ_M_N_S
  VHSUBQ_M_N_U
  VMULHQ_M_S
  VMULHQ_M_U
  VMULLBQ_INT_M_U
  VMULLBQ_INT_M_S
  VCADDQ_ROT90_M_F
  VSHRQ_M_N_S
  VADDQ_M_U
  VSLIQ_M_N_U
  VQADDQ_M_N_S
  VBRSRQ_M_N_F
  VABDQ_M_F
  VBRSRQ_M_N_U
  VEORQ_M_F
  VSHLQ_M_N_S
  VQDMLAHQ_M_N_U
  VQDMLAHQ_M_N_S
  VSHLQ_M_N_U
  VMLADAVAQ_P_U
  VMLADAVAQ_P_S
  VSLIQ_M_N_S
  VQSHLQ_M_U
  VQSHLQ_M_S
  VCADDQ_ROT90_M_U
  VCADDQ_ROT90_M_S
  VORNQ_M_U
  VORNQ_M_S
  VQSHLQ_M_N_S
  VQSHLQ_M_N_U
  VADDQ_M_S
  VHADDQ_M_N_S
  VADDQ_M_F
  VQADDQ_M_N_U
  VEORQ_M_S
  VEORQ_M_U
  VHSUBQ_M_S
  VHSUBQ_M_U
  VHADDQ_M_N_U
  VHCADDQ_ROT90_M_S
  VQRDMLSDHQ_M_S
  VQRDMLSDHXQ_M_S
  VQRDMLADHXQ_M_S
  VQDMULHQ_M_S
  VMLADAVAXQ_P_S
  VQDMLADHXQ_M_S
  VQRDMULHQ_M_S
  VMLSDAVAXQ_P_S
  VQDMULHQ_M_N_S
  VHCADDQ_ROT270_M_S
  VQDMLSDHQ_M_S
  VQDMLSDHXQ_M_S
  VMLSDAVAQ_P_S
  VQRDMLADHQ_M_S
  VQDMLADHQ_M_S
  VMLALDAVAQ_P_U
  VMLALDAVAQ_P_S
  VQRSHRNBQ_M_N_U
  VQRSHRNBQ_M_N_S
  VQRSHRNTQ_M_N_S
  VQSHRNBQ_M_N_U
  VQSHRNBQ_M_N_S
  VQSHRNTQ_M_N_S
  VRSHRNBQ_M_N_U
  VRSHRNBQ_M_N_S
  VRSHRNTQ_M_N_U
  VSHLLBQ_M_N_U
  VSHLLBQ_M_N_S
  VSHLLTQ_M_N_U
  VSHLLTQ_M_N_S
  VSHRNBQ_M_N_S
  VSHRNBQ_M_N_U
  VSHRNTQ_M_N_S
  VSHRNTQ_M_N_U
  VMLALDAVAXQ_P_S
  VQRSHRNTQ_M_N_U
  VQSHRNTQ_M_N_U
  VRSHRNTQ_M_N_S
  VQRDMULHQ_M_N_S
  VRMLALDAVHAQ_P_S
  VMLSLDAVAQ_P_S
  VMLSLDAVAXQ_P_S
  VMULLBQ_POLY_M_P
  VMULLTQ_POLY_M_P
  VQDMULLBQ_M_N_S
  VQDMULLBQ_M_S
  VQDMULLTQ_M_N_S
  VQDMULLTQ_M_S
  VQRSHRUNBQ_M_N_S
  VQSHRUNBQ_M_N_S
  VQSHRUNTQ_M_N_S
  VRMLALDAVHAQ_P_U
  VRMLALDAVHAXQ_P_S
  VRMLSLDAVHAQ_P_S
  VRMLSLDAVHAXQ_P_S
  VQRSHRUNTQ_M_N_S
  VCMLAQ_M_F
  VCMLAQ_ROT180_M_F
  VCMLAQ_ROT270_M_F
  VCMLAQ_ROT90_M_F
  VCMULQ_M_F
  VCMULQ_ROT180_M_F
  VCMULQ_ROT270_M_F
  VCMULQ_ROT90_M_F
  VFMAQ_M_F
  VFMAQ_M_N_F
  VFMASQ_M_N_F
  VFMSQ_M_F
  VMAXNMQ_M_F
  VMINNMQ_M_F
  VSUBQ_M_F
  VSTRWQSB_S
  VSTRWQSB_U
  VSTRBQSO_S
  VSTRBQSO_U
  VSTRBQ_S
  VSTRBQ_U
  VLDRBQGO_S
  VLDRBQGO_U
  VLDRBQ_S
  VLDRBQ_U
  VLDRWQGB_S
  VLDRWQGB_U
  VLD1Q_F
  VLD1Q_S
  VLD1Q_U
  VLDRHQ_F
  VLDRHQGO_S
  VLDRHQGO_U
  VLDRHQGSO_S
  VLDRHQGSO_U
  VLDRHQ_S
  VLDRHQ_U
  VLDRWQ_F
  VLDRWQ_S
  VLDRWQ_U
  VLDRDQGB_S
  VLDRDQGB_U
  VLDRDQGO_S
  VLDRDQGO_U
  VLDRDQGSO_S
  VLDRDQGSO_U
  VLDRHQGO_F
  VLDRHQGSO_F
  VLDRWQGB_F
  VLDRWQGO_F
  VLDRWQGO_S
  VLDRWQGO_U
  VLDRWQGSO_F
  VLDRWQGSO_S
  VLDRWQGSO_U
  VSTRHQ_F
  VST1Q_S
  VST1Q_U
  VSTRHQSO_S
  VSTRHQ_U
  VSTRWQ_S
  VSTRWQ_U
  VSTRWQ_F
  VST1Q_F
  VSTRDQSB_S
  VSTRDQSB_U
  VSTRDQSO_S
  VSTRDQSO_U
  VSTRDQSSO_S
  VSTRDQSSO_U
  VSTRWQSO_S
  VSTRWQSO_U
  VSTRWQSSO_S
  VSTRWQSSO_U
  VSTRHQSO_F
  VSTRHQSSO_F
  VSTRWQSB_F
  VSTRWQSO_F
  VSTRWQSSO_F
  VDDUPQ
  VDDUPQ_M
  VDWDUPQ
  VDWDUPQ_M
  VIDUPQ
  VIDUPQ_M
  VIWDUPQ
  VIWDUPQ_M
  VSTRWQSBWB_S
  VSTRWQSBWB_U
  VLDRWQGBWB_S
  VLDRWQGBWB_U
  VSTRWQSBWB_F
  VLDRWQGBWB_F
  VSTRDQSBWB_S
  VSTRDQSBWB_U
  VLDRDQGBWB_S
  VLDRDQGBWB_U
  VADCQ_U
  VADCQ_M_U
  VADCQ_S
  VADCQ_M_S
  VSBCIQ_U
  VSBCIQ_S
  VSBCIQ_M_U
  VSBCIQ_M_S
  VSBCQ_U
  VSBCQ_S
  VSBCQ_M_U
  VSBCQ_M_S
  VADCIQ_U
  VADCIQ_M_U
  VADCIQ_S
  VADCIQ_M_S
  VLD2Q
  VLD4Q
  VST2Q
  VSHLCQ_M_U
  VSHLCQ_M_S
  VSTRHQSO_U
  VSTRHQSSO_S
  VSTRHQSSO_U
  VSTRHQ_S
  SRSHRL
  SRSHR
  URSHR
  URSHRL
  SQRSHR
  UQRSHL
  UQRSHLL_64
  UQRSHLL_48
  SQRSHRL_64
  SQRSHRL_48
  VSHLCQ_M_
])
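
;; Many of the "_M_" enumerators above correspond to predicated (merging) MVE
;; intrinsics, whose patterns take the merge value and a predicate as extra
;; unspec operands.  A purely illustrative sketch (the real patterns in
;; mve.md use mode iterators and the VPR predicate modes; the name, modes,
;; constraints and template here are assumptions):
;;
;;   (define_insn "vaddq_m_sketch"
;;     [(set (match_operand:V4SI 0 "s_register_operand" "=w")
;;           (unspec:V4SI [(match_operand:V4SI 1 "s_register_operand" "0")
;;                         (match_operand:V4SI 2 "s_register_operand" "w")
;;                         (match_operand:V4SI 3 "s_register_operand" "w")
;;                         (match_operand:HI 4 "vpr_register_operand" "Up")]
;;                        VADDQ_M_S))]
;;     "TARGET_HAVE_MVE"
;;     "vpst\;vaddt.i32\t%q0, %q2, %q3"
;;   )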