Lines matching refs:RISCV in llvm/lib/Target/RISCV/RISCVInstrInfo.cpp; each entry gives the source line number, the matched code, and the enclosing function.

62 using namespace RISCV;
70 : RISCVGenInstrInfo(RISCV::ADJCALLSTACKDOWN, RISCV::ADJCALLSTACKUP), in RISCVInstrInfo()
75 return MCInstBuilder(RISCV::C_NOP); in getNop()
76 return MCInstBuilder(RISCV::ADDI) in getNop()
77 .addReg(RISCV::X0) in getNop()
78 .addReg(RISCV::X0) in getNop()
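The two getNop() alternatives above are the compressed and uncompressed encodings of a no-op. A minimal sketch of the surrounding function, reconstructed from these matches; the feature-predicate name is an assumption about this LLVM revision:

```cpp
// Sketch of getNop() reconstructed from the matches above. The
// hasStdExtCOrZca() predicate name is an assumption, not verbatim
// from this listing.
MCInst RISCVInstrInfo::getNop() const {
  if (STI.hasStdExtCOrZca())
    return MCInstBuilder(RISCV::C_NOP); // 16-bit c.nop
  return MCInstBuilder(RISCV::ADDI)     // addi x0, x0, 0
      .addReg(RISCV::X0)
      .addReg(RISCV::X0)
      .addImm(0);
}
```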
94 case RISCV::LB: in isLoadFromStackSlot()
95 case RISCV::LBU: in isLoadFromStackSlot()
98 case RISCV::LH: in isLoadFromStackSlot()
99 case RISCV::LHU: in isLoadFromStackSlot()
100 case RISCV::FLH: in isLoadFromStackSlot()
103 case RISCV::LW: in isLoadFromStackSlot()
104 case RISCV::FLW: in isLoadFromStackSlot()
105 case RISCV::LWU: in isLoadFromStackSlot()
108 case RISCV::LD: in isLoadFromStackSlot()
109 case RISCV::FLD: in isLoadFromStackSlot()
135 case RISCV::SB: in isStoreToStackSlot()
138 case RISCV::SH: in isStoreToStackSlot()
139 case RISCV::FSH: in isStoreToStackSlot()
142 case RISCV::SW: in isStoreToStackSlot()
143 case RISCV::FSW: in isStoreToStackSlot()
146 case RISCV::SD: in isStoreToStackSlot()
147 case RISCV::FSD: in isStoreToStackSlot()
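The two opcode switches above let isLoadFromStackSlot()/isStoreToStackSlot() recognize a stack access and report its width. A condensed, hedged sketch of the classification they implement (the helper name and std::optional return are illustrative, not from the file):

```cpp
#include <optional>

// Access width in bytes per scalar load/store opcode, condensing the
// two switches above; the real code also checks that the address is a
// frame index with a zero offset before reporting a stack slot.
static std::optional<unsigned> getLoadStoreWidth(unsigned Opc) {
  switch (Opc) {
  case RISCV::LB: case RISCV::LBU: case RISCV::SB:
    return 1;
  case RISCV::LH: case RISCV::LHU: case RISCV::FLH:
  case RISCV::SH: case RISCV::FSH:
    return 2;
  case RISCV::LW: case RISCV::LWU: case RISCV::FLW:
  case RISCV::SW: case RISCV::FSW:
    return 4;
  case RISCV::LD: case RISCV::FLD:
  case RISCV::SD: case RISCV::FSD:
    return 8;
  default:
    return std::nullopt;
  }
}
```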
187 if (MBBI->getOpcode() == RISCV::PseudoVSETVLI || in isConvertibleToVMV_V_V()
188 MBBI->getOpcode() == RISCV::PseudoVSETVLIX0 || in isConvertibleToVMV_V_V()
189 MBBI->getOpcode() == RISCV::PseudoVSETIVLI) { in isConvertibleToVMV_V_V()
209 if (MBBI->getOperand(0).getReg() != RISCV::X0) in isConvertibleToVMV_V_V()
213 if (MBBI->getOperand(1).getReg() != RISCV::X0) in isConvertibleToVMV_V_V()
242 if (MBBI->modifiesRegister(RISCV::VL)) in isConvertibleToVMV_V_V()
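isConvertibleToVMV_V_V() decides whether a whole-register vector copy can become vmv.v.v: it scans backwards for the governing vsetvli-family pseudo, requires the VL-preserving x0,x0 form, and bails out if any intervening instruction writes VL. A hypothetical helper condensing just the opcode test from the matches above:

```cpp
// Hypothetical helper (name invented) capturing the vsetvli-family
// check; the real code walks MBBI backwards and applies the X0
// operand and VL-clobber tests seen in the matches above.
static bool isVSetInstr(const MachineInstr &MI) {
  unsigned Opc = MI.getOpcode();
  return Opc == RISCV::PseudoVSETVLI || Opc == RISCV::PseudoVSETVLIX0 ||
         Opc == RISCV::PseudoVSETIVLI;
}
```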
311 case RISCV::VMV1R_V: in copyPhysRegVector()
313 SubRegIdx = RISCV::sub_vrm1_0; in copyPhysRegVector()
314 VVOpc = RISCV::PseudoVMV_V_V_M1; in copyPhysRegVector()
315 VIOpc = RISCV::PseudoVMV_V_I_M1; in copyPhysRegVector()
317 case RISCV::VMV2R_V: in copyPhysRegVector()
319 SubRegIdx = RISCV::sub_vrm2_0; in copyPhysRegVector()
320 VVOpc = RISCV::PseudoVMV_V_V_M2; in copyPhysRegVector()
321 VIOpc = RISCV::PseudoVMV_V_I_M2; in copyPhysRegVector()
323 case RISCV::VMV4R_V: in copyPhysRegVector()
325 SubRegIdx = RISCV::sub_vrm4_0; in copyPhysRegVector()
326 VVOpc = RISCV::PseudoVMV_V_V_M4; in copyPhysRegVector()
327 VIOpc = RISCV::PseudoVMV_V_I_M4; in copyPhysRegVector()
329 case RISCV::VMV8R_V: in copyPhysRegVector()
332 SubRegIdx = RISCV::sub_vrm1_0; // There is no sub_vrm8_0. in copyPhysRegVector()
333 VVOpc = RISCV::PseudoVMV_V_V_M8; in copyPhysRegVector()
334 VIOpc = RISCV::PseudoVMV_V_I_M8; in copyPhysRegVector()
364 MIB.addReg(RISCV::VL, RegState::Implicit); in copyPhysRegVector()
365 MIB.addReg(RISCV::VTYPE, RegState::Implicit); in copyPhysRegVector()
398 MIB.addReg(RISCV::VL, RegState::Implicit); in copyPhysRegVector()
399 MIB.addReg(RISCV::VTYPE, RegState::Implicit); in copyPhysRegVector()
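The copyPhysRegVector() switch selects, per whole-register-move opcode (LMUL 1/2/4/8), the sub-register index to iterate with and the vmv.v.v / vmv.v.i pseudos to emit when the copy is convertible; the built instruction then gets implicit VL and VTYPE uses. The mapping, reduced to a table-lookup sketch (the struct and helper are illustrative):

```cpp
// Illustrative reduction of the per-LMUL switch above.
struct VMVInfo { unsigned SubRegIdx, VVOpc, VIOpc; };

static VMVInfo getVMVInfo(unsigned Opc) {
  switch (Opc) {
  default: llvm_unreachable("unexpected vector copy opcode");
  case RISCV::VMV1R_V:
    return {RISCV::sub_vrm1_0, RISCV::PseudoVMV_V_V_M1,
            RISCV::PseudoVMV_V_I_M1};
  case RISCV::VMV2R_V:
    return {RISCV::sub_vrm2_0, RISCV::PseudoVMV_V_V_M2,
            RISCV::PseudoVMV_V_I_M2};
  case RISCV::VMV4R_V:
    return {RISCV::sub_vrm4_0, RISCV::PseudoVMV_V_V_M4,
            RISCV::PseudoVMV_V_I_M4};
  case RISCV::VMV8R_V: // no sub_vrm8_0 exists, so sub_vrm1_0 is used
    return {RISCV::sub_vrm1_0, RISCV::PseudoVMV_V_V_M8,
            RISCV::PseudoVMV_V_I_M8};
  }
}
```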
410 if (RISCV::GPRRegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
411 BuildMI(MBB, MBBI, DL, get(RISCV::ADDI), DstReg) in copyPhysReg()
417 if (RISCV::GPRPairRegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
419 BuildMI(MBB, MBBI, DL, get(RISCV::ADDI), in copyPhysReg()
420 TRI->getSubReg(DstReg, RISCV::sub_gpr_even)) in copyPhysReg()
421 .addReg(TRI->getSubReg(SrcReg, RISCV::sub_gpr_even), in copyPhysReg()
424 BuildMI(MBB, MBBI, DL, get(RISCV::ADDI), in copyPhysReg()
425 TRI->getSubReg(DstReg, RISCV::sub_gpr_odd)) in copyPhysReg()
426 .addReg(TRI->getSubReg(SrcReg, RISCV::sub_gpr_odd), in copyPhysReg()
433 if (RISCV::VCSRRegClass.contains(SrcReg) && in copyPhysReg()
434 RISCV::GPRRegClass.contains(DstReg)) { in copyPhysReg()
435 BuildMI(MBB, MBBI, DL, get(RISCV::CSRRS), DstReg) in copyPhysReg()
437 .addReg(RISCV::X0); in copyPhysReg()
441 if (RISCV::FPR16RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
444 Opc = RISCV::FSGNJ_H; in copyPhysReg()
450 DstReg = TRI->getMatchingSuperReg(DstReg, RISCV::sub_16, in copyPhysReg()
451 &RISCV::FPR32RegClass); in copyPhysReg()
452 SrcReg = TRI->getMatchingSuperReg(SrcReg, RISCV::sub_16, in copyPhysReg()
453 &RISCV::FPR32RegClass); in copyPhysReg()
454 Opc = RISCV::FSGNJ_S; in copyPhysReg()
462 if (RISCV::FPR32RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
463 BuildMI(MBB, MBBI, DL, get(RISCV::FSGNJ_S), DstReg) in copyPhysReg()
469 if (RISCV::FPR64RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
470 BuildMI(MBB, MBBI, DL, get(RISCV::FSGNJ_D), DstReg) in copyPhysReg()
476 if (RISCV::FPR32RegClass.contains(DstReg) && in copyPhysReg()
477 RISCV::GPRRegClass.contains(SrcReg)) { in copyPhysReg()
478 BuildMI(MBB, MBBI, DL, get(RISCV::FMV_W_X), DstReg) in copyPhysReg()
483 if (RISCV::GPRRegClass.contains(DstReg) && in copyPhysReg()
484 RISCV::FPR32RegClass.contains(SrcReg)) { in copyPhysReg()
485 BuildMI(MBB, MBBI, DL, get(RISCV::FMV_X_W), DstReg) in copyPhysReg()
490 if (RISCV::FPR64RegClass.contains(DstReg) && in copyPhysReg()
491 RISCV::GPRRegClass.contains(SrcReg)) { in copyPhysReg()
493 BuildMI(MBB, MBBI, DL, get(RISCV::FMV_D_X), DstReg) in copyPhysReg()
498 if (RISCV::GPRRegClass.contains(DstReg) && in copyPhysReg()
499 RISCV::FPR64RegClass.contains(SrcReg)) { in copyPhysReg()
501 BuildMI(MBB, MBBI, DL, get(RISCV::FMV_X_D), DstReg) in copyPhysReg()
507 if (RISCV::VRRegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
508 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V); in copyPhysReg()
512 if (RISCV::VRM2RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
513 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV2R_V); in copyPhysReg()
517 if (RISCV::VRM4RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
518 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV4R_V); in copyPhysReg()
522 if (RISCV::VRM8RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
523 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV8R_V); in copyPhysReg()
527 if (RISCV::VRN2M1RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
528 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V, in copyPhysReg()
533 if (RISCV::VRN2M2RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
534 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV2R_V, in copyPhysReg()
539 if (RISCV::VRN2M4RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
540 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV4R_V, in copyPhysReg()
545 if (RISCV::VRN3M1RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
546 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V, in copyPhysReg()
551 if (RISCV::VRN3M2RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
552 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV2R_V, in copyPhysReg()
557 if (RISCV::VRN4M1RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
558 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V, in copyPhysReg()
563 if (RISCV::VRN4M2RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
564 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV2R_V, in copyPhysReg()
569 if (RISCV::VRN5M1RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
570 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V, in copyPhysReg()
575 if (RISCV::VRN6M1RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
576 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V, in copyPhysReg()
581 if (RISCV::VRN7M1RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
582 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V, in copyPhysReg()
587 if (RISCV::VRN8M1RegClass.contains(DstReg, SrcReg)) { in copyPhysReg()
588 copyPhysRegVector(MBB, MBBI, DL, DstReg, SrcReg, KillSrc, RISCV::VMV1R_V, in copyPhysReg()
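copyPhysReg() dispatches on register class: GPR copies lower to addi rd, rs, 0 (GPR pairs copy each half via sub_gpr_even/sub_gpr_odd), FPR copies to fsgnj.{h,s,d} rd, rs, rs, GPR-to-FPR and FPR-to-GPR moves to the fmv.* instructions, and every vector class defers to copyPhysRegVector() with the matching VMVnR_V opcode (plus the tuple count for the VRNxMy segment classes). The scalar GPR path from the matches, with the standard operand plumbing filled in as an assumption:

```cpp
// GPR-to-GPR copy path; the immediate-0 and kill-state operands are
// standard LLVM idioms assumed here, not text from this listing.
if (RISCV::GPRRegClass.contains(DstReg, SrcReg)) {
  BuildMI(MBB, MBBI, DL, get(RISCV::ADDI), DstReg)
      .addReg(SrcReg, getKillRegState(KillSrc))
      .addImm(0);
  return;
}
```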
607 if (RISCV::GPRRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
608 Opcode = TRI->getRegSizeInBits(RISCV::GPRRegClass) == 32 ? in storeRegToStackSlot()
609 RISCV::SW : RISCV::SD; in storeRegToStackSlot()
611 } else if (RISCV::GPRPairRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
612 Opcode = RISCV::PseudoRV32ZdinxSD; in storeRegToStackSlot()
614 } else if (RISCV::FPR16RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
615 Opcode = RISCV::FSH; in storeRegToStackSlot()
617 } else if (RISCV::FPR32RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
618 Opcode = RISCV::FSW; in storeRegToStackSlot()
620 } else if (RISCV::FPR64RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
621 Opcode = RISCV::FSD; in storeRegToStackSlot()
623 } else if (RISCV::VRRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
624 Opcode = RISCV::VS1R_V; in storeRegToStackSlot()
625 } else if (RISCV::VRM2RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
626 Opcode = RISCV::VS2R_V; in storeRegToStackSlot()
627 } else if (RISCV::VRM4RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
628 Opcode = RISCV::VS4R_V; in storeRegToStackSlot()
629 } else if (RISCV::VRM8RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
630 Opcode = RISCV::VS8R_V; in storeRegToStackSlot()
631 } else if (RISCV::VRN2M1RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
632 Opcode = RISCV::PseudoVSPILL2_M1; in storeRegToStackSlot()
633 else if (RISCV::VRN2M2RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
634 Opcode = RISCV::PseudoVSPILL2_M2; in storeRegToStackSlot()
635 else if (RISCV::VRN2M4RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
636 Opcode = RISCV::PseudoVSPILL2_M4; in storeRegToStackSlot()
637 else if (RISCV::VRN3M1RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
638 Opcode = RISCV::PseudoVSPILL3_M1; in storeRegToStackSlot()
639 else if (RISCV::VRN3M2RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
640 Opcode = RISCV::PseudoVSPILL3_M2; in storeRegToStackSlot()
641 else if (RISCV::VRN4M1RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
642 Opcode = RISCV::PseudoVSPILL4_M1; in storeRegToStackSlot()
643 else if (RISCV::VRN4M2RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
644 Opcode = RISCV::PseudoVSPILL4_M2; in storeRegToStackSlot()
645 else if (RISCV::VRN5M1RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
646 Opcode = RISCV::PseudoVSPILL5_M1; in storeRegToStackSlot()
647 else if (RISCV::VRN6M1RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
648 Opcode = RISCV::PseudoVSPILL6_M1; in storeRegToStackSlot()
649 else if (RISCV::VRN7M1RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
650 Opcode = RISCV::PseudoVSPILL7_M1; in storeRegToStackSlot()
651 else if (RISCV::VRN8M1RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
652 Opcode = RISCV::PseudoVSPILL8_M1; in storeRegToStackSlot()
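storeRegToStackSlot() maps the register class to a spill opcode: SW or SD for GPRs depending on XLEN, PseudoRV32ZdinxSD for GPR pairs, FSH/FSW/FSD for FPRs, whole-register VS1R_V..VS8R_V for the plain vector classes, and PseudoVSPILLn_Mk for the Zvlsseg tuple classes. The scalar half of the chain, compressed into a sketch:

```cpp
// Scalar branch of the RC -> spill opcode chain above; vector and
// tuple classes continue the same if/else ladder.
unsigned Opcode;
if (RISCV::GPRRegClass.hasSubClassEq(RC))
  Opcode = TRI->getRegSizeInBits(RISCV::GPRRegClass) == 32 ? RISCV::SW
                                                           : RISCV::SD;
else if (RISCV::FPR16RegClass.hasSubClassEq(RC))
  Opcode = RISCV::FSH;
else if (RISCV::FPR32RegClass.hasSubClassEq(RC))
  Opcode = RISCV::FSW;
else if (RISCV::FPR64RegClass.hasSubClassEq(RC))
  Opcode = RISCV::FSD;
else
  llvm_unreachable("sketch: vector classes omitted");
```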
690 if (RISCV::GPRRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
691 Opcode = TRI->getRegSizeInBits(RISCV::GPRRegClass) == 32 ? in loadRegFromStackSlot()
692 RISCV::LW : RISCV::LD; in loadRegFromStackSlot()
694 } else if (RISCV::GPRPairRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
695 Opcode = RISCV::PseudoRV32ZdinxLD; in loadRegFromStackSlot()
697 } else if (RISCV::FPR16RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
698 Opcode = RISCV::FLH; in loadRegFromStackSlot()
700 } else if (RISCV::FPR32RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
701 Opcode = RISCV::FLW; in loadRegFromStackSlot()
703 } else if (RISCV::FPR64RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
704 Opcode = RISCV::FLD; in loadRegFromStackSlot()
706 } else if (RISCV::VRRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
707 Opcode = RISCV::VL1RE8_V; in loadRegFromStackSlot()
708 } else if (RISCV::VRM2RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
709 Opcode = RISCV::VL2RE8_V; in loadRegFromStackSlot()
710 } else if (RISCV::VRM4RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
711 Opcode = RISCV::VL4RE8_V; in loadRegFromStackSlot()
712 } else if (RISCV::VRM8RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
713 Opcode = RISCV::VL8RE8_V; in loadRegFromStackSlot()
714 } else if (RISCV::VRN2M1RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
715 Opcode = RISCV::PseudoVRELOAD2_M1; in loadRegFromStackSlot()
716 else if (RISCV::VRN2M2RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
717 Opcode = RISCV::PseudoVRELOAD2_M2; in loadRegFromStackSlot()
718 else if (RISCV::VRN2M4RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
719 Opcode = RISCV::PseudoVRELOAD2_M4; in loadRegFromStackSlot()
720 else if (RISCV::VRN3M1RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
721 Opcode = RISCV::PseudoVRELOAD3_M1; in loadRegFromStackSlot()
722 else if (RISCV::VRN3M2RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
723 Opcode = RISCV::PseudoVRELOAD3_M2; in loadRegFromStackSlot()
724 else if (RISCV::VRN4M1RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
725 Opcode = RISCV::PseudoVRELOAD4_M1; in loadRegFromStackSlot()
726 else if (RISCV::VRN4M2RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
727 Opcode = RISCV::PseudoVRELOAD4_M2; in loadRegFromStackSlot()
728 else if (RISCV::VRN5M1RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
729 Opcode = RISCV::PseudoVRELOAD5_M1; in loadRegFromStackSlot()
730 else if (RISCV::VRN6M1RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
731 Opcode = RISCV::PseudoVRELOAD6_M1; in loadRegFromStackSlot()
732 else if (RISCV::VRN7M1RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
733 Opcode = RISCV::PseudoVRELOAD7_M1; in loadRegFromStackSlot()
734 else if (RISCV::VRN8M1RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
735 Opcode = RISCV::PseudoVRELOAD8_M1; in loadRegFromStackSlot()
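loadRegFromStackSlot() mirrors the store path: LW/LD for GPRs, PseudoRV32ZdinxLD for pairs, FLH/FLW/FLD for FPRs, the element-size-agnostic VLkRE8_V whole-register loads for vector classes, and PseudoVRELOADn_Mk for tuples. Once Opcode is chosen, the scalar reload is emitted against the frame index roughly like this (a hedged sketch; the MachineMemOperand setup in the real function is omitted):

```cpp
// Emitting a scalar reload once Opcode is chosen; vector reloads take
// no immediate offset operand, and the memory operand is omitted here.
BuildMI(MBB, I, DebugLoc(), get(Opcode), DstReg)
    .addFrameIndex(FI)
    .addImm(0);
```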
779 if (RISCV::isSEXT_W(MI)) { in foldMemoryOperandImpl()
780 LoadOpc = RISCV::LW; in foldMemoryOperandImpl()
783 if (RISCV::isZEXT_W(MI)) { in foldMemoryOperandImpl()
784 LoadOpc = RISCV::LWU; in foldMemoryOperandImpl()
787 if (RISCV::isZEXT_B(MI)) { in foldMemoryOperandImpl()
788 LoadOpc = RISCV::LBU; in foldMemoryOperandImpl()
792 case RISCV::SEXT_H: in foldMemoryOperandImpl()
793 LoadOpc = RISCV::LH; in foldMemoryOperandImpl()
795 case RISCV::SEXT_B: in foldMemoryOperandImpl()
796 LoadOpc = RISCV::LB; in foldMemoryOperandImpl()
798 case RISCV::ZEXT_H_RV32: in foldMemoryOperandImpl()
799 case RISCV::ZEXT_H_RV64: in foldMemoryOperandImpl()
800 LoadOpc = RISCV::LHU; in foldMemoryOperandImpl()
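foldMemoryOperandImpl() folds a sign/zero-extension into the stack reload that feeds it: sext.w becomes lw, zext.w becomes lwu, zext.b becomes lbu, sext.h/sext.b become lh/lb, and zext.h (RV32 or RV64 encoding) becomes lhu. After LoadOpc is chosen, the replacement is a single load into the extend's destination, roughly:

```cpp
// Sketch of the rebuilt instruction; InsertPt and FrameIndex are the
// fold parameters assumed from context, not shown in the matches.
return BuildMI(*MI.getParent(), InsertPt, MI.getDebugLoc(), get(LoadOpc),
               MI.getOperand(0).getReg())
    .addFrameIndex(FrameIndex)
    .addImm(0);
```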
822 Register SrcReg = RISCV::X0; in movImm()
837 unsigned SrcRegState = getKillRegState(SrcReg != RISCV::X0) | in movImm()
850 .addReg(RISCV::X0) in movImm()
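movImm() materializes an arbitrary constant by walking the RISCVMatInt expansion, starting from x0 and threading the intermediate result through each step. A deliberately simplified sketch of that loop (the generateInstSeq signature varies across LLVM versions, and real steps can also be immediate-only or register-register forms, which this sketch ignores):

```cpp
// Simplified movImm() loop: every step is treated as reg+imm, which
// holds for the common LUI/ADDI(W)/SLLI sequences but not for all of
// RISCVMatInt's output. The generateInstSeq signature is assumed.
MachineRegisterInfo &MRI = MBB.getParent()->getRegInfo();
Register SrcReg = RISCV::X0;
for (const RISCVMatInt::Inst &Inst :
     RISCVMatInt::generateInstSeq(Val, STI)) {
  Register TmpReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
  BuildMI(MBB, MBBI, DL, get(Inst.getOpcode()), TmpReg)
      .addReg(SrcReg, getKillRegState(SrcReg != RISCV::X0))
      .addImm(Inst.getImm());
  SrcReg = TmpReg;
}
```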
879 case RISCV::BEQ: in getCondFromBranchOpc()
881 case RISCV::BNE: in getCondFromBranchOpc()
883 case RISCV::BLT: in getCondFromBranchOpc()
885 case RISCV::BGE: in getCondFromBranchOpc()
887 case RISCV::BLTU: in getCondFromBranchOpc()
889 case RISCV::BGEU: in getCondFromBranchOpc()
914 return RISCV::BEQ; in getBrCond()
916 return RISCV::BNE; in getBrCond()
918 return RISCV::BLT; in getBrCond()
920 return RISCV::BGE; in getBrCond()
922 return RISCV::BLTU; in getBrCond()
924 return RISCV::BGEU; in getBrCond()
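getCondFromBranchOpc() and getBrCond() are inverse tables between the six conditional branches and the backend's RISCVCC condition codes. One direction, written as a free helper rather than the exact member signature used in the file:

```cpp
// Condition code -> branch opcode; the inverse of getCondFromBranchOpc().
static unsigned brOpcodeForCC(RISCVCC::CondCode CC) {
  switch (CC) {
  default: llvm_unreachable("unknown condition code");
  case RISCVCC::COND_EQ:  return RISCV::BEQ;
  case RISCVCC::COND_NE:  return RISCV::BNE;
  case RISCVCC::COND_LT:  return RISCV::BLT;
  case RISCVCC::COND_GE:  return RISCV::BGE;
  case RISCVCC::COND_LTU: return RISCV::BLTU;
  case RISCVCC::COND_GEU: return RISCV::BGEU;
  }
}
```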
1070 MachineInstr &MI = *BuildMI(&MBB, DL, get(RISCV::PseudoBR)).addMBB(TBB); in insertBranch()
1088 MachineInstr &MI = *BuildMI(&MBB, DL, get(RISCV::PseudoBR)).addMBB(FBB); in insertBranch()
1118 Register ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass); in insertIndirectBranch()
1121 MachineInstr &MI = *BuildMI(MBB, II, DL, get(RISCV::PseudoJump)) in insertIndirectBranch()
1127 RS->scavengeRegisterBackwards(RISCV::GPRRegClass, MI.getIterator(), in insertIndirectBranch()
1130 if (TmpGPR != RISCV::NoRegister) in insertIndirectBranch()
1136 TmpGPR = RISCV::X27; in insertIndirectBranch()
1143 &RISCV::GPRRegClass, TRI, Register()); in insertIndirectBranch()
1150 &RISCV::GPRRegClass, TRI, Register()); in insertIndirectBranch()
1203 if (MI->getOpcode() == RISCV::ADDI && MI->getOperand(1).isReg() && in optimizeCondBranch()
1204 MI->getOperand(1).getReg() == RISCV::X0) { in optimizeCondBranch()
1215 if (Reg == RISCV::X0) { in optimizeCondBranch()
1304 case RISCV::BEQ: in isBranchOffsetInRange()
1305 case RISCV::BNE: in isBranchOffsetInRange()
1306 case RISCV::BLT: in isBranchOffsetInRange()
1307 case RISCV::BGE: in isBranchOffsetInRange()
1308 case RISCV::BLTU: in isBranchOffsetInRange()
1309 case RISCV::BGEU: in isBranchOffsetInRange()
1311 case RISCV::JAL: in isBranchOffsetInRange()
1312 case RISCV::PseudoBR: in isBranchOffsetInRange()
1314 case RISCV::PseudoJump: in isBranchOffsetInRange()
1324 case RISCV::ADD: return RISCV::PseudoCCADD; break; in getPredicatedOpcode()
1325 case RISCV::SUB: return RISCV::PseudoCCSUB; break; in getPredicatedOpcode()
1326 case RISCV::SLL: return RISCV::PseudoCCSLL; break; in getPredicatedOpcode()
1327 case RISCV::SRL: return RISCV::PseudoCCSRL; break; in getPredicatedOpcode()
1328 case RISCV::SRA: return RISCV::PseudoCCSRA; break; in getPredicatedOpcode()
1329 case RISCV::AND: return RISCV::PseudoCCAND; break; in getPredicatedOpcode()
1330 case RISCV::OR: return RISCV::PseudoCCOR; break; in getPredicatedOpcode()
1331 case RISCV::XOR: return RISCV::PseudoCCXOR; break; in getPredicatedOpcode()
1333 case RISCV::ADDI: return RISCV::PseudoCCADDI; break; in getPredicatedOpcode()
1334 case RISCV::SLLI: return RISCV::PseudoCCSLLI; break; in getPredicatedOpcode()
1335 case RISCV::SRLI: return RISCV::PseudoCCSRLI; break; in getPredicatedOpcode()
1336 case RISCV::SRAI: return RISCV::PseudoCCSRAI; break; in getPredicatedOpcode()
1337 case RISCV::ANDI: return RISCV::PseudoCCANDI; break; in getPredicatedOpcode()
1338 case RISCV::ORI: return RISCV::PseudoCCORI; break; in getPredicatedOpcode()
1339 case RISCV::XORI: return RISCV::PseudoCCXORI; break; in getPredicatedOpcode()
1341 case RISCV::ADDW: return RISCV::PseudoCCADDW; break; in getPredicatedOpcode()
1342 case RISCV::SUBW: return RISCV::PseudoCCSUBW; break; in getPredicatedOpcode()
1343 case RISCV::SLLW: return RISCV::PseudoCCSLLW; break; in getPredicatedOpcode()
1344 case RISCV::SRLW: return RISCV::PseudoCCSRLW; break; in getPredicatedOpcode()
1345 case RISCV::SRAW: return RISCV::PseudoCCSRAW; break; in getPredicatedOpcode()
1347 case RISCV::ADDIW: return RISCV::PseudoCCADDIW; break; in getPredicatedOpcode()
1348 case RISCV::SLLIW: return RISCV::PseudoCCSLLIW; break; in getPredicatedOpcode()
1349 case RISCV::SRLIW: return RISCV::PseudoCCSRLIW; break; in getPredicatedOpcode()
1350 case RISCV::SRAIW: return RISCV::PseudoCCSRAIW; break; in getPredicatedOpcode()
1352 case RISCV::ANDN: return RISCV::PseudoCCANDN; break; in getPredicatedOpcode()
1353 case RISCV::ORN: return RISCV::PseudoCCORN; break; in getPredicatedOpcode()
1354 case RISCV::XNOR: return RISCV::PseudoCCXNOR; break; in getPredicatedOpcode()
1357 return RISCV::INSTRUCTION_LIST_END; in getPredicatedOpcode()
1373 if (getPredicatedOpcode(MI->getOpcode()) == RISCV::INSTRUCTION_LIST_END) in canFoldAsPredicatedOp()
1376 if (MI->getOpcode() == RISCV::ADDI && MI->getOperand(1).isReg() && in canFoldAsPredicatedOp()
1377 MI->getOperand(1).getReg() == RISCV::X0) in canFoldAsPredicatedOp()
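getPredicatedOpcode() is the table behind the short-forward-branch optimization: each ALU opcode (base, immediate, word, and the Zbb ANDN/ORN/XNOR forms) has a PseudoCC* twin that executes under a condition, with INSTRUCTION_LIST_END as the "no mapping" sentinel. canFoldAsPredicatedOp() therefore starts with exactly the test visible above:

```cpp
// Sentinel check from canFoldAsPredicatedOp(): ops without a
// conditional twin cannot be folded under a predicate.
if (getPredicatedOpcode(MI->getOpcode()) == RISCV::INSTRUCTION_LIST_END)
  return nullptr;
```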
1405 assert(MI.getOpcode() == RISCV::PseudoCCMOVGPR && in analyzeSelect()
1428 assert(MI.getOpcode() == RISCV::PseudoCCMOVGPR && in optimizeSelect()
1450 assert(PredOpc != RISCV::INSTRUCTION_LIST_END && "Unexpected opcode!"); in optimizeSelect()
1557 case RISCV::FSGNJ_D: in isAsCheapAsAMove()
1558 case RISCV::FSGNJ_S: in isAsCheapAsAMove()
1559 case RISCV::FSGNJ_H: in isAsCheapAsAMove()
1560 case RISCV::FSGNJ_D_INX: in isAsCheapAsAMove()
1561 case RISCV::FSGNJ_D_IN32X: in isAsCheapAsAMove()
1562 case RISCV::FSGNJ_S_INX: in isAsCheapAsAMove()
1563 case RISCV::FSGNJ_H_INX: in isAsCheapAsAMove()
1567 case RISCV::ADDI: in isAsCheapAsAMove()
1568 case RISCV::ORI: in isAsCheapAsAMove()
1569 case RISCV::XORI: in isAsCheapAsAMove()
1571 MI.getOperand(1).getReg() == RISCV::X0) || in isAsCheapAsAMove()
1584 case RISCV::ADDI: in isCopyInstrImpl()
1590 case RISCV::FSGNJ_D: in isCopyInstrImpl()
1591 case RISCV::FSGNJ_S: in isCopyInstrImpl()
1592 case RISCV::FSGNJ_H: in isCopyInstrImpl()
1593 case RISCV::FSGNJ_D_INX: in isCopyInstrImpl()
1594 case RISCV::FSGNJ_D_IN32X: in isCopyInstrImpl()
1595 case RISCV::FSGNJ_S_INX: in isCopyInstrImpl()
1596 case RISCV::FSGNJ_H_INX: in isCopyInstrImpl()
1624 RISCV::getNamedOperandIdx(Root.getOpcode(), RISCV::OpName::frm); in finalizeInsInstrs()
1628 return RISCV::getNamedOperandIdx(MI->getOpcode(), in finalizeInsInstrs()
1629 RISCV::OpName::frm) < 0; in finalizeInsInstrs()
1639 assert(static_cast<unsigned>(RISCV::getNamedOperandIdx( in finalizeInsInstrs()
1640 NewMI->getOpcode(), RISCV::OpName::frm)) == in finalizeInsInstrs()
1646 MIB.addUse(RISCV::FRM, RegState::Implicit); in finalizeInsInstrs()
1654 case RISCV::FADD_H: in isFADD()
1655 case RISCV::FADD_S: in isFADD()
1656 case RISCV::FADD_D: in isFADD()
1665 case RISCV::FSUB_H: in isFSUB()
1666 case RISCV::FSUB_S: in isFSUB()
1667 case RISCV::FSUB_D: in isFSUB()
1676 case RISCV::FMUL_H: in isFMUL()
1677 case RISCV::FMUL_S: in isFMUL()
1678 case RISCV::FMUL_D: in isFMUL()
1694 RISCV::getNamedOperandIdx(Inst.getOpcode(), RISCV::OpName::frm); in hasReassociableSibling()
1696 RISCV::getNamedOperandIdx(Sibling.getOpcode(), RISCV::OpName::frm); in hasReassociableSibling()
1699 RISCV::hasEqualFRM(Inst, Sibling); in hasReassociableSibling()
1719 case RISCV::ADD: in isAssociativeAndCommutative()
1720 case RISCV::ADDW: in isAssociativeAndCommutative()
1721 case RISCV::AND: in isAssociativeAndCommutative()
1722 case RISCV::OR: in isAssociativeAndCommutative()
1723 case RISCV::XOR: in isAssociativeAndCommutative()
1736 case RISCV::MUL: in isAssociativeAndCommutative()
1737 case RISCV::MULW: in isAssociativeAndCommutative()
1738 case RISCV::MIN: in isAssociativeAndCommutative()
1739 case RISCV::MINU: in isAssociativeAndCommutative()
1740 case RISCV::MAX: in isAssociativeAndCommutative()
1741 case RISCV::MAXU: in isAssociativeAndCommutative()
1742 case RISCV::FMIN_H: in isAssociativeAndCommutative()
1743 case RISCV::FMIN_S: in isAssociativeAndCommutative()
1744 case RISCV::FMIN_D: in isAssociativeAndCommutative()
1745 case RISCV::FMAX_H: in isAssociativeAndCommutative()
1746 case RISCV::FMAX_S: in isAssociativeAndCommutative()
1747 case RISCV::FMAX_D: in isAssociativeAndCommutative()
1759 case RISCV::FADD_H: in getInverseOpcode()
1760 return RISCV::FSUB_H; in getInverseOpcode()
1761 case RISCV::FADD_S: in getInverseOpcode()
1762 return RISCV::FSUB_S; in getInverseOpcode()
1763 case RISCV::FADD_D: in getInverseOpcode()
1764 return RISCV::FSUB_D; in getInverseOpcode()
1765 case RISCV::FSUB_H: in getInverseOpcode()
1766 return RISCV::FADD_H; in getInverseOpcode()
1767 case RISCV::FSUB_S: in getInverseOpcode()
1768 return RISCV::FADD_S; in getInverseOpcode()
1769 case RISCV::FSUB_D: in getInverseOpcode()
1770 return RISCV::FADD_D; in getInverseOpcode()
1771 case RISCV::ADD: in getInverseOpcode()
1772 return RISCV::SUB; in getInverseOpcode()
1773 case RISCV::SUB: in getInverseOpcode()
1774 return RISCV::ADD; in getInverseOpcode()
1775 case RISCV::ADDW: in getInverseOpcode()
1776 return RISCV::SUBW; in getInverseOpcode()
1777 case RISCV::SUBW: in getInverseOpcode()
1778 return RISCV::ADDW; in getInverseOpcode()
1806 return RISCV::hasEqualFRM(Root, *MI); in canCombineFPFusedMultiply()
1855 case RISCV::FADD_H: in getFPFusedMultiplyOpcode()
1856 return RISCV::FMADD_H; in getFPFusedMultiplyOpcode()
1857 case RISCV::FADD_S: in getFPFusedMultiplyOpcode()
1858 return RISCV::FMADD_S; in getFPFusedMultiplyOpcode()
1859 case RISCV::FADD_D: in getFPFusedMultiplyOpcode()
1860 return RISCV::FMADD_D; in getFPFusedMultiplyOpcode()
1861 case RISCV::FSUB_H: in getFPFusedMultiplyOpcode()
1862 return Pattern == MachineCombinerPattern::FMSUB ? RISCV::FMSUB_H in getFPFusedMultiplyOpcode()
1863 : RISCV::FNMSUB_H; in getFPFusedMultiplyOpcode()
1864 case RISCV::FSUB_S: in getFPFusedMultiplyOpcode()
1865 return Pattern == MachineCombinerPattern::FMSUB ? RISCV::FMSUB_S in getFPFusedMultiplyOpcode()
1866 : RISCV::FNMSUB_S; in getFPFusedMultiplyOpcode()
1867 case RISCV::FSUB_D: in getFPFusedMultiplyOpcode()
1868 return Pattern == MachineCombinerPattern::FMSUB ? RISCV::FMSUB_D in getFPFusedMultiplyOpcode()
1869 : RISCV::FNMSUB_D; in getFPFusedMultiplyOpcode()
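getFPFusedMultiplyOpcode() picks the fused replacement when the machine combiner merges an fmul with an fadd/fsub: fadd maps directly to fmadd, while the fsub direction depends on which operand the product feeds. For the single-precision case, the choice reduces to (pattern names from MachineCombinerPattern):

```cpp
// fsub(a*b, c) -> FMSUB  computes a*b - c;
// fsub(c, a*b) -> FNMSUB computes -(a*b) + c.
unsigned FusedOpc = Pattern == MachineCombinerPattern::FMSUB
                        ? RISCV::FMSUB_S
                        : RISCV::FNMSUB_S;
```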
2076 if (Op.isReg() && Op.getReg() != RISCV::NoRegister) { in verifyInstruction()
2079 if (!RISCV::GPRRegClass.hasSubClassEq(RC)) { in verifyInstruction()
2141 case RISCV::LB: in canFoldIntoAddrMode()
2142 case RISCV::LBU: in canFoldIntoAddrMode()
2143 case RISCV::LH: in canFoldIntoAddrMode()
2144 case RISCV::LHU: in canFoldIntoAddrMode()
2145 case RISCV::LW: in canFoldIntoAddrMode()
2146 case RISCV::LWU: in canFoldIntoAddrMode()
2147 case RISCV::LD: in canFoldIntoAddrMode()
2148 case RISCV::FLH: in canFoldIntoAddrMode()
2149 case RISCV::FLW: in canFoldIntoAddrMode()
2150 case RISCV::FLD: in canFoldIntoAddrMode()
2151 case RISCV::SB: in canFoldIntoAddrMode()
2152 case RISCV::SH: in canFoldIntoAddrMode()
2153 case RISCV::SW: in canFoldIntoAddrMode()
2154 case RISCV::SD: in canFoldIntoAddrMode()
2155 case RISCV::FSH: in canFoldIntoAddrMode()
2156 case RISCV::FSW: in canFoldIntoAddrMode()
2157 case RISCV::FSD: in canFoldIntoAddrMode()
2164 if (AddrI.getOpcode() != RISCV::ADDI || !AddrI.getOperand(1).isReg() || in canFoldIntoAddrMode()
2212 case RISCV::LB: in getMemOperandsWithOffsetWidth()
2213 case RISCV::LBU: in getMemOperandsWithOffsetWidth()
2214 case RISCV::SB: in getMemOperandsWithOffsetWidth()
2215 case RISCV::LH: in getMemOperandsWithOffsetWidth()
2216 case RISCV::LHU: in getMemOperandsWithOffsetWidth()
2217 case RISCV::FLH: in getMemOperandsWithOffsetWidth()
2218 case RISCV::SH: in getMemOperandsWithOffsetWidth()
2219 case RISCV::FSH: in getMemOperandsWithOffsetWidth()
2220 case RISCV::LW: in getMemOperandsWithOffsetWidth()
2221 case RISCV::LWU: in getMemOperandsWithOffsetWidth()
2222 case RISCV::FLW: in getMemOperandsWithOffsetWidth()
2223 case RISCV::SW: in getMemOperandsWithOffsetWidth()
2224 case RISCV::FSW: in getMemOperandsWithOffsetWidth()
2225 case RISCV::LD: in getMemOperandsWithOffsetWidth()
2226 case RISCV::FLD: in getMemOperandsWithOffsetWidth()
2227 case RISCV::SD: in getMemOperandsWithOffsetWidth()
2228 case RISCV::FSD: in getMemOperandsWithOffsetWidth()
2431 return !C.isAvailableAcrossAndOutOfSeq(RISCV::X5, *TRI); in getOutliningCandidateInfo()
2486 if (MI.modifiesRegister(RISCV::X5, TRI) || in getOutliningTypeImpl()
2487 MI.getDesc().hasImplicitDefOfPhysReg(RISCV::X5)) in getOutliningTypeImpl()
2523 MBB.addLiveIn(RISCV::X5); in buildOutlinedFrame()
2526 MBB.insert(MBB.end(), BuildMI(MF, DebugLoc(), get(RISCV::JALR)) in buildOutlinedFrame()
2527 .addReg(RISCV::X0, RegState::Define) in buildOutlinedFrame()
2528 .addReg(RISCV::X5) in buildOutlinedFrame()
2538 BuildMI(MF, DebugLoc(), get(RISCV::PseudoCALLReg), RISCV::X5) in insertOutlinedCall()
2554 if (MI.getOpcode() == RISCV::ADDI && MI.getOperand(1).isReg() && in isAddImmediate()
2582 if ((MI.getOpcode() == RISCV::VSETVLI || MI.getOpcode() == RISCV::VSETIVLI || in createMIROperandComment()
2583 MI.getOpcode() == RISCV::PseudoVSETVLI || in createMIROperandComment()
2584 MI.getOpcode() == RISCV::PseudoVSETIVLI || in createMIROperandComment()
2585 MI.getOpcode() == RISCV::PseudoVSETVLIX0) && in createMIROperandComment()
2610 RISCV::PseudoV##OP##_##TYPE##_##LMUL
2644 case RISCV::TH_MVEQZ: in findCommutedOpIndices()
2645 case RISCV::TH_MVNEZ: in findCommutedOpIndices()
2649 if (MI.getOperand(2).getReg() == RISCV::X0) in findCommutedOpIndices()
2653 case RISCV::TH_MULA: in findCommutedOpIndices()
2654 case RISCV::TH_MULAW: in findCommutedOpIndices()
2655 case RISCV::TH_MULAH: in findCommutedOpIndices()
2656 case RISCV::TH_MULS: in findCommutedOpIndices()
2657 case RISCV::TH_MULSW: in findCommutedOpIndices()
2658 case RISCV::TH_MULSH: in findCommutedOpIndices()
2661 case RISCV::PseudoCCMOVGPRNoX0: in findCommutedOpIndices()
2662 case RISCV::PseudoCCMOVGPR: in findCommutedOpIndices()
2772 case RISCV::PseudoV##OLDOP##_##TYPE##_##LMUL: \
2773 Opc = RISCV::PseudoV##NEWOP##_##TYPE##_##LMUL; \
2810 case RISCV::TH_MVEQZ: in commuteInstructionImpl()
2811 case RISCV::TH_MVNEZ: { in commuteInstructionImpl()
2813 WorkingMI.setDesc(get(MI.getOpcode() == RISCV::TH_MVEQZ ? RISCV::TH_MVNEZ in commuteInstructionImpl()
2814 : RISCV::TH_MVEQZ)); in commuteInstructionImpl()
2818 case RISCV::PseudoCCMOVGPRNoX0: in commuteInstructionImpl()
2819 case RISCV::PseudoCCMOVGPR: { in commuteInstructionImpl()
2923 RISCV::PseudoV##OP##_##LMUL##_TIED
2938 case RISCV::PseudoV##OP##_##LMUL##_TIED: \
2939 NewOpc = RISCV::PseudoV##OP##_##LMUL; \
3071 BuildMI(MBB, II, DL, get(RISCV::PseudoReadVLENB), DestReg).setMIFlag(Flag); in getVLENFactoredAmount()
3078 BuildMI(MBB, II, DL, get(RISCV::SLLI), DestReg) in getVLENFactoredAmount()
3090 Opc = RISCV::SH3ADD; in getVLENFactoredAmount()
3093 Opc = RISCV::SH2ADD; in getVLENFactoredAmount()
3096 Opc = RISCV::SH1ADD; in getVLENFactoredAmount()
3102 BuildMI(MBB, II, DL, get(RISCV::SLLI), DestReg) in getVLENFactoredAmount()
3111 Register ScaledRegister = MRI.createVirtualRegister(&RISCV::GPRRegClass); in getVLENFactoredAmount()
3113 BuildMI(MBB, II, DL, get(RISCV::SLLI), ScaledRegister) in getVLENFactoredAmount()
3117 BuildMI(MBB, II, DL, get(RISCV::ADD), DestReg) in getVLENFactoredAmount()
3122 Register ScaledRegister = MRI.createVirtualRegister(&RISCV::GPRRegClass); in getVLENFactoredAmount()
3124 BuildMI(MBB, II, DL, get(RISCV::SLLI), ScaledRegister) in getVLENFactoredAmount()
3128 BuildMI(MBB, II, DL, get(RISCV::SUB), DestReg) in getVLENFactoredAmount()
3133 Register N = MRI.createVirtualRegister(&RISCV::GPRRegClass); in getVLENFactoredAmount()
3135 BuildMI(MBB, II, DL, get(RISCV::MUL), DestReg) in getVLENFactoredAmount()
3140 Register Acc = MRI.createVirtualRegister(&RISCV::GPRRegClass); in getVLENFactoredAmount()
3141 BuildMI(MBB, II, DL, get(RISCV::ADDI), Acc) in getVLENFactoredAmount()
3142 .addReg(RISCV::X0) in getVLENFactoredAmount()
3149 BuildMI(MBB, II, DL, get(RISCV::SLLI), DestReg) in getVLENFactoredAmount()
3154 BuildMI(MBB, II, DL, get(RISCV::ADD), Acc) in getVLENFactoredAmount()
3161 BuildMI(MBB, II, DL, get(RISCV::ADD), DestReg) in getVLENFactoredAmount()
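getVLENFactoredAmount() materializes NumOfVReg x VLENB into DestReg: it reads vlenb with PseudoReadVLENB, then scales by a plain shift for powers of two, sh1add/sh2add/sh3add for 3, 5, and 9 times a power of two when Zba is present, shift-plus-add/sub for 2^k plus or minus 1, a MUL against a materialized constant when M is available, and a shift/accumulate loop otherwise. The cheapest path, backed by the SLLI matches above:

```cpp
// Power-of-two scaling: DestReg already holds VLENB, and NumOfVReg is
// 1 << ShiftAmount. Log2_32 is from llvm/Support/MathExtras.h.
uint32_t ShiftAmount = Log2_32(NumOfVReg);
if (ShiftAmount != 0)
  BuildMI(MBB, II, DL, get(RISCV::SLLI), DestReg)
      .addReg(DestReg, RegState::Kill)
      .addImm(ShiftAmount)
      .setMIFlag(Flag);
```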
3177 bool RISCV::isSEXT_W(const MachineInstr &MI) { in isSEXT_W()
3178 return MI.getOpcode() == RISCV::ADDIW && MI.getOperand(1).isReg() && in isSEXT_W()
3183 bool RISCV::isZEXT_W(const MachineInstr &MI) { in isZEXT_W()
3184 return MI.getOpcode() == RISCV::ADD_UW && MI.getOperand(1).isReg() && in isZEXT_W()
3185 MI.getOperand(2).isReg() && MI.getOperand(2).getReg() == RISCV::X0; in isZEXT_W()
3189 bool RISCV::isZEXT_B(const MachineInstr &MI) { in isZEXT_B()
3190 return MI.getOpcode() == RISCV::ANDI && MI.getOperand(1).isReg() && in isZEXT_B()
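The three predicates above are truncated by the match window; their full shape follows from the instruction semantics (sext.w is addiw rd, rs, 0; zext.w is add.uw rd, rs, x0; zext.b is andi rd, rs, 255). A reconstruction of the two cut-off bodies, with the trailing operand checks inferred rather than quoted:

```cpp
// Reconstructed predicate bodies; the getOperand(2) checks are
// inferred from the canonical encodings, not visible in the listing.
bool RISCV::isSEXT_W(const MachineInstr &MI) {
  return MI.getOpcode() == RISCV::ADDIW && MI.getOperand(1).isReg() &&
         MI.getOperand(2).isImm() && MI.getOperand(2).getImm() == 0;
}

bool RISCV::isZEXT_B(const MachineInstr &MI) {
  return MI.getOpcode() == RISCV::ANDI && MI.getOperand(1).isReg() &&
         MI.getOperand(2).isImm() && MI.getOperand(2).getImm() == 255;
}
```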
3198 case RISCV::VS1R_V: in isRVVWholeLoadStore()
3199 case RISCV::VS2R_V: in isRVVWholeLoadStore()
3200 case RISCV::VS4R_V: in isRVVWholeLoadStore()
3201 case RISCV::VS8R_V: in isRVVWholeLoadStore()
3202 case RISCV::VL1RE8_V: in isRVVWholeLoadStore()
3203 case RISCV::VL2RE8_V: in isRVVWholeLoadStore()
3204 case RISCV::VL4RE8_V: in isRVVWholeLoadStore()
3205 case RISCV::VL8RE8_V: in isRVVWholeLoadStore()
3206 case RISCV::VL1RE16_V: in isRVVWholeLoadStore()
3207 case RISCV::VL2RE16_V: in isRVVWholeLoadStore()
3208 case RISCV::VL4RE16_V: in isRVVWholeLoadStore()
3209 case RISCV::VL8RE16_V: in isRVVWholeLoadStore()
3210 case RISCV::VL1RE32_V: in isRVVWholeLoadStore()
3211 case RISCV::VL2RE32_V: in isRVVWholeLoadStore()
3212 case RISCV::VL4RE32_V: in isRVVWholeLoadStore()
3213 case RISCV::VL8RE32_V: in isRVVWholeLoadStore()
3214 case RISCV::VL1RE64_V: in isRVVWholeLoadStore()
3215 case RISCV::VL2RE64_V: in isRVVWholeLoadStore()
3216 case RISCV::VL4RE64_V: in isRVVWholeLoadStore()
3217 case RISCV::VL8RE64_V: in isRVVWholeLoadStore()
3222 bool RISCV::isRVVSpill(const MachineInstr &MI) { in isRVVSpill()
3233 RISCV::isRVVSpillForZvlsseg(unsigned Opcode) { in isRVVSpillForZvlsseg()
3237 case RISCV::PseudoVSPILL2_M1: in isRVVSpillForZvlsseg()
3238 case RISCV::PseudoVRELOAD2_M1: in isRVVSpillForZvlsseg()
3240 case RISCV::PseudoVSPILL2_M2: in isRVVSpillForZvlsseg()
3241 case RISCV::PseudoVRELOAD2_M2: in isRVVSpillForZvlsseg()
3243 case RISCV::PseudoVSPILL2_M4: in isRVVSpillForZvlsseg()
3244 case RISCV::PseudoVRELOAD2_M4: in isRVVSpillForZvlsseg()
3246 case RISCV::PseudoVSPILL3_M1: in isRVVSpillForZvlsseg()
3247 case RISCV::PseudoVRELOAD3_M1: in isRVVSpillForZvlsseg()
3249 case RISCV::PseudoVSPILL3_M2: in isRVVSpillForZvlsseg()
3250 case RISCV::PseudoVRELOAD3_M2: in isRVVSpillForZvlsseg()
3252 case RISCV::PseudoVSPILL4_M1: in isRVVSpillForZvlsseg()
3253 case RISCV::PseudoVRELOAD4_M1: in isRVVSpillForZvlsseg()
3255 case RISCV::PseudoVSPILL4_M2: in isRVVSpillForZvlsseg()
3256 case RISCV::PseudoVRELOAD4_M2: in isRVVSpillForZvlsseg()
3258 case RISCV::PseudoVSPILL5_M1: in isRVVSpillForZvlsseg()
3259 case RISCV::PseudoVRELOAD5_M1: in isRVVSpillForZvlsseg()
3261 case RISCV::PseudoVSPILL6_M1: in isRVVSpillForZvlsseg()
3262 case RISCV::PseudoVRELOAD6_M1: in isRVVSpillForZvlsseg()
3264 case RISCV::PseudoVSPILL7_M1: in isRVVSpillForZvlsseg()
3265 case RISCV::PseudoVRELOAD7_M1: in isRVVSpillForZvlsseg()
3267 case RISCV::PseudoVSPILL8_M1: in isRVVSpillForZvlsseg()
3268 case RISCV::PseudoVRELOAD8_M1: in isRVVSpillForZvlsseg()
3273 bool RISCV::isFaultFirstLoad(const MachineInstr &MI) { in isFaultFirstLoad()
3274 return MI.getNumExplicitDefs() == 2 && MI.modifiesRegister(RISCV::VL) && in isFaultFirstLoad()
3278 bool RISCV::hasEqualFRM(const MachineInstr &MI1, const MachineInstr &MI2) { in hasEqualFRM()
3280 RISCV::getNamedOperandIdx(MI1.getOpcode(), RISCV::OpName::frm); in hasEqualFRM()
3282 RISCV::getNamedOperandIdx(MI2.getOpcode(), RISCV::OpName::frm); in hasEqualFRM()
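hasEqualFRM() compares the rounding-mode operands of two FP instructions by looking up their frm operand indices. A likely body reconstructed from the two lookups above; the negative-index guard is an assumption:

```cpp
// Reconstructed comparison; returns false when either instruction has
// no frm operand (getNamedOperandIdx yields a negative index).
int Frm1Idx = RISCV::getNamedOperandIdx(MI1.getOpcode(), RISCV::OpName::frm);
int Frm2Idx = RISCV::getNamedOperandIdx(MI2.getOpcode(), RISCV::OpName::frm);
if (Frm1Idx < 0 || Frm2Idx < 0)
  return false;
return MI1.getOperand(Frm1Idx).getImm() == MI2.getOperand(Frm2Idx).getImm();
```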
3291 RISCV::getVectorLowDemandedScalarBits(uint16_t Opcode, unsigned Log2SEW) { in getVectorLowDemandedScalarBits()
3298 case RISCV::VSLL_VX: in getVectorLowDemandedScalarBits()
3299 case RISCV::VSRL_VX: in getVectorLowDemandedScalarBits()
3300 case RISCV::VSRA_VX: in getVectorLowDemandedScalarBits()
3302 case RISCV::VSSRL_VX: in getVectorLowDemandedScalarBits()
3303 case RISCV::VSSRA_VX: in getVectorLowDemandedScalarBits()
3308 case RISCV::VNSRL_WX: in getVectorLowDemandedScalarBits()
3309 case RISCV::VNSRA_WX: in getVectorLowDemandedScalarBits()
3311 case RISCV::VNCLIPU_WX: in getVectorLowDemandedScalarBits()
3312 case RISCV::VNCLIP_WX: in getVectorLowDemandedScalarBits()
3317 case RISCV::VADD_VX: in getVectorLowDemandedScalarBits()
3318 case RISCV::VSUB_VX: in getVectorLowDemandedScalarBits()
3319 case RISCV::VRSUB_VX: in getVectorLowDemandedScalarBits()
3321 case RISCV::VWADDU_VX: in getVectorLowDemandedScalarBits()
3322 case RISCV::VWSUBU_VX: in getVectorLowDemandedScalarBits()
3323 case RISCV::VWADD_VX: in getVectorLowDemandedScalarBits()
3324 case RISCV::VWSUB_VX: in getVectorLowDemandedScalarBits()
3325 case RISCV::VWADDU_WX: in getVectorLowDemandedScalarBits()
3326 case RISCV::VWSUBU_WX: in getVectorLowDemandedScalarBits()
3327 case RISCV::VWADD_WX: in getVectorLowDemandedScalarBits()
3328 case RISCV::VWSUB_WX: in getVectorLowDemandedScalarBits()
3330 case RISCV::VADC_VXM: in getVectorLowDemandedScalarBits()
3331 case RISCV::VADC_VIM: in getVectorLowDemandedScalarBits()
3332 case RISCV::VMADC_VXM: in getVectorLowDemandedScalarBits()
3333 case RISCV::VMADC_VIM: in getVectorLowDemandedScalarBits()
3334 case RISCV::VMADC_VX: in getVectorLowDemandedScalarBits()
3335 case RISCV::VSBC_VXM: in getVectorLowDemandedScalarBits()
3336 case RISCV::VMSBC_VXM: in getVectorLowDemandedScalarBits()
3337 case RISCV::VMSBC_VX: in getVectorLowDemandedScalarBits()
3339 case RISCV::VAND_VX: in getVectorLowDemandedScalarBits()
3340 case RISCV::VOR_VX: in getVectorLowDemandedScalarBits()
3341 case RISCV::VXOR_VX: in getVectorLowDemandedScalarBits()
3343 case RISCV::VMSEQ_VX: in getVectorLowDemandedScalarBits()
3344 case RISCV::VMSNE_VX: in getVectorLowDemandedScalarBits()
3345 case RISCV::VMSLTU_VX: in getVectorLowDemandedScalarBits()
3346 case RISCV::VMSLT_VX: in getVectorLowDemandedScalarBits()
3347 case RISCV::VMSLEU_VX: in getVectorLowDemandedScalarBits()
3348 case RISCV::VMSLE_VX: in getVectorLowDemandedScalarBits()
3349 case RISCV::VMSGTU_VX: in getVectorLowDemandedScalarBits()
3350 case RISCV::VMSGT_VX: in getVectorLowDemandedScalarBits()
3352 case RISCV::VMINU_VX: in getVectorLowDemandedScalarBits()
3353 case RISCV::VMIN_VX: in getVectorLowDemandedScalarBits()
3354 case RISCV::VMAXU_VX: in getVectorLowDemandedScalarBits()
3355 case RISCV::VMAX_VX: in getVectorLowDemandedScalarBits()
3357 case RISCV::VMUL_VX: in getVectorLowDemandedScalarBits()
3358 case RISCV::VMULH_VX: in getVectorLowDemandedScalarBits()
3359 case RISCV::VMULHU_VX: in getVectorLowDemandedScalarBits()
3360 case RISCV::VMULHSU_VX: in getVectorLowDemandedScalarBits()
3362 case RISCV::VDIVU_VX: in getVectorLowDemandedScalarBits()
3363 case RISCV::VDIV_VX: in getVectorLowDemandedScalarBits()
3364 case RISCV::VREMU_VX: in getVectorLowDemandedScalarBits()
3365 case RISCV::VREM_VX: in getVectorLowDemandedScalarBits()
3367 case RISCV::VWMUL_VX: in getVectorLowDemandedScalarBits()
3368 case RISCV::VWMULU_VX: in getVectorLowDemandedScalarBits()
3369 case RISCV::VWMULSU_VX: in getVectorLowDemandedScalarBits()
3371 case RISCV::VMACC_VX: in getVectorLowDemandedScalarBits()
3372 case RISCV::VNMSAC_VX: in getVectorLowDemandedScalarBits()
3373 case RISCV::VMADD_VX: in getVectorLowDemandedScalarBits()
3374 case RISCV::VNMSUB_VX: in getVectorLowDemandedScalarBits()
3376 case RISCV::VWMACCU_VX: in getVectorLowDemandedScalarBits()
3377 case RISCV::VWMACC_VX: in getVectorLowDemandedScalarBits()
3378 case RISCV::VWMACCSU_VX: in getVectorLowDemandedScalarBits()
3379 case RISCV::VWMACCUS_VX: in getVectorLowDemandedScalarBits()
3381 case RISCV::VMERGE_VXM: in getVectorLowDemandedScalarBits()
3383 case RISCV::VMV_V_X: in getVectorLowDemandedScalarBits()
3385 case RISCV::VSADDU_VX: in getVectorLowDemandedScalarBits()
3386 case RISCV::VSADD_VX: in getVectorLowDemandedScalarBits()
3387 case RISCV::VSSUBU_VX: in getVectorLowDemandedScalarBits()
3388 case RISCV::VSSUB_VX: in getVectorLowDemandedScalarBits()
3390 case RISCV::VAADDU_VX: in getVectorLowDemandedScalarBits()
3391 case RISCV::VAADD_VX: in getVectorLowDemandedScalarBits()
3392 case RISCV::VASUBU_VX: in getVectorLowDemandedScalarBits()
3393 case RISCV::VASUB_VX: in getVectorLowDemandedScalarBits()
3395 case RISCV::VSMUL_VX: in getVectorLowDemandedScalarBits()
3397 case RISCV::VMV_S_X: in getVectorLowDemandedScalarBits()
3402 unsigned RISCV::getRVVMCOpcode(unsigned RVVPseudoOpcode) { in getRVVMCOpcode()