/aosp_15_r20/external/llvm/lib/CodeGen/ |
H A D | ImplicitNullChecks.cpp | 485 ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI, in insertFaultingLoad()
|
H A D | TargetInstrInfo.cpp | 780 MachineInstr &LoadMI, in foldMemoryOperand()
|
H A D | InlineSpiller.cpp | 728 MachineInstr *LoadMI) { in foldMemoryOperand()
|
/aosp_15_r20/external/llvm/lib/Target/X86/ |
H A D | X86MCInstLower.cpp | 908 MCInst LoadMI; in LowerFAULTING_LOAD_OP() local
|
H A D | X86InstrInfo.cpp | 6165 static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI, in isNonFoldablePartialRegisterLoad()
        | 6220 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
H A D | X86FastISel.cpp | 778 MachineInstrBuilder LoadMI = in handleConstantAddresses() local
|
/aosp_15_r20/external/llvm/lib/Target/AArch64/ |
H A D | AArch64LoadStoreOptimizer.cpp | 1103 MachineInstr &LoadMI = *I; in findMatchingStore() local
|
/aosp_15_r20/external/swiftshader/third_party/llvm-16.0/llvm/lib/Target/PowerPC/ |
H A D | PPCMIPeephole.cpp | 554 MachineInstr *LoadMI = MRI->getVRegDef(FeedReg1); in simplifyCode() local
|
/aosp_15_r20/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/ |
H A D | PPCMIPeephole.cpp | 370 MachineInstr *LoadMI = MRI->getVRegDef(FeedReg1); in simplifyCode() local
|
/aosp_15_r20/external/llvm/lib/Target/SystemZ/ |
H A D | SystemZInstrInfo.cpp | 1029 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
/aosp_15_r20/external/swiftshader/third_party/llvm-16.0/llvm/lib/CodeGen/ |
H A D | InlineSpiller.cpp | 821 MachineInstr *LoadMI) { in foldMemoryOperand()
|
H A D | TargetInstrInfo.cpp | 652 MachineInstr &LoadMI, in foldMemoryOperand()
|
/aosp_15_r20/external/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/ |
H A D | InlineSpiller.cpp | 789 MachineInstr *LoadMI) { in foldMemoryOperand()
|
H A D | TargetInstrInfo.cpp | 622 MachineInstr &LoadMI, in foldMemoryOperand()
|
/aosp_15_r20/external/swiftshader/third_party/llvm-16.0/llvm/lib/Target/AArch64/GISel/ |
H A D | AArch64InstructionSelector.cpp | 2621 auto *LoadMI = emitLoadFromConstantPool(FPImm, MIB); in select() local
        | 3269 auto *LoadMI = getOpcodeDef(TargetOpcode::G_LOAD, SrcReg, MRI); in select() local
        | 4386 MachineInstr *LoadMI = nullptr; in emitLoadFromConstantPool() local
|
/aosp_15_r20/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
H A D | AArch64LoadStoreOptimizer.cpp | 1175 MachineInstr &LoadMI = *I; in findMatchingStore() local
|
H A D | AArch64InstructionSelector.cpp | 2181 auto *LoadMI = getOpcodeDef(TargetOpcode::G_LOAD, SrcReg, MRI); in select() local
        | 3180 MachineInstr *LoadMI = nullptr; in emitLoadFromConstantPool() local
|
/aosp_15_r20/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/SystemZ/ |
H A D | SystemZInstrInfo.cpp | 1220 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
/aosp_15_r20/external/swiftshader/third_party/llvm-16.0/llvm/lib/Target/AArch64/ |
H A D | AArch64LoadStoreOptimizer.cpp | 1223 MachineInstr &LoadMI = *I; in findMatchingStore() local
|
/aosp_15_r20/external/swiftshader/third_party/llvm-16.0/llvm/lib/Target/SystemZ/ |
H A D | SystemZInstrInfo.cpp | 1338 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
/aosp_15_r20/external/swiftshader/third_party/llvm-16.0/llvm/lib/CodeGen/GlobalISel/ |
H A D | CombinerHelper.cpp | 518 GAnyLoad *LoadMI = dyn_cast<GAnyLoad>(&MI); in matchCombineExtendingLoads() local
        | 721 GAnyLoad *LoadMI = dyn_cast<GAnyLoad>(MRI.getVRegDef(SrcReg)); in matchCombineLoadWithAndMask() local
        | 816 if (auto *LoadMI = getOpcodeDef<GSExtLoad>(LoadUser, MRI)) { in matchSextTruncSextLoad() local
|
H A D | LegalizerHelper.cpp | 1040 auto &LoadMI = cast<GLoad>(MI); in narrowScalar() local
        | 1058 auto &LoadMI = cast<GExtLoad>(MI); in narrowScalar() local
        | 2933 LegalizerHelper::LegalizeResult LegalizerHelper::lowerLoad(GAnyLoad &LoadMI) { in lowerLoad()
|
/aosp_15_r20/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/ |
H A D | X86InstrInfo.cpp | 5070 static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI, in isNonFoldablePartialRegisterLoad()
        | 5193 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
/aosp_15_r20/external/swiftshader/third_party/llvm-16.0/llvm/lib/Target/X86/ |
H A D | X86InstrInfo.cpp | 6350 static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI, in isNonFoldablePartialRegisterLoad()
        | 6624 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
H A D | X86FastISel.cpp | 786 MachineInstrBuilder LoadMI = in handleConstantAddresses() local
|