From f036f7a1e79910bf5b5b6f37d2e512b4f01155a0 Mon Sep 17 00:00:00 2001
From: Jakob Stoklund Olesen
Date: Wed, 29 Aug 2012 21:19:21 +0000
Subject: Rename hasVolatileMemoryRef() to hasOrderedMemoryRef().

Ordered memory operations are more constrained than volatile loads and
stores because they must be ordered with respect to all other memory
operations.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@162861 91177308-0d34-0410-b5e6-96231b3b80d8
---
 include/llvm/CodeGen/MachineInstr.h          | 10 +++++-----
 lib/CodeGen/MachineInstr.cpp                 | 16 ++++++++--------
 lib/CodeGen/ScheduleDAGInstrs.cpp            |  2 +-
 lib/Target/Hexagon/HexagonVLIWPacketizer.cpp |  4 ++--
 4 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/include/llvm/CodeGen/MachineInstr.h b/include/llvm/CodeGen/MachineInstr.h
index e88351cb989..d636dfd4416 100644
--- a/include/llvm/CodeGen/MachineInstr.h
+++ b/include/llvm/CodeGen/MachineInstr.h
@@ -857,11 +857,11 @@ public:
   bool isSafeToReMat(const TargetInstrInfo *TII, AliasAnalysis *AA,
                      unsigned DstReg) const;
 
-  /// hasVolatileMemoryRef - Return true if this instruction may have a
-  /// volatile memory reference, or if the information describing the
-  /// memory reference is not available. Return false if it is known to
-  /// have no volatile memory references.
-  bool hasVolatileMemoryRef() const;
+  /// hasOrderedMemoryRef - Return true if this instruction may have an ordered
+  /// or volatile memory reference, or if the information describing the memory
+  /// reference is not available. Return false if it is known to have no
+  /// ordered or volatile memory references.
+  bool hasOrderedMemoryRef() const;
 
   /// isInvariantLoad - Return true if this instruction is loading from a
   /// location whose value is invariant across the function. For example,
diff --git a/lib/CodeGen/MachineInstr.cpp b/lib/CodeGen/MachineInstr.cpp
index 7ff79b3a223..4a4c7217bd9 100644
--- a/lib/CodeGen/MachineInstr.cpp
+++ b/lib/CodeGen/MachineInstr.cpp
@@ -1348,7 +1348,7 @@ bool MachineInstr::isSafeToMove(const TargetInstrInfo *TII,
   // volatiles, but it is required for atomic loads. It is not safe to move
   // a load across an atomic load with Ordering > Monotonic.
   if (mayStore() || isCall() ||
-      (mayLoad() && hasVolatileMemoryRef())) {
+      (mayLoad() && hasOrderedMemoryRef())) {
     SawStore = true;
     return false;
   }
@@ -1396,11 +1396,11 @@ bool MachineInstr::isSafeToReMat(const TargetInstrInfo *TII,
   return true;
 }
 
-/// hasVolatileMemoryRef - Return true if this instruction may have a
-/// volatile memory reference, or if the information describing the
-/// memory reference is not available. Return false if it is known to
-/// have no volatile memory references.
-bool MachineInstr::hasVolatileMemoryRef() const {
+/// hasOrderedMemoryRef - Return true if this instruction may have an ordered
+/// or volatile memory reference, or if the information describing the memory
+/// reference is not available. Return false if it is known to have no
+/// ordered or volatile memory references.
+bool MachineInstr::hasOrderedMemoryRef() const {
   // An instruction known never to access memory won't have a volatile access.
   if (!mayStore() &&
       !mayLoad() &&
@@ -1413,9 +1413,9 @@ bool MachineInstr::hasVolatileMemoryRef() const {
   if (memoperands_empty())
     return true;
 
-  // Check the memory reference information for volatile references.
+  // Check the memory reference information for ordered references.
   for (mmo_iterator I = memoperands_begin(), E = memoperands_end(); I != E; ++I)
-    if ((*I)->isVolatile())
+    if (!(*I)->isUnordered())
       return true;
 
   return false;
diff --git a/lib/CodeGen/ScheduleDAGInstrs.cpp b/lib/CodeGen/ScheduleDAGInstrs.cpp
index dfad18b394e..c25f39d5fd4 100644
--- a/lib/CodeGen/ScheduleDAGInstrs.cpp
+++ b/lib/CodeGen/ScheduleDAGInstrs.cpp
@@ -502,7 +502,7 @@ void ScheduleDAGInstrs::addVRegUseDeps(SUnit *SU, unsigned OperIdx) {
 /// (like a call or something with unmodeled side effects).
 static inline bool isGlobalMemoryObject(AliasAnalysis *AA, MachineInstr *MI) {
   if (MI->isCall() || MI->hasUnmodeledSideEffects() ||
-      (MI->hasVolatileMemoryRef() &&
+      (MI->hasOrderedMemoryRef() &&
        (!MI->mayLoad() || !MI->isInvariantLoad(AA))))
     return true;
   return false;
diff --git a/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp b/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
index a03ed03365b..3d5f685028e 100644
--- a/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
+++ b/lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
@@ -3474,8 +3474,8 @@ bool HexagonPacketizerList::isLegalToPacketizeTogether(SUnit *SUI, SUnit *SUJ) {
     // 1. Two loads unless they are volatile.
     // 2. Two stores in V4 unless they are volatile.
     else if ((DepType == SDep::Order) &&
-             !I->hasVolatileMemoryRef() &&
-             !J->hasVolatileMemoryRef()) {
+             !I->hasOrderedMemoryRef() &&
+             !J->hasOrderedMemoryRef()) {
       if (QRI->Subtarget.hasV4TOps() && // hexagonv4 allows dual store.
           MCIDI.mayStore() && MCIDJ.mayStore()) {
-- 
cgit v1.2.3
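
What follows is a minimal, self-contained C++ sketch of the distinction the
rename captures; it is not code from the LLVM tree, and the types
MemOperandModel and InstrModel are hypothetical stand-ins for
MachineMemOperand and MachineInstr. The point it models: an operand is
"unordered" only when it carries no constraint beyond normal aliasing, so
!isUnordered() covers every volatile reference the old predicate caught plus
non-volatile atomic references, which must stay ordered with respect to all
other memory operations.

#include <cassert>
#include <vector>

// Hypothetical stand-in for a memory operand; not LLVM's MachineMemOperand.
struct MemOperandModel {
  bool Volatile = false; // may not be deleted, duplicated, or reordered
  bool Atomic = false;   // must stay ordered against all other memory ops
  bool isVolatile() const { return Volatile; }
  // Unordered: no constraint beyond what alias analysis already imposes.
  bool isUnordered() const { return !Volatile && !Atomic; }
};

// Hypothetical stand-in for an instruction with attached memory operands.
struct InstrModel {
  bool MayLoad = false;
  bool MayStore = false;
  std::vector<MemOperandModel> MemOperands;

  // Old predicate: fires only on volatile references (conservatively true
  // when no operand information is attached).
  bool hasVolatileMemoryRef() const {
    if (!MayLoad && !MayStore)
      return false; // never touches memory
    if (MemOperands.empty())
      return true;  // no information available: assume the worst
    for (const MemOperandModel &MMO : MemOperands)
      if (MMO.isVolatile())
        return true;
    return false;
  }

  // New predicate: same shape, but any non-unordered (volatile or atomic)
  // reference counts, so it fires on a superset of the old cases.
  bool hasOrderedMemoryRef() const {
    if (!MayLoad && !MayStore)
      return false;
    if (MemOperands.empty())
      return true;
    for (const MemOperandModel &MMO : MemOperands)
      if (!MMO.isUnordered())
        return true;
    return false;
  }
};

int main() {
  // A non-volatile atomic load: invisible to the old predicate,
  // caught by the new one.
  InstrModel AtomicLoad;
  AtomicLoad.MayLoad = true;
  MemOperandModel MMO;
  MMO.Atomic = true;
  AtomicLoad.MemOperands.push_back(MMO);

  assert(!AtomicLoad.hasVolatileMemoryRef());
  assert(AtomicLoad.hasOrderedMemoryRef());
  return 0;
}

Under this model, hasOrderedMemoryRef() is always at least as conservative as
hasVolatileMemoryRef(), which is why the call sites in the patch can swap the
predicate without loosening any of their existing checks.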