author    Michel Dänzer <michel.daenzer@amd.com>    2012-11-01 10:30:19 +0100
committer Tom Stellard <thomas.stellard@amd.com>    2012-11-12 16:02:46 +0000
commit    c312ea9a05e56a63be20382e61b8a6a544682a18 (patch)
tree      c0a4199ce68f9fbb23227f9c446b9eebee528709
parent    c1e2ffda3d51028c480e5fd40d0424e9da79e839 (diff)
SI: Update flow control comments to match current code.
Patch by: Michel Dänzer
Reviewed-by: Tom Stellard <thomas.stellard@amd.com>
Signed-off-by: Michel Dänzer <michel.daenzer@amd.com>
-rw-r--r--  lib/Target/AMDGPU/SILowerFlowControl.cpp  |  9
1 file changed, 5 insertions(+), 4 deletions(-)
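For context, the comments this patch updates describe how the pass rewrites structured IF/THEN control flow into EXEC-mask manipulation: save the exec mask, AND it with VCC to enable only the lanes taking the branch, XOR the saved copy against the new mask, and conditionally skip the block when no lanes remain. Below is a minimal, illustrative sketch of how such a sequence might be emitted with LLVM's MachineInstr builder; the helper name, the choice of SGPR0_SGPR1 as the save register, and the insertion point are assumptions for the example, not the pass's actual implementation.

```cpp
// Illustrative sketch only: emits the "If" lowering sequence described in the
// updated comments. Register choice (SGPR0_SGPR1) and the helper itself are
// assumptions, not code from SILowerFlowControl.cpp.
#include "AMDGPU.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/Target/TargetInstrInfo.h" // 2012-era header location

using namespace llvm;

static void emitIfSequence(MachineBasicBlock &MBB,
                           MachineBasicBlock::iterator I,
                           const TargetInstrInfo *TII,
                           MachineBasicBlock *SkipTarget) {
  DebugLoc DL = I->getDebugLoc();
  unsigned SaveReg = AMDGPU::SGPR0_SGPR1; // assumed save register

  // %SGPR0 = S_MOV_B64 %EXEC          ; save the current exec mask
  BuildMI(MBB, I, DL, TII->get(AMDGPU::S_MOV_B64), SaveReg)
      .addReg(AMDGPU::EXEC);

  // %EXEC = S_AND_B64 %VCC, %EXEC     ; enable only lanes taking the If block
  BuildMI(MBB, I, DL, TII->get(AMDGPU::S_AND_B64), AMDGPU::EXEC)
      .addReg(AMDGPU::VCC)
      .addReg(AMDGPU::EXEC);

  // %SGPR0 = S_XOR_B64 %SGPR0, %EXEC  ; clear live bits from the saved mask
  BuildMI(MBB, I, DL, TII->get(AMDGPU::S_XOR_B64), SaveReg)
      .addReg(SaveReg)
      .addReg(AMDGPU::EXEC);

  // S_CBRANCH_EXECZ label0            ; skip the block if no lanes remain
  BuildMI(MBB, I, DL, TII->get(AMDGPU::S_CBRANCH_EXECZ))
      .addMBB(SkipTarget);
}
```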
diff --git a/lib/Target/AMDGPU/SILowerFlowControl.cpp b/lib/Target/AMDGPU/SILowerFlowControl.cpp
index b90168844fa..0d90c136ee8 100644
--- a/lib/Target/AMDGPU/SILowerFlowControl.cpp
+++ b/lib/Target/AMDGPU/SILowerFlowControl.cpp
@@ -31,6 +31,7 @@
//
// %SGPR0 = S_MOV_B64 %EXEC // Save the current exec mask
// %EXEC = S_AND_B64 %VCC, %EXEC // Update the exec mask
+// %SGPR0 = S_XOR_B64 %SGPR0, %EXEC // Clear live bits from saved exec mask
// S_CBRANCH_EXECZ label0 // This instruction is an
// // optimization which allows us to
// // branch if all the bits of
@@ -38,14 +39,14 @@
// %VGPR0 = V_ADD_F32 %VGPR0, %VGPR0 // Do the IF block of the branch
//
// label0:
-// %EXEC = S_NOT_B64 %EXEC // Invert the exec mask for the
-// // Then block.
-// %EXEC = S_AND_B64 %SGPR0, %EXEC
+// %SGPR2 = S_MOV_B64 %EXEC // Save the current exec mask
+// %EXEC = S_MOV_B64 %SGPR0 // Restore the exec mask for the Then block
+// %SGPR0 = S_MOV_B64 %SGPR2 // Save the exec mask from the If block
// S_BRANCH_EXECZ label1 // Use our branch optimization
// // instruction again.
// %VGPR0 = V_SUB_F32 %VGPR0, %VGPR // Do the THEN block
// label1:
-// S_MOV_B64 // Restore the old EXEC value
+// %EXEC = S_OR_B64 %EXEC, %SGPR0 // Re-enable saved exec mask bits
//===----------------------------------------------------------------------===//
#include "AMDGPU.h"