diff --git a/test/CodeGen/X86/stackmap-nops.ll b/test/CodeGen/X86/stackmap-nops.ll
index 3888603c969..7de63af852a 100644
--- a/test/CodeGen/X86/stackmap-nops.ll
+++ b/test/CodeGen/X86/stackmap-nops.ll
@@ -224,6 +224,9 @@ entry:
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 28, i32 28)
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 29, i32 29)
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 30, i32 30)
+; Add an extra stackmap with a zero-length shadow to thwart the shadow
+; optimization. This will force all 15 bytes of the previous shadow to be
+; padded with nops.
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 31, i32 0)
   ret void
 }
diff --git a/test/MC/X86/stackmap-nops.ll b/test/MC/X86/stackmap-nops.ll
index 2b0b88c80fa..a0d44186603 100644
--- a/test/MC/X86/stackmap-nops.ll
+++ b/test/MC/X86/stackmap-nops.ll
@@ -41,6 +41,9 @@ entry:
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 13, i32 13)
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 14, i32 14)
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 15, i32 15)
+; Add an extra stackmap with a zero-length shadow to thwart the shadow
+; optimization. This will force all 15 bytes of the previous shadow to be
+; padded with nops.
   tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 16, i32 0)
   ret void
 }