Grow the stackmap/patchpoint format to hold 64-bit IDs.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@197255 91177308-0d34-0410-b5e6-96231b3b80d8

parent 539e93120c
commit cd8314d63c
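In IR terms, the visible change for frontends is that the first (ID) operand of llvm.experimental.stackmap and llvm.experimental.patchpoint.void/i64 becomes i64, and the emitted stack map section stores that ID as a uint64. A minimal sketch of a caller written against the new signatures, using the post-commit IR syntax from the tests below; the function name, ID values and arguments here are invented for illustration:

declare void @llvm.experimental.stackmap(i64, i32, ...)
declare i64 @llvm.experimental.patchpoint.i64(i64, i32, i8*, i32, ...)

define i64 @example(i64 %x) {
entry:
  ; The ID operand is now i64, so IDs above UINT32_MAX are representable.
  tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 4294967296, i32 0, i64 %x)
  ; <id> and <numBytes> keep their positions; only the <id> type changes.
  %r = tail call i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 1, i32 15, i8* null, i32 1, i64 %x)
  ret i64 %r
}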
@@ -1,4 +1,5 @@
 //===------------------- StackMaps.h - StackMaps ----------------*- C++ -*-===//

 //
 // The LLVM Compiler Infrastructure
 //
@@ -132,11 +133,11 @@ private:

 struct CallsiteInfo {
 const MCExpr *CSOffsetExpr;
-unsigned ID;
+uint64_t ID;
 LocationVec Locations;
 LiveOutVec LiveOuts;
 CallsiteInfo() : CSOffsetExpr(0), ID(0) {}
-CallsiteInfo(const MCExpr *CSOffsetExpr, unsigned ID,
+CallsiteInfo(const MCExpr *CSOffsetExpr, uint64_t ID,
 LocationVec &Locations, LiveOutVec &LiveOuts)
 : CSOffsetExpr(CSOffsetExpr), ID(ID), Locations(Locations),
 LiveOuts(LiveOuts) {}
@@ -168,7 +169,6 @@ private:
 CallsiteInfoList CSInfos;
 ConstantPool ConstPool;

-/// Parse
 std::pair<Location, MachineInstr::const_mop_iterator>
 parseOperand(MachineInstr::const_mop_iterator MOI,
 MachineInstr::const_mop_iterator MOE) const;
@@ -186,7 +186,7 @@ private:
 /// instruction are stored, and outputs a label to record the offset of
 /// the call from the start of the text section. In special cases (e.g. AnyReg
 /// calling convention) the return register is also recorded if requested.
-void recordStackMapOpers(const MachineInstr &MI, uint32_t ID,
+void recordStackMapOpers(const MachineInstr &MI, uint64_t ID,
 MachineInstr::const_mop_iterator MOI,
 MachineInstr::const_mop_iterator MOE,
 bool recordResult = false);

@@ -458,13 +458,13 @@ def int_invariant_end : Intrinsic<[],
 //===------------------------ Stackmap Intrinsics -------------------------===//
 //
 def int_experimental_stackmap : Intrinsic<[],
-[llvm_i32_ty, llvm_i32_ty, llvm_vararg_ty]>;
+[llvm_i64_ty, llvm_i32_ty, llvm_vararg_ty]>;
 def int_experimental_patchpoint_void : Intrinsic<[],
-[llvm_i32_ty, llvm_i32_ty,
+[llvm_i64_ty, llvm_i32_ty,
 llvm_ptr_ty, llvm_i32_ty,
 llvm_vararg_ty]>;
 def int_experimental_patchpoint_i64 : Intrinsic<[llvm_i64_ty],
-[llvm_i32_ty, llvm_i32_ty,
+[llvm_i64_ty, llvm_i32_ty,
 llvm_ptr_ty, llvm_i32_ty,
 llvm_vararg_ty]>;

@@ -6893,7 +6893,7 @@ void SelectionDAGBuilder::visitStackmap(const CallInst &CI) {

 /// \brief Lower llvm.experimental.patchpoint directly to its target opcode.
 void SelectionDAGBuilder::visitPatchpoint(const CallInst &CI) {
-// void|i64 @llvm.experimental.patchpoint.void|i64(i32 <id>,
+// void|i64 @llvm.experimental.patchpoint.void|i64(i64 <id>,
 // i32 <numBytes>,
 // i8* <target>,
 // i32 <numArgs>,
@@ -6941,7 +6941,7 @@ void SelectionDAGBuilder::visitPatchpoint(const CallInst &CI) {
 // Add the <id> and <numBytes> constants.
 SDValue IDVal = getValue(CI.getOperand(PatchPointOpers::IDPos));
 Ops.push_back(DAG.getTargetConstant(
-cast<ConstantSDNode>(IDVal)->getZExtValue(), MVT::i32));
+cast<ConstantSDNode>(IDVal)->getZExtValue(), MVT::i64));
 SDValue NBytesVal = getValue(CI.getOperand(PatchPointOpers::NBytesPos));
 Ops.push_back(DAG.getTargetConstant(
 cast<ConstantSDNode>(NBytesVal)->getZExtValue(), MVT::i32));

@@ -183,7 +183,7 @@ StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
 return LiveOuts;
 }

-void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint32_t ID,
+void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint64_t ID,
 MachineInstr::const_mop_iterator MOI,
 MachineInstr::const_mop_iterator MOE,
 bool recordResult) {
@@ -244,7 +244,6 @@ void StackMaps::recordStackMap(const MachineInstr &MI) {
 assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

 int64_t ID = MI.getOperand(0).getImm();
-assert((int32_t)ID == ID && "Stack maps hold 32-bit IDs");
 recordStackMapOpers(MI, ID, llvm::next(MI.operands_begin(), 2),
 getStackMapEndMOP(MI.operands_begin(),
 MI.operands_end()));
@@ -255,7 +254,7 @@ void StackMaps::recordPatchPoint(const MachineInstr &MI) {

 PatchPointOpers opers(&MI);
 int64_t ID = opers.getMetaOper(PatchPointOpers::IDPos).getImm();
-assert((int32_t)ID == ID && "Stack maps hold 32-bit IDs");
+
 MachineInstr::const_mop_iterator MOI =
 llvm::next(MI.operands_begin(), opers.getStackMapStartIdx());
 recordStackMapOpers(MI, ID, MOI, getStackMapEndMOP(MOI, MI.operands_end()),
@@ -280,7 +279,7 @@ void StackMaps::recordPatchPoint(const MachineInstr &MI) {
 /// int64 : Constants[NumConstants]
 /// uint32 : NumRecords
 /// StkMapRecord[NumRecords] {
-/// uint32 : PatchPoint ID
+/// uint64 : PatchPoint ID
 /// uint32 : Instruction Offset
 /// uint16 : Reserved (record flags)
 /// uint16 : NumLocations
@@ -345,7 +344,7 @@ void StackMaps::serializeToStackMapSection() {
 CSIE = CSInfos.end();
 CSII != CSIE; ++CSII) {

-unsigned CallsiteID = CSII->ID;
+uint64_t CallsiteID = CSII->ID;
 const LocationVec &CSLocs = CSII->Locations;
 const LiveOutVec &LiveOuts = CSII->LiveOuts;

@@ -356,7 +355,7 @@ void StackMaps::serializeToStackMapSection() {
 // simple overflow checks, but we may eventually communicate other
 // compilation errors this way.
 if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
-AP.OutStreamer.EmitIntValue(UINT32_MAX, 4); // Invalid ID.
+AP.OutStreamer.EmitIntValue(UINT64_MAX, 8); // Invalid ID.
 AP.OutStreamer.EmitValue(CSII->CSOffsetExpr, 4);
 AP.OutStreamer.EmitIntValue(0, 2); // Reserved.
 AP.OutStreamer.EmitIntValue(0, 2); // 0 locations.
@@ -364,7 +363,7 @@ void StackMaps::serializeToStackMapSection() {
 continue;
 }

-AP.OutStreamer.EmitIntValue(CallsiteID, 4);
+AP.OutStreamer.EmitIntValue(CallsiteID, 8);
 AP.OutStreamer.EmitValue(CSII->CSOffsetExpr, 4);

 // Reserved for flags.
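Concretely, each call-site record in the emitted stack map section now begins with a uint64 ID instead of a uint32. A rough sketch of how a runtime reading the section might describe that record prefix, assuming the field order documented in the comment above; this struct is illustrative only, not part of the patch, and a real reader should parse field by field since the section is packed with no alignment padding:

#include <cstdint>

// Illustrative prefix of one StkMapRecord, mirroring the layout comment
// in StackMaps.cpp after this change.
struct StkMapRecordPrefix {
  uint64_t PatchPointID;      // widened from uint32_t by this commit
  uint32_t InstructionOffset; // offset of the call from the start of the text section
  uint16_t Reserved;          // record flags
  uint16_t NumLocations;      // number of Location entries that follow
};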
@@ -7,11 +7,11 @@ define i64 @anyreglimit(i64 %v1, i64 %v2, i64 %v3, i64 %v4, i64 %v5, i64 %v6,
 i64 %v7, i64 %v8, i64 %v9, i64 %v10, i64 %v11, i64 %v12,
 i64 %v13, i64 %v14, i64 %v15, i64 %v16) {
 entry:
-%result = tail call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 12, i32 15, i8* inttoptr (i64 0 to i8*), i32 16,
+%result = tail call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 12, i32 15, i8* inttoptr (i64 0 to i8*), i32 16,
 i64 %v1, i64 %v2, i64 %v3, i64 %v4, i64 %v5, i64 %v6,
 i64 %v7, i64 %v8, i64 %v9, i64 %v10, i64 %v11, i64 %v12,
 i64 %v13, i64 %v14, i64 %v15, i64 %v16)
 ret i64 %result
 }

-declare i64 @llvm.experimental.patchpoint.i64(i32, i32, i8*, i32, ...)
+declare i64 @llvm.experimental.patchpoint.i64(i64, i32, i8*, i32, ...)

@@ -32,7 +32,7 @@
 ; CHECK-NEXT: .long 3
 define i64 @test() nounwind ssp uwtable {
 entry:
-call anyregcc void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 0, i32 15, i8* null, i32 2, i32 1, i32 2, i64 3)
+call anyregcc void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 0, i32 15, i8* null, i32 2, i32 1, i32 2, i64 3)
 ret i64 0
 }

@@ -54,7 +54,7 @@ entry:
 define i64 @property_access1(i8* %obj) nounwind ssp uwtable {
 entry:
 %f = inttoptr i64 12297829382473034410 to i8*
-%ret = call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 1, i32 15, i8* %f, i32 1, i8* %obj)
+%ret = call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 1, i32 15, i8* %f, i32 1, i8* %obj)
 ret i64 %ret
 }

@@ -77,7 +77,7 @@ define i64 @property_access2() nounwind ssp uwtable {
 entry:
 %obj = alloca i64, align 8
 %f = inttoptr i64 12297829382473034410 to i8*
-%ret = call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 2, i32 15, i8* %f, i32 1, i64* %obj)
+%ret = call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 2, i32 15, i8* %f, i32 1, i64* %obj)
 ret i64 %ret
 }

@@ -100,7 +100,7 @@ define i64 @property_access3() nounwind ssp uwtable {
 entry:
 %obj = alloca i64, align 8
 %f = inttoptr i64 12297829382473034410 to i8*
-%ret = call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 3, i32 15, i8* %f, i32 0, i64* %obj)
+%ret = call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 3, i32 15, i8* %f, i32 0, i64* %obj)
 ret i64 %ret
 }

@@ -182,7 +182,7 @@ entry:
 define i64 @anyreg_test1(i8* %a1, i8* %a2, i8* %a3, i8* %a4, i8* %a5, i8* %a6, i8* %a7, i8* %a8, i8* %a9, i8* %a10, i8* %a11, i8* %a12, i8* %a13) nounwind ssp uwtable {
 entry:
 %f = inttoptr i64 12297829382473034410 to i8*
-%ret = call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 4, i32 15, i8* %f, i32 13, i8* %a1, i8* %a2, i8* %a3, i8* %a4, i8* %a5, i8* %a6, i8* %a7, i8* %a8, i8* %a9, i8* %a10, i8* %a11, i8* %a12, i8* %a13)
+%ret = call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 4, i32 15, i8* %f, i32 13, i8* %a1, i8* %a2, i8* %a3, i8* %a4, i8* %a5, i8* %a6, i8* %a7, i8* %a8, i8* %a9, i8* %a10, i8* %a11, i8* %a12, i8* %a13)
 ret i64 %ret
 }

@@ -264,7 +264,7 @@ entry:
 define i64 @anyreg_test2(i8* %a1, i8* %a2, i8* %a3, i8* %a4, i8* %a5, i8* %a6, i8* %a7, i8* %a8, i8* %a9, i8* %a10, i8* %a11, i8* %a12, i8* %a13) nounwind ssp uwtable {
 entry:
 %f = inttoptr i64 12297829382473034410 to i8*
-%ret = call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 5, i32 15, i8* %f, i32 8, i8* %a1, i8* %a2, i8* %a3, i8* %a4, i8* %a5, i8* %a6, i8* %a7, i8* %a8, i8* %a9, i8* %a10, i8* %a11, i8* %a12, i8* %a13)
+%ret = call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 5, i32 15, i8* %f, i32 8, i8* %a1, i8* %a2, i8* %a3, i8* %a4, i8* %a5, i8* %a6, i8* %a7, i8* %a8, i8* %a9, i8* %a10, i8* %a11, i8* %a12, i8* %a13)
 ret i64 %ret
 }

@@ -292,7 +292,7 @@ entry:
 ; CHECK-NEXT: .long 0
 define i64 @patchpoint_spilldef(i64 %p1, i64 %p2, i64 %p3, i64 %p4) {
 entry:
-%result = tail call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 12, i32 15, i8* inttoptr (i64 0 to i8*), i32 2, i64 %p1, i64 %p2)
+%result = tail call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 12, i32 15, i8* inttoptr (i64 0 to i8*), i32 2, i64 %p1, i64 %p2)
 tail call void asm sideeffect "nop", "~{ax},~{bx},~{cx},~{dx},~{bp},~{si},~{di},~{r8},~{r9},~{r10},~{r11},~{r12},~{r13},~{r14},~{r15}"() nounwind
 ret i64 %result
 }
@@ -332,9 +332,9 @@ entry:
 define i64 @patchpoint_spillargs(i64 %p1, i64 %p2, i64 %p3, i64 %p4) {
 entry:
 tail call void asm sideeffect "nop", "~{ax},~{bx},~{cx},~{dx},~{bp},~{si},~{di},~{r8},~{r9},~{r10},~{r11},~{r12},~{r13},~{r14},~{r15}"() nounwind
-%result = tail call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 13, i32 15, i8* inttoptr (i64 0 to i8*), i32 2, i64 %p1, i64 %p2, i64 %p3, i64 %p4)
+%result = tail call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 13, i32 15, i8* inttoptr (i64 0 to i8*), i32 2, i64 %p1, i64 %p2, i64 %p3, i64 %p4)
 ret i64 %result
 }

-declare void @llvm.experimental.patchpoint.void(i32, i32, i8*, i32, ...)
-declare i64 @llvm.experimental.patchpoint.i64(i32, i32, i8*, i32, ...)
+declare void @llvm.experimental.patchpoint.void(i64, i32, i8*, i32, ...)
+declare i64 @llvm.experimental.patchpoint.i64(i64, i32, i8*, i32, ...)

@@ -14,9 +14,9 @@ entry:
 ; CHECK: movq %[[REG]], %rax
 ; CHECK: ret
 %resolveCall2 = inttoptr i64 -559038736 to i8*
-%result = tail call i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 2, i32 15, i8* %resolveCall2, i32 4, i64 %p1, i64 %p2, i64 %p3, i64 %p4)
+%result = tail call i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 2, i32 15, i8* %resolveCall2, i32 4, i64 %p1, i64 %p2, i64 %p3, i64 %p4)
 %resolveCall3 = inttoptr i64 -559038737 to i8*
-tail call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 3, i32 15, i8* %resolveCall3, i32 2, i64 %p1, i64 %result)
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 3, i32 15, i8* %resolveCall3, i32 2, i64 %p1, i64 %result)
 ret i64 %result
 }

@@ -34,7 +34,7 @@ entry:
 store i64 11, i64* %metadata
 store i64 12, i64* %metadata
 store i64 13, i64* %metadata
-call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 4, i32 0, i64* %metadata)
+call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 4, i32 0, i64* %metadata)
 ret void
 }

@@ -53,9 +53,9 @@ entry:
 ; CHECK: movq %rax, 8(%rsp)
 ; CHECK: callq
 %resolveCall2 = inttoptr i64 -559038736 to i8*
-%result = tail call webkit_jscc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 5, i32 15, i8* %resolveCall2, i32 2, i64 %p1, i64 %p2)
+%result = tail call webkit_jscc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 5, i32 15, i8* %resolveCall2, i32 2, i64 %p1, i64 %p2)
 %resolveCall3 = inttoptr i64 -559038737 to i8*
-tail call webkit_jscc void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 6, i32 15, i8* %resolveCall3, i32 2, i64 %p1, i64 %result)
+tail call webkit_jscc void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 6, i32 15, i8* %resolveCall3, i32 2, i64 %p1, i64 %result)
 ret void
 }

@@ -68,14 +68,14 @@ entry:
 %tmp80 = add i64 %tmp79, -16
 %tmp81 = inttoptr i64 %tmp80 to i64*
 %tmp82 = load i64* %tmp81, align 8
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 14, i32 5, i64 %arg, i64 %tmp2, i64 %tmp10, i64 %tmp82)
-tail call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 15, i32 30, i8* null, i32 3, i64 %arg, i64 %tmp10, i64 %tmp82)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 14, i32 5, i64 %arg, i64 %tmp2, i64 %tmp10, i64 %tmp82)
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 15, i32 30, i8* null, i32 3, i64 %arg, i64 %tmp10, i64 %tmp82)
 %tmp83 = load i64* %tmp33, align 8
 %tmp84 = add i64 %tmp83, -24
 %tmp85 = inttoptr i64 %tmp84 to i64*
 %tmp86 = load i64* %tmp85, align 8
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 17, i32 5, i64 %arg, i64 %tmp10, i64 %tmp86)
-tail call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 18, i32 30, i8* null, i32 3, i64 %arg, i64 %tmp10, i64 %tmp86)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 17, i32 5, i64 %arg, i64 %tmp10, i64 %tmp86)
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 18, i32 30, i8* null, i32 3, i64 %arg, i64 %tmp10, i64 %tmp86)
 ret i64 10
 }

@@ -87,10 +87,10 @@ entry:
 ; CHECK: nopl 8(%rax,%rax)
 ; CHECK-NEXT: popq
 ; CHECK-NEXT: ret
-%result = tail call i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 5, i32 5, i8* null, i32 2, i64 %p1, i64 %p2)
+%result = tail call i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 5, i32 5, i8* null, i32 2, i64 %p1, i64 %p2)
 ret void
 }

-declare void @llvm.experimental.stackmap(i32, i32, ...)
-declare void @llvm.experimental.patchpoint.void(i32, i32, i8*, i32, ...)
-declare i64 @llvm.experimental.patchpoint.i64(i32, i32, i8*, i32, ...)
+declare void @llvm.experimental.stackmap(i64, i32, ...)
+declare void @llvm.experimental.patchpoint.void(i64, i32, i8*, i32, ...)
+declare i64 @llvm.experimental.patchpoint.i64(i64, i32, i8*, i32, ...)

@@ -70,16 +70,16 @@
 define void @liveness() {
 entry:
 %a1 = call <2 x double> asm sideeffect "", "={xmm2}"() nounwind
-call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 1, i32 5)
+call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 1, i32 5)
 %a2 = call i64 asm sideeffect "", "={r8}"() nounwind
 %a3 = call i8 asm sideeffect "", "={ah}"() nounwind
 %a4 = call <4 x double> asm sideeffect "", "={ymm0}"() nounwind
 %a5 = call <4 x double> asm sideeffect "", "={ymm1}"() nounwind
-call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 2, i32 5)
+call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 2, i32 5)
 call void asm sideeffect "", "{r8},{ah},{ymm0},{ymm1}"(i64 %a2, i8 %a3, <4 x double> %a4, <4 x double> %a5) nounwind
-call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 3, i32 5)
+call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 3, i32 5)
 call void asm sideeffect "", "{xmm2}"(<2 x double> %a1) nounwind
 ret void
 }

-declare void @llvm.experimental.stackmap(i32, i32, ...)
+declare void @llvm.experimental.stackmap(i64, i32, ...)

@@ -193,38 +193,38 @@ entry:
 ; CHECK-NEXT: .byte 102
 ; CHECK-NEXT: .byte 102
 ; CHECK-NEXT: nopw %cs:512(%rax,%rax)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 0, i32 0)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 1, i32 1)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 2, i32 2)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 3, i32 3)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 4, i32 4)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 5, i32 5)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 6, i32 6)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 7, i32 7)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 8, i32 8)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 9, i32 9)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 10, i32 10)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 11, i32 11)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 12, i32 12)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 13, i32 13)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 14, i32 14)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 15, i32 15)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 16, i32 16)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 17, i32 17)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 18, i32 18)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 19, i32 19)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 20, i32 20)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 21, i32 21)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 22, i32 22)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 23, i32 23)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 24, i32 24)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 25, i32 25)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 26, i32 26)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 27, i32 27)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 28, i32 28)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 29, i32 29)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 30, i32 30)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 0, i32 0)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 1, i32 1)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 2, i32 2)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 3, i32 3)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 4, i32 4)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 5, i32 5)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 6, i32 6)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 7, i32 7)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 8, i32 8)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 9, i32 9)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 10, i32 10)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 11, i32 11)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 12, i32 12)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 13, i32 13)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 14, i32 14)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 15, i32 15)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 16, i32 16)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 17, i32 17)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 18, i32 18)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 19, i32 19)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 20, i32 20)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 21, i32 21)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 22, i32 22)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 23, i32 23)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 24, i32 24)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 25, i32 25)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 26, i32 26)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 27, i32 27)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 28, i32 28)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 29, i32 29)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 30, i32 30)
 ret void
 }

-declare void @llvm.experimental.stackmap(i32, i32, ...)
+declare void @llvm.experimental.stackmap(i64, i32, ...)

@@ -9,11 +9,11 @@
 ; CHECK-NEXT: .long 1
 ; CHECK-NEXT: .quad 4294967296
 ; Num Callsites
-; CHECK-NEXT: .long 14
+; CHECK-NEXT: .long 18

 ; Constant arguments
 ;
-; CHECK-NEXT: .long 1
+; CHECK-NEXT: .quad 1
 ; CHECK-NEXT: .long L{{.*}}-_constantargs
 ; CHECK-NEXT: .short 0
 ; CHECK-NEXT: .short 4
@@ -41,7 +41,7 @@
 define void @constantargs() {
 entry:
 %0 = inttoptr i64 12345 to i8*
-tail call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 1, i32 15, i8* %0, i32 0, i64 65535, i64 65536, i64 4294967295, i64 4294967296)
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 1, i32 15, i8* %0, i32 0, i64 65535, i64 65536, i64 4294967295, i64 4294967296)
 ret void
 }

@@ -63,7 +63,7 @@ entry:
 ; Runtime void->void call.
 call void inttoptr (i64 -559038737 to void ()*)()
 ; Followed by inline OSR patchpoint with 12-byte shadow and 2 live vars.
-call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 3, i32 12, i64 %a, i64 %b)
+call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 3, i32 12, i64 %a, i64 %b)
 ret void
 }

@@ -89,7 +89,7 @@ entry:
 cold:
 ; OSR patchpoint with 12-byte nop-slide and 2 live vars.
 %thunk = inttoptr i64 -559038737 to i8*
-call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 4, i32 15, i8* %thunk, i32 0, i64 %a, i64 %b)
+call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 4, i32 15, i8* %thunk, i32 0, i64 %a, i64 %b)
 unreachable
 ret:
 ret void
@@ -110,7 +110,7 @@ ret:
 define i64 @propertyRead(i64* %obj) {
 entry:
 %resolveRead = inttoptr i64 -559038737 to i8*
-%result = call anyregcc i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 5, i32 15, i8* %resolveRead, i32 1, i64* %obj)
+%result = call anyregcc i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 5, i32 15, i8* %resolveRead, i32 1, i64* %obj)
 %add = add i64 %result, 3
 ret i64 %add
 }
@@ -130,7 +130,7 @@ entry:
 define void @propertyWrite(i64 %dummy1, i64* %obj, i64 %dummy2, i64 %a) {
 entry:
 %resolveWrite = inttoptr i64 -559038737 to i8*
-call anyregcc void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 6, i32 15, i8* %resolveWrite, i32 2, i64* %obj, i64 %a)
+call anyregcc void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 6, i32 15, i8* %resolveWrite, i32 2, i64* %obj, i64 %a)
 ret void
 }

@@ -152,7 +152,7 @@ entry:
 define void @jsVoidCall(i64 %dummy1, i64* %obj, i64 %arg, i64 %l1, i64 %l2) {
 entry:
 %resolveCall = inttoptr i64 -559038737 to i8*
-call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 7, i32 15, i8* %resolveCall, i32 2, i64* %obj, i64 %arg, i64 %l1, i64 %l2)
+call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 7, i32 15, i8* %resolveCall, i32 2, i64* %obj, i64 %arg, i64 %l1, i64 %l2)
 ret void
 }

@@ -174,7 +174,7 @@ entry:
 define i64 @jsIntCall(i64 %dummy1, i64* %obj, i64 %arg, i64 %l1, i64 %l2) {
 entry:
 %resolveCall = inttoptr i64 -559038737 to i8*
-%result = call i64 (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i32 8, i32 15, i8* %resolveCall, i32 2, i64* %obj, i64 %arg, i64 %l1, i64 %l2)
+%result = call i64 (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.i64(i64 8, i32 15, i8* %resolveCall, i32 2, i64* %obj, i64 %arg, i64 %l1, i64 %l2)
 %add = add i64 %result, 3
 ret i64 %add
 }
@@ -194,7 +194,7 @@ entry:
 ; CHECK-NEXT: .short 6
 define void @spilledValue(i64 %arg0, i64 %arg1, i64 %arg2, i64 %arg3, i64 %arg4, i64 %l0, i64 %l1, i64 %l2, i64 %l3, i64 %l4, i64 %l5, i64 %l6, i64 %l7, i64 %l8, i64 %l9, i64 %l10, i64 %l11, i64 %l12, i64 %l13, i64 %l14, i64 %l15, i64 %l16) {
 entry:
-call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 11, i32 15, i8* null, i32 5, i64 %arg0, i64 %arg1, i64 %arg2, i64 %arg3, i64 %arg4, i64 %l0, i64 %l1, i64 %l2, i64 %l3, i64 %l4, i64 %l5, i64 %l6, i64 %l7, i64 %l8, i64 %l9, i64 %l10, i64 %l11, i64 %l12, i64 %l13, i64 %l14, i64 %l15, i64 %l16)
+call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 11, i32 15, i8* null, i32 5, i64 %arg0, i64 %arg1, i64 %arg2, i64 %arg3, i64 %arg4, i64 %l0, i64 %l1, i64 %l2, i64 %l3, i64 %l4, i64 %l5, i64 %l6, i64 %l7, i64 %l8, i64 %l9, i64 %l10, i64 %l11, i64 %l12, i64 %l13, i64 %l14, i64 %l15, i64 %l16)
 ret void
 }

@@ -213,7 +213,7 @@ entry:
 ; CHECK-NEXT: .short 6
 define webkit_jscc void @spilledStackMapValue(i64 %l0, i64 %l1, i64 %l2, i64 %l3, i64 %l4, i64 %l5, i64 %l6, i64 %l7, i64 %l8, i64 %l9, i64 %l10, i64 %l11, i64 %l12, i64 %l13, i64 %l14, i64 %l15, i64 %l16) {
 entry:
-call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 12, i32 15, i64 %l0, i64 %l1, i64 %l2, i64 %l3, i64 %l4, i64 %l5, i64 %l6, i64 %l7, i64 %l8, i64 %l9, i64 %l10, i64 %l11, i64 %l12, i64 %l13, i64 %l14, i64 %l15, i64 %l16)
+call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 12, i32 15, i64 %l0, i64 %l1, i64 %l2, i64 %l3, i64 %l4, i64 %l5, i64 %l6, i64 %l7, i64 %l8, i64 %l9, i64 %l10, i64 %l11, i64 %l12, i64 %l13, i64 %l14, i64 %l15, i64 %l16)
 ret void
 }

@@ -249,7 +249,7 @@ bb17:

 bb60:
 tail call void asm sideeffect "nop", "~{ax},~{bx},~{cx},~{dx},~{bp},~{si},~{di},~{r8},~{r9},~{r10},~{r11},~{r12},~{r13},~{r14},~{r15}"() nounwind
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 13, i32 5, i32 %tmp32)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 13, i32 5, i32 %tmp32)
 unreachable

 bb61:
@@ -283,7 +283,7 @@ define void @subRegOffset(i16 %arg) {
 %arghi = lshr i16 %v, 8
 %a1 = trunc i16 %arghi to i8
 tail call void asm sideeffect "nop", "~{cx},~{dx},~{bp},~{si},~{di},~{r8},~{r9},~{r10},~{r11},~{r12},~{r13},~{r14},~{r15}"() nounwind
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 14, i32 5, i8 %a0, i8 %a1)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 14, i32 5, i8 %a0, i8 %a1)
 ret void
 }

@@ -300,7 +300,7 @@ define void @subRegOffset(i16 %arg) {
 ; CHECK-NEXT: .long 33

 define void @liveConstant() {
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 15, i32 5, i32 33)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 15, i32 5, i32 33)
 ret void
 }

@@ -338,13 +338,32 @@ entry:
 store i64 11, i64* %metadata1
 store i64 12, i64* %metadata1
 store i64 13, i64* %metadata1
-call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 16, i32 0, i64* %metadata1)
+call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 16, i32 0, i64* %metadata1)
 %metadata2 = alloca i8, i32 4, align 8
 %metadata3 = alloca i16, i32 4, align 8
-call void (i32, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i32 17, i32 5, i8* null, i32 0, i8* %metadata2, i16* %metadata3)
+call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 17, i32 5, i8* null, i32 0, i8* %metadata2, i16* %metadata3)
 ret void
 }

-declare void @llvm.experimental.stackmap(i32, i32, ...)
-declare void @llvm.experimental.patchpoint.void(i32, i32, i8*, i32, ...)
-declare i64 @llvm.experimental.patchpoint.i64(i32, i32, i8*, i32, ...)
+; Test a 64-bit ID.
+;
+; CHECK: .quad 4294967295
+; CHECK-LABEL: .long L{{.*}}-_longid
+; CHECK: .quad 4294967296
+; CHECK-LABEL: .long L{{.*}}-_longid
+; CHECK: .quad 9223372036854775807
+; CHECK-LABEL: .long L{{.*}}-_longid
+; CHECK: .quad -1
+; CHECK-LABEL: .long L{{.*}}-_longid
+define void @longid() {
+entry:
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 4294967295, i32 0, i8* null, i32 0)
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 4294967296, i32 0, i8* null, i32 0)
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 9223372036854775807, i32 0, i8* null, i32 0)
+tail call void (i64, i32, i8*, i32, ...)* @llvm.experimental.patchpoint.void(i64 -1, i32 0, i8* null, i32 0)
+ret void
+}
+
+declare void @llvm.experimental.stackmap(i64, i32, ...)
+declare void @llvm.experimental.patchpoint.void(i64, i32, i8*, i32, ...)
+declare i64 @llvm.experimental.patchpoint.i64(i64, i32, i8*, i32, ...)

@@ -25,23 +25,23 @@ entry:
 ; CHECK: 7c: 5d
 ; CHECK: 7d: c3

-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 0, i32 0)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 1, i32 1)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 2, i32 2)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 3, i32 3)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 4, i32 4)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 5, i32 5)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 6, i32 6)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 7, i32 7)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 8, i32 8)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 9, i32 9)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 10, i32 10)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 11, i32 11)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 12, i32 12)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 13, i32 13)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 14, i32 14)
-tail call void (i32, i32, ...)* @llvm.experimental.stackmap(i32 15, i32 15)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 0, i32 0)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 1, i32 1)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 2, i32 2)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 3, i32 3)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 4, i32 4)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 5, i32 5)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 6, i32 6)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 7, i32 7)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 8, i32 8)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 9, i32 9)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 10, i32 10)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 11, i32 11)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 12, i32 12)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 13, i32 13)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 14, i32 14)
+tail call void (i64, i32, ...)* @llvm.experimental.stackmap(i64 15, i32 15)
 ret void
 }

-declare void @llvm.experimental.stackmap(i32, i32, ...)
+declare void @llvm.experimental.stackmap(i64, i32, ...)

@@ -1,16 +1,16 @@
 ; RUN: not llvm-as < %s -o /dev/null 2>&1 | FileCheck %s

-declare void @llvm.experimental.stackmap(i32, i32)
+declare void @llvm.experimental.stackmap(i64, i32)
 declare void @llvm.donothing(...)

 define void @foo1() {
-call void @llvm.experimental.stackmap(i32 0, i32 12)
+call void @llvm.experimental.stackmap(i64 0, i32 12)
 ; CHECK: Callsite was not defined with variable arguments!
 ret void
 }

 define void @foo2() {
-call void (...)* @llvm.donothing(i32 0, i64 1)
+call void (...)* @llvm.donothing(i64 0, i64 1)
 ; CHECK: Intrinsic was not defined with variable arguments!
 ret void
 }