mirror of
https://github.com/c64scene-ar/llvm-6502.git
synced 2024-12-13 20:32:21 +00:00
Make the need-stub variables accurate and consistent. In the case of
MachineRelocations, "stub" always refers to a far-call stub or a load-a-faraway-global stub, so this patch adds "Far" to the term. (Other stubs are used for lazy compilation and dlsym address replacement.) The variable was also inconsistent between the positive and negative sense, and the positive sense ("NeedStub") was more demanding than is accurate (since a nearby-enough function can be called directly even if the platform often requires a stub). Since the negative sense causes double-negatives, I switched to "MayNeedFarStub" globally. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@86363 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
parent
761411c21b
commit
2d274412ed
@ -65,7 +65,7 @@ class MachineRelocation {
|
||||
|
||||
unsigned TargetReloType : 6; // The target relocation ID
|
||||
AddressType AddrType : 4; // The field of Target to use
|
||||
bool NeedStub : 1; // True if this relocation requires a stub
|
||||
bool MayNeedFarStub : 1; // True if this relocation may require a far-stub
|
||||
bool GOTRelative : 1; // Should this relocation be relative to the GOT?
|
||||
bool TargetResolve : 1; // True if target should resolve the address
|
||||
|
||||
@ -81,7 +81,7 @@ public:
|
||||
///
|
||||
static MachineRelocation getGV(uintptr_t offset, unsigned RelocationType,
|
||||
GlobalValue *GV, intptr_t cst = 0,
|
||||
bool NeedStub = 0,
|
||||
bool MayNeedFarStub = 0,
|
||||
bool GOTrelative = 0) {
|
||||
assert((RelocationType & ~63) == 0 && "Relocation type too large!");
|
||||
MachineRelocation Result;
|
||||
@ -89,7 +89,7 @@ public:
|
||||
Result.ConstantVal = cst;
|
||||
Result.TargetReloType = RelocationType;
|
||||
Result.AddrType = isGV;
|
||||
Result.NeedStub = NeedStub;
|
||||
Result.MayNeedFarStub = MayNeedFarStub;
|
||||
Result.GOTRelative = GOTrelative;
|
||||
Result.TargetResolve = false;
|
||||
Result.Target.GV = GV;
|
||||
@ -101,7 +101,7 @@ public:
|
||||
static MachineRelocation getIndirectSymbol(uintptr_t offset,
|
||||
unsigned RelocationType,
|
||||
GlobalValue *GV, intptr_t cst = 0,
|
||||
bool NeedStub = 0,
|
||||
bool MayNeedFarStub = 0,
|
||||
bool GOTrelative = 0) {
|
||||
assert((RelocationType & ~63) == 0 && "Relocation type too large!");
|
||||
MachineRelocation Result;
|
||||
@ -109,7 +109,7 @@ public:
|
||||
Result.ConstantVal = cst;
|
||||
Result.TargetReloType = RelocationType;
|
||||
Result.AddrType = isIndirectSym;
|
||||
Result.NeedStub = NeedStub;
|
||||
Result.MayNeedFarStub = MayNeedFarStub;
|
||||
Result.GOTRelative = GOTrelative;
|
||||
Result.TargetResolve = false;
|
||||
Result.Target.GV = GV;
|
||||
@ -126,7 +126,7 @@ public:
|
||||
Result.ConstantVal = cst;
|
||||
Result.TargetReloType = RelocationType;
|
||||
Result.AddrType = isBB;
|
||||
Result.NeedStub = false;
|
||||
Result.MayNeedFarStub = false;
|
||||
Result.GOTRelative = false;
|
||||
Result.TargetResolve = false;
|
||||
Result.Target.MBB = MBB;
|
||||
@ -145,7 +145,7 @@ public:
|
||||
Result.ConstantVal = cst;
|
||||
Result.TargetReloType = RelocationType;
|
||||
Result.AddrType = isExtSym;
|
||||
Result.NeedStub = true;
|
||||
Result.MayNeedFarStub = true;
|
||||
Result.GOTRelative = GOTrelative;
|
||||
Result.TargetResolve = false;
|
||||
Result.Target.ExtSym = ES;
|
||||
@ -164,7 +164,7 @@ public:
|
||||
Result.ConstantVal = cst;
|
||||
Result.TargetReloType = RelocationType;
|
||||
Result.AddrType = isConstPool;
|
||||
Result.NeedStub = false;
|
||||
Result.MayNeedFarStub = false;
|
||||
Result.GOTRelative = false;
|
||||
Result.TargetResolve = letTargetResolve;
|
||||
Result.Target.Index = CPI;
|
||||
@ -183,7 +183,7 @@ public:
|
||||
Result.ConstantVal = cst;
|
||||
Result.TargetReloType = RelocationType;
|
||||
Result.AddrType = isJumpTable;
|
||||
Result.NeedStub = false;
|
||||
Result.MayNeedFarStub = false;
|
||||
Result.GOTRelative = false;
|
||||
Result.TargetResolve = letTargetResolve;
|
||||
Result.Target.Index = JTI;
|
||||
@ -258,12 +258,14 @@ public:
|
||||
return GOTRelative;
|
||||
}
|
||||
|
||||
/// doesntNeedStub - This function returns true if the JIT for this target
|
||||
/// is capable of directly handling the relocated GlobalValue reference
|
||||
/// without using either a stub function or issuing an extra load to get the
|
||||
/// GV address.
|
||||
bool doesntNeedStub() const {
|
||||
return !NeedStub;
|
||||
/// mayNeedFarStub - This function returns true if the JIT for this target may
|
||||
/// need either a stub function or an indirect global-variable load to handle
|
||||
/// the relocated GlobalValue reference. For example, the x86-64 call
|
||||
/// instruction can only call functions within +/-2GB of the call site.
|
||||
/// Anything farther away needs a longer mov+call sequence, which can't just
|
||||
/// be written on top of the existing call.
|
||||
bool mayNeedFarStub() const {
|
||||
return MayNeedFarStub;
|
||||
}
|
||||
|
||||
/// letTargetResolve - Return true if the target JITInfo is usually
|
||||
|
@ -491,9 +491,9 @@ namespace {
|
||||
JITMemoryManager *getMemMgr() const { return MemMgr; }
|
||||
|
||||
private:
|
||||
void *getPointerToGlobal(GlobalValue *GV, void *Reference, bool NoNeedStub);
|
||||
void *getPointerToGVIndirectSym(GlobalValue *V, void *Reference,
|
||||
bool NoNeedStub);
|
||||
void *getPointerToGlobal(GlobalValue *GV, void *Reference,
|
||||
bool MayNeedFarStub);
|
||||
void *getPointerToGVIndirectSym(GlobalValue *V, void *Reference);
|
||||
unsigned addSizeOfGlobal(const GlobalVariable *GV, unsigned Size);
|
||||
unsigned addSizeOfGlobalsInConstantVal(const Constant *C, unsigned Size);
|
||||
unsigned addSizeOfGlobalsInInitializer(const Constant *Init, unsigned Size);
|
||||
@ -737,7 +737,7 @@ void *JITResolver::JITCompilerFn(void *Stub) {
|
||||
// JITEmitter code.
|
||||
//
|
||||
void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
|
||||
bool DoesntNeedStub) {
|
||||
bool MayNeedFarStub) {
|
||||
if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
|
||||
return TheJIT->getOrEmitGlobalVariable(GV);
|
||||
|
||||
@ -747,7 +747,7 @@ void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
|
||||
// If we have already compiled the function, return a pointer to its body.
|
||||
Function *F = cast<Function>(V);
|
||||
void *ResultPtr;
|
||||
if (!DoesntNeedStub) {
|
||||
if (MayNeedFarStub) {
|
||||
// Return the function stub if it's already created.
|
||||
ResultPtr = Resolver.getFunctionStubIfAvailable(F);
|
||||
if (ResultPtr)
|
||||
@ -761,14 +761,14 @@ void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
|
||||
// 'compile' it, which really just adds it to the map. In dlsym mode,
|
||||
// external functions are forced through a stub, regardless of reloc type.
|
||||
if (F->isDeclaration() && !F->hasNotBeenReadFromBitcode() &&
|
||||
DoesntNeedStub && !TheJIT->areDlsymStubsEnabled())
|
||||
!MayNeedFarStub && !TheJIT->areDlsymStubsEnabled())
|
||||
return TheJIT->getPointerToFunction(F);
|
||||
|
||||
// Okay, the function has not been compiled yet, if the target callback
|
||||
// mechanism is capable of rewriting the instruction directly, prefer to do
|
||||
// that instead of emitting a stub. This uses the lazy resolver, so is not
|
||||
// legal if lazy compilation is disabled.
|
||||
if (DoesntNeedStub && TheJIT->isCompilingLazily())
|
||||
if (!MayNeedFarStub && TheJIT->isCompilingLazily())
|
||||
return Resolver.AddCallbackAtLocation(F, Reference);
|
||||
|
||||
// Otherwise, we have to emit a stub.
|
||||
@ -784,11 +784,10 @@ void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
|
||||
return StubAddr;
|
||||
}
|
||||
|
||||
void *JITEmitter::getPointerToGVIndirectSym(GlobalValue *V, void *Reference,
|
||||
bool NoNeedStub) {
|
||||
void *JITEmitter::getPointerToGVIndirectSym(GlobalValue *V, void *Reference) {
|
||||
// Make sure GV is emitted first, and create a stub containing the fully
|
||||
// resolved address.
|
||||
void *GVAddress = getPointerToGlobal(V, Reference, true);
|
||||
void *GVAddress = getPointerToGlobal(V, Reference, false);
|
||||
void *StubAddr = Resolver.getGlobalValueIndirectSym(V, GVAddress);
|
||||
|
||||
// Add the stub to the current function's list of referenced stubs, so we can
|
||||
@ -1112,7 +1111,7 @@ bool JITEmitter::finishFunction(MachineFunction &F) {
|
||||
<< ResultPtr << "]\n");
|
||||
|
||||
// If the target REALLY wants a stub for this function, emit it now.
|
||||
if (!MR.doesntNeedStub()) {
|
||||
if (MR.mayNeedFarStub()) {
|
||||
if (!TheJIT->areDlsymStubsEnabled()) {
|
||||
ResultPtr = Resolver.getExternalFunctionStub(ResultPtr);
|
||||
} else {
|
||||
@ -1127,11 +1126,10 @@ bool JITEmitter::finishFunction(MachineFunction &F) {
|
||||
} else if (MR.isGlobalValue()) {
|
||||
ResultPtr = getPointerToGlobal(MR.getGlobalValue(),
|
||||
BufferBegin+MR.getMachineCodeOffset(),
|
||||
MR.doesntNeedStub());
|
||||
MR.mayNeedFarStub());
|
||||
} else if (MR.isIndirectSymbol()) {
|
||||
ResultPtr = getPointerToGVIndirectSym(MR.getGlobalValue(),
|
||||
BufferBegin+MR.getMachineCodeOffset(),
|
||||
MR.doesntNeedStub());
|
||||
ResultPtr = getPointerToGVIndirectSym(
|
||||
MR.getGlobalValue(), BufferBegin+MR.getMachineCodeOffset());
|
||||
} else if (MR.isBasicBlock()) {
|
||||
ResultPtr = (void*)getMachineBasicBlockAddress(MR.getBasicBlock());
|
||||
} else if (MR.isConstantPoolIndex()) {
|
||||
|
@ -168,7 +168,8 @@ namespace {
|
||||
/// Routines that handle operands which add machine relocations which are
|
||||
/// fixed up by the relocation stage.
|
||||
void emitGlobalAddress(GlobalValue *GV, unsigned Reloc,
|
||||
bool NeedStub, bool Indirect, intptr_t ACPV = 0);
|
||||
bool MayNeedFarStub, bool Indirect,
|
||||
intptr_t ACPV = 0);
|
||||
void emitExternalSymbolAddress(const char *ES, unsigned Reloc);
|
||||
void emitConstPoolAddress(unsigned CPI, unsigned Reloc);
|
||||
void emitJumpTableAddress(unsigned JTIndex, unsigned Reloc);
|
||||
@ -277,13 +278,13 @@ unsigned Emitter<CodeEmitter>::getMachineOpValue(const MachineInstr &MI,
|
||||
///
|
||||
template<class CodeEmitter>
|
||||
void Emitter<CodeEmitter>::emitGlobalAddress(GlobalValue *GV, unsigned Reloc,
|
||||
bool NeedStub, bool Indirect,
|
||||
bool MayNeedFarStub, bool Indirect,
|
||||
intptr_t ACPV) {
|
||||
MachineRelocation MR = Indirect
|
||||
? MachineRelocation::getIndirectSymbol(MCE.getCurrentPCOffset(), Reloc,
|
||||
GV, ACPV, NeedStub)
|
||||
GV, ACPV, MayNeedFarStub)
|
||||
: MachineRelocation::getGV(MCE.getCurrentPCOffset(), Reloc,
|
||||
GV, ACPV, NeedStub);
|
||||
GV, ACPV, MayNeedFarStub);
|
||||
MCE.addRelocation(MR);
|
||||
}
|
||||
|
||||
|
@ -82,7 +82,7 @@ namespace {
|
||||
void emitPCRelativeBlockAddress(MachineBasicBlock *MBB);
|
||||
void emitGlobalAddress(GlobalValue *GV, unsigned Reloc,
|
||||
intptr_t Disp = 0, intptr_t PCAdj = 0,
|
||||
bool NeedStub = false, bool Indirect = false);
|
||||
bool MayNeedFarStub = false, bool Indirect = false);
|
||||
void emitExternalSymbolAddress(const char *ES, unsigned Reloc);
|
||||
void emitConstPoolAddress(unsigned CPI, unsigned Reloc, intptr_t Disp = 0,
|
||||
intptr_t PCAdj = 0);
|
||||
@ -176,7 +176,7 @@ template<class CodeEmitter>
|
||||
void Emitter<CodeEmitter>::emitGlobalAddress(GlobalValue *GV, unsigned Reloc,
|
||||
intptr_t Disp /* = 0 */,
|
||||
intptr_t PCAdj /* = 0 */,
|
||||
bool NeedStub /* = false */,
|
||||
bool MayNeedFarStub /* = false */,
|
||||
bool Indirect /* = false */) {
|
||||
intptr_t RelocCST = Disp;
|
||||
if (Reloc == X86::reloc_picrel_word)
|
||||
@ -185,9 +185,9 @@ void Emitter<CodeEmitter>::emitGlobalAddress(GlobalValue *GV, unsigned Reloc,
|
||||
RelocCST = PCAdj;
|
||||
MachineRelocation MR = Indirect
|
||||
? MachineRelocation::getIndirectSymbol(MCE.getCurrentPCOffset(), Reloc,
|
||||
GV, RelocCST, NeedStub)
|
||||
GV, RelocCST, MayNeedFarStub)
|
||||
: MachineRelocation::getGV(MCE.getCurrentPCOffset(), Reloc,
|
||||
GV, RelocCST, NeedStub);
|
||||
GV, RelocCST, MayNeedFarStub);
|
||||
MCE.addRelocation(MR);
|
||||
// The relocated value will be added to the displacement
|
||||
if (Reloc == X86::reloc_absolute_dword)
|
||||
@ -333,10 +333,10 @@ void Emitter<CodeEmitter>::emitDisplacementField(const MachineOperand *RelocOp,
|
||||
// do it, otherwise fallback to absolute (this is determined by IsPCRel).
|
||||
// 89 05 00 00 00 00 mov %eax,0(%rip) # PC-relative
|
||||
// 89 04 25 00 00 00 00 mov %eax,0x0 # Absolute
|
||||
bool NeedStub = isa<Function>(RelocOp->getGlobal());
|
||||
bool MayNeedFarStub = isa<Function>(RelocOp->getGlobal());
|
||||
bool Indirect = gvNeedsNonLazyPtr(*RelocOp, TM);
|
||||
emitGlobalAddress(RelocOp->getGlobal(), RelocType, RelocOp->getOffset(),
|
||||
Adj, NeedStub, Indirect);
|
||||
Adj, MayNeedFarStub, Indirect);
|
||||
} else if (RelocOp->isSymbol()) {
|
||||
emitExternalSymbolAddress(RelocOp->getSymbolName(), RelocType);
|
||||
} else if (RelocOp->isCPI()) {
|
||||
@ -634,13 +634,13 @@ void Emitter<CodeEmitter>::emitInstruction(const MachineInstr &MI,
|
||||
|
||||
if (MO.isGlobal()) {
|
||||
// Assume undefined functions may be outside the Small codespace.
|
||||
bool NeedStub =
|
||||
bool MayNeedFarStub =
|
||||
(Is64BitMode &&
|
||||
(TM.getCodeModel() == CodeModel::Large ||
|
||||
TM.getSubtarget<X86Subtarget>().isTargetDarwin())) ||
|
||||
Opcode == X86::TAILJMPd;
|
||||
emitGlobalAddress(MO.getGlobal(), X86::reloc_pcrel_word,
|
||||
MO.getOffset(), 0, NeedStub);
|
||||
MO.getOffset(), 0, MayNeedFarStub);
|
||||
break;
|
||||
}
|
||||
|
||||
@ -681,10 +681,10 @@ void Emitter<CodeEmitter>::emitInstruction(const MachineInstr &MI,
|
||||
if (Opcode == X86::MOV64ri)
|
||||
rt = X86::reloc_absolute_dword; // FIXME: add X86II flag?
|
||||
if (MO1.isGlobal()) {
|
||||
bool NeedStub = isa<Function>(MO1.getGlobal());
|
||||
bool MayNeedFarStub = isa<Function>(MO1.getGlobal());
|
||||
bool Indirect = gvNeedsNonLazyPtr(MO1, TM);
|
||||
emitGlobalAddress(MO1.getGlobal(), rt, MO1.getOffset(), 0,
|
||||
NeedStub, Indirect);
|
||||
MayNeedFarStub, Indirect);
|
||||
} else if (MO1.isSymbol())
|
||||
emitExternalSymbolAddress(MO1.getSymbolName(), rt);
|
||||
else if (MO1.isCPI())
|
||||
@ -790,10 +790,10 @@ void Emitter<CodeEmitter>::emitInstruction(const MachineInstr &MI,
|
||||
if (Opcode == X86::MOV64ri32)
|
||||
rt = X86::reloc_absolute_word_sext; // FIXME: add X86II flag?
|
||||
if (MO1.isGlobal()) {
|
||||
bool NeedStub = isa<Function>(MO1.getGlobal());
|
||||
bool MayNeedFarStub = isa<Function>(MO1.getGlobal());
|
||||
bool Indirect = gvNeedsNonLazyPtr(MO1, TM);
|
||||
emitGlobalAddress(MO1.getGlobal(), rt, MO1.getOffset(), 0,
|
||||
NeedStub, Indirect);
|
||||
MayNeedFarStub, Indirect);
|
||||
} else if (MO1.isSymbol())
|
||||
emitExternalSymbolAddress(MO1.getSymbolName(), rt);
|
||||
else if (MO1.isCPI())
|
||||
@ -831,10 +831,10 @@ void Emitter<CodeEmitter>::emitInstruction(const MachineInstr &MI,
|
||||
if (Opcode == X86::MOV64mi32)
|
||||
rt = X86::reloc_absolute_word_sext; // FIXME: add X86II flag?
|
||||
if (MO.isGlobal()) {
|
||||
bool NeedStub = isa<Function>(MO.getGlobal());
|
||||
bool MayNeedFarStub = isa<Function>(MO.getGlobal());
|
||||
bool Indirect = gvNeedsNonLazyPtr(MO, TM);
|
||||
emitGlobalAddress(MO.getGlobal(), rt, MO.getOffset(), 0,
|
||||
NeedStub, Indirect);
|
||||
MayNeedFarStub, Indirect);
|
||||
} else if (MO.isSymbol())
|
||||
emitExternalSymbolAddress(MO.getSymbolName(), rt);
|
||||
else if (MO.isCPI())
|
||||
|
Loading…
Reference in New Issue
Block a user