Reformat.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@211689 91177308-0d34-0410-b5e6-96231b3b80d8
@@ -457,7 +457,7 @@ unsigned MachineFunction::addLiveIn(unsigned PReg,
 /// getJTISymbol - Return the MCSymbol for the specified non-empty jump table.
 /// If isLinkerPrivate is specified, an 'l' label is returned, otherwise a
 /// normal 'L' label is returned.
 MCSymbol *MachineFunction::getJTISymbol(unsigned JTI, MCContext &Ctx,
                                         bool isLinkerPrivate) const {
   const DataLayout *DL = getTarget().getDataLayout();
   assert(JumpTableInfo && "No jump tables");
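
The doc comment in this hunk is the only place the diff spells out what isLinkerPrivate changes: the prefix of the emitted jump-table label ('l' labels survive into the object file but stay linker-private, 'L' labels are assembler-temporary). As a standalone illustration only — the helper below and the exact "JTI<function>_<index>" layout are assumptions for this sketch, not part of the commit — the choice boils down to:

// Standalone sketch, not LLVM API: how the 'l' vs. 'L' prefix described in the
// doc comment above could be applied when composing a jump-table label name.
#include <cstdio>
#include <string>

static std::string jumpTableLabel(unsigned FnNum, unsigned JTI,
                                  bool IsLinkerPrivate) {
  // 'l' labels remain linker-private but reach the object file;
  // 'L' labels are assembler-temporary and never reach it.
  const char *Prefix = IsLinkerPrivate ? "l" : "L";
  return std::string(Prefix) + "JTI" + std::to_string(FnNum) + "_" +
         std::to_string(JTI);
}

int main() {
  std::printf("%s\n", jumpTableLabel(0, 2, /*IsLinkerPrivate=*/false).c_str()); // LJTI0_2
  std::printf("%s\n", jumpTableLabel(0, 2, /*IsLinkerPrivate=*/true).c_str());  // lJTI0_2
  return 0;
}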
@@ -530,10 +530,9 @@ int MachineFrameInfo::CreateStackObject(uint64_t Size, unsigned Alignment,
 ///
 int MachineFrameInfo::CreateSpillStackObject(uint64_t Size,
                                              unsigned Alignment) {
-  Alignment =
-      clampStackAlignment(!getFrameLowering()->isStackRealignable() ||
-                              !RealignOption,
-                          Alignment, getFrameLowering()->getStackAlignment());
+  Alignment = clampStackAlignment(
+      !getFrameLowering()->isStackRealignable() || !RealignOption, Alignment,
+      getFrameLowering()->getStackAlignment());
   CreateStackObject(Size, Alignment, true);
   int Index = (int)Objects.size() - NumFixedObjects - 1;
   ensureMaxAlignment(Alignment);
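
This hunk and the two that follow reflow the same clampStackAlignment call, so it is worth spelling out the rule that call encodes: when the frame cannot be realigned (or realignment is disabled), an alignment request larger than the guaranteed stack alignment is capped. A minimal sketch of that rule — the body below is an assumption about the helper's behavior, not code copied from LLVM:

// Minimal sketch of the clamping rule the call above relies on (assumed behavior).
#include <cstdio>

static unsigned clampStackAlignment(bool ShouldClamp, unsigned Align,
                                    unsigned StackAlign) {
  // Either realignment is allowed or the request already fits: keep it.
  if (!ShouldClamp || Align <= StackAlign)
    return Align;
  // Otherwise cap the request at what the ABI guarantees.
  return StackAlign;
}

int main() {
  std::printf("%u\n", clampStackAlignment(true, 32, 16));  // capped to 16
  std::printf("%u\n", clampStackAlignment(false, 32, 16)); // kept at 32
  return 0;
}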
@@ -548,10 +547,9 @@ int MachineFrameInfo::CreateSpillStackObject(uint64_t Size,
 int MachineFrameInfo::CreateVariableSizedObject(unsigned Alignment,
                                                 const AllocaInst *Alloca) {
   HasVarSizedObjects = true;
-  Alignment =
-      clampStackAlignment(!getFrameLowering()->isStackRealignable() ||
-                              !RealignOption,
-                          Alignment, getFrameLowering()->getStackAlignment());
+  Alignment = clampStackAlignment(
+      !getFrameLowering()->isStackRealignable() || !RealignOption, Alignment,
+      getFrameLowering()->getStackAlignment());
   Objects.push_back(StackObject(0, Alignment, 0, false, false, Alloca));
   ensureMaxAlignment(Alignment);
   return (int)Objects.size()-NumFixedObjects-1;
@@ -571,10 +569,9 @@ int MachineFrameInfo::CreateFixedObject(uint64_t Size, int64_t SPOffset,
   // object is 16-byte aligned.
   unsigned StackAlign = getFrameLowering()->getStackAlignment();
   unsigned Align = MinAlign(SPOffset, StackAlign);
-  Align =
-      clampStackAlignment(!getFrameLowering()->isStackRealignable() ||
-                              !RealignOption,
-                          Align, getFrameLowering()->getStackAlignment());
+  Align = clampStackAlignment(!getFrameLowering()->isStackRealignable() ||
+                                  !RealignOption,
+                              Align, getFrameLowering()->getStackAlignment());
   Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset, Immutable,
                                               /*isSS*/ false,
                                               /*Alloca*/ nullptr));
@@ -849,11 +846,10 @@ static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
   if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) ||
       isa<StructType>(B->getType()) || isa<ArrayType>(B->getType()))
     return false;
 
   // For now, only support constants with the same size.
   uint64_t StoreSize = TD->getTypeStoreSize(A->getType());
-  if (StoreSize != TD->getTypeStoreSize(B->getType()) ||
-      StoreSize > 128)
+  if (StoreSize != TD->getTypeStoreSize(B->getType()) || StoreSize > 128)
     return false;
 
   Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8);
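
The hunk above only joins a wrapped condition onto one line, but the condition itself carries the interesting rule: two constants are only candidates for sharing a pool entry when their store sizes match and stay at or below 128 bytes, after which they are compared through an integer type of StoreSize*8 bits. A standalone restatement of that gate (a paraphrase for illustration, not the LLVM routine):

// Standalone restatement of the size gate in CanShareConstantPoolEntry above.
#include <cstdint>
#include <cstdio>

static bool sizesAllowSharing(uint64_t StoreSizeA, uint64_t StoreSizeB) {
  // Unequal store sizes, or anything wider than 128 bytes, is rejected before
  // the more expensive bit-level comparison is attempted.
  return StoreSizeA == StoreSizeB && StoreSizeA <= 128;
}

int main() {
  std::printf("%d\n", sizesAllowSharing(16, 16));   // 1: same small size
  std::printf("%d\n", sizesAllowSharing(16, 32));   // 0: sizes differ
  std::printf("%d\n", sizesAllowSharing(256, 256)); // 0: too wide to compare
  return 0;
}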
@@ -882,7 +878,7 @@ static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
 /// an existing one. User must specify the log2 of the minimum required
 /// alignment for the object.
 ///
 unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C,
                                                    unsigned Alignment) {
   assert(Alignment && "Alignment must be specified!");
   if (Alignment > PoolAlignment) PoolAlignment = Alignment;
@@ -997,9 +997,9 @@ bool X86FrameLowering::spillCalleeSavedRegisters(MachineBasicBlock &MBB,
   // Push GPRs. It increases frame size.
   unsigned Opc = STI.is64Bit() ? X86::PUSH64r : X86::PUSH32r;
   for (unsigned i = CSI.size(); i != 0; --i) {
-    unsigned Reg = CSI[i-1].getReg();
-    if (!X86::GR64RegClass.contains(Reg) &&
-        !X86::GR32RegClass.contains(Reg))
+    unsigned Reg = CSI[i - 1].getReg();
+    if (!X86::GR64RegClass.contains(Reg) && !X86::GR32RegClass.contains(Reg))
       continue;
     // Add the callee-saved register as live-in. It's killed at the spill.
     MBB.addLiveIn(Reg);
@@ -1024,8 +1024,9 @@ bool X86FrameLowering::spillCalleeSavedRegisters(MachineBasicBlock &MBB,
     // Add the callee-saved register as live-in. It's killed at the spill.
     MBB.addLiveIn(Reg);
     const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(Reg);
-    TII.storeRegToStackSlot(MBB, MI, Reg, true, CSI[i-1].getFrameIdx(),
-                            RC, TRI);
+    TII.storeRegToStackSlot(MBB, MI, Reg, true, CSI[i - 1].getFrameIdx(), RC,
+                            TRI);
   }
 
   return true;
@@ -1050,9 +1051,9 @@ bool X86FrameLowering::restoreCalleeSavedRegisters(MachineBasicBlock &MBB,
     if (X86::GR64RegClass.contains(Reg) ||
         X86::GR32RegClass.contains(Reg))
       continue;
 
     const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(Reg);
-    TII.loadRegFromStackSlot(MBB, MI, Reg, CSI[i].getFrameIdx(),
-                             RC, TRI);
+    TII.loadRegFromStackSlot(MBB, MI, Reg, CSI[i].getFrameIdx(), RC, TRI);
   }
 
   // POP GPRs.
@@ -110,7 +110,7 @@ let Defs = [EAX, ESP, EFLAGS], Uses = [ESP] in
 
 // When using segmented stacks these are lowered into instructions which first
 // check if the current stacklet has enough free memory. If it does, memory is
 // allocated by bumping the stack pointer. Otherwise memory is allocated from
 // the heap.
 
 let Defs = [EAX, ESP, EFLAGS], Uses = [ESP] in
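
The comment in this hunk describes the segmented-stack lowering in prose; the sketch below restates the two paths in ordinary C++ so they are explicit. It is a toy model only: the Stacklet/StackPointer/StackletLimit names are invented here, and the real lowering emits a compare, a conditional branch, and a runtime call rather than anything like this function.

// Toy model of the segmented-stack allocation policy described above.
#include <cstdint>
#include <cstdlib>

static unsigned char Stacklet[4096];                 // assumed: one stacklet
static uintptr_t StackPointer =
    reinterpret_cast<uintptr_t>(Stacklet) + sizeof(Stacklet);
static const uintptr_t StackletLimit = reinterpret_cast<uintptr_t>(Stacklet);

void *allocateOnSegmentedStack(std::size_t Size) {
  if (StackPointer - Size >= StackletLimit) {
    StackPointer -= Size;          // enough free memory: bump the stack pointer
    return reinterpret_cast<void *>(StackPointer);
  }
  return std::malloc(Size);        // stacklet exhausted: allocate from the heap
}

int main() {
  void *P = allocateOnSegmentedStack(128);      // fits in the stacklet
  void *Q = allocateOnSegmentedStack(1 << 20);  // too large: falls back to malloc
  std::free(Q);                                 // only the heap path needs freeing
  return (P != nullptr && Q != nullptr) ? 0 : 1;
}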
@@ -371,7 +371,7 @@ let Defs = [RCX,RDI], isCodeGenOnly = 1 in {
 def REP_STOSD_64 : I<0xAB, RawFrm, (outs), (ins), "{rep;stosl|rep stosd}",
                      [(X86rep_stos i32)], IIC_REP_STOS>, REP, OpSize32,
                      Requires<[In64BitMode]>;
 
 let Uses = [RAX,RCX,RDI] in
 def REP_STOSQ_64 : RI<0xAB, RawFrm, (outs), (ins), "{rep;stosq|rep stosq}",
                       [(X86rep_stos i64)], IIC_REP_STOS>, REP,