Z80: Various optimizations for stack variables

Repository: https://github.com/KarolS/millfork.git
parent 9bcaffaa30
commit 43c044c80c
@@ -7,11 +7,13 @@ import scala.collection.mutable.ListBuffer
   */
 object NonOverlappingIntervals {
   def apply[T](intervals: Iterable[T], start: T => Int, end: T => Int): Seq[Set[T]] = {
+    def nonEmpty(interval: T): Boolean = start(interval) != end(interval)
+
     val builder = ListBuffer[Set[T]]()

     def scan(set: Set[T], lastEnd: Int): Unit = {
       builder += set
-      for (interval <- intervals) {
+      for (interval <- intervals if nonEmpty(interval)) {
         if (start(interval) >= lastEnd) {
           scan(set + interval, end(interval))
         }
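For context, NonOverlappingIntervals enumerates candidate sets of mutually non-overlapping intervals, and the new nonEmpty guard makes it ignore zero-length intervals. A minimal usage sketch, not part of the commit (it assumes the object above is in scope and uses made-up interval data):

// Intervals are given as (start, end) pairs; (5, 5) is empty and is now skipped.
val candidateSets: Seq[Set[(Int, Int)]] =
  NonOverlappingIntervals.apply[(Int, Int)](Seq((0, 4), (2, 6), (6, 9), (5, 5)), _._1, _._2)
// Every returned set contains only pairwise disjoint, non-empty intervals,
// e.g. Set((0, 4), (6, 9)) is one of the candidates.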
@@ -232,6 +232,7 @@ case class ZLine(opcode: ZOpcode.Value, registers: ZRegisters, parameter: Consta
      case DISCARD_BCDEIX => " ; DISCARD_BCDEIX"
      case BYTE => " !byte " + parameter.toString // TODO: format?
      case LABEL => parameter.toString + ":"
      case RST => s" RST $parameter"
      case EX_AF_AF => " EX AF,AF'"
      case EX_SP => registers match {
        case OneRegister(r) => s" EX (SP),${asAssemblyString(r)})"
@@ -292,6 +293,10 @@ case class ZLine(opcode: ZOpcode.Value, registers: ZRegisters, parameter: Consta
        case TwoRegisters(HL, HL) => r == H || r == L
        case TwoRegisters(HL, BC) => r == H || r == L || r == B || r == C
        case TwoRegisters(HL, DE) => r == H || r == L || r == D || r == E
        case TwoRegisters(HL, SP) => r == H || r == L || r == SP
        case TwoRegisters(IX, DE) => r == IXH || r == IXL || r == D || r == E
        case TwoRegisters(IX, BC) => r == IXH || r == IXL || r == B || r == C
        case TwoRegisters(IX, SP) => r == IXH || r == IXL || r == SP
        case _ => true
      }
      case LD => (registers match {
@@ -472,12 +477,13 @@ case class ZLine(opcode: ZOpcode.Value, registers: ZRegisters, parameter: Consta
        case _ => false
      }
      case JP | JR | RET | RETI | RETN |
           POP |
           PUSH |
           DISCARD_A | DISCARD_BCDEIX | DISCARD_HL | DISCARD_F => false
      case ADD | ADC | AND | OR | XOR | SUB | SBC | DAA | NEG | CPL => r == A
      case CP => false
      case DJNZ => r == B
      case LABEL | DI | EI | NOP | HALT => false
      case CALL => r != IXH && r != IXL && r != SP
      case _ => true // TODO
    }
  }
@@ -204,6 +204,10 @@ object AlwaysGoodZ80Optimizations {
       (Linear & Not(Changes(register)) & DoesntChangeMemoryAt(1)).* ~
       (Elidable & Is16BitLoad(register, ZRegister.MEM_ABS_16) & MatchParameter(0)) ~~> (_.init)
     ),
+    // 60
+    (HasOpcode(LD) & MatchSourceRegisterAndOffset(0) & MatchTargetRegisterAndOffset(1)) ~
+      Where(ctx => ctx.get[RegisterAndOffset](0).register != ZRegister.MEM_ABS_8 && ctx.get[RegisterAndOffset](1).register != ZRegister.MEM_ABS_8) ~
+      (Elidable & HasOpcode(LD) & MatchSourceRegisterAndOffset(1) & MatchTargetRegisterAndOffset(0)) ~~> (_.init)
   )

   val PointlessStackStashing = new RuleBasedAssemblyOptimization("Pointless stack stashing",
@@ -607,6 +611,49 @@ object AlwaysGoodZ80Optimizations {
     },
+
+
+    (Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
+      (Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.DE))
+    },
+    (Elidable & Is8BitLoad(ZRegister.H, ZRegister.B)) ~
+      (Elidable & Is8BitLoad(ZRegister.L, ZRegister.C)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.BC))
+    },
+    (Elidable & Is8BitLoad(ZRegister.D, ZRegister.H)) ~
+      (Elidable & Is8BitLoad(ZRegister.E, ZRegister.L)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.DE) & DoesntMatterWhatItDoesWith(ZRegister.DE)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.HL))
+    },
+    (Elidable & Is8BitLoad(ZRegister.B, ZRegister.H)) ~
+      (Elidable & Is8BitLoad(ZRegister.C, ZRegister.L)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.BC) & DoesntMatterWhatItDoesWith(ZRegister.BC)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.HL))
+    },
+
+    (Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
+      (Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.DE))
+    },
+    (Elidable & Is8BitLoad(ZRegister.L, ZRegister.C)) ~
+      (Elidable & Is8BitLoad(ZRegister.H, ZRegister.B)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.BC))
+    },
+    (Elidable & Is8BitLoad(ZRegister.E, ZRegister.L)) ~
+      (Elidable & Is8BitLoad(ZRegister.D, ZRegister.H)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.DE) & DoesntMatterWhatItDoesWith(ZRegister.DE)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.HL))
+    },
+    (Elidable & Is8BitLoad(ZRegister.C, ZRegister.L)) ~
+      (Elidable & Is8BitLoad(ZRegister.B, ZRegister.H)) ~
+      (Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.BC) & DoesntMatterWhatItDoesWith(ZRegister.BC)) ~~> {_ =>
+      List(ZLine.register(PUSH, ZRegister.HL))
+    },


   )

   val UnusedCodeRemoval = new RuleBasedAssemblyOptimization("Unreachable code removal",
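All eight rules added to PointlessStackStashing above share one shape: a byte-by-byte copy of one register pair into another, immediately followed by a PUSH of the copy, collapses into a PUSH of the original pair whenever the copied-into pair holds no live value afterwards. A sketch of the intent for the first rule, illustrative and not part of the commit:

import millfork.assembly.z80.{ZLine, ZOpcode}
import millfork.node.ZRegister

// The matched sequence  LD H,D / LD L,E / PUSH HL  (with HL dead afterwards)
// is rewritten to the single replacement instruction below, dropping both byte copies:
val replacement: List[ZLine] = List(ZLine.register(ZOpcode.PUSH, ZRegister.DE))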
@@ -22,6 +22,8 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {

   case class CyclesAndBytes(bytes: Int, cycles: Int) {
     def +(that: CyclesAndBytes) = CyclesAndBytes(this.bytes + that.bytes, this.cycles + that.cycles)
+
+    def *(scale: Double) = CyclesAndBytes(bytes.*(scale).round.toInt, cycles.*(scale).round.toInt)
   }

   override def optimize(f: NormalFunction, code: List[ZLine], optimizationContext: OptimizationContext): List[ZLine] = {
@@ -29,13 +31,29 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
     val options = optimizationContext.options
     val removeVariablesForReal = !options.flag(CompilationFlag.InternalCurrentlyOptimizingForMeasurement)
     val costFunction: CyclesAndBytes => Int = if (options.flag(CompilationFlag.OptimizeForSpeed)) _.cycles else _.bytes
+    lazy val savingsForRemovingOneStackVariable = {
+      val localVariableAreaSize = code.flatMap {
+        case ZLine(_, OneRegisterOffset(ZRegister.MEM_IX_D, offset), _, _) => Some(offset)
+        case ZLine(_, TwoRegistersOffset(_, ZRegister.MEM_IX_D, offset), _, _) => Some(offset)
+        case ZLine(_, TwoRegistersOffset(ZRegister.MEM_IX_D, _, offset), _, _) => Some(offset)
+        case _ => None
+      }.toSet.size
+      val prologueAndEpilogue = if (f.returnType.size == 2) CyclesAndBytes(107, 20) else CyclesAndBytes(95, 17)
+      localVariableAreaSize match {
+        case 1 => prologueAndEpilogue
+        case 2 => prologueAndEpilogue * 0.4
+        case 3 => prologueAndEpilogue * 0.1
+        case _ => CyclesAndBytes.Zero
+      }
+    }

-    val bCandidates = getCandidates(vs, _.b, ZRegister.B)
-    val cCandidates = getCandidates(vs, _.c, ZRegister.C)
-    val dCandidates = getCandidates(vs, _.d, ZRegister.D)
-    val eCandidates = getCandidates(vs, _.e, ZRegister.E)
-    val hCandidates = getCandidates(vs, _.h, ZRegister.H)
-    val lCandidates = getCandidates(vs, _.l, ZRegister.L)
+    val bCandidates = getCandidates(vs, _.b, ZRegister.B, savingsForRemovingOneStackVariable)
+    val cCandidates = getCandidates(vs, _.c, ZRegister.C, savingsForRemovingOneStackVariable)
+    val dCandidates = getCandidates(vs, _.d, ZRegister.D, savingsForRemovingOneStackVariable)
+    val eCandidates = getCandidates(vs, _.e, ZRegister.E, savingsForRemovingOneStackVariable)
+    val hCandidates = getCandidates(vs, _.h, ZRegister.H, savingsForRemovingOneStackVariable)
+    val lCandidates = getCandidates(vs, _.l, ZRegister.L, savingsForRemovingOneStackVariable)

     val bCandidateSets = NonOverlappingIntervals.apply[(String, Range, CyclesAndBytes)](bCandidates, _._2.start, _._2.end)
     val cCandidateSets = NonOverlappingIntervals.apply[(String, Range, CyclesAndBytes)](cCandidates, _._2.start, _._2.end)
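The new scaling operator on CyclesAndBytes rounds each field to the nearest integer, which is what the 0.4 and 0.1 factors in savingsForRemovingOneStackVariable rely on: the fewer distinct IX+d offsets remain in use, the larger the share of the prologue/epilogue cost that removing one more stack variable is credited with. A standalone restatement of that arithmetic, illustrative only and using the constants quoted in the diff:

case class CyclesAndBytes(bytes: Int, cycles: Int) {
  def +(that: CyclesAndBytes) = CyclesAndBytes(this.bytes + that.bytes, this.cycles + that.cycles)
  def *(scale: Double) = CyclesAndBytes(bytes.*(scale).round.toInt, cycles.*(scale).round.toInt)
}

object ScalingDemo extends App {
  val prologueAndEpilogue = CyclesAndBytes(107, 20)  // the two-byte-return-type case above
  println(prologueAndEpilogue * 0.4)                 // CyclesAndBytes(43,8)
  println(prologueAndEpilogue * 0.1)                 // CyclesAndBytes(11,2)
}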
@@ -114,7 +132,7 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
           reportOptimizedBlock(oldCode, newCode)
           output ++= newCode
           i = range.end
-          if (removeVariablesForReal && contains(range, vs.variablesWithLifetimesMap(v))) {
+          if (removeVariablesForReal && !v.startsWith("IX+") && vs.variablesWithLifetimesMap.contains(v) && contains(range, vs.variablesWithLifetimesMap(v))) {
             f.environment.removeVariable(v)
           }
           true
@@ -150,7 +168,7 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
     }
   }

-  private def getCandidates(vs: VariableStatus, importanceExtractor: CpuImportance => Importance, register: ZRegister.Value) = {
+  private def getCandidates(vs: VariableStatus, importanceExtractor: CpuImportance => Importance, register: ZRegister.Value, savingsForRemovingOneStackVariable: =>CyclesAndBytes) = {
     vs.variablesWithLifetimes.filter {
       case (v, range) =>
         val tuple = vs.codeWithFlow(range.start)
@@ -160,8 +178,15 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
         }
     }.flatMap {
       case (v, range) =>
-        canBeInlined(v.name, synced = false, register, Some(false), Some(false), Some(false), vs.codeWithFlow.slice(range.start, range.end)).map { score =>
-          (v.name, range, if (vs.variablesWithRegisterHint(v.name)) score + CyclesAndBytes(16, 16) else score)
+        val id = v match {
+          case MemoryVariable(name, _, _) => name
+          case StackVariable(_, _, offset) => "IX+" + offset
+        }
+        var bonus = CyclesAndBytes.Zero
+        if (vs.variablesWithRegisterHint(v.name)) bonus += CyclesAndBytes(16, 16)
+        if (id.startsWith("IX+")) bonus += savingsForRemovingOneStackVariable
+        canBeInlined(id, synced = false, register, Some(false), Some(false), Some(false), vs.codeWithFlow.slice(range.start, range.end)).map { score =>
+          (id, range, score + bonus)
         }
     }
   }
@@ -173,9 +198,9 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
     import millfork.assembly.z80.ZOpcode._
     import millfork.node.ZRegister._

-    def add(first: Boolean, ifTrue: CyclesAndBytes, ifFalse: CyclesAndBytes): CyclesAndBytes=>CyclesAndBytes = { c =>
-      if (first) c + ifTrue else c + ifFalse
-    }
+    def add(first: Boolean, ifTrue: CyclesAndBytes, ifFalse: CyclesAndBytes): CyclesAndBytes => CyclesAndBytes =
+      if (first) _ + ifTrue else _ + ifFalse

     def add(value: CyclesAndBytes): CyclesAndBytes=>CyclesAndBytes = _ + value

     def canBeInlined(vname: String, synced: Boolean, target: ZRegister.Value, addressInHl: Option[Boolean], addressInBc: Option[Boolean], addressInDe: Option[Boolean], code: List[(FlowInfo, ZLine)]): Option[CyclesAndBytes] = {
@@ -192,12 +217,22 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
           case _ => None
         }
       }
+      object ThisOffset {
+        def unapply(c: Int): Option[Int] = if ("IX+" + c == vname) Some(c) else None
+      }
       code match {
         case (_, ZLine(LD, TwoRegisters(A, MEM_ABS_8), ThisVar(_), _)) :: xs =>
           canBeInlined(vname, synced, target, addressInHl, addressInBc, addressInDe, xs).map(add(CyclesAndBytes(9, 2)))
         case (_, ZLine(LD, TwoRegisters(MEM_ABS_8, A), ThisVar(_), _)) :: xs =>
           canBeInlined(vname, synced, target, addressInHl, addressInBc, addressInDe, xs).map(add(CyclesAndBytes(9, 2)))
+
+        case (_, ZLine(LD, TwoRegistersOffset(reg, MEM_IX_D, ThisOffset(_)), _, _)) :: xs =>
+          canBeInlined(vname, synced, target, addressInHl, addressInBc, addressInDe, xs).map(add(reg == target, CyclesAndBytes(19, 3), CyclesAndBytes(15, 2)))
+        case (_, ZLine(LD, TwoRegistersOffset(MEM_IX_D, reg, ThisOffset(_)), _, _)) :: xs =>
+          canBeInlined(vname, synced, target, addressInHl, addressInBc, addressInDe, xs).map(add(reg == target, CyclesAndBytes(19, 3), CyclesAndBytes(15, 2)))
+        case (_, ZLine(_, OneRegisterOffset(MEM_IX_D, ThisOffset(_)), _, _)) :: xs =>
+          canBeInlined(vname, synced, target, addressInHl, addressInBc, addressInDe, xs).map(add(CyclesAndBytes(15, 2)))

         case (_, ZLine(LD_16, TwoRegisters(HL, IMM_16), ThisVar(_), _)) :: xs =>
           if (target == H || target == L) fail(61) else
             canBeInlined(vname, synced, target, addressInHl = Some(true), addressInBc, addressInDe, xs).map(add(CyclesAndBytes(10, 3)))
@@ -233,6 +268,16 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
         case (_, ZLine(_, _, SubbyteConstant(ThisVar(_), _), _)) :: _ => fail(6)
         case (_, ZLine(_, _, ThisVar(_), _)) :: _ => fail(4)
         case (_, ZLine(_, _, CompoundConstant(_, ThisVar(_), _), _)) :: _ => fail(5)
+
+        case (_, ZLine(CALL, _, _, _)) :: xs =>
+          // TODO: check return type and allow HL sometimes
+          target match {
+            case ZRegister.B | ZRegister.C | ZRegister.D | ZRegister.E =>
+              canBeInlined(vname, synced, target, addressInHl, addressInBc, addressInDe, xs).map(add(CyclesAndBytes(-21, -2)))
+            case _ =>
+              fail(3)
+          }
+
         case (_, x) :: xs if x.changesRegister(target) => fail(1)
         case (_, x) :: xs if x.readsRegister(target) && !synced => fail(2)

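The new CALL case above lets a byte variable stay in B, C, D or E across subroutine calls at a fixed cost: the negative CyclesAndBytes(-21, -2) accounts for the PUSH/POP pair that inlineVars later wraps around every CALL in the variable's lifetime. Roughly, and only as an illustration of that later rewrite:

import millfork.assembly.z80.{ZLine, ZOpcode}
import millfork.node.ZRegister

// If the variable now lives in B or C, each CALL instruction `call` is surrounded like this:
def wrapCall(call: ZLine): List[ZLine] =
  List(ZLine.register(ZOpcode.PUSH, ZRegister.BC), call, ZLine.register(ZOpcode.POP, ZRegister.BC))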
@@ -249,19 +294,26 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
         case (_, x) :: xs if x.readsRegister(DE) && addressInDe.contains(true) => fail(83)

         case (_, ZLine(LABEL, _, _, _)) :: xs => canBeInlined(vname, synced = false, target, addressInHl, addressInBc, addressInDe, xs)
-        case (_, ZLine(CALL, _, _, _)) :: xs => fail(3)
         case _ :: xs => canBeInlined(vname, synced, target, addressInHl, addressInBc, addressInDe, xs)
         case _ => Some(CyclesAndBytes.Zero)
       }
     }

     def inlineVars(vname: String, target: ZRegister.Value, addressInHl: Boolean, addressInBc: Boolean, addressInDe: Boolean, code: List[ZLine]): List[ZLine] = {
-      if (code.nonEmpty) println(code.head)
+      // if (code.nonEmpty) println(code.head)
       code match {
         case ZLine(LD, TwoRegisters(A, MEM_ABS_8), MemoryAddressConstant(th), _) :: xs if th.name == vname =>
           ZLine.ld8(A, target) :: inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
         case ZLine(LD, TwoRegisters(MEM_ABS_8, A), MemoryAddressConstant(th), _) :: xs if th.name == vname =>
           ZLine.ld8(target, A) :: inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
+        case ZLine(LD, TwoRegistersOffset(reg, MEM_IX_D, off), _, _) :: xs if "IX+" + off == vname =>
+          if (reg == target) inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
+          else ZLine.ld8(reg, target) :: inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
+        case ZLine(LD, TwoRegistersOffset(MEM_IX_D, reg, off), _, _) :: xs if "IX+" + off == vname =>
+          if (reg == target) inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
+          else ZLine.ld8(target, reg) :: inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
+        case (l@ZLine(_, OneRegisterOffset(MEM_IX_D, off), _, _)) :: xs if "IX+" + off == vname =>
+          l.copy(registers = OneRegister(target)) :: inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)

         case ZLine(LD_16, TwoRegisters(HL, IMM_16), MemoryAddressConstant(th), _) :: xs if th.name == vname =>
           inlineVars(vname, target, addressInHl = true, addressInBc, addressInDe, xs)
@@ -295,6 +347,17 @@ object ByteVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
             x.copy(registers = TwoRegisters(reg, target), parameter = p) ::
               inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)

+        case (x@ZLine(CALL,_,_,_))::xs =>
+          // TODO: this push/pull pair shouldn't prevent the inlining to the other register in the pair
+          target match {
+            case ZRegister.B | ZRegister.C =>
+              ZLine.register(PUSH, BC) :: x :: ZLine.register(POP, BC) ::
+                inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
+            case ZRegister.D | ZRegister.E =>
+              ZLine.register(PUSH, DE) :: x :: ZLine.register(POP, DE) ::
+                inlineVars(vname, target, addressInHl, addressInBc, addressInDe, xs)
+          }
+
         case x :: xs if x.changesRegister(HL) =>
           x :: inlineVars(vname, target, addressInHl = false, addressInBc, addressInDe, xs)
         case x :: xs if x.changesRegister(BC) =>
src/main/scala/millfork/assembly/z80/opt/CompactStackFrame.scala (new file, 137 lines)
@@ -0,0 +1,137 @@
package millfork.assembly.z80.opt

import millfork.assembly.{AssemblyOptimization, OptimizationContext}
import millfork.assembly.z80._
import millfork.env.{MemoryAddressConstant, NormalFunction, NumericConstant}
import millfork.error.ErrorReporting
import millfork.node.ZRegister

/**
  * @author Karol Stasiak
  */
object CompactStackFrame extends AssemblyOptimization[ZLine] {
  override def name: String = "Compacting the stack frame"

  override def optimize(f: NormalFunction, code: List[ZLine], context: OptimizationContext): List[ZLine] = {
    optimizeStart(code) match {
      case Some((optimized, before, after)) =>
        ErrorReporting.debug(s"Optimized stack frame from $before to $after bytes")
        optimized
      case None => code
    }
  }

  def optimizeStart(code: List[ZLine]): Option[(List[ZLine], Int, Int)] = {
    import millfork.assembly.z80.ZOpcode._
    import millfork.node.ZRegister._
    code match {
      case (name@ZLine(LABEL, _, _, _)) ::
        ZLine(PUSH, OneRegister(IX), _, true) ::
        ZLine(LD_16, TwoRegisters(IX, IMM_16), NumericConstant(negativeSize, _), true) ::
        ZLine(ADD_16, TwoRegisters(IX, SP), _, true) ::
        ZLine(LD_16, TwoRegisters(SP, IX), _, true) :: tail =>
        val sourceSize = (-negativeSize).&(0xffff).toInt
        val usedOffsets: Set[Int] = findUsedOffsets(tail)
        val targetSize = usedOffsets.size + usedOffsets.size.&(1)
        if (targetSize == sourceSize) None else {
          val prologue = if (targetSize == 0) Nil else List(
            ZLine.register(PUSH, IX),
            ZLine.ldImm16(IX, 0x10000 - targetSize),
            ZLine.registers(ADD_16, IX, SP),
            ZLine.ld16(SP, IX))
          val map = usedOffsets.toSeq.sorted.zipWithIndex.toMap
          optimizeContinue(tail, sourceSize, targetSize, map).map { optTail =>
            (name :: prologue ++ optTail, sourceSize, targetSize)
          }
        }
      case _ =>
        None
    }
  }

  def findUsedOffsets(code: List[ZLine]): Set[Int] = {
    code.flatMap {
      case ZLine(_, OneRegisterOffset(ZRegister.MEM_IX_D, offset), _, _) => Some(offset)
      case ZLine(_, TwoRegistersOffset(_, ZRegister.MEM_IX_D, offset), _, _) => Some(offset)
      case ZLine(_, TwoRegistersOffset(ZRegister.MEM_IX_D, _, offset), _, _) => Some(offset)
      case _ => None
    }.toSet
  }

  def optimizeContinue(code: List[ZLine], sourceSize: Int, targetSize: Int, mapping: Map[Int, Int]): Option[List[ZLine]] = {
    import millfork.assembly.z80.ZOpcode._
    import millfork.node.ZRegister._
    code match {
      case (head@ZLine(_, TwoRegistersOffset(reg, MEM_IX_D, offset), _, _)) :: tail =>
        optimizeContinue(tail, sourceSize, targetSize, mapping).map(
          head.copy(registers = TwoRegistersOffset(reg, MEM_IX_D, mapping(offset))) :: _)

      case (head@ZLine(_, TwoRegistersOffset(MEM_IX_D, reg, offset), _, _)) :: tail =>
        optimizeContinue(tail, sourceSize, targetSize, mapping).map(
          head.copy(registers = TwoRegistersOffset(MEM_IX_D, reg, mapping(offset))) :: _)

      case (head@ZLine(_, OneRegisterOffset(MEM_IX_D, offset), _, _)) :: tail =>
        optimizeContinue(tail, sourceSize, targetSize, mapping).map(
          head.copy(registers = OneRegisterOffset(MEM_IX_D, mapping(offset))) :: _)

      case
        ZLine(LD_16, TwoRegisters(IX, IMM_16), NumericConstant(size, _), _) ::
          ZLine(ADD_16, TwoRegisters(IX, SP), _, _) ::
          ZLine(LD_16, TwoRegisters(SP, IX), _, _) ::
          ZLine(POP, OneRegister(IX), _, _) :: tail =>
        if (size != sourceSize) None
        else {
          stripReturn(tail).flatMap {
            case (ret, rest) =>
              val epilogue = if (targetSize == 0) Nil else {
                List(
                  ZLine.ldImm16(IX, targetSize),
                  ZLine.registers(ADD_16, IX, SP),
                  ZLine.ld16(SP, IX),
                  ZLine.register(POP, IX))
              }
              optimizeContinue(rest, sourceSize, targetSize, mapping).map(epilogue ++ ret ++ _)
          }
        }
      case
        ZLine(LD_16, TwoRegisters(HL, IMM_16), NumericConstant(size, _), _) ::
          ZLine(ADD_16, TwoRegisters(HL, SP), _, _) ::
          ZLine(LD_16, TwoRegisters(SP, HL), _, _) ::
          ZLine(POP, OneRegister(IX), _, _) :: tail =>
        if (size != sourceSize) {
          println("Mismatched stack frame sizes")
          None
        } else {
          stripReturn(tail).flatMap {
            case (ret, rest) =>
              val epilogue = if (targetSize == 0) Nil else {
                List(
                  ZLine.ldImm16(HL, targetSize),
                  ZLine.registers(ADD_16, HL, SP),
                  ZLine.ld16(SP, HL),
                  ZLine.register(POP, IX))
              }
              optimizeContinue(rest, sourceSize, targetSize, mapping).map(epilogue ++ ret ++ _)
          }
        }
      case ZLine(RET | RETI | RETN | BYTE, _, _, _) :: _ => None
      case ZLine(JP, _, MemoryAddressConstant(f: NormalFunction), _) :: _ => None
      case x :: _ if x.changesRegister(ZRegister.IX) => None
      case x :: xs => optimizeContinue(xs, sourceSize, targetSize, mapping).map(x :: _)
      case Nil => Some(Nil)
    }
  }

  def stripReturn(code: List[ZLine]): Option[(List[ZLine], List[ZLine])] = {
    val (discards, rest) = code.span(l => ZOpcodeClasses.NoopDiscards(l.opcode))
    if (rest.isEmpty) return None
    import millfork.assembly.z80.ZOpcode._
    val potentialResult = (discards :+ rest.head) -> rest.tail
    rest.head match {
      case ZLine(RET | RETI | RETN, _, _, _) => Some(potentialResult)
      case ZLine(JP, NoRegisters, MemoryAddressConstant(f: NormalFunction), _) => Some(potentialResult)
      case _ => None
    }
  }
}
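The core of the new pass is an offset remap: only the IX+d offsets that are actually referenced survive, the frame is kept at an even number of bytes, and the surviving offsets are renumbered densely from zero. A standalone sketch of that remapping, with made-up offsets standing in for what findUsedOffsets would return:

object CompactStackFrameSketch extends App {
  val usedOffsets: Set[Int] = Set(2, 3, 7)                    // hypothetical result of findUsedOffsets
  val targetSize = usedOffsets.size + usedOffsets.size.&(1)   // 3 -> 4, rounded up to an even size
  val map = usedOffsets.toSeq.sorted.zipWithIndex.toMap       // Map(2 -> 0, 3 -> 1, 7 -> 2)
  // Every surviving access IX+2, IX+3, IX+7 is then rewritten to IX+0, IX+1, IX+2,
  // and the prologue/epilogue allocate targetSize bytes instead of the original frame size.
  println((targetSize, map))
}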
@@ -24,7 +24,7 @@ object EmptyMemoryStoreRemoval extends AssemblyOptimization[ZLine] {
     val toRemove = mutable.Set[Int]()
     val badVariables = mutable.Set[String]()

-    for((v, lifetime) <- vs.variablesWithLifetimes) {
+    for((v, lifetime) <- vs.variablesWithLifetimes if lifetime.nonEmpty) {
       val lastaccess = lifetime.last
       if (lastaccess >= 0) {
         val lastVariableAccess = code(lastaccess)
@@ -0,0 +1,59 @@
package millfork.assembly.z80.opt

import millfork.assembly.opt.SingleStatus
import millfork.assembly.z80._
import millfork.env._
import millfork.node.ZRegister

/**
  * @author Karol Stasiak
  */
object StackVariableLifetime {

  // This only works for non-stack variables.
  // TODO: this is also probably very wrong
  def apply(variableOffset: Int, codeWithFlow: List[(FlowInfo, ZLine)]): Range = {
    val flags = codeWithFlow.map {
      case (_, ZLine(_, OneRegisterOffset(ZRegister.MEM_IX_D, i), _, _)) => i == variableOffset
      case (_, ZLine(_, TwoRegistersOffset(ZRegister.MEM_IX_D, _, i), _, _)) => i == variableOffset
      case (_, ZLine(_, TwoRegistersOffset(_, ZRegister.MEM_IX_D, i), _, _)) => i == variableOffset
      case _ => false
    }
    if (flags.forall(!_)) return Range(0, 0)
    var min = flags.indexOf(true)
    var max = flags.lastIndexOf(true) + 1
    var changed = true
    val labelMap = codeWithFlow.zipWithIndex.flatMap(a => a._1._2.parameter match {
      case MemoryAddressConstant(Label(l)) => List(l -> a._2)
      case _ => Nil
    }).groupBy(_._1).mapValues(_.map(_._2).toSet)

    while (changed) {
      changed = false
      for ((label, indices) <- labelMap) {
        if (indices.exists(i => i >= min && i < max)) {
          indices.foreach { i =>
            val before = max - min
            min = min min i
            max = max max (i + 1)
            if (max - min != before) {
              changed = true
            }
          }
        }
      }
    }

    // ErrorReporting.trace("Lifetime for IX+" + variableOffset)
    // codeWithFlow.zipWithIndex.foreach {
    //   case ((_, line), index) =>
    //     if (index >= min && index < max) {
    //       ErrorReporting.trace(f"$line%-30s <")
    //     } else {
    //       ErrorReporting.trace(line.toString)
    //     }
    // }

    Range(min, max)
  }
}
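StackVariableLifetime mirrors VariableLifetime but keys on the IX+d offset instead of a variable name: it takes the first and last instruction that touches the offset and then widens that range to a fixed point, so that any label with an occurrence inside the range pulls all of its other occurrences in as well. A standalone sketch of just the widening loop, not part of the commit:

object LifetimeWideningSketch extends App {
  // labelOccurrences: for each label, the instruction indices where it appears.
  def widen(initial: Range, labelOccurrences: Map[String, Set[Int]]): Range = {
    var min = initial.start
    var max = initial.end
    var changed = true
    while (changed) {
      changed = false
      for ((_, indices) <- labelOccurrences if indices.exists(i => i >= min && i < max)) {
        val before = max - min
        min = indices.foldLeft(min)(_ min _)
        max = indices.foldLeft(max)((m, i) => m max (i + 1))
        if (max - min != before) changed = true
      }
    }
    Range(min, max)
  }

  println(widen(Range(3, 5), Map(".loop" -> Set(2, 4))))  // same elements as Range(2, 5)
}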
@@ -66,6 +66,7 @@ object VariableStatus {
     val localVariables = allLocalVariables.filter {
       case MemoryVariable(name, typ, VariableAllocationMethod.Auto | VariableAllocationMethod.Zeropage) =>
         typFilter(typ) && !paramVariables(name) && stillUsedVariables(name) && !variablesWithAddressesTaken(name)
+      case StackVariable(name, typ, _) => typFilter(typ)
       case _ => false
     }
     val variablesWithRegisterHint = f.environment.getAllLocalVariables.filter {
@@ -73,11 +74,15 @@ object VariableStatus {
         typFilter(typ) && (typ.size == 1 || typ.size == 2) && !paramVariables(name) && stillUsedVariables(name) && !variablesWithAddressesTaken(name)
       case _ => false
     }.map(_.name).toSet
-    val variablesWithLifetimes = localVariables.map(v =>
-      v -> VariableLifetime.apply(v.name, flow)
-    )
+    val variablesWithLifetimes = localVariables.map {
+      case v: MemoryVariable =>
+        v -> VariableLifetime.apply(v.name, flow)
+      case v: StackVariable =>
+        v -> StackVariableLifetime.apply(v.baseOffset, flow)
+    }
     val variablesWithLifetimesMap = variablesWithLifetimes.map {
-      case (v, lt) => v.name -> lt
+      case (v: MemoryVariable, lt) => v.name -> lt
+      case (v: StackVariable, lt) => ("IX+" + v.baseOffset) -> lt
     }.toMap
     Some(new VariableStatus(
       paramVariables,
@@ -274,6 +274,12 @@ object WordVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
       case (_, ZLine(_, _, MemoryAddressConstant(th), _)) :: _ if th.name == vname => fail(4)
       case (_, ZLine(_, _, CompoundConstant(_, MemoryAddressConstant(th), _), _)) :: _ if th.name == vname => fail(5)
       case (_, ZLine(_, _, SubbyteConstant(MemoryAddressConstant(th), _), _)) :: _ if th.name == vname => fail(6)
+      case (_, ZLine(CALL, _, _, _)) :: xs => target match {
+        // TODO: check return type and allow HL sometimes
+        case BC | DE =>
+          canBeInlined(vname, synced, target, xs).map(add(CyclesAndBytes(-21, -2)))
+        case _ => fail(3)
+      }
       case (_, x) :: xs if x.changesRegister(target) => fail(1)
       case (_, x) :: xs if x.readsRegister(target) && !synced => fail(2)
       case (_, ZLine(LABEL, _, _, _)) :: xs => canBeInlined(vname, synced = false, target, xs)
@@ -400,19 +406,29 @@ object WordVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
       case ZLine(LD, TwoRegisters(MEM_ABS_8, A), MemoryAddressConstant(th), _) :: xs if th.name == de =>
         ZLine.ld8(E, A) :: inlineVars(hl, bc, de, xs)

-      case ZLine(LD, TwoRegisters(A, MEM_ABS_8), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1,_)), _) :: xs if th.name == hl =>
+      case ZLine(LD, TwoRegisters(A, MEM_ABS_8), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1, _)), _) :: xs if th.name == hl =>
         ZLine.ld8(A, H) :: inlineVars(hl, bc, de, xs)
-      case ZLine(LD, TwoRegisters(MEM_ABS_8, A), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1,_)), _) :: xs if th.name == hl =>
+      case ZLine(LD, TwoRegisters(MEM_ABS_8, A), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1, _)), _) :: xs if th.name == hl =>
         ZLine.ld8(H, A) :: inlineVars(hl, bc, de, xs)
-      case ZLine(LD, TwoRegisters(A, MEM_ABS_8), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1,_)), _) :: xs if th.name == bc =>
+      case ZLine(LD, TwoRegisters(A, MEM_ABS_8), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1, _)), _) :: xs if th.name == bc =>
         ZLine.ld8(A, B) :: inlineVars(hl, bc, de, xs)
-      case ZLine(LD, TwoRegisters(MEM_ABS_8, A), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1,_)), _) :: xs if th.name == bc =>
+      case ZLine(LD, TwoRegisters(MEM_ABS_8, A), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1, _)), _) :: xs if th.name == bc =>
         ZLine.ld8(B, A) :: inlineVars(hl, bc, de, xs)
-      case ZLine(LD, TwoRegisters(A, MEM_ABS_8), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1,_)), _) :: xs if th.name == de =>
+      case ZLine(LD, TwoRegisters(A, MEM_ABS_8), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1, _)), _) :: xs if th.name == de =>
         ZLine.ld8(A, D) :: inlineVars(hl, bc, de, xs)
-      case ZLine(LD, TwoRegisters(MEM_ABS_8, A), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1,_)), _) :: xs if th.name == de =>
+      case ZLine(LD, TwoRegisters(MEM_ABS_8, A), CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th), NumericConstant(1, _)), _) :: xs if th.name == de =>
         ZLine.ld8(D, A) :: inlineVars(hl, bc, de, xs)

+      case (x@ZLine(CALL, _, _, _)) :: xs =>
+        if (bc != "") {
+          ZLine.register(PUSH, BC) :: x :: ZLine.register(POP, BC) :: inlineVars(hl, bc, de, xs)
+        } else if (de != "") {
+          ZLine.register(PUSH, DE) :: x :: ZLine.register(POP, DE) :: inlineVars(hl, bc, de, xs)
+        } else {
+          throw new IllegalStateException()
+        }
+
       case x :: ZLine(LD_16, TwoRegisters(MEM_ABS_16, HL), MemoryAddressConstant(th), _) :: xs if x.changesRegister(HL) && th.name == hl =>
         x :: inlineVars(hl, bc, de, xs)
       case x :: ZLine(LD_16, TwoRegisters(MEM_ABS_16, BC), MemoryAddressConstant(th), _) :: xs if x.changesRegister(BC) && th.name == bc =>
@@ -420,9 +436,9 @@ object WordVariableToRegisterOptimization extends AssemblyOptimization[ZLine] {
       case x :: ZLine(LD_16, TwoRegisters(MEM_ABS_16, DE), MemoryAddressConstant(th), _) :: xs if x.changesRegister(DE) && th.name == de =>
         x :: inlineVars(hl, bc, de, xs)

-      case x :: _ if bc != "" && x.changesRegister(BC) => ???
-      case x :: _ if de != "" && x.changesRegister(DE) => ???
-      case x :: _ if hl != "" && x.changesRegister(HL) => ???
+      case x :: _ if bc != "" && x.changesRegister(BC) => throw new IllegalStateException()
+      case x :: _ if de != "" && x.changesRegister(DE) => throw new IllegalStateException()
+      case x :: _ if hl != "" && x.changesRegister(HL) => throw new IllegalStateException()

       case x :: xs => x :: inlineVars(hl, bc, de, xs)
       case Nil => Nil
@@ -16,7 +16,7 @@ object Z80OptimizationPresets {
         EmptyParameterStoreRemoval,
         EmptyMemoryStoreRemoval)
     ).flatten ++
-      List(WordVariableToRegisterOptimization, ByteVariableToRegisterOptimization)
+      List(WordVariableToRegisterOptimization, ByteVariableToRegisterOptimization, CompactStackFrame)
   ).flatten
 }
@@ -29,7 +29,7 @@ object Z80Compiler extends AbstractCompiler[ZLine] {
         List(ZLine.ldAbs16(param.toAddress, ZRegister.HL))
       case _ => Nil
     }
-    label :: (storeParamsFromRegisters ++ stackPointerFixAtBeginning(ctx) ++ chunk)
+    label :: (stackPointerFixAtBeginning(ctx) ++ storeParamsFromRegisters ++ chunk)
   }

   def stackPointerFixAtBeginning(ctx: CompilationContext): List[ZLine] = {
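The reordered prologue now emits the stack-pointer fix immediately after the function label, which is the shape the new CompactStackFrame.optimizeStart pattern-matches on (LABEL, then PUSH IX, LD IX with a negative size, ADD IX,SP, LD SP,IX); with the old order, a register-parameter store in between would have kept that pass from firing. A schematic of the ordering, with plain strings standing in for the real List[ZLine] fragments (all values hypothetical):

val label               = "main:"
val stackPointerFix     = List("PUSH IX", "LD IX,-4", "ADD IX,SP", "LD SP,IX")   // hypothetical
val storeParamsFromRegs = List("LD (IX+2),A")                                    // hypothetical
// old: label :: (storeParamsFromRegs ++ stackPointerFix ++ chunk)
// new: label :: (stackPointerFix ++ storeParamsFromRegs ++ chunk)
val prologue = label :: (stackPointerFix ++ storeParamsFromRegs)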
@@ -204,4 +204,22 @@ class StackVarSuite extends FunSuite with Matchers {
         | }
       """.stripMargin){m => m.readWord(0xc055) should equal(0x66) }
   }
+
+  test("Double array with stack variables") {
+    EmuCrossPlatformBenchmarkRun(Cpu.Mos, Cpu.Z80)(
+      """
+        | array output[5]@$c001
+        | array input = [0,1,4,9,16,25,36,49]
+        | void main () {
+        |   stack byte i
+        |   for i,0,until,output.length {
+        |     output[i] = input[i+1]<<1
+        |   }
+        | }
+        | void _panic(){while(true){}}
+      """.stripMargin){ m=>
+      m.readByte(0xc001) should equal (2)
+      m.readByte(0xc005) should equal (50)
+    }
+  }
 }