Mirror of https://github.com/KarolS/millfork.git, synced 2025-01-01 06:29:53 +00:00

8080/Z80: More optimizations

Karol Stasiak 2019-06-26 01:47:03 +02:00
parent 507791bcaf
commit 6cf746045f
3 changed files with 77 additions and 0 deletions

@@ -442,6 +442,13 @@ object AlwaysGoodI80Optimizations {
shallowerStack(code.tail.init)
}
}),
//32
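// drops a PUSH AF / POP AF pair around a block that neither changes A nor reads the stack pointer,
// provided the block keeps the stack balanced and the restored flags are irrelevant afterwards;
// e.g. (illustrative): PUSH AF / LD B,C / POP AF  ->  LD B,C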
(Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.AF)) ~
(Linear & Not(Changes(ZRegister.A)) & Not(ReadsStackPointer)).*.capture(2) ~
Where(ctx => ctx.isStackPreservingBlock(2)) ~
(Elidable & HasOpcode(POP) & HasRegisterParam(ZRegister.AF) & DoesntMatterWhatItDoesWithFlags) ~~> {code =>
code.tail.init
},
)
private def shallowerStack(lines: List[ZLine]): List[ZLine] = lines match {
@@ -463,6 +470,13 @@ object AlwaysGoodI80Optimizations {
code.tail.init :+ ZLine.ldImm16(register, i)
}
}),
// 5
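// when A is known to hold a constant at the PUSH, the PUSH AF / POP AF pair is dropped and the POP
// is replaced by reloading A with that constant (the restored flags must be dead); calls are allowed
// inside the block as long as the stack stays balanced and nothing reads the stack pointer;
// e.g. (illustrative, with A known to be 5): PUSH AF / CALL f / POP AF  ->  CALL f / LD A,5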
(Elidable & HasOpcode(PUSH) & HasRegisterParam(ZRegister.AF) & MatchRegister(ZRegister.A, 1)) ~
((Linear | HasOpcode(CALL)) & Not(HasOpcode(POP) & HasRegisterParam(ZRegister.AF)) & Not(ReadsStackPointer)).*.capture(2) ~
Where(ctx => ctx.isStackPreservingBlock(2)) ~
(Elidable & HasOpcode(POP) & HasRegisterParam(ZRegister.AF) & DoesntMatterWhatItDoesWithFlags) ~~> { (code, ctx) =>
code.tail.init :+ ZLine.ldImm8(ZRegister.A, ctx.get[Int](1)).pos(code.last.source)
},
)
val PointlessStackUnstashing = new RuleBasedAssemblyOptimization("Pointless stack unstashing",
@@ -821,6 +835,15 @@ object AlwaysGoodI80Optimizations {
code(2).copy(registers = OneRegister(ZRegister.BC))
)),
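// same as the preceding rule, but matching the copies in the opposite order (L from C before H from B);
// e.g. (illustrative): LD L,C / LD H,B / INC HL / LD C,L / LD B,H  ->  INC BC (when HL is dead afterwards)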
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.C)) ~
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.B)) ~
(Elidable & HasOpcodeIn(Set(INC_16, DEC_16, PUSH, POP)) & HasRegisterParam(ZRegister.HL)) ~
(Elidable & Is8BitLoad(ZRegister.C, ZRegister.L)) ~
(Elidable & Is8BitLoad(ZRegister.B, ZRegister.H) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> (code =>
List(
code(2).copy(registers = OneRegister(ZRegister.BC))
)),
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
(Elidable & HasOpcodeIn(Set(INC_16, DEC_16, PUSH, POP)) & HasRegisterParam(ZRegister.HL)) ~
@@ -830,6 +853,15 @@ object AlwaysGoodI80Optimizations {
code(2).copy(registers = OneRegister(ZRegister.DE))
)),
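// and likewise for DE with the copies in the opposite order;
// e.g. (illustrative): LD L,E / LD H,D / DEC HL / LD E,L / LD D,H  ->  DEC DE (when HL is dead afterwards)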
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
(Elidable & HasOpcodeIn(Set(INC_16, DEC_16, PUSH, POP)) & HasRegisterParam(ZRegister.HL)) ~
(Elidable & Is8BitLoad(ZRegister.E, ZRegister.L)) ~
(Elidable & Is8BitLoad(ZRegister.D, ZRegister.H) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> (code =>
List(
code(2).copy(registers = OneRegister(ZRegister.DE))
)),
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
(Elidable & HasOpcodeIn(Set(INC_16, DEC_16, PUSH, POP)) & HasRegisterParam(ZRegister.HL)) ~

@@ -140,24 +140,48 @@ object AlwaysGoodZ80Optimizations {
List(ZLine.ld8(ZRegister.MEM_BC, ZRegister.A))
},
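// the rules below cover the remaining copy orders and both directions: LD (HL),A becomes LD (BC),A
// or LD (DE),A, and LD A,(HL) becomes LD A,(BC) or LD A,(DE), whenever HL merely duplicates BC or DE
// and is dead afterwards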
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.C)) ~
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.B)) ~
(Elidable & Is8BitLoad(ZRegister.MEM_HL, ZRegister.A) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
List(ZLine.ld8(ZRegister.MEM_BC, ZRegister.A))
},
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
(Elidable & Is8BitLoad(ZRegister.MEM_HL, ZRegister.A) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
List(ZLine.ld8(ZRegister.MEM_DE, ZRegister.A))
},
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
(Elidable & Is8BitLoad(ZRegister.MEM_HL, ZRegister.A) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
List(ZLine.ld8(ZRegister.MEM_DE, ZRegister.A))
},
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.B)) ~
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.C)) ~
(Elidable & Is8BitLoad(ZRegister.A, ZRegister.MEM_HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
List(ZLine.ld8(ZRegister.A, ZRegister.MEM_BC))
},
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.C)) ~
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.B)) ~
(Elidable & Is8BitLoad(ZRegister.A, ZRegister.MEM_HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
List(ZLine.ld8(ZRegister.A, ZRegister.MEM_BC))
},
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
(Elidable & Is8BitLoad(ZRegister.A, ZRegister.MEM_HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
List(ZLine.ld8(ZRegister.A, ZRegister.MEM_DE))
},
(Elidable & Is8BitLoad(ZRegister.L, ZRegister.E)) ~
(Elidable & Is8BitLoad(ZRegister.H, ZRegister.D)) ~
(Elidable & Is8BitLoad(ZRegister.A, ZRegister.MEM_HL) & DoesntMatterWhatItDoesWith(ZRegister.HL)) ~~> {_ =>
List(ZLine.ld8(ZRegister.A, ZRegister.MEM_DE))
},
)
val All: List[AssemblyOptimization[ZLine]] = List[AssemblyOptimization[ZLine]](

@@ -181,6 +181,23 @@ class AssemblyMatchingContext(val compilationOptions: CompilationOptions) {
jumps.isEmpty
}
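// true iff the captured block leaves the stack pointer where it found it: it contains no return
// instruction, nothing that reads the stack pointer, and its PUSHes and POPs are balanced without
// ever popping below the starting depth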
def isStackPreservingBlock(i: Int): Boolean = {
import millfork.assembly.z80.ZOpcode._
var pushCount = 0
get[List[ZLine]](i).foreach {
case ZLine0(RET | RST | RETI | RETN, _, _) =>
return false
case ZLine0(PUSH, _, _) =>
pushCount += 1
case ZLine0(POP, _, _) =>
pushCount -= 1
if (pushCount < 0) return false
case l =>
if (ReadsStackPointer(l)) return false
}
pushCount == 0
}
def isAlignableBlock(i: Int): Boolean = {
if (!isExternallyLinearBlock(i)) return false
import ZOpcode._
@@ -911,6 +928,10 @@ case object ReadsStackPointer extends TrivialAssemblyLinePattern {
case _ => false
}
case LD_HLSP | LD_DESP | PUSH | POP => true
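// new CALL case: a call whose target is a NormalFunction (a function compiled by Millfork itself)
// is not considered to read the stack pointer; any other call target is treated conservatively as if it did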
case CALL => line.parameter match {
case MemoryAddressConstant(th: NormalFunction) => false
case _ => true
}
case _ => false
}
}
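
For intuition, below is a minimal standalone sketch of the stack-balance check that isStackPreservingBlock performs, written against a simplified opcode type. Every name in the sketch (StackBalanceSketch, Op, Push, Pop, Ret, Other, stackPreserving) is illustrative and not part of the Millfork codebase.

object StackBalanceSketch {
  sealed trait Op
  case object Push extends Op
  case object Pop extends Op
  case object Ret extends Op
  final case class Other(readsSp: Boolean) extends Op

  // True only if the block leaves the stack pointer where it found it:
  // no return instruction, no direct stack-pointer access, and every POP
  // matched by an earlier PUSH in the same block.
  def stackPreserving(block: List[Op]): Boolean = {
    @annotation.tailrec
    def go(rest: List[Op], depth: Int): Boolean = rest match {
      case Nil              => depth == 0
      case Ret :: _         => false
      case Push :: tail     => go(tail, depth + 1)
      case Pop :: tail      => if (depth == 0) false else go(tail, depth - 1)
      case Other(r) :: tail => if (r) false else go(tail, depth)
    }
    go(block, 0)
  }

  def main(args: Array[String]): Unit = {
    println(stackPreserving(List(Push, Other(readsSp = false), Pop))) // true: balanced and harmless
    println(stackPreserving(List(Pop, Push)))                         // false: pops below the start
    println(stackPreserving(List(Push, Ret, Pop)))                    // false: returns mid-block
  }
}

The real implementation additionally rejects RST, RETI and RETN, and delegates the "reads the stack pointer" test to the ReadsStackPointer pattern shown above.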