mirror of https://github.com/irmen/prog8.git
synced 2024-12-25 23:29:55 +00:00

working on better encoding of romsub in new ast/vmtarget

commit f718f4251b (parent 4644c9b621)
@@ -212,7 +212,7 @@ class StMemVar(name: String,
 }
 
 
-class StSub(name: String, val parameters: List<StSubroutineParameter>, position: Position) :
+class StSub(name: String, val parameters: List<StSubroutineParameter>, val returnType: DataType?, position: Position) :
     StNode(name, StNodeType.SUBROUTINE, position) {
     override fun printProperties() {
         print(name)
@@ -220,7 +220,7 @@ class StSub(name: String, val parameters: List<StSubroutineParameter>, position:
 }
 
 
-class StRomSub(name: String, val address: UInt, parameters: List<StSubroutineParameter>, position: Position) :
+class StRomSub(name: String, val address: UInt, val parameters: List<StSubroutineParameter>, val returnTypes: List<DataType>, position: Position) :
     StNode(name, StNodeType.ROMSUB, position) {
     override fun printProperties() {
         print("$name address=${address.toHex()}")
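For context: both symbol table node kinds now carry return type information. A regular sub returns at most one value, so StSub gains a single nullable returnType, while a romsub can declare several register-bound return values, so StRomSub gains a full returnTypes list. A minimal, self-contained sketch of that shape (stand-in classes and a stand-in Dt enum, not the actual prog8 types; the example values mirror the test.p8 program further down):

    // Stand-in model, not the real prog8 symbol table classes.
    enum class Dt { UBYTE, UWORD, FLOAT }
    data class Param(val name: String, val type: Dt)

    // a regular sub: at most one return value
    class SubNode(val name: String, val parameters: List<Param>, val returnType: Dt?)

    // a romsub at a fixed address: possibly several register-bound return values
    class RomSubNode(val name: String, val address: UInt, val parameters: List<Param>, val returnTypes: List<Dt>)

    fun main() {
        val sub = SubNode("subroutine", listOf(Param("subroutineArg", Dt.UBYTE)), Dt.UBYTE)
        val rom = RomSubNode("foobar", 0xea31u, listOf(Param("derp", Dt.UWORD)), listOf(Dt.UBYTE))
        println("${sub.name} -> ${sub.returnType}")
        println("${rom.name} @ ${rom.address.toString(16)} -> ${rom.returnTypes}")
    }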
@@ -8,6 +8,7 @@ class PtAsmSub(
     val address: UInt?,
     val clobbers: Set<CpuRegister>,
     val parameters: List<Pair<PtSubroutineParameter, RegisterOrStatusflag>>,
+    val returnTypes: List<DataType>,
     val retvalRegisters: List<RegisterOrStatusflag>,
     val inline: Boolean,
     position: Position
@@ -82,6 +82,7 @@ class CodeGen(internal val program: PtProgram,
         val code = when(node) {
             is PtBlock -> translate(node)
             is PtSub -> translate(node)
+            is PtAsmSub -> translate(node)
             is PtScopeVarsDecls -> VmCodeChunk() // vars should be looked up via symbol table
             is PtVariable -> VmCodeChunk() // var should be looked up via symbol table
             is PtMemMapped -> VmCodeChunk() // memmapped var should be looked up via symbol table
@@ -103,7 +104,6 @@ class CodeGen(internal val program: PtProgram,
             is PtConditionalBranch -> translate(node)
             is PtInlineAssembly -> VmCodeChunk(VmCodeInlineAsm(node.assembly))
             is PtIncludeBinary -> VmCodeChunk(VmCodeInlineBinary(node.file, node.offset, node.length))
-            is PtAsmSub -> TODO("asmsub not yet supported on virtual machine target ${node.position}")
             is PtAddressOf,
             is PtContainmentCheck,
             is PtMemoryByte,
@@ -781,6 +781,17 @@ class CodeGen(internal val program: PtProgram,
         return code
     }
 
+    private fun translate(sub: PtAsmSub): VmCodeChunk {
+        val code = VmCodeChunk()
+        code += VmCodeComment("ASMSUB: ${sub.scopedName}")
+        code += VmCodeLabel(sub.scopedName)
+        for (child in sub.children) {
+            code += translateNode(child)
+        }
+        code += VmCodeComment("ASMSUB-END '${sub.name}'")
+        return code
+    }
+
     private fun translate(block: PtBlock): VmCodeChunk {
         val code = VmCodeChunk()
         code += VmCodeComment("BLOCK '${block.name}' addr=${block.address} lib=${block.library}")
@@ -1,7 +1,9 @@
 package prog8.codegen.virtual
 
+import prog8.code.StRomSub
 import prog8.code.StStaticVariable
 import prog8.code.StSub
+import prog8.code.StSubroutineParameter
 import prog8.code.ast.*
 import prog8.code.core.*
 import prog8.vm.Opcode
@@ -812,45 +814,52 @@ internal class ExpressionGen(private val codeGen: CodeGen) {
     }
 
     fun translate(fcall: PtFunctionCall, resultRegister: Int, resultFpRegister: Int): VmCodeChunk {
-        val subroutine = codeGen.symbolTable.flat.getValue(fcall.functionName) as StSub
-        val code = VmCodeChunk()
-        for ((arg, parameter) in fcall.args.zip(subroutine.parameters)) {
-            val paramDt = codeGen.vmType(parameter.type)
-            if(codeGen.isZero(arg)) {
-                if (paramDt == VmDataType.FLOAT) {
-                    val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
-                    code += VmCodeInstruction(Opcode.STOREZM, paramDt, value = mem)
-                } else {
-                    val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
-                    code += VmCodeInstruction(Opcode.STOREZM, paramDt, value = mem)
-                }
-            } else {
-                if (paramDt == VmDataType.FLOAT) {
-                    val argFpReg = codeGen.vmRegisters.nextFreeFloat()
-                    code += translateExpression(arg, -1, argFpReg)
-                    val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
-                    code += VmCodeInstruction(Opcode.STOREM, paramDt, fpReg1 = argFpReg, value = mem)
-                } else {
-                    val argReg = codeGen.vmRegisters.nextFree()
-                    code += translateExpression(arg, argReg, -1)
-                    val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
-                    code += VmCodeInstruction(Opcode.STOREM, paramDt, reg1 = argReg, value = mem)
-                }
-            }
-        }
-        code += VmCodeInstruction(Opcode.CALL, labelSymbol=fcall.functionName)
-        if(fcall.type==DataType.FLOAT) {
-            if (!fcall.void && resultFpRegister != 0) {
-                // Call convention: result value is in fr0, so put it in the required register instead.
-                code += VmCodeInstruction(Opcode.LOADR, VmDataType.FLOAT, fpReg1 = resultFpRegister, fpReg2 = 0)
-            }
-        } else {
-            if (!fcall.void && resultRegister != 0) {
-                // Call convention: result value is in r0, so put it in the required register instead.
-                code += VmCodeInstruction(Opcode.LOADR, codeGen.vmType(fcall.type), reg1 = resultRegister, reg2 = 0)
-            }
-        }
-        return code
+        when (val callTarget = codeGen.symbolTable.flat.getValue(fcall.functionName)) {
+            is StSub -> {
+                val code = VmCodeChunk()
+                for ((arg, parameter) in fcall.args.zip(callTarget.parameters)) {
+                    val paramDt = codeGen.vmType(parameter.type)
+                    if(codeGen.isZero(arg)) {
+                        if (paramDt == VmDataType.FLOAT) {
+                            val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
+                            code += VmCodeInstruction(Opcode.STOREZM, paramDt, value = mem)
+                        } else {
+                            val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
+                            code += VmCodeInstruction(Opcode.STOREZM, paramDt, value = mem)
+                        }
+                    } else {
+                        if (paramDt == VmDataType.FLOAT) {
+                            val argFpReg = codeGen.vmRegisters.nextFreeFloat()
+                            code += translateExpression(arg, -1, argFpReg)
+                            val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
+                            code += VmCodeInstruction(Opcode.STOREM, paramDt, fpReg1 = argFpReg, value = mem)
+                        } else {
+                            val argReg = codeGen.vmRegisters.nextFree()
+                            code += translateExpression(arg, argReg, -1)
+                            val mem = codeGen.allocations.get(fcall.functionName + parameter.name)
+                            code += VmCodeInstruction(Opcode.STOREM, paramDt, reg1 = argReg, value = mem)
+                        }
+                    }
+                }
+                code += VmCodeInstruction(Opcode.CALL, labelSymbol=fcall.functionName)
+                if(fcall.type==DataType.FLOAT) {
+                    if (!fcall.void && resultFpRegister != 0) {
+                        // Call convention: result value is in fr0, so put it in the required register instead.
+                        code += VmCodeInstruction(Opcode.LOADR, VmDataType.FLOAT, fpReg1 = resultFpRegister, fpReg2 = 0)
+                    }
+                } else {
+                    if (!fcall.void && resultRegister != 0) {
+                        // Call convention: result value is in r0, so put it in the required register instead.
+                        code += VmCodeInstruction(Opcode.LOADR, codeGen.vmType(fcall.type), reg1 = resultRegister, reg2 = 0)
+                    }
+                }
+                return code
+            }
+            is StRomSub -> {
+                TODO("call romsub $fcall")
+            }
+            else -> throw AssemblyError("invalid node type")
+        }
     }
 
 }
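A note on the Kotlin idiom used in the new call translation above: binding the when subject (when (val callTarget = ...)) lets each branch smart-cast the looked-up symbol, which is why the StSub branch can read callTarget.parameters directly while the StRomSub branch stays a TODO. A small self-contained sketch of the same dispatch pattern, with hypothetical stand-in types rather than the real prog8 classes:

    // Stand-in types, not the actual prog8 symbol table classes.
    interface StNode
    class StSub(val parameters: List<String>) : StNode
    class StRomSub(val address: UInt) : StNode

    fun translateCall(symbolTable: Map<String, StNode>, functionName: String): String =
        when (val callTarget = symbolTable.getValue(functionName)) {
            is StSub -> "CALL $functionName with ${callTarget.parameters.size} parameter(s)"   // smart-cast to StSub
            is StRomSub -> TODO("romsub call to ${callTarget.address.toString(16)} not encoded yet")
            else -> throw IllegalArgumentException("invalid node type")
        }

    fun main() {
        val st = mapOf("main.subroutine" to StSub(listOf("subroutineArg")))
        println(translateCall(st, "main.subroutine"))
    }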
@@ -285,6 +285,7 @@ class IntermediateAstMaker(val program: Program) {
             srcSub.asmAddress,
             srcSub.asmClobbers,
             params,
+            srcSub.returntypes,
             srcSub.asmReturnvaluesRegisters,
             srcSub.inline,
             srcSub.position)
@@ -303,7 +303,7 @@ internal class AstChecker(private val program: Program,
         if (subroutine.returntypes.isNotEmpty()) {
             // for asm subroutines with an address, no statement check is possible.
             if (subroutine.asmAddress == null && !subroutine.inline)
-                err("non-inline subroutine has result value(s) and thus must have at least one 'return' or 'goto' in it (or rts/jmp/bra in case of %asm)")
+                err("non-inline subroutine has result value(s) and thus must have at least one 'return' or 'goto' in it (or the assembler equivalent in case of %asm)")
         }
     }
 
@@ -6,10 +6,13 @@ import prog8.ast.expressions.CharLiteral
 import prog8.ast.expressions.IdentifierReference
 import prog8.ast.expressions.NumericLiteral
 import prog8.ast.statements.Directive
+import prog8.ast.statements.InlineAssembly
+import prog8.ast.statements.Subroutine
 import prog8.ast.statements.VarDeclOrigin
 import prog8.ast.walk.AstWalker
 import prog8.ast.walk.IAstModification
 import prog8.code.core.*
+import prog8.code.target.VMTarget
 
 
 internal fun Program.checkValid(errors: IErrorReporter, compilerOptions: CompilationOptions) {
@@ -165,3 +168,17 @@ internal fun IdentifierReference.isSubroutineParameter(program: Program): Boolean {
     }
     return false
 }
+
+internal fun Subroutine.hasRtsInAsm(compTarget: ICompilationTarget): Boolean {
+    val instructions =
+        if(compTarget.name == VMTarget.NAME)
+            listOf(" return", "\treturn", " jump", "\tjump", " jumpi", "\tjumpi")
+        else
+            listOf(" rti", "\trti", " rts", "\trts", " jmp", "\tjmp", " bra", "\tbra")
+    return statements
+        .asSequence()
+        .filterIsInstance<InlineAssembly>()
+        .any {
+            instructions.any { instr->instr in it.assembly }
+        }
+}
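The new hasRtsInAsm helper above scans a subroutine's inline assembly for a returning or jumping instruction, with a different mnemonic list for the virtual machine target than for the 6502 targets; the leading space or tab on every needle is presumably there so a mnemonic only matches as an instruction, not as part of a label or symbol name. A self-contained sketch of the same check, outside the compiler's AST classes:

    // Standalone sketch of the matching logic shown above (not the compiler's own function).
    fun hasReturnInAsm(assemblyBlocks: List<String>, virtualTarget: Boolean): Boolean {
        val needles =
            if (virtualTarget)
                listOf(" return", "\treturn", " jump", "\tjump", " jumpi", "\tjumpi")      // VM mnemonics
            else
                listOf(" rti", "\trti", " rts", "\trts", " jmp", "\tjmp", " bra", "\tbra") // 6502 mnemonics
        return assemblyBlocks.any { block -> needles.any { it in block } }
    }

    fun main() {
        println(hasReturnInAsm(listOf("    lda #0\n    rts"), virtualTarget = false))   // true
        println(hasReturnInAsm(listOf("    lda #0"), virtualTarget = false))            // false
        println(hasReturnInAsm(listOf("    return"), virtualTarget = true))             // true
    }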
@@ -135,7 +135,7 @@ internal class BeforeAsmAstChanger(val program: Program,
         // and if an assembly block doesn't contain a rts/rti, and some other situations.
         if (!subroutine.isAsmSubroutine) {
             if(subroutine.statements.isEmpty() ||
-                (subroutine.amountOfRtsInAsm() == 0
+                (!subroutine.hasRtsInAsm(options.compTarget)
                         && subroutine.statements.lastOrNull { it !is VarDecl } !is Return
                         && subroutine.statements.last() !is Subroutine
                         && subroutine.statements.last() !is Return)) {
@@ -161,10 +161,12 @@ internal class BeforeAsmAstChanger(val program: Program,
         }
 
         if (!subroutine.inline || !options.optimize) {
-            if (subroutine.isAsmSubroutine && subroutine.asmAddress==null && subroutine.amountOfRtsInAsm() == 0) {
+            if (subroutine.isAsmSubroutine && subroutine.asmAddress==null && !subroutine.hasRtsInAsm(options.compTarget)) {
                 // make sure the NOT INLINED asm subroutine actually has a rts at the end
                 // (non-asm routines get a Return statement as needed, above)
-                mods += IAstModification.InsertLast(InlineAssembly(" rts\n", Position.DUMMY), subroutine)
+                val instruction = if(options.compTarget.name==VMTarget.NAME) " return\n" else " rts\n"
+                mods += IAstModification.InsertLast(InlineAssembly(instruction, Position.DUMMY), subroutine)
+                println("adding returnstmt3 ${subroutine.hasRtsInAsm(options.compTarget)}") // TODO
             }
         }
 
@@ -38,11 +38,12 @@ internal class SymbolTableMaker: IAstVisitor {
     override fun visit(subroutine: Subroutine) {
         val parameters = subroutine.parameters.map { StSubroutineParameter(it.name, it.type) }
         if(subroutine.asmAddress!=null) {
-            val node = StRomSub(subroutine.name, subroutine.asmAddress!!, parameters, subroutine.position)
+            val node = StRomSub(subroutine.name, subroutine.asmAddress!!, parameters, subroutine.returntypes, subroutine.position)
             scopestack.peek().add(node)
             // st.origAstLinks[subroutine] = node
         } else {
-            val node = StSub(subroutine.name, parameters, subroutine.position)
+            val returnType = if(subroutine.returntypes.isEmpty()) null else subroutine.returntypes.first()
+            val node = StSub(subroutine.name, parameters, returnType, subroutine.position)
             scopestack.peek().add(node)
             scopestack.push(node)
             super.visit(subroutine)
@@ -10,6 +10,7 @@ import prog8.ast.expressions.IdentifierReference
 import prog8.ast.statements.*
 import prog8.code.core.DataType
 import prog8.code.target.C64Target
+import prog8.compiler.astprocessing.hasRtsInAsm
 import prog8tests.helpers.ErrorReporterForTests
 import prog8tests.helpers.compileText
 
@@ -123,7 +124,7 @@ class TestSubroutines: FunSpec({
         asmfunc.isAsmSubroutine shouldBe true
         asmfunc.statements.single() shouldBe instanceOf<InlineAssembly>()
         (asmfunc.statements.single() as InlineAssembly).assembly.trim() shouldBe "rts"
-        asmfunc.amountOfRtsInAsm() shouldBe 1
+        asmfunc.hasRtsInAsm(C64Target()) shouldBe true
         func.isAsmSubroutine shouldBe false
         withClue("str param should have been changed to uword") {
             asmfunc.parameters.single().type shouldBe DataType.UWORD
@@ -743,13 +743,6 @@ class Subroutine(override val name: String,
         return KeepAresult(false, saveAonReturn)
     }
 
-    fun amountOfRtsInAsm(): Int = statements
-        .asSequence()
-        .filter { it is InlineAssembly }
-        .map { (it as InlineAssembly).assembly }
-        .count { " rti" in it || "\trti" in it || " rts" in it || "\trts" in it || " jmp" in it || "\tjmp" in it || " bra" in it || "\tbra" in it }
-
-
     // code to provide the ability to reference asmsub parameters via qualified name:
     private val asmParamsDecls = mutableMapOf<String, VarDecl>()
 
@@ -20,10 +20,10 @@ Compiler:
 - vm Instructions needs to know what the read-registers/memory are, and what the write-register/memory is. This info is needed for more advanced optimizations and later code generation steps.
 - vm: implement remaining sin/cos functions in math.p8
 - vm: find a solution for the cx16.r0..r15 that "overlap" (r0, r0L, r0H etc) but in the vm each get their own separate variable location now
-- vm: somehow deal with asmsubs otherwise the vm IR can't fully encode all of prog8
+- vm: encode romsub & romsub call in VM IR (but just crash in virtualmachine itself.) ExpressionGen.kt
 - vm: how to remove all unused subroutines? (the 6502 assembly codegen relies on 64tass solve this for us)
 - vm: rather than being able to jump to any 'address' (IPTR), use 'blocks' that have entry and exit points -> even better dead code elimination possible too
-- vm: add ore optimizations in VmPeepholeOptimizer
+- vm: add more optimizations in VmPeepholeOptimizer
 - see if we can let for loops skip the loop if end<start, like other programming languages. Without adding a lot of code size/duplicating the loop condition.
   this is documented behavior to now loop around but it's too easy to forget about!
   Lot of work because of so many special cases in ForLoopsAsmgen.....
@@ -34,7 +34,6 @@ Compiler:
 - generate WASM from the new ast (or from vm code?) to run prog8 on a browser canvas?
 - createAssemblyAndAssemble(): make it possible to actually get rid of the VarDecl nodes by fixing the rest of the code mentioned there.
   but probably better to rewrite the 6502 codegen on top of the new Ast.
-- simplifyConditionalExpression() sometimes introduces needless assignment to r9 tempvar, can we detect & prevent this?
 - make it possible to use cpu opcodes such as 'nop' as variable names by prefixing all asm vars with something such as ``p8v_``? Or not worth it (most 3 letter opcodes as variables are nonsensical anyway)
   then we can get rid of the instruction lists in the machinedefinitions as well?
 - [problematic due to using 64tass:] add a compiler option to not remove unused subroutines. this allows for building library programs. But this won't work with 64tass's .proc ...
@@ -2,40 +2,27 @@
 %zeropage basicsafe
 
 main {
+    romsub $ea31 = foobar(uword derp @AY) -> ubyte @A
+    romsub $ea33 = foobar2() -> ubyte @A
+    romsub $ea33 = foobar3()
+
+    sub subroutine(ubyte subroutineArg) -> ubyte {
+        return subroutineArg+22
+    }
+
+    asmsub asmsubje(uword arg @AY) -> ubyte @A {
+        %asm {{
+            rts
+            return
+        }}
+    }
+
     sub start() {
-        bool aa = true
-        ubyte[] bb = [%0000, %1111]
-        uword w1 = %1000000000000001
-        uword w2 = %0000000000000010
+        ubyte @shared qq0 = subroutine(11)
+        ubyte @shared qq1 = foobar(12345)
+        ubyte @shared qq2 = foobar2()
+        foobar3()
 
-        if aa and w1 | w2
-            txt.print("ok")
-        else
-            txt.print("fail")
-        txt.spc()
-
-        if aa and w1 & w2
-            txt.print("fail")
-        else
-            txt.print("ok")
-        txt.spc()
-
-        if aa and bb[0] | %0100
-            txt.print("ok")
-        else
-            txt.print("fail")
-        txt.spc()
-
-        if aa and bb[0] & %0100
-            txt.print("fail")
-        else
-            txt.print("ok")
-        txt.spc()
-
-        aa = aa and bb[0] | %0100
-        txt.print_ub(aa)
-        txt.spc()
-        aa = aa and bb[0] & %0100
-        txt.print_ub(aa)
+        txt.print_ub(qq0)
     }
 }
@@ -33,9 +33,10 @@ main {
         for yy in 0 to 239 {
             for xx in 0 to 319 {
                 ubyte pixel = sys.gfx_getpixel(xx, yy)
-                if pixel>4
+                if pixel>4 {
                     pixel-=4
-                sys.gfx_plot(xx, yy, pixel)
+                    sys.gfx_plot(xx, yy, pixel)
+                }
             }
         }
     }
@@ -86,7 +86,11 @@ class Assembler {
                 println("warning: ignoring incbin command: $rest")
                 continue
             }
-            val opcode = Opcode.valueOf(instr.uppercase())
+            val opcode = try {
+                Opcode.valueOf(instr.uppercase())
+            } catch (ax: IllegalArgumentException) {
+                throw IllegalArgumentException("invalid vmasm instruction: $instr", ax)
+            }
             var type: VmDataType? = convertType(typestr)
             val formats = instructionFormats.getValue(opcode)
             val format: InstructionFormat
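The Assembler change above wraps Opcode.valueOf so that an unknown mnemonic reports which instruction was rejected instead of a bare IllegalArgumentException; presumably the kind of error hit when a 6502 mnemonic such as rts ends up in a %asm block compiled for the VM target. A minimal sketch of the same enum-parsing pattern, using a hypothetical enum rather than the VM's real Opcode:

    // Hypothetical enum standing in for the VM's Opcode enum.
    enum class Op { LOAD, STOREM, CALL, RETURN }

    fun parseOpcode(instr: String): Op =
        try {
            Op.valueOf(instr.uppercase())
        } catch (ax: IllegalArgumentException) {
            // re-throw with the offending mnemonic in the message, keeping the original cause
            throw IllegalArgumentException("invalid vmasm instruction: $instr", ax)
        }

    fun main() {
        println(parseOpcode("call"))   // CALL
        println(parseOpcode("rts"))    // throws: invalid vmasm instruction: rts
    }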