2018-01-08 00:51:36 +00:00
|
|
|
"""
|
2018-01-08 02:31:23 +00:00
|
|
|
Programming Language for 6502/6510 microprocessors, codename 'Sick'
|
2018-01-08 00:51:36 +00:00
|
|
|
This is the compiler of the IL65 code, that prepares the parse tree for code generation.
|
|
|
|
|
2018-01-08 02:31:23 +00:00
|
|
|
Written by Irmen de Jong (irmen@razorvine.net) - license: GNU GPL 3.0
|
2018-01-08 00:51:36 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
import re
|
2018-01-07 18:14:21 +00:00
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import linecache
|
2018-01-13 00:19:45 +00:00
|
|
|
from typing import Optional, Tuple, Set, Dict, List, Any, no_type_check
|
2018-01-09 01:40:32 +00:00
|
|
|
import attr
|
2018-01-09 23:44:11 +00:00
|
|
|
from .plyparse import parse_file, ParseError, Module, Directive, Block, Subroutine, Scope, VarDef, LiteralValue, \
|
|
|
|
SubCall, Goto, Return, Assignment, InlineAssembly, Register, Expression, ProgramFormat, ZpOptions,\
|
2018-01-17 00:26:58 +00:00
|
|
|
SymbolName, Dereference, AddressOf, IncrDecr, AstNode, datatype_of, coerce_constant_value, \
|
2018-01-18 22:33:02 +00:00
|
|
|
check_symbol_definition, UndefinedSymbolError, process_expression
|
2018-01-09 01:40:32 +00:00
|
|
|
from .plylex import SourceRef, print_bold
|
2018-01-13 22:49:57 +00:00
|
|
|
from .datatypes import DataType, VarType
|
2018-01-13 00:19:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
class CompileError(Exception):
    """Raised for compilation problems (e.g. zeropage allocation failures); converted to ParseError by callers."""
    pass
|
2018-01-07 18:14:21 +00:00
|
|
|
|
|
|
|
|
|
|
|
class PlyParser:
|
2018-01-17 00:26:58 +00:00
|
|
|
def __init__(self, imported_module: bool=False) -> None:
|
2018-01-07 18:14:21 +00:00
|
|
|
self.parse_errors = 0
|
2018-01-17 00:26:58 +00:00
|
|
|
self.imported_module = imported_module
|
2018-01-07 18:14:21 +00:00
|
|
|
|
|
|
|
    def parse_file(self, filename: str) -> Module:
        """Parse the given source file and run the preprocessing passes on it.

        Returns the Module node; on parse errors, prints them and exits the process.
        """
        print("parsing:", filename)
        module = None
        try:
            # parse, then run the tree passes in dependency order
            module = parse_file(filename, self.lexer_error)
            self.check_directives(module)
            self.process_imports(module)
            self.check_all_symbolnames(module)
            self.create_multiassigns(module)
            self.check_and_merge_zeropages(module)
            self.process_all_expressions(module)
            return module # XXX
            # if not self.parsing_import:
            #     # these shall only be done on the main module after all imports have been done:
            #     self.apply_directive_options(module)
            #     self.determine_subroutine_usage(module)
            #     self.semantic_check(module)
            #     self.allocate_zeropage_vars(module)
        except ParseError as x:
            self.handle_parse_error(x)
        # any errors accumulated (also from imports) abort the compilation
        if self.parse_errors:
            print_bold("\nNo output; there were {:d} errors.\n".format(self.parse_errors))
            raise SystemExit(1)
        return module
|
|
|
|
|
2018-01-07 22:45:42 +00:00
|
|
|
def lexer_error(self, sourceref: SourceRef, fmtstring: str, *args: str) -> None:
|
|
|
|
self.parse_errors += 1
|
2018-01-08 00:51:36 +00:00
|
|
|
print_bold("ERROR: {}: {}".format(sourceref, fmtstring.format(*args)))
|
2018-01-07 22:45:42 +00:00
|
|
|
|
2018-01-16 00:47:55 +00:00
|
|
|
def _check_last_statement_is_return(self, last_stmt: AstNode) -> None:
|
|
|
|
if isinstance(last_stmt, Subroutine):
|
|
|
|
return
|
|
|
|
if isinstance(last_stmt, Directive) and last_stmt.name == "noreturn":
|
|
|
|
return
|
|
|
|
if isinstance(last_stmt, InlineAssembly):
|
|
|
|
for line in reversed(last_stmt.assembly.splitlines()):
|
|
|
|
line = line.strip()
|
|
|
|
if line.startswith(";"):
|
|
|
|
continue
|
|
|
|
if "jmp " in line or "jmp\t" in line or "rts" in line or "rti" in line:
|
|
|
|
return
|
|
|
|
raise ParseError("last statement in a block/subroutine must be a return or goto, "
|
|
|
|
"(or %noreturn directive to silence this error)", last_stmt.sourceref)
|
|
|
|
|
2018-01-17 00:26:58 +00:00
|
|
|
# def semantic_check(self, module: Module) -> None:
|
|
|
|
# # perform semantic analysis / checks on the syntactic parse tree we have so far
|
|
|
|
# # (note: symbol names have already been checked to exist when we start this)
|
|
|
|
# for node, parent in module.all_nodes():
|
|
|
|
# previous_stmt = None
|
|
|
|
# if isinstance(node, SubCall):
|
|
|
|
# if isinstance(node.target, SymbolName):
|
|
|
|
# subdef = block.scope.lookup(stmt.target.target.name)
|
|
|
|
# self.check_subroutine_arguments(stmt, subdef)
|
|
|
|
# if isinstance(stmt, Subroutine):
|
|
|
|
# # the previous statement (if any) must be a Goto or Return
|
|
|
|
# if previous_stmt and not isinstance(previous_stmt, (Goto, Return, VarDef, Subroutine)):
|
|
|
|
# raise ParseError("statement preceding subroutine must be a goto or return or another subroutine", stmt.sourceref)
|
|
|
|
# if isinstance(previous_stmt, Subroutine):
|
|
|
|
# # the statement after a subroutine can not be some random executable instruction because it could not be reached
|
|
|
|
# if not isinstance(stmt, (Subroutine, Label, Directive, InlineAssembly, VarDef)):
|
|
|
|
# raise ParseError("statement following a subroutine can't be runnable code, "
|
|
|
|
# "at least use a label first", stmt.sourceref)
|
|
|
|
# previous_stmt = stmt
|
|
|
|
# if isinstance(stmt, IncrDecr):
|
|
|
|
# if isinstance(stmt.target, SymbolName):
|
|
|
|
# symdef = block.scope.lookup(stmt.target.name)
|
|
|
|
# if isinstance(symdef, VarDef) and symdef.vartype == VarType.CONST:
|
|
|
|
# raise ParseError("cannot modify a constant", stmt.sourceref)
|
|
|
|
#
|
|
|
|
# if parent and block.name != "ZP" and not isinstance(stmt, (Return, Goto)):
|
|
|
|
# self._check_last_statement_is_return(stmt)
|
2018-01-16 00:47:55 +00:00
|
|
|
|
|
|
|
def check_subroutine_arguments(self, call: SubCall, subdef: Subroutine) -> None:
|
|
|
|
# @todo must be moved to expression processing, or, restructure whole AST tree walking to make it easier to walk over everything
|
|
|
|
if len(call.arguments) != len(subdef.param_spec):
|
|
|
|
raise ParseError("invalid number of arguments ({:d}, required: {:d})"
|
|
|
|
.format(len(call.arguments), len(subdef.param_spec)), call.sourceref)
|
|
|
|
for arg, param in zip(call.arguments, subdef.param_spec):
|
|
|
|
if arg.name and arg.name != param[0]:
|
|
|
|
raise ParseError("parameter name mismatch", arg.sourceref)
|
2018-01-14 17:02:39 +00:00
|
|
|
|
2018-01-09 01:40:32 +00:00
|
|
|
    def check_and_merge_zeropages(self, module: Module) -> None:
        """Merge all ZP blocks into a single one, placed first in the module."""
        # merge all ZP blocks into one
        # XXX done: converted to new nodes
        zeropage = None
        for block in module.all_nodes([Block]):
            if block.name == "ZP":
                if zeropage:
                    # merge other ZP block into first ZP block
                    for node in block.nodes:
                        if isinstance(node, Directive):
                            # directives go to the front of the merged scope
                            zeropage.scope.add_node(node, 0)
                        elif isinstance(node, VarDef):
                            zeropage.scope.add_node(node)
                        else:
                            raise ParseError("only variables and directives allowed in zeropage block", node.sourceref)
                else:
                    zeropage = block
                # every ZP block (including the first) is detached here;
                # the surviving one is re-inserted at the top below
                block.parent.remove_node(block)
        if zeropage:
            # add the zero page again, as the very first block
            module.scope.add_node(zeropage, 0)
|
|
|
|
|
2018-01-13 00:19:45 +00:00
|
|
|
def allocate_zeropage_vars(self, module: Module) -> None:
|
|
|
|
# allocate zeropage variables to the available free zp addresses
|
|
|
|
if not module.scope.nodes:
|
|
|
|
return
|
|
|
|
zpnode = module.scope.nodes[0]
|
2018-01-13 01:13:32 +00:00
|
|
|
if zpnode.name != "ZP":
|
|
|
|
return
|
2018-01-13 00:19:45 +00:00
|
|
|
zeropage = Zeropage(module.zp_options)
|
|
|
|
for vardef in zpnode.scope.filter_nodes(VarDef):
|
2018-01-13 22:49:57 +00:00
|
|
|
if vardef.datatype.isstring():
|
|
|
|
raise ParseError("cannot put strings in the zeropage", vardef.sourceref)
|
2018-01-13 00:19:45 +00:00
|
|
|
try:
|
2018-01-13 21:42:07 +00:00
|
|
|
if vardef.vartype == VarType.VAR:
|
|
|
|
vardef.zp_address = zeropage.allocate(vardef)
|
2018-01-13 00:19:45 +00:00
|
|
|
except CompileError as x:
|
|
|
|
raise ParseError(str(x), vardef.sourceref)
|
|
|
|
|
2018-01-17 00:26:58 +00:00
|
|
|
def check_all_symbolnames(self, module: Module) -> None:
|
|
|
|
for node in module.all_nodes([SymbolName]):
|
|
|
|
check_symbol_definition(node.name, node.my_scope(), node.sourceref)
|
|
|
|
|
2018-01-18 22:33:02 +00:00
|
|
|
    def process_all_expressions(self, module: Module) -> None:
        """Simplify all expressions (constant folding etc.) and coerce constant values.

        Also verifies that fully-qualified block names are unique across the module.
        """
        # process/simplify all expressions (constant folding etc)
        encountered_blocks = set()
        for node in module.all_nodes():
            if isinstance(node, Block):
                parentname = (node.parent.name + ".") if node.parent else ""
                blockname = parentname + node.name
                if blockname in encountered_blocks:
                    raise ValueError("block names not unique:", blockname)
                encountered_blocks.add(blockname)
            elif isinstance(node, Expression):
                try:
                    process_expression(node, node.my_scope(), node.sourceref)
                except ParseError:
                    raise
                except Exception as x:
                    # anything else is a bug in the expression processor itself
                    self.handle_internal_error(x, "process_expressions of node {}".format(node))
            elif isinstance(node, IncrDecr) and node.howmuch not in (0, 1):
                # coerce the step amount to the datatype of the target
                _, node.howmuch = coerce_constant_value(datatype_of(node.target, node.my_scope()), node.howmuch, node.sourceref)
            elif isinstance(node, Assignment):
                lvalue_types = set(datatype_of(lv, node.my_scope()) for lv in node.left.nodes)
                if len(lvalue_types) == 1:
                    _, node.right = coerce_constant_value(lvalue_types.pop(), node.right, node.sourceref)
                else:
                    # mixed lvalue types: only verify that each coercion is possible
                    for lv_dt in lvalue_types:
                        coerce_constant_value(lv_dt, node.right, node.sourceref)
|
2018-01-08 02:31:23 +00:00
|
|
|
|
2018-01-08 00:51:36 +00:00
|
|
|
def create_multiassigns(self, module: Module) -> None:
|
|
|
|
# create multi-assign statements from nested assignments (A=B=C=5),
|
|
|
|
# and optimize TargetRegisters down to single Register if it's just one register.
|
2018-01-17 00:26:58 +00:00
|
|
|
# XXX done: converted to new nodes
|
2018-01-08 00:51:36 +00:00
|
|
|
def reduce_right(assign: Assignment) -> Assignment:
|
|
|
|
if isinstance(assign.right, Assignment):
|
|
|
|
right = reduce_right(assign.right)
|
2018-01-08 02:31:23 +00:00
|
|
|
assign.left.extend(right.left)
|
2018-01-08 00:51:36 +00:00
|
|
|
assign.right = right.right
|
|
|
|
return assign
|
|
|
|
|
2018-01-17 00:26:58 +00:00
|
|
|
for node in module.all_nodes([Assignment]):
|
|
|
|
if isinstance(node.right, Assignment):
|
|
|
|
multi = reduce_right(node)
|
|
|
|
assert multi is node and len(multi.left) > 1 and not isinstance(multi.right, Assignment)
|
2018-01-08 00:51:36 +00:00
|
|
|
|
2018-01-09 01:40:32 +00:00
|
|
|
def apply_directive_options(self, module: Module) -> None:
|
|
|
|
def set_save_registers(scope: Scope, save_dir: Directive) -> None:
|
|
|
|
if not scope:
|
|
|
|
return
|
|
|
|
if len(save_dir.args) > 1:
|
2018-01-09 23:44:11 +00:00
|
|
|
raise ParseError("expected zero or one directive argument", save_dir.sourceref)
|
2018-01-09 01:40:32 +00:00
|
|
|
if save_dir.args:
|
2018-01-09 23:44:11 +00:00
|
|
|
if save_dir.args[0] in ("yes", "true", True):
|
2018-01-09 01:40:32 +00:00
|
|
|
scope.save_registers = True
|
2018-01-09 23:44:11 +00:00
|
|
|
elif save_dir.args[0] in ("no", "false", False):
|
2018-01-09 01:40:32 +00:00
|
|
|
scope.save_registers = False
|
|
|
|
else:
|
|
|
|
raise ParseError("invalid directive args", save_dir.sourceref)
|
|
|
|
else:
|
|
|
|
scope.save_registers = True
|
|
|
|
|
|
|
|
for block, parent in module.all_scopes():
|
|
|
|
if isinstance(block, Module):
|
|
|
|
# process the module's directives
|
|
|
|
for directive in block.scope.filter_nodes(Directive):
|
|
|
|
if directive.name == "output":
|
|
|
|
if len(directive.args) != 1 or not isinstance(directive.args[0], str):
|
2018-01-09 23:44:11 +00:00
|
|
|
raise ParseError("expected one str directive argument", directive.sourceref)
|
2018-01-09 01:40:32 +00:00
|
|
|
if directive.args[0] == "raw":
|
|
|
|
block.format = ProgramFormat.RAW
|
|
|
|
block.address = 0xc000
|
|
|
|
elif directive.args[0] == "prg":
|
|
|
|
block.format = ProgramFormat.PRG
|
2018-01-09 22:28:39 +00:00
|
|
|
block.address = 0xc000
|
2018-01-09 01:40:32 +00:00
|
|
|
elif directive.args[0] == "basic":
|
|
|
|
block.format = ProgramFormat.BASIC
|
|
|
|
block.address = 0x0801
|
|
|
|
else:
|
|
|
|
raise ParseError("invalid directive args", directive.sourceref)
|
|
|
|
elif directive.name == "address":
|
2018-01-15 01:22:14 +00:00
|
|
|
if len(directive.args) != 1 or type(directive.args[0]) is not int:
|
2018-01-09 23:44:11 +00:00
|
|
|
raise ParseError("expected one integer directive argument", directive.sourceref)
|
2018-01-09 01:40:32 +00:00
|
|
|
if block.format == ProgramFormat.BASIC:
|
|
|
|
raise ParseError("basic cannot have a custom load address", directive.sourceref)
|
|
|
|
block.address = directive.args[0]
|
|
|
|
attr.validate(block)
|
|
|
|
elif directive.name in "import":
|
|
|
|
pass # is processed earlier
|
|
|
|
elif directive.name == "zp":
|
|
|
|
if len(directive.args) not in (1, 2) or set(directive.args) - {"clobber", "restore"}:
|
|
|
|
raise ParseError("invalid directive args", directive.sourceref)
|
|
|
|
if "clobber" in directive.args and "restore" in directive.args:
|
|
|
|
module.zp_options = ZpOptions.CLOBBER_RESTORE
|
|
|
|
elif "clobber" in directive.args:
|
|
|
|
module.zp_options = ZpOptions.CLOBBER
|
|
|
|
elif "restore" in directive.args:
|
|
|
|
raise ParseError("invalid directive args", directive.sourceref)
|
|
|
|
elif directive.name == "saveregisters":
|
|
|
|
set_save_registers(block.scope, directive)
|
|
|
|
else:
|
|
|
|
raise NotImplementedError(directive.name)
|
|
|
|
elif isinstance(block, Block):
|
|
|
|
# process the block's directives
|
|
|
|
for directive in block.scope.filter_nodes(Directive):
|
|
|
|
if directive.name == "saveregisters":
|
|
|
|
set_save_registers(block.scope, directive)
|
2018-01-14 23:20:36 +00:00
|
|
|
elif directive.name in ("breakpoint", "asmbinary", "asminclude", "noreturn"):
|
2018-01-09 01:40:32 +00:00
|
|
|
continue
|
|
|
|
else:
|
|
|
|
raise NotImplementedError(directive.name)
|
|
|
|
elif isinstance(block, Subroutine):
|
|
|
|
if block.scope:
|
|
|
|
# process the sub's directives
|
|
|
|
for directive in block.scope.filter_nodes(Directive):
|
|
|
|
if directive.name == "saveregisters":
|
|
|
|
set_save_registers(block.scope, directive)
|
2018-01-14 23:20:36 +00:00
|
|
|
elif directive.name in ("breakpoint", "asmbinary", "asminclude", "noreturn"):
|
2018-01-09 01:40:32 +00:00
|
|
|
continue
|
|
|
|
else:
|
|
|
|
raise NotImplementedError(directive.name)
|
|
|
|
|
2018-01-08 02:31:23 +00:00
|
|
|
    @no_type_check
    def determine_subroutine_usage(self, module: Module) -> None:
        """Collect, per scope, which subroutines are referenced and from which source locations."""
        module.subroutine_usage.clear()
        for block, parent in module.all_scopes():
            # dispatch on statement type; each helper records usages it finds
            for node in block.nodes:
                if isinstance(node, InlineAssembly):
                    self._get_subroutine_usages_from_asm(module.subroutine_usage, node, block.scope)
                elif isinstance(node, SubCall):
                    self._get_subroutine_usages_from_subcall(module.subroutine_usage, node, block.scope)
                elif isinstance(node, Goto):
                    self._get_subroutine_usages_from_goto(module.subroutine_usage, node, block.scope)
                elif isinstance(node, Return):
                    self._get_subroutine_usages_from_return(module.subroutine_usage, node, block.scope)
                elif isinstance(node, Assignment):
                    self._get_subroutine_usages_from_assignment(module.subroutine_usage, node, block.scope)
        # temporary debug dump of the collected usages
        print("----------SUBROUTINES IN USE-------------")  # XXX
        import pprint
        pprint.pprint(module.subroutine_usage)  # XXX
        print("----------/SUBROUTINES IN USE-------------")  # XXX
|
2018-01-08 00:51:36 +00:00
|
|
|
|
2018-01-09 23:44:11 +00:00
|
|
|
def _get_subroutine_usages_from_subcall(self, usages: Dict[Tuple[str, str], Set[str]],
|
|
|
|
subcall: SubCall, parent_scope: Scope) -> None:
|
2018-01-15 20:12:17 +00:00
|
|
|
target = subcall.target.target
|
|
|
|
if isinstance(target, SymbolName):
|
|
|
|
usages[(parent_scope.name, target.name)].add(str(subcall.sourceref))
|
2018-01-08 00:51:36 +00:00
|
|
|
for arg in subcall.arguments:
|
2018-01-09 23:44:11 +00:00
|
|
|
self._get_subroutine_usages_from_expression(usages, arg.value, parent_scope)
|
2018-01-08 00:51:36 +00:00
|
|
|
|
2018-01-09 23:44:11 +00:00
|
|
|
    def _get_subroutine_usages_from_expression(self, usages: Dict[Tuple[str, str], Set[str]],
                                               expr: Any, parent_scope: Scope) -> None:
        """Recursively scan an expression tree and record references to subroutines."""
        if expr is None or isinstance(expr, (int, str, float, bool, Register)):
            return  # plain constants / registers cannot reference a subroutine
        elif isinstance(expr, SubCall):
            self._get_subroutine_usages_from_subcall(usages, expr, parent_scope)
        elif isinstance(expr, Expression):
            # NOTE(review): this check precedes the LiteralValue/Dereference/AddressOf
            # branches below; if those node types subclass Expression they are never
            # reached here — confirm against the plyparse class hierarchy
            self._get_subroutine_usages_from_expression(usages, expr.left, parent_scope)
            self._get_subroutine_usages_from_expression(usages, expr.right, parent_scope)
        elif isinstance(expr, LiteralValue):
            return
        elif isinstance(expr, Dereference):
            return self._get_subroutine_usages_from_expression(usages, expr.operand, parent_scope)
        elif isinstance(expr, AddressOf):
            return self._get_subroutine_usages_from_expression(usages, expr.name, parent_scope)
        elif isinstance(expr, SymbolName):
            try:
                symbol = parent_scope.lookup(expr.name)
                if isinstance(symbol, Subroutine):
                    usages[(parent_scope.name, expr.name)].add(str(expr.sourceref))
            except UndefinedSymbolError:
                pass  # undefined names are reported elsewhere; not a usage
        else:
            raise TypeError("unknown expr type to scan for sub usages", expr, expr.sourceref)
|
2018-01-08 00:51:36 +00:00
|
|
|
|
2018-01-09 23:44:11 +00:00
|
|
|
def _get_subroutine_usages_from_goto(self, usages: Dict[Tuple[str, str], Set[str]],
|
|
|
|
goto: Goto, parent_scope: Scope) -> None:
|
2018-01-15 20:12:17 +00:00
|
|
|
target = goto.target.target
|
|
|
|
if isinstance(target, SymbolName):
|
|
|
|
usages[(parent_scope.name, target.name)].add(str(goto.sourceref))
|
2018-01-09 23:44:11 +00:00
|
|
|
self._get_subroutine_usages_from_expression(usages, goto.condition, parent_scope)
|
2018-01-08 00:51:36 +00:00
|
|
|
|
2018-01-09 23:44:11 +00:00
|
|
|
def _get_subroutine_usages_from_return(self, usages: Dict[Tuple[str, str], Set[str]],
|
|
|
|
returnnode: Return, parent_scope: Scope) -> None:
|
2018-01-08 00:51:36 +00:00
|
|
|
# node.value_A (expression), value_X (expression), value_Y (expression)
|
2018-01-09 23:44:11 +00:00
|
|
|
self._get_subroutine_usages_from_expression(usages, returnnode.value_A, parent_scope)
|
|
|
|
self._get_subroutine_usages_from_expression(usages, returnnode.value_X, parent_scope)
|
|
|
|
self._get_subroutine_usages_from_expression(usages, returnnode.value_Y, parent_scope)
|
2018-01-08 00:51:36 +00:00
|
|
|
|
2018-01-09 23:44:11 +00:00
|
|
|
def _get_subroutine_usages_from_assignment(self, usages: Dict[Tuple[str, str], Set[str]],
|
|
|
|
assignment: Assignment, parent_scope: Scope) -> None:
|
2018-01-08 00:51:36 +00:00
|
|
|
# node.right (expression, or another Assignment)
|
|
|
|
if isinstance(assignment.right, Assignment):
|
2018-01-09 23:44:11 +00:00
|
|
|
self._get_subroutine_usages_from_assignment(usages, assignment.right, parent_scope)
|
2018-01-08 00:51:36 +00:00
|
|
|
else:
|
2018-01-09 23:44:11 +00:00
|
|
|
self._get_subroutine_usages_from_expression(usages, assignment.right, parent_scope)
|
2018-01-08 00:51:36 +00:00
|
|
|
|
2018-01-09 23:44:11 +00:00
|
|
|
def _get_subroutine_usages_from_asm(self, usages: Dict[Tuple[str, str], Set[str]],
|
2018-01-08 00:51:36 +00:00
|
|
|
asmnode: InlineAssembly, parent_scope: Scope) -> None:
|
|
|
|
# asm can refer to other symbols as well, track subroutine usage
|
|
|
|
for line in asmnode.assembly.splitlines():
|
|
|
|
splits = line.split(maxsplit=1)
|
|
|
|
if len(splits) == 2:
|
|
|
|
for match in re.finditer(r"(?P<symbol>[a-zA-Z_$][a-zA-Z0-9_\.]+)", splits[1]):
|
|
|
|
name = match.group("symbol")
|
2018-01-13 22:49:57 +00:00
|
|
|
if name[0] == '$':
|
2018-01-08 00:51:36 +00:00
|
|
|
continue
|
|
|
|
try:
|
2018-01-16 00:47:55 +00:00
|
|
|
symbol = parent_scope.lookup(name)
|
|
|
|
except UndefinedSymbolError:
|
2018-01-08 00:51:36 +00:00
|
|
|
pass
|
|
|
|
else:
|
|
|
|
if isinstance(symbol, Subroutine):
|
2018-01-13 22:49:57 +00:00
|
|
|
if symbol.scope:
|
|
|
|
namespace = symbol.scope.parent_scope.name
|
|
|
|
else:
|
|
|
|
namespace, name = name.rsplit(".", maxsplit=2)
|
|
|
|
usages[(namespace, symbol.name)].add(str(asmnode.sourceref))
|
2018-01-07 18:14:21 +00:00
|
|
|
|
|
|
|
def check_directives(self, module: Module) -> None:
|
2018-01-17 00:26:58 +00:00
|
|
|
# XXX done: converted to new nodes
|
|
|
|
imports = set() # type: Set[str]
|
|
|
|
for node in module.all_nodes():
|
|
|
|
if isinstance(node, Directive):
|
|
|
|
assert isinstance(node.parent, Scope)
|
|
|
|
if node.parent.level == "module":
|
|
|
|
if node.name not in {"output", "zp", "address", "import", "saveregisters", "noreturn"}:
|
|
|
|
raise ParseError("invalid directive in module", node.sourceref)
|
|
|
|
if node.name == "import":
|
|
|
|
if imports & set(node.args):
|
|
|
|
raise ParseError("duplicate import", node.sourceref)
|
|
|
|
imports |= set(node.args)
|
|
|
|
else:
|
|
|
|
if node.name not in {"asmbinary", "asminclude", "breakpoint", "saveregisters", "noreturn"}:
|
|
|
|
raise ParseError("invalid directive in " + node.parent.__class__.__name__.lower(), node.sourceref)
|
|
|
|
if node.name == "saveregisters":
|
|
|
|
# it should be the first node in the scope
|
|
|
|
if node.parent.nodes[0] is not node:
|
|
|
|
raise ParseError("saveregisters directive must be first in this scope", node.sourceref)
|
2018-01-07 18:14:21 +00:00
|
|
|
|
|
|
|
    def process_imports(self, module: Module) -> None:
        """Recursively parse all %import'ed modules, plus the compiler support library."""
        # (recursively) imports the modules
        # XXX done: converted to new nodes
        imported = []
        for directive in module.all_nodes([Directive]):
            if directive.name == "import":
                if len(directive.args) < 1:
                    raise ParseError("missing argument(s) for import directive", directive.sourceref)
                for arg in directive.args:
                    filename = self.find_import_file(arg, directive.sourceref.file)
                    if not filename:
                        raise ParseError("imported file not found", directive.sourceref)
                    imported_module, import_parse_errors = self.import_file(filename)
                    imported_module.scope.parent_scope = module.scope
                    imported.append(imported_module)
                    # error counts from sub-parsers accumulate on this parser
                    self.parse_errors += import_parse_errors
        if not self.imported_module:
            # compiler support library is always imported (in main parser)
            filename = self.find_import_file("il65lib", module.sourceref.file)
            if filename:
                imported_module, import_parse_errors = self.import_file(filename)
                imported_module.scope.parent_scope = module.scope
                imported.append(imported_module)
                self.parse_errors += import_parse_errors
            else:
                raise FileNotFoundError("missing il65lib")
        # XXX append the imported module's contents (blocks) at the end of the current module
        # for block in (node for imported_module in imported
        #               for node in imported_module.scope.nodes
        #               if isinstance(node, Block)):
        #     module.scope.add_node(block)
|
2018-01-07 18:14:21 +00:00
|
|
|
|
2018-01-07 22:45:42 +00:00
|
|
|
def import_file(self, filename: str) -> Tuple[Module, int]:
|
2018-01-17 00:26:58 +00:00
|
|
|
sub_parser = PlyParser(imported_module=True)
|
2018-01-07 22:45:42 +00:00
|
|
|
return sub_parser.parse_file(filename), sub_parser.parse_errors
|
2018-01-07 18:14:21 +00:00
|
|
|
|
|
|
|
def find_import_file(self, modulename: str, sourcefile: str) -> Optional[str]:
|
2018-01-07 22:45:42 +00:00
|
|
|
candidates = [modulename+".ill", modulename]
|
2018-01-07 18:14:21 +00:00
|
|
|
filename_at_source_location = os.path.join(os.path.split(sourcefile)[0], modulename)
|
2018-01-07 22:45:42 +00:00
|
|
|
if filename_at_source_location not in candidates:
|
|
|
|
candidates.append(filename_at_source_location+".ill")
|
|
|
|
candidates.append(filename_at_source_location)
|
|
|
|
filename_at_libs_location = os.path.join(os.path.split(__file__)[0], "lib", modulename)
|
|
|
|
if filename_at_libs_location not in candidates:
|
|
|
|
candidates.append(filename_at_libs_location+".ill")
|
|
|
|
candidates.append(filename_at_libs_location)
|
2018-01-07 18:14:21 +00:00
|
|
|
for filename in candidates:
|
|
|
|
if os.path.isfile(filename):
|
|
|
|
return filename
|
|
|
|
return None
|
|
|
|
|
|
|
|
    def handle_parse_error(self, exc: ParseError) -> None:
        """Print a parse error (with offending source line and column marker) and count it."""
        self.parse_errors += 1
        out = sys.stdout
        if out.isatty():
            print("\x1b[1m", file=out)  # switch terminal to bold
        if self.imported_module:
            print("Error (in imported file):", str(exc), file=out)
        else:
            print("Error:", str(exc), file=out)
        # show the offending source line, if we can retrieve it
        sourcetext = linecache.getline(exc.sourceref.file, exc.sourceref.line).rstrip()
        if sourcetext:
            print(" " + sourcetext.expandtabs(8), file=out)
            if exc.sourceref.column:
                # caret marker under the error column
                print(' ' * (1+exc.sourceref.column) + '^', file=out)
        if out.isatty():
            print("\x1b[0m", file=out, end="", flush=True)  # bold off
        raise exc # XXX temporary to see where the error occurred
|
2018-01-07 18:14:21 +00:00
|
|
|
|
2018-01-11 23:55:47 +00:00
|
|
|
def handle_internal_error(self, exc: Exception, msg: str="") -> None:
|
2018-01-14 23:20:36 +00:00
|
|
|
out = sys.stdout
|
|
|
|
if out.isatty():
|
|
|
|
print("\x1b[1m", file=out)
|
|
|
|
print("\nERROR: internal parser error: ", exc, file=out)
|
2018-01-11 23:55:47 +00:00
|
|
|
if msg:
|
|
|
|
print(" Message:", msg, end="\n\n")
|
2018-01-14 23:20:36 +00:00
|
|
|
if out.isatty():
|
|
|
|
print("\x1b[0m", file=out, end="", flush=True)
|
2018-01-11 23:55:47 +00:00
|
|
|
raise exc
|
|
|
|
|
2018-01-07 18:14:21 +00:00
|
|
|
|
2018-01-13 00:19:45 +00:00
|
|
|
class Zeropage:
    """Allocator for zeropage addresses on the C-64.

    Tracks the free zeropage bytes (depending on the chosen ZpOptions) and hands
    out addresses for variables, preferring placements that keep free runs intact.
    """
    # scratch addresses reserved for compiler-internal routines
    SCRATCH_B1 = 0x02
    SCRATCH_B2 = 0x03
    SCRATCH_W1 = 0xfb     # $fb/$fc
    SCRATCH_W2 = 0xfd     # $fd/$fe

    def __init__(self, options: ZpOptions) -> None:
        self.free = []  # type: List[int]
        self.allocations = {}   # type: Dict[int, Tuple[str, DataType]]
        if options in (ZpOptions.CLOBBER_RESTORE, ZpOptions.CLOBBER):
            # clobber the zp, more free storage, yay!
            self.free = list(range(0x04, 0xfb)) + [0xff]
            # these locations are written by the IRQ handler, so keep off them
            for updated_by_irq in [0xa0, 0xa1, 0xa2, 0x91, 0xc0, 0xc5, 0xcb, 0xf5, 0xf6]:
                self.free.remove(updated_by_irq)
        else:
            # these are valid for the C-64 (when no RS232 I/O is performed):
            # ($02, $03, $fb-$fc, $fd-$fe are reserved as scratch addresses for various routines)
            self.free = [0x04, 0x05, 0x06, 0x2a, 0x52, 0xf7, 0xf8, 0xf9, 0xfa]
        assert self.SCRATCH_B1 not in self.free
        assert self.SCRATCH_B2 not in self.free
        assert self.SCRATCH_W1 not in self.free
        assert self.SCRATCH_W2 not in self.free

    def allocate(self, vardef: VarDef) -> int:
        """Allocate zeropage space for the given VAR definition; return its address.

        Raises CompileError when the datatype cannot go in the zeropage or when
        there is no sufficiently large run of free bytes left.
        """
        assert not vardef.name or vardef.name not in {a[0] for a in self.allocations.values()}, "var name is not unique"
        assert vardef.vartype == VarType.VAR, "can only allocate var"

        def sequential_free(location: int) -> bool:
            # are 'size' consecutive bytes free starting at location?
            return all(location + i in self.free for i in range(size))

        def lone_byte(location: int) -> bool:
            # a free byte whose neighbours are both unavailable
            return (location-1) not in self.free and (location+1) not in self.free and location in self.free

        def make_allocation(location: int) -> int:
            # claim the bytes and register the allocation
            for loc in range(location, location + size):
                self.free.remove(loc)
            self.allocations[location] = (vardef.name or "<unnamed>", vardef.datatype)
            return location

        # determine the required size in bytes (warn on anything larger than a word)
        if vardef.datatype == DataType.BYTE:
            size = 1
        elif vardef.datatype == DataType.WORD:
            size = 2
        elif vardef.datatype == DataType.FLOAT:
            print_bold("warning: {}: allocating a large datatype in zeropage".format(vardef.sourceref))
            size = 5
        elif vardef.datatype == DataType.BYTEARRAY:
            print_bold("warning: {}: allocating a large datatype in zeropage".format(vardef.sourceref))
            size = vardef.size[0]
        elif vardef.datatype == DataType.WORDARRAY:
            print_bold("warning: {}: allocating a large datatype in zeropage".format(vardef.sourceref))
            size = vardef.size[0] * 2
        elif vardef.datatype == DataType.MATRIX:
            print_bold("warning: {}: allocating a large datatype in zeropage".format(vardef.sourceref))
            size = vardef.size[0] * vardef.size[1]
        elif vardef.datatype.isstring():
            print_bold("warning: {}: allocating a large datatype in zeropage".format(vardef.sourceref))
            size = vardef.size[0]
        else:
            raise CompileError("cannot put datatype {:s} in ZP".format(vardef.datatype.name))
        if len(self.free) > 0:
            if size == 1:
                # prefer a lone byte so larger free runs stay unbroken
                for candidate in range(min(self.free), max(self.free)+1):
                    if lone_byte(candidate):
                        return make_allocation(candidate)
                return make_allocation(self.free[0])
            # first-fit search for a run of 'size' consecutive free bytes
            for candidate in range(min(self.free), max(self.free)+1):
                if sequential_free(candidate):
                    return make_allocation(candidate)
        raise CompileError("ERROR: no more free space in ZP to allocate {:d} sequential bytes".format(size))

    def available(self) -> int:
        """Return the number of free zeropage bytes remaining."""
        return len(self.free)
|