mirror of
https://github.com/catseye/SixtyPical.git
synced 2025-01-10 02:29:23 +00:00
Beginnings of modularity.
This commit is contained in:
parent
d9e625db30
commit
6744ad29a9
@ -84,7 +84,7 @@ is probably NP-complete. But doing it adequately is probably not that hard.
|
||||
* Tests, and implementation, ensuring a routine can be assigned to a vector of "wider" type
|
||||
* Related: can we simply view a (small) part of a buffer as a byte table? If not, why not?
|
||||
* Related: add constant to buffer to get new buffer. (Or to table, but... well, maybe.)
|
||||
* Check that the buffer being read or written to through pointer, appears in appropriate inputs or outputs set.
|
||||
* Check that the buffer being read or written to through pointer, appears in appropriate inputs or outputs set.
|
||||
(Associate each pointer with the buffer it points into.)
|
||||
* `static` pointers -- currently not possible because pointers must be zero-page, thus `@`, thus uninitialized.
|
||||
* Question the value of the "consistent initialization" principle for `if` statement analysis.
|
||||
@ -94,6 +94,5 @@ is probably NP-complete. But doing it adequately is probably not that hard.
|
||||
* Possibly `ld x, [ptr] + y`, possibly `st x, [ptr] + y`.
|
||||
* Maybe even `copy [ptra] + y, [ptrb] + y`, which can be compiled to indirect LDA then indirect STA!
|
||||
* Optimize `ld a, z` and `st a, z` to zero-page operations if address of z < 256.
|
||||
* Include files?
|
||||
|
||||
[VICE]: http://vice-emu.sourceforge.net/
|
||||
|
@ -18,26 +18,42 @@ from pprint import pprint
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from sixtypical.parser import Parser
|
||||
from sixtypical.parser import Parser, ParsingContext
|
||||
from sixtypical.analyzer import Analyzer
|
||||
from sixtypical.emitter import Emitter, Byte, Word
|
||||
from sixtypical.compiler import Compiler
|
||||
|
||||
|
||||
def merge_programs(programs):
    """Combine several parsed programs into a single Program.

    Assumes that the programs do not have any conflicts; their defns
    and routines are simply concatenated in input order.
    """
    # Imported locally, matching the original — presumably to avoid a
    # circular import at module load time; TODO confirm.
    from sixtypical.ast import Program

    merged = Program(1, defns=[], routines=[])
    for program in programs:
        merged.defns.extend(program.defns)
        merged.routines.extend(program.routines)
    return merged
|
||||
|
||||
|
||||
def process_input_files(filenames, options):
|
||||
context = ParsingContext()
|
||||
|
||||
programs = []
|
||||
|
||||
for filename in options.filenames:
|
||||
text = open(filename).read()
|
||||
parser = Parser(text, filename)
|
||||
parser = Parser(context, text, filename)
|
||||
if options.debug:
|
||||
print(context)
|
||||
program = parser.program()
|
||||
programs.append(program)
|
||||
|
||||
if options.parse_only:
|
||||
return
|
||||
|
||||
#program = merge_programs(programs)
|
||||
program = programs[0]
|
||||
program = merge_programs(programs)
|
||||
|
||||
analyzer = Analyzer(debug=options.debug)
|
||||
analyzer.analyze_program(program)
|
||||
|
18
eg/rudiments/vector-inc.60p
Normal file
18
eg/rudiments/vector-inc.60p
Normal file
@ -0,0 +1,18 @@
|
||||
routine chrout
|
||||
inputs a
|
||||
trashes a
|
||||
@ 65490
|
||||
|
||||
routine printa
|
||||
trashes a, z, n
|
||||
{
|
||||
ld a, 65
|
||||
call chrout
|
||||
}
|
||||
|
||||
routine printb
|
||||
trashes a, z, n
|
||||
{
|
||||
ld a, 66
|
||||
call chrout
|
||||
}
|
19
eg/rudiments/vector-main.60p
Normal file
19
eg/rudiments/vector-main.60p
Normal file
@ -0,0 +1,19 @@
|
||||
vector routine
|
||||
trashes a, z, n
|
||||
print
|
||||
|
||||
// routine printb
|
||||
// trashes a, z, n
|
||||
// {
|
||||
// ld a, 66
|
||||
// call chrout
|
||||
// }
|
||||
|
||||
routine main
|
||||
trashes print, a, z, n
|
||||
{
|
||||
copy printa, print
|
||||
call print
|
||||
copy printb, print
|
||||
call print
|
||||
}
|
@ -18,27 +18,36 @@ class SymEntry(object):
|
||||
return "%s(%r, %r)" % (self.__class__.__name__, self.ast_node, self.model)
|
||||
|
||||
|
||||
class Parser(object):
|
||||
def __init__(self, text, filename):
|
||||
self.scanner = Scanner(text, filename)
|
||||
class ParsingContext(object):
|
||||
def __init__(self):
|
||||
self.symbols = {} # token -> SymEntry
|
||||
self.current_statics = {} # token -> SymEntry
|
||||
self.statics = {} # token -> SymEntry
|
||||
self.typedefs = {} # token -> Type AST
|
||||
self.consts = {} # token -> Loc
|
||||
|
||||
for token in ('a', 'x', 'y'):
|
||||
self.symbols[token] = SymEntry(None, LocationRef(TYPE_BYTE, token))
|
||||
for token in ('c', 'z', 'n', 'v'):
|
||||
self.symbols[token] = SymEntry(None, LocationRef(TYPE_BIT, token))
|
||||
|
||||
def __str__(self):
|
||||
return "Symbols: {}\nStatics: {}\nTypedefs: {}\nConsts: {}".format(self.symbols, self.statics, self.typedefs, self.consts)
|
||||
|
||||
|
||||
class Parser(object):
|
||||
def __init__(self, context, text, filename):
|
||||
self.context = context
|
||||
self.scanner = Scanner(text, filename)
|
||||
self.backpatch_instrs = []
|
||||
|
||||
def syntax_error(self, msg):
|
||||
self.scanner.syntax_error(msg)
|
||||
|
||||
def soft_lookup(self, name):
|
||||
if name in self.current_statics:
|
||||
return self.current_statics[name].model
|
||||
if name in self.symbols:
|
||||
return self.symbols[name].model
|
||||
if name in self.context.statics:
|
||||
return self.context.statics[name].model
|
||||
if name in self.context.symbols:
|
||||
return self.context.symbols[name].model
|
||||
return None
|
||||
|
||||
def lookup(self, name):
|
||||
@ -58,13 +67,13 @@ class Parser(object):
|
||||
if self.scanner.on('const'):
|
||||
self.defn_const()
|
||||
typenames = ['byte', 'word', 'table', 'vector', 'buffer', 'pointer'] # 'routine',
|
||||
typenames.extend(self.typedefs.keys())
|
||||
typenames.extend(self.context.typedefs.keys())
|
||||
while self.scanner.on(*typenames):
|
||||
defn = self.defn()
|
||||
name = defn.name
|
||||
if name in self.symbols:
|
||||
if name in self.context.symbols:
|
||||
self.syntax_error('Symbol "%s" already declared' % name)
|
||||
self.symbols[name] = SymEntry(defn, defn.location)
|
||||
self.context.symbols[name] = SymEntry(defn, defn.location)
|
||||
defns.append(defn)
|
||||
while self.scanner.on('define', 'routine'):
|
||||
if self.scanner.consume('define'):
|
||||
@ -74,14 +83,14 @@ class Parser(object):
|
||||
else:
|
||||
routine = self.legacy_routine()
|
||||
name = routine.name
|
||||
if name in self.symbols:
|
||||
if name in self.context.symbols:
|
||||
self.syntax_error('Symbol "%s" already declared' % name)
|
||||
self.symbols[name] = SymEntry(routine, routine.location)
|
||||
self.context.symbols[name] = SymEntry(routine, routine.location)
|
||||
routines.append(routine)
|
||||
self.scanner.check_type('EOF')
|
||||
|
||||
# now backpatch the executable types.
|
||||
#for type_name, type_ in self.typedefs.iteritems():
|
||||
#for type_name, type_ in self.context.typedefs.iteritems():
|
||||
# type_.backpatch_constraint_labels(lambda w: self.lookup(w))
|
||||
for defn in defns:
|
||||
defn.location.type.backpatch_constraint_labels(lambda w: self.lookup(w))
|
||||
@ -90,18 +99,18 @@ class Parser(object):
|
||||
for instr in self.backpatch_instrs:
|
||||
if instr.opcode in ('call', 'goto'):
|
||||
name = instr.location
|
||||
if name not in self.symbols:
|
||||
if name not in self.context.symbols:
|
||||
self.syntax_error('Undefined routine "%s"' % name)
|
||||
if not isinstance(self.symbols[name].model.type, (RoutineType, VectorType)):
|
||||
if not isinstance(self.context.symbols[name].model.type, (RoutineType, VectorType)):
|
||||
self.syntax_error('Illegal call of non-executable "%s"' % name)
|
||||
instr.location = self.symbols[name].model
|
||||
instr.location = self.context.symbols[name].model
|
||||
if instr.opcode in ('copy',) and isinstance(instr.src, basestring):
|
||||
name = instr.src
|
||||
if name not in self.symbols:
|
||||
if name not in self.context.symbols:
|
||||
self.syntax_error('Undefined routine "%s"' % name)
|
||||
if not isinstance(self.symbols[name].model.type, (RoutineType, VectorType)):
|
||||
if not isinstance(self.context.symbols[name].model.type, (RoutineType, VectorType)):
|
||||
self.syntax_error('Illegal copy of non-executable "%s"' % name)
|
||||
instr.src = self.symbols[name].model
|
||||
instr.src = self.context.symbols[name].model
|
||||
|
||||
return Program(self.scanner.line_number, defns=defns, routines=routines)
|
||||
|
||||
@ -109,18 +118,18 @@ class Parser(object):
|
||||
self.scanner.expect('typedef')
|
||||
type_ = self.defn_type()
|
||||
name = self.defn_name()
|
||||
if name in self.typedefs:
|
||||
if name in self.context.typedefs:
|
||||
self.syntax_error('Type "%s" already declared' % name)
|
||||
self.typedefs[name] = type_
|
||||
self.context.typedefs[name] = type_
|
||||
return type_
|
||||
|
||||
def defn_const(self):
|
||||
self.scanner.expect('const')
|
||||
name = self.defn_name()
|
||||
if name in self.consts:
|
||||
if name in self.context.consts:
|
||||
self.syntax_error('Const "%s" already declared' % name)
|
||||
loc = self.const()
|
||||
self.consts[name] = loc
|
||||
self.context.consts[name] = loc
|
||||
return loc
|
||||
|
||||
def defn(self):
|
||||
@ -163,8 +172,8 @@ class Parser(object):
|
||||
loc = ConstantRef(TYPE_WORD, int(self.scanner.token))
|
||||
self.scanner.scan()
|
||||
return loc
|
||||
elif self.scanner.token in self.consts:
|
||||
loc = self.consts[self.scanner.token]
|
||||
elif self.scanner.token in self.context.consts:
|
||||
loc = self.context.consts[self.scanner.token]
|
||||
self.scanner.scan()
|
||||
return loc
|
||||
else:
|
||||
@ -215,9 +224,9 @@ class Parser(object):
|
||||
else:
|
||||
type_name = self.scanner.token
|
||||
self.scanner.scan()
|
||||
if type_name not in self.typedefs:
|
||||
if type_name not in self.context.typedefs:
|
||||
self.syntax_error("Undefined type '%s'" % type_name)
|
||||
type_ = self.typedefs[type_name]
|
||||
type_ = self.context.typedefs[type_name]
|
||||
|
||||
return type_
|
||||
|
||||
@ -273,9 +282,9 @@ class Parser(object):
|
||||
else:
|
||||
statics = self.statics()
|
||||
|
||||
self.current_statics = self.compose_statics_dict(statics)
|
||||
self.context.statics = self.compose_statics_dict(statics)
|
||||
block = self.block()
|
||||
self.current_statics = {}
|
||||
self.context.statics = {}
|
||||
|
||||
addr = None
|
||||
location = LocationRef(type_, name)
|
||||
@ -289,7 +298,7 @@ class Parser(object):
|
||||
c = {}
|
||||
for defn in statics:
|
||||
name = defn.name
|
||||
if name in self.symbols or name in self.current_statics:
|
||||
if name in self.context.symbols or name in self.context.statics:
|
||||
self.syntax_error('Symbol "%s" already declared' % name)
|
||||
c[name] = SymEntry(defn, defn.location)
|
||||
return c
|
||||
@ -316,7 +325,7 @@ class Parser(object):
|
||||
return accum
|
||||
|
||||
def locexpr(self, forward=False):
|
||||
if self.scanner.token in ('on', 'off', 'word') or self.scanner.token in self.consts or self.scanner.on_type('integer literal'):
|
||||
if self.scanner.token in ('on', 'off', 'word') or self.scanner.token in self.context.consts or self.scanner.on_type('integer literal'):
|
||||
return self.const()
|
||||
elif forward:
|
||||
name = self.scanner.token
|
||||
|
Loading…
x
Reference in New Issue
Block a user