diff --git a/src/main/java/dk/camelot64/kickc/macros/CMacroExpander.java b/src/main/java/dk/camelot64/kickc/macros/CMacroExpander.java
deleted file mode 100644
index a593cb2fd..000000000
--- a/src/main/java/dk/camelot64/kickc/macros/CMacroExpander.java
+++ /dev/null
@@ -1,269 +0,0 @@
-package dk.camelot64.kickc.macros;
-
-import dk.camelot64.kickc.model.CompileError;
-import org.antlr.v4.runtime.*;
-
-import java.util.*;
-
-/**
- * C Macro expander.
- *
- * The macro expander takes one token source as input and produces a new expanded token source as output
- */
-public class CMacroExpander {
-
- /** The channel containing whitespace. */
- private final int channelWhitespace;
- /** The token type for tokens containing whitespace. */
- private final int tokenWhitespace;
- /** The token type for #define. */
- private final int tokenDefine;
- /** The token type for identifiers. */
- private final int tokenIdentifier;
- /** The token type for parenthesis begin. */
- private final int tokenParBegin;
- /** The token type for parenthesis end. */
- private final int tokenParEnd;
- /** The token type for comma. */
- private final int tokenComma;
- /** The token type for define multi-line. */
- private final int tokenDefineMultiline;
-
- public CMacroExpander(int channelWhitespace, int tokenWhitespace, int tokenDefine, int tokenIdentifier, int tokenParBegin, int tokenParEnd, int tokenComma, int tokenDefineMultiline) {
- this.channelWhitespace = channelWhitespace;
- this.tokenWhitespace = tokenWhitespace;
- this.tokenDefine = tokenDefine;
- this.tokenIdentifier = tokenIdentifier;
- this.tokenParBegin = tokenParBegin;
- this.tokenParEnd = tokenParEnd;
- this.tokenComma = tokenComma;
- this.tokenDefineMultiline = tokenDefineMultiline;
- }
-
- public TokenSource expandMacros(TokenSource inputTokenSource) {
- List<Token> inputTokens = getTokenList(inputTokenSource);
- final TokenIterator tokenIterator = new TokenIterator(inputTokens);
- Map<String, List<Token>> macros = new LinkedHashMap<>();
- final ArrayList<Token> expandedTokens = new ArrayList<>();
- while(tokenIterator.hasNext()) {
- Token inputToken = tokenIterator.next();
- if(inputToken.getType() == tokenDefine) {
- // #define a new macro - find the name
- skipWhitespace(tokenIterator);
- String macroName = getToken(tokenIterator, tokenIdentifier).getText();
- // Examine whether the macro has parameters
- skipWhitespace(tokenIterator);
- if(tokenIterator.peek().getType() == tokenParBegin) {
- // Macro has parameters - find parameter name list
- throw new CompileError("Macros with parameters not supported!");
- }
- // Find body by gobbling tokens until the line ends
- final ArrayList<Token> macroBody = new ArrayList<>();
- boolean macroRead = true;
- while(macroRead) {
- final Token bodyToken = tokenIterator.next();
- if(bodyToken.getType() == tokenDefineMultiline) {
- // Skip the multi-line token, add a newline token and continue reading body on the next line
- final CommonToken newlineToken = new CommonToken(bodyToken);
- newlineToken.setType(tokenWhitespace);
- newlineToken.setChannel(channelWhitespace);
- newlineToken.setText("\n");
- macroBody.add(newlineToken);
- continue;
- }
- if(bodyToken.getChannel() == channelWhitespace && bodyToken.getText().contains("\n")) {
- macroRead = false;
- } else {
- macroBody.add(bodyToken);
- }
- }
- macros.put(macroName, macroBody);
- } else {
- if(inputToken.getType() == tokenIdentifier) {
- final String macroName = inputToken.getText();
- if(macros.containsKey(macroName)) {
- // Check for macro recursion
- if(inputToken instanceof ExpansionToken) {
- if(((ExpansionToken) inputToken).getMacroNames().contains(macroName)) {
- // Detected macro recursion in the expansion - add directly to output and move on!
- expandedTokens.add(inputToken);
- continue;
- }
- }
- // Macro expansion is needed
- final List<Token> macroBody = macros.get(macroName);
- List<Token> expandedBody = new ArrayList<>();
- for(Token bodyToken : macroBody) {
- final CommonToken expandedToken = new CommonToken(inputToken);
- expandedToken.setText(bodyToken.getText());
- expandedToken.setType(bodyToken.getType());
- expandedToken.setChannel(bodyToken.getChannel());
- Set<String> macroNames = new HashSet<>();
- if(inputToken instanceof ExpansionToken) {
- // Transfer macro names to the new expansion
- macroNames = ((ExpansionToken) inputToken).getMacroNames();
- }
- macroNames.add(macroName);
- expandedBody.add(new ExpansionToken(expandedToken, macroNames));
- }
- tokenIterator.addFirst(expandedBody);
- } else {
- expandedTokens.add(inputToken);
- }
- } else {
- expandedTokens.add(inputToken);
- }
- }
- }
- return new ListTokenSource(expandedTokens);
- }
-
-
- private Token getToken(TokenIterator tokenIterator, int tokenType) {
- if(!tokenIterator.hasNext())
- throw new CompileError("File ended unexpectedly. Was expecting token " + tokenType);
- final Token token = tokenIterator.next();
- if(token.getType() != tokenType)
- throw new CompileError("Unexpected token. Was expecting " + tokenType);
- return token;
- }
-
- /**
- * Skip whitespace tokens, positioning iterator at the next non-whitespace
- *
- * @param tokenIterator The token iterator
- */
- private void skipWhitespace(TokenIterator tokenIterator) {
- while(tokenIterator.hasNext() && tokenIterator.peek().getChannel() == channelWhitespace)
- tokenIterator.next();
- }
-
- private List<Token> getTokenList(TokenSource inputTokenSource) {
- List<Token> inputTokens = new ArrayList<>();
- Token inputToken;
- do {
- inputToken = inputTokenSource.nextToken();
- inputTokens.add(inputToken);
- } while(inputToken.getType() != Token.EOF);
-
- return inputTokens;
- }
-
- /** A token iterator supporting peeking backed by a list of lists of tokens.
- * Macro expansion works by prepending a new list of tokens which contains the body of the macro being expanded */
- static class TokenIterator implements Iterator<Token> {
-
- Deque<Token> tokens;
-
- public TokenIterator(Collection<Token> tokens) {
- this.tokens = new LinkedList<>(tokens);
- }
-
- /**
- * Get the next token without advancing the cursor.
- *
- * @return The next token. null if there are no more tokens.
- */
- public Token peek() {
- return tokens.getFirst();
- }
-
- @Override
- public boolean hasNext() {
- return !tokens.isEmpty();
- }
-
- @Override
- public Token next() {
- return tokens.removeFirst();
- }
-
- /**
- * Add a bunch of tokens to the start of the iterator.
- * This is called when a macro is expanded to add the macro body to the start of the input.
- * @param tokens The tokens to add
- */
- public void addFirst(List<Token> tokens) {
- Collections.reverse(tokens);
- for(Token token : tokens) {
- this.tokens.addFirst(token);
- }
- }
-
- }
-
-
- /** A token that is the result of macro expansion.
- * Keeps track of which macros were used for the expansion.
- * */
- public class ExpansionToken implements Token {
-
- /** The underlying token. */
- private Token subToken;
-
- /** The names of all macros used for expanding this token. */
- private Set<String> macroNames;
-
- public ExpansionToken(Token subToken, Set<String> macroNames) {
- this.subToken = subToken;
- this.macroNames = macroNames;
- }
-
- public Set<String> getMacroNames() {
- return macroNames;
- }
-
- @Override
- public String getText() {
- return subToken.getText();
- }
-
- @Override
- public int getType() {
- return subToken.getType();
- }
-
- @Override
- public int getLine() {
- return subToken.getLine();
- }
-
- @Override
- public int getCharPositionInLine() {
- return subToken.getCharPositionInLine();
- }
-
- @Override
- public int getChannel() {
- return subToken.getChannel();
- }
-
- @Override
- public int getTokenIndex() {
- return subToken.getTokenIndex();
- }
-
- @Override
- public int getStartIndex() {
- return subToken.getStartIndex();
- }
-
- @Override
- public int getStopIndex() {
- return subToken.getStopIndex();
- }
-
- @Override
- public TokenSource getTokenSource() {
- return subToken.getTokenSource();
- }
-
- @Override
- public CharStream getInputStream() {
- return subToken.getInputStream();
- }
- }
-
-
-
-}
diff --git a/src/main/java/dk/camelot64/kickc/parser/CParser.java b/src/main/java/dk/camelot64/kickc/parser/CParser.java
index 5044f7e31..9c342990c 100644
--- a/src/main/java/dk/camelot64/kickc/parser/CParser.java
+++ b/src/main/java/dk/camelot64/kickc/parser/CParser.java
@@ -1,6 +1,7 @@
package dk.camelot64.kickc.parser;
import dk.camelot64.kickc.SourceLoader;
+import dk.camelot64.kickc.preprocessor.CTokenSourcePreprocessor;
import dk.camelot64.kickc.model.CompileError;
import dk.camelot64.kickc.model.Program;
import org.antlr.v4.runtime.*;
@@ -38,7 +39,7 @@ public class CParser {
private final CommonTokenStream tokenStream;
/** The token source stack handling import files. */
- private CTokenSourceStack cFileTokenStack;
+ private CTokenSource cTokenSource;
/** The input files that have been parsed. Maps file name to the lexer. */
private Map<String, CFile> cFiles;
@@ -62,8 +63,9 @@ public class CParser {
public CParser(Program program) {
this.program = program;
this.cFiles = new LinkedHashMap<>();
- this.cFileTokenStack = new CTokenSourceStack();
- this.tokenStream = new CommonTokenStream(cFileTokenStack);
+ this.cTokenSource = new CTokenSource();
+ final CTokenSourcePreprocessor preprocessor = new CTokenSourcePreprocessor(cTokenSource, CHANNEL_WHITESPACE, KickCLexer.WS, KickCLexer.DEFINE, KickCLexer.NAME, KickCLexer.PAR_BEGIN, KickCLexer.PAR_END, KickCLexer.COMMA, KickCLexer.DEFINE_CONTINUE);
+ this.tokenStream = new CommonTokenStream(preprocessor);
this.parser = new KickCParser(tokenStream, this);
this.typedefs = new ArrayList<>();
parser.setBuildParseTree(true);
@@ -131,7 +133,7 @@ public class CParser {
* @return The path of the folder containing the source file currently being tokenized
*/
private Path getCurrentSourceFolderPath() {
- TokenSource currentSource = cFileTokenStack.getCurrentSource();
+ TokenSource currentSource = cTokenSource.getCurrentSource();
String sourceName = currentSource.getSourceName();
CFile cFile = cFiles.get(sourceName);
File file = cFile.file;
@@ -190,7 +192,7 @@ public class CParser {
});
CFile cFile = new CFile(file, lexer);
cFiles.put(file.getAbsolutePath(), cFile);
- cFileTokenStack.pushSource(lexer);
+ cTokenSource.addSource(lexer);
} catch(IOException e) {
throw new CompileError("Error parsing file " + fileName, e);
}
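
A minimal sketch of the resulting lexing pipeline, using only the constants passed in the constructor call above; the `lexer` variable stands for any KickCLexer created by loadAndParseCFile() and is illustrative, not part of the patch:

   // Sketch: the preprocessor sits between the pushable token source and the
   // ANTLR token stream, so #define handling happens before the parser sees
   // any token.
   CTokenSource cTokenSource = new CTokenSource();
   CTokenSourcePreprocessor preprocessor = new CTokenSourcePreprocessor(
         cTokenSource, CHANNEL_WHITESPACE,
         KickCLexer.WS, KickCLexer.DEFINE, KickCLexer.NAME,
         KickCLexer.PAR_BEGIN, KickCLexer.PAR_END, KickCLexer.COMMA,
         KickCLexer.DEFINE_CONTINUE);
   CommonTokenStream tokenStream = new CommonTokenStream(preprocessor);
   // Imported files are pushed onto the same source, so their tokens are
   // preprocessed inline, exactly as loadAndParseCFile() does above.
   cTokenSource.addSource(lexer);
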
diff --git a/src/main/java/dk/camelot64/kickc/parser/CTokenSource.java b/src/main/java/dk/camelot64/kickc/parser/CTokenSource.java
new file mode 100644
index 000000000..eeb0d55be
--- /dev/null
+++ b/src/main/java/dk/camelot64/kickc/parser/CTokenSource.java
@@ -0,0 +1,99 @@
+package dk.camelot64.kickc.parser;
+
+import org.antlr.v4.runtime.*;
+
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.LinkedList;
+
+/**
+ * An ANTLR4 Token Source that supports pushing sub-sources at the front of the stream.
+ * This can be used for importing files or for macro expansion.
+ */
+public class CTokenSource implements TokenSource {
+
+ /** Stack of underlying sources */
+ private Deque<TokenSource> subSources;
+
+ public CTokenSource() {
+ this.subSources = new LinkedList<>();
+ }
+
+ public CTokenSource(TokenSource tokenSource) {
+ this.subSources = new LinkedList<>();
+ addSource(tokenSource);
+ }
+
+ /**
+ * Pushes a token source at the current location.
+ * The pushed source will immediately be used for tokens, and only when it is exhausted will tokens resume from the previous source.
+ *
+ * @param source The source to push
+ */
+ public void addSource(TokenSource source) {
+ subSources.addFirst(source);
+ }
+
+ public TokenSource getCurrentSource() {
+ return subSources.peekFirst();
+ }
+
+ /**
+ * Peek the next token without removing it from the source.
+ *
+ * @return The next token of the source.
+ */
+ public Token peekToken() {
+ // Get the next token
+ final Token token = nextToken();
+ // And push it back to the front of the stack
+ final ArrayList<Token> tokens = new ArrayList<>();
+ tokens.add(token);
+ addSource(new ListTokenSource(tokens));
+ return token;
+ }
+
+ @Override
+ public Token nextToken() {
+ TokenSource currentSource = getCurrentSource();
+ Token token = currentSource.nextToken();
+ if(token.getType() == Token.EOF && subSources.size() > 1) {
+ // We are at the end of the current sub-source and have more sub-sources to go through - move on to the next one!
+ subSources.pop();
+ return nextToken();
+ } else {
+ return token;
+ }
+ }
+
+ @Override
+ public int getLine() {
+ return getCurrentSource().getLine();
+ }
+
+ @Override
+ public int getCharPositionInLine() {
+ return getCurrentSource().getCharPositionInLine();
+ }
+
+ @Override
+ public CharStream getInputStream() {
+ return getCurrentSource().getInputStream();
+ }
+
+ @Override
+ public String getSourceName() {
+ return getCurrentSource().getSourceName();
+ }
+
+ @Override
+ public void setTokenFactory(TokenFactory<?> factory) {
+ throw new RuntimeException("Not implemented!!");
+ }
+
+ @Override
+ public TokenFactory<? extends Token> getTokenFactory() {
+ return getCurrentSource().getTokenFactory();
+ }
+
+}
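
A small, self-contained illustration of the push semantics (not part of the patch; token type 1 and the texts are arbitrary): a pushed sub-source preempts the current source until it hits EOF, after which reading resumes from the outer source.

   import dk.camelot64.kickc.parser.CTokenSource;
   import org.antlr.v4.runtime.*;
   import java.util.Arrays;

   class CTokenSourceDemo {
      public static void main(String[] args) {
         CTokenSource source = new CTokenSource(new ListTokenSource(Arrays.asList(
               new CommonToken(1, "a"), new CommonToken(1, "c"))));
         System.out.println(source.nextToken().getText()); // a
         // Push a sub-source, e.g. a macro body or an imported file
         source.addSource(new ListTokenSource(Arrays.asList(new CommonToken(1, "b"))));
         System.out.println(source.nextToken().getText()); // b - from the pushed source
         System.out.println(source.nextToken().getText()); // c - back in the outer source
      }
   }
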
diff --git a/src/main/java/dk/camelot64/kickc/parser/CTokenSourceStack.java b/src/main/java/dk/camelot64/kickc/parser/CTokenSourceStack.java
deleted file mode 100644
index 704fadef5..000000000
--- a/src/main/java/dk/camelot64/kickc/parser/CTokenSourceStack.java
+++ /dev/null
@@ -1,113 +0,0 @@
-package dk.camelot64.kickc.parser;
-
-import org.antlr.v4.runtime.*;
-
-import java.util.Stack;
-
-/**
- * An ANTLR4 Token Source that can keep track of multiple underlying source files.
- */
-public class CTokenSourceStack implements TokenSource {
-
- /** Stack of underlying sources */
- private Stack<TokenSource> sourceStack;
-
- public CTokenSourceStack() {
- this.sourceStack = new Stack<>();
- }
-
- /**
- * Pushes a token source at the current location.
- * The pushed source will immediately be used for tokens and only when it is exhausted will tokens resume from the current source
- * @param source The source to push
- */
- public void pushSource(TokenSource source) {
- sourceStack.push(source);
- }
-
- public TokenSource getCurrentSource() {
- if(sourceStack.size()>0)
- return sourceStack.peek();
- else
- return new TokenSource() {
- @Override
- public Token nextToken() {
- return null;
- }
-
- @Override
- public int getLine() {
- return 0;
- }
-
- @Override
- public int getCharPositionInLine() {
- return 0;
- }
-
- @Override
- public CharStream getInputStream() {
- return null;
- }
-
- @Override
- public String getSourceName() {
- return "";
- }
-
- @Override
- public void setTokenFactory(TokenFactory<?> factory) {
- }
-
- @Override
- public TokenFactory<? extends Token> getTokenFactory() {
- return null;
- }
- };
- }
-
- @Override
- public Token nextToken() {
- TokenSource currentSource = getCurrentSource();
- Token token = currentSource.nextToken();
- if(token.getType()==Token.EOF) {
- // Last token of the current source - pop the stack!
- sourceStack.pop();
- if(!sourceStack.isEmpty()) {
- // Recurse to find next token
- return nextToken();
- }
- }
- return token;
- }
-
- @Override
- public int getLine() {
- return getCurrentSource().getLine();
- }
-
- @Override
- public int getCharPositionInLine() {
- return getCurrentSource().getCharPositionInLine();
- }
-
- @Override
- public CharStream getInputStream() {
- return getCurrentSource().getInputStream();
- }
-
- @Override
- public String getSourceName() {
- return getCurrentSource().getSourceName();
- }
-
- @Override
- public void setTokenFactory(TokenFactory> factory) {
- throw new RuntimeException("Not implemented!!");
- }
-
- @Override
- public TokenFactory> getTokenFactory() {
- return getCurrentSource().getTokenFactory();
- }
-}
diff --git a/src/main/java/dk/camelot64/kickc/preprocessor/CTokenSourcePreprocessor.java b/src/main/java/dk/camelot64/kickc/preprocessor/CTokenSourcePreprocessor.java
new file mode 100644
index 000000000..73c8eb5c1
--- /dev/null
+++ b/src/main/java/dk/camelot64/kickc/preprocessor/CTokenSourcePreprocessor.java
@@ -0,0 +1,278 @@
+package dk.camelot64.kickc.preprocessor;
+
+import dk.camelot64.kickc.model.CompileError;
+import dk.camelot64.kickc.parser.CTokenSource;
+import org.antlr.v4.runtime.*;
+
+import java.util.*;
+
+/**
+ * C preprocessor
+ *
+ * The preprocessor takes a token source as input and produces macro expanded tokens as output
+ */
+public class CTokenSourcePreprocessor implements TokenSource {
+
+ /** The token source containing the input */
+ private CTokenSource input;
+
+ /**
+ * The #defined macros.
+ * Maps macro name to the tokens of the expansion
+ */
+ private Map<String, List<Token>> defines;
+
+ /** The channel containing whitespace. */
+ private final int channelWhitespace;
+ /** The token type for tokens containing whitespace. */
+ private final int tokenWhitespace;
+ /** The token type for #define. */
+ private final int tokenDefine;
+ /** The token type for identifiers. */
+ private final int tokenIdentifier;
+ /** The token type for define multi-line. */
+ private final int tokenDefineMultiline;
+ /** The token type for parenthesis begin. */
+ private final int tokenParBegin;
+ /** The token type for parenthesis end. */
+ private final int tokenParEnd;
+ /** The token type for comma. */
+ private final int tokenComma;
+
+ public CTokenSourcePreprocessor(TokenSource input, int channelWhitespace, int tokenWhitespace, int tokenDefine, int tokenIdentifier, int tokenParBegin, int tokenParEnd, int tokenComma, int tokenDefineMultiline) {
+ if(input instanceof CTokenSource) {
+ // If possible use the input directly instead of wrapping it
+ this.input = (CTokenSource) input;
+ } else {
+ this.input = new CTokenSource(input);
+ }
+ this.defines = new LinkedHashMap<>();
+ this.channelWhitespace = channelWhitespace;
+ this.tokenWhitespace = tokenWhitespace;
+ this.tokenDefine = tokenDefine;
+ this.tokenIdentifier = tokenIdentifier;
+ this.tokenParBegin = tokenParBegin;
+ this.tokenParEnd = tokenParEnd;
+ this.tokenComma = tokenComma;
+ this.tokenDefineMultiline = tokenDefineMultiline;
+ }
+
+ @Override
+ public Token nextToken() {
+ Token token = input.nextToken();
+ // Perform preprocessing on tokens as long as it is needed
+ while(preprocess(token, input)) {
+ token = input.nextToken();
+ }
+ return token;
+ }
+
+ @Override
+ public int getLine() {
+ return input.getLine();
+ }
+
+ @Override
+ public int getCharPositionInLine() {
+ return input.getCharPositionInLine();
+ }
+
+ @Override
+ public CharStream getInputStream() {
+ return input.getInputStream();
+ }
+
+ @Override
+ public String getSourceName() {
+ return input.getSourceName();
+ }
+
+ @Override
+ public void setTokenFactory(TokenFactory<?> factory) {
+ input.setTokenFactory(factory);
+ }
+
+ @Override
+ public TokenFactory<? extends Token> getTokenFactory() {
+ return input.getTokenFactory();
+ }
+
+ /**
+ * Perform any preprocessing needed on a token. If preprocessing is not needed nothing is done.
+ *
+ * This method may gobble more tokens from the source (for instance if a macro is being defined) and it may push tokens at the front of the source (if a macro is being expanded).
+ *
+ * @param inputToken The token to process
+ * @param cTokenSource The token source used for getting more tokens or for pushing macro expansions
+ * @return true if the input token was preprocessed (and should not be added to the output). False if the token was not a preprocessor token
+ */
+ private boolean preprocess(Token inputToken, CTokenSource cTokenSource) {
+ boolean wasPreprocessed;
+ if(inputToken.getType() == tokenDefine) {
+ // #define a new macro - find the name
+ skipWhitespace(cTokenSource);
+ String macroName = nextToken(cTokenSource, tokenIdentifier).getText();
+ // Examine whether the macro has parameters
+ skipWhitespace(cTokenSource);
+ if(cTokenSource.peekToken().getType() == tokenParBegin) {
+ // Macro has parameters - find parameter name list
+ throw new CompileError("Macros with parameters not supported!");
+ }
+ // Find body by gobbling tokens until the line ends
+ final ArrayList<Token> macroBody = new ArrayList<>();
+ boolean macroRead = true;
+ while(macroRead) {
+ final Token bodyToken = cTokenSource.nextToken();
+ if(bodyToken.getType() == tokenDefineMultiline) {
+ // Skip the multi-line token, add a newline token and continue reading body on the next line
+ final CommonToken newlineToken = new CommonToken(bodyToken);
+ newlineToken.setType(tokenWhitespace);
+ newlineToken.setChannel(channelWhitespace);
+ newlineToken.setText("\n");
+ macroBody.add(newlineToken);
+ continue;
+ }
+ if(bodyToken.getChannel() == channelWhitespace && bodyToken.getText().contains("\n")) {
+ macroRead = false;
+ } else {
+ macroBody.add(bodyToken);
+ }
+ }
+ defines.put(macroName, macroBody);
+ return true;
+ } else {
+ if(inputToken.getType() == tokenIdentifier) {
+ final String macroName = inputToken.getText();
+ List<Token> macroBody = defines.get(macroName);
+ if(macroBody != null) {
+ // Check for macro recursion
+ if(inputToken instanceof ExpansionToken) {
+ if(((ExpansionToken) inputToken).getMacroNames().contains(macroName)) {
+ // Detected macro recursion in the expansion - add directly to output and do not perform expansion!
+ macroBody = null;
+ }
+ }
+ }
+ if(macroBody != null) {
+ // Macro expansion is needed
+ List<Token> expandedBody = new ArrayList<>();
+ for(Token bodyToken : macroBody) {
+ final CommonToken expandedToken = new CommonToken(inputToken);
+ expandedToken.setText(bodyToken.getText());
+ expandedToken.setType(bodyToken.getType());
+ expandedToken.setChannel(bodyToken.getChannel());
+ Set<String> macroNames = new HashSet<>();
+ if(inputToken instanceof ExpansionToken) {
+ // Transfer macro names to the new expansion
+ macroNames = ((ExpansionToken) inputToken).getMacroNames();
+ }
+ macroNames.add(macroName);
+ expandedBody.add(new ExpansionToken(expandedToken, macroNames));
+ }
+ cTokenSource.addSource(new ListTokenSource(expandedBody));
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+
+ /**
+ * Pull the next token from a source and check that it matches the expected type. Any other type produces an error.
+ *
+ * @param cTokenSource The token source
+ * @param tokenType The type to expect
+ * @return The token
+ */
+ private Token nextToken(CTokenSource cTokenSource, int tokenType) {
+ final Token token = cTokenSource.nextToken();
+ if(token.getType() != tokenType)
+ throw new CompileError("Unexpected token. Was expecting " + tokenType);
+ return token;
+ }
+
+ /**
+ * Skip whitespace tokens, positioning the source at the next non-whitespace token.
+ *
+ * @param cTokenSource The token source
+ */
+ private void skipWhitespace(CTokenSource cTokenSource) {
+ while(cTokenSource.peekToken().getChannel() == channelWhitespace)
+ cTokenSource.nextToken();
+ }
+
+ /**
+ * A token that is the result of macro expansion.
+ * Keeps track of which macros were used for the expansion, to avoid macro recursion.
+ **/
+ public static class ExpansionToken implements Token {
+
+ /** The underlying token. */
+ private Token subToken;
+
+ /** The names of all macros used for expanding this token. */
+ private Set<String> macroNames;
+
+ ExpansionToken(Token subToken, Set<String> macroNames) {
+ this.subToken = subToken;
+ this.macroNames = macroNames;
+ }
+
+ Set<String> getMacroNames() {
+ return macroNames;
+ }
+
+ @Override
+ public String getText() {
+ return subToken.getText();
+ }
+
+ @Override
+ public int getType() {
+ return subToken.getType();
+ }
+
+ @Override
+ public int getLine() {
+ return subToken.getLine();
+ }
+
+ @Override
+ public int getCharPositionInLine() {
+ return subToken.getCharPositionInLine();
+ }
+
+ @Override
+ public int getChannel() {
+ return subToken.getChannel();
+ }
+
+ @Override
+ public int getTokenIndex() {
+ return subToken.getTokenIndex();
+ }
+
+ @Override
+ public int getStartIndex() {
+ return subToken.getStartIndex();
+ }
+
+ @Override
+ public int getStopIndex() {
+ return subToken.getStopIndex();
+ }
+
+ @Override
+ public TokenSource getTokenSource() {
+ return subToken.getTokenSource();
+ }
+
+ @Override
+ public CharStream getInputStream() {
+ return subToken.getInputStream();
+ }
+ }
+
+
+}
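
The recursion guard can be seen by driving the preprocessor directly, much as the TestMacrosParser change below does. This is a sketch under two assumptions: MacrosLexer is built from a string in the usual ANTLR way (CharStreams.fromString), and its WHITESPACE tokens sit on the same CHANNEL_WHITESPACE constant the test passes and include the terminating newline, as the body-reading loop above requires.

   MacrosLexer lexer = new MacrosLexer(CharStreams.fromString(
         "#define A B\n#define B A\nA\n"));
   CTokenSourcePreprocessor pre = new CTokenSourcePreprocessor(
         lexer, CHANNEL_WHITESPACE,
         MacrosLexer.WHITESPACE, MacrosLexer.DEFINE, MacrosLexer.IDENTIFIER,
         MacrosLexer.PAR_BEGIN, MacrosLexer.PAR_END, MacrosLexer.COMMA,
         MacrosLexer.DEFINE_CONTINUE);
   for(Token t = pre.nextToken(); t.getType() != Token.EOF; t = pre.nextToken())
      if(t.getChannel() != CHANNEL_WHITESPACE)
         System.out.print(t.getText());
   // Prints a single "A": A expands to B, B expands back to A, and that second A
   // is an ExpansionToken already carrying "A" in its macro-name set, so it is
   // emitted as-is instead of being expanded again.
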
diff --git a/src/test/java/dk/camelot64/kickc/parsing/macros/TestMacrosParser.java b/src/test/java/dk/camelot64/kickc/parsing/macros/TestMacrosParser.java
index 343876d72..1f3bbea55 100644
--- a/src/test/java/dk/camelot64/kickc/parsing/macros/TestMacrosParser.java
+++ b/src/test/java/dk/camelot64/kickc/parsing/macros/TestMacrosParser.java
@@ -1,6 +1,6 @@
package dk.camelot64.kickc.parsing.macros;
-import dk.camelot64.kickc.macros.CMacroExpander;
+import dk.camelot64.kickc.preprocessor.CTokenSourcePreprocessor;
import dk.camelot64.kickc.model.CompileError;
import org.antlr.v4.runtime.*;
import org.junit.Test;
@@ -106,8 +106,7 @@ public class TestMacrosParser {
}
});
- final CMacroExpander cMacroExpander = new CMacroExpander(CHANNEL_WHITESPACE, MacrosLexer.WHITESPACE, MacrosLexer.DEFINE, MacrosLexer.IDENTIFIER, MacrosLexer.PAR_BEGIN, MacrosLexer.PAR_END, MacrosLexer.COMMA, MacrosLexer.DEFINE_CONTINUE);
- final TokenSource expandedTokenSource = cMacroExpander.expandMacros(lexer);
+ final CTokenSourcePreprocessor expandedTokenSource = new CTokenSourcePreprocessor(lexer, CHANNEL_WHITESPACE, MacrosLexer.WHITESPACE, MacrosLexer.DEFINE, MacrosLexer.IDENTIFIER, MacrosLexer.PAR_BEGIN, MacrosLexer.PAR_END, MacrosLexer.COMMA, MacrosLexer.DEFINE_CONTINUE);
MacrosParser parser = new MacrosParser(new CommonTokenStream(expandedTokenSource));
parser.setBuildParseTree(true);
parser.addErrorListener(new BaseErrorListener() {
diff --git a/src/test/java/dk/camelot64/kickc/test/TestPrograms.java b/src/test/java/dk/camelot64/kickc/test/TestPrograms.java
index 52931550c..19275a8ff 100644
--- a/src/test/java/dk/camelot64/kickc/test/TestPrograms.java
+++ b/src/test/java/dk/camelot64/kickc/test/TestPrograms.java
@@ -37,6 +37,11 @@ public class TestPrograms {
public TestPrograms() {
}
+ @Test
+ public void testPreprocessor0() throws IOException, URISyntaxException {
+ compileAndCompare("preprocessor-0");
+ }
+
@Test
public void testMaCoalesceProblem() throws IOException, URISyntaxException {
compileAndCompare("ma_coalesce_problem");
diff --git a/src/test/kc/forrangemin.kc b/src/test/kc/forrangemin.kc
index 40064a88d..bd56656f5 100644
--- a/src/test/kc/forrangemin.kc
+++ b/src/test/kc/forrangemin.kc
@@ -1,8 +1,7 @@
-
// Minimal range based for() loop
-byte* SCREEN1 = $0400;
-byte* SCREEN2 = $0500;
+char* SCREEN1 = 0x0400;
+char* SCREEN2 = 0x0500;
void main() {
for(byte i : 0..255) {
diff --git a/src/test/kc/preprocessor-0.kc b/src/test/kc/preprocessor-0.kc
new file mode 100644
index 000000000..3f5dd33f2
--- /dev/null
+++ b/src/test/kc/preprocessor-0.kc
@@ -0,0 +1,10 @@
+// Test the preprocessor
+// A simple #define
+
+#define A 'a'
+
+char * const SCREEN = 0x0400;
+
+void main() {
+ *SCREEN = A;
+}
diff --git a/src/test/ref/preprocessor-0.asm b/src/test/ref/preprocessor-0.asm
new file mode 100644
index 000000000..4246e6473
--- /dev/null
+++ b/src/test/ref/preprocessor-0.asm
@@ -0,0 +1,13 @@
+// Test the preprocessor
+// A simple #define
+.pc = $801 "Basic"
+:BasicUpstart(main)
+.pc = $80d "Program"
+ .label SCREEN = $400
+main: {
+ // *SCREEN = A
+ lda #'a'
+ sta SCREEN
+ // }
+ rts
+}
diff --git a/src/test/ref/preprocessor-0.cfg b/src/test/ref/preprocessor-0.cfg
new file mode 100644
index 000000000..6a5142472
--- /dev/null
+++ b/src/test/ref/preprocessor-0.cfg
@@ -0,0 +1,17 @@
+@begin: scope:[] from
+ [0] phi()
+ to:@1
+@1: scope:[] from @begin
+ [1] phi()
+ [2] call main
+ to:@end
+@end: scope:[] from @1
+ [3] phi()
+
+(void()) main()
+main: scope:[main] from @1
+ [4] *((const nomodify byte*) SCREEN) ← (byte) 'a'
+ to:main::@return
+main::@return: scope:[main] from main
+ [5] return
+ to:@return
diff --git a/src/test/ref/preprocessor-0.log b/src/test/ref/preprocessor-0.log
new file mode 100644
index 000000000..7c16e42d3
--- /dev/null
+++ b/src/test/ref/preprocessor-0.log
@@ -0,0 +1,214 @@
+
+CONTROL FLOW GRAPH SSA
+@begin: scope:[] from
+ to:@1
+
+(void()) main()
+main: scope:[main] from @1
+ *((const nomodify byte*) SCREEN) ← (byte) 'a'
+ to:main::@return
+main::@return: scope:[main] from main
+ return
+ to:@return
+@1: scope:[] from @begin
+ call main
+ to:@2
+@2: scope:[] from @1
+ to:@end
+@end: scope:[] from @2
+
+SYMBOL TABLE SSA
+(label) @1
+(label) @2
+(label) @begin
+(label) @end
+(const nomodify byte*) SCREEN = (byte*)(number) $400
+(void()) main()
+(label) main::@return
+
+Simplifying constant pointer cast (byte*) 1024
+Successful SSA optimization PassNCastSimplification
+Adding NOP phi() at start of @begin
+Adding NOP phi() at start of @1
+Adding NOP phi() at start of @2
+Adding NOP phi() at start of @end
+CALL GRAPH
+Calls in [] to main:2
+
+Created 0 initial phi equivalence classes
+Coalesced down to 0 phi equivalence classes
+Culled Empty Block (label) @2
+Adding NOP phi() at start of @begin
+Adding NOP phi() at start of @1
+Adding NOP phi() at start of @end
+
+FINAL CONTROL FLOW GRAPH
+@begin: scope:[] from
+ [0] phi()
+ to:@1
+@1: scope:[] from @begin
+ [1] phi()
+ [2] call main
+ to:@end
+@end: scope:[] from @1
+ [3] phi()
+
+(void()) main()
+main: scope:[main] from @1
+ [4] *((const nomodify byte*) SCREEN) ← (byte) 'a'
+ to:main::@return
+main::@return: scope:[main] from main
+ [5] return
+ to:@return
+
+
+VARIABLE REGISTER WEIGHTS
+(void()) main()
+
+Initial phi equivalence classes
+Complete equivalence classes
+
+INITIAL ASM
+Target platform is c64basic / MOS6502X
+ // File Comments
+// Test the preprocessor
+// A simple #define
+ // Upstart
+.pc = $801 "Basic"
+:BasicUpstart(__bbegin)
+.pc = $80d "Program"
+ // Global Constants & labels
+ .label SCREEN = $400
+ // @begin
+__bbegin:
+ // [1] phi from @begin to @1 [phi:@begin->@1]
+__b1_from___bbegin:
+ jmp __b1
+ // @1
+__b1:
+ // [2] call main
+ jsr main
+ // [3] phi from @1 to @end [phi:@1->@end]
+__bend_from___b1:
+ jmp __bend
+ // @end
+__bend:
+ // main
+main: {
+ // [4] *((const nomodify byte*) SCREEN) ← (byte) 'a' -- _deref_pbuc1=vbuc2
+ lda #'a'
+ sta SCREEN
+ jmp __breturn
+ // main::@return
+ __breturn:
+ // [5] return
+ rts
+}
+ // File Data
+
+REGISTER UPLIFT POTENTIAL REGISTERS
+Statement [4] *((const nomodify byte*) SCREEN) ← (byte) 'a' [ ] ( main:2 [ ] { } ) always clobbers reg byte a
+
+REGISTER UPLIFT SCOPES
+Uplift Scope [main]
+Uplift Scope []
+
+Uplifting [main] best 27 combination
+Uplifting [] best 27 combination
+
+ASSEMBLER BEFORE OPTIMIZATION
+ // File Comments
+// Test the preprocessor
+// A simple #define
+ // Upstart
+.pc = $801 "Basic"
+:BasicUpstart(__bbegin)
+.pc = $80d "Program"
+ // Global Constants & labels
+ .label SCREEN = $400
+ // @begin
+__bbegin:
+ // [1] phi from @begin to @1 [phi:@begin->@1]
+__b1_from___bbegin:
+ jmp __b1
+ // @1
+__b1:
+ // [2] call main
+ jsr main
+ // [3] phi from @1 to @end [phi:@1->@end]
+__bend_from___b1:
+ jmp __bend
+ // @end
+__bend:
+ // main
+main: {
+ // [4] *((const nomodify byte*) SCREEN) ← (byte) 'a' -- _deref_pbuc1=vbuc2
+ lda #'a'
+ sta SCREEN
+ jmp __breturn
+ // main::@return
+ __breturn:
+ // [5] return
+ rts
+}
+ // File Data
+
+ASSEMBLER OPTIMIZATIONS
+Removing instruction jmp __b1
+Removing instruction jmp __bend
+Removing instruction jmp __breturn
+Succesful ASM optimization Pass5NextJumpElimination
+Removing instruction __b1_from___bbegin:
+Removing instruction __b1:
+Removing instruction __bend_from___b1:
+Succesful ASM optimization Pass5RedundantLabelElimination
+Removing instruction __bend:
+Removing instruction __breturn:
+Succesful ASM optimization Pass5UnusedLabelElimination
+Updating BasicUpstart to call main directly
+Removing instruction jsr main
+Succesful ASM optimization Pass5SkipBegin
+Removing instruction __bbegin:
+Succesful ASM optimization Pass5UnusedLabelElimination
+
+FINAL SYMBOL TABLE
+(label) @1
+(label) @begin
+(label) @end
+(const nomodify byte*) SCREEN = (byte*) 1024
+(void()) main()
+(label) main::@return
+
+
+
+FINAL ASSEMBLER
+Score: 12
+
+ // File Comments
+// Test the preprocessor
+// A simple #define
+ // Upstart
+.pc = $801 "Basic"
+:BasicUpstart(main)
+.pc = $80d "Program"
+ // Global Constants & labels
+ .label SCREEN = $400
+ // @begin
+ // [1] phi from @begin to @1 [phi:@begin->@1]
+ // @1
+ // [2] call main
+ // [3] phi from @1 to @end [phi:@1->@end]
+ // @end
+ // main
+main: {
+ // *SCREEN = A
+ // [4] *((const nomodify byte*) SCREEN) ← (byte) 'a' -- _deref_pbuc1=vbuc2
+ lda #'a'
+ sta SCREEN
+ // main::@return
+ // }
+ // [5] return
+ rts
+}
+ // File Data
+
diff --git a/src/test/ref/preprocessor-0.sym b/src/test/ref/preprocessor-0.sym
new file mode 100644
index 000000000..17d516eda
--- /dev/null
+++ b/src/test/ref/preprocessor-0.sym
@@ -0,0 +1,7 @@
+(label) @1
+(label) @begin
+(label) @end
+(const nomodify byte*) SCREEN = (byte*) 1024
+(void()) main()
+(label) main::@return
+