From fb467e5289a7e084f91b6e6f6329dc0a371386b1 Mon Sep 17 00:00:00 2001
From: Lawrence Kesteloot
Date: Sat, 4 Aug 2018 17:35:55 -0700
Subject: [PATCH] Speed up tokenization a bit.

---
 main.c | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/main.c b/main.c
index 078dadb..b275321 100644
--- a/main.c
+++ b/main.c
@@ -625,7 +625,8 @@ static uint16_t tokenize(uint8_t *s) {
 
     // Try every token.
     for (i = 0; i < TOKEN_COUNT; i++) {
-        skipped = skip_over(s, TOKEN[i]);
+        // Quick optimization, peek at the first letter.
+        skipped = s[0] == TOKEN[i][0] ? skip_over(s, TOKEN[i]) : 0;
         if (skipped != 0) {
             // Record token.
             *t++ = 0x80 + i;