added examples for tests that compare the compiler output with a reference

This commit is contained in:
mrdudz 2022-11-03 17:40:13 +01:00
parent 0eba33ee74
commit 44b2e4f331
7 changed files with 140 additions and 5 deletions

View File

@ -8,6 +8,7 @@ This document contains all kinds of information that you should know if you want
* You must obey these rules when contributing new code or documentation to cc65. We are well aware that not all existing code may respect all rules outlined here - but this is no reason for you not to respect them.
* One commit/patch/PR per issue. Do not mix several things unless they are very closely related.
* Sometimes when you make a PR, it may break completely unrelated tests. However, any PR is expected to merge cleanly with no failures. In practice, that means you are expected to fix/update the failing tests if required - for example, this might be needed if you make changes to the compiler that change the format of error or warning messages. In that case you might have to update some reference files in the testbench. Obviously, still check whether that is actually the right thing to do ;)
# Codestyle rules

View File

@ -20,6 +20,28 @@ compiler is working as expected (when the tests behave as described):
library.
/ref - These tests produce output that must be compared with reference output.
Normally the reference output is produced by compiling the program on the
host (using gcc mostly) and then running them on the host. Tests should
be tweaked to produce the same output as on the host in the cases where
it would be different.
The Makefile also handles some special cases (add the tests to the
respective list in the makefile):
- Sometimes we want to check the warnings produced by the compiler. In
that case use the CUSTOMSOURCES list. Whatever output the compiler writes
to stderr will be compared against the matching .cref file. There is an
example in custom-reference.c/.cref
- Sometimes we want to check what kind of output the compiler produces
for a file that does not compile. In that case use the ERRORSOURCES list.
There is an example in custom-reference-error.c/.cref
Warning: please understand that comparing the compiler output against
a reference produces a moving target, ie the tests may break randomly
at any time when the compiler output changes for whatever reason. So
only ever use this as a last resort when something can not be tested by
other means.
/err - contains tests that MUST NOT compile

View File

@ -11,12 +11,14 @@ ifdef CMD_EXE
NULLDEV = nul:
MKDIR = mkdir $(subst /,\,$1)
RMDIR = -rmdir /s /q $(subst /,\,$1)
CP=copy
else
S = /
EXE =
NULLDEV = /dev/null
MKDIR = mkdir -p $1
RMDIR = $(RM) -r $1
CP=cp
endif
ifdef QUIET
@ -42,24 +44,47 @@ CFLAGS = -O2 -Wall -W -Wextra -funsigned-char -fwrapv -fno-strict-overflow
.PHONY: all clean
SOURCES := $(wildcard *.c)
# list of sources that produces warnings that we want to check. a .cref file
# containing the exact output is required.
CUSTOMSOURCES = \
custom-reference.c
# list of sources that produce a compiler error. a .cref files containing the
# exact error output is required
ERRORSOURCES = \
custom-reference-error.c
SOURCES := $(filter-out $(CUSTOMSOURCES) $(ERRORSOURCES),$(wildcard *.c))
REFS = $(SOURCES:%.c=$(WORKDIR)/%.ref)
CUSTOMREFS = $(CUSTOMSOURCES:%.c=$(WORKDIR)/%.cref) $(ERRORSOURCES:%.c=$(WORKDIR)/%.cref)
TESTS = $(foreach option,$(OPTIONS),$(SOURCES:%.c=$(WORKDIR)/%.$(option).6502.prg))
TESTS += $(foreach option,$(OPTIONS),$(SOURCES:%.c=$(WORKDIR)/%.$(option).65c02.prg))
all: $(REFS) $(TESTS)
CUSTOMTESTS = $(foreach option,$(OPTIONS),$(CUSTOMSOURCES:%.c=$(WORKDIR)/%.$(option).6502.custom.prg))
CUSTOMTESTS += $(foreach option,$(OPTIONS),$(CUSTOMSOURCES:%.c=$(WORKDIR)/%.$(option).65c02.custom.prg))
ERRORTESTS = $(foreach option,$(OPTIONS),$(ERRORSOURCES:%.c=$(WORKDIR)/%.$(option).6502.error.prg))
ERRORTESTS += $(foreach option,$(OPTIONS),$(ERRORSOURCES:%.c=$(WORKDIR)/%.$(option).65c02.error.prg))
all: $(CUSTOMREFS) $(REFS) $(TESTS) $(CUSTOMTESTS) $(ERRORTESTS)
$(WORKDIR):
$(call MKDIR,$(WORKDIR))
$(ISEQUAL): ../isequal.c | $(WORKDIR)
$(CC) $(CFLAGS) -o $@ $<
$(WORKDIR)/%.cref: %.cref | $(WORKDIR)
$(if $(QUIET),echo ref/$*.cref)
$(CP) $*.cref $@
$(WORKDIR)/%.ref: %.c | $(WORKDIR)
$(if $(QUIET),echo ref/$*.host)
$(CC) $(CFLAGS) -o $(WORKDIR)/$*.host $< $(NULLERR)
$(WORKDIR)$S$*.host > $@
$(ISEQUAL): ../isequal.c | $(WORKDIR)
$(CC) $(CFLAGS) -o $@ $<
# "yaccdbg.c" includes "yacc.c".
# yaccdbg's built files must depend on both of them.
#
@ -78,8 +103,43 @@ $(WORKDIR)/%.$1.$2.prg: %.c $(WORKDIR)/%.ref $(ISEQUAL)
endef # PRG_template
# extra template for the case when compilation works, but we still want to
# compare the warning output with our custom reference
# Template for tests that compile and run successfully, but whose compiler
# (warning-) output on stderr must additionally match a custom .cref reference.
# $1 = optimizer option, $2 = CPU (6502/65c02).
define PRG_custom_template
$(WORKDIR)/%.$1.$2.custom.prg: %.c $(WORKDIR)/%.ref $(WORKDIR)/%.cref $(ISEQUAL)
	$(if $(QUIET),echo cref/$$*.$1.$2.custom.prg)
	-$(CC65) -t sim$2 $$(CC65FLAGS) -$1 -o $$(@:.custom.prg=.s) $$< 2> $(WORKDIR)/$$*.$1.$2.cout
	$(CA65) -t sim$2 -o $$(@:.custom.prg=.o) $$(@:.custom.prg=.s) $(NULLERR)
	$(LD65) -t sim$2 -o $$@ $$(@:.custom.prg=.o) sim$2.lib $(NULLERR)
	$(SIM65) $(SIM65FLAGS) $$@ > $(WORKDIR)/$$*.$1.$2.out
	$(ISEQUAL) $(WORKDIR)/$$*.$1.$2.cout $(WORKDIR)/$$*.cref
	$(ISEQUAL) $(WORKDIR)/$$*.$1.$2.out $(WORKDIR)/$$*.ref
endef # PRG_custom_template
# extra template for the case when compilation fails, but we still want to
# compare the error output with our custom reference
# Template for tests whose compilation is EXPECTED to fail: only the compiler's
# stderr output is captured and compared against the .cref reference.
# $1 = optimizer option, $2 = CPU (6502/65c02).
define PRG_error_template
$(WORKDIR)/%.$1.$2.error.prg: %.c $(WORKDIR)/%.cref $(ISEQUAL)
	$(if $(QUIET),echo cref/$$*.$1.$2.error.prg)
	-$(CC65) -t sim$2 $$(CC65FLAGS) -$1 -o $$(@:.error.prg=.s) $$< 2> $(WORKDIR)/$$*.$1.$2.cout
# assemble/link/run steps deliberately disabled: compilation fails, so no .s/.o/.prg is produced
#	$(CA65) -t sim$2 -o $$(@:.error.prg=.o) $$(@:.error.prg=.s) $(NULLERR)
#	$(LD65) -t sim$2 -o $$@ $$(@:.error.prg=.o) sim$2.lib $(NULLERR)
#	$(SIM65) $(SIM65FLAGS) $$@ > $(WORKDIR)/$$*.$1.$2.out
	$(ISEQUAL) $(WORKDIR)/$$*.$1.$2.cout $(WORKDIR)/$$*.cref
endef # PRG_error_template
$(foreach option,$(OPTIONS),$(eval $(call PRG_template,$(option),6502)))
$(foreach option,$(OPTIONS),$(eval $(call PRG_template,$(option),65c02)))
$(foreach option,$(OPTIONS),$(eval $(call PRG_custom_template,$(option),6502)))
$(foreach option,$(OPTIONS),$(eval $(call PRG_custom_template,$(option),65c02)))
$(foreach option,$(OPTIONS),$(eval $(call PRG_error_template,$(option),6502)))
$(foreach option,$(OPTIONS),$(eval $(call PRG_error_template,$(option),65c02)))
clean:
@$(call RMDIR,$(WORKDIR))

View File

@ -0,0 +1,21 @@
/*
this is an example (not actually a regression test) that shows how to
make a check that compares the compiler (error-) output with a provided
reference.
to produce a reference file, first make sure your program "works" as intended,
then "make" in this directory once and copy the produced compiler output to
the reference:
$ cp ../../testwrk/ref/custom-reference-error.g.6502.cout custom-reference-error.cref
and then "make" again to confirm
*/
int main(int argc, char* argv[]) /* unused parameters: expected warnings in the .cref */
{
printf("%02x", 0x42); /* intentional error: printf is deliberately left undeclared (no stdio.h) */
n = 0; /* produce an error */ /* intentional error: 'n' is undefined */
/* another error */ /* NOTE(review): no statement follows — comment may be a leftover; confirm against the .cref */
} /* missing return value also produces an expected warning here */

View File

@ -0,0 +1,5 @@
custom-reference-error.c:18: Error: Call to undeclared function 'printf'
custom-reference-error.c:19: Error: Undefined symbol: 'n'
custom-reference-error.c:21: Warning: Control reaches end of non-void function [-Wreturn-type]
custom-reference-error.c:21: Warning: Parameter 'argc' is never used
custom-reference-error.c:21: Warning: Parameter 'argv' is never used

View File

@ -0,0 +1,24 @@
/*
this is an example (not actually a regression test) that shows how to
make a check that compares the compiler (error-) output with a provided
reference.
to produce a reference file, first make sure your program "works" as intended,
then "make" in this directory once and copy the produced compiler output to
the reference:
$ cp ../../testwrk/ref/custom-reference.g.6502.cout custom-reference.cref
and then "make" again to confirm
*/
#include <stdint.h>
#include <stdio.h>
int main(int argc, char* argv[]) /* argc/argv are deliberately unused */
{
printf("%02x", 0x42); /* harmless output; the test only compares the compiler's stderr */
/* produce a warning */ /* the expected warnings are the unused parameters reported in the .cref */
return 0;
}

View File

@ -0,0 +1,2 @@
custom-reference.c:24: Warning: Parameter 'argc' is never used
custom-reference.c:24: Warning: Parameter 'argv' is never used