Implement NTSC emulation, using an 8-pixel window for the chroma signal.

Use this to precompute a new NTSC palette with 256 entries (only 84 of which
are unique colours) that are reachable via appropriate pixel sequences.
Unfortunately, the precomputed distance matrix for this palette is 4GB!
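(For scale: 2^24 = 16,777,216 possible 24-bit RGB source colours x 256 palette
entries x 1 byte per quantized CIE2000 distance = 4GB, versus 256MB for a
16-entry palette.)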

Optimize the precomputation to be less memory hungry, while also
making efficient use of the mmapped output file.
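Roughly, a simplified sketch of the idea (not the exact code; the zero block
below stands in for the real per-block CIE2000 computation): delta-E values are
written straight into the np.memmap in blocks of 65536 RGB values, so only one
block is ever held in memory.

import numpy as np

ROWS, COLS = 1 << 24, 256      # one row per 24-bit RGB value, 256 NTSC entries
BLOCK = 1 << 16                # 65536 RGB values per write
out = np.memmap("/tmp/distances_demo.data", mode="w+", dtype=np.uint8,
                shape=(ROWS, COLS))
for i in range(ROWS // BLOCK):
    # Placeholder for the per-block CIE2000 delta-E computation, scaled to
    # fit in uint8; each block lands contiguously in the mmapped file.
    out[i * BLOCK:(i + 1) * BLOCK, :] = np.zeros((BLOCK, COLS), dtype=np.uint8)
out.flush()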

Add support for dithering images at this 8-bit palette depth, i.e. optimizing
for NTSC rendering.  This often gives better image quality, since more colours
are available, especially when modulating areas of similar colour.
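At dither time the matrix is only read, one row per candidate pixel.  A hedged
sketch of the lookup (the row-flattening scheme here is an assumption, not
taken from the code):

import numpy as np

distances = np.memmap("data/distances_ntsc.data", mode="r", dtype=np.uint8,
                      shape=(1 << 24, 256))

def distance(rgb, palette_index):
    r, g, b = rgb                     # 8-bit channel values
    flat = (r << 16) + (g << 8) + b   # assumed flattening of 24-bit RGB
    return distances[flat, palette_index]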

Fix 140px dithering, and render its output including NTSC fringing instead of
the unrealistic 140px output that omits it.

Add support for rendering the output image using any target palette, which is
useful e.g. for comparing how an image dithered with 8-pixel NTSC emulation
will be displayed by an emulator that uses 4-pixel NTSC emulation (there is
usually some colour bias, because the 8-pixel chroma blending tends to average
away colours).
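A hedged sketch of what this looks like with the classes below (mirroring the
--show_palette handling; the function name is illustrative):

import palette as palette_py
import screen as screen_py

def preview_rgb(bitmap, palette_name="openemulator"):
    # Re-render a packed DHGR bitmap under a different target palette,
    # e.g. a 4-pixel emulator palette, to check for colour bias.
    output_palette = palette_py.PALETTES[palette_name]()
    output_screen = screen_py.DHGR560Screen(output_palette)
    return output_screen.bitmap_to_image_rgb(bitmap)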

Switch the output binary format to write AUX memory first, which
matches the image format of other utilities.
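A minimal sketch of the resulting file layout (assuming the usual 8192-byte
DHGR memory banks; the helper name is illustrative):

def save_dhr(screen, path):
    with open(path, "wb") as f:
        f.write(bytes(screen.aux))   # AUX bank first, at file offset 0x0000
        f.write(bytes(screen.main))  # MAIN bank second, at offset 0x2000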
kris 2021-02-14 23:34:25 +00:00
parent 301ca2772b
commit ad9515dcf2
6 changed files with 480 additions and 98 deletions


@ -27,39 +27,57 @@ def main():
    parser.add_argument(
        "--lookahead", type=int, default=6,
        help=("How many pixels to look ahead to compensate for NTSC colour "
              "artifacts. Default: 6"))
    parser.add_argument(
        '--dither', type=str, choices=list(dither_pattern.PATTERNS.keys()),
        default=dither_pattern.DEFAULT_PATTERN,
        help="Error distribution pattern to apply when dithering. Default: "
             + dither_pattern.DEFAULT_PATTERN)
    parser.add_argument(
        '--show_input', action=argparse.BooleanOptionalAction, default=False,
        help="Whether to show the input image before conversion. Default: "
             "False")
    parser.add_argument(
        '--show_output', action=argparse.BooleanOptionalAction, default=True,
        help="Whether to show the output image after conversion. Default: "
             "True")
    parser.add_argument(
        '--resolution', type=str, choices=("140", "560", "ntsc"),
        default="560",
        help=("Effective double hi-res resolution to target. '140' treats "
              "pixels in groups of 4, with 16 colours that are chosen "
              "independently, and ignores NTSC fringing. This is mostly only "
              "useful for comparison to other 140px converters. '560' treats "
              "each pixel individually, with choice of 2 colours (depending "
              "on NTSC colour phase), and looking ahead over next "
              "--lookahead pixels to optimize the colour sequence. 'ntsc' "
              "additionally simulates the reduced bandwidth of the NTSC "
              "chroma signal, and causes colours to bleed over 8 successive "
              "pixels instead of 4. Default: 560")
    )
    parser.add_argument(
        '--palette', type=str, choices=list(
            set(palette_py.PALETTES.keys()) - {"ntsc"}),
        default=palette_py.DEFAULT_PALETTE,
        help="RGB colour palette to dither to. Ignored for "
             "--resolution=ntsc. Default: " + palette_py.DEFAULT_PALETTE)
    parser.add_argument(
        '--show_palette', type=str, choices=list(palette_py.PALETTES.keys()),
        help="RGB colour palette to use when --show_output. Default: "
             "value of --palette.")
    args = parser.parse_args()

    if args.resolution == "ntsc":
        palette = palette_py.PALETTES["ntsc"]()
        screen = screen_py.DHGR560NTSCScreen(palette)
        lookahead = args.lookahead
    else:
        palette = palette_py.PALETTES[args.palette]()
        if args.resolution == "560":
            screen = screen_py.DHGR560Screen(palette)
            lookahead = args.lookahead
        else:
            screen = screen_py.DHGR140Screen(palette)
            lookahead = 0

    # Open and resize source image
    image = image_py.open(args.input)
@ -69,39 +87,33 @@ def main():
        screen.Y_RES)).astype(np.float32)

    dither = dither_pattern.PATTERNS[args.dither]()

    # start = time.time()
    output_4bit, _ = dither_pyx.dither_image(
        screen, resized, dither, lookahead)
    # print(time.time() - start)
    bitmap = screen.pack(output_4bit)

    # Show output image by rendering in target palette
    if args.show_palette:
        output_palette = palette_py.PALETTES[args.show_palette]()
    else:
        output_palette = palette
    if args.show_palette == 'ntsc':
        output_screen = screen_py.DHGR560NTSCScreen(output_palette)
    else:
        output_screen = screen_py.DHGR560Screen(output_palette)
    output_rgb = output_screen.bitmap_to_image_rgb(bitmap)
    out_image = Image.fromarray(image_py.linear_to_srgb(output_rgb).astype(
        np.uint8))
    out_image = image_py.resize(out_image, 560, 384, srgb_output=True)

    if args.show_output:
        out_image.show()
        ntsc = Image.fromarray(screen.bitmap_to_ntsc(bitmap))
        ntsc = image_py.resize(ntsc, 560 * 3 // 2, 384 * 3 // 2,
                               srgb_output=True)
        ntsc.show()

    # Save Double hi-res image
    outfile = os.path.join(os.path.splitext(args.output)[0] + "-preview.png")
    out_image.save(outfile, "PNG")
    with open(args.output, "wb") as f:
        f.write(bytes(screen.aux))
        f.write(bytes(screen.main))
if __name__ == "__main__":


@ -181,8 +181,8 @@ cdef apply(Dither* dither, screen, int x, int y, Image* image, float[] quant_err
@cython.boundscheck(False)
@cython.wraparound(False)
cdef find_nearest_colour(screen, float[3] pixel_rgb, unsigned char[::1] options_4bit, unsigned char[:, ::1] options_rgb):
    cdef int best, dist, i
    cdef unsigned char bit4
    cdef int best_dist = 2**8
    cdef long flat

ntsc_colours.py (new file)

@ -0,0 +1,41 @@
"""Precomputes all possible colours available via NTSC emulation."""
import colour
import numpy as np
from PIL import Image
import screen
def main():
s = screen.DHGR560Screen(palette=None)
bitmap = np.zeros((1, 8), dtype=np.bool)
colours = {}
unique = set()
print("SRGB = {")
# For each sequence of 8 pixels, compute the RGB colour of the right-most
# pixel, using NTSC emulation.
for bits in range(256):
for i in range(8):
bitmap[0, i] = bits & (1 << i)
ntsc = s.bitmap_to_ntsc(bitmap)
last_colour = ntsc[0, -1, :]
colours[bits] = last_colour
unique.add(tuple(last_colour))
print("%d: np.array((%d, %d, %d))," % (
bits, last_colour[0], last_colour[1], last_colour[2]))
print("}")
print("# %d unique colours" % len(unique))
# Show spectrum of available colours sorted by HSV hue value
im = np.zeros((128, 256 * 16, 3), dtype=np.uint8)
for x, hsv in enumerate(sorted([tuple(colour.RGB_to_HSV(c / 256)) for c in
colours.values()])):
im[0:128, x * 16: (x + 1) * 16, :] = colour.HSV_to_RGB(hsv) * 256
Image.fromarray(im).show()
if __name__ == "__main__":
main()
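Its output is a ready-to-paste Python dict in the same form as
NTSCPalette.SRGB below, e.g.:

SRGB = {
0: np.array((0, 0, 0)),
1: np.array((0, 0, 62)),
...
255: np.array((255, 255, 255)),
}
# 84 unique colours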


@ -5,50 +5,45 @@ import image
class Palette:
    RGB = {}
    SRGB = None
    DOTS = {}
    DOTS_TO_INDEX = {}
    DISTANCES_PATH = None

    # How many successive screen pixels are used to compute output pixel
    # palette index.
    PALETTE_DEPTH = None

    def __init__(self, load_distances=True):
        if load_distances:
            # CIE2000 colour distance matrix from 24-bit RGB tuple to 4-bit
            # palette colour.
            self.distances = np.memmap(self.DISTANCES_PATH, mode="r+",
                                       dtype=np.uint8,
                                       shape=(16777216, len(self.SRGB)))

        self.RGB = {}
        for k, v in self.SRGB.items():
            self.RGB[k] = (np.clip(image.srgb_to_linear_array(v / 255), 0.0,
                                   1.0) * 255).astype(np.uint8)

        # Maps palette values to screen dots. Note that these are the same as
        # the binary index values in reverse order.
        for i in range(1 << self.PALETTE_DEPTH):
            self.DOTS[i] = tuple(
                bool(i & (1 << j)) for j in range(self.PALETTE_DEPTH))

        # Reverse mapping from screen dots to palette index.
        self.DOTS_TO_INDEX = {}
        for k, v in self.DOTS.items():
            self.DOTS_TO_INDEX[v] = k
class ToHgrPalette(Palette):
DISTANCES_PATH = "data/distances_tohgr.data"
PALETTE_DEPTH = 4
# Default tohgr/bmp2dhr palette
SRGB = {
0: np.array((0, 0, 0)), # Black
@ -72,7 +67,8 @@ class ToHgrPalette(Palette):
class OpenEmulatorPalette(Palette):
DISTANCES_PATH = "data/distances_openemulator.data"
PALETTE_DEPTH = 4
# OpenEmulator
SRGB = {
0: np.array((0, 0, 0)), # Black
@ -96,7 +92,8 @@ class OpenEmulatorPalette(Palette):
class VirtualIIPalette(Palette):
DISTANCES_PATH = "data/distances_virtualii.data"
PALETTE_DEPTH = 4
SRGB = {
0: np.array((0, 0, 0)), # Black
8: np.array((231, 36, 66)), # Magenta
@ -117,10 +114,277 @@ class VirtualIIPalette(Palette):
}
class NTSCPalette(Palette):
DISTANCES_PATH = 'data/distances_ntsc.data'
PALETTE_DEPTH = 8
# Computed using ntsc_colours.py
SRGB = {
0: np.array((0, 0, 0)),
1: np.array((0, 0, 62)),
2: np.array((0, 18, 0)),
3: np.array((0, 3, 28)),
4: np.array((44, 14, 0)),
5: np.array((0, 0, 0)),
6: np.array((0, 32, 0)),
7: np.array((0, 18, 0)),
8: np.array((67, 0, 34)),
9: np.array((22, 0, 96)),
10: np.array((0, 0, 0)),
11: np.array((0, 0, 62)),
12: np.array((112, 0, 0)),
13: np.array((67, 0, 34)),
14: np.array((44, 14, 0)),
15: np.array((0, 0, 0)),
16: np.array((24, 54, 131)),
17: np.array((0, 40, 193)),
18: np.array((0, 73, 97)),
19: np.array((0, 58, 159)),
20: np.array((69, 69, 69)),
21: np.array((24, 54, 131)),
22: np.array((1, 87, 35)),
23: np.array((0, 73, 97)),
24: np.array((91, 36, 165)),
25: np.array((47, 22, 227)),
26: np.array((24, 54, 131)),
27: np.array((0, 40, 193)),
28: np.array((136, 50, 103)),
29: np.array((91, 36, 165)),
30: np.array((69, 69, 69)),
31: np.array((24, 54, 131)),
32: np.array((1, 87, 35)),
33: np.array((0, 73, 97)),
34: np.array((0, 105, 1)),
35: np.array((0, 91, 63)),
36: np.array((46, 101, 0)),
37: np.array((1, 87, 35)),
38: np.array((0, 120, 0)),
39: np.array((0, 105, 1)),
40: np.array((69, 69, 69)),
41: np.array((24, 54, 131)),
42: np.array((1, 87, 35)),
43: np.array((0, 73, 97)),
44: np.array((113, 83, 7)),
45: np.array((69, 69, 69)),
46: np.array((46, 101, 0)),
47: np.array((1, 87, 35)),
48: np.array((26, 142, 166)),
49: np.array((0, 127, 228)),
50: np.array((0, 160, 132)),
51: np.array((0, 146, 194)),
52: np.array((70, 156, 104)),
53: np.array((26, 142, 166)),
54: np.array((3, 174, 70)),
55: np.array((0, 160, 132)),
56: np.array((93, 124, 200)),
57: np.array((48, 109, 255)),
58: np.array((26, 142, 166)),
59: np.array((0, 127, 228)),
60: np.array((138, 138, 138)),
61: np.array((93, 124, 200)),
62: np.array((70, 156, 104)),
63: np.array((26, 142, 166)),
64: np.array((113, 83, 7)),
65: np.array((69, 69, 69)),
66: np.array((46, 101, 0)),
67: np.array((1, 87, 35)),
68: np.array((158, 97, 0)),
69: np.array((113, 83, 7)),
70: np.array((91, 116, 0)),
71: np.array((46, 101, 0)),
72: np.array((181, 65, 41)),
73: np.array((136, 50, 103)),
74: np.array((113, 83, 7)),
75: np.array((69, 69, 69)),
76: np.array((226, 79, 0)),
77: np.array((181, 65, 41)),
78: np.array((158, 97, 0)),
79: np.array((113, 83, 7)),
80: np.array((138, 138, 138)),
81: np.array((93, 124, 200)),
82: np.array((70, 156, 104)),
83: np.array((26, 142, 166)),
84: np.array((183, 152, 76)),
85: np.array((138, 138, 138)),
86: np.array((115, 171, 42)),
87: np.array((70, 156, 104)),
88: np.array((205, 120, 172)),
89: np.array((161, 105, 234)),
90: np.array((138, 138, 138)),
91: np.array((93, 124, 200)),
92: np.array((250, 134, 110)),
93: np.array((205, 120, 172)),
94: np.array((183, 152, 76)),
95: np.array((138, 138, 138)),
96: np.array((115, 171, 42)),
97: np.array((70, 156, 104)),
98: np.array((48, 189, 8)),
99: np.array((3, 174, 70)),
100: np.array((160, 185, 0)),
101: np.array((115, 171, 42)),
102: np.array((93, 203, 0)),
103: np.array((48, 189, 8)),
104: np.array((183, 152, 76)),
105: np.array((138, 138, 138)),
106: np.array((115, 171, 42)),
107: np.array((70, 156, 104)),
108: np.array((227, 167, 14)),
109: np.array((183, 152, 76)),
110: np.array((160, 185, 0)),
111: np.array((115, 171, 42)),
112: np.array((140, 225, 173)),
113: np.array((95, 211, 235)),
114: np.array((72, 244, 139)),
115: np.array((28, 229, 201)),
116: np.array((184, 240, 111)),
117: np.array((140, 225, 173)),
118: np.array((117, 255, 77)),
119: np.array((72, 244, 139)),
120: np.array((207, 207, 207)),
121: np.array((162, 193, 255)),
122: np.array((140, 225, 173)),
123: np.array((95, 211, 235)),
124: np.array((252, 221, 145)),
125: np.array((207, 207, 207)),
126: np.array((184, 240, 111)),
127: np.array((140, 225, 173)),
128: np.array((136, 50, 103)),
129: np.array((91, 36, 165)),
130: np.array((69, 69, 69)),
131: np.array((24, 54, 131)),
132: np.array((181, 65, 41)),
133: np.array((136, 50, 103)),
134: np.array((113, 83, 7)),
135: np.array((69, 69, 69)),
136: np.array((203, 32, 137)),
137: np.array((159, 18, 199)),
138: np.array((136, 50, 103)),
139: np.array((91, 36, 165)),
140: np.array((248, 47, 75)),
141: np.array((203, 32, 137)),
142: np.array((181, 65, 41)),
143: np.array((136, 50, 103)),
144: np.array((161, 105, 234)),
145: np.array((116, 91, 255)),
146: np.array((93, 124, 200)),
147: np.array((48, 109, 255)),
148: np.array((205, 120, 172)),
149: np.array((161, 105, 234)),
150: np.array((138, 138, 138)),
151: np.array((93, 124, 200)),
152: np.array((228, 87, 255)),
153: np.array((183, 73, 255)),
154: np.array((161, 105, 234)),
155: np.array((116, 91, 255)),
156: np.array((255, 101, 206)),
157: np.array((228, 87, 255)),
158: np.array((205, 120, 172)),
159: np.array((161, 105, 234)),
160: np.array((138, 138, 138)),
161: np.array((93, 124, 200)),
162: np.array((70, 156, 104)),
163: np.array((26, 142, 166)),
164: np.array((183, 152, 76)),
165: np.array((138, 138, 138)),
166: np.array((115, 171, 42)),
167: np.array((70, 156, 104)),
168: np.array((205, 120, 172)),
169: np.array((161, 105, 234)),
170: np.array((138, 138, 138)),
171: np.array((93, 124, 200)),
172: np.array((250, 134, 110)),
173: np.array((205, 120, 172)),
174: np.array((183, 152, 76)),
175: np.array((138, 138, 138)),
176: np.array((162, 193, 255)),
177: np.array((118, 178, 255)),
178: np.array((95, 211, 235)),
179: np.array((50, 197, 255)),
180: np.array((207, 207, 207)),
181: np.array((162, 193, 255)),
182: np.array((140, 225, 173)),
183: np.array((95, 211, 235)),
184: np.array((230, 174, 255)),
185: np.array((185, 160, 255)),
186: np.array((162, 193, 255)),
187: np.array((118, 178, 255)),
188: np.array((255, 189, 241)),
189: np.array((230, 174, 255)),
190: np.array((207, 207, 207)),
191: np.array((162, 193, 255)),
192: np.array((250, 134, 110)),
193: np.array((205, 120, 172)),
194: np.array((183, 152, 76)),
195: np.array((138, 138, 138)),
196: np.array((255, 148, 48)),
197: np.array((250, 134, 110)),
198: np.array((227, 167, 14)),
199: np.array((183, 152, 76)),
200: np.array((255, 116, 144)),
201: np.array((255, 101, 206)),
202: np.array((250, 134, 110)),
203: np.array((205, 120, 172)),
204: np.array((255, 130, 82)),
205: np.array((255, 116, 144)),
206: np.array((255, 148, 48)),
207: np.array((250, 134, 110)),
208: np.array((255, 189, 241)),
209: np.array((230, 174, 255)),
210: np.array((207, 207, 207)),
211: np.array((162, 193, 255)),
212: np.array((255, 203, 179)),
213: np.array((255, 189, 241)),
214: np.array((252, 221, 145)),
215: np.array((207, 207, 207)),
216: np.array((255, 171, 255)),
217: np.array((255, 156, 255)),
218: np.array((255, 189, 241)),
219: np.array((230, 174, 255)),
220: np.array((255, 185, 213)),
221: np.array((255, 171, 255)),
222: np.array((255, 203, 179)),
223: np.array((255, 189, 241)),
224: np.array((252, 221, 145)),
225: np.array((207, 207, 207)),
226: np.array((184, 240, 111)),
227: np.array((140, 225, 173)),
228: np.array((255, 236, 83)),
229: np.array((252, 221, 145)),
230: np.array((229, 254, 49)),
231: np.array((184, 240, 111)),
232: np.array((255, 203, 179)),
233: np.array((255, 189, 241)),
234: np.array((252, 221, 145)),
235: np.array((207, 207, 207)),
236: np.array((255, 218, 117)),
237: np.array((255, 203, 179)),
238: np.array((255, 236, 83)),
239: np.array((252, 221, 145)),
240: np.array((255, 255, 255)),
241: np.array((232, 255, 255)),
242: np.array((209, 255, 242)),
243: np.array((164, 255, 255)),
244: np.array((255, 255, 214)),
245: np.array((255, 255, 255)),
246: np.array((254, 255, 180)),
247: np.array((209, 255, 242)),
248: np.array((255, 255, 255)),
249: np.array((255, 244, 255)),
250: np.array((255, 255, 255)),
251: np.array((232, 255, 255)),
252: np.array((255, 255, 248)),
253: np.array((255, 255, 255)),
254: np.array((255, 255, 214)),
255: np.array((255, 255, 255)),
}
# 84 unique colours
PALETTES = {
'openemulator': OpenEmulatorPalette,
'virtualii': VirtualIIPalette,
'tohgr': ToHgrPalette,
'ntsc': NTSCPalette
}
DEFAULT_PALETTE = 'openemulator'
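For illustration, a small hedged example of the index <-> dots round trip that
Palette.__init__ constructs above (8-bit NTSC depth, least-significant bit
first):

depth = 8
index = 0b10110001
dots = tuple(bool(index & (1 << j)) for j in range(depth))
assert dots == (True, False, False, False, True, True, False, True)
assert sum(1 << j for j, d in enumerate(dots) if d) == index  # DOTS_TO_INDEX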


@ -1,8 +1,9 @@
"""Precompute CIE2000 perceptual colour distance matrices.
The matrix of delta-E values is computed for all pairs of 24-bit RGB values,
and Apple II target palette values. This is written out as a file that is
mmapped at runtime for efficient access. For a 16-colour target palette this
file is 256MB; for a 256-colour (NTSC) target palette it is 4GB.
"""
import argparse
@ -13,7 +14,9 @@ import palette as palette_py
import colour.difference
import numpy as np
RGB_LEVELS = 256

# Largest possible value of delta_E_CIE2000 between two RGB values
DELTA_E_MAX = 120  # TODO: fine-tune
def rgb_to_lab(rgb: np.ndarray):
@ -25,21 +28,29 @@ def rgb_to_lab(rgb: np.ndarray):
def all_lab_colours():
    all_rgb = np.array(tuple(np.ndindex(RGB_LEVELS, RGB_LEVELS, RGB_LEVELS)),
                       dtype=np.uint8)
    return rgb_to_lab(all_rgb)
def nearest_colours(palette, all_lab, diffs):
    palette_size = len(palette)
    palette_labs = np.empty((palette_size, 3), dtype=np.float)
    for i, palette_rgb in palette.RGB.items():
        palette_labs[i, :] = rgb_to_lab(palette_rgb)

    print("Computing all 24-bit palette diffs:")
    for i in range(palette_size):
        print(" %d/%d" % (i, palette_size))
        # Compute all palette diffs for a block of 65536 successive RGB
        # source values at once, which bounds the memory use while also
        # writing contiguously to the mmapped array.
        diffs[i * (1 << 16):(i + 1) * (1 << 16), :] = (
            colour.difference.delta_E_CIE2000(
                all_lab[i * (1 << 16):(i + 1) * (
                    1 << 16)].reshape((1 << 16, 1, 3)),
                palette_labs.reshape((1, palette_size, 3))) / DELTA_E_MAX *
            255).astype(np.uint8)
def main():
@ -60,16 +71,15 @@ def main():
print("Precomputing matrix of all 24-bit LAB colours")
all_lab = all_lab_colours()
    for palette_name in palette_names:
        print("Creating distance file for palette %s" % palette_name)
        palette = palette_py.PALETTES[palette_name](load_distances=False)
        try:
            os.mkdir(os.path.dirname(palette.DISTANCES_PATH))
        except FileExistsError:
            pass

        out = np.memmap(filename=palette.DISTANCES_PATH, mode="w+",
                        dtype=np.uint8,
                        shape=(RGB_LEVELS ** 3, len(palette)))
        nearest_colours(palette, all_lab, out)
if __name__ == "__main__":


@ -3,6 +3,8 @@
import numpy as np
import palette as palette_py
# TODO: rename "4bit" variable naming now that we also have palettes with 8 bit
# depth.
class Screen:
X_RES = None
@ -72,7 +74,7 @@ class Screen:
            pixel = [False, False, False, False]
            for x in range(560):
                pixel[x % 4] = bitmap[y, x]
                dots = self.palette.DOTS_TO_INDEX[tuple(pixel)]
                image_rgb[y, x, :] = self.palette.RGB[dots]
        return image_rgb
@ -81,12 +83,12 @@ class Screen:
raise NotImplementedError
    @staticmethod
    def _sin(pos, phase0=4):
        x = pos % 12 + phase0
        return 8 * np.sin(x * 2 * np.pi / 12)

    @staticmethod
    def _cos(pos, phase0=4):
        x = pos % 12 + phase0
        return 8 * np.cos(x * 2 * np.pi / 12)
@ -128,17 +130,18 @@ class Screen:
ib = contrast * -1.012984e-6 * saturation / i_width
qb = contrast * 1.667217e-6 * saturation / q_width
        out_rgb = np.empty((bitmap.shape[0], bitmap.shape[1] * 3, 3),
                           dtype=np.uint8)
        for y in range(bitmap.shape[0]):
            ysum = 0
            isum = 0
            qsum = 0
            line = np.repeat(bitmap[y], 3)
            # color = y // (192//16)
            # line = np.repeat(np.tile((color & 1, color & 2, color & 4,
            #                           color & 8), 140), 3)
            for x in range(bitmap.shape[1] * 3):
                ysum += self._read(line, x) - self._read(line, x - y_width)
                isum += self._read(line, x) * self._cos(x) - self._read(
                    line, x - i_width) * self._cos((x - i_width))
@ -182,7 +185,7 @@ class DHGR140Screen(Screen):
class DHGR560Screen(Screen):
    """DHGR screen including colour fringing and 4 pixel chroma bleed."""
    X_RES = 560
    Y_RES = 192
    X_PIXEL_WIDTH = 1
@ -205,8 +208,60 @@ class DHGR560Screen(Screen):
        other_dots = list(last_dots)
        other_dots[x % 4] = not other_dots[x % 4]
        other_dots = tuple(other_dots)
        other_pixel_4bit = self.palette.DOTS_TO_INDEX[other_dots]
        return (np.array([last_pixel_4bit, other_pixel_4bit], dtype=np.uint8),
                np.array([self.palette.RGB[last_pixel_4bit],
                          self.palette.RGB[other_pixel_4bit]],
                         dtype=np.uint8))
# TODO: refactor to share implementation with DHGR560Screen
class DHGR560NTSCScreen(Screen):
    """DHGR screen including colour fringing and 8 pixel chroma bleed."""
    X_RES = 560
    Y_RES = 192
    X_PIXEL_WIDTH = 1

    def _image_to_bitmap(self, image_4bit: np.ndarray) -> np.ndarray:
        bitmap = np.zeros((self.Y_RES, self.X_RES), dtype=np.bool)
        for y in range(self.Y_RES):
            for x in range(self.X_RES):
                pixel = image_4bit[y, x]
                dots = self.palette.DOTS[pixel]
                phase = x % 4
                bitmap[y, x] = dots[4 + phase]
        return bitmap

    def bitmap_to_image_rgb(self, bitmap: np.ndarray) -> np.ndarray:
        """Convert our 2-bit bitmap image into a RGB image.

        Colour at every pixel is determined by the value of a 8-bit sliding
        window indexed by x % 4, which gives the index into our 256-colour RGB
        palette.
        """
        image_rgb = np.empty((192, 560, 3), dtype=np.uint8)
        for y in range(self.Y_RES):
            pixel = [False, False, False, False, False, False, False, False]
            for x in range(560):
                pixel[x % 4] = pixel[x % 4 + 4]
                pixel[x % 4 + 4] = bitmap[y, x]
                dots = self.palette.DOTS_TO_INDEX[tuple(pixel)]
                image_rgb[y, x, :] = self.palette.RGB[dots]
        return image_rgb

    def pixel_palette_options(self, last_pixel_4bit, x: int):
        # The two available 8-bit pixel colour choices are given by:
        # - Rotating the pixel value from the current x % 4 + 4 position to
        #   x % 4
        # - choosing 0 and 1 for the new values of x % 4 + 4
        next_dots0 = list(self.palette.DOTS[last_pixel_4bit])
        next_dots1 = list(next_dots0)
        next_dots0[x % 4] = next_dots0[x % 4 + 4]
        next_dots0[x % 4 + 4] = False
        next_dots1[x % 4] = next_dots1[x % 4 + 4]
        next_dots1[x % 4 + 4] = True
        pixel_4bit_0 = self.palette.DOTS_TO_INDEX[tuple(next_dots0)]
        pixel_4bit_1 = self.palette.DOTS_TO_INDEX[tuple(next_dots1)]
        return (
            np.array([pixel_4bit_0, pixel_4bit_1], dtype=np.uint8),
            np.array([self.palette.RGB[pixel_4bit_0],
                      self.palette.RGB[pixel_4bit_1]], dtype=np.uint8))
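For clarity, a hedged standalone illustration of the 8-dot sliding window that
bitmap_to_image_rgb maintains above (for each NTSC phase p, position [p] holds
the dot from one colour cycle, i.e. 4 pixels, before the dot in [p + 4]):

window = [False] * 8

def push_dot(x, dot):
    window[x % 4] = window[x % 4 + 4]   # age out the dot for this phase
    window[x % 4 + 4] = dot             # record the newly read screen dot
    return tuple(window)                # key into palette.DOTS_TO_INDEX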