kris 2021-07-19 12:55:50 +01:00
parent 20a73ffb8a
commit e08f25e4cc
4 changed files with 185 additions and 442 deletions

View File

@ -55,8 +55,8 @@ cdef int dither_bounds_yb(Dither *dither, int y_res, int y) nogil:
@cython.boundscheck(False)
@cython.wraparound(False)
@functools.lru_cache(None)
def lookahead_options(object screen, int lookahead, unsigned char last_pixel_nbit, int x):
# @functools.lru_cache(None)
cdef inline unsigned char lookahead_pixels(unsigned char last_pixel_nbit, unsigned int next_pixels, int lookahead):
"""Compute all possible n-bit palette values for upcoming pixels, given x coord and state of n pixels to the left.
Args:
@ -69,28 +69,25 @@ def lookahead_options(object screen, int lookahead, unsigned char last_pixel_nbi
Returns: matrix of size (2**lookahead, lookahead) containing all 2**lookahead possible vectors of n-bit palette
values accessible at positions x .. x + lookahead
"""
cdef unsigned char[:, ::1] options_nbit = np.empty((2 ** lookahead, lookahead), dtype=np.uint8)
cdef int i, j, xx, p
cdef unsigned char output_pixel_nbit
cdef unsigned char[::1] palette_choices_nbit
# XXX palette bit depth
return (last_pixel_nbit >> (lookahead+1)) | (next_pixels << (8 - (lookahead + 1)))
cdef object palette = screen.palette
cdef dict palette_rgb = palette.RGB
for i in range(2 ** lookahead):
output_pixel_nbit = last_pixel_nbit
for j in range(lookahead):
xx = x + j
# Two possible n-bit palette choices at position xx, given state of n pixels to left.
# TODO: port screen.py to pyx
palette_choices_nbit = screen.pixel_palette_options(output_pixel_nbit, xx)
next_bit_choice = (i & (1 << j)) >> j
options_nbit[i, j] = palette_choices_nbit[next_bit_choice]
output_pixel_nbit >>= 1
output_pixel_nbit |= (next_bit_choice << 7)
# print(bin(i),j,bin(last_pixel_nbit), bin(output_pixel_nbit), bin(options_nbit[i, j]))
#print("Picking %s" % ((i & (1 << j)) >> j))
# cdef unsigned char[:, ::1] options_nbit = np.empty((2 ** lookahead, lookahead), dtype=np.uint8)
# cdef int i, j, p, k
# cdef unsigned char output_pixel_nbit
# cdef unsigned char[::1] palette_choices_nbit
return options_nbit
# # cdef object palette = screen.palette
# # cdef dict palette_rgb = palette.RGB
# cdef unsigned int[::1] lookahead_pixel_values = (np.arange(2**lookahead, dtype=np.uint32) << 8) | last_pixel_nbit
# # XXX inline into dither_lookahead once working
# for i in range(2 ** lookahead):
# for j in range(lookahead):
# options_nbit[:, j] = (lookahead_pixel_values[i] >> (j+1)) & 0xff
# return options_nbit
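Written out in plain Python, the bit packing in lookahead_pixels above works like the sketch below: the 8-bit value is a shift register of the last 8 dots with the newest dot in bit 7, the lookahead+1 oldest dots are dropped, and the candidate bits are spliced into the top. lookahead_pixels_py is an illustrative name, not part of the module.

def lookahead_pixels_py(last_pixel_nbit: int, next_pixels: int, lookahead: int) -> int:
    """8-dot window seen `lookahead` positions ahead of the current pixel.

    last_pixel_nbit: packed history of the last 8 dots (newest dot in bit 7).
    next_pixels: candidate on/off bits for offsets 0 .. lookahead (bit j = offset j).
    """
    # Drop the lookahead+1 oldest history dots, then splice the candidate bits
    # into the top of the register; the dot at the lookahead offset lands in bit 7.
    return ((last_pixel_nbit >> (lookahead + 1)) |
            (next_pixels << (8 - (lookahead + 1)))) & 0xFF

# History 0b10110001, candidate bits 0b101 for offsets 0..2, window at offset 2:
assert lookahead_pixels_py(0b10110001, 0b101, 2) == 0b10110110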
# Look ahead a number of pixels and compute choice for next pixel with lowest total squared error after dithering.
@ -110,8 +107,8 @@ def lookahead_options(object screen, int lookahead, unsigned char last_pixel_nbi
#
@cython.boundscheck(False)
@cython.wraparound(False)
cdef int dither_lookahead(Dither* dither, float[:, ::1] palette_rgb,
float[:, :, ::1] image_rgb, int x, int y, unsigned char[:, ::1] options_nbit, int lookahead,
cdef int dither_lookahead(Dither* dither, float[:, :, ::1] palette_rgb,
float[:, :, ::1] image_rgb, int x, int y, int lookahead, unsigned char last_pixels,
int x_res):
cdef int i, j, k, l
cdef float[3] quant_error
@ -130,6 +127,15 @@ cdef int dither_lookahead(Dither* dither, float[:, ::1] palette_rgb,
cdef unsigned char lookahead_bits
# def unsigned int[::1] lookahead_pixel_values = (np.arange(2**lookahead, dtype=np.uint32) << 8) | last_pixel_nbit
# XXX inline into dither_lookahead once working
#for i in range(2 ** lookahead):
# for j in range(lookahead):
# options_nbit[:, j] = (lookahead_pixel_values[i] >> (j+1)) & 0xff
cdef unsigned char next_pixels
# For each 2**lookahead possibilities for the on/off state of the next lookahead pixels, apply error diffusion
# and compute the total squared error to the source image. Since we only have two possible colours for each
# given pixel (dependent on the state already chosen for pixels to the left), we need to look beyond local minima.
@ -145,38 +151,26 @@ cdef int dither_lookahead(Dither* dither, float[:, ::1] palette_rgb,
for j in range(xxr - x):
xl = dither_bounds_xl(dither, j)
xr = dither_bounds_xr(dither, xxr - x, j)
next_pixels = lookahead_pixels(last_pixels, i, j)
# We don't update the input at position x (since we've already chosen
# fixed outputs), but we do propagate quantization errors to positions >x
# so we can compensate for how good/bad these choices were. i.e. the
# options_rgb choices are fixed, but we can still distribute quantization error
# from having made these choices, in order to compute the total error.
for k in range(3):
quant_error[k] = lah_image_rgb[j * lah_shape2 + k] - palette_rgb[options_nbit[i,j], k]
quant_error[k] = lah_image_rgb[j * lah_shape2 + k] - palette_rgb[next_pixels, (x+j) % 4, k]
apply_one_line(dither, xl, xr, j, lah_image_rgb, lah_shape2, quant_error)
#r = <long>lah_image_rgb[j * lah_shape2 + 0]
#g = <long>lah_image_rgb[j * lah_shape2 + 1]
#b = <long>lah_image_rgb[j * lah_shape2 + 2]
#flat = (r << 16) + (g << 8) + b
# bit4 = options_nbit[i, j]
total_error += colour_distance_squared(lah_image_rgb[j*lah_shape2], lah_image_rgb[j*lah_shape2+1], lah_image_rgb[j*lah_shape2+2], palette_rgb[next_pixels, (x+j)%4])
## XXX parametrize number of palette bits
#if j < 8:
# lookahead_bits = (last_bits >> (j+1))
#else:
# lookahead_bits = 0
#lookahead_bits |= (options_nbit[i, j]) << (7-j)
#lookahead_bits &= (1<<8)-1
total_error += colour_distance_squared(lah_image_rgb[j*lah_shape2], lah_image_rgb[j*lah_shape2+1], lah_image_rgb[j*lah_shape2+2], palette_rgb[options_nbit[i,j]])
#if x > (560*3/4) and y == 180:
# print(x, bin(i), j, bin(options_nbit[i, j]), bin(best), best_error, total_error)
#if y == 0:
# print(x, bin(i), j, bin(next_pixels), bin(best), best_error, total_error, list(palette_rgb[next_pixels, (x+j)%4]))
if total_error >= best_error:
break
if total_error < best_error:
best_error = total_error
best = i
@ -184,7 +178,6 @@ cdef int dither_lookahead(Dither* dither, float[:, ::1] palette_rgb,
free(lah_image_rgb)
return best
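Condensed into plain Python, the search dither_lookahead performs above amounts to the sketch below. best_candidate and palette_cam are illustrative names, the one-dimensional error diffusion is a deliberate simplification of the Dither pattern / apply_one_line machinery, and the sketch assumes lookahead is at most 8 and x + lookahead does not run past the end of the row.

import numpy as np

def best_candidate(palette_cam, image_row, x, last_pixels, lookahead):
    """Try every on/off pattern for the next `lookahead` dots and keep the one
    with the lowest total squared error, pruning a candidate as soon as it can
    no longer beat the best so far."""
    best, best_error = 0, float("inf")
    for i in range(2 ** lookahead):
        work = image_row[x:x + lookahead].astype(np.float64)   # local working copy
        total_error = 0.0
        for j in range(lookahead):
            # 8-dot window seen at offset j: shifted history plus candidate bits.
            window = ((last_pixels >> (j + 1)) | (i << (8 - (j + 1)))) & 0xFF
            target = palette_cam[window, (x + j) % 4]           # colour depends on NTSC phase
            quant_error = work[j] - target
            if j + 1 < lookahead:
                # Toy 1-D error diffusion; the real code applies the 2-D Dither
                # pattern via apply_one_line.
                work[j + 1] += quant_error
            total_error += float(quant_error @ quant_error)
            if total_error >= best_error:
                break                                           # cannot beat the best so far
        if total_error < best_error:
            best, best_error = i, total_error
    return best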
@cython.boundscheck(False)
@cython.wraparound(False)
cdef inline float colour_distance_squared(float colour1_0, float colour1_1, float colour1_2, float[::1] colour2):
@ -295,7 +288,7 @@ cdef unsigned char find_nearest_colour(float[::1] pixel_rgb, unsigned char[::1]
@cython.boundscheck(False)
@cython.wraparound(False)
def dither_image(screen, float[:, :, ::1] image_rgb, dither, int lookahead, unsigned char verbose):
cdef int y, x, i
cdef int y, x, i, k
cdef float[3] input_pixel_rgb
cdef float[3] quant_error
cdef unsigned char [:, ::1] options_nbit
@ -311,10 +304,10 @@ def dither_image(screen, float[:, :, ::1] image_rgb, dither, int lookahead, unsi
cdef int xres = screen.X_RES
# XXX not rgb any more
cdef float[:, ::1] palette_rgb = np.zeros((len(screen.palette.CAM02UCS), 3), dtype=np.float32)
for i in screen.palette.CAM02UCS.keys():
cdef float[:, :, ::1] palette_rgb = np.zeros((len(screen.palette.CAM02UCS), 4, 3), dtype=np.float32)
for i, k in screen.palette.CAM02UCS.keys():
for j in range(3):
palette_rgb[i, j] = screen.palette.CAM02UCS[i][j]
palette_rgb[i, k, j] = screen.palette.CAM02UCS[i, k][j]
# cdef (unsigned char)[:, ::1] distances = screen.palette.distances
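For reference, a hedged sketch of the (dot pattern, phase) colour table the loop above builds from screen.palette.CAM02UCS. build_palette_table is an illustrative helper name, and the first axis is sized 256 here for clarity (the diff sizes it from len(CAM02UCS)).

import numpy as np

def build_palette_table(cam02ucs: dict) -> np.ndarray:
    """cam02ucs: dict keyed by (8-bit dot pattern, NTSC phase) -> 3-component colour."""
    table = np.zeros((256, 4, 3), dtype=np.float32)
    for (value, phase), colour in cam02ucs.items():
        table[value, phase, :] = colour
    return table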
@ -332,8 +325,6 @@ def dither_image(screen, float[:, :, ::1] image_rgb, dither, int lookahead, unsi
cdef (unsigned char)[:, ::1] image_nbit = np.empty(
(image_rgb.shape[0], image_rgb.shape[1]), dtype=np.uint8)
# print(lookahead_options(screen, lookahead, 0b11111111, 0))
for y in range(yres):
if verbose:
print("%d/%d" % (y, yres))
@ -343,16 +334,13 @@ def dither_image(screen, float[:, :, ::1] image_rgb, dither, int lookahead, unsi
input_pixel_rgb[i] = image_rgb[y,x,i]
if lookahead:
# Compute all possible 2**N choices of n-bit pixel colours for positions x .. x + lookahead
lookahead_palette_choices_nbit = lookahead_options(screen, lookahead, output_pixel_nbit, x % 4)
# lookahead_palette_choices_nbit = lookahead_options(lookahead, output_pixel_nbit)
# Apply error diffusion for each of these 2**N choices, and compute which produces the closest match
# to the source image over the succeeding N pixels
best_idx = dither_lookahead(
&cdither, palette_rgb, image_rgb, x, y, lookahead_palette_choices_nbit, lookahead,
xres)
output_pixel_nbit >>= 1
output_pixel_nbit |= (best_idx & 0b1) << 7 # XXX n bit shift
# print("Picked %d" % (best_idx & 0b1))
next_pixels = dither_lookahead(
&cdither, palette_rgb, image_rgb, x, y, lookahead, output_pixel_nbit, xres)
output_pixel_nbit = lookahead_pixels(output_pixel_nbit, next_pixels, 0)
# print("Picked %s" % bin(output_pixel_nbit)) # (best_idx & 0b1))
# lookahead_palette_choices_nbit[best_idx, 0]
#else:
# # Choose the closest colour among the available n-bit palette options
@ -360,11 +348,10 @@ def dither_image(screen, float[:, :, ::1] image_rgb, dither, int lookahead, unsi
# output_pixel_nbit = find_nearest_colour(input_pixel_rgb, palette_choices_nbit, distances)
# Apply error diffusion from chosen output pixel value
# print("picked %d, %s" % (lookahead_palette_choices_nbit[best_idx, 0], bin(lookahead_palette_choices_nbit[best_idx, 0])))
output_pixel_rgb = palette_rgb[lookahead_palette_choices_nbit[best_idx, 0]]
output_pixel_rgb = palette_rgb[output_pixel_nbit, x % 4]
for i in range(3):
quant_error[i] = input_pixel_rgb[i] - output_pixel_rgb[i]
image_nbit[y, x] = lookahead_palette_choices_nbit[best_idx, 0] # output_pixel_nbit
image_nbit[y, x] = output_pixel_nbit
apply(&cdither, xres, yres, x, y, image_rgb, quant_error)
for i in range(3):
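After dither_lookahead picks a candidate pattern, only its lowest bit is committed and shifted into the 8-dot history; the remaining bits are re-decided at the next column. In plain Python, the output_pixel_nbit update in this hunk is equivalent to the sketch below (shift_in_decision is an illustrative name).

def shift_in_decision(output_pixel_nbit: int, next_pixels: int) -> int:
    # Equivalent to lookahead_pixels(output_pixel_nbit, next_pixels, 0) truncated
    # to 8 bits: drop the oldest dot, put the newly chosen dot in bit 7.
    return ((output_pixel_nbit >> 1) | ((next_pixels & 1) << 7)) & 0xFF

assert shift_in_decision(0b00000001, 0b1) == 0b10000000
assert shift_in_decision(0b11111111, 0b0) == 0b01111111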

View File

@ -8,36 +8,41 @@ import screen
def main():
s = screen.DHGR560NTSCScreen(palette=None)
bitmap = np.zeros((1, 8), dtype=np.bool)
colours = {}
unique = set()
print("import numpy as np")
print()
print("SRGB = {")
# For each sequence of 8 pixels, compute the RGB colour of the right-most
# pixel, using NTSC emulation.
for bits in range(256):
bits8 = np.empty((8,), dtype=np.bool)
for i in range(8):
bits8[i] = bits & (1 << i)
# Double Hi-Res has a timing shift that rotates the displayed bits one
# position with respect to NTSC phase.
bitmap[0, :] = bits8[[3, 0, 1, 2, 7, 4, 5, 6]]
# Double Hi-Res has a timing shift that rotates the displayed bits one
# position with respect to NTSC phase.
# TODO: should be 3? Do I have a compensating off-by-one in bitmap_to_ntsc?
ntsc_shift = 2
for j in range(ntsc_shift, ntsc_shift+4):
bitmap = np.zeros((1, 11+ntsc_shift), dtype=np.bool)
for bits in range(256):
bits8 = np.empty((8,), dtype=np.bool)
for i in range(8):
bits8[i] = bits & (1 << i)
ntsc = s.bitmap_to_ntsc(bitmap)
last_colour = ntsc[0, -1, :]
colours[bits] = last_colour
unique.add(tuple(last_colour))
print("%d: np.array((%d, %d, %d))," % (
bits, last_colour[0], last_colour[1], last_colour[2]))
bitmap[0, j:j+8] = bits8
ntsc = s.bitmap_to_ntsc(bitmap)
last_colour = ntsc[0, 3*(j+8)-1, :]
colours[(bits, j-ntsc_shift)] = last_colour
unique.add(tuple(last_colour))
print(" (%d, %d): np.array((%d, %d, %d))," % (
bits, j-ntsc_shift, last_colour[0], last_colour[1], last_colour[2]))
print("}")
print("# %d unique colours" % len(unique))
# Show spectrum of available colours sorted by HSV hue value
im = np.zeros((128, 256 * 16, 3), dtype=np.uint8)
for x, hsv in enumerate(sorted([tuple(colour.RGB_to_HSV(c / 256)) for c in
colours.values()])):
im[0:128, x * 16: (x + 1) * 16, :] = colour.HSV_to_RGB(hsv) * 256
im = np.zeros((128*4, 256 * 16, 3), dtype=np.uint8)
for x, j in colours:
im[128*j:128*(j+1), x * 16: (x + 1) * 16, :] = colours[x,j]
Image.fromarray(im).show()
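Since the script now emits one entry per (dot pattern, phase) pair instead of one per pattern, the generated module (imported as palette_ntsc elsewhere in this commit) presumably has the shape sketched below. The entries shown are placeholders apart from the all-dots-off case, which is black, and colour_for is an illustrative helper rather than part of the generated file.

import numpy as np

# Generated by ntsc_colours.py: dict keyed by (8-bit dot pattern, NTSC phase 0-3).
SRGB = {
    (0, 0): np.array((0, 0, 0)),   # all dots off -> black, at any phase
    (0, 1): np.array((0, 0, 0)),
    # ... 256 * 4 entries in total, one per (bits, phase) pair ...
}

def colour_for(bits: int, phase: int) -> np.ndarray:
    """Illustrative helper: sRGB colour of the newest dot given the last 8 dots."""
    return SRGB[(bits, phase)]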

View File

@ -3,6 +3,7 @@
import colour
import numpy as np
import image
import palette_ntsc
class Palette:
@ -46,81 +47,81 @@ class Palette:
self.DOTS_TO_INDEX[v] = k
class ToHgrPalette(Palette):
"""4-bit palette used as default by other DHGR image converters."""
DISTANCES_PATH = "data/distances_tohgr.data"
PALETTE_DEPTH = 4
# Default tohgr/bmp2dhr palette
SRGB = {
0: np.array((0, 0, 0)), # Black
8: np.array((148, 12, 125)), # Magenta
4: np.array((99, 77, 0)), # Brown
12: np.array((249, 86, 29)), # Orange
2: np.array((51, 111, 0)), # Dark green
10: np.array((126, 126, 126)), # Grey2
6: np.array((67, 200, 0)), # Green
14: np.array((221, 206, 23)), # Yellow
1: np.array((32, 54, 212)), # Dark blue
9: np.array((188, 55, 255)), # Violet
5: np.array((126, 126, 126)), # Grey1
13: np.array((255, 129, 236)), # Pink
3: np.array((7, 168, 225)), # Med blue
11: np.array((158, 172, 255)), # Light blue
7: np.array((93, 248, 133)), # Aqua
15: np.array((255, 255, 255)), # White
}
class OpenEmulatorPalette(Palette):
"""4-bit palette chosen to approximately match OpenEmulator output."""
DISTANCES_PATH = "data/distances_openemulator.data"
PALETTE_DEPTH = 4
# OpenEmulator
SRGB = {
0: np.array((0, 0, 0)), # Black
8: np.array((203, 0, 121)), # Magenta
4: np.array((99, 103, 0)), # Brown
12: np.array((244, 78, 0)), # Orange
2: np.array((0, 150, 0)), # Dark green
10: np.array((130, 130, 130)), # Grey2
6: np.array((0, 235, 0)), # Green
14: np.array((214, 218, 0)), # Yellow
1: np.array((20, 0, 246)), # Dark blue
9: np.array((230, 0, 244)), # Violet
5: np.array((130, 130, 130)), # Grey1
13: np.array((244, 105, 235)), # Pink
3: np.array((0, 174, 243)), # Med blue
11: np.array((160, 156, 244)), # Light blue
7: np.array((25, 243, 136)), # Aqua
15: np.array((244, 247, 244)), # White
}
class VirtualIIPalette(Palette):
"""4-bit palette exactly matching Virtual II emulator output."""
DISTANCES_PATH = "data/distances_virtualii.data"
PALETTE_DEPTH = 4
SRGB = {
0: np.array((0, 0, 0)), # Black
8: np.array((231, 36, 66)), # Magenta
4: np.array((154, 104, 0)), # Brown
12: np.array((255, 124, 0)), # Orange
2: np.array((0, 135, 45)), # Dark green
10: np.array((104, 104, 104)), # Grey2
6: np.array((0, 222, 0)), # Green
14: np.array((255, 252, 0)), # Yellow
1: np.array((1, 30, 169)), # Dark blue
9: np.array((230, 73, 228)), # Violet
5: np.array((185, 185, 185)), # Grey1
13: np.array((255, 171, 153)), # Pink
3: np.array((47, 69, 255)), # Med blue
11: np.array((120, 187, 255)), # Light blue
7: np.array((83, 250, 208)), # Aqua
15: np.array((255, 255, 255)), # White
}
# class ToHgrPalette(Palette):
# """4-bit palette used as default by other DHGR image converters."""
# DISTANCES_PATH = "data/distances_tohgr.data"
# PALETTE_DEPTH = 4
#
# # Default tohgr/bmp2dhr palette
# SRGB = {
# 0: np.array((0, 0, 0)), # Black
# 8: np.array((148, 12, 125)), # Magenta
# 4: np.array((99, 77, 0)), # Brown
# 12: np.array((249, 86, 29)), # Orange
# 2: np.array((51, 111, 0)), # Dark green
# 10: np.array((126, 126, 126)), # Grey2
# 6: np.array((67, 200, 0)), # Green
# 14: np.array((221, 206, 23)), # Yellow
# 1: np.array((32, 54, 212)), # Dark blue
# 9: np.array((188, 55, 255)), # Violet
# 5: np.array((126, 126, 126)), # Grey1
# 13: np.array((255, 129, 236)), # Pink
# 3: np.array((7, 168, 225)), # Med blue
# 11: np.array((158, 172, 255)), # Light blue
# 7: np.array((93, 248, 133)), # Aqua
# 15: np.array((255, 255, 255)), # White
# }
#
#
# class OpenEmulatorPalette(Palette):
# """4-bit palette chosen to approximately match OpenEmulator output."""
# DISTANCES_PATH = "data/distances_openemulator.data"
# PALETTE_DEPTH = 4
#
# # OpenEmulator
# SRGB = {
# 0: np.array((0, 0, 0)), # Black
# 8: np.array((203, 0, 121)), # Magenta
# 4: np.array((99, 103, 0)), # Brown
# 12: np.array((244, 78, 0)), # Orange
# 2: np.array((0, 150, 0)), # Dark green
# 10: np.array((130, 130, 130)), # Grey2
# 6: np.array((0, 235, 0)), # Green
# 14: np.array((214, 218, 0)), # Yellow
# 1: np.array((20, 0, 246)), # Dark blue
# 9: np.array((230, 0, 244)), # Violet
# 5: np.array((130, 130, 130)), # Grey1
# 13: np.array((244, 105, 235)), # Pink
# 3: np.array((0, 174, 243)), # Med blue
# 11: np.array((160, 156, 244)), # Light blue
# 7: np.array((25, 243, 136)), # Aqua
# 15: np.array((244, 247, 244)), # White
# }
#
#
# class VirtualIIPalette(Palette):
# """4-bit palette exactly matching Virtual II emulator output."""
# DISTANCES_PATH = "data/distances_virtualii.data"
# PALETTE_DEPTH = 4
#
# SRGB = {
# 0: np.array((0, 0, 0)), # Black
# 8: np.array((231, 36, 66)), # Magenta
# 4: np.array((154, 104, 0)), # Brown
# 12: np.array((255, 124, 0)), # Orange
# 2: np.array((0, 135, 45)), # Dark green
# 10: np.array((104, 104, 104)), # Grey2
# 6: np.array((0, 222, 0)), # Green
# 14: np.array((255, 252, 0)), # Yellow
# 1: np.array((1, 30, 169)), # Dark blue
# 9: np.array((230, 73, 228)), # Violet
# 5: np.array((185, 185, 185)), # Grey1
# 13: np.array((255, 171, 153)), # Pink
# 3: np.array((47, 69, 255)), # Med blue
# 11: np.array((120, 187, 255)), # Light blue
# 7: np.array((83, 250, 208)), # Aqua
# 15: np.array((255, 255, 255)), # White
# }
class NTSCPalette(Palette):
@ -129,272 +130,13 @@ class NTSCPalette(Palette):
PALETTE_DEPTH = 8
# Computed using ntsc_colours.py
SRGB = {
0: np.array((0, 0, 0)),
1: np.array((0, 0, 115)),
2: np.array((0, 37, 0)),
3: np.array((0, 23, 88)),
4: np.array((14, 14, 0)),
5: np.array((0, 0, 0)),
6: np.array((0, 52, 0)),
7: np.array((0, 37, 0)),
8: np.array((64, 0, 26)),
9: np.array((49, 0, 141)),
10: np.array((0, 0, 0)),
11: np.array((0, 0, 115)),
12: np.array((79, 0, 0)),
13: np.array((64, 0, 26)),
14: np.array((14, 14, 0)),
15: np.array((0, 0, 0)),
16: np.array((49, 48, 178)),
17: np.array((34, 34, 255)),
18: np.array((0, 86, 152)),
19: np.array((0, 71, 255)),
20: np.array((63, 63, 63)),
21: np.array((49, 48, 178)),
22: np.array((0, 101, 37)),
23: np.array((0, 86, 152)),
24: np.array((113, 10, 204)),
25: np.array((98, 0, 255)),
26: np.array((49, 48, 178)),
27: np.array((34, 34, 255)),
28: np.array((128, 25, 89)),
29: np.array((113, 10, 204)),
30: np.array((63, 63, 63)),
31: np.array((49, 48, 178)),
32: np.array((0, 101, 37)),
33: np.array((0, 86, 152)),
34: np.array((0, 139, 11)),
35: np.array((0, 124, 126)),
36: np.array((13, 116, 0)),
37: np.array((0, 101, 37)),
38: np.array((0, 154, 0)),
39: np.array((0, 139, 11)),
40: np.array((63, 63, 63)),
41: np.array((49, 48, 178)),
42: np.array((0, 101, 37)),
43: np.array((0, 86, 152)),
44: np.array((78, 78, 0)),
45: np.array((63, 63, 63)),
46: np.array((13, 116, 0)),
47: np.array((0, 101, 37)),
48: np.array((48, 150, 216)),
49: np.array((33, 135, 255)),
50: np.array((0, 188, 190)),
51: np.array((0, 173, 255)),
52: np.array((62, 165, 101)),
53: np.array((48, 150, 216)),
54: np.array((0, 203, 75)),
55: np.array((0, 188, 190)),
56: np.array((112, 112, 242)),
57: np.array((98, 97, 255)),
58: np.array((48, 150, 216)),
59: np.array((33, 135, 255)),
60: np.array((127, 127, 127)),
61: np.array((112, 112, 242)),
62: np.array((62, 165, 101)),
63: np.array((48, 150, 216)),
64: np.array((78, 78, 0)),
65: np.array((63, 63, 63)),
66: np.array((13, 116, 0)),
67: np.array((0, 101, 37)),
68: np.array((93, 93, 0)),
69: np.array((78, 78, 0)),
70: np.array((28, 131, 0)),
71: np.array((13, 116, 0)),
72: np.array((142, 40, 0)),
73: np.array((128, 25, 89)),
74: np.array((78, 78, 0)),
75: np.array((63, 63, 63)),
76: np.array((157, 55, 0)),
77: np.array((142, 40, 0)),
78: np.array((93, 93, 0)),
79: np.array((78, 78, 0)),
80: np.array((127, 127, 127)),
81: np.array((112, 112, 242)),
82: np.array((62, 165, 101)),
83: np.array((48, 150, 216)),
84: np.array((142, 142, 12)),
85: np.array((127, 127, 127)),
86: np.array((77, 180, 0)),
87: np.array((62, 165, 101)),
88: np.array((192, 89, 153)),
89: np.array((177, 74, 255)),
90: np.array((127, 127, 127)),
91: np.array((112, 112, 242)),
92: np.array((206, 104, 38)),
93: np.array((192, 89, 153)),
94: np.array((142, 142, 12)),
95: np.array((127, 127, 127)),
96: np.array((77, 180, 0)),
97: np.array((62, 165, 101)),
98: np.array((13, 218, 0)),
99: np.array((0, 203, 75)),
100: np.array((92, 195, 0)),
101: np.array((77, 180, 0)),
102: np.array((27, 233, 0)),
103: np.array((13, 218, 0)),
104: np.array((142, 142, 12)),
105: np.array((127, 127, 127)),
106: np.array((77, 180, 0)),
107: np.array((62, 165, 101)),
108: np.array((156, 157, 0)),
109: np.array((142, 142, 12)),
110: np.array((92, 195, 0)),
111: np.array((77, 180, 0)),
112: np.array((126, 229, 165)),
113: np.array((112, 214, 255)),
114: np.array((62, 255, 138)),
115: np.array((47, 252, 253)),
116: np.array((141, 244, 50)),
117: np.array((126, 229, 165)),
118: np.array((76, 255, 23)),
119: np.array((62, 255, 138)),
120: np.array((191, 191, 191)),
121: np.array((176, 176, 255)),
122: np.array((126, 229, 165)),
123: np.array((112, 214, 255)),
124: np.array((205, 206, 76)),
125: np.array((191, 191, 191)),
126: np.array((141, 244, 50)),
127: np.array((126, 229, 165)),
128: np.array((128, 25, 89)),
129: np.array((113, 10, 204)),
130: np.array((63, 63, 63)),
131: np.array((49, 48, 178)),
132: np.array((142, 40, 0)),
133: np.array((128, 25, 89)),
134: np.array((78, 78, 0)),
135: np.array((63, 63, 63)),
136: np.array((192, 0, 116)),
137: np.array((178, 0, 231)),
138: np.array((128, 25, 89)),
139: np.array((113, 10, 204)),
140: np.array((207, 2, 1)),
141: np.array((192, 0, 116)),
142: np.array((142, 40, 0)),
143: np.array((128, 25, 89)),
144: np.array((177, 74, 255)),
145: np.array((162, 59, 255)),
146: np.array((112, 112, 242)),
147: np.array((98, 97, 255)),
148: np.array((192, 89, 153)),
149: np.array((177, 74, 255)),
150: np.array((127, 127, 127)),
151: np.array((112, 112, 242)),
152: np.array((241, 36, 255)),
153: np.array((227, 21, 255)),
154: np.array((177, 74, 255)),
155: np.array((162, 59, 255)),
156: np.array((255, 51, 179)),
157: np.array((241, 36, 255)),
158: np.array((192, 89, 153)),
159: np.array((177, 74, 255)),
160: np.array((127, 127, 127)),
161: np.array((112, 112, 242)),
162: np.array((62, 165, 101)),
163: np.array((48, 150, 216)),
164: np.array((142, 142, 12)),
165: np.array((127, 127, 127)),
166: np.array((77, 180, 0)),
167: np.array((62, 165, 101)),
168: np.array((192, 89, 153)),
169: np.array((177, 74, 255)),
170: np.array((127, 127, 127)),
171: np.array((112, 112, 242)),
172: np.array((206, 104, 38)),
173: np.array((192, 89, 153)),
174: np.array((142, 142, 12)),
175: np.array((127, 127, 127)),
176: np.array((176, 176, 255)),
177: np.array((161, 161, 255)),
178: np.array((112, 214, 255)),
179: np.array((97, 199, 255)),
180: np.array((191, 191, 191)),
181: np.array((176, 176, 255)),
182: np.array((126, 229, 165)),
183: np.array((112, 214, 255)),
184: np.array((241, 138, 255)),
185: np.array((226, 123, 255)),
186: np.array((176, 176, 255)),
187: np.array((161, 161, 255)),
188: np.array((255, 153, 217)),
189: np.array((241, 138, 255)),
190: np.array((191, 191, 191)),
191: np.array((176, 176, 255)),
192: np.array((206, 104, 38)),
193: np.array((192, 89, 153)),
194: np.array((142, 142, 12)),
195: np.array((127, 127, 127)),
196: np.array((221, 119, 0)),
197: np.array((206, 104, 38)),
198: np.array((156, 157, 0)),
199: np.array((142, 142, 12)),
200: np.array((255, 66, 64)),
201: np.array((255, 51, 179)),
202: np.array((206, 104, 38)),
203: np.array((192, 89, 153)),
204: np.array((255, 81, 0)),
205: np.array((255, 66, 64)),
206: np.array((221, 119, 0)),
207: np.array((206, 104, 38)),
208: np.array((255, 153, 217)),
209: np.array((241, 138, 255)),
210: np.array((191, 191, 191)),
211: np.array((176, 176, 255)),
212: np.array((255, 168, 102)),
213: np.array((255, 153, 217)),
214: np.array((205, 206, 76)),
215: np.array((191, 191, 191)),
216: np.array((255, 115, 243)),
217: np.array((255, 100, 255)),
218: np.array((255, 153, 217)),
219: np.array((241, 138, 255)),
220: np.array((255, 130, 128)),
221: np.array((255, 115, 243)),
222: np.array((255, 168, 102)),
223: np.array((255, 153, 217)),
224: np.array((205, 206, 76)),
225: np.array((191, 191, 191)),
226: np.array((141, 244, 50)),
227: np.array((126, 229, 165)),
228: np.array((220, 220, 0)),
229: np.array((205, 206, 76)),
230: np.array((156, 255, 0)),
231: np.array((141, 244, 50)),
232: np.array((255, 168, 102)),
233: np.array((255, 153, 217)),
234: np.array((205, 206, 76)),
235: np.array((191, 191, 191)),
236: np.array((255, 183, 0)),
237: np.array((255, 168, 102)),
238: np.array((220, 220, 0)),
239: np.array((205, 206, 76)),
240: np.array((254, 255, 255)),
241: np.array((240, 240, 255)),
242: np.array((190, 255, 228)),
243: np.array((175, 255, 255)),
244: np.array((255, 255, 139)),
245: np.array((254, 255, 255)),
246: np.array((205, 255, 113)),
247: np.array((190, 255, 228)),
248: np.array((255, 217, 255)),
249: np.array((255, 202, 255)),
250: np.array((254, 255, 255)),
251: np.array((240, 240, 255)),
252: np.array((255, 231, 166)),
253: np.array((255, 217, 255)),
254: np.array((255, 255, 139)),
255: np.array((254, 255, 255)),
}
# 85 unique colours
SRGB = palette_ntsc.SRGB
PALETTES = {
'openemulator': OpenEmulatorPalette,
'virtualii': VirtualIIPalette,
'tohgr': ToHgrPalette,
# 'openemulator': OpenEmulatorPalette,
# 'virtualii': VirtualIIPalette,
# 'tohgr': ToHgrPalette,
'ntsc': NTSCPalette
}
DEFAULT_PALETTE = 'openemulator'
DEFAULT_PALETTE = 'ntsc'
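With the hand-tuned 4-bit palettes commented out, 'ntsc' is both the only registered palette and the default, and its SRGB table now comes from the generated palette_ntsc module. A minimal lookup sketch under those assumptions, using keys in the (dot pattern, phase) form produced by ntsc_colours.py and assuming palette_ntsc is importable:

from palette import PALETTES, DEFAULT_PALETTE

palette_cls = PALETTES[DEFAULT_PALETTE]      # DEFAULT_PALETTE is now 'ntsc'
black = palette_cls.SRGB[(0, 0)]             # keyed by (8-bit dot pattern, phase)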

View File

@ -151,7 +151,7 @@ class DHGR560Screen(Screen):
# XXX wrong
assert next_dots[(3 - x) % 4 + 4] is None
#print(x, last_dots, next_dots)
# print(x, last_dots, next_dots)
next_dots[(3 - x) % 4 + 4] = False
next_pixel_nbit_0 = self.palette.DOTS_TO_INDEX[next_dots]
@ -173,9 +173,9 @@ class DHGR560NTSCScreen(Screen):
for y in range(self.Y_RES):
for x in range(self.X_RES):
pixel = image_nbit[y, x]
dots = self.palette.DOTS[pixel]
phase = x % 4
bitmap[y, x] = dots[4 + phase]
#dots = self.palette.DOTS[pixel]
#phase = x % 4
bitmap[y, x] = pixel >> 7 # dots[4 + phase]
return bitmap
def bitmap_to_image_rgb(self, bitmap: np.ndarray) -> np.ndarray:
@ -190,13 +190,14 @@ class DHGR560NTSCScreen(Screen):
for y in range(self.Y_RES):
pixel = [False, False, False, False, False, False, False, False]
for x in range(self.NATIVE_X_RES):
pixel[x % 4] = pixel[x % 4 + 4]
pixel[x % 4 + 4] = bitmap[y, x]
# pixel[x % 4] = pixel[x % 4 + 4]
# pixel[x % 4 + 4] = bitmap[y, x]
pixel = pixel[1:] + [bitmap[y, x]]
dots = self.palette.DOTS_TO_INDEX[tuple(pixel)]
image_rgb[y, x, :] = self.palette.RGB[dots]
image_rgb[y, x, :] = self.palette.RGB[dots, x % 4]
return image_rgb
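The decode above now keeps a rolling window of the last 8 dots and looks colours up by (window, phase). A hedged plain-Python sketch of that inner loop; decode_row and its parameter names are illustrative stand-ins for the Screen and Palette attributes used in the diff.

def decode_row(row_bits, dots_to_index, rgb):
    """row_bits: booleans for one scan line; dots_to_index maps an 8-dot tuple to an
    n-bit value; rgb is keyed by (value, phase) like palette.RGB appears to be."""
    window = [False] * 8
    out = []
    for x, bit in enumerate(row_bits):
        window = window[1:] + [bool(bit)]      # shift the newest dot in
        key = dots_to_index[tuple(window)]     # 8-dot window -> n-bit value
        out.append(rgb[key, x % 4])            # colour depends on NTSC phase
    return out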
def pixel_palette_options(self, last_pixel_nbit, x: int):
def pixel_palette_options(self, last_pixel_nbit):
# # The two available 8-bit pixel colour choices are given by:
# # - Rotating the pixel value from the current x % 4 + 4 position to
# # x % 4
@ -210,25 +211,33 @@ class DHGR560NTSCScreen(Screen):
# pixel_nbit_0 = self.palette.DOTS_TO_INDEX[tuple(next_dots0)]
# pixel_nbit_1 = self.palette.DOTS_TO_INDEX[tuple(next_dots1)]
# return np.array([pixel_nbit_0, pixel_nbit_1], dtype=np.uint8)
last_dots = list(self.palette.DOTS[last_pixel_nbit][1:]) + [None]
#next_dots = last_dots[1:] # list(self.palette.DOTS[
# last_pixel_nbit][1:])
#return np.array([
# self.palette.DOTS_TO_INDEX[tuple(last_dots + [False])],
# self.palette.DOTS_TO_INDEX[tuple(next_dots + [True])]],
# dtype=np.uint8)
# rearrange into palette order
next_dots = [None] * 8
for i in range(4):
next_dots[i] = last_dots[(i - x) % 4]
next_dots[i + 4] = last_dots[(i - x) % 4 + 4]
assert next_dots[(3 + x) % 4 + 4] is None
# print(x, last_dots, next_dots)
next_dots[(3 + x) % 4 + 4] = False
next_pixel_nbit_0 = self.palette.DOTS_TO_INDEX[tuple(next_dots)]
next_dots[(3 + x) % 4 + 4] = True
next_pixel_nbit_1 = self.palette.DOTS_TO_INDEX[tuple(next_dots)]
return np.array([next_pixel_nbit_0, next_pixel_nbit_1],
return np.array(last_pixel_nbit >> 1, (last_pixel_nbit >> 1) + 1,
dtype=np.uint8)
# # rearrange into palette order
# next_dots = [None] * 8
# for i in range(4):
# next_dots[i] = last_dots[(i - x) % 4]
# next_dots[i + 4] = last_dots[(i - x) % 4 + 4]
#
# assert next_dots[(3 + x) % 4 + 4] is None
# # print(x, last_dots, next_dots)
#
# next_dots[(3 + x) % 4 + 4] = False
# next_pixel_nbit_0 = self.palette.DOTS_TO_INDEX[tuple(next_dots)]
#
# next_dots[(3 + x) % 4 + 4] = True
# next_pixel_nbit_1 = self.palette.DOTS_TO_INDEX[tuple(next_dots)]
# return np.array([next_pixel_nbit_0, next_pixel_nbit_1],
# dtype=np.uint8)
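Under the packed representation used elsewhere in this commit (newest dot in bit 7), the two windows reachable from a given 8-dot state are "shift right with bit 7 clear" and "shift right with bit 7 set". The version above still reads as work in progress, so the sketch below is a hedged guess at the intended pair rather than a statement of what the function does.

import numpy as np

def pixel_palette_options(last_pixel_nbit: int) -> np.ndarray:
    """Two successor windows: next dot off, or next dot on (newest dot in bit 7)."""
    off = (last_pixel_nbit >> 1) & 0xFF        # shift the oldest dot out, bit 7 clear
    return np.array([off, off | (1 << 7)], dtype=np.uint8)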
def bitmap_to_ntsc(self, bitmap: np.ndarray) -> np.ndarray:
y_width = 12
u_width = 24