diff --git a/convert.py b/convert.py
index 0f571a0..f3b837b 100644
--- a/convert.py
+++ b/convert.py
@@ -4,7 +4,7 @@
 import argparse
 import os.path
 import time
-import colorspacious
+import colour
 from PIL import Image
 import numpy as np
 
@@ -91,13 +91,13 @@ def main():
         screen.Y_RES)).astype(np.float32)
 
-    # convert from sRGB1-linear to CAM02UCS perceptually uniform colour space
-    cam02ucs = colorspacious.cspace_convert(
-        resized/255, "sRGB1-linear", colorspacious.CAM02UCS).astype(np.float32)
-    # print(cam02ucs)
+    # convert from linear RGB to CAM16UCS perceptually uniform colour space
+    cam16ucs = colour.convert(
+        resized / 255, "RGB", "CAM16UCS").astype(np.float32)
+    # print(cam16ucs)
 
     dither = dither_pattern.PATTERNS[args.dither]()
     output_nbit, _ = dither_pyx.dither_image(
-        screen, cam02ucs, dither, lookahead, args.verbose)
+        screen, cam16ucs, dither, lookahead, args.verbose)
     bitmap = screen.pack(output_nbit)
 
     # Show output image by rendering in target palette
diff --git a/palette.py b/palette.py
index 83b06a9..4f89305 100644
--- a/palette.py
+++ b/palette.py
@@ -1,6 +1,6 @@
 """RGB colour palettes to target for Apple II image conversions."""
 
-import colorspacious
+import colour
 import numpy as np
 
 import image
@@ -29,9 +29,8 @@ class Palette:
         for k, v in self.SRGB.items():
             self.RGB[k] = (np.clip(image.srgb_to_linear_array(v / 255), 0.0,
                                    1.0) * 255).astype(np.uint8)
-
-            self.CAM02UCS[k] = colorspacious.cspace_convert(
-                v, "sRGB255", colorspacious.CAM02UCS)
+            self.CAM02UCS[k] = colour.convert(
+                v / 255, "sRGB", "CAM16UCS").astype(np.float32)
         # print(self.CAM02UCS)
 
 