2019-03-21 16:42:47 +00:00
|
|
|
"""Encode a sequence of images as an optimized stream of screen changes."""
|
|
|
|
|
2019-04-25 15:28:25 +00:00
|
|
|
import enum
|
2019-06-13 23:12:26 +00:00
|
|
|
import functools
|
2019-03-07 23:07:24 +00:00
|
|
|
import heapq
|
2019-03-10 22:42:31 +00:00
|
|
|
import os
|
|
|
|
import queue
|
2019-03-21 16:24:40 +00:00
|
|
|
import random
|
2019-03-10 22:42:31 +00:00
|
|
|
import subprocess
|
2019-03-21 16:24:40 +00:00
|
|
|
import threading
|
2019-03-14 22:08:50 +00:00
|
|
|
from typing import List, Iterator, Tuple
|
2019-03-10 22:42:31 +00:00
|
|
|
|
2019-03-04 23:09:00 +00:00
|
|
|
import numpy as np
|
2019-03-10 22:42:31 +00:00
|
|
|
import skvideo.io
|
2019-03-21 16:24:40 +00:00
|
|
|
from PIL import Image
|
2019-03-04 23:09:00 +00:00
|
|
|
|
2019-03-21 16:24:40 +00:00
|
|
|
import opcodes
|
|
|
|
import screen
|
2019-01-05 23:31:56 +00:00
|
|
|
|
|
|
|
|
2019-04-25 15:28:25 +00:00
|
|
|
class Mode(enum.Enum):
    """Graphics mode to encode for.

    HGR uses a single (main) memory bank; DHGR additionally drives an
    auxiliary memory bank (see Video.__init__, which only allocates the
    aux structures when DHGR is selected).
    """
    HGR = 0  # Hi-Res graphics
    DHGR = 1  # Double Hi-Res graphics (main + aux memory banks)
|
|
|
|
|
|
|
|
|
2019-06-13 23:12:26 +00:00
|
|
|
class FrameSequencer:
    """Abstract base for sources that produce a stream of frames to encode."""

    def __init__(self, mode: Mode):
        # Graphics mode the produced frames target.
        self.video_mode = mode  # type: Mode
        # Nominal input frame rate; subclasses may overwrite with the
        # actual rate of their source (see FileFrameSequencer).
        self.input_frame_rate = 30  # type: float

    def frames(self) -> Iterator[screen.MemoryMap]:
        """Yield successive frames as screen memory maps.

        Must be implemented by subclasses.
        """
        raise NotImplementedError
|
2019-02-23 23:52:25 +00:00
|
|
|
|
2019-03-10 22:42:31 +00:00
|
|
|
|
2019-06-13 23:12:26 +00:00
|
|
|
class FileFrameSequencer(FrameSequencer):
    """Sequences frames decoded from a video file via ffmpeg."""

    def __init__(self, filename, mode: Mode):
        # Python 3 zero-argument super() replaces the redundant
        # super(FileFrameSequencer, self) spelling.
        super().__init__(mode)

        self.filename = filename  # type: str
        self._reader = skvideo.io.FFmpegReader(filename)

        # Compute frame rate from input video
        # TODO: possible to compute time offset for each frame instead?
        data = skvideo.io.ffprobe(self.filename)['video']
        rate_data = data['@r_frame_rate'].split("/")  # e.g. 12000/1001
        self.input_frame_rate = float(
            rate_data[0]) / float(rate_data[1])  # type: float
|
2019-03-10 22:42:31 +00:00
|
|
|
|
2019-03-21 15:57:09 +00:00
|
|
|
def _frame_grabber(self) -> Iterator[Image.Image]:
    """Yield each decoded video frame as a PIL image."""
    yield from (
        Image.fromarray(frame_array)
        for frame_array in self._reader.nextFrame()
    )
|
2019-01-05 23:31:56 +00:00
|
|
|
|
2019-03-10 22:42:31 +00:00
|
|
|
def frames(self) -> Iterator[screen.MemoryMap]:
    """Encode frame to HGR using bmp2dhr.

    We do the encoding in a background thread to parallelize.

    Yields (main_map, aux_map) tuples; aux_map is None in HGR mode.
    """
    # Frames are cached on disk next to the input file so repeated runs
    # can skip re-encoding.
    frame_dir = ".".join(self.filename.split(".")[:-1])
    # exist_ok replaces the old try/except FileExistsError dance.
    os.makedirs(frame_dir, exist_ok=True)

    # Bounded queue applies backpressure to the encoder thread.
    q = queue.Queue(maxsize=10)

    def _hgr_decode(_idx, _frame):
        """Encode a single frame to HGR; returns (main_bytes, None)."""
        outfile = "%s/%08dC.BIN" % (frame_dir, _idx)
        bmpfile = "%s/%08d.bmp" % (frame_dir, _idx)

        try:
            os.stat(outfile)
        except FileNotFoundError:
            _frame = _frame.resize((280, 192), resample=Image.LANCZOS)
            _frame.save(bmpfile)

            # TODO: parametrize palette
            subprocess.call([
                "/usr/local/bin/bmp2dhr", bmpfile, "hgr",
                "P5",
                # "P0", # Kegs32 RGB Color palette(for //gs playback)
                "D9"  # Buckels dither
            ])

            os.remove(bmpfile)

        _main = np.fromfile(outfile, dtype=np.uint8)

        return _main, None

    def _dhgr_decode(_idx, _frame):
        """Encode a single frame to DHGR; returns (main_bytes, aux_bytes)."""
        mainfile = "%s/%08d.BIN" % (frame_dir, _idx)
        auxfile = "%s/%08d.AUX" % (frame_dir, _idx)

        bmpfile = "%s/%08d.bmp" % (frame_dir, _idx)

        try:
            os.stat(mainfile)
            os.stat(auxfile)
        except FileNotFoundError:
            _frame = _frame.resize((280, 192), resample=Image.LANCZOS)
            _frame.save(bmpfile)

            # TODO: parametrize palette
            subprocess.call([
                "/usr/local/bin/bmp2dhr", bmpfile, "dhgr",  # "v",
                "P5",  # "P0", # Kegs32 RGB Color palette (for //gs
                # playback)
                "A",  # Output separate .BIN and .AUX files
                "D9"  # Buckels dither
            ])

            os.remove(bmpfile)

        _main = np.fromfile(mainfile, dtype=np.uint8)
        _aux = np.fromfile(auxfile, dtype=np.uint8)

        return _main, _aux

    def worker():
        """Invoke bmp2dhr to encode input image frames and push to queue."""
        for _idx, _frame in enumerate(self._frame_grabber()):
            if self.video_mode == Mode.DHGR:
                res = _dhgr_decode(_idx, _frame)
            else:
                res = _hgr_decode(_idx, _frame)
            q.put(res)

        # (None, None) sentinel signals end of stream to the consumer.
        q.put((None, None))

    t = threading.Thread(target=worker, daemon=True)
    t.start()

    while True:
        main, aux = q.get()
        if main is None:
            break

        main_map = screen.FlatMemoryMap(
            screen_page=1, data=main).to_memory_map()
        if aux is None:
            aux_map = None
        else:
            aux_map = screen.FlatMemoryMap(
                screen_page=1, data=aux).to_memory_map()
        yield (main_map, aux_map)
        q.task_done()

    t.join()
|
|
|
|
|
2019-06-13 23:12:26 +00:00
|
|
|
|
|
|
|
class Video:
    """Apple II screen memory map encoding a bitmapped frame."""

    CLOCK_SPEED = 1024 * 1024  # type: int

    def __init__(
            self,
            frame_sequencer: FrameSequencer,
            mode: Mode = Mode.HGR
    ):
        self.mode = mode  # type: Mode
        self.frame_sequencer = frame_sequencer  # type: FrameSequencer
        # CPU cycles available per input video frame.
        self.cycles_per_frame = (
            self.CLOCK_SPEED / frame_sequencer.input_frame_rate
        )  # type: float
        self.frame_number = 0  # type: int

        # Initialize empty screen
        self.memory_map = screen.MemoryMap(
            screen_page=1)  # type: screen.MemoryMap
        # Compare against the Mode class, not through the `mode` member
        # instance: member-of-member access was removed in Python 3.12.
        if self.mode == Mode.DHGR:
            self.aux_memory_map = screen.MemoryMap(
                screen_page=1)  # type: screen.MemoryMap

            self.pixelmap = screen.DHGRBitmap(
                main_memory=self.memory_map,
                aux_memory=self.aux_memory_map
            )

        # Accumulates pending edit weights across frames.
        # np.int was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin int gives the identical default integer dtype.
        self.update_priority = np.zeros((32, 256), dtype=int)
        if self.mode == Mode.DHGR:
            self.aux_update_priority = np.zeros((32, 256), dtype=int)
|
|
|
|
def tick(self, cycles: int) -> bool:
|
|
|
|
if cycles > (self.cycles_per_frame * self.frame_number):
|
|
|
|
self.frame_number += 1
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2019-03-10 22:42:31 +00:00
|
|
|
def encode_frame(
        self,
        target: screen.MemoryMap,
        is_aux: bool,
) -> Iterator[opcodes.Opcode]:
    """Update to match content of frame within provided budget."""
    # Select the main or auxiliary bank's state depending on which
    # half of the (D)HGR screen we are encoding.
    memory_map = self.aux_memory_map if is_aux else self.memory_map
    update_priority = (
        self.aux_update_priority if is_aux else self.update_priority)

    print("Similarity %f" % (update_priority.mean()))

    yield from self._index_changes(
        memory_map, target, update_priority, is_aux)
|
2019-02-23 23:52:25 +00:00
|
|
|
|
2019-03-10 22:42:31 +00:00
|
|
|
def _index_changes(
        self,
        source: screen.MemoryMap,
        target: screen.MemoryMap,
        update_priority: np.array,
        is_aux: bool
) -> Iterator[Tuple[int, int, List[int]]]:
    """Transform encoded screen to sequence of change tuples.

    Yields (page + 32, content, [offset0..offset3]) tuples in decreasing
    priority order, mutating source/update_priority as bytes are emitted.
    This generator never terminates: once all diffs are drained it pads
    with no-op stores forever (see tail of function).
    """
    # Build the hypothetical bitmap with the target bank substituted in,
    # so edit distances reflect cross-bank pixel interactions.
    if is_aux:
        target_pixelmap = screen.DHGRBitmap(
            main_memory=self.memory_map,
            aux_memory=target
        )
    else:
        target_pixelmap = screen.DHGRBitmap(
            main_memory=target,
            aux_memory=self.aux_memory_map
        )

    diff_weights = self._diff_weights(
        self.pixelmap, target_pixelmap, is_aux
    )

    # Clear any update priority entries that have resolved themselves
    # with new frame
    update_priority[diff_weights == 0] = 0
    update_priority += diff_weights

    priorities = self._heapify_priorities(update_priority)

    # Cache of _compute_delta results keyed by content byte.
    content_deltas = {}

    while priorities:
        pri, _, page, offset = heapq.heappop(priorities)

        # Check whether we've already cleared this diff while processing
        # an earlier opcode
        if update_priority[page, offset] == 0:
            continue

        offsets = [offset]
        content = target.page_offset[page, offset]
        assert content < 0x80  # DHGR palette bit not expected to be set

        # Clear priority for the offset we're emitting
        update_priority[page, offset] = 0
        diff_weights[page, offset] = 0

        # Update memory maps
        source.page_offset[page, offset] = content
        self.pixelmap.apply(page, offset, is_aux, content)

        # Make sure we don't emit this offset as a side-effect of some
        # other offset later.
        for cd in content_deltas.values():
            cd[page, offset] = 0
        # TODO: what if we add another content_deltas entry later?
        # We might clobber it again

        # Need to find 3 more offsets to fill this opcode
        for err, o in self._compute_error(
                page,
                content,
                target_pixelmap,
                diff_weights,
                content_deltas,
                is_aux
        ):
            assert o != offset

            if update_priority[page, o] == 0:
                # print("Skipping page=%d, offset=%d" % (page, o))
                continue

            # Make sure we don't end up considering this (page, offset)
            # again until the next image frame. Even if a better match
            # comes along, it's probably better to fix up some other byte.
            # TODO: or should we recompute it with new error?
            for cd in content_deltas.values():
                cd[page, o] = 0

            byte_offset = target_pixelmap.interleaved_byte_offset(o, is_aux)
            old_packed = target_pixelmap.packed[page, o // 2]

            p = self._byte_pair_difference(
                target_pixelmap, byte_offset, old_packed, content)

            # Update priority for the offset we're emitting
            update_priority[page, o] = p  # 0

            source.page_offset[page, o] = content
            self.pixelmap.apply(page, o, is_aux, content)

            if p:
                # This content byte introduced an error, so put back on the
                # heap in case we can get back to fixing it exactly
                # during this frame. Otherwise we'll get to it later.
                heapq.heappush(
                    priorities, (-p, random.randint(0, 10000), page, o))

            offsets.append(o)
            if len(offsets) == 3:
                break

        # Pad to 4 if we didn't find enough
        for _ in range(len(offsets), 4):
            offsets.append(offsets[0])
        yield (page + 32, content, offsets)

    # TODO: there is still a bug causing residual diffs when we have
    # apparently run out of work to do
    if not np.array_equal(source.page_offset, target.page_offset):
        diffs = np.nonzero(source.page_offset != target.page_offset)
        for i in range(len(diffs[0])):
            diff_p = diffs[0][i]
            diff_o = diffs[1][i]

            print("Diff at (%d, %d): %d != %d" % (
                diff_p, diff_o, source.page_offset[diff_p, diff_o],
                target.page_offset[diff_p, diff_o]
            ))
        # assert False

    # If we run out of things to do, pad forever
    content = target.page_offset[0, 0]
    while True:
        yield (32, content, [0, 0, 0, 0])
|
2019-03-14 22:32:52 +00:00
|
|
|
|
2019-06-13 23:12:26 +00:00
|
|
|
@staticmethod
|
|
|
|
def _heapify_priorities(update_priority: np.array) -> List:
|
|
|
|
pages, offsets = update_priority.nonzero()
|
|
|
|
priorities = [tuple(data) for data in np.stack((
|
|
|
|
-update_priority[pages, offsets],
|
|
|
|
# Don't use deterministic order for page, offset
|
|
|
|
np.random.randint(0, 10000, size=pages.shape[0]),
|
|
|
|
pages,
|
|
|
|
offsets)
|
|
|
|
).T.tolist()]
|
|
|
|
|
|
|
|
heapq.heapify(priorities)
|
|
|
|
return priorities
|
|
|
|
|
2019-03-14 22:32:52 +00:00
|
|
|
@staticmethod
def _diff_weights(
        source: screen.DHGRBitmap,
        target: screen.DHGRBitmap,
        is_aux: bool
):
    """Compute (32, 256) per-byte edit distances from source to target.

    Distances are looked up in the bitmap's precomputed edit_distances
    tables; is_aux selects which interleaved byte offsets (0/2 vs 1/3)
    this memory bank influences.
    """
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin int gives the identical default integer dtype.
    diff = np.ndarray((32, 256), dtype=int)

    if is_aux:
        # Pixels influenced by byte offset 0
        source_pixels0 = source.mask_and_shift_data(source.packed, 0)
        target_pixels0 = target.mask_and_shift_data(target.packed, 0)

        # Concatenate 8-bit source and target into 16-bit values
        pair0 = (source_pixels0 << 8) + target_pixels0
        dist0 = source.edit_distances[0][pair0].reshape(pair0.shape)

        # Pixels influenced by byte offset 2
        source_pixels2 = source.mask_and_shift_data(source.packed, 2)
        target_pixels2 = target.mask_and_shift_data(target.packed, 2)
        # Concatenate 12-bit source and target into 24-bit values
        pair2 = (source_pixels2 << 12) + target_pixels2
        dist2 = source.edit_distances[2][pair2].reshape(pair2.shape)

        diff[:, 0::2] = dist0
        diff[:, 1::2] = dist2

    else:
        # Pixels influenced by byte offset 1
        source_pixels1 = source.mask_and_shift_data(source.packed, 1)
        target_pixels1 = target.mask_and_shift_data(target.packed, 1)
        pair1 = (source_pixels1 << 12) + target_pixels1
        dist1 = source.edit_distances[1][pair1].reshape(pair1.shape)

        # Pixels influenced by byte offset 3
        source_pixels3 = source.mask_and_shift_data(source.packed, 3)
        target_pixels3 = target.mask_and_shift_data(target.packed, 3)
        pair3 = (source_pixels3 << 8) + target_pixels3
        dist3 = source.edit_distances[3][pair3].reshape(pair3.shape)

        diff[:, 0::2] = dist1
        diff[:, 1::2] = dist3

    return diff
|
|
|
|
|
|
|
|
# NOTE(review): lru_cache on an instance method keys on `self` and keeps
# every Video instance alive for the cache's lifetime (flake8-bugbear
# B019); also maxsize=None grows without bound. Worth revisiting.
@functools.lru_cache(None)
def _byte_pair_difference(
        self,
        target_pixelmap,
        byte_offset,
        old_packed,
        content
):
    """Edit distance introduced by storing `content` at `byte_offset`
    within the packed byte pair `old_packed`."""
    old_pixels = target_pixelmap.mask_and_shift_data(
        old_packed, byte_offset)
    new_pixels = target_pixelmap.mask_and_shift_data(
        target_pixelmap.masked_update(
            byte_offset, old_packed, content), byte_offset)

    # Outer byte offsets (0, 3) contribute 8 pixel bits; inner offsets
    # (1, 2) contribute 12 — mirrors the pairings in _diff_weights.
    if byte_offset == 0 or byte_offset == 3:
        pair = (old_pixels << 8) + new_pixels
    else:
        pair = (old_pixels << 12) + new_pixels

    p = target_pixelmap.edit_distances[byte_offset][pair]

    return p
|
2019-03-14 22:32:52 +00:00
|
|
|
|
|
|
|
@staticmethod
def _compute_delta(
        content: int,
        target: screen.DHGRBitmap,
        old,
        is_aux: bool
):
    """Compute error delta from substituting `content` at every offset.

    Returns a (32, 256) array where negative entries indicate offsets at
    which storing `content` would reduce error relative to `old`.
    """
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin int gives the identical default integer dtype.
    diff = np.ndarray((32, 256), dtype=int)

    # TODO: use error edit distance

    if is_aux:
        # Pixels influenced by byte offset 0
        source_pixels0 = target.mask_and_shift_data(
            target.masked_update(0, target.packed, content), 0)
        target_pixels0 = target.mask_and_shift_data(target.packed, 0)

        # Concatenate 8-bit source and target into 16-bit values
        pair0 = (source_pixels0 << 8) + target_pixels0
        dist0 = target.edit_distances[0][pair0].reshape(pair0.shape)

        # Pixels influenced by byte offset 2
        source_pixels2 = target.mask_and_shift_data(
            target.masked_update(2, target.packed, content), 2)
        target_pixels2 = target.mask_and_shift_data(target.packed, 2)
        # Concatenate 12-bit source and target into 24-bit values
        pair2 = (source_pixels2 << 12) + target_pixels2
        dist2 = target.edit_distances[2][pair2].reshape(pair2.shape)

        diff[:, 0::2] = dist0
        diff[:, 1::2] = dist2

    else:
        # Pixels influenced by byte offset 1
        source_pixels1 = target.mask_and_shift_data(
            target.masked_update(1, target.packed, content), 1)
        target_pixels1 = target.mask_and_shift_data(target.packed, 1)
        pair1 = (source_pixels1 << 12) + target_pixels1
        dist1 = target.edit_distances[1][pair1].reshape(pair1.shape)

        # Pixels influenced by byte offset 3
        source_pixels3 = target.mask_and_shift_data(
            target.masked_update(3, target.packed, content), 3)
        target_pixels3 = target.mask_and_shift_data(target.packed, 3)
        pair3 = (source_pixels3 << 8) + target_pixels3
        dist3 = target.edit_distances[3][pair3].reshape(pair3.shape)

        diff[:, 0::2] = dist1
        diff[:, 1::2] = dist3

    # TODO: try different weightings
    return (diff * 5) - old
|
2019-03-14 22:32:52 +00:00
|
|
|
|
|
|
|
# All candidate byte offsets within a page, used for boolean indexing.
_OFFSETS = np.arange(256)

def _compute_error(self, page, content, target_pixelmap, old_error,
                   content_deltas, is_aux):
    """Yield (positive_priority, offset) pairs, best first, for offsets
    on `page` where storing `content` would reduce error.

    Results for each content byte are memoized in `content_deltas`.
    """
    # TODO: move this up into parent
    delta_screen = content_deltas.get(content)
    if delta_screen is None:
        delta_screen = self._compute_delta(
            content, target_pixelmap, old_error, is_aux)
        content_deltas[content] = delta_screen

    delta_page = delta_screen[page]
    # Negative delta == net improvement from writing `content` there.
    cond = delta_page < 0
    candidate_offsets = self._OFFSETS[cond]
    priorities = delta_page[cond]

    # TODO: vectorize this with numpy
    deltas = [
        (priorities[i], random.randint(0, 10000), candidate_offsets[i])
        for i in range(len(candidate_offsets))
    ]
    heapq.heapify(deltas)

    while deltas:
        pri, _, o = heapq.heappop(deltas)
        assert pri < 0
        # NOTE(review): this excludes offset 255 — looks like it should
        # be `o <= 255` (offsets range 0..255); confirm intent.
        assert o < 255

        yield -pri, o
|