"""Encode a sequence of images as an optimized stream of screen changes."""
import functools
import heapq
import random
from typing import List, Iterator, Tuple

import numpy as np

import opcodes
import screen
from frame_grabber import FrameGrabber
from palette import Palette
from video_mode import VideoMode


class Video:
    """Encodes a sequence of image frames as a stream of screen updates."""
    CLOCK_SPEED = 1024 * 1024  # type: int

    def __init__(
            self,
            frame_grabber: FrameGrabber,
            ticks_per_second: float,
            mode: VideoMode = VideoMode.HGR,
            palette: Palette = Palette.NTSC
    ):
        self.mode = mode  # type: VideoMode
        self.frame_grabber = frame_grabber  # type: FrameGrabber
        self.ticks_per_second = ticks_per_second  # type: float
        self.ticks_per_frame = (
                self.ticks_per_second / frame_grabber.input_frame_rate
        )  # type: float
        self.frame_number = 0  # type: int
        self.palette = palette  # type: Palette

        # Initialize empty screen
        self.memory_map = screen.MemoryMap(
            screen_page=1)  # type: screen.MemoryMap
        if self.mode == VideoMode.DHGR:
            self.aux_memory_map = screen.MemoryMap(
                screen_page=1)  # type: screen.MemoryMap

            self.pixelmap = screen.DHGRBitmap(
                main_memory=self.memory_map,
                aux_memory=self.aux_memory_map
            )

        # Accumulates pending edit weights across frames
        self.update_priority = np.zeros((32, 256), dtype=np.int64)
        if self.mode == VideoMode.DHGR:
            self.aux_update_priority = np.zeros((32, 256), dtype=np.int64)

    def tick(self, ticks: int) -> bool:
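        """Reports whether the given cycle count is due a new frame.

        Advances the frame counter as a side effect when it is.
        """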
        if ticks >= (self.ticks_per_frame * self.frame_number):
            self.frame_number += 1
            return True
        return False

    def encode_frame(
            self,
            target: screen.MemoryMap,
            is_aux: bool,
    ) -> Iterator[opcodes.Opcode]:
        """Converge the screen towards the target frame, yielding opcodes.

        The caller consumes opcodes until its time budget for the frame is
        spent.
        """
        if is_aux:
            memory_map = self.aux_memory_map
            update_priority = self.aux_update_priority
        else:
            memory_map = self.memory_map
            update_priority = self.update_priority

        print("Mean update priority: %f" % update_priority.mean())

        yield from self._index_changes(
            memory_map, target, update_priority, is_aux)

    def _index_changes(
            self,
            source: screen.MemoryMap,
            target: screen.MemoryMap,
            update_priority: np.ndarray,
            is_aux: bool
    ) -> Iterator[Tuple[int, int, List[int]]]:
        """Transform encoded screen to sequence of change tuples."""

        if is_aux:
            target_pixelmap = screen.DHGRBitmap(
                main_memory=self.memory_map,
                aux_memory=target
            )
        else:
            target_pixelmap = screen.DHGRBitmap(
                main_memory=target,
                aux_memory=self.aux_memory_map
            )

        diff_weights = self._diff_weights(
            self.pixelmap, target_pixelmap, is_aux
        )

        # Clear any update priority entries that have resolved themselves
        # with the new frame
        update_priority[diff_weights == 0] = 0
        update_priority += diff_weights

        priorities = self._heapify_priorities(update_priority)
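
        # Cache of per-content-byte delta matrices computed lazily by
        # _compute_error, so each content value is evaluated at most once
        # per frame.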
        content_deltas = {}

        while priorities:
            pri, _, page, offset = heapq.heappop(priorities)

            # Check whether we've already cleared this diff while processing
            # an earlier opcode
            if update_priority[page, offset] == 0:
                continue

            offsets = [offset]
            content = target.page_offset[page, offset]
            assert content < 0x80  # DHGR palette bit not expected to be set

            # Clear priority for the offset we're emitting
            update_priority[page, offset] = 0
            diff_weights[page, offset] = 0

            # Update memory maps
            source.page_offset[page, offset] = content
            self.pixelmap.apply(page, offset, is_aux, content)

            # Make sure we don't emit this offset as a side-effect of some
            # other offset later.
            for cd in content_deltas.values():
                cd[page, offset] = 0
                # TODO: what if we add another content_deltas entry later?
                # We might clobber it again
2019-03-07 23:07:24 +00:00
|
|
|
|
|
|
|
# Need to find 3 more offsets to fill this opcode
            for err, o in self._compute_error(
                    page,
                    content,
                    target_pixelmap,
                    diff_weights,
                    content_deltas,
                    is_aux
            ):
                assert o != offset

                if update_priority[page, o] == 0:
                    # print("Skipping page=%d, offset=%d" % (page, o))
                    continue

                # Make sure we don't end up considering this (page, offset)
                # again until the next image frame.  Even if a better match
                # comes along, it's probably better to fix up some other
                # byte.
                # TODO: or should we recompute it with new error?
                for cd in content_deltas.values():
                    cd[page, o] = 0

                byte_offset = target_pixelmap.interleaved_byte_offset(
                    o, is_aux)
                old_packed = target_pixelmap.packed[page, o // 2]

                p = self._byte_pair_difference(
                    target_pixelmap, byte_offset, old_packed, content)

                # Update priority for the offset we're emitting
                update_priority[page, o] = p

                source.page_offset[page, o] = content
                self.pixelmap.apply(page, o, is_aux, content)

                if p:
                    # This content byte introduced an error, so put back on
                    # the heap in case we can get back to fixing it exactly
                    # during this frame.  Otherwise we'll get to it later.
                    heapq.heappush(
                        priorities, (-p, random.getrandbits(16), page, o))

                offsets.append(o)
                if len(offsets) == 3:
                    break

            # Pad to 4 if we didn't find enough
            for _ in range(len(offsets), 4):
                offsets.append(offsets[0])
            yield (page + 32, content, offsets)

        # TODO: there is still a bug causing residual diffs when we have
        # apparently run out of work to do
        if not np.array_equal(source.page_offset, target.page_offset):
            diffs = np.nonzero(source.page_offset != target.page_offset)
            for i in range(len(diffs[0])):
                diff_p = diffs[0][i]
                diff_o = diffs[1][i]

                print("Diff at (%d, %d): %d != %d" % (
                    diff_p, diff_o, source.page_offset[diff_p, diff_o],
                    target.page_offset[diff_p, diff_o]
                ))
            # assert False

        # If we run out of things to do, pad forever
        content = target.page_offset[0, 0]
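        # Re-storing a byte that already matches the target at page 32,
        # offset 0 should be a no-op for the display, so this makes for
        # harmless padding.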
        while True:
            yield (32, content, [0, 0, 0, 0])

    @staticmethod
    def _heapify_priorities(update_priority: np.ndarray) -> List:
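        """Build a heap of (-priority, random, page, offset) tuples.

        Negating the priority turns heapq's min-heap into a max-heap over
        update priority; the random field breaks ties so that equal
        priorities are not always popped in page/offset order.
        """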
        pages, offsets = update_priority.nonzero()
        priorities = [tuple(data) for data in np.stack((
            -update_priority[pages, offsets],
            # Don't use deterministic order for page, offset
            np.random.randint(0, 2**8, size=pages.shape[0]),
            pages,
            offsets)
        ).T.tolist()]

        heapq.heapify(priorities)
        return priorities

    def _diff_weights(
            self,
            source: screen.DHGRBitmap,
            target: screen.DHGRBitmap,
            is_aux: bool
    ):
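        """Edit distance between source and target bitmaps, per screen byte.

        The packed representation interleaves four screen bytes as
        (aux0, main1, aux2, main3).  The outer byte offsets (0 and 3) mask
        to 8 pixel bits and the inner ones (1 and 2) to 12, presumably
        because artifact colours bleed across the inner byte boundaries;
        hence the shifts of 8 and 12 when pairing source and target values
        below.
        """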
        diff = np.empty((32, 256), dtype=np.int64)

        if is_aux:
            # Pixels influenced by byte offset 0
            source_pixels0 = source.mask_and_shift_data(source.packed, 0)
            target_pixels0 = target.mask_and_shift_data(target.packed, 0)

            # Concatenate 8-bit source and target into 16-bit values
            pair0 = (source_pixels0 << 8) + target_pixels0
            dist0 = source.edit_distances(self.palette)[0][pair0].reshape(
                pair0.shape)

            # Pixels influenced by byte offset 2
            source_pixels2 = source.mask_and_shift_data(source.packed, 2)
            target_pixels2 = target.mask_and_shift_data(target.packed, 2)

            # Concatenate 12-bit source and target into 24-bit values
            pair2 = (source_pixels2 << 12) + target_pixels2
            dist2 = source.edit_distances(self.palette)[2][pair2].reshape(
                pair2.shape)

            diff[:, 0::2] = dist0
            diff[:, 1::2] = dist2

        else:
            # Pixels influenced by byte offset 1
            source_pixels1 = source.mask_and_shift_data(source.packed, 1)
            target_pixels1 = target.mask_and_shift_data(target.packed, 1)

            pair1 = (source_pixels1 << 12) + target_pixels1
            dist1 = source.edit_distances(self.palette)[1][pair1].reshape(
                pair1.shape)

            # Pixels influenced by byte offset 3
            source_pixels3 = source.mask_and_shift_data(source.packed, 3)
            target_pixels3 = target.mask_and_shift_data(target.packed, 3)

            pair3 = (source_pixels3 << 8) + target_pixels3
            dist3 = source.edit_distances(self.palette)[3][pair3].reshape(
                pair3.shape)

            diff[:, 0::2] = dist1
            diff[:, 1::2] = dist3

        return diff

    @functools.lru_cache(None)
    def _byte_pair_difference(
            self,
            target_pixelmap,
            byte_offset,
            old_packed,
            content
    ):
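        """Error introduced by storing content into one byte of old_packed.

        Looks up the precomputed edit distance between the pixels currently
        encoded at byte_offset and the pixels that would result from
        storing content there.
        """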
        old_pixels = target_pixelmap.mask_and_shift_data(
            old_packed, byte_offset)
        new_pixels = target_pixelmap.mask_and_shift_data(
            target_pixelmap.masked_update(
                byte_offset, old_packed, content), byte_offset)

        if byte_offset == 0 or byte_offset == 3:
            pair = (old_pixels << 8) + new_pixels
        else:
            pair = (old_pixels << 12) + new_pixels

        p = target_pixelmap.edit_distances(self.palette)[byte_offset][pair]

        return p

    def _compute_delta(
            self,
            content: int,
            target: screen.DHGRBitmap,
            old: np.ndarray,
            is_aux: bool
    ):
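        """Computes error weights from storing content at every offset.

        Returns a (32, 256) matrix weighted against the old diff weights;
        negative entries mark offsets where storing content would be a net
        improvement.
        """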
        diff = np.empty((32, 256), dtype=np.int64)

        # TODO: use error edit distance

        if is_aux:
            # Pixels influenced by byte offset 0
            source_pixels0 = target.mask_and_shift_data(
                target.masked_update(0, target.packed, content), 0)
            target_pixels0 = target.mask_and_shift_data(target.packed, 0)

            # Concatenate 8-bit source and target into 16-bit values
            pair0 = (source_pixels0 << 8) + target_pixels0
            dist0 = target.edit_distances(self.palette)[0][pair0].reshape(
                pair0.shape)

            # Pixels influenced by byte offset 2
            source_pixels2 = target.mask_and_shift_data(
                target.masked_update(2, target.packed, content), 2)
            target_pixels2 = target.mask_and_shift_data(target.packed, 2)

            # Concatenate 12-bit source and target into 24-bit values
            pair2 = (source_pixels2 << 12) + target_pixels2
            dist2 = target.edit_distances(self.palette)[2][pair2].reshape(
                pair2.shape)

            diff[:, 0::2] = dist0
            diff[:, 1::2] = dist2

        else:
            # Pixels influenced by byte offset 1
            source_pixels1 = target.mask_and_shift_data(
                target.masked_update(1, target.packed, content), 1)
            target_pixels1 = target.mask_and_shift_data(target.packed, 1)

            pair1 = (source_pixels1 << 12) + target_pixels1
            dist1 = target.edit_distances(self.palette)[1][pair1].reshape(
                pair1.shape)

            # Pixels influenced by byte offset 3
            source_pixels3 = target.mask_and_shift_data(
                target.masked_update(3, target.packed, content), 3)
            target_pixels3 = target.mask_and_shift_data(target.packed, 3)

            pair3 = (source_pixels3 << 8) + target_pixels3
            dist3 = target.edit_distances(self.palette)[3][pair3].reshape(
                pair3.shape)

            diff[:, 0::2] = dist1
            diff[:, 1::2] = dist3

        # TODO: try different weightings
        return (diff * 5) - old
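
    # Every byte offset within a 256-byte page, for boolean-mask indexing
    # in _compute_error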
    _OFFSETS = np.arange(256)

    def _compute_error(self, page, content, target_pixelmap, old_error,
                       content_deltas, is_aux):
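        """Yields (improvement, offset) candidates for batching content.

        Offsets on the page are yielded in decreasing order of the error
        improvement from storing content there, with random tie-breaking;
        only strict improvements are considered.
        """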
        # TODO: move this up into parent
        delta_screen = content_deltas.get(content)
        if delta_screen is None:
            delta_screen = self._compute_delta(
                content, target_pixelmap, old_error, is_aux)
            content_deltas[content] = delta_screen

        delta_page = delta_screen[page]
        cond = delta_page < 0
        candidate_offsets = self._OFFSETS[cond]
        priorities = delta_page[cond]

        deltas = [
            (priorities[i], random.getrandbits(16), candidate_offsets[i])
            for i in range(len(candidate_offsets))
        ]
        heapq.heapify(deltas)

        while deltas:
            pri, _, o = heapq.heappop(deltas)
            assert pri < 0
            assert o < 255

            yield -pri, o