mirror of https://github.com/dgelessus/python-rsrcfork.git (synced 2024-11-04 21:04:34 +00:00)
Fix mypy errors about byte strings as format string parameters
This commit is contained in:
parent e132a91dea
commit 6adf8eb88d
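Every hunk below follows the same pattern: an f-string interpolates a bytes value (or a value that may be bytes) into a str, which mypy flags because the implicit conversion embeds a b'...' repr in the output and is usually unintended; appending an explicit !r conversion keeps the rendered text identical while making the intent clear. A minimal sketch of the pattern, not taken from the repository (the variable name and value are invented):

# Sketch of the error class this commit addresses (not repository code;
# the variable and value are invented). mypy flags formatting a bytes value
# into a str f-string (roughly, its "str-bytes-safe" error) because the
# implicit conversion is usually a mistake. Adding !r makes the repr
# conversion explicit; the output is unchanged, since str() of a bytes
# object is already its repr.
resource_type = b"ICN#"

before = f"type {resource_type}"   # flagged by mypy
after = f"type {resource_type!r}"  # explicit repr: accepted

assert before == after == "type b'ICN#'"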
@@ -367,7 +367,7 @@ def format_compressed_header_info(header_info: compress.CompressedHeaderInfo) ->
 yield f"Working buffer fractional size: {header_info.working_buffer_fractional_size} 256ths of compressed data length"
 yield f"Expansion buffer size: {header_info.expansion_buffer_size} bytes"
 elif isinstance(header_info, compress.CompressedType9HeaderInfo):
-yield f"Decompressor-specific parameters: {header_info.parameters}"
+yield f"Decompressor-specific parameters: {header_info.parameters!r}"
 else:
 raise AssertionError(f"Unhandled compressed header info type: {type(header_info)}")
@@ -133,14 +133,14 @@ class Resource(object):
 decompress_ok = True

 if len(data) > 32:
-data_repr = f"<{len(data)} bytes: {data[:32]}...>"
+data_repr = f"<{len(data)} bytes: {data[:32]!r}...>"
 else:
 data_repr = repr(data)

 if not decompress_ok:
 data_repr = f"<decompression failed - compressed data: {data_repr}>"

-return f"<{type(self).__qualname__} type {self.type}, id {self.id}, name {self.name}, attributes {self.attributes}, data {data_repr}>"
+return f"<{type(self).__qualname__} type {self.type!r}, id {self.id}, name {self.name!r}, attributes {self.attributes}, data {data_repr}>"

 @property
 def resource_type(self) -> bytes:
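To illustrate the effect of the Resource.__repr__ change above, here is a hedged stand-in (class name and field values are invented, and the real Resource has much more state) showing that the bytes-typed fields now read as explicit reprs:

# Hypothetical stand-in for the Resource class above, reduced to what the
# changed f-string needs; all values are invented for illustration.
class FakeResource:
    def __init__(self) -> None:
        self.type = b"ICN#"
        self.id = 128
        self.name = b"Finder icon"
        self.attributes = 0

    def __repr__(self) -> str:
        data_repr = repr(b"\x00\x01")
        return f"<{type(self).__qualname__} type {self.type!r}, id {self.id}, name {self.name!r}, attributes {self.attributes}, data {data_repr}>"

print(FakeResource())
# <FakeResource type b'ICN#', id 128, name b'Finder icon', attributes 0, data b'\x00\x01'>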
@@ -272,7 +272,7 @@ class _LazyResourceMap(typing.Mapping[int, Resource]):
 else:
 contents = f"{len(self)} resources with IDs {list(self)}"

-return f"<Resource map for type {self.type}, containing {contents}>"
+return f"<Resource map for type {self.type!r}, containing {contents}>"

 class ResourceFile(typing.Mapping[bytes, typing.Mapping[int, Resource]], typing.ContextManager["ResourceFile"]):
 """A resource file reader operating on a byte stream."""
@@ -43,7 +43,7 @@ class CompressedHeaderInfo(object):
 except struct.error:
 raise DecompressError(f"Invalid header")
 if signature != COMPRESSED_SIGNATURE:
-raise DecompressError(f"Invalid signature: {signature!r}, expected {COMPRESSED_SIGNATURE}")
+raise DecompressError(f"Invalid signature: {signature!r}, expected {COMPRESSED_SIGNATURE!r}")
 if header_length != 0x12:
 raise DecompressError(f"Unsupported header length: 0x{header_length:>04x}, expected 0x12")
@@ -169,7 +169,7 @@ def decompress_stream_inner(header_info: common.CompressedHeaderInfo, stream: ty
 raise common.DecompressError(f"Repeat count must be positive: {count}")

 if debug:
-print(f"\t-> {to_repeat} * {count}")
+print(f"\t-> {to_repeat!r} * {count}")
 yield to_repeat * count
 elif kind == 0x04:
 # A sequence of 16-bit signed integers, with each integer encoded as a difference relative to the previous integer. The first integer is stored explicitly.
@@ -243,7 +243,7 @@ def decompress_stream_inner(header_info: common.CompressedHeaderInfo, stream: ty
 # Check that there really is no more data left.
 extra = stream.read(1)
 if extra:
-raise common.DecompressError(f"Extra data encountered after end of data marker (first extra byte: {extra})")
+raise common.DecompressError(f"Extra data encountered after end of data marker (first extra byte: {extra!r})")
 break
 else:
 raise common.DecompressError(f"Unknown tag byte: 0x{byte:>02x}")
@@ -254,7 +254,7 @@ def decompress_stream(header_info: common.CompressedHeaderInfo, stream: typing.B
 decompressed_length = 0
 for chunk in decompress_stream_inner(header_info, stream, debug=debug):
 if debug:
-print(f"\t-> {chunk}")
+print(f"\t-> {chunk!r}")

 if header_info.decompressed_length % 2 != 0 and decompressed_length + len(chunk) == header_info.decompressed_length + 1:
 # Special case: if the decompressed data length stored in the header is odd and one less than the length of the actual decompressed data, drop the last byte.
@@ -112,7 +112,7 @@ def decompress_stream_inner(header_info: common.CompressedHeaderInfo, stream: ty
 raise common.DecompressError(f"Repeat count must be positive: {count}")

 if debug:
-print(f"\t-> {to_repeat} * {count}")
+print(f"\t-> {to_repeat!r} * {count}")
 yield to_repeat * count
 else:
 raise common.DecompressError(f"Unknown extended code: 0x{kind:>02x}")
@@ -124,7 +124,7 @@ def decompress_stream_inner(header_info: common.CompressedHeaderInfo, stream: ty
 # Check that there really is no more data left.
 extra = stream.read(1)
 if extra:
-raise common.DecompressError(f"Extra data encountered after end of data marker (first extra byte: {extra})")
+raise common.DecompressError(f"Extra data encountered after end of data marker (first extra byte: {extra!r})")
 break
 else:
 raise common.DecompressError(f"Unknown tag byte: 0x{byte:>02x}")
@@ -135,7 +135,7 @@ def decompress_stream(header_info: common.CompressedHeaderInfo, stream: typing.B
 decompressed_length = 0
 for chunk in decompress_stream_inner(header_info, stream, debug=debug):
 if debug:
-print(f"\t-> {chunk}")
+print(f"\t-> {chunk!r}")

 decompressed_length += len(chunk)
 yield chunk
@@ -83,14 +83,14 @@ def _decompress_untagged(stream: "common.PeekableIO", decompressed_length: int,
 elif not stream.peek(1) and decompressed_length % 2 != 0:
 # Special case: if we are at the last byte of the compressed data, and the decompressed data has an odd length, the last byte is a single literal byte, and not a table reference.
 if debug:
-print(f"Last byte: {table_index_data}")
+print(f"Last byte: {table_index_data!r}")
 yield table_index_data
 break

 # Compressed data is untagged, every byte is a table reference.
 (table_index,) = table_index_data
 if debug:
-print(f"Reference: {table_index} -> {table[table_index]}")
+print(f"Reference: {table_index} -> {table[table_index]!r}")
 yield table[table_index]

 def _decompress_tagged(stream: "common.PeekableIO", decompressed_length: int, table: typing.Sequence[bytes], *, debug: bool=False) -> typing.Iterator[bytes]:
@@ -102,7 +102,7 @@ def _decompress_tagged(stream: "common.PeekableIO", decompressed_length: int, ta
 elif not stream.peek(1) and decompressed_length % 2 != 0:
 # Special case: if we are at the last byte of the compressed data, and the decompressed data has an odd length, the last byte is a single literal byte, and not a tag or a table reference.
 if debug:
-print(f"Last byte: {tag_data}")
+print(f"Last byte: {tag_data!r}")
 yield tag_data
 break
@@ -119,7 +119,7 @@ def _decompress_tagged(stream: "common.PeekableIO", decompressed_length: int, ta
 break
 (table_index,) = table_index_data
 if debug:
-print(f"Reference: {table_index} -> {table[table_index]}")
+print(f"Reference: {table_index} -> {table[table_index]!r}")
 yield table[table_index]
 else:
 # This is a literal (two uncompressed bytes that are literally copied into the output).
@@ -129,7 +129,7 @@ def _decompress_tagged(stream: "common.PeekableIO", decompressed_length: int, ta
 break
 # Note: the literal may be only a single byte long if it is located exactly at EOF. This is intended and expected - the 1-byte literal is yielded normally, and on the next iteration, decompression is terminated as EOF is detected.
 if debug:
-print(f"Literal: {literal}")
+print(f"Literal: {literal!r}")
 yield literal
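To confirm that the byte-string formatting diagnostics are gone after these changes, mypy can be run over the package; below is a small sketch using mypy's Python API (assuming mypy is installed and that "rsrcfork" is the package directory to check - this page does not show how the project itself invokes mypy):

# Sketch only: run mypy programmatically and print its report.
from mypy import api

stdout, stderr, exit_status = api.run(["rsrcfork"])
print(stdout, end="")
print(f"mypy exited with status {exit_status}")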