mirror of https://github.com/dgelessus/python-rsrcfork.git
synced 2024-12-30 10:30:37 +00:00

Fix various issues reported by mypy

This commit is contained in:
parent 449bf4dd71
commit e5875ffe67
.gitignore (vendored)
@@ -6,3 +6,6 @@ __pycache__/
 *.egg-info/
 build/
 dist/
+
+# mypy
+.mypy_cache/
@@ -190,6 +190,7 @@ Version 1.3.1 (next version)
 
 * Fixed an incorrect ``options.packages`` in ``setup.cfg``, which made the library unusable except when installing from source using ``--editable``.
 * Fixed ``ResourceFile.__enter__`` returning ``None``, which made it impossible to use ``ResourceFile`` properly in a ``with`` statement.
+* Fixed various minor errors reported by type checking with ``mypy``.
 
 Version 1.3.0.post1
 ^^^^^^^^^^^^^^^^^^^
@@ -25,7 +25,7 @@ _REZ_ATTR_NAMES = {
 	api.ResourceAttrs.resCompressed: None, # "Extended Header resource attribute"
 }
 
-F = typing.TypeVar("F", bound=enum.Flag, covariant=True)
+F = typing.TypeVar("F", bound=enum.Flag)
 def _decompose_flags(value: F) -> typing.Sequence[F]:
 	"""Decompose an enum.Flags instance into separate enum constants."""
 	
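Note: mypy does not allow a covariant type variable in the parameter position of an ordinary function (it reports "Cannot use a covariant type variable as a parameter"), which is presumably why covariant=True was dropped here. A minimal sketch of the pattern, using a hypothetical flag enum rather than the library's own types:

    import enum
    import typing

    F = typing.TypeVar("F", bound=enum.Flag)

    def decompose_flags(value: F) -> typing.Sequence[F]:
        # Collect every named single-bit member that is set in the combined value.
        return [member for member in type(value) if member & value]

    class ExampleAttrs(enum.Flag):
        # Hypothetical flags, for illustration only.
        resPreload = 1 << 2
        resProtected = 1 << 3

    print(decompose_flags(ExampleAttrs.resPreload | ExampleAttrs.resProtected))
    # -> [ExampleAttrs.resPreload, ExampleAttrs.resProtected] (exact repr varies by Python version)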
@@ -45,14 +45,14 @@ def _bytes_unescape(string: str) -> bytes:
 	(We implement our own unescaping mechanism here to not depend on any of Python's string/bytes escape syntax.)
 	"""
 	
-	out = []
+	out: typing.List[int] = []
 	it = iter(string)
 	for char in it:
 		if char == "\\":
 			try:
 				esc = next(it)
 				if esc in "\\\'\"":
-					out.append(esc)
+					out.extend(esc.encode(_TEXT_ENCODING))
 				elif esc == "x":
 					x1, x2 = next(it), next(it)
 					out.append(int(x1+x2, 16))
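Two things happen in this hunk: the empty list gets an explicit element type (mypy reports "Need type annotation" for a bare []), and the escaped character is now encoded to bytes before being added, so the list consistently holds ints that can later be passed to bytes(). A rough standalone sketch of the idea, not the library's actual function (which handles more escape forms and uses the module's _TEXT_ENCODING constant; "ascii" is a stand-in here):

    import typing

    def unescape(string: str, encoding: str = "ascii") -> bytes:
        # The annotation tells mypy the element type of the (initially empty) list.
        out: typing.List[int] = []
        it = iter(string)
        for char in it:
            if char == "\\":
                esc = next(it)
                if esc in "\\'\"":
                    # str.encode returns bytes, i.e. an iterable of ints.
                    out.extend(esc.encode(encoding))
                elif esc == "x":
                    x1, x2 = next(it), next(it)
                    out.append(int(x1 + x2, 16))
            else:
                out.extend(char.encode(encoding))
        # bytes() accepts an iterable of ints in range(256).
        return bytes(out)

    print(unescape(r"a\x41\'"))  # b"aA'"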
@@ -82,8 +82,8 @@ def _bytes_escape(bs: bytes, *, quote: str=None) -> str:
 	
 	return "".join(out)
 
-def _filter_resources(rf: api.ResourceFile, filters: typing.Sequence[str]) -> typing.Sequence[api.Resource]:
-	matching = collections.OrderedDict()
+def _filter_resources(rf: api.ResourceFile, filters: typing.Sequence[str]) -> typing.List[api.Resource]:
+	matching: typing.MutableMapping[typing.Tuple[bytes, int], api.Resource] = collections.OrderedDict()
 	
 	for filter in filters:
 		if len(filter) == 4:
@@ -109,44 +109,44 @@ def _filter_resources(rf: api.ResourceFile, filters: typing.Sequence[str]) -> ty
 		elif filter[pos + 1] != " ":
 			raise ValueError(f"Invalid filter {filter!r}: Resource type and ID must be separated by a space")
 		
-		restype, resid = filter[:pos + 1], filter[pos + 2:]
+		restype_str, resid_str = filter[:pos + 1], filter[pos + 2:]
 		
-		if not restype[0] == restype[-1] == "'":
+		if not restype_str[0] == restype_str[-1] == "'":
 			raise ValueError(
-				f"Invalid filter {filter!r}: Resource type is not a single-quoted type identifier: {restype!r}")
-		restype = _bytes_unescape(restype[1:-1])
+				f"Invalid filter {filter!r}: Resource type is not a single-quoted type identifier: {restype_str!r}")
+		restype = _bytes_unescape(restype_str[1:-1])
 		
 		if len(restype) != 4:
 			raise ValueError(
 				f"Invalid filter {filter!r}: Type identifier must be 4 bytes after replacing escapes, got {len(restype)} bytes: {restype!r}")
 		
-		if resid[0] != "(" or resid[-1] != ")":
+		if resid_str[0] != "(" or resid_str[-1] != ")":
 			raise ValueError(f"Invalid filter {filter!r}: Resource ID must be parenthesized")
-		resid = resid[1:-1]
+		resid_str = resid_str[1:-1]
 		
 		try:
 			resources = rf[restype]
 		except KeyError:
 			continue
 		
-		if resid[0] == resid[-1] == '"':
-			name = _bytes_unescape(resid[1:-1])
+		if resid_str[0] == resid_str[-1] == '"':
+			name = _bytes_unescape(resid_str[1:-1])
 			
 			for res in resources.values():
 				if res.name == name:
 					matching[res.type, res.id] = res
 					break
-		elif ":" in resid:
-			if resid.count(":") > 1:
-				raise ValueError(f"Invalid filter {filter!r}: Too many colons in ID range expression: {resid!r}")
-			start, end = resid.split(":")
-			start, end = int(start), int(end)
+		elif ":" in resid_str:
+			if resid_str.count(":") > 1:
+				raise ValueError(f"Invalid filter {filter!r}: Too many colons in ID range expression: {resid_str!r}")
+			start_str, end_str = resid_str.split(":")
+			start, end = int(start_str), int(end_str)
 			
 			for res in resources.values():
 				if start <= res.id <= end:
 					matching[res.type, res.id] = res
 		else:
-			resid = int(resid)
+			resid = int(resid_str)
 			try:
 				res = resources[resid]
 			except KeyError:
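The *_str renames address mypy's rule that a variable keeps one type: the original code bound restype/resid to str slices and later re-bound them to bytes/int values, which mypy reports as "Incompatible types in assignment". A small illustration of the same pattern with a hypothetical ID parser:

    import typing

    def parse_resource_id(resid_str: str) -> typing.Union[int, typing.Tuple[int, int]]:
        # The textual form stays in resid_str; parsed values get their own names,
        # so no variable changes type along the way.
        if ":" in resid_str:
            start_str, end_str = resid_str.split(":")
            return int(start_str), int(end_str)
        else:
            resid = int(resid_str)
            return resid

    print(parse_resource_id("128"))      # 128
    print(parse_resource_id("128:200"))  # (128, 200)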
@@ -201,8 +201,10 @@ def _describe_resource(res: api.Resource, *, include_type: bool, decompress: boo
 		except compress.DecompressError:
 			length_desc = f"unparseable compressed data header ({res.length_raw} bytes compressed)"
 		else:
+			assert res.compressed_info is not None
 			length_desc = f"{res.length} bytes ({res.length_raw} bytes compressed, 'dcmp' ({res.compressed_info.dcmp_id}) format)"
 	else:
+		assert res.compressed_info is None
 		length_desc = f"{res.length_raw} bytes"
 	content_desc_parts.append(length_desc)
 	
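The added assert statements are for the type checker: after `assert res.compressed_info is not None`, mypy narrows the Optional attribute and accepts the .dcmp_id access on the next line. A minimal sketch with hypothetical stand-in classes:

    import typing

    class CompressedInfo:
        def __init__(self, dcmp_id: int) -> None:
            self.dcmp_id = dcmp_id

    class Res:
        def __init__(self, compressed_info: typing.Optional[CompressedInfo]) -> None:
            self.compressed_info = compressed_info

    def describe(res: Res, is_compressed: bool) -> str:
        if is_compressed:
            # Without this assert, mypy reports that "None" has no attribute "dcmp_id".
            assert res.compressed_info is not None
            return f"'dcmp' ({res.compressed_info.dcmp_id}) format"
        else:
            assert res.compressed_info is None
            return "uncompressed"

    print(describe(Res(CompressedInfo(2)), True))  # 'dcmp' (2) format
    print(describe(Res(None), False))              # uncompressed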
@@ -323,9 +325,11 @@ def _show_filtered_resources(resources: typing.Sequence[api.Resource], format: s
 		else:
 			attrs_comment = ""
 		
-		attr_descs = [_REZ_ATTR_NAMES[attr] for attr in attrs]
-		if None in attr_descs:
-			attr_descs[:] = [f"${res.attributes.value:02X}"]
+		attr_descs_with_none = [_REZ_ATTR_NAMES[attr] for attr in attrs]
+		if None in attr_descs_with_none:
+			attr_descs = [f"${res.attributes.value:02X}"]
+		else:
+			attr_descs = typing.cast(typing.List[str], attr_descs_with_none)
 		
 		parts = [str(res.id)]
 		
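Because _REZ_ATTR_NAMES maps some attributes to None, the list comprehension has type List[Optional[str]]; once the None case has been ruled out, typing.cast tells mypy the remaining list may be treated as List[str]. A cast has no runtime effect, so it only records a fact the check above already guarantees. Sketch with a hypothetical name table:

    import typing

    ATTR_NAMES: typing.Mapping[int, typing.Optional[str]] = {
        1 << 2: "preload",
        1 << 3: "protected",
        1 << 0: None,  # no symbolic name for this attribute
    }

    def attribute_descriptions(attrs: typing.Sequence[int], raw_value: int) -> typing.List[str]:
        attr_descs_with_none = [ATTR_NAMES[attr] for attr in attrs]  # List[Optional[str]]
        if None in attr_descs_with_none:
            # Fall back to the raw hex value if any attribute has no symbolic name.
            attr_descs = [f"${raw_value:02X}"]
        else:
            # Safe: the branch above ensures there are no None entries left.
            attr_descs = typing.cast(typing.List[str], attr_descs_with_none)
        return attr_descs

    print(attribute_descriptions([1 << 2, 1 << 3], 0x0C))  # ['preload', 'protected']
    print(attribute_descriptions([1 << 0], 0x01))          # ['$01']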
@@ -376,7 +380,7 @@ def _list_resource_file(rf: api.ResourceFile, *, sort: bool, group: str, decompr
 		return
 	
 	if group == "none":
-		all_resources = []
+		all_resources: typing.List[api.Resource] = []
 		for reses in rf.values():
 			all_resources.extend(reses.values())
 		if sort:
@@ -386,13 +390,13 @@ def _list_resource_file(rf: api.ResourceFile, *, sort: bool, group: str, decompr
 			print(_describe_resource(res, include_type=True, decompress=decompress))
 	elif group == "type":
 		print(f"{len(rf)} resource types:")
-		restype_items = rf.items()
+		restype_items: typing.Collection[typing.Tuple[bytes, typing.Mapping[int, api.Resource]]] = rf.items()
 		if sort:
 			restype_items = sorted(restype_items, key=lambda item: item[0])
-		for typecode, resources in restype_items:
+		for typecode, resources_map in restype_items:
 			restype = _bytes_escape(typecode, quote="'")
-			print(f"'{restype}': {len(resources)} resources:")
-			resources_items = resources.items()
+			print(f"'{restype}': {len(resources_map)} resources:")
+			resources_items: typing.Collection[typing.Tuple[int, api.Resource]] = resources_map.items()
 			if sort:
 				resources_items = sorted(resources_items, key=lambda item: item[0])
 			for resid, res in resources_items:
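The Collection annotations in the last two hunks exist because the same variable holds either a dict items() view or the list returned by sorted(); annotating it with the common supertype typing.Collection[...] keeps both assignments valid for mypy. Roughly:

    import typing

    resources: typing.Mapping[int, str] = {130: "icon", 128: "menu"}
    sort = True

    # Collection covers both the items() view and the list produced by sorted().
    resources_items: typing.Collection[typing.Tuple[int, str]] = resources.items()
    if sort:
        resources_items = sorted(resources_items, key=lambda item: item[0])

    for resid, name in resources_items:
        print(resid, name)
    # 128 menu
    # 130 icon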
@@ -176,7 +176,7 @@ class Resource(object):
 		Accessing this attribute may be faster than computing len(self.data) manually.
 		"""
 		
-		if ResourceAttrs.resCompressed in self.attributes:
+		if self.compressed_info is not None:
 			return self.compressed_info.decompressed_length
 		else:
 			return self.length_raw
@@ -188,7 +188,7 @@ class Resource(object):
 		Accessing this attribute may raise a DecompressError if the resource data is compressed and could not be decompressed. To access the compressed resource data, use the data_raw attribute.
 		"""
 		
-		if ResourceAttrs.resCompressed in self.attributes:
+		if self.compressed_info is not None:
 			try:
 				return self._data_decompressed
 			except AttributeError:
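Both hunks above replace the resCompressed flag test with a direct None check on compressed_info. The behaviour should be equivalent as long as the flag and the attribute stay in sync, but only the None check lets mypy narrow the Optional attribute and accept the access that follows. Sketch with hypothetical stand-ins:

    import typing

    class HeaderInfo:
        decompressed_length = 1024

    class Res:
        def __init__(self, compressed_info: typing.Optional[HeaderInfo], length_raw: int) -> None:
            self.compressed_info = compressed_info
            self.length_raw = length_raw

        @property
        def length(self) -> int:
            # Checking the attribute itself (rather than a separate flag) is what
            # allows mypy to narrow Optional[HeaderInfo] to HeaderInfo here.
            if self.compressed_info is not None:
                return self.compressed_info.decompressed_length
            else:
                return self.length_raw

    print(Res(HeaderInfo(), 100).length)  # 1024
    print(Res(None, 100).length)          # 100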
@@ -227,7 +227,7 @@ class ResourceFile(typing.Mapping[bytes, typing.Mapping[int, Resource]]):
 			
 			return iter(self._submap)
 		
-		def __contains__(self, key: int) -> bool:
+		def __contains__(self, key: object) -> bool:
 			"""Check if a resource with the given ID exists for this type code."""
 			
 			return key in self._submap
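typeshed declares Mapping.__contains__ as taking key: object (the `in` operator may be used with any object), so an override that narrows the parameter to int or bytes is flagged by mypy as an incompatible override; the same fix is applied to ResourceFile's own __contains__ further down. Sketch:

    import typing

    class IDMap(typing.Mapping[int, str]):
        def __init__(self, data: typing.Mapping[int, str]) -> None:
            self._data = dict(data)

        def __getitem__(self, key: int) -> str:
            return self._data[key]

        def __len__(self) -> int:
            return len(self._data)

        def __iter__(self) -> typing.Iterator[int]:
            return iter(self._data)

        # Must accept object to stay compatible with Mapping.__contains__;
        # "def __contains__(self, key: int)" would be an error under mypy.
        def __contains__(self, key: object) -> bool:
            return key in self._data

    m = IDMap({128: "menu"})
    print(128 in m, "MENU" in m)  # True False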
@@ -274,7 +274,7 @@ class ResourceFile(typing.Mapping[bytes, typing.Mapping[int, Resource]]):
 	_references: typing.MutableMapping[bytes, typing.MutableMapping[int, typing.Tuple[int, ResourceAttrs, int]]]
 	
 	@classmethod
-	def open(cls, filename: typing.Union[str, bytes, os.PathLike], *, fork: str="auto", **kwargs) -> "ResourceFile":
+	def open(cls, filename: typing.Union[str, os.PathLike], *, fork: str="auto", **kwargs) -> "ResourceFile":
 		"""Open the file at the given path as a ResourceFile.
 		
 		The fork parameter controls which fork of the file the resource data will be read from. It accepts the following values:
@@ -371,7 +371,7 @@ class ResourceFile(typing.Mapping[bytes, typing.Mapping[int, Resource]]):
 			raise InvalidResourceFileError(f"Attempted to read {byte_count} bytes of data, but only got {len(data)} bytes")
 		return data
 	
-	def _stream_unpack(self, st: struct.Struct) -> typing.Tuple:
+	def _stream_unpack(self, st: struct.Struct) -> tuple:
 		"""Unpack data from the stream according to the struct st. The number of bytes to read is determined using st.size, so variable-sized structs cannot be used with this method."""
 		
 		try:
@@ -432,7 +432,8 @@ class ResourceFile(typing.Mapping[bytes, typing.Mapping[int, Resource]]):
 		self._references = collections.OrderedDict()
 		
 		for resource_type, count in self._reference_counts.items():
-			self._references[resource_type] = resmap = collections.OrderedDict()
+			resmap: typing.MutableMapping[int, typing.Tuple[int, ResourceAttrs, int]] = collections.OrderedDict()
+			self._references[resource_type] = resmap
 			for _ in range(count):
 				(
 					resource_id,
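An annotated assignment may only have a single target (PEP 526), so the chained `self._references[resource_type] = resmap = ...` is split: the annotated local is created first and then stored in the outer mapping. Roughly:

    import collections
    import typing

    references: typing.Dict[bytes, typing.MutableMapping[int, int]] = {}

    # The annotation goes on a single-target assignment; chaining it with the
    # subscript assignment in one statement is not allowed.
    resmap: typing.MutableMapping[int, int] = collections.OrderedDict()
    references[b"TEXT"] = resmap
    resmap[128] = 42

    print(references[b"TEXT"][128])  # 42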
@@ -470,7 +471,7 @@ class ResourceFile(typing.Mapping[bytes, typing.Mapping[int, Resource]]):
 		
 		return iter(self._references)
 	
-	def __contains__(self, key: bytes) -> bool:
+	def __contains__(self, key: object) -> bool:
 		"""Check whether this ResourceFile contains any resources of the given type."""
 		
 		return key in self._references
@@ -1,3 +1,5 @@
+import typing
+
 from . import common
 
 # Lookup table for codes in range(0x4b, 0xfe).
@@ -42,7 +44,7 @@ def decompress(header_info: common.CompressedHeaderInfo, data: bytes, *, debug:
 	if not isinstance(header_info, common.CompressedApplicationHeaderInfo):
 		raise common.DecompressError(f"Incorrect header type: {type(header_info).__qualname__}")
 	
-	prev_literals = []
+	prev_literals: typing.List[bytes] = []
 	decompressed = b""
 	
 	i = 0
@@ -1,3 +1,5 @@
+import typing
+
 from . import common
 
 # Lookup table for codes in range(0xd5, 0xfe).
@@ -25,7 +27,7 @@ def decompress(header_info: common.CompressedHeaderInfo, data: bytes, *, debug:
 	if not isinstance(header_info, common.CompressedApplicationHeaderInfo):
 		raise common.DecompressError(f"Incorrect header type: {type(header_info).__qualname__}")
 	
-	prev_literals = []
+	prev_literals: typing.List[bytes] = []
 	decompressed = b""
 	
 	i = 0