Mirror of https://github.com/dgelessus/python-rsrcfork.git, synced 2026-04-20 02:17:24 +00:00.
Remove custom stream types and read all resource data upfront again
The custom stream types were almost always slower than just reading the entire data into memory, and there's no reason not to do that - resources are small enough that memory usage and disk IO speed aren't a concern (at least not for any machine that's modern enough to run Python 3...). Perhaps the only performance advantage was when reading a small amount of data from the start of a compressed resource. In that case the custom stream could incrementally decompress only the part of the data that's actually needed, which was a bit faster than decompressing the entire resource and then throwing away most of the data. But this situation is rare enough that it's not worth handling in the rsrcfork library. If this is a real performance issue for someone, they can manually call the incremental decompression functions from rsrcfork.compress where needed.
This commit is contained in:
+7
-11
@@ -179,8 +179,8 @@ class Resource(object):
 		try:
 			return self._data_raw
 		except AttributeError:
-			with self.open_raw() as f:
-				self._data_raw = f.read()
+			self._resfile._stream.seek(self._resfile.data_offset + self.data_raw_offset + STRUCT_RESOURCE_DATA_HEADER.size)
+			self._data_raw = _io_utils.read_exact(self._resfile._stream, self.length_raw)
 			return self._data_raw
 	
 	def open_raw(self) -> typing.BinaryIO:
@@ -196,7 +196,7 @@ class Resource(object):
 		because the stream API does not require the entire resource data to be read in advance.
 		"""
 		
-		return _io_utils.make_substream(self._resfile._stream, self._resfile.data_offset + self.data_raw_offset + STRUCT_RESOURCE_DATA_HEADER.size, self.length_raw)
+		return io.BytesIO(self.data_raw)
 	
 	@property
 	def compressed_info(self) -> typing.Optional[compress.common.CompressedHeaderInfo]:
@@ -252,8 +252,9 @@ class Resource(object):
 			try:
 				return self._data_decompressed
 			except AttributeError:
-				with self.open() as f:
-					self._data_decompressed = f.read()
+				with self.open_raw() as compressed_f:
+					compressed_f.seek(self.compressed_info.header_length)
+					self._data_decompressed = b"".join(compress.decompress_stream_parsed(self.compressed_info, compressed_f))
 				return self._data_decompressed
 		else:
 			return self.data_raw
@@ -271,12 +272,7 @@ class Resource(object):
 		because the stream API does not require the entire resource data to be read (and possibly decompressed) in advance.
 		"""
 		
-		if self.compressed_info is None:
-			return self.open_raw()
-		else:
-			f = self.open_raw()
-			f.seek(self.compressed_info.header_length)
-			return compress.DecompressingStream(f, self.compressed_info, close_stream=True)
+		return io.BytesIO(self.data)
 	
 	
 class _LazyResourceMap(typing.Mapping[int, Resource]):
Reference in New Issue
Block a user