Added IOExtras.copy_stream_n and used it to avoid loading large entries into memory when copying from one stream to another.

This commit is contained in:
thomas 2006-12-24 10:42:08 +00:00
parent 486ae7d07b
commit f3877622d3
2 changed files with 13 additions and 3 deletions

View File

@@ -1,6 +1,6 @@
module IOExtras #:nodoc:
CHUNK_SIZE = 32768
CHUNK_SIZE = 131072
RANGE_ALL = 0..-1
@@ -9,6 +9,16 @@ module IOExtras #:nodoc:
ostream.write(istream.read(CHUNK_SIZE, s)) until istream.eof?
end
# Copies at most +nbytes+ bytes from +istream+ to +ostream+, reading in
# CHUNK_SIZE pieces so large entries are never held in memory all at once.
# Stops early if +istream+ reaches EOF before +nbytes+ bytes are copied.
#
# ostream - destination object responding to #write
# istream - source object responding to #read(n, buf) and #eof?
# nbytes  - maximum number of bytes to transfer
def self.copy_stream_n(ostream, istream, nbytes)
  buffer = ''
  remaining = nbytes
  while remaining > 0 && !istream.eof?
    chunk = remaining > CHUNK_SIZE ? CHUNK_SIZE : remaining
    data = istream.read(chunk, buffer)
    break if data.nil? # read returns nil once the stream is exhausted
    ostream.write(data)
    # Subtract what was actually read, not what was requested: a short
    # read near EOF would otherwise make the remaining-byte count drift.
    remaining -= data.bytesize
  end
end
# Implements kind_of? in order to pretend to be an IO object
module FakeIO

View File

@@ -968,10 +968,10 @@ module Zip
src_pos = entry.local_entry_offset
entry.write_local_entry(@outputStream)
@compressor = NullCompressor.instance
@outputStream << entry.get_raw_input_stream {
entry.get_raw_input_stream {
|is|
is.seek(src_pos, IO::SEEK_SET)
is.read(entry.compressed_size)
IOExtras.copy_stream_n(@outputStream, is, entry.compressed_size)
}
@compressor = NullCompressor.instance
@currentEntry = nil