#
# The Python Imaging Library.
# $Id$
#
# base class for image file handlers
#
# history:
# 1995-09-09 fl   Created
# 1996-03-11 fl   Fixed load mechanism.
# 1996-04-15 fl   Added pcx/xbm decoders.
# 1996-04-30 fl   Added encoders.
# 1996-12-14 fl   Added load helpers
# 1997-01-11 fl   Use encode_to_file where possible
# 1997-08-27 fl   Flush output in _save
# 1998-03-05 fl   Use memory mapping for some modes
# 1999-02-04 fl   Use memory mapping also for "I;16" and "I;16B"
# 1999-05-31 fl   Added image parser
# 2000-10-12 fl   Set readonly flag on memory-mapped images
# 2002-03-20 fl   Use better messages for common decoder errors
# 2003-04-21 fl   Fall back on mmap/map_buffer if map is not available
# 2003-10-30 fl   Added StubImageFile class
# 2004-02-25 fl   Made incremental parser more robust
#
# Copyright (c) 1997-2004 by Secret Labs AB
# Copyright (c) 1995-2004 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#

from PIL import Image
from PIL._util import isPath

import io
import logging
import os
import sys
import struct

logger = logging.getLogger(__name__)

MAXBLOCK = 65536

SAFEBLOCK = 1024*1024

LOAD_TRUNCATED_IMAGES = False
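# Client code can set the flag above to tolerate damaged files; a rough
# sketch of typical use (the file name is illustrative):
#
#     from PIL import Image, ImageFile
#     ImageFile.LOAD_TRUNCATED_IMAGES = True
#     im = Image.open("partial.jpg")
#     im.load()   # keeps whatever could be decoded instead of raising IOError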

ERRORS = {
    -1: "image buffer overrun error",
    -2: "decoding error",
    -3: "unknown error",
    -8: "bad configuration",
    -9: "out of memory error"
}


def raise_ioerror(error):
    try:
        message = Image.core.getcodecstatus(error)
    except AttributeError:
        message = ERRORS.get(error)
    if not message:
        message = "decoder error %d" % error
    raise IOError(message + " when reading image file")


#
# --------------------------------------------------------------------
# Helpers

def _tilesort(t):
    # sort on offset
    return t[2]


#
# --------------------------------------------------------------------
# ImageFile base class

class ImageFile(Image.Image):
    "Base class for image file format handlers."

    def __init__(self, fp=None, filename=None):
        Image.Image.__init__(self)

        self.tile = None
        self.readonly = 1  # until we know better

        self.decoderconfig = ()
        self.decodermaxblock = MAXBLOCK

        if isPath(fp):
            # filename
            self.fp = open(fp, "rb")
            self.filename = fp
        else:
            # stream
            self.fp = fp
            self.filename = filename

        try:
            self._open()
        except (IndexError,  # end of data
                TypeError,  # end of data (ord)
                KeyError,  # unsupported mode
                EOFError,  # got header but not the first frame
                struct.error) as v:
            logger.exception(v)  # log the original traceback
            raise SyntaxError(v)

        if not self.mode or self.size[0] <= 0:
            raise SyntaxError("not identified by this driver")

    def draft(self, mode, size):
        "Set draft mode"

        pass

    def verify(self):
        "Check file integrity"

        # raise exception if something's wrong. must be called
        # directly after open, and closes file when finished.
        self.fp = None

    def load(self):
        "Load image data based on tile list"

        pixel = Image.Image.load(self)

        if self.tile is None:
            raise IOError("cannot load this image")
        if not self.tile:
            return pixel

        self.map = None
        use_mmap = self.filename and len(self.tile) == 1
        # As of pypy 2.1.0, memory mapping was failing here.
        use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info')

        readonly = 0

        # look for read/seek overrides
        try:
            read = self.load_read
            # don't use mmap if there are custom read/seek functions
            use_mmap = False
        except AttributeError:
            read = self.fp.read

        try:
            seek = self.load_seek
            use_mmap = False
        except AttributeError:
            seek = self.fp.seek

        if use_mmap:
            # try memory mapping
            d, e, o, a = self.tile[0]
            if d == "raw" and a[0] == self.mode and a[0] in Image._MAPMODES:
                try:
                    if hasattr(Image.core, "map"):
                        # use built-in mapper
                        self.map = Image.core.map(self.filename)
                        self.map.seek(o)
                        self.im = self.map.readimage(
                            self.mode, self.size, a[1], a[2]
                        )
                    else:
                        # use mmap, if possible
                        import mmap
                        fp = open(self.filename, "r+")
                        size = os.path.getsize(self.filename)
                        # FIXME: on Unix, use PROT_READ etc
                        self.map = mmap.mmap(fp.fileno(), size)
                        self.im = Image.core.map_buffer(
                            self.map, self.size, d, e, o, a
                        )
                    readonly = 1
                except (AttributeError, EnvironmentError, ImportError):
                    self.map = None

        self.load_prepare()

        if not self.map:
            # sort tiles in file order
            self.tile.sort(key=_tilesort)

        try:
            # FIXME: This is a hack to handle TIFF's JpegTables tag.
            prefix = self.tile_prefix
        except AttributeError:
            prefix = b""

        for d, e, o, a in self.tile:
            d = Image._getdecoder(self.mode, d, a, self.decoderconfig)
            seek(o)
            try:
                d.setimage(self.im, e)
            except ValueError:
                continue
            b = prefix
            while True:
                try:
                    s = read(self.decodermaxblock)
                except (IndexError, struct.error):  # truncated png/gif
                    if LOAD_TRUNCATED_IMAGES:
                        break
                    else:
                        raise IOError("image file is truncated")

                if not s and not d.handles_eof:  # truncated jpeg
                    self.tile = []

                    # JpegDecode needs to clean things up here either way
                    # If we don't destroy the decompressor,
                    # we have a memory leak.
                    d.cleanup()

                    if LOAD_TRUNCATED_IMAGES:
                        break
                    else:
                        raise IOError("image file is truncated "
                                      "(%d bytes not processed)" % len(b))

                b = b + s
                n, e = d.decode(b)
                if n < 0:
                    break
                b = b[n:]
            # Need to cleanup here to prevent leaks in PyPy
            d.cleanup()

        self.tile = []
        self.readonly = readonly

        self.fp = None  # might be shared

        if not self.map and not LOAD_TRUNCATED_IMAGES and e < 0:
            # still raised if decoder fails to return anything
            raise_ioerror(e)

        # post processing
        if hasattr(self, "tile_post_rotate"):
            # FIXME: This is a hack to handle rotated PCD's
            self.im = self.im.rotate(self.tile_post_rotate)
            self.size = self.im.size

        self.load_end()

        return Image.Image.load(self)

    def load_prepare(self):
        # create image memory if necessary
        if not self.im or\
           self.im.mode != self.mode or self.im.size != self.size:
            self.im = Image.core.new(self.mode, self.size)
        # create palette (optional)
        if self.mode == "P":
            Image.Image.load(self)

    def load_end(self):
        # may be overridden
        pass

    # may be defined for contained formats
    # def load_seek(self, pos):
    #     pass

    # may be defined for blocked formats (e.g. PNG)
    # def load_read(self, bytes):
    #     pass
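
# A minimal sketch of a format handler built on the class above (the
# "SimpleImageFile" plugin and its 8-byte header are hypothetical, not part
# of PIL): _open() parses the header and fills in self.mode, self.size and
# self.tile, and the inherited load() machinery does the rest.
#
#     class SimpleImageFile(ImageFile):
#         format = "SIMPLE"
#         format_description = "hypothetical raw greyscale format"
#
#         def _open(self):
#             # header: 4-byte magic + two 16-bit little-endian sizes
#             header = self.fp.read(8)
#             if header[:4] != b"SIMP":
#                 raise SyntaxError("not a SIMPLE file")
#             self.mode = "L"
#             self.size = struct.unpack("<HH", header[4:8])
#             # one raw tile covering the whole image; pixel data starts at 8
#             self.tile = [("raw", (0, 0) + self.size, 8, ("L", 0, 1))]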

class StubImageFile(ImageFile):
    """
    Base class for stub image loaders.

    A stub loader is an image loader that can identify files of a
    certain format, but relies on external code to load the file.
    """

    def _open(self):
        raise NotImplementedError(
            "StubImageFile subclass must implement _open"
        )

    def load(self):
        loader = self._load()
        if loader is None:
            raise IOError("cannot find loader for this %s file" % self.format)
        image = loader.load(self)
        assert image is not None
        # become the other object (!)
        self.__class__ = image.__class__
        self.__dict__ = image.__dict__

    def _load(self):
        "(Hook) Find actual image loader."
        raise NotImplementedError(
            "StubImageFile subclass must implement _load"
        )
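
# A rough sketch of the stub pattern (hypothetical "XYZ" names, not part of
# PIL): the stub plugin only identifies the file, and _load() hands back an
# externally registered handler whose load(image) method returns the fully
# loaded image.
#
#     _handler = None
#
#     def register_handler(handler):
#         global _handler
#         _handler = handler
#
#     class XYZStubImageFile(StubImageFile):
#         format = "XYZ"
#         format_description = "hypothetical externally decoded format"
#
#         def _open(self):
#             self.mode = "RGB"
#             self.size = 1, 1   # a real plugin reads this from the header
#
#         def _load(self):
#             return _handler    # None until external code registers one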

class Parser(object):
    """
    Incremental image parser. This class implements the standard
    feed/close consumer interface.
    """
    incremental = None
    image = None
    data = None
    decoder = None
    offset = 0
    finished = 0

    def reset(self):
        """
        (Consumer) Reset the parser. Note that you can only call this
        method immediately after you've created a parser; parser
        instances cannot be reused.
        """
        assert self.data is None, "cannot reuse parsers"

    def feed(self, data):
        """
        (Consumer) Feed data to the parser.

        :param data: A string buffer.
        :exception IOError: If the parser failed to parse the image file.
        """
        # collect data

        if self.finished:
            return

        if self.data is None:
            self.data = data
        else:
            self.data = self.data + data

        # parse what we have
        if self.decoder:

            if self.offset > 0:
                # skip header
                skip = min(len(self.data), self.offset)
                self.data = self.data[skip:]
                self.offset = self.offset - skip
                if self.offset > 0 or not self.data:
                    return

            n, e = self.decoder.decode(self.data)

            if n < 0:
                # end of stream
                self.data = None
                self.finished = 1
                if e < 0:
                    # decoding error
                    self.image = None
                    raise_ioerror(e)
                else:
                    # end of image
                    return
            self.data = self.data[n:]

        elif self.image:

            # if we end up here with no decoder, this file cannot
            # be incrementally parsed. wait until we've gotten all
            # available data
            pass

        else:

            # attempt to open this file
            try:
                try:
                    fp = io.BytesIO(self.data)
                    im = Image.open(fp)
                finally:
                    fp.close()  # explicitly close the virtual file
            except IOError:
                # traceback.print_exc()
                pass  # not enough data
            else:
                flag = hasattr(im, "load_seek") or hasattr(im, "load_read")
                if flag or len(im.tile) != 1:
                    # custom load code, or multiple tiles
                    self.decoder = None
                else:
                    # initialize decoder
                    im.load_prepare()
                    d, e, o, a = im.tile[0]
                    im.tile = []
                    self.decoder = Image._getdecoder(
                        im.mode, d, a, im.decoderconfig
                    )
                    self.decoder.setimage(im.im, e)

                    # calculate decoder offset
                    self.offset = o
                    if self.offset <= len(self.data):
                        self.data = self.data[self.offset:]
                        self.offset = 0

                self.image = im

    def close(self):
        """
        (Consumer) Close the stream.

        :returns: An image object.
        :exception IOError: If the parser failed to parse the image file either
                            because it cannot be identified or cannot be
                            decoded.
        """
        # finish decoding
        if self.decoder:
            # get rid of what's left in the buffers
            self.feed(b"")
            self.data = self.decoder = None
            if not self.finished:
                raise IOError("image was incomplete")
        if not self.image:
            raise IOError("cannot parse this image")
        if self.data:
            # incremental parsing not possible; reopen the file
            # now that we have all data
            try:
                fp = io.BytesIO(self.data)
                self.image = Image.open(fp)
            finally:
                self.image.load()
                fp.close()  # explicitly close the virtual file
        return self.image
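
# A short usage sketch of the incremental interface above (the file name is
# illustrative):
#
#     from PIL import ImageFile
#
#     parser = ImageFile.Parser()
#     with open("example.jpg", "rb") as fp:
#         while True:
#             chunk = fp.read(8192)
#             if not chunk:
#                 break
#             parser.feed(chunk)
#     im = parser.close()   # an Image object, or raises IOError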

# --------------------------------------------------------------------

def _save(im, fp, tile, bufsize=0):
    """Helper to save image based on tile list

    :param im: Image object.
    :param fp: File object.
    :param tile: Tile list.
    :param bufsize: Optional buffer size
    """

    im.load()
    if not hasattr(im, "encoderconfig"):
        im.encoderconfig = ()
    tile.sort(key=_tilesort)
    # FIXME: make MAXBLOCK a configuration parameter
    # It would be great if we could have the encoder specify what it needs
    # But, it would need at least the image size in most cases. RawEncode is
    # a tricky case.
    bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4)  # see RawEncode.c
    if fp == sys.stdout:
        fp.flush()
        return
    try:
        fh = fp.fileno()
        fp.flush()
    except (AttributeError, io.UnsupportedOperation):
        # compress to Python file-compatible object
        for e, b, o, a in tile:
            e = Image._getencoder(im.mode, e, a, im.encoderconfig)
            if o > 0:
                fp.seek(o, 0)
            e.setimage(im.im, b)
            while True:
                l, s, d = e.encode(bufsize)
                fp.write(d)
                if s:
                    break
            if s < 0:
                raise IOError("encoder error %d when writing image file" % s)
            e.cleanup()
    else:
        # slight speedup: compress to real file object
        for e, b, o, a in tile:
            e = Image._getencoder(im.mode, e, a, im.encoderconfig)
            if o > 0:
                fp.seek(o, 0)
            e.setimage(im.im, b)
            s = e.encode_to_file(fh, bufsize)
            if s < 0:
                raise IOError("encoder error %d when writing image file" % s)
            e.cleanup()
    if hasattr(fp, "flush"):
        fp.flush()
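
# For reference, a plugin typically invokes the helper above roughly like
# this (a single raw tile covering the whole image; the exact raw mode and
# offset depend on the format, so treat this as a sketch):
#
#     tile = [("raw", (0, 0) + im.size, 0, (im.mode, 0, 1))]
#     ImageFile._save(im, fp, tile)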

def _safe_read(fp, size):
    """
    Reads large blocks in a safe way. Unlike fp.read(n), this function
    doesn't trust the user. If the requested size is larger than
    SAFEBLOCK, the file is read block by block.

    :param fp: File handle. Must implement a **read** method.
    :param size: Number of bytes to read.
    :returns: A string containing up to *size* bytes of data.
    """
    if size <= 0:
        return b""
    if size <= SAFEBLOCK:
        return fp.read(size)
    data = []
    while size > 0:
        block = fp.read(min(size, SAFEBLOCK))
        if not block:
            break
        data.append(block)
        size -= len(block)
    return b"".join(data)
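
# Plugins generally call _safe_read() rather than fp.read() when the byte
# count comes from the file itself; a sketch (not a real call site):
#
#     length = struct.unpack(">I", self.fp.read(4))[0]   # untrusted size
#     data = ImageFile._safe_read(self.fp, length)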