Diffstat (limited to 'src/build/png.py')
-rw-r--r-- | src/build/png.py | 74
1 files changed, 53 insertions, 21 deletions
diff --git a/src/build/png.py b/src/build/png.py
index f10210ed08d..f096a1dbdf5 100644
--- a/src/build/png.py
+++ b/src/build/png.py
@@ -325,7 +325,7 @@ def check_color(c, greyscale, which):
         return c
     if greyscale:
         try:
-            l = len(c)
+            len(c)
         except TypeError:
             c = (c,)
         if len(c) != 1:
@@ -376,7 +376,10 @@ class Writer:
                  planes=None,
                  colormap=None,
                  maxval=None,
-                 chunk_limit=2**20):
+                 chunk_limit=2**20,
+                 x_pixels_per_unit = None,
+                 y_pixels_per_unit = None,
+                 unit_is_meter = False):
         """
         Create a PNG encoder object.
 
@@ -407,6 +410,13 @@ class Writer:
           Create an interlaced image.
         chunk_limit
           Write multiple ``IDAT`` chunks to save memory.
+        x_pixels_per_unit (pHYs chunk)
+          Number of pixels a unit along the x axis
+        y_pixels_per_unit (pHYs chunk)
+          Number of pixels a unit along the y axis
+          With x_pixel_unit, give the pixel size ratio
+        unit_is_meter (pHYs chunk)
+          Indicates if unit is meter or not
 
         The image size (in pixels) can be specified either by using the
         `width` and `height` arguments, or with the single `size`
@@ -527,10 +537,11 @@ class Writer:
             bitdepth = int(8*bytes_per_sample)
             del bytes_per_sample
         if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth:
-            raise ValueError("bitdepth (%r) must be a postive integer <= 16" %
+            raise ValueError("bitdepth (%r) must be a positive integer <= 16" %
              bitdepth)
 
         self.rescale = None
+        palette = check_palette(palette)
         if palette:
             if bitdepth not in (1,2,4,8):
                 raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8")
@@ -588,7 +599,10 @@ class Writer:
         self.compression = compression
         self.chunk_limit = chunk_limit
         self.interlace = bool(interlace)
-        self.palette = check_palette(palette)
+        self.palette = palette
+        self.x_pixels_per_unit = x_pixels_per_unit
+        self.y_pixels_per_unit = y_pixels_per_unit
+        self.unit_is_meter = bool(unit_is_meter)
 
         self.color_type = 4*self.alpha + 2*(not greyscale) + 1*self.colormap
         assert self.color_type in (0,2,3,4,6)
@@ -637,12 +651,12 @@ class Writer:
             fmt = 'BH'[self.bitdepth > 8]
             a = array(fmt, itertools.chain(*rows))
             return self.write_array(outfile, a)
-        else:
-            nrows = self.write_passes(outfile, rows)
-            if nrows != self.height:
-                raise ValueError(
-                  "rows supplied (%d) does not match height (%d)" %
-                  (nrows, self.height))
+
+        nrows = self.write_passes(outfile, rows)
+        if nrows != self.height:
+            raise ValueError(
+              "rows supplied (%d) does not match height (%d)" %
+              (nrows, self.height))
 
     def write_passes(self, outfile, rows, packed=False):
         """
@@ -715,6 +729,11 @@ class Writer:
             write_chunk(outfile, 'bKGD',
                         struct.pack("!3H", *self.background))
 
+        # http://www.w3.org/TR/PNG/#11pHYs
+        if self.x_pixels_per_unit is not None and self.y_pixels_per_unit is not None:
+            tup = (self.x_pixels_per_unit, self.y_pixels_per_unit, int(self.unit_is_meter))
+            write_chunk(outfile, 'pHYs', struct.pack("!LLB",*tup))
+
         # http://www.w3.org/TR/PNG/#11IDAT
         if self.compression is not None:
             compressor = zlib.compressobj(self.compression)
@@ -1030,9 +1049,12 @@ def filter_scanline(type, line, fo, prev=None):
             pa = abs(p - a)
             pb = abs(p - b)
             pc = abs(p - c)
-            if pa <= pb and pa <= pc: Pr = a
-            elif pb <= pc: Pr = b
-            else: Pr = c
+            if pa <= pb and pa <= pc:
+                Pr = a
+            elif pb <= pc:
+                Pr = b
+            else:
+                Pr = c
             x = (x - Pr) & 0xff
             out.append(x)
 
@@ -1413,7 +1435,7 @@ class Reader:
                   % (type, length))
             checksum = self.file.read(4)
             if len(checksum) != 4:
-                raise ValueError('Chunk %s too short for checksum.', tag)
+                raise ChunkError('Chunk %s too short for checksum.' % type)
             if seek and type != seek:
                 continue
             verify = zlib.crc32(strtobytes(type))
@@ -1747,7 +1769,7 @@ class Reader:
     def process_chunk(self, lenient=False):
         """Process the next chunk and its data.  This only processes the
         following chunk types, all others are ignored: ``IHDR``,
-        ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``.
+        ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``, ``pHYs``.
 
         If the optional `lenient` argument evaluates to True,
         checksum failures will raise warnings rather than exceptions.
@@ -1866,6 +1888,15 @@ class Reader:
             not self.colormap and len(data) != self.planes):
             raise FormatError("sBIT chunk has incorrect length.")
 
+    def _process_pHYs(self, data):
+        # http://www.w3.org/TR/PNG/#11pHYs
+        self.phys = data
+        fmt = "!LLB"
+        if len(data) != struct.calcsize(fmt):
+            raise FormatError("pHYs chunk has incorrect length.")
+        self.x_pixels_per_unit, self.y_pixels_per_unit, unit = struct.unpack(fmt,data)
+        self.unit_is_meter = bool(unit)
+
     def read(self, lenient=False):
         """
         Read the PNG file and decode it.  Returns (`width`, `height`,
@@ -1902,8 +1933,8 @@ class Reader:
             be an iterator that yields the ``IDAT`` chunk data.
             """
 
-            # Currently, with no max_length paramter to decompress, this
-            # routine will do one yield per IDAT chunk.  So not very
+            # Currently, with no max_length parameter to decompress,
+            # this routine will do one yield per IDAT chunk: Not very
            # incremental.
             d = zlib.decompressobj()
             # Each IDAT chunk is passed to the decompressor, then any
@@ -2590,7 +2621,7 @@ def write_pnm(file, width, height, pixels, meta):
         else:
             # PPM
             fmt = 'P6'
-        file.write('%s %d %d %d\n' % (fmt, width, height, maxval))
+        header = '%s %d %d %d\n' % (fmt, width, height, maxval)
     if planes in (2,4):
         # PAM
         # See http://netpbm.sourceforge.net/doc/pam.html
@@ -2598,9 +2629,10 @@ if 2 == planes:
             tupltype = 'GRAYSCALE_ALPHA'
         else:
             tupltype = 'RGB_ALPHA'
-        file.write('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n'
-                   'TUPLTYPE %s\nENDHDR\n' %
-                   (width, height, planes, maxval, tupltype))
+        header = ('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n'
+                  'TUPLTYPE %s\nENDHDR\n' %
+                  (width, height, planes, maxval, tupltype))
+    file.write(header.encode('ascii'))
     # Values per row
     vpr = planes * width
     # struct format
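
Usage sketch (not part of the commit): assuming the patched module is importable as `png`, the new Writer keywords and the Reader attributes introduced above could be exercised as follows. The file name, the sample pixel rows, and the density value (11811 pixels per metre, roughly 300 DPI) are illustrative choices, not anything taken from the patch.

import png

# Write a tiny 4x2 greyscale image that carries a pHYs chunk.
rows = [[0, 255, 0, 255],
        [255, 0, 255, 0]]
w = png.Writer(width=4, height=2, greyscale=True, bitdepth=8,
               x_pixels_per_unit=11811,   # ~300 DPI expressed per metre (assumed value)
               y_pixels_per_unit=11811,
               unit_is_meter=True)
with open('phys_example.png', 'wb') as f:  # hypothetical file name
    w.write(f, rows)

# Read it back: after read(), the parsed pHYs fields are available as
# attributes on the Reader object (this patch does not add them to meta).
r = png.Reader(filename='phys_example.png')
width, height, pixels, meta = r.read()
print(r.x_pixels_per_unit, r.y_pixels_per_unit, r.unit_is_meter)

Note that the Writer only emits a pHYs chunk when both x_pixels_per_unit and y_pixels_per_unit are supplied, matching the guard added to write_passes above.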