Diffstat (limited to 'scripts/build')
-rwxr-xr-x | scripts/build/check_include_guards.py | 51
-rwxr-xr-x[-rw-r--r--] | scripts/build/complay.py | 593
-rwxr-xr-x | scripts/build/file2lines.py | 96
-rwxr-xr-x[-rw-r--r--] | scripts/build/makedep.py | 1308
-rw-r--r-- | scripts/build/makelist.py | 167
-rw-r--r-- | scripts/build/msgfmt.py | 115
-rw-r--r-- | scripts/build/png.py | 3269
-rw-r--r-- | scripts/build/png2bdc.py | 10
-rw-r--r-- | scripts/build/verinfo.py | 263
9 files changed, 3169 insertions, 2703 deletions
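The first hunk below adds check_include_guards.py, which derives the expected #include guard macro for each header from its path relative to the supplied root directory and flags mismatching `#ifndef` lines. A minimal sketch of that naming rule follows; the helper name, the invocation root, and the example header path are illustrative assumptions, not taken from the diff itself.

```python
import os.path
import re

def expected_guard(root, header):
    # Sketch of the rule enforced by check_include_guards.py: 'MAME_' followed
    # by the header's directory components relative to the given root, then the
    # file name, all upper-cased with '-' and '.' replaced by '_'.
    rel = os.path.relpath(os.path.dirname(header), root)
    parts = [p.upper() for p in rel.split(os.sep) if p not in ('', os.curdir)]
    name = re.sub('[-.]', '_', os.path.basename(header).upper())
    return '_'.join(['MAME'] + parts + [name])

# Hypothetical example:
#   expected_guard('src/devices', 'src/devices/cpu/m68000/m68000.h')
#   -> 'MAME_CPU_M68000_M68000_H'
```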
diff --git a/scripts/build/check_include_guards.py b/scripts/build/check_include_guards.py new file mode 100755 index 00000000000..bcc1bd889e9 --- /dev/null +++ b/scripts/build/check_include_guards.py @@ -0,0 +1,51 @@ +#!/usr/bin/python3 +## +## icense:BSD-3-Clause +## copyright-holders:Vas Crabb + +import io +import os +import os.path +import re +import sys + + +def pathsplit(p): + result = [ ] + while p: + d, n = os.path.split(p) + if not n: + result.insert(0, d) + break + else: + result.insert(0, n) + p = d + return result + + +if __name__ == '__main__': + extpat = re.compile('.+\\.(h|hpp)$') + substpat = re.compile('[-.]') + guardpat = re.compile('^ *# *ifndef +([^\s]+)(\s+.*)?') + bad = False + if len(sys.argv) < 2: + sys.stderr.write("Error: requires at least one path defined\n") + sys.exit(2) + + for root in sys.argv[1:]: + for path, subdirs, files in os.walk(root): + prefix = 'MAME_' + '_'.join([n.upper() for n in pathsplit(os.path.relpath(path, root))]) + '_' + for f in files: + if extpat.match(f): + expected = prefix + substpat.sub('_', f.upper()) + fp = os.path.join(path, f) + with io.open(fp, 'r', encoding='utf-8') as fd: + for l in fd: + m = guardpat.match(l) + if m: + if m.group(1) != expected: + sys.stderr.write('%s: #include guard does not appear to match expected %s\n' % (fp, expected)) + bad = True + break + if bad: + sys.exit(1) diff --git a/scripts/build/complay.py b/scripts/build/complay.py index 72bdcfa01c8..6519a01de06 100644..100755 --- a/scripts/build/complay.py +++ b/scripts/build/complay.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ## ## license:BSD-3-Clause ## copyright-holders:Vas Crabb @@ -12,14 +12,9 @@ import xml.sax.saxutils import zlib -# workaround for version incompatibility -if sys.version_info > (3, ): - long = int - - -class ErrorHandler(object): +class ErrorHandler: def __init__(self, **kwargs): - super(ErrorHandler, self).__init__(**kwargs) + super().__init__(**kwargs) self.errors = 0 self.warnings = 0 @@ -35,9 +30,9 @@ class ErrorHandler(object): sys.stderr.write('warning: %s' % (exception)) -class Minifyer(object): +class Minifyer: def __init__(self, output, **kwargs): - super(Minifyer, self).__init__(**kwargs) + super().__init__(**kwargs) self.output = output self.incomplete_tag = False @@ -96,14 +91,13 @@ class LayoutChecker(Minifyer): BADTAGPATTERN = re.compile('[^abcdefghijklmnopqrstuvwxyz0123456789_.:^$]') VARPATTERN = re.compile('^.*~[0-9A-Za-z_]+~.*$') FLOATCHARS = re.compile('^.*[.eE].*$') - SHAPES = frozenset(('disk', 'dotmatrix', 'dotmatrix5dot', 'dotmatrixdot', 'led14seg', 'led14segsc', 'led16seg', 'led16segsc', 'led7seg', 'led8seg_gts1', 'rect')) - OBJECTS = frozenset(('backdrop', 'bezel', 'cpanel', 'marquee', 'overlay')) + SHAPES = frozenset(('disk', 'led14seg', 'led14segsc', 'led16seg', 'led16segsc', 'led7seg', 'rect')) ORIENTATIONS = frozenset((0, 90, 180, 270)) YESNO = frozenset(('yes', 'no')) BLENDMODES = frozenset(('none', 'alpha', 'multiply', 'add')) def __init__(self, output, **kwargs): - super(LayoutChecker, self).__init__(output=output, **kwargs) + super().__init__(output=output, **kwargs) self.locator = None self.errors = 0 self.elements = { } @@ -111,15 +105,17 @@ class LayoutChecker(Minifyer): self.views = { } self.referenced_elements = { } self.referenced_groups = { } + self.group_collections = { } + self.current_collections = None - def formatLocation(self): + def format_location(self): return '%s:%d:%d' % (self.locator.getSystemId(), self.locator.getLineNumber(), self.locator.getColumnNumber()) - def 
handleError(self, msg): + def handle_error(self, msg): self.errors += 1 - sys.stderr.write('error: %s: %s\n' % (self.formatLocation(), msg)) + sys.stderr.write('error: %s: %s\n' % (self.format_location(), msg)) - def checkIntAttribute(self, name, attrs, key, default): + def check_int_attribute(self, name, attrs, key, default): if key not in attrs: return default val = attrs[key] @@ -138,10 +134,10 @@ class LayoutChecker(Minifyer): try: return int(val[offs:], base) except: - self.handleError('Element %s attribute %s "%s" is not an integer' % (name, key, val)) + self.handle_error('Element %s attribute %s "%s" is not an integer' % (name, key, val)) return None - def checkFloatAttribute(self, name, attrs, key, default): + def check_float_attribute(self, name, attrs, key, default): if key not in attrs: return default val = attrs[key] @@ -150,10 +146,10 @@ class LayoutChecker(Minifyer): try: return float(val) except: - self.handleError('Element %s attribute %s "%s" is not a floating point number' % (name, key, val)) + self.handle_error('Element %s attribute %s "%s" is not a floating point number' % (name, key, val)) return None - def checkNumericAttribute(self, name, attrs, key, default): + def check_numeric_attribute(self, name, attrs, key, default): if key not in attrs: return default val = attrs[key] @@ -175,127 +171,193 @@ class LayoutChecker(Minifyer): return float(val) return int(val[offs:], base) except: - self.handleError('Element %s attribute %s "%s" is not a number' % (name, key, val)) + self.handle_error('Element %s attribute %s "%s" is not a number' % (name, key, val)) return None - def checkParameter(self, attrs): + def check_bool_attribute(self, name, attrs, key, default): + if key not in attrs: + return default + val = attrs[key] + if self.VARPATTERN.match(val): + return None + elif val in self.YESNO: + return 'yes' == val + self.handle_error('Element %s attribute %s "%s" is not "yes" or "no"' % (name, key, val)) + return None + + def check_parameter(self, attrs): if 'name' not in attrs: - self.handleError('Element param missing attribute name') + self.handle_error('Element param missing attribute name') else: name = attrs['name'] - self.checkNumericAttribute('param', attrs, 'increment', None) - lshift = self.checkIntAttribute('param', attrs, 'lshift', None) + self.check_numeric_attribute('param', attrs, 'increment', None) + lshift = self.check_int_attribute('param', attrs, 'lshift', None) if (lshift is not None) and (0 > lshift): - self.handleError('Element param attribute lshift "%s" is negative' % (attrs['lshift'], )) - rshift = self.checkIntAttribute('param', attrs, 'rshift', None) + self.handle_error('Element param attribute lshift "%s" is negative' % (attrs['lshift'], )) + rshift = self.check_int_attribute('param', attrs, 'rshift', None) if (rshift is not None) and (0 > rshift): - self.handleError('Element param attribute rshift "%s" is negative' % (attrs['rshift'], )) + self.handle_error('Element param attribute rshift "%s" is negative' % (attrs['rshift'], )) if self.repeat_depth and self.repeat_depth[-1]: if 'start' in attrs: if 'value' in attrs: - self.handleError('Element param has both start and value attributes') + self.handle_error('Element param has both start and value attributes') if 'name' in attrs: if name not in self.variable_scopes[-1]: self.variable_scopes[-1][name] = True elif not self.VARPATTERN.match(name): - self.handleError('Generator parameter "%s" redefined' % (name, )) + self.handle_error('Generator parameter "%s" redefined' % (name, )) else: if 
'value' not in attrs: - self.handleError('Element param missing attribute value') + self.handle_error('Element param missing attribute value') if ('increment' in attrs) or ('lshift' in attrs) or ('rshift' in attrs): - self.handleError('Element param has increment/lshift/rshift attribute(s) without start attribute') + self.handle_error('Element param has increment/lshift/rshift attribute(s) without start attribute') if 'name' in attrs: if not self.variable_scopes[-1].get(name, False): self.variable_scopes[-1][name] = False elif not self.VARPATTERN.match(name): - self.handleError('Generator parameter "%s" redefined' % (name, )) + self.handle_error('Generator parameter "%s" redefined' % (name, )) else: if ('start' in attrs) or ('increment' in attrs) or ('lshift' in attrs) or ('rshift' in attrs): - self.handleError('Element param with start/increment/lshift/rshift attribute(s) not in repeat scope') + self.handle_error('Element param with start/increment/lshift/rshift attribute(s) not in repeat scope') if 'value' not in attrs: - self.handleError('Element param missing attribute value') + self.handle_error('Element param missing attribute value') if 'name' in attrs: self.variable_scopes[-1][attrs['name']] = False - def checkBounds(self, attrs): - if self.have_bounds[-1]: - self.handleError('Duplicate element bounds') - else: - self.have_bounds[-1] = True - left = self.checkFloatAttribute('bounds', attrs, 'left', 0.0) - top = self.checkFloatAttribute('bounds', attrs, 'top', 0.0) - right = self.checkFloatAttribute('bounds', attrs, 'right', 1.0) - bottom = self.checkFloatAttribute('bounds', attrs, 'bottom', 1.0) - x = self.checkFloatAttribute('bounds', attrs, 'x', 0.0) - y = self.checkFloatAttribute('bounds', attrs, 'y', 0.0) - width = self.checkFloatAttribute('bounds', attrs, 'width', 1.0) - height = self.checkFloatAttribute('bounds', attrs, 'height', 1.0) + def check_bounds(self, attrs): + left = self.check_float_attribute('bounds', attrs, 'left', 0.0) + top = self.check_float_attribute('bounds', attrs, 'top', 0.0) + right = self.check_float_attribute('bounds', attrs, 'right', 1.0) + bottom = self.check_float_attribute('bounds', attrs, 'bottom', 1.0) + self.check_float_attribute('bounds', attrs, 'x', 0.0) + self.check_float_attribute('bounds', attrs, 'y', 0.0) + self.check_float_attribute('bounds', attrs, 'xc', 0.0) + self.check_float_attribute('bounds', attrs, 'yc', 0.0) + width = self.check_float_attribute('bounds', attrs, 'width', 1.0) + height = self.check_float_attribute('bounds', attrs, 'height', 1.0) if (left is not None) and (right is not None) and (left > right): - self.handleError('Element bounds attribute left "%s" is greater than attribute right "%s"' % ( + self.handle_error('Element bounds attribute left "%s" is greater than attribute right "%s"' % ( attrs.get('left', 0.0), attrs.get('right', 1.0))) if (top is not None) and (bottom is not None) and (top > bottom): - self.handleError('Element bounds attribute top "%s" is greater than attribute bottom "%s"' % ( + self.handle_error('Element bounds attribute top "%s" is greater than attribute bottom "%s"' % ( attrs.get('top', 0.0), attrs.get('bottom', 1.0))) if (width is not None) and (0.0 > width): - self.handleError('Element bounds attribute width "%s" is negative' % (attrs['width'], )) + self.handle_error('Element bounds attribute width "%s" is negative' % (attrs['width'], )) if (height is not None) and (0.0 > height): - self.handleError('Element bounds attribute height "%s" is negative' % (attrs['height'], )) - if ('left' not in 
attrs) and ('x' not in attrs): - self.handleError('Element bounds has neither attribute left nor attribute x') - has_ltrb = ('left' in attrs) or ('top' in attrs) or ('right' in attrs) or ('bottom' in attrs) - has_origin_size = ('x' in attrs) or ('y' in attrs) or ('width' in attrs) or ('height' in attrs) - if has_ltrb and has_origin_size: - self.handleError('Element bounds has both left/top/right/bottom and origin/size attributes') - - def checkOrientation(self, attrs): + self.handle_error('Element bounds attribute height "%s" is negative' % (attrs['height'], )) + if (('left' in attrs) and (('x' in attrs) or ('xc' in attrs))) or (('x' in attrs) and ('xc' in attrs)): + self.handle_error('Element bounds has multiple horizontal origin attributes (left/x/xc)') + if (('left' in attrs) and ('width' in attrs)) or ((('x' in attrs) or ('xc' in attrs)) and ('right' in attrs)): + self.handle_error('Element bounds has both left/right and x/xc/width attributes') + if (('top' in attrs) and (('y' in attrs) or ('yc' in attrs))) or (('y' in attrs) and ('yc' in attrs)): + self.handle_error('Element bounds has multiple vertical origin attributes (top/y/yc)') + if (('top' in attrs) and ('height' in attrs)) or ((('y' in attrs) or ('yc' in attrs)) and ('bottom' in attrs)): + self.handle_error('Element bounds has both top/bottom and y/yc/height attributes') + + def check_orientation(self, attrs): if self.have_orientation[-1]: - self.handleError('Duplicate element orientation') + self.handle_error('Duplicate element orientation') else: self.have_orientation[-1] = True - if self.checkIntAttribute('orientation', attrs, 'rotate', 0) not in self.ORIENTATIONS: - self.handleError('Element orientation attribute rotate "%s" is unsupported' % (attrs['rotate'], )) + if self.check_int_attribute('orientation', attrs, 'rotate', 0) not in self.ORIENTATIONS: + self.handle_error('Element orientation attribute rotate "%s" is unsupported' % (attrs['rotate'], )) for name in ('swapxy', 'flipx', 'flipy'): - if attrs.get(name, 'no') not in self.YESNO: - self.handleError('Element orientation attribute %s "%s" is not "yes" or "no"' % (name, attrs[name])) + self.check_bool_attribute('orientation', attrs, name, None) - def checkColorChannel(self, attrs, name): - channel = self.checkFloatAttribute('color', attrs, name, None) + def check_color(self, attrs): + self.check_color_channel(attrs, 'red') + self.check_color_channel(attrs, 'green') + self.check_color_channel(attrs, 'blue') + self.check_color_channel(attrs, 'alpha') + + def check_color_channel(self, attrs, name): + channel = self.check_float_attribute('color', attrs, name, None) if (channel is not None) and ((0.0 > channel) or (1.0 < channel)): - self.handleError('Element color attribute %s "%s" outside valid range 0.0-1.0' % (name, attrs[name])) + self.handle_error('Element color attribute %s "%s" outside valid range 0.0-1.0' % (name, attrs[name])) - def checkTag(self, tag, element, attr): + def check_tag(self, tag, element, attr): if '' == tag: - self.handleError('Element %s attribute %s is empty', (element, attr)) + self.handle_error('Element %s attribute %s is empty' % (element, attr)) else: if tag.find('^') >= 0: - self.handleError('Element %s attribute %s "%s" contains parent device reference' % (element, attr, tag)) + self.handle_error('Element %s attribute %s "%s" contains parent device reference' % (element, attr, tag)) if ':' == tag[-1]: - self.handleError('Element %s attribute %s "%s" ends with separator' % (element, attr, tag)) + self.handle_error('Element %s attribute %s 
"%s" ends with separator' % (element, attr, tag)) if tag.find('::') >= 0: - self.handleError('Element %s attribute %s "%s" contains double separator' % (element, attr, tag)) - - def checkComponent(self, name, attrs): - state = self.checkIntAttribute(name, attrs, 'state', None) - if (state is not None) and (0 > state): - self.handleError('Element %s attribute state "%s" is negative' % (name, attrs['state'])) - self.handlers.append((self.componentStartHandler, self.componentEndHandler)) - self.have_bounds.append(False) - self.have_color.append(False) + self.handle_error('Element %s attribute %s "%s" contains double separator' % (element, attr, tag)) + + def check_component(self, name, attrs): + statemask = self.check_int_attribute(name, attrs, 'statemask', None) + stateval = self.check_int_attribute(name, attrs, 'state', None) + if stateval is not None: + if 0 > stateval: + self.handle_error('Element %s attribute state "%s" is negative' % (name, attrs['state'])) + if (statemask is not None) and (stateval & ~statemask): + self.handle_error('Element %s attribute state "%s" has bits set that are clear in attribute statemask "%s"' % (name, attrs['state'], attrs['statemask'])) + if 'image' == name: + self.handlers.append((self.imageComponentStartHandler, self.imageComponentEndHandler)) + else: + self.handlers.append((self.componentStartHandler, self.componentEndHandler)) + self.have_bounds.append({ }) + self.have_color.append({ }) + + def check_view_item(self, name, attrs): + if 'id' in attrs: + if not attrs['id']: + self.handle_error('Element %s attribute id is empty' % (name, )) + elif not self.VARPATTERN.match(attrs['id']): + if attrs['id'] in self.item_ids: + self.handle_error('Element %s has duplicate id "%s" (previous %s)' % (name, attrs['id'], self.item_ids[attrs['id']])) + else: + self.item_ids[attrs['id']] = self.format_location() + if self.repeat_depth[-1]: + self.handle_error('Element %s attribute id "%s" in repeat contains no parameter references' % (name, attrs['id'])) + if ('blend' in attrs) and (attrs['blend'] not in self.BLENDMODES) and not self.VARPATTERN.match(attrs['blend']): + self.handle_error('Element %s attribute blend "%s" is unsupported' % (name, attrs['blend'])) + if 'inputtag' in attrs: + if 'inputmask' not in attrs: + self.handle_error('Element %s has inputtag attribute without inputmask attribute' % (name, )) + self.check_tag(attrs['inputtag'], name, 'inputtag') + elif 'inputmask' in attrs: + self.handle_error('Element %s has inputmask attribute without inputtag attribute' % (name, )) + inputraw = None + if 'inputraw' in attrs: + if (attrs['inputraw'] not in self.YESNO) and (not self.VARPATTERN.match(attrs['inputraw'])): + self.handle_error('Element %s attribute inputraw "%s" is not "yes" or "no"' % (name, attrs['inputraw'])) + else: + inputraw = 'yes' == attrs['inputraw'] + if 'inputmask' not in attrs: + self.handle_error('Element %s has inputraw attribute without inputmask attribute' % (name, )) + if 'inputtag' not in attrs: + self.handle_error('Element %s has inputraw attribute without inputtag attribute' % (name, )) + inputmask = self.check_int_attribute(name, attrs, 'inputmask', None) + if (inputmask is not None) and (not inputmask): + self.handle_error('Element %s attribute inputmask "%s" is zero' % (name, attrs['inputmask'])) + self.check_bool_attribute(name, attrs, 'clickthrough', None) + + def startViewItem(self, name): + self.handlers.append((self.viewItemStartHandler, self.viewItemEndHandler)) + self.have_bounds.append(None if 'group' == name else { }) + 
self.have_orientation.append(False) + self.have_color.append(None if 'group' == name else { }) + self.have_xscroll.append(None if ('group' == name) or ('screen' == name) else False) + self.have_yscroll.append(None if ('group' == name) or ('screen' == name) else False) def rootStartHandler(self, name, attrs): if 'mamelayout' != name: self.ignored_depth = 1 - self.handleError('Expected root element mamelayout but found %s' % (name, )) + self.handle_error('Expected root element mamelayout but found %s' % (name, )) else: if 'version' not in attrs: - self.handleError('Element mamelayout missing attribute version') + self.handle_error('Element mamelayout missing attribute version') else: try: - long(attrs['version']) + int(attrs['version']) except: - self.handleError('Element mamelayout attribute version "%s" is not an integer' % (attrs['version'], )) + self.handle_error('Element mamelayout attribute version "%s" is not an integer' % (attrs['version'], )) + self.have_script = None self.variable_scopes.append({ }) self.repeat_depth.append(0) self.handlers.append((self.layoutStartHandler, self.layoutEndHandler)) @@ -306,62 +368,73 @@ class LayoutChecker(Minifyer): def layoutStartHandler(self, name, attrs): if 'element' == name: if 'name' not in attrs: - self.handleError('Element element missing attribute name') + self.handle_error('Element element missing attribute name') else: generated_name = self.VARPATTERN.match(attrs['name']) if generated_name: self.generated_element_names = True if attrs['name'] not in self.elements: - self.elements[attrs['name']] = self.formatLocation() + self.elements[attrs['name']] = self.format_location() elif not generated_name: - self.handleError('Element element has duplicate name (previous %s)' % (self.elements[attrs['name']], )) - defstate = self.checkIntAttribute(name, attrs, 'defstate', None) + self.handle_error('Element element has duplicate name (previous %s)' % (self.elements[attrs['name']], )) + defstate = self.check_int_attribute(name, attrs, 'defstate', None) if (defstate is not None) and (0 > defstate): - self.handleError('Element element attribute defstate "%s" is negative' % (attrs['defstate'], )) + self.handle_error('Element element attribute defstate "%s" is negative' % (attrs['defstate'], )) self.handlers.append((self.elementStartHandler, self.elementEndHandler)) elif 'group' == name: + self.current_collections = { } if 'name' not in attrs: - self.handleError('Element group missing attribute name') + self.handle_error('Element group missing attribute name') else: generated_name = self.VARPATTERN.match(attrs['name']) if generated_name: self.generated_group_names = True if attrs['name'] not in self.groups: - self.groups[attrs['name']] = self.formatLocation() + self.groups[attrs['name']] = self.format_location() + if not generated_name: + self.group_collections[attrs['name']] = self.current_collections elif not generated_name: - self.handleError('Element group has duplicate name (previous %s)' % (self.groups[attrs['name']], )) + self.handle_error('Element group has duplicate name (previous %s)' % (self.groups[attrs['name']], )) self.handlers.append((self.groupViewStartHandler, self.groupViewEndHandler)) self.variable_scopes.append({ }) + self.item_ids = { } self.repeat_depth.append(0) - self.have_bounds.append(False) + self.have_bounds.append(None) elif ('view' == name) and (not self.repeat_depth[-1]): + self.current_collections = { } if 'name' not in attrs: - self.handleError('Element view missing attribute name') + self.handle_error('Element view 
missing attribute name') else: if attrs['name'] not in self.views: - self.views[attrs['name']] = self.formatLocation() + self.views[attrs['name']] = self.format_location() elif not self.VARPATTERN.match(attrs['name']): - self.handleError('Element view has duplicate name (previous %s)' % (self.views[attrs['name']], )) + self.handle_error('Element view has duplicate name "%s" (previous %s)' % (attrs['name'], self.views[attrs['name']])) + self.check_bool_attribute(name, attrs, 'showpointers', None) self.handlers.append((self.groupViewStartHandler, self.groupViewEndHandler)) self.variable_scopes.append({ }) + self.item_ids = { } self.repeat_depth.append(0) - self.have_bounds.append(False) + self.have_bounds.append(None) elif 'repeat' == name: if 'count' not in attrs: - self.handleError('Element repeat missing attribute count') + self.handle_error('Element repeat missing attribute count') else: - count = self.checkIntAttribute(name, attrs, 'count', None) + count = self.check_int_attribute(name, attrs, 'count', None) if (count is not None) and (0 >= count): - self.handleError('Element repeat attribute count "%s" is not positive' % (attrs['count'], )) + self.handle_error('Element repeat attribute count "%s" is not positive' % (attrs['count'], )) self.variable_scopes.append({ }) self.repeat_depth[-1] += 1 elif 'param' == name: - self.checkParameter(attrs) + self.check_parameter(attrs) self.ignored_depth = 1 elif ('script' == name) and (not self.repeat_depth[-1]): + if self.have_script is None: + self.have_script = self.format_location() + else: + self.handle_error('Duplicate script element (previous %s)' % (self.have_script, )) self.ignored_depth = 1 else: - self.handleError('Encountered unexpected element %s' % (name, )) + self.handle_error('Encountered unexpected element %s' % (name, )) self.ignored_depth = 1 def layoutEndHandler(self, name): @@ -372,55 +445,56 @@ class LayoutChecker(Minifyer): if not self.generated_element_names: for element in self.referenced_elements: if (element not in self.elements) and (not self.VARPATTERN.match(element)): - self.handleError('Element "%s" not found (first referenced at %s)' % (element, self.referenced_elements[element])) + self.handle_error('Element "%s" not found (first referenced at %s)' % (element, self.referenced_elements[element])) if not self.generated_group_names: for group in self.referenced_groups: if (group not in self.groups) and (not self.VARPATTERN.match(group)): - self.handleError('Group "%s" not found (first referenced at %s)' % (group, self.referenced_groups[group])) + self.handle_error('Group "%s" not found (first referenced at %s)' % (group, self.referenced_groups[group])) if not self.views: - self.handleError('No view elements found') + self.handle_error('No view elements found') + del self.have_script self.handlers.pop() def elementStartHandler(self, name, attrs): if name in self.SHAPES: - self.checkComponent(name, attrs) + self.check_component(name, attrs) elif 'text' == name: if 'string' not in attrs: - self.handleError('Element text missing attribute string') - align = self.checkIntAttribute(name, attrs, 'align', None) + self.handle_error('Element text missing attribute string') + align = self.check_int_attribute(name, attrs, 'align', None) if (align is not None) and ((0 > align) or (2 < align)): - self.handleError('Element text attribute align "%s" not in valid range 0-2' % (attrs['align'], )) - self.checkComponent(name, attrs) + self.handle_error('Element text attribute align "%s" not in valid range 0-2' % (attrs['align'], )) + 
self.check_component(name, attrs) elif 'simplecounter' == name: - maxstate = self.checkIntAttribute(name, attrs, 'maxstate', None) + maxstate = self.check_int_attribute(name, attrs, 'maxstate', None) if (maxstate is not None) and (0 > maxstate): - self.handleError('Element simplecounter attribute maxstate "%s" is negative' % (attrs['maxstate'], )) - digits = self.checkIntAttribute(name, attrs, 'digits', None) + self.handle_error('Element simplecounter attribute maxstate "%s" is negative' % (attrs['maxstate'], )) + digits = self.check_int_attribute(name, attrs, 'digits', None) if (digits is not None) and (0 >= digits): - self.handleError('Element simplecounter attribute digits "%s" is not positive' % (attrs['digits'], )) - align = self.checkIntAttribute(name, attrs, 'align', None) + self.handle_error('Element simplecounter attribute digits "%s" is not positive' % (attrs['digits'], )) + align = self.check_int_attribute(name, attrs, 'align', None) if (align is not None) and ((0 > align) or (2 < align)): - self.handleError('Element simplecounter attribute align "%s" not in valid range 0-2' % (attrs['align'], )) - self.checkComponent(name, attrs) + self.handle_error('Element simplecounter attribute align "%s" not in valid range 0-2' % (attrs['align'], )) + self.check_component(name, attrs) elif 'image' == name: - if 'file' not in attrs: - self.handleError('Element image missing attribute file') - self.checkComponent(name, attrs) + self.have_file = 'file' in attrs + self.have_data = None + self.check_component(name, attrs) elif 'reel' == name: # TODO: validate symbollist and improve validation of other attributes - self.checkIntAttribute(name, attrs, 'stateoffset', None) - numsymbolsvisible = self.checkIntAttribute(name, attrs, 'numsymbolsvisible', None) + self.check_int_attribute(name, attrs, 'stateoffset', None) + numsymbolsvisible = self.check_int_attribute(name, attrs, 'numsymbolsvisible', None) if (numsymbolsvisible is not None) and (0 >= numsymbolsvisible): - self.handleError('Element reel attribute numsymbolsvisible "%s" not positive' % (attrs['numsymbolsvisible'], )) - reelreversed = self.checkIntAttribute(name, attrs, 'reelreversed', None) + self.handle_error('Element reel attribute numsymbolsvisible "%s" not positive' % (attrs['numsymbolsvisible'], )) + reelreversed = self.check_int_attribute(name, attrs, 'reelreversed', None) if (reelreversed is not None) and ((0 > reelreversed) or (1 < reelreversed)): - self.handleError('Element reel attribute reelreversed "%s" not in valid range 0-1' % (attrs['reelreversed'], )) - beltreel = self.checkIntAttribute(name, attrs, 'beltreel', None) + self.handle_error('Element reel attribute reelreversed "%s" not in valid range 0-1' % (attrs['reelreversed'], )) + beltreel = self.check_int_attribute(name, attrs, 'beltreel', None) if (beltreel is not None) and ((0 > beltreel) or (1 < beltreel)): - self.handleError('Element reel attribute beltreel "%s" not in valid range 0-1' % (attrs['beltreel'], )) - self.checkComponent(name, attrs) + self.handle_error('Element reel attribute beltreel "%s" not in valid range 0-1' % (attrs['beltreel'], )) + self.check_component(name, attrs) else: - self.handleError('Encountered unexpected element %s' % (name, )) + self.handle_error('Encountered unexpected element %s' % (name, )) self.ignored_depth = 1 def elementEndHandler(self, name): @@ -428,16 +502,25 @@ class LayoutChecker(Minifyer): def componentStartHandler(self, name, attrs): if 'bounds' == name: - self.checkBounds(attrs) + state = self.check_int_attribute(name, 
attrs, 'state', 0) + if state is not None: + if 0 > state: + self.handle_error('Element bounds attribute state "%s" is negative' % (attrs['state'], )) + if state in self.have_bounds[-1]: + self.handle_error('Duplicate bounds for state %d (previous %s)' % (state, self.have_bounds[-1][state])) + else: + self.have_bounds[-1][state] = self.format_location() + self.check_bounds(attrs) elif 'color' == name: - if self.have_color[-1]: - self.handleError('Duplicate color element') - else: - self.have_color[-1] = True - self.checkColorChannel(attrs, 'red') - self.checkColorChannel(attrs, 'green') - self.checkColorChannel(attrs, 'blue') - self.checkColorChannel(attrs, 'alpha') + state = self.check_int_attribute(name, attrs, 'state', 0) + if state is not None: + if 0 > state: + self.handle_error('Element color attribute state "%s" is negative' % (attrs['state'], )) + if state in self.have_color[-1]: + self.handle_error('Duplicate color for state %d (previous %s)' % (state, self.have_color[-1][state])) + else: + self.have_color[-1][state] = self.format_location() + self.check_color(attrs) self.ignored_depth = 1 def componentEndHandler(self, name): @@ -445,116 +528,203 @@ class LayoutChecker(Minifyer): self.have_color.pop() self.handlers.pop() + def imageComponentStartHandler(self, name, attrs): + if 'data' == name: + if self.have_data is not None: + self.handle_error('Element image has multiple data child elements (previous %s)' % (self.have_data)) + else: + self.have_data = self.format_location() + if self.have_file: + self.handle_error('Element image has attribute file and child element data') + self.ignored_depth = 1 + else: + self.componentStartHandler(name, attrs) + + def imageComponentEndHandler(self, name): + if (not self.have_file) and (self.have_data is None): + self.handle_error('Element image missing attribute file or child element data') + del self.have_file + del self.have_data + self.componentEndHandler(name) + def groupViewStartHandler(self, name, attrs): - if (name in self.OBJECTS) or ('element' == name): - refattr = 'ref' if 'element' == name else 'element' - if refattr not in attrs: - self.handleError('Element %s missing attribute %s' % (name, refattr)) - elif attrs[refattr] not in self.referenced_elements: - self.referenced_elements[attrs[refattr]] = self.formatLocation() - if ('blend' in attrs) and (attrs['blend'] not in self.BLENDMODES) and not self.VARPATTERN.match(attrs['blend']): - self.handleError('Element %s attribute blend "%s" is unsupported' % (name, attrs['blend'])) - if 'inputtag' in attrs: - if 'inputmask' not in attrs: - self.handleError('Element %s has inputtag attribute without inputmask attribute' % (name, )) - self.checkTag(attrs['inputtag'], name, 'inputtag') - elif 'inputmask' in attrs: - self.handleError('Element %s has inputmask attribute without inputtag attribute' % (name, )) - inputraw = self.checkIntAttribute(name, attrs, 'inputraw', None) - if (inputraw is not None): - if 'inputmask' not in attrs: - self.handleError('Element %s has inputraw attribute without inputmask attribute' % (name, )) - if 'inputtag' not in attrs: - self.handleError('Element %s has inputraw attribute without inputtag attribute' % (name, )) - if ((0 > inputraw) or (1 < inputraw)): - self.handleError('Element %s attribute inputraw "%s" not in valid range 0-1' % (name, attrs['inputraw'])) - inputmask = self.checkIntAttribute(name, attrs, 'inputmask', None) - if (inputmask is not None) and (0 == inputmask): - if (inputraw is None) or (0 == inputraw): - self.handleError('Element %s has 
attribute inputmask "%s" is zero' % (name, attrs['inputmask'])) - self.handlers.append((self.objectStartHandler, self.objectEndHandler)) - self.have_bounds.append(False) - self.have_orientation.append(False) + if 'element' == name: + if 'ref' not in attrs: + self.handle_error('Element %s missing attribute ref' % (name, )) + elif attrs['ref'] not in self.referenced_elements: + self.referenced_elements[attrs['ref']] = self.format_location() + self.check_view_item(name, attrs) + self.startViewItem(name) elif 'screen' == name: if 'index' in attrs: - index = self.checkIntAttribute(name, attrs, 'index', None) + index = self.check_int_attribute(name, attrs, 'index', None) if (index is not None) and (0 > index): - self.handleError('Element screen attribute index "%s" is negative' % (attrs['index'], )) + self.handle_error('Element screen attribute index "%s" is negative' % (attrs['index'], )) if 'tag' in attrs: - self.handleError('Element screen has both index and tag attributes') + self.handle_error('Element screen has both index and tag attributes') if 'tag' in attrs: tag = attrs['tag'] - self.checkTag(tag, name, 'tag') + self.check_tag(tag, name, 'tag') if self.BADTAGPATTERN.search(tag): - self.handleError('Element screen attribute tag "%s" contains invalid characters' % (tag, )) - self.handlers.append((self.objectStartHandler, self.objectEndHandler)) - self.have_bounds.append(False) - self.have_orientation.append(False) + self.handle_error('Element screen attribute tag "%s" contains invalid characters' % (tag, )) + self.check_view_item(name, attrs) + self.startViewItem(name) elif 'group' == name: if 'ref' not in attrs: - self.handleError('Element group missing attribute ref') - elif attrs['ref'] not in self.referenced_groups: - self.referenced_groups[attrs['ref']] = self.formatLocation() - self.handlers.append((self.objectStartHandler, self.objectEndHandler)) - self.have_bounds.append(False) - self.have_orientation.append(False) + self.handle_error('Element group missing attribute ref') + else: + if attrs['ref'] not in self.referenced_groups: + self.referenced_groups[attrs['ref']] = self.format_location() + if (not self.VARPATTERN.match(attrs['ref'])) and (attrs['ref'] in self.group_collections): + for n, l in self.group_collections[attrs['ref']].items(): + if n not in self.current_collections: + self.current_collections[n] = l + else: + self.handle_error('Element group instantiates collection with duplicate name "%s" from %s (previous %s)' % (n, l, self.current_collections[n])) + self.startViewItem(name) elif 'repeat' == name: if 'count' not in attrs: - self.handleError('Element repeat missing attribute count') + self.handle_error('Element repeat missing attribute count') else: - count = self.checkIntAttribute(name, attrs, 'count', None) + count = self.check_int_attribute(name, attrs, 'count', None) if (count is not None) and (0 >= count): - self.handleError('Element repeat attribute count "%s" is negative' % (attrs['count'], )) + self.handle_error('Element repeat attribute count "%s" is negative' % (attrs['count'], )) self.variable_scopes.append({ }) self.repeat_depth[-1] += 1 + elif 'collection' == name: + if 'name' not in attrs: + self.handle_error('Element collection missing attribute name') + elif not self.VARPATTERN.match(attrs['name']): + if attrs['name'] not in self.current_collections: + self.current_collections[attrs['name']] = self.format_location() + else: + self.handle_error('Element collection has duplicate name (previous %s)' % (self.current_collections[attrs['name']], )) + if 
attrs.get('visible', 'yes') not in self.YESNO: + self.handle_error('Element collection attribute visible "%s" is not "yes" or "no"' % (attrs['visible'], )) + self.variable_scopes.append({ }) + self.collection_depth += 1 elif 'param' == name: - self.checkParameter(attrs) + self.check_parameter(attrs) self.ignored_depth = 1 elif 'bounds' == name: - self.checkBounds(attrs) + if self.have_bounds[-1] is not None: + self.handle_error('Duplicate element bounds (previous %s)' % (self.have_bounds[-1], )) + else: + self.have_bounds[-1] = self.format_location() + self.check_bounds(attrs) if self.repeat_depth[-1]: - self.handleError('Element bounds inside repeat') + self.handle_error('Element bounds inside repeat') + elif self.collection_depth: + self.handle_error('Element bounds inside collection') self.ignored_depth = 1 else: - self.handleError('Encountered unexpected element %s' % (name, )) + self.handle_error('Encountered unexpected element %s' % (name, )) self.ignored_depth = 1 def groupViewEndHandler(self, name): self.variable_scopes.pop() - if self.repeat_depth[-1]: + if 'collection' == name: + self.collection_depth -= 1 + elif self.repeat_depth[-1]: self.repeat_depth[-1] -= 1 else: + del self.item_ids + self.current_collections = None self.repeat_depth.pop() self.have_bounds.pop() self.handlers.pop() - def objectStartHandler(self, name, attrs): - if 'bounds' == name: - self.checkBounds(attrs) + def viewItemStartHandler(self, name, attrs): + if 'animate' == name: + if isinstance(self.have_bounds[-1], dict): + if 'inputtag' in attrs: + if 'name' in attrs: + self.handle_error('Element animate has both attribute inputtag and attribute name') + self.check_tag(attrs['inputtag'], name, 'inputtag') + elif 'name' not in attrs: + self.handle_error('Element animate has neither attribute inputtag nor attribute name') + self.check_int_attribute(name, attrs, 'mask', None) + else: + self.handle_error('Encountered unexpected element %s' % (name, )) + elif 'bounds' == name: + if self.have_bounds[-1] is None: + self.have_bounds[-1] = self.format_location() + elif isinstance(self.have_bounds[-1], dict): + state = self.check_int_attribute(name, attrs, 'state', 0) + if state is not None: + if 0 > state: + self.handle_error('Element bounds attribute state "%s" is negative' % (attrs['state'], )) + if state in self.have_bounds[-1]: + self.handle_error('Duplicate bounds for state %d (previous %s)' % (state, self.have_bounds[-1][state])) + else: + self.have_bounds[-1][state] = self.format_location() + else: + self.handle_error('Duplicate element bounds (previous %s)' % (self.have_bounds[-1], )) + self.check_bounds(attrs) elif 'orientation' == name: - self.checkOrientation(attrs) + self.check_orientation(attrs) + elif 'color' == name: + if self.have_color[-1] is None: + self.have_color[-1] = self.format_location() + elif isinstance(self.have_color[-1], dict): + state = self.check_int_attribute(name, attrs, 'state', 0) + if state is not None: + if 0 > state: + self.handle_error('Element color attribute state "%s" is negative' % (attrs['state'], )) + if state in self.have_color[-1]: + self.handle_error('Duplicate color for state %d (previous %s)' % (state, self.have_color[-1][state])) + else: + self.have_color[-1][state] = self.format_location() + else: + self.handle_error('Duplicate element color (previous %s)' % (self.have_color[-1], )) + self.check_color(attrs) + elif ('xscroll' == name) or ('yscroll' == name): + have_scroll = self.have_xscroll if 'xscroll' == name else self.have_yscroll + if have_scroll[-1] is None: + 
self.handle_error('Encountered unexpected element %s' % (name, )) + elif have_scroll[-1]: + self.handle_error('Duplicate element %s' % (name, )) + else: + have_scroll[-1] = self.format_location() + self.check_float_attribute(name, attrs, 'size', 1.0) + self.check_bool_attribute(name, attrs, 'wrap', False) + if 'inputtag' in attrs: + if 'name' in attrs: + self.handle_error('Element %s has both attribute inputtag and attribute name' % (name, )) + self.check_tag(attrs['inputtag'], name, 'inputtag') + self.check_int_attribute(name, attrs, 'mask', None) + self.check_int_attribute(name, attrs, 'min', None) + self.check_int_attribute(name, attrs, 'max', None) + else: + self.handle_error('Encountered unexpected element %s' % (name, )) self.ignored_depth = 1 - def objectEndHandler(self, name): + def viewItemEndHandler(self, name): self.have_bounds.pop() self.have_orientation.pop() + self.have_color.pop() + self.have_xscroll.pop() + self.have_yscroll.pop() self.handlers.pop() def setDocumentLocator(self, locator): self.locator = locator - super(LayoutChecker, self).setDocumentLocator(locator) + super().setDocumentLocator(locator) def startDocument(self): self.handlers = [(self.rootStartHandler, self.rootEndHandler)] self.ignored_depth = 0 self.variable_scopes = [ ] self.repeat_depth = [ ] + self.collection_depth = 0 self.have_bounds = [ ] self.have_orientation = [ ] self.have_color = [ ] + self.have_xscroll = [ ] + self.have_yscroll = [ ] self.generated_element_names = False self.generated_group_names = False - super(LayoutChecker, self).startDocument() + super().startDocument() def endDocument(self): self.locator = None @@ -563,36 +733,41 @@ class LayoutChecker(Minifyer): self.views.clear() self.referenced_elements.clear() self.referenced_groups.clear() + self.group_collections.clear() + self.current_collections = None del self.handlers del self.ignored_depth del self.variable_scopes del self.repeat_depth + del self.collection_depth del self.have_bounds del self.have_orientation del self.have_color + del self.have_xscroll + del self.have_yscroll del self.generated_element_names del self.generated_group_names - super(LayoutChecker, self).endDocument() + super().endDocument() def startElement(self, name, attrs): if 0 < self.ignored_depth: self.ignored_depth += 1 else: self.handlers[-1][0](name, attrs) - super(LayoutChecker, self).startElement(name, attrs) + super().startElement(name, attrs) def endElement(self, name): if 0 < self.ignored_depth: self.ignored_depth -= 1 else: self.handlers[-1][1](name) - super(LayoutChecker, self).endElement(name) + super().endElement(name) -def compressLayout(src, dst, comp): +def compress_layout(src, dst, comp): state = [0, 0] def write(block): - for ch in bytearray(block): + for octet in bytearray(block): if 0 == state[0]: dst('\t') elif 0 == (state[0] % 32): @@ -600,7 +775,7 @@ def compressLayout(src, dst, comp): else: dst(', ') state[0] += 1 - dst('%3u' % (ch)) + dst('%3u' % (octet, )) def output(text): block = text.encode('UTF-8') @@ -617,7 +792,7 @@ def compressLayout(src, dst, comp): write(comp.flush()) dst('\n') except xml.sax.SAXException as exception: - print('fatal error: %s' % (exception)) + print('fatal error: %s' % (exception, )) raise XmlError('Fatal error parsing XML') if (content_handler.errors > 0) or (error_handler.errors > 0) or (error_handler.warnings > 0): raise XmlError('Error(s) and/or warning(s) parsing XML') @@ -625,7 +800,7 @@ def compressLayout(src, dst, comp): return state[1], state[0] -class BlackHole(object): +class BlackHole: def 
write(self, *args): pass def close(self): @@ -649,14 +824,14 @@ if __name__ == '__main__': varname = base varname = 'layout_' + re.sub('[^0-9A-Za-z_]', '_', varname) - comp_type = 1 + comp_type = 'internal_layout::compression::ZLIB' try: dst = open(dstfile,'w') if dstfile is not None else BlackHole() dst.write('static const unsigned char %s_data[] = {\n' % (varname)) - byte_count, comp_size = compressLayout(srcfile, lambda x: dst.write(x), zlib.compressobj()) + byte_count, comp_size = compress_layout(srcfile, dst.write, zlib.compressobj()) dst.write('};\n\n') dst.write('const internal_layout %s = {\n' % (varname)) - dst.write('\t%d, sizeof(%s_data), %d, %s_data\n' % (byte_count, varname, comp_type, varname)) + dst.write('\t%d, sizeof(%s_data), %s, %s_data\n' % (byte_count, varname, comp_type, varname)) dst.write('};\n') dst.close() except XmlError: diff --git a/scripts/build/file2lines.py b/scripts/build/file2lines.py new file mode 100755 index 00000000000..1a8cf897656 --- /dev/null +++ b/scripts/build/file2lines.py @@ -0,0 +1,96 @@ +#!/usr/bin/python +## +## license:BSD-3-Clause +## copyright-holders:Vas Crabb + +import io +import os +import os.path +import re +import sys + + +def write_output(text): + try: + dst.write(text) + except IOError: + if dstfile is not None: + sys.stderr.write('Error writing to output file \'%s\'\n' % dstfile) + dst.close() + os.remove(dstfile) + else: + sys.stderr.write('Error writing to output\n') + sys.exit(3) + + +if __name__ == '__main__': + if (len(sys.argv) > 4) or (len(sys.argv) < 2): + print('Usage:') + print(' file2lines <source.txt> [<output.h> [<varname>]]') + sys.exit(0 if len(sys.argv) <= 1 else 1) + + srcfile = sys.argv[1] + dstfile = sys.argv[2] if len(sys.argv) >= 3 else None + if len(sys.argv) >= 4: + varname = sys.argv[3] + else: + varname = os.path.basename(srcfile) + base, ext = os.path.splitext(varname) + if ext.lower() == '.txt': + varname = base + varname = 'lines_' + re.sub('[^0-9A-Za-z_]', '_', varname) + + dst = None + try: + with io.open(srcfile, 'r', encoding='utf-8') as src: + if dstfile is not None: + try: + dst = io.open(dstfile, 'w', encoding='utf-8') + except IOError: + sys.stderr.write('Unable to open output file \'%s\'\n' % dstfile) + sys.exit(3) + else: + dst = sys.stdout + write_output(u'char const *const %s[] = {\n' % varname) + for line in src: + if line[-1] == u'\n': + line = line[:-1] + write_output(u'\t\t"') + i = 0 + while i < len(line): + for j in range(i, len(line) + 1): + if j < len(line): + ch = line[j] + if (ch < u' ') or (ch > u'~') or (ch in u'\"\\'): + break + if j > i: + write_output(line[i:j]) + if j < len(line): + ch = line[j] + if ch == u'\a': + write_output(u'\\a') + elif ch == u'\f': + write_output(u'\\f') + elif ch == u'\t': + write_output(u'\\t') + elif ch == u'\v': + write_output(u'\\v') + elif ch in u'\"\\': + write_output(u'\\' + ch) + else: + ch = ord(ch) + if ch < 0x20: + write_output(u'\\{0:03o}'.format(ch)) + elif ch < 0x10000: + write_output(u'\\u{0:04X}'.format(ch)) + else: + write_output(u'\\U{0:08X}'.format(ch)) + i = j + 1 + write_output(u'",\n') + write_output(u'\t\tnullptr };\n') + except IOError: + sys.stderr.write('Error reading input file \'%s\'\n' % srcfile) + if (dstfile is not None) and (dst is not None): + dst.close() + os.remove(dstfile) + sys.exit(2) diff --git a/scripts/build/makedep.py b/scripts/build/makedep.py index be885f1c6b0..860c11a890f 100644..100755 --- a/scripts/build/makedep.py +++ b/scripts/build/makedep.py @@ -1,261 +1,1083 @@ #!/usr/bin/python ## ## 
license:BSD-3-Clause -## copyright-holders:Miodrag Milanovic - -from __future__ import with_statement +## copyright-holders:Vas Crabb +import argparse +import glob +import io +import os.path import sys -## to ignore include of emu.h add it always to list +import xml.sax + + +def path_components(path): + result = [ ] + while True: + path, basename = os.path.split(path) + if basename: + result.append(basename) + else: + if path: + result.append(path) + return tuple(reversed(result)) + + +class ParserBase: + def process_lines(self, inputfile): + self.input_line = 1 + for line in inputfile: + start = 0 + if line.endswith('\n'): + line = line[:-1] + used = 0 + while used is not None: + start += used + used = self.processors[self.parse_state](line[start:]) + self.input_line += 1 + + +class CppParser(ParserBase): + TOKEN_LEAD = frozenset( + [chr(x) for x in range(ord('A'), ord('Z') + 1)] + + [chr(x) for x in range(ord('a'), ord('z') + 1)] + + ['_']) + TOKEN_CONTINUATION = frozenset( + [chr(x) for x in range(ord('0'), ord('9') + 1)] + + [chr(x) for x in range(ord('A'), ord('Z') + 1)] + + [chr(x) for x in range(ord('a'), ord('z') + 1)] + + ['_']) + HEXADECIMAL_DIGIT = frozenset( + [chr(x) for x in range(ord('0'), ord('9') + 1)] + + [chr(x) for x in range(ord('A'), ord('F') + 1)] + + [chr(x) for x in range(ord('a'), ord('f') + 1)]) + + class Handler: + def line(self, text): + pass + + def comment(self, text): + pass + + def line_comment(self, text): + pass + + class ParseState: + DEFAULT = 0 + COMMENT = 1 + LINE_COMMENT = 2 + TOKEN = 3 + STRING_CONSTANT = 4 + CHARACTER_CONSTANT = 5 + NUMERIC_CONSTANT = 6 + + def __init__(self, handler, **kwargs): + super().__init__(**kwargs) + self.handler = handler + self.processors = { + self.ParseState.DEFAULT: self.process_default, + self.ParseState.COMMENT: self.process_comment, + self.ParseState.LINE_COMMENT: self.process_line_comment, + self.ParseState.TOKEN: self.process_token, + self.ParseState.STRING_CONSTANT: self.process_text, + self.ParseState.CHARACTER_CONSTANT: self.process_text, + self.ParseState.NUMERIC_CONSTANT: self.process_numeric } + + def parse(self, inputfile): + self.parse_state = self.ParseState.DEFAULT + self.comment_line = None + self.lead_digit = None + self.radix = None + self.line_buffer = '' + self.comment_buffer = '' + self.process_lines(inputfile) + if self.parse_state == self.ParseState.COMMENT: + raise Exception('unterminated multi-line comment beginning on line %d' % (self.comment_line, )) + elif self.parse_state == self.ParseState.CHARACTER_CONSTANT: + raise Exception('unterminated character literal on line %d' % (self.input_line, )) + elif self.parse_state == self.ParseState.STRING_CONSTANT: + raise Exception('unterminated string literal on line %d' % (self.input_line, )) + + def process_default(self, line): + escape = False + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if (ch == '"') or (ch == "'"): + self.parse_state = self.ParseState.STRING_CONSTANT if ch == '"' else self.ParseState.CHARACTER_CONSTANT + self.line_buffer += line[:pos + 1] + return pos + 1 + elif ch == '*': + if escape: + self.parse_state = self.ParseState.COMMENT + self.comment_line = self.input_line + self.line_buffer += line[:pos - 1] + ' ' + return pos + 1 + elif ch == '/': + if escape: + self.parse_state = self.ParseState.LINE_COMMENT + self.handler.line(self.line_buffer + line[:pos - 1] + ' ') + self.line_buffer = '' + return pos + 1 + elif ch in self.TOKEN_LEAD: + self.parse_state = self.ParseState.TOKEN + self.line_buffer += 
line[:pos] + return pos + elif (ch >= '0') and (ch <= '9'): + self.parse_state = self.ParseState.NUMERIC_CONSTANT + self.line_buffer += line[:pos] + return pos + escape = ch == '/' + pos += 1 + if line.endswith('\\'): + self.line_buffer += line[:-1] + else: + self.handler.line(self.line_buffer + line) + self.line_buffer = '' + + def process_comment(self, line): + escape = False + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if escape and (ch == '/'): + self.parse_state = self.ParseState.DEFAULT + self.comment_line = None + self.handler.comment(self.comment_buffer + line[:pos - 1]) + self.comment_buffer = '' + return pos + 1 + escape = ch == '*' + pos += 1 + if line.endswith('\\'): + self.comment_buffer += line[:-1] + else: + self.comment_buffer += line + '\n' + + def process_line_comment(self, line): + self.parse_state = self.ParseState.DEFAULT + self.handler.line_comment(self.comment_buffer + line) + self.comment_buffer = '' + + def process_token(self, line): + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if ch not in self.TOKEN_CONTINUATION: + self.parse_state = self.ParseState.DEFAULT + self.line_buffer += line[:pos] + return pos + pos += 1 + self.parse_state = self.ParseState.DEFAULT + self.handler.line(self.line_buffer + line) + self.line_buffer = '' + + def process_text(self, line): + quote = '"' if self.parse_state == self.ParseState.STRING_CONSTANT else "'" + escape = False + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if (ch == quote) and not escape: + self.parse_state = self.ParseState.DEFAULT + self.line_buffer += line[:pos + 1] + return pos + 1 + escape = (ch == '\\') and not escape + pos += 1 + if line.endswith('\\'): + self.line_buffer += line[:-1] + else: + t = 'string' if self.ParseState == self.ParseState.STRING_CONSTANT else 'character' + raise Exception('unterminated %s literal on line %d' % (t, self.input_line)) + + def process_numeric(self, line): + escape = False + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if self.lead_digit is None: + self.lead_digit = ch + if ch != '0': + self.radix = 10 + elif self.radix is None: + if ch == "'": + if escape: + raise Exception('adjacent digit separators on line %d' % (self.input_line, )) + else: + escape = True + elif (ch == 'B') or (ch == 'b'): + self.radix = 2 + elif (ch == 'X') or (ch == 'x'): + self.radix = 16 + elif (ch >= '0') and (ch <= '7'): + self.radix = 8 + else: + self.parse_state = self.ParseState.DEFAULT # probably an argument to a token-pasting or stringifying macro + else: + if ch == "'": + if escape: + raise Exception('adjacent digit separators on line %d' % (self.input_line, )) + else: + escape = True + else: + escape = False + if self.radix == 2: + if (ch < '0') or (ch > '1'): + self.parse_state = self.ParseState.DEFAULT + elif self.radix == 8: + if (ch < '0') or (ch > '7'): + self.parse_state = self.ParseState.DEFAULT + elif self.radix == 10: + if (ch < '0') or (ch > '9'): + self.parse_state = self.ParseState.DEFAULT + elif self.radix == 16: + if ch not in self.HEXADECIMAL_DIGIT: + self.parse_state = self.ParseState.DEFAULT + if self.parse_state == self.ParseState.DEFAULT: + self.lead_digit = None + self.radix = None + self.line_buffer += line[:pos] + return pos + pos += 1 + self.parse_state = self.ParseState.DEFAULT + self.lead_digit = None + self.radix = None + self.handler.line(self.line_buffer + line) + self.line_buffer = '' + + +class LuaParser(ParserBase): + class Handler: + def short_comment(self, text): 
+ pass + + def long_comment_start(self, level): + pass + + def long_comment_line(self, text): + pass + + def long_comment_end(self): + pass -files_included = ['src/emu/emu.h'] + class ParseState: + DEFAULT = 0 + SHORT_COMMENT = 1 + LONG_COMMENT = 2 + STRING_CONSTANT = 3 + LONG_STRING_CONSTANT = 4 -include_dirs = ['src/emu/', 'src/devices/', 'src/mame/', 'src/lib/'] + def __init__(self, handler, **kwargs): + super().__init__(**kwargs) + self.handler = handler + self.processors = { + self.ParseState.DEFAULT: self.process_default, + self.ParseState.SHORT_COMMENT: self.process_short_comment, + self.ParseState.LONG_COMMENT: self.process_long_comment, + self.ParseState.STRING_CONSTANT: self.process_string_constant, + self.ParseState.LONG_STRING_CONSTANT: self.process_long_string_constant } -mappings = dict() + def parse(self, inputfile): + self.parse_state = self.ParseState.DEFAULT + self.long_bracket_level = None + self.escape = False + self.block_line = None + self.block_level = None + self.string_quote = None + self.process_lines(inputfile) + if self.parse_state == self.ParseState.LONG_COMMENT: + raise Exception('unterminated long comment beginning on line %d' % (self.block_line, )) + if self.parse_state == self.ParseState.STRING_CONSTANT: + raise Exception('unterminated string literal on line %d' % (self.input_line, )) + if self.parse_state == self.ParseState.LONG_STRING_CONSTANT: + raise Exception('unterminated long string literal beginning on line %d' % (self.block_line, )) -deps_files_included = [ ] + def process_default(self, line): + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if (ch == '"') or (ch == "'"): + self.string_quote = ch + self.parse_state = self.ParseState.STRING_CONSTANT + self.long_bracket_level = None + self.escape = False + return pos + 1 + elif (ch == '-') and self.escape: + self.parse_state = self.ParseState.SHORT_COMMENT + self.long_bracket_level = None + self.escape = False + return pos + 1 + elif self.long_bracket_level is not None: + if ch == '=': + self.long_bracket_level += 1 + elif ch == '[': + self.block_line = self.input_line + self.block_level = self.long_bracket_level + self.parse_state = self.ParseState.LONG_STRING_CONSTANT + self.long_bracket_level = None + self.escape = False + return pos + 1 + else: + self.long_bracket_level = None + elif ch == '[': + self.long_bracket_level = 0 + self.escape = ch == '-' + pos += 1 + self.escape = False -deps_include_dirs = ['src/mame/'] + def process_short_comment(self, line): + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if self.long_bracket_level is not None: + if ch == '=': + self.long_bracket_level += 1 + elif ch == '[': + self.block_line = self.input_line + self.block_level = self.long_bracket_level + self.parse_state = self.ParseState.LONG_COMMENT + self.long_bracket_level = None + self.handler.long_comment_start(self.block_level) + return pos + 1 + else: + self.long_bracket_level = None + elif ch == '[': + self.long_bracket_level = 0 + if self.long_bracket_level is None: + self.handler.short_comment(line[pos:]) + self.parse_state = self.ParseState.DEFAULT + return None + pos += 1 + self.handler.short_comment(line) + self.parse_state = self.ParseState.DEFAULT -components = [ ] + def process_long_comment(self, line): + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if self.long_bracket_level is not None: + if ch == '=': + self.long_bracket_level += 1 + elif ch == ']': + if self.long_bracket_level == self.block_level: + if self.parse_state == 
self.ParseState.LONG_COMMENT: + self.handler.long_comment_line(line[:endpos]) + self.handler.long_comment_end() + self.parse_state = self.ParseState.DEFAULT + return pos + 1 + else: + self.long_bracket_level = 0 + else: + self.long_bracket_level = None + elif ch == ']': + endpos = pos + self.long_bracket_level = 0 + pos += 1 + self.long_bracket_level = None + self.handler.long_comment_line(line) -drivers = [ ] + def process_string_constant(self, line): + pos = 0 + length = len(line) + while pos < length: + ch = line[pos] + if (ch == self.string_quote) and not self.escape: + self.parse_state = self.ParseState.DEFAULT + return pos + 1 + self.escape = (ch == '\\') and not self.escape + pos += 1 + if not self.escape: + raise Exception('unterminated string literal on line %d' % (self.input_line, )) + + def process_long_string_constant(self, line): + self.process_long_comment(line) # this works because they're both closed by a matching long bracket + + +class DriverFilter: + DRIVER_CHARS = frozenset( + [chr(x) for x in range(ord('0'), ord('9') + 1)] + + [chr(x) for x in range(ord('a'), ord('z') + 1)] + + ['_']) + + def parse_filter(self, root, path, sourcefile, inclusion, exclusion): + def line_hook(text): + text = text.strip() + if text.startswith('#'): + do_parse(os.path.join(os.path.dirname(n), text[1:].lstrip())) + elif text.startswith('+'): + text = text[1:].lstrip() + if not text: + sys.stderr.write('%s:%s: Empty driver name\n' % (path, parser.input_line)) + sys.exit(1) + elif not all(x in self.DRIVER_CHARS for x in text): + sys.stderr.write('%s:%s: Invalid character in driver name "%s"\n' % (path, parser.input_line, text)) + sys.exit(1) + inclusion(text) + elif text.startswith('-'): + text = text[1:].lstrip() + if not text: + sys.stderr.write('%s:%s: Empty driver name\n' % (path, parser.input_line)) + sys.exit(1) + elif not all(x in self.DRIVER_CHARS for x in text): + sys.stderr.write('%s:%s: Invalid character in driver name "%s"\n' % (path, parser.input_line, text)) + sys.exit(1) + exclusion(text) + elif text: + if (len(text) >= 2) and ((text[0] == '"') or (text[0] == "'")) and (text[0] == text[-1]): + text = text[1:-1] + paths = glob.glob(os.path.join(basepath, *text.split('/'))) + if not paths: + sys.stderr.write('%s:%s: Pattern "%s" did not match any source files\n' % (path, parser.input_line, text)) + sys.exit(1) + for source in paths: + sourcefile('/'.join(path_components(os.path.relpath(source, basepath)))) + + try: + filterfile = io.open(path, 'r', encoding='utf-8') + except IOError: + sys.stderr.write('Unable to open filter file "%s"\n' % (path, )) + sys.exit(1) + with filterfile: + basepath = os.path.join(root, 'src', 'mame') + handler = CppParser.Handler() + handler.line = line_hook + parser = CppParser(handler) + try: + parser.parse(filterfile) + except IOError: + sys.stderr.write('Error reading filter file "%s"\n' % (path, )) + sys.exit(1) + except Exception as e: + sys.stderr.write('Error parsing filter file "%s": %s\n' % (path, e)) + sys.exit(1) + + def parse_list(self, path, sourcefile, driver): + def line_hook(text): + text = text.strip() + if text.startswith('#'): + do_parse(os.path.join(os.path.dirname(n), text[1:].lstrip())) + elif text.startswith('@'): + parts = text[1:].lstrip().split(':', 1) + parts[0] = parts[0].strip() + if (parts[0] == 'source') and (len(parts) == 2): + parts[1] = parts[1].strip() + if not parts[1]: + sys.stderr.write('%s:%s: Empty source file name "%s"\n' % (path, parser.input_line, text)) + sys.exit(1) + else: + sourcefile(parts[1]) + else: + 
sys.stderr.write('%s:%s: Unsupported directive "%s"\n' % (path, parser.input_line, text)) + sys.exit(1) + elif text: + if not all(x in self.DRIVER_CHARS for x in text): + sys.stderr.write('%s:%s: Invalid character in driver name "%s"\n' % (path, parser.input_line, text)) + sys.exit(1) + else: + driver(text) -def file_exists(root, srcfile, folder, inc_dir): - includes = [ folder ] - includes.extend(inc_dir) - for line in includes: try: - fp = open(root + line + srcfile, 'r') - fp.close() - return line + srcfile + listfile = io.open(path, 'r', encoding='utf-8') except IOError: + sys.stderr.write('Unable to open list file "%s"\n' % (path, )) + sys.exit(1) + with listfile: + handler = CppParser.Handler() + handler.line = line_hook + parser = CppParser(handler) + try: + parser.parse(listfile) + except IOError: + sys.stderr.write('Error reading list file "%s"\n' % (path, )) + sys.exit(1) + except Exception as e: + sys.stderr.write('Error parsing list file "%s": %s\n' % (path, e)) + sys.exit(1) + + +class DriverLister(DriverFilter): + def __init__(self, options, **kwargs): + super().__init__(**kwargs) + + def includesource(filename): + sources.add(filename) + + def includedriver(shortname): + includes.add(shortname) + excludes.discard(shortname) + + def excludedriver(shortname): + includes.discard(shortname) + excludes.add(shortname) + + def sourcefile(filename): + if self.sources: + state['includesrc'] = filename in self.sources + + def driver(shortname): + if state['includesrc'] and (shortname not in self.excludes): + drivers.add(shortname) + + sources = set() + includes = set() + excludes = set() + if options.filter is not None: + self.parse_filter(options.root, options.filter, includesource, includedriver, excludedriver) + sys.stderr.write('%d source file(s) found\n' % (len(sources), )) + self.sources = frozenset(sources) + self.includes = frozenset(includes) + self.excludes = frozenset(excludes) + + drivers = set() + state = { 'includesrc': True } + self.parse_list(options.list, sourcefile, driver) + + for driver in self.includes: + drivers.add(driver) + sys.stderr.write('%d driver(s) found\n' % (len(drivers), )) + drivers.add('___empty') + self.drivers = sorted(drivers) + + def write_source(self, f): + f.write( + '#include "emu.h"\n' \ + '\n' \ + '#include "drivenum.h"\n' \ + '\n') + for driver in self.drivers: + f.write('GAME_EXTERN(%s);\n' % driver) + f.write( + '\n' \ + 'game_driver const *const driver_list::s_drivers_sorted[%d] =\n' \ + '{\n' % (len(self.drivers), )) + for driver in self.drivers: + f.write('\t&GAME_NAME(%s),\n' % driver) + f.write( + '};\n' \ + '\n' \ + 'std::size_t const driver_list::s_driver_count = %d;\n' % (len(self.drivers), )) + + +class DriverCollector(DriverFilter): + def __init__(self, options, **kwargs): + super().__init__(**kwargs) + + def includesource(filename): + sources.add(filename) + + def includedriver(shortname): + includes.add(shortname) + + def excludedriver(shortname): + includes.discard(shortname) + + def sourcefile(filename): + state['prevsource'] = filename + + def driver(shortname): + if shortname in includes: + sources.add(state['prevsource']) + + sources = set() + includes = set() + state = { 'prevsource': None } + self.parse_filter(options.root, options.filter, includesource, includedriver, excludedriver) + self.parse_list(options.list, sourcefile, driver) + sys.stderr.write('%d source file(s) found\n' % (len(sources), )) + self.sources = sorted(sources) + + +class DriverReconciler(DriverFilter): + class InfoHandler: + def __init__(self, 
drivers, **kwargs): + super().__init__(**kwargs) + self.drivers = drivers + self.bad = False + self.locator = None + self.ignored_depth = 0 + self.in_document = False + self.in_mame = False + + def startElement(self, name, attrs): + if not self.in_document: + raise xml.sax.SAXParseException('Unexpected start of element "%s"' % (name, ), None, self.locator) + elif self.ignored_depth > 0: + self.ignored_depth += 1 + elif not self.in_mame: + if name != 'mame': + raise xml.sax.SAXParseException('Unexpected start of element "%s"' % (name, ), None, self.locator) + self.in_mame = True + elif name != 'machine': + raise xml.sax.SAXParseException('Unexpected start of element "%s"' % (name, ), None, self.locator) + else: + runnable = attrs.get('runnable', 'yes') + if runnable == 'yes': + shortname = attrs['name'] + source = attrs['sourcefile'].replace('\\', '/') + declared = self.drivers.get(shortname) + if declared is None: + sys.stderr.write('Driver "%s" not declared in list file\n' % (shortname, )) + self.bad = True + elif declared != source: + sys.stderr.write('Driver "%s" found for source file "%s" but defined in source file "%s"\n' % (shortname, declared, source)) + self.bad = True + self.ignored_depth = 1 + + def endElement(self, name): + if self.ignored_depth > 0: + self.ignored_depth -= 1 + elif self.in_mame: + if name != 'mame': + raise xml.sax.SAXParseException('Unexpected end of element "%s"' % (name, ), None, self.locator) + self.in_mame = False + else: + raise xml.sax.SAXParseException('Unexpected end of element "%s"' % (name, ), None, self.locator) + + def startDocument(self): + if self.in_document: + raise xml.sax.SAXParseException('Unexpected start of document', None, self.locator) + self.in_document = True + + def endDocument(self): + if not self.in_document: + raise xml.sax.SAXParseException('Unexpected end of document', None, self.locator) + self.in_document = False + + def setDocumentLocator(self, locator): + self.locator = locator + + def startPrefixMapping(self, prefix, uri): + pass + + def endPrefixMapping(self, prefix): pass - return '' - -def add_c_if_exists(root, fullname): - try: - fp = open(root + fullname, 'r') - fp.close() - deps_files_included.append(fullname) - except IOError: + + def characters(self, content): + pass + + def ignorableWhitespace(self, whitespace): + pass + + def processingInstruction(self, target, data): + pass + + + def __init__(self, options, **kwargs): + super().__init__(**kwargs) + + def sourcefile(filename): + if (state['prevsource'] is not None) and (not state['prevdrivers']): + sys.stderr.write('No drivers for source file "%s"\n' % (state['prevsource'], )) + self.bad = True + if filename in self.sources: + sys.stderr.write('Duplicate source file "%s"\n' % (filename, )) + state['prevdrivers'] = self.sources[filename] + self.bad = True + else: + drivers = set() + state['prevdrivers'] = drivers + self.sources[filename] = drivers + state['prevsource'] = filename + + def driver(shortname): + if shortname in self.drivers: + sys.stderr.write('Duplicate driver "%s" for source file "%s" (previously seen for source file "%s")\n' % (shortname, state['prevsource'], self.drivers[shortname])) + self.bad = True + else: + self.drivers[shortname] = state['prevsource'] + drivers = state['prevdrivers'] + if drivers is None: + sys.stderr.write('Driver "%s" found outside source file section\n' % (shortname, )) + self.bad = True + else: + drivers.add(shortname) + + state = { 'prevsource': None, 'prevdrivers': None } + self.bad = False + self.sources = { } + 
self.drivers = { } + self.parse_list(options.list, sourcefile, driver) + + def reconcile_xml(self, xmlfile): + handler = self.InfoHandler(self.drivers) + try: + xml.sax.parse(xmlfile, handler=handler) + if handler.bad: + self.bad = True + except xml.sax.SAXException as err: + sys.stderr.write('Error parsing system information file: %s\n' % (err, )) + self.bad = True + + +def split_path(path): + path = os.path.normpath(path) + result = [ ] + while True: + dirname, basename = os.path.split(path) + if dirname == path: + result.insert(0, dirname) + return result + elif basename == path: + result.insert(0, basename) + return result + else: + result.insert(0, basename) + path = dirname + + +def parse_command_line(): + parser = argparse.ArgumentParser() + parser.add_argument('-r', '--root', metavar='<srcroot>', default='.', help='path to emulator source root (defaults to working directory)') + subparsers = parser.add_subparsers(title='commands', dest='command', metavar='<command>') + + subparser = subparsers.add_parser('sourcesproject', help='generate project directives for source files') + subparser.add_argument('-t', '--target', metavar='<target>', required=True, help='generated emulator target name') + subparser.add_argument('-l', '--list', metavar='<lstfile>', required=True, help='master driver list file') + subparser.add_argument('sources', metavar='<srcfile>', nargs='+', help='source files to include') + + subparser = subparsers.add_parser('filterproject', help='generate project directives using filter file') + subparser.add_argument('-t', '--target', metavar='<target>', required=True, help='generated emulator target name') + subparser.add_argument('-f', '--filter', metavar='<fltfile>', required=True, help='input filter file') + subparser.add_argument('list', metavar='<lstfile>', help='input list file') + + subparser = subparsers.add_parser('sourcesfilter', help='generate driver filter for source files') + subparser.add_argument('-l', '--list', metavar='<lstfile>', required=True, help='master driver list file') + subparser.add_argument('sources', metavar='<srcfile>', nargs='+', help='source files to include') + + subparser = subparsers.add_parser('driverlist', help='generate driver list source') + subparser.add_argument('-f', '--filter', metavar='<fltfile>', help='input filter file') + subparser.add_argument('list', metavar='<lstfile>', help='input list file') + + subparser = subparsers.add_parser('reconcilelist', help='reconcile driver list') + subparser.add_argument('-l', '--list', metavar='<lstfile>', required=True, help='master driver list file') + subparser.add_argument('infoxml', metavar='<xmlfile>', nargs='?', help='XML system information file') + + return parser.parse_args() + + +def collect_lua_directives(options): + def short_comment_hook(text): + if text.startswith('@'): + name, action = text[1:].rstrip().rsplit(',', 1) + if name not in result: + result[name] = [ ] + result[name].append(action) + + base = os.path.join(options.root, 'scripts', 'src') + result = { } + handler = LuaParser.Handler() + handler.short_comment = short_comment_hook + parser = LuaParser(handler) + for name in ('bus', 'cpu', 'machine', 'sound', 'video', 'formats'): + path = os.path.join(base, name + '.lua') + try: + f = io.open(path, 'r', encoding='utf-8') + except IOError: + sys.stderr.write('Unable to open source file "%s"\n' % (path, )) + sys.exit(1) + try: + with f: + parser.parse(f) + except IOError: + sys.stderr.write('Error reading source file "%s"\n' % (path, )) + sys.exit(1) + except Exception as 
e: + sys.stderr.write('Error parsing source file "%s": %s\n' % (path, e)) + sys.exit(1) + return result + + +def scan_source_dependencies(root, sources): + def locate_include(path): + split = [ ] + forward = 0 + reverse = 0 + for part in path.split('/'): + if part and (part != '.'): + if part != '..': + forward += 1 + split.append(part) + elif forward: + split.pop() + forward -= 1 + else: + split.append(part) + reverse += 1 + split = tuple(split) + for incdir, depth in roots: + if (not depth) or (not reverse): + components = incdir + split + depth = depth + forward - 1 + elif depth >= reverse: + components = incdir[:-reverse] + split[reverse:] + depth = depth + forward - reverse - 1 + else: + components = incdir[:-depth] + split[depth:] + depth = forward - 1 + if os.path.isfile(os.path.join(root, *components)): + return components, depth + return None, 0 + + def test_siblings(relative, basename, depth): + pathbase = '/'.join(relative) + '/' + dirname = os.path.join(root, *relative) + for ext in ('.cpp', '.ipp', '.hxx'): + path = pathbase + basename + ext + if (path not in seen) and os.path.isfile(os.path.join(dirname, basename + ext)): + remaining.append((path, depth)) + seen.add(path) + + def line_hook(text): + text = text.lstrip() + if text.startswith('#'): + text = text[1:].lstrip() + if text.startswith('include'): + text = text[7:] + if text[:1].isspace(): + text = text.strip() + if (len(text) > 2) and (text[0] == '"') and (text[-1] == '"'): + components, depth = locate_include(text[1:-1]) + if components: + path = '/'.join(components) + if path not in seen: + remaining.append((path, depth)) + seen.add(path) + base, ext = os.path.splitext(components[-1]) + if ext.lower().startswith('.h'): + components = components[:-1] + test_siblings(components, base, depth) + if components[:2] == ('src', 'mame'): + for aspect in ('_a', '_v', '_m'): + test_siblings(components, base + aspect, depth) + + handler = CppParser.Handler() + handler.line = line_hook + parser = CppParser(handler) + seen = set('/'.join(x for x in split_path(source) if x) for source in sources) + remaining = list([(x, 0) for x in seen]) + default_roots = ((('src', 'devices'), 0), (('src', 'mame', 'shared'), 0), (('src', 'lib'), 0)) + while remaining: + source, depth = remaining.pop() + components = tuple(source.split('/')) + roots = ((components[:-1], depth), ) + default_roots + try: + f = io.open(os.path.join(root, *components), 'r', encoding='utf-8') + except IOError: + sys.stderr.write('Unable to open source file "%s"\n' % (source, )) + sys.exit(1) + try: + with f: + parser.parse(f) + except IOError: + sys.stderr.write('Error reading source file "%s"\n' % (source, )) + sys.exit(1) + except Exception as e: + sys.stderr.write('Error parsing source file "%s": %s\n' % (source, e)) + sys.exit(1) + return seen + + +def write_project(options, projectfile, mappings, sources, single): + if single: + targetsrc = '' + for source in sorted(sources): + action = mappings.get(source) + if action: + for line in action: + projectfile.write(line + '\n') + if source.startswith('src/mame/'): + targetsrc += ' MAME_DIR .. "%s",\n' % (source, ) + projectfile.write( + '\n' \ + 'function createProjects_mame_%s(_target, _subtarget)\n' \ + ' project ("mame_%s")\n' \ + ' targetsubdir(_target .."_" .. _subtarget)\n' \ + ' kind (LIBTYPE)\n' \ + ' uuid (os.uuid("drv-mame-%s"))\n' \ + ' addprojectflags()\n' \ + ' \n' \ + ' includedirs {\n' \ + ' MAME_DIR .. "src/osd",\n' \ + ' MAME_DIR .. "src/emu",\n' \ + ' MAME_DIR .. "src/devices",\n' \ + ' MAME_DIR .. 
"src/mame/shared",\n' \ + ' MAME_DIR .. "src/lib",\n' \ + ' MAME_DIR .. "src/lib/util",\n' \ + ' MAME_DIR .. "src/lib/netlist",\n' \ + ' MAME_DIR .. "3rdparty",\n' \ + ' GEN_DIR .. "mame/layout",\n' \ + ' ext_includedir("asio"),\n' \ + ' ext_includedir("flac"),\n' \ + ' ext_includedir("glm"),\n' \ + ' ext_includedir("jpeg"),\n' \ + ' ext_includedir("rapidjson"),\n' \ + ' ext_includedir("zlib"),\n' \ + ' }\n' \ + '\n' \ + ' files{\n%s' \ + ' }\n' \ + 'end\n' \ + '\n' \ + 'function linkProjects_mame_%s(_target, _subtarget)\n' \ + ' links {\n' \ + ' "mame_%s",\n' \ + ' }\n' \ + 'end\n' % (options.target, options.target, options.target, targetsrc, options.target, options.target)) + else: + libraries = { } + for source in sorted(sources): + components = source.split('/') + if (len(components) > 3) and (components[:2] == ['src', 'mame']): + line = ' MAME_DIR .. "%s",\n' % (source, ) + liblines = libraries.get(components[2]) + if liblines is not None: + liblines.append(line) + else: + libraries[components[2]] = [line] + action = mappings.get(source) + if action: + for line in action: + projectfile.write(line + '\n') + libnames = sorted(libraries.keys()) + projectfile.write( + '\n' \ + 'function createMAMEProjects(_target, _subtarget, _name)\n' \ + ' project (_name)\n' \ + ' targetsubdir(_target .."_" .. _subtarget)\n' \ + ' kind (LIBTYPE)\n' \ + ' uuid (os.uuid("drv-" .. _target .. "_" .. _subtarget .. "-" .. _name))\n' \ + ' addprojectflags()\n' \ + ' \n' \ + ' includedirs {\n' \ + ' MAME_DIR .. "src/osd",\n' \ + ' MAME_DIR .. "src/emu",\n' \ + ' MAME_DIR .. "src/devices",\n' \ + ' MAME_DIR .. "src/mame/shared",\n' \ + ' MAME_DIR .. "src/lib",\n' \ + ' MAME_DIR .. "src/lib/util",\n' \ + ' MAME_DIR .. "src/lib/netlist",\n' \ + ' MAME_DIR .. "3rdparty",\n' \ + ' GEN_DIR .. 
"mame/layout",\n' \ + ' ext_includedir("asio"),\n' \ + ' ext_includedir("flac"),\n' \ + ' ext_includedir("glm"),\n' \ + ' ext_includedir("jpeg"),\n' \ + ' ext_includedir("rapidjson"),\n' \ + ' ext_includedir("zlib"),\n' \ + ' }\n' \ + 'end\n' \ + '\n' \ + 'function linkProjects_mame_%s(_target, _subtarget)\n' \ + ' links {\n' % (options.target, )) + for lib in libnames: + if lib != 'shared': + projectfile.write(' "%s",\n' % (lib, )) + if 'shared' in libraries: + projectfile.write(' "shared",\n') + projectfile.write( + ' }\n' \ + 'end\n' \ + '\n' \ + 'function createProjects_mame_%s(_target, _subtarget)\n' \ + '\n' % (options.target, )) + for lib in libnames: + projectfile.write( + 'createMAMEProjects(_target, _subtarget, "%s")\n' \ + 'files {\n' % (lib, )) + for line in libraries[lib]: + projectfile.write(line) + projectfile.write('}\n\n') + projectfile.write('end\n') + + +def collect_sources(root, sources): + result = [ ] + for source in sources: + fullpath = os.path.join(root, source) + if os.path.isdir(fullpath): + for subdir, dirs, files in os.walk(fullpath): + for candidate in files: + if os.path.splitext(candidate)[1] == '.cpp': + if subdir != fullpath: + result.append(os.path.join(source, os.path.relpath(subdir, fullpath), candidate)) + else: + result.append(os.path.join(source, candidate)) + else: + result.append(source) + return result + + +def write_sources_project(options, projectfile): + def sourcefile(filename): + if tuple(filename.split('/')) in splitsources: + state['havedrivers'] = True + + def driver(shortname): pass -def add_rest_if_exists(root, srcfile,folder): - t = srcfile.rsplit('/', 2) - if t[1]=='includes': - t[2] = t[2].replace('.h','.cpp') - t[1] = 'drivers' - add_c_if_exists(root,"/".join(t)) - parse_file_for_deps(root, "/".join(t), folder) - t[1] = 'machine' - add_c_if_exists(root,"/".join(t)) - parse_file_for_deps(root, "/".join(t), folder) - t[1] = 'video' - add_c_if_exists(root,"/".join(t)) - parse_file_for_deps(root, "/".join(t), folder) - t[1] = 'audio' - add_c_if_exists(root,"/".join(t)) - parse_file_for_deps(root, "/".join(t), folder) - -def parse_file_for_deps(root, srcfile, folder): - try: - fp = open(root + srcfile, 'r') - except IOError: - return 1 - in_comment = 0 - linenum = 0 - for line in fp.readlines(): - content = '' - linenum+=1 - srcptr = 0 - while srcptr < len(line): - c = line[srcptr] - srcptr+=1 - if ord(c)==13 or ord(c)==10: - if ord(c)==13 and ord(line[srcptr])==10: - srcptr+=1 - continue - if c==' ' or ord(c)==9: - continue - if in_comment==1 and c=='*' and line[srcptr]=='/' : - srcptr+=1 - in_comment = 0 - continue - if in_comment: - continue - if c=='/' and line[srcptr]=='*' : - srcptr+=1 - in_comment = 1 - continue - if c=='/' and line[srcptr]=='/' : - break - content += c - content = content.strip() - if len(content)>0: - if content.startswith('#include'): - name = content[8:] - name = name.replace('"','') - fullname = file_exists(root, name, folder,deps_include_dirs) - if fullname in deps_files_included: - continue - if fullname!='': - deps_files_included.append(fullname) - add_c_if_exists(root, fullname.replace('.h','.cpp')) - add_rest_if_exists(root, fullname,folder) - newfolder = fullname.rsplit('/', 1)[0] + '/' - parse_file_for_deps(root, fullname, newfolder) - continue - fp.close() - return 0 - -def parse_file(root, srcfile, folder): - try: - fp = open(root + srcfile, 'r') - except IOError: - return 1 - in_comment = 0 - linenum = 0 - for line in fp.readlines(): - content = '' - linenum+=1 - srcptr = 0 - while srcptr < 
len(line): - c = line[srcptr] - srcptr+=1 - if ord(c)==13 or ord(c)==10: - if ord(c)==13 and ord(line[srcptr])==10: - srcptr+=1 - continue - if c==' ' or ord(c)==9: - continue - if in_comment==1 and c=='*' and line[srcptr]=='/' : - srcptr+=1 - in_comment = 0 - continue - if in_comment: - continue - if c=='/' and line[srcptr]=='*' : - srcptr+=1 - in_comment = 1 - continue - if c=='/' and line[srcptr]=='/' : - break - content += c - content = content.strip() - if len(content)>0: - if content.startswith('#include'): - name = content[8:] - name = name.replace('"','') - fullname = file_exists(root, name, folder,include_dirs) - if fullname in files_included: - continue - if "src/lib/netlist/" in fullname: - continue - if fullname!='': - if fullname in mappings.keys(): - if not(mappings[fullname] in components): - components.append(mappings[fullname]) - files_included.append(fullname) - newfolder = fullname.rsplit('/', 1)[0] + '/' - parse_file(root, fullname, newfolder) - if (fullname.endswith('.h') and not("src/emu" in fullname) and not("src/devices" in fullname) and not("src/lib" in fullname) and not("src/osd" in fullname)): - parse_file_for_deps(root, fullname.replace('.h','.cpp'), newfolder) - elif fullname.endswith('.h'): - parse_file(root, fullname.replace('.h','.cpp'), newfolder) - continue - fp.close() - return 0 - -def parse_file_for_drivers(root, srcfile): - srcfile = srcfile.replace('\\','/') - if srcfile.startswith('src/mame/drivers'): - splitname = srcfile.split('/', 4) - drivers.append(splitname[3]) - return 0 - -def parse_lua_file(srcfile): - try: - fp = open(srcfile, 'r') - except IOError: - sys.stderr.write("Unable to open source file '%s'\n" % srcfile) - return 1 - for line in fp.readlines(): - content = line.strip() - if len(content)>0: - if content.startswith('--@'): - name = content[3:] - mappings[name.rsplit(',', 1)[0]] = name.rsplit(',', 1)[1] - return 0 - -if len(sys.argv) < 5: - print('Usage:') - print(' makedep <root> <source.c> <type> <target>') - sys.exit(0) - -root = sys.argv[1] + '/' - -parse_lua_file(root +'scripts/src/bus.lua') -parse_lua_file(root +'scripts/src/cpu.lua') -parse_lua_file(root +'scripts/src/machine.lua') -parse_lua_file(root +'scripts/src/sound.lua') -parse_lua_file(root +'scripts/src/video.lua') -parse_lua_file(root +'scripts/src/formats.lua') - -for filename in sys.argv[2].rsplit(',') : - deps_files_included.append(filename.replace('\\','/')) - parse_file_for_deps(root,filename,'') - -for filename in deps_files_included: - parse_file(root,filename,'') - -for filename in sys.argv[2].rsplit(',') : - parse_file_for_drivers(root,filename) - -# display output -if sys.argv[3]=='drivers': - #output the list of externs first - for drv in sorted(drivers): - print(drv) - print("") - -if sys.argv[3]=='target': - for line in components: - sys.stdout.write("%s\n" % line) - sys.stdout.write('\n') - sys.stdout.write('function createProjects_mame_%s(_target, _subtarget)\n' % sys.argv[4]) - sys.stdout.write(' project ("mame_%s")\n' % sys.argv[4]) - sys.stdout.write(' targetsubdir(_target .."_" .. _subtarget)\n') - sys.stdout.write(' kind (LIBTYPE)\n') - sys.stdout.write(' uuid (os.uuid("drv-mame-%s"))\n' % sys.argv[4]) - sys.stdout.write(' addprojectflags()\n') - sys.stdout.write(' \n') - sys.stdout.write(' includedirs {\n') - sys.stdout.write(' MAME_DIR .. "src/osd",\n') - sys.stdout.write(' MAME_DIR .. "src/emu",\n') - sys.stdout.write(' MAME_DIR .. "src/devices",\n') - sys.stdout.write(' MAME_DIR .. "src/mame",\n') - sys.stdout.write(' MAME_DIR .. 
"src/lib",\n') - sys.stdout.write(' MAME_DIR .. "src/lib/util",\n') - sys.stdout.write(' MAME_DIR .. "src/lib/netlist",\n') - sys.stdout.write(' MAME_DIR .. "3rdparty",\n') - sys.stdout.write(' GEN_DIR .. "mame/layout",\n') - sys.stdout.write(' ext_includedir("flac"),\n') - sys.stdout.write(' ext_includedir("glm"),\n') - sys.stdout.write(' ext_includedir("jpeg"),\n') - sys.stdout.write(' ext_includedir("rapidjson"),\n') - sys.stdout.write(' ext_includedir("zlib"),\n') - sys.stdout.write(' }\n') - sys.stdout.write('\n') - sys.stdout.write(' files{\n') - for line in deps_files_included: - sys.stdout.write(' MAME_DIR .. "%s",\n' % line) - sys.stdout.write(' }\n') - sys.stdout.write('end\n') - sys.stdout.write('\n') - sys.stdout.write('function linkProjects_mame_%s(_target, _subtarget)\n' % sys.argv[4]) - sys.stdout.write(' links {\n') - sys.stdout.write(' "mame_%s",\n' % sys.argv[4]) - sys.stdout.write(' }\n') - sys.stdout.write('end\n') + header_to_optional = collect_lua_directives(options) + sources = collect_sources(options.root, options.sources) + splitsources = frozenset(s[2:] for s in (path_components(s) for s in sources) if s[:2] == ('src', 'mame')) + state = { 'havedrivers': False } + DriverFilter().parse_list(options.list, sourcefile, driver) + if not state['havedrivers']: + sys.stderr.write('None of the specified source files contain system drivers\n') + sys.exit(1) + source_dependencies = scan_source_dependencies(options.root, sources) + write_project(options, projectfile, header_to_optional, source_dependencies, True) + + +def write_filter_project(options, projectfile): + header_to_optional = collect_lua_directives(options) + sources = DriverCollector(options).sources + source_dependencies = scan_source_dependencies(options.root, (os.path.join('src', 'mame', *n.split('/')) for n in sources)) + write_project(options, projectfile, header_to_optional, source_dependencies, False) + + +def write_sources_filter(options, filterfile): + sources = set() + DriverFilter().parse_list(options.list, lambda n: sources.add(n), lambda n: None) + + drivers = set() + for source in collect_sources(options.root, options.sources): + components = tuple(x for x in split_path(source) if x) + if (len(components) > 3) and (components[:2] == ('src', 'mame')): + ext = os.path.splitext(components[-1])[1].lower() + if ext.startswith('.c'): + if '/'.join(components[2:]) in sources: + drivers.add('/'.join(components[2:])) + for driver in sorted(drivers): + filterfile.write(driver + '\n') + + +if __name__ == '__main__': + options = parse_command_line() + if options.command == 'sourcesproject': + write_sources_project(options, sys.stdout) + elif options.command == 'filterproject': + write_filter_project(options, sys.stdout) + elif options.command == 'sourcesfilter': + write_sources_filter(options, sys.stdout) + elif options.command == 'driverlist': + DriverLister(options).write_source(sys.stdout) + elif options.command == 'reconcilelist': + reconciler = DriverReconciler(options) + if options.infoxml == '-': + reconciler.reconcile_xml(sys.stdin) + elif options.infoxml is not None: + try: + xmlfile = io.open(options.infoxml, 'rb') + with xmlfile: + reconciler.reconcile_xml(xmlfile) + except IOError: + sys.stderr.write('Unable to open system information file "%s"\n' % (options.infoxml, )) + sys.exit(1) + if reconciler.bad: + sys.exit(1) diff --git a/scripts/build/makelist.py b/scripts/build/makelist.py deleted file mode 100644 index 1a27a515e04..00000000000 --- a/scripts/build/makelist.py +++ /dev/null @@ -1,167 +0,0 
@@ -#!/usr/bin/python -## -## license:BSD-3-Clause -## copyright-holders:Aaron Giles, Andrew Gardner - -from __future__ import with_statement - -import sys - -drivlist = [] -sourcelist = [] -filter_addlist = [] -filter_removelist = [] - -def parse_file(srcfile): - try: - fp = open(srcfile, 'rt') - except IOError: - sys.stderr.write("Unable to open source file '%s'\n" % srcfile) - return 1 - in_comment = 0 - linenum = 0 - curr_source = '' - for line in fp.readlines(): - drivname = '' - linenum+=1 - srcptr = 0 - while srcptr < len(line): - c = line[srcptr] - srcptr+=1 - if c==13 or c==10: - if c==13 and line[srcptr]==10: - srcptr+=1 - continue - if c==' ' or c==9: - continue - if in_comment==1 and c=='*' and line[srcptr]=='/' : - srcptr+=1 - in_comment = 0 - continue - if in_comment: - continue - if c=='/' and line[srcptr]=='*' : - srcptr+=1 - in_comment = 1 - continue - if c=='/' and line[srcptr]=='/' : - break - drivname += c - drivname = drivname.strip() - if len(drivname)>0: - if drivname[0]=='#': - sys.stderr.write("Importing drivers from '%s'\n" % drivname[1:]) - parse_file(drivname[1:]) - continue - if drivname[0]=='@': - curr_source= drivname[8:] - continue - if not all(((c >='a' and c<='z') or (c>='0' and c<='9') or c=='_') for c in drivname): - sys.stderr.write("%s:%d - Invalid character in driver \"%s\"\n" % (srcfile, linenum, drivname)) - return 1 - else: - if (curr_source == '') or (len(sourcelist)==0) or (curr_source in sourcelist): - drivlist.append(drivname) - return 0 - -def parse_filter_file(srcfile): - try: - fp = open(srcfile, 'rt') - except IOError: - sys.stderr.write("Unable to open filter file '%s'\n" % srcfile) - return 1 - in_comment = 0 - linenum = 0 - for line in fp.readlines(): - sourcename = '' - linenum+=1 - srcptr = 0 - while srcptr < len(line): - c = line[srcptr] - srcptr+=1 - if c==13 or c==10: - if c==13 and line[srcptr]==10: - srcptr+=1 - continue - if c==' ' or c==9: - continue - if in_comment==1 and c=='*' and line[srcptr]=='/' : - srcptr+=1 - in_comment = 0 - continue - if in_comment: - continue - if c=='/' and line[srcptr]=='*' : - srcptr+=1 - in_comment = 1 - continue - if c=='/' and line[srcptr]=='/' : - break - sourcename += c - sourcename = sourcename.strip() - if len(sourcename)>0: - if sourcename[0]=='#': - sys.stderr.write("Importing drivers from '%s'\n" % sourcename[1:]) - parse_filter_file(sourcename[1:]) - continue - if sourcename[0]=='+': - filter_addlist.append(sourcename[1:]) - continue - if sourcename[0]=='-': - filter_removelist.append(sourcename[1:]) - continue - if not all(((c >='a' and c<='z') or (c>='0' and c<='9') or c=='_' or c=='.' 
or c=='-') for c in sourcename): - sys.stderr.write("%s:%d - Invalid character in driver \"%s\"\n" % (srcfile, linenum, sourcename)) - return 1 - else: - sourcelist.append(sourcename) - return 0 - - -if len(sys.argv) < 2 or len(sys.argv) > 3: - print('Usage:') - print(' makelist <source.lst> [<filter.flt>]') - sys.exit(0) - -if len(sys.argv) == 3: - if parse_filter_file(sys.argv[2]) : - sys.exit(1) - sys.stderr.write("%d source file(s) found\n" % len(sourcelist)) - -if parse_file(sys.argv[1]) : - sys.exit(1) - -# output a count -if len(drivlist)==0 : - sys.stderr.write("No drivers found\n") - sys.exit(1) - -for x in filter_addlist: - drivlist.append(x) - -drivlist = [x for x in drivlist if (x not in filter_removelist)] - -sys.stderr.write("%d driver(s) found\n" % len(drivlist)) - -# add a reference to the ___empty driver -drivlist.append("___empty") - -# start with a header -print('#include "emu.h"\n') -print('#include "drivenum.h"\n') - -#output the list of externs first -for drv in sorted(drivlist): - print("GAME_EXTERN(%s);" % drv) -print("") - -# then output the array -print("const game_driver * const driver_list::s_drivers_sorted[%d] =" % len(drivlist)) -print("{") -for drv in sorted(drivlist): - print("\t&GAME_NAME(%s)," % drv) -print("};") -print("") - -# also output a global count -print("std::size_t const driver_list::s_driver_count = %d;\n" % len(drivlist)) diff --git a/scripts/build/msgfmt.py b/scripts/build/msgfmt.py index deb02ae32b2..3f731e941ea 100644 --- a/scripts/build/msgfmt.py +++ b/scripts/build/msgfmt.py @@ -1,12 +1,12 @@ -#! /usr/bin/env python -# -*- coding: utf-8 -*- +#! /usr/bin/env python3 # Written by Martin v. Löwis <loewis@informatik.hu-berlin.de> """Generate binary message catalog from textual translation description. This program converts a textual Uniforum-style message catalog (.po file) into a binary GNU catalog (.mo file). This is essentially the same function as the -GNU msgfmt program, however, it is a simpler implementation. +GNU msgfmt program, however, it is a simpler implementation. Currently it +does not handle plural forms but it does handle message contexts. Usage: msgfmt.py [OPTIONS] filename.po @@ -25,14 +25,12 @@ Options: Display version information and exit. """ -from __future__ import print_function import os import sys +import ast import getopt import struct import array -import re -import codecs from email.parser import HeaderParser __version__ = "1.2" @@ -40,7 +38,6 @@ __version__ = "1.2" MESSAGES = {} - def usage(code, msg=''): print(__doc__, file=sys.stderr) if msg: @@ -48,33 +45,14 @@ def usage(code, msg=''): sys.exit(code) - -def add(id, str, fuzzy): +def add(ctxt, id, str, fuzzy): "Add a non-fuzzy translation to the dictionary." global MESSAGES if not fuzzy and str: - MESSAGES[id] = str - -def dequote(s): - if (s[0] == s[-1]) and s.startswith(("'", '"')): - return s[1:-1] - return s - -# decode_escapes from http://stackoverflow.com/a/24519338 -ESCAPE_SEQUENCE_RE = re.compile(r''' - ( \\U........ # 8-digit hex escapes - | \\u.... # 4-digit hex escapes - | \\x.. 
# 2-digit hex escapes - | \\[0-7]{1,3} # Octal escapes - | \\N\{[^}]+\} # Unicode characters by name - | \\[\\'"abfnrtv] # Single-character escapes - )''', re.UNICODE | re.VERBOSE) - -def decode_escapes(s): - def decode_match(match): - return codecs.decode(match.group(0), 'unicode-escape') - - return ESCAPE_SEQUENCE_RE.sub(decode_match, s) + if ctxt is None: + MESSAGES[id] = str + else: + MESSAGES[b"%b\x04%b" % (ctxt, id)] = str def generate(): @@ -112,16 +90,16 @@ def generate(): 7*4, # start of key index 7*4+len(keys)*8, # start of value index 0, 0) # size and offset of hash table - output += array.array("i", offsets).tostring() + output += array.array("i", offsets).tobytes() output += ids output += strs return output - def make(filename, outfile): ID = 1 STR = 2 + CTXT = 3 # Compute .mo name from .po name and arguments if filename.endswith('.po'): @@ -132,31 +110,28 @@ def make(filename, outfile): outfile = os.path.splitext(infile)[0] + '.mo' try: - lines = open(infile, 'rb').readlines() + with open(infile, 'rb') as f: + lines = f.readlines() except IOError as msg: print(msg, file=sys.stderr) sys.exit(1) - section = None + section = msgctxt = None fuzzy = 0 - empty = 0 - header_attempted = False - - # Start off assuming Latin-1, so everything decodes without failure, - # until we know the exact encoding - encoding = 'latin-1' # Start off assuming Latin-1, so everything decodes without failure, # until we know the exact encoding encoding = 'latin-1' # Parse the catalog - for lno, l in enumerate(lines): + lno = 0 + for l in lines: l = l.decode(encoding) + lno += 1 # If we get a comment line after a msgstr, this is a new entry if l[0] == '#' and section == STR: - add(msgid, msgstr, fuzzy) - section = None + add(msgctxt, msgid, msgstr, fuzzy) + section = msgctxt = None fuzzy = 0 # Record a fuzzy mark if l[:2] == '#,' and 'fuzzy' in l: @@ -164,10 +139,16 @@ def make(filename, outfile): # Skip comments if l[0] == '#': continue - # Now we are in a msgid section, output previous section - if l.startswith('msgid') and not l.startswith('msgid_plural'): + # Now we are in a msgid or msgctxt section, output previous section + if l.startswith('msgctxt'): if section == STR: - add(msgid, msgstr, fuzzy) + add(msgctxt, msgid, msgstr, fuzzy) + section = CTXT + l = l[7:] + msgctxt = b'' + elif l.startswith('msgid') and not l.startswith('msgid_plural'): + if section == STR: + add(msgctxt, msgid, msgstr, fuzzy) if not msgid: # See whether there is an encoding declaration p = HeaderParser() @@ -178,14 +159,6 @@ def make(filename, outfile): l = l[5:] msgid = msgstr = b'' is_plural = False - if l.strip() == '""': - # Check if next line is msgstr. If so, this is a multiline msgid. - if lines[lno+1].decode(encoding).startswith('msgstr'): - # If this is the first empty msgid and is followed by msgstr, this is the header, which may contain the encoding declaration. - # Otherwise this file is not valid - if empty > 1: - print("Found multiple empty msgids on line " + str(lno) + ", not valid!") - empty += 1 # This is a message with plural forms elif l.startswith('msgid_plural'): if section != ID: @@ -207,26 +180,6 @@ def make(filename, outfile): if msgstr: msgstr += b'\0' # Separator of the various plural forms else: - if (l[6:].strip() == '""') and (empty == 1) and (not header_attempted): - header = "" - # parse up until next empty line = end of header - hdrno = lno - while(hdrno < len(lines)-1): - # This is a roundabout way to strip non-ASCII unicode characters from the header. 
- # As we are only parsing out the encoding, we don't need any unicode chars in it. - l = lines[hdrno+1].decode('unicode_escape').encode('ascii','ignore').decode(encoding) - if l.strip(): - header += decode_escapes(dequote(l.strip())) - else: - break - hdrno += 1 - # See whether there is an encoding declaration - if(hdrno > lno): - p = HeaderParser() - charset = p.parsestr(str(header)).get_content_charset() - header_attempted = True - if charset: - encoding = charset if is_plural: print('indexed msgstr required for plural on %s:%d' % (infile, lno), file=sys.stderr) @@ -236,8 +189,10 @@ def make(filename, outfile): l = l.strip() if not l: continue - l = decode_escapes(dequote(l)) # strip quotes and replace newlines if present - if section == ID: + l = ast.literal_eval(l) + if section == CTXT: + msgctxt += l.encode(encoding) + elif section == ID: msgid += l.encode(encoding) elif section == STR: msgstr += l.encode(encoding) @@ -248,18 +203,18 @@ def make(filename, outfile): sys.exit(1) # Add last entry if section == STR: - add(msgid, msgstr, fuzzy) + add(msgctxt, msgid, msgstr, fuzzy) # Compute output output = generate() try: - open(outfile,"wb").write(output) + with open(outfile,"wb") as f: + f.write(output) except IOError as msg: print(msg, file=sys.stderr) - def main(): try: opts, args = getopt.getopt(sys.argv[1:], 'hVo:', diff --git a/scripts/build/png.py b/scripts/build/png.py index 76798443669..43c6e3c93e0 100644 --- a/scripts/build/png.py +++ b/scripts/build/png.py @@ -31,79 +31,113 @@ # SOFTWARE. """ -Pure Python PNG Reader/Writer - -This Python module implements support for PNG images (see PNG -specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads -and writes PNG files with all allowable bit depths -(1/2/4/8/16/24/32/48/64 bits per pixel) and colour combinations: -greyscale (1/2/4/8/16 bit); RGB, RGBA, LA (greyscale with alpha) with -8/16 bits per channel; colour mapped images (1/2/4/8 bit). -Adam7 interlacing is supported for reading and -writing. A number of optional chunks can be specified (when writing) -and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``. +The ``png`` module can read and write PNG files. + +Installation and Overview +------------------------- + +``pip install pypng`` For help, type ``import png; help(png)`` in your python interpreter. -A good place to start is the :class:`Reader` and :class:`Writer` -classes. +A good place to start is the :class:`Reader` and :class:`Writer` classes. + +Coverage of PNG formats is fairly complete; +all allowable bit depths (1/2/4/8/16/24/32/48/64 bits per pixel) and +colour combinations are supported: + +- greyscale (1/2/4/8/16 bit); +- RGB, RGBA, LA (greyscale with alpha) with 8/16 bits per channel; +- colour mapped images (1/2/4/8 bit). + +Interlaced images, +which support a progressive display when downloading, +are supported for both reading and writing. + +A number of optional chunks can be specified (when writing) +and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``. + +The ``sBIT`` chunk can be used to specify precision for +non-native bit depths. + +Requires Python 3.4 or higher (or Python 2.7). +Installation is trivial, +but see the ``README.txt`` file (with the source distribution) for details. + +Full use of all features will need some reading of the PNG specification +http://www.w3.org/TR/2003/REC-PNG-20031110/. + +The package also comes with command line utilities. -Requires Python 2.3. Limited support is available for Python 2.2, but -not everything works. 
Best with Python 2.4 and higher. Installation is -trivial, but see the ``README.txt`` file (with the source distribution) -for details. +- ``pripamtopng`` converts + `Netpbm <http://netpbm.sourceforge.net/>`_ PAM/PNM files to PNG; +- ``pripngtopam`` converts PNG to file PAM/PNM. -This file can also be used as a command-line utility to convert -`Netpbm <http://netpbm.sourceforge.net/>`_ PNM files to PNG, and the -reverse conversion from PNG to PNM. The interface is similar to that -of the ``pnmtopng`` program from Netpbm. Type ``python png.py --help`` -at the shell prompt for usage and a list of options. +There are a few more for simple PNG manipulations. -A note on spelling and terminology ----------------------------------- +Spelling and Terminology +------------------------ -Generally British English spelling is used in the documentation. So -that's "greyscale" and "colour". This not only matches the author's -native language, it's also used by the PNG specification. +Generally British English spelling is used in the documentation. +So that's "greyscale" and "colour". +This not only matches the author's native language, +it's also used by the PNG specification. + +Colour Models +------------- The major colour models supported by PNG (and hence by PyPNG) are: -greyscale, RGB, greyscale--alpha, RGB--alpha. These are sometimes -referred to using the abbreviations: L, RGB, LA, RGBA. In this case -each letter abbreviates a single channel: *L* is for Luminance or Luma -or Lightness which is the channel used in greyscale images; *R*, *G*, -*B* stand for Red, Green, Blue, the components of a colour image; *A* -stands for Alpha, the opacity channel (used for transparency effects, -but higher values are more opaque, so it makes sense to call it -opacity). - -A note on formats ------------------ - -When getting pixel data out of this module (reading) and presenting -data to this module (writing) there are a number of ways the data could -be represented as a Python value. Generally this module uses one of -three formats called "flat row flat pixel", "boxed row flat pixel", and -"boxed row boxed pixel". Basically the concern is whether each pixel -and each row comes in its own little tuple (box), or not. + +- greyscale; +- greyscale--alpha; +- RGB; +- RGB--alpha. + +Also referred to using the abbreviations: L, LA, RGB, RGBA. +Each letter codes a single channel: +*L* is for Luminance or Luma or Lightness (greyscale images); +*A* stands for Alpha, the opacity channel +(used for transparency effects, but higher values are more opaque, +so it makes sense to call it opacity); +*R*, *G*, *B* stand for Red, Green, Blue (colour image). + +Lists, arrays, sequences, and so on +----------------------------------- + +When getting pixel data out of this module (reading) and +presenting data to this module (writing) there are +a number of ways the data could be represented as a Python value. + +The preferred format is a sequence of *rows*, +which each row being a sequence of *values*. +In this format, the values are in pixel order, +with all the values from all the pixels in a row +being concatenated into a single sequence for that row. Consider an image that is 3 pixels wide by 2 pixels high, and each pixel has RGB components: -Boxed row flat pixel:: +Sequence of rows:: list([R,G,B, R,G,B, R,G,B], [R,G,B, R,G,B, R,G,B]) -Each row appears as its own list, but the pixels are flattened so -that three values for one pixel simply follow the three values for -the previous pixel. 
This is the most common format used, because it -provides a good compromise between space and convenience. PyPNG regards -itself as at liberty to replace any sequence type with any sufficiently -compatible other sequence type; in practice each row is an array (from -the array module), and the outer list is sometimes an iterator rather -than an explicit list (so that streaming is possible). +Each row appears as its own list, +but the pixels are flattened so that three values for one pixel +simply follow the three values for the previous pixel. -Flat row flat pixel:: +This is the preferred because +it provides a good compromise between space and convenience. +PyPNG regards itself as at liberty to replace any sequence type with +any sufficiently compatible other sequence type; +in practice each row is an array (``bytearray`` or ``array.array``). + +To allow streaming the outer list is sometimes +an iterator rather than an explicit list. + +An alternative format is a single array holding all the values. + +Array of values:: [R,G,B, R,G,B, R,G,B, R,G,B, R,G,B, R,G,B] @@ -111,64 +145,48 @@ Flat row flat pixel:: The entire image is one single giant sequence of colour values. Generally an array will be used (to save space), not a list. -Boxed row boxed pixel:: - - list([ (R,G,B), (R,G,B), (R,G,B) ], - [ (R,G,B), (R,G,B), (R,G,B) ]) - -Each row appears in its own list, but each pixel also appears in its own -tuple. A serious memory burn in Python. - -In all cases the top row comes first, and for each row the pixels are -ordered from left-to-right. Within a pixel the values appear in the -order, R-G-B-A (or L-A for greyscale--alpha). - -There is a fourth format, mentioned because it is used internally, -is close to what lies inside a PNG file itself, and has some support -from the public API. This format is called packed. When packed, -each row is a sequence of bytes (integers from 0 to 255), just as -it is before PNG scanline filtering is applied. When the bit depth -is 8 this is essentially the same as boxed row flat pixel; when the -bit depth is less than 8, several pixels are packed into each byte; -when the bit depth is 16 (the only value more than 8 that is supported -by the PNG image format) each pixel value is decomposed into 2 bytes -(and `packed` is a misnomer). This format is used by the -:meth:`Writer.write_packed` method. It isn't usually a convenient -format, but may be just right if the source data for the PNG image -comes from something that uses a similar format (for example, 1-bit -BMPs, or another PNG file). - -And now, my famous members --------------------------- +The top row comes first, +and within each row the pixels are ordered from left-to-right. +Within a pixel the values appear in the order R-G-B-A +(or L-A for greyscale--alpha). + +There is another format, which should only be used with caution. +It is mentioned because it is used internally, +is close to what lies inside a PNG file itself, +and has some support from the public API. +This format is called *packed*. +When packed, each row is a sequence of bytes (integers from 0 to 255), +just as it is before PNG scanline filtering is applied. +When the bit depth is 8 this is the same as a sequence of rows; +when the bit depth is less than 8 (1, 2 and 4), +several pixels are packed into each byte; +when the bit depth is 16 each pixel value is decomposed into 2 bytes +(and `packed` is a misnomer). +This format is used by the :meth:`Writer.write_packed` method. 
+It isn't usually a convenient format, +but may be just right if the source data for +the PNG image comes from something that uses a similar format +(for example, 1-bit BMPs, or another PNG file). """ -# http://www.python.org/doc/2.2.3/whatsnew/node5.html -from __future__ import generators,print_function +from __future__ import print_function -__version__ = "0.0.17" +__version__ = "0.0.20" -from array import array -try: - from itertools import imap -except ImportError: - imap = map +import collections +import io # For io.BytesIO +import itertools import math # http://www.python.org/doc/2.4.4/lib/module-operator.html import operator +import re import struct import sys -import zlib # http://www.python.org/doc/2.4.4/lib/module-warnings.html import warnings -try: - # `cpngfilters` is a Cython module: it must be compiled by - # Cython for this import to work. - # If this import does work, then it overrides pure-python - # filtering functions defined later in this file (see `class - # pngfilters`). - import cpngfilters as pngfilters -except ImportError: - pass +import zlib + +from array import array __all__ = ['Image', 'Reader', 'Writer', 'write_chunks', 'from_array'] @@ -176,99 +194,53 @@ __all__ = ['Image', 'Reader', 'Writer', 'write_chunks', 'from_array'] # The PNG signature. # http://www.w3.org/TR/PNG/#5PNG-file-signature -_signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10) +signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10) -_adam7 = ((0, 0, 8, 8), - (4, 0, 8, 8), - (0, 4, 4, 8), - (2, 0, 4, 4), - (0, 2, 2, 4), - (1, 0, 2, 2), - (0, 1, 1, 2)) +# The xstart, ystart, xstep, ystep for the Adam7 interlace passes. +adam7 = ((0, 0, 8, 8), + (4, 0, 8, 8), + (0, 4, 4, 8), + (2, 0, 4, 4), + (0, 2, 2, 4), + (1, 0, 2, 2), + (0, 1, 1, 2)) -def group(s, n): - # See http://www.python.org/doc/2.6/library/functions.html#zip - return zip(*[iter(s)]*n) -def isarray(x): - """Same as ``isinstance(x, array)`` except on Python 2.2, where it - always returns ``False``. This helps PyPNG work on Python 2.2. +def adam7_generate(width, height): + """ + Generate the coordinates for the reduced scanlines + of an Adam7 interlaced image + of size `width` by `height` pixels. + + Yields a generator for each pass, + and each pass generator yields a series of (x, y, xstep) triples, + each one identifying a reduced scanline consisting of + pixels starting at (x, y) and taking every xstep pixel to the right. """ - try: - return isinstance(x, array) - except TypeError: - # Because on Python 2.2 array.array is not a type. - return False + for xstart, ystart, xstep, ystep in adam7: + if xstart >= width: + continue + yield ((xstart, y, xstep) for y in range(ystart, height, ystep)) -try: - array.tobytes -except AttributeError: - try: # see :pyver:old - array.tostring - except AttributeError: - def tostring(row): - l = len(row) - return struct.pack('%dB' % l, *row) - else: - def tostring(row): - """Convert row of bytes to string. Expects `row` to be an - ``array``. - """ - return row.tostring() -else: - def tostring(row): - """ Python3 definition, array.tostring() is deprecated in Python3 - """ - return row.tobytes() - -# Conditionally convert to bytes. Works on Python 2 and Python 3. 
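A rough sketch of exercising the adam7_generate() helper added earlier in this hunk (assuming the module is importable as ``png``; the 4x4 size is an arbitrary choice for illustration)::

    import png

    # Each pass yields (x, y, xstep) triples, one per reduced scanline.
    for passnum, reduced_pass in enumerate(png.adam7_generate(4, 4), 1):
        for xstart, y, xstep in reduced_pass:
            print('pass %d: start (%d, %d), step %d' % (passnum, xstart, y, xstep))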
-try: - bytes('', 'ascii') - def strtobytes(x): return bytes(x, 'iso8859-1') - def bytestostr(x): return str(x, 'iso8859-1') -except (NameError, TypeError): - # We get NameError when bytes() does not exist (most Python - # 2.x versions), and TypeError when bytes() exists but is on - # Python 2.x (when it is an alias for str() and takes at most - # one argument). - strtobytes = str - bytestostr = str - -def interleave_planes(ipixels, apixels, ipsize, apsize): - """ - Interleave (colour) planes, e.g. RGB + A = RGBA. - - Return an array of pixels consisting of the `ipsize` elements of - data from each pixel in `ipixels` followed by the `apsize` elements - of data from each pixel in `apixels`. Conventionally `ipixels` - and `apixels` are byte arrays so the sizes are bytes, but it - actually works with any arrays of the same type. The returned - array is the same type as the input arrays which should be the - same type as each other. - """ - itotal = len(ipixels) - atotal = len(apixels) - newtotal = itotal + atotal - newpsize = ipsize + apsize - # Set up the output buffer - # See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356 - out = array(ipixels.typecode) - # It's annoying that there is no cheap way to set the array size :-( - out.extend(ipixels) - out.extend(apixels) - # Interleave in the pixel data - for i in range(ipsize): - out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize] - for i in range(apsize): - out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize] - return out +# Models the 'pHYs' chunk (used by the Reader) +Resolution = collections.namedtuple('_Resolution', 'x y unit_is_meter') + + +def group(s, n): + return list(zip(* [iter(s)] * n)) + + +def isarray(x): + return isinstance(x, array) + def check_palette(palette): - """Check a palette argument (to the :class:`Writer` class) - for validity. Returns the palette as a list if okay; raises an - exception otherwise. + """ + Check a palette argument (to the :class:`Writer` class) for validity. + Returns the palette as a list if okay; + raises an exception otherwise. """ # None is the default and is allowed. @@ -277,25 +249,30 @@ def check_palette(palette): p = list(palette) if not (0 < len(p) <= 256): - raise ValueError("a palette must have between 1 and 256 entries") + raise ProtocolError( + "a palette must have between 1 and 256 entries," + " see https://www.w3.org/TR/PNG/#11PLTE") seen_triple = False - for i,t in enumerate(p): - if len(t) not in (3,4): - raise ValueError( - "palette entry %d: entries must be 3- or 4-tuples." % i) + for i, t in enumerate(p): + if len(t) not in (3, 4): + raise ProtocolError( + "palette entry %d: entries must be 3- or 4-tuples." % i) if len(t) == 3: seen_triple = True if seen_triple and len(t) == 4: - raise ValueError( - "palette entry %d: all 4-tuples must precede all 3-tuples" % i) + raise ProtocolError( + "palette entry %d: all 4-tuples must precede all 3-tuples" % i) for x in t: if int(x) != x or not(0 <= x <= 255): - raise ValueError( - "palette entry %d: values must be integer: 0 <= x <= 255" % i) + raise ProtocolError( + "palette entry %d: " + "values must be integer: 0 <= x <= 255" % i) return p + def check_sizes(size, width, height): - """Check that these arguments, in supplied, are consistent. + """ + Check that these arguments, if supplied, are consistent. Return a (width, height) pair. 
""" @@ -303,22 +280,25 @@ def check_sizes(size, width, height): return width, height if len(size) != 2: - raise ValueError( - "size argument should be a pair (width, height)") + raise ProtocolError( + "size argument should be a pair (width, height)") if width is not None and width != size[0]: - raise ValueError( - "size[0] (%r) and width (%r) should match when both are used." + raise ProtocolError( + "size[0] (%r) and width (%r) should match when both are used." % (size[0], width)) if height is not None and height != size[1]: - raise ValueError( - "size[1] (%r) and height (%r) should match when both are used." + raise ProtocolError( + "size[1] (%r) and height (%r) should match when both are used." % (size[1], height)) return size + def check_color(c, greyscale, which): - """Checks that a colour argument for transparent or - background options is the right form. Returns the colour - (which, if it's a bar integer, is "corrected" to a 1-tuple). + """ + Checks that a colour argument for transparent or background options + is the right form. + Returns the colour + (which, if it's a bare integer, is "corrected" to a 1-tuple). """ if c is None: @@ -329,33 +309,48 @@ def check_color(c, greyscale, which): except TypeError: c = (c,) if len(c) != 1: - raise ValueError("%s for greyscale must be 1-tuple" % - which) - if not isinteger(c[0]): - raise ValueError( + raise ProtocolError("%s for greyscale must be 1-tuple" % which) + if not is_natural(c[0]): + raise ProtocolError( "%s colour for greyscale must be integer" % which) else: if not (len(c) == 3 and - isinteger(c[0]) and - isinteger(c[1]) and - isinteger(c[2])): - raise ValueError( + is_natural(c[0]) and + is_natural(c[1]) and + is_natural(c[2])): + raise ProtocolError( "%s colour must be a triple of integers" % which) return c + class Error(Exception): def __str__(self): return self.__class__.__name__ + ': ' + ' '.join(self.args) + class FormatError(Error): - """Problem with input file format. In other words, PNG file does - not conform to the specification in some way and is invalid. + """ + Problem with input file format. + In other words, PNG file does not conform to + the specification in some way and is invalid. + """ + + +class ProtocolError(Error): + """ + Problem with the way the programming interface has been used, + or the data presented to it. """ + class ChunkError(FormatError): pass +class Default: + """The default for the greyscale paramter.""" + + class Writer: """ PNG encoder in pure Python. @@ -363,7 +358,7 @@ class Writer: def __init__(self, width=None, height=None, size=None, - greyscale=False, + greyscale=Default, alpha=False, bitdepth=8, palette=None, @@ -372,14 +367,13 @@ class Writer: gamma=None, compression=None, interlace=False, - bytes_per_sample=None, # deprecated planes=None, colormap=None, maxval=None, chunk_limit=2**20, - x_pixels_per_unit = None, - y_pixels_per_unit = None, - unit_is_meter = False): + x_pixels_per_unit=None, + y_pixels_per_unit=None, + unit_is_meter=False): """ Create a PNG encoder object. @@ -390,11 +384,11 @@ class Writer: size Image size (w,h) in pixels, as single argument. greyscale - Input data is greyscale, not RGB. + Pixels are greyscale, not RGB. alpha Input data has alpha channel (RGBA or LA). bitdepth - Bit depth: from 1 to 16. + Bit depth: from 1 to 16 (for each channel). palette Create a palette for a colour mapped image (colour type 3). transparent @@ -410,91 +404,113 @@ class Writer: Create an interlaced image. chunk_limit Write multiple ``IDAT`` chunks to save memory. 
- x_pixels_per_unit (pHYs chunk) - Number of pixels a unit along the x axis - y_pixels_per_unit (pHYs chunk) - Number of pixels a unit along the y axis - With x_pixel_unit, give the pixel size ratio - unit_is_meter (pHYs chunk) - Indicates if unit is meter or not + x_pixels_per_unit + Number of pixels a unit along the x axis (write a + `pHYs` chunk). + y_pixels_per_unit + Number of pixels a unit along the y axis (write a + `pHYs` chunk). Along with `x_pixel_unit`, this gives + the pixel size ratio. + unit_is_meter + `True` to indicate that the unit (for the `pHYs` + chunk) is metre. The image size (in pixels) can be specified either by using the `width` and `height` arguments, or with the single `size` - argument. If `size` is used it should be a pair (*width*, - *height*). + argument. + If `size` is used it should be a pair (*width*, *height*). - `greyscale` and `alpha` are booleans that specify whether - an image is greyscale (or colour), and whether it has an - alpha channel (or not). + The `greyscale` argument indicates whether input pixels + are greyscale (when true), or colour (when false). + The default is true unless `palette=` is used. + + The `alpha` argument (a boolean) specifies + whether input pixels have an alpha channel (or not). `bitdepth` specifies the bit depth of the source pixel values. - Each source pixel value must be an integer between 0 and - ``2**bitdepth-1``. For example, 8-bit images have values - between 0 and 255. PNG only stores images with bit depths of - 1,2,4,8, or 16. When `bitdepth` is not one of these values, - the next highest valid bit depth is selected, and an ``sBIT`` - (significant bits) chunk is generated that specifies the - original precision of the source image. In this case the - supplied pixel values will be rescaled to fit the range of - the selected bit depth. - - The details of which bit depth / colour model combinations the - PNG file format supports directly, are somewhat arcane - (refer to the PNG specification for full details). Briefly: - "small" bit depths (1,2,4) are only allowed with greyscale and - colour mapped images; colour mapped images cannot have bit depth - 16. - - For colour mapped images (in other words, when the `palette` - argument is specified) the `bitdepth` argument must match one of - the valid PNG bit depths: 1, 2, 4, or 8. (It is valid to have a - PNG image with a palette and an ``sBIT`` chunk, but the meaning - is slightly different; it would be awkward to press the - `bitdepth` argument into service for this.) - - The `palette` option, when specified, causes a colour mapped - image to be created: the PNG colour type is set to 3; greyscale - must not be set; alpha must not be set; transparent must not be - set; the bit depth must be 1,2,4, or 8. When a colour mapped - image is created, the pixel values are palette indexes and + Each channel may have a different bit depth. + Each source pixel must have values that are + an integer between 0 and ``2**bitdepth-1``, where + `bitdepth` is the bit depth for the corresponding channel. + For example, 8-bit images have values between 0 and 255. + PNG only stores images with bit depths of + 1,2,4,8, or 16 (the same for all channels). + When `bitdepth` is not one of these values or where + channels have different bit depths, + the next highest valid bit depth is selected, + and an ``sBIT`` (significant bits) chunk is generated + that specifies the original precision of the source image. 
+ In this case the supplied pixel values will be rescaled to + fit the range of the selected bit depth. + + The PNG file format supports many bit depth / colour model + combinations, but not all. + The details are somewhat arcane + (refer to the PNG specification for full details). + Briefly: + Bit depths < 8 (1,2,4) are only allowed with greyscale and + colour mapped images; + colour mapped images cannot have bit depth 16. + + For colour mapped images + (in other words, when the `palette` argument is specified) + the `bitdepth` argument must match one of + the valid PNG bit depths: 1, 2, 4, or 8. + (It is valid to have a PNG image with a palette and + an ``sBIT`` chunk, but the meaning is slightly different; + it would be awkward to use the `bitdepth` argument for this.) + + The `palette` option, when specified, + causes a colour mapped image to be created: + the PNG colour type is set to 3; + `greyscale` must not be true; `alpha` must not be true; + `transparent` must not be set. + The bit depth must be 1,2,4, or 8. + When a colour mapped image is created, + the pixel values are palette indexes and the `bitdepth` argument specifies the size of these indexes (not the size of the colour values in the palette). The palette argument value should be a sequence of 3- or - 4-tuples. 3-tuples specify RGB palette entries; 4-tuples - specify RGBA palette entries. If both 4-tuples and 3-tuples - appear in the sequence then all the 4-tuples must come - before all the 3-tuples. A ``PLTE`` chunk is created; if there - are 4-tuples then a ``tRNS`` chunk is created as well. The - ``PLTE`` chunk will contain all the RGB triples in the same - sequence; the ``tRNS`` chunk will contain the alpha channel for - all the 4-tuples, in the same sequence. Palette entries - are always 8-bit. - - If specified, the `transparent` and `background` parameters must - be a tuple with three integer values for red, green, blue, or - a simple integer (or singleton tuple) for a greyscale image. + 4-tuples. + 3-tuples specify RGB palette entries; + 4-tuples specify RGBA palette entries. + All the 4-tuples (if present) must come before all the 3-tuples. + A ``PLTE`` chunk is created; + if there are 4-tuples then a ``tRNS`` chunk is created as well. + The ``PLTE`` chunk will contain all the RGB triples in the same + sequence; + the ``tRNS`` chunk will contain the alpha channel for + all the 4-tuples, in the same sequence. + Palette entries are always 8-bit. + + If specified, the `transparent` and `background` parameters must be + a tuple with one element for each channel in the image. + Either a 3-tuple of integer (RGB) values for a colour image, or + a 1-tuple of a single integer for a greyscale image. If specified, the `gamma` parameter must be a positive number - (generally, a float). A ``gAMA`` chunk will be created. + (generally, a `float`). + A ``gAMA`` chunk will be created. Note that this will not change the values of the pixels as - they appear in the PNG file, they are assumed to have already + they appear in the PNG file, + they are assumed to have already been converted appropriately for the gamma specified. The `compression` argument specifies the compression level to - be used by the ``zlib`` module. Values from 1 to 9 specify - compression, with 9 being "more compressed" (usually smaller - and slower, but it doesn't always work out that way). 0 means - no compression. -1 and ``None`` both mean that the default - level of compession will be picked by the ``zlib`` module - (which is generally acceptable). 
+ be used by the ``zlib`` module. + Values from 1 to 9 (highest) specify compression. + 0 means no compression. + -1 and ``None`` both mean that the ``zlib`` module uses + the default level of compession (which is generally acceptable). If `interlace` is true then an interlaced image is created - (using PNG's so far only interace method, *Adam7*). This does - not affect how the pixels should be presented to the encoder, + (using PNG's so far only interace method, *Adam7*). + This does not affect how the pixels should be passed in, rather it changes how they are arranged into the PNG file. - On slow connexions interlaced images can be partially decoded - by the browser to give a rough view of the image that is + On slow connexions interlaced images can be + partially decoded by the browser to give + a rough view of the image that is successively refined as more image data appears. .. note :: @@ -503,8 +519,9 @@ class Writer: to be processed in working memory. `chunk_limit` is used to limit the amount of memory used whilst - compressing the image. In order to avoid using large amounts of - memory, multiple ``IDAT`` chunks may be created. + compressing the image. + In order to avoid using large amounts of memory, + multiple ``IDAT`` chunks may be created. """ # At the moment the `planes` argument is ignored; @@ -516,85 +533,75 @@ class Writer: width, height = check_sizes(size, width, height) del size + if not is_natural(width) or not is_natural(height): + raise ProtocolError("width and height must be integers") if width <= 0 or height <= 0: - raise ValueError("width and height must be greater than zero") - if not isinteger(width) or not isinteger(height): - raise ValueError("width and height must be integers") + raise ProtocolError("width and height must be greater than zero") # http://www.w3.org/TR/PNG/#7Integers-and-byte-order - if width > 2**32-1 or height > 2**32-1: - raise ValueError("width and height cannot exceed 2**32-1") + if width > 2 ** 31 - 1 or height > 2 ** 31 - 1: + raise ProtocolError("width and height cannot exceed 2**31-1") if alpha and transparent is not None: - raise ValueError( + raise ProtocolError( "transparent colour not allowed with alpha channel") - if bytes_per_sample is not None: - warnings.warn('please use bitdepth instead of bytes_per_sample', - DeprecationWarning) - if bytes_per_sample not in (0.125, 0.25, 0.5, 1, 2): - raise ValueError( - "bytes per sample must be .125, .25, .5, 1, or 2") - bitdepth = int(8*bytes_per_sample) - del bytes_per_sample - if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth: - raise ValueError("bitdepth (%r) must be a positive integer <= 16" % - bitdepth) - - self.rescale = None + # bitdepth is either single integer, or tuple of integers. + # Convert to tuple. + try: + len(bitdepth) + except TypeError: + bitdepth = (bitdepth, ) + for b in bitdepth: + valid = is_natural(b) and 1 <= b <= 16 + if not valid: + raise ProtocolError( + "each bitdepth %r must be a positive integer <= 16" % + (bitdepth,)) + + # Calculate channels, and + # expand bitdepth to be one element per channel. 
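        # Illustrative example of the expansion performed below
        # (assuming default keyword values otherwise): an RGBA image
        # (greyscale false, alpha true) has 4 planes, so a scalar
        # bitdepth of 8 becomes (8, 8, 8, 8); a colour-mapped image
        # (palette supplied) always has a single plane, so the tuple
        # stays as, e.g., (8,).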
palette = check_palette(palette) - if palette: - if bitdepth not in (1,2,4,8): - raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8") - if transparent is not None: - raise ValueError("transparent and palette not compatible") - if alpha: - raise ValueError("alpha and palette not compatible") - if greyscale: - raise ValueError("greyscale and palette not compatible") + alpha = bool(alpha) + colormap = bool(palette) + if greyscale is Default and palette: + greyscale = False + greyscale = bool(greyscale) + if colormap: + color_planes = 1 + planes = 1 else: - # No palette, check for sBIT chunk generation. - if alpha or not greyscale: - if bitdepth not in (8,16): - targetbitdepth = (8,16)[bitdepth > 8] - self.rescale = (bitdepth, targetbitdepth) - bitdepth = targetbitdepth - del targetbitdepth - else: - assert greyscale - assert not alpha - if bitdepth not in (1,2,4,8,16): - if bitdepth > 8: - targetbitdepth = 16 - elif bitdepth == 3: - targetbitdepth = 4 - else: - assert bitdepth in (5,6,7) - targetbitdepth = 8 - self.rescale = (bitdepth, targetbitdepth) - bitdepth = targetbitdepth - del targetbitdepth - - if bitdepth < 8 and (alpha or not greyscale and not palette): - raise ValueError( - "bitdepth < 8 only permitted with greyscale or palette") - if bitdepth > 8 and palette: - raise ValueError( - "bit depth must be 8 or less for images with palette") + color_planes = (3, 1)[greyscale] + planes = color_planes + alpha + if len(bitdepth) == 1: + bitdepth *= planes + + bitdepth, self.rescale = check_bitdepth_rescale( + palette, + bitdepth, + transparent, alpha, greyscale) + + # These are assertions, because above logic should have + # corrected or raised all problematic cases. + if bitdepth < 8: + assert greyscale or palette + assert not alpha + if bitdepth > 8: + assert not palette transparent = check_color(transparent, greyscale, 'transparent') background = check_color(background, greyscale, 'background') - # It's important that the true boolean values (greyscale, alpha, - # colormap, interlace) are converted to bool because Iverson's - # convention is relied upon later on. + # It's important that the true boolean values + # (greyscale, alpha, colormap, interlace) are converted + # to bool because Iverson's convention is relied upon later on. self.width = width self.height = height self.transparent = transparent self.background = background self.gamma = gamma - self.greyscale = bool(greyscale) - self.alpha = bool(alpha) - self.colormap = bool(palette) + self.greyscale = greyscale + self.alpha = alpha + self.colormap = colormap self.bitdepth = int(bitdepth) self.compression = compression self.chunk_limit = chunk_limit @@ -604,84 +611,159 @@ class Writer: self.y_pixels_per_unit = y_pixels_per_unit self.unit_is_meter = bool(unit_is_meter) - self.color_type = 4*self.alpha + 2*(not greyscale) + 1*self.colormap - assert self.color_type in (0,2,3,4,6) + self.color_type = (4 * self.alpha + + 2 * (not greyscale) + + 1 * self.colormap) + assert self.color_type in (0, 2, 3, 4, 6) - self.color_planes = (3,1)[self.greyscale or self.colormap] - self.planes = self.color_planes + self.alpha + self.color_planes = color_planes + self.planes = planes # :todo: fix for bitdepth < 8 - self.psize = (self.bitdepth/8) * self.planes - - def make_palette(self): - """Create the byte sequences for a ``PLTE`` and if necessary a - ``tRNS`` chunk. Returned as a pair (*p*, *t*). *t* will be - ``None`` if no ``tRNS`` chunk is necessary. 
- """ - - p = array('B') - t = array('B') - - for x in self.palette: - p.extend(x[0:3]) - if len(x) > 3: - t.append(x[3]) - p = tostring(p) - t = tostring(t) - if t: - return p,t - return p,None + self.psize = (self.bitdepth / 8) * self.planes def write(self, outfile, rows): - """Write a PNG image to the output file. `rows` should be - an iterable that yields each row in boxed row flat pixel - format. The rows should be the rows of the original image, - so there should be ``self.height`` rows of ``self.width * - self.planes`` values. If `interlace` is specified (when - creating the instance), then an interlaced PNG file will - be written. Supply the rows in the normal image order; + """ + Write a PNG image to the output file. + `rows` should be an iterable that yields each row + (each row is a sequence of values). + The rows should be the rows of the original image, + so there should be ``self.height`` rows of + ``self.width * self.planes`` values. + If `interlace` is specified (when creating the instance), + then an interlaced PNG file will be written. + Supply the rows in the normal image order; the interlacing is carried out internally. .. note :: - Interlacing will require the entire image to be in working - memory. + Interlacing requires the entire image to be in working memory. """ + # Values per row + vpr = self.width * self.planes + + def check_rows(rows): + """ + Yield each row in rows, + but check each row first (for correct width). + """ + for i, row in enumerate(rows): + try: + wrong_length = len(row) != vpr + except TypeError: + # When using an itertools.ichain object or + # other generator not supporting __len__, + # we set this to False to skip the check. + wrong_length = False + if wrong_length: + # Note: row numbers start at 0. + raise ProtocolError( + "Expected %d values but got %d value, in row %d" % + (vpr, len(row), i)) + yield row + if self.interlace: fmt = 'BH'[self.bitdepth > 8] - a = array(fmt, itertools.chain(*rows)) + a = array(fmt, itertools.chain(*check_rows(rows))) return self.write_array(outfile, a) - nrows = self.write_passes(outfile, rows) + nrows = self.write_passes(outfile, check_rows(rows)) if nrows != self.height: - raise ValueError( - "rows supplied (%d) does not match height (%d)" % - (nrows, self.height)) + raise ProtocolError( + "rows supplied (%d) does not match height (%d)" % + (nrows, self.height)) - def write_passes(self, outfile, rows, packed=False): + def write_passes(self, outfile, rows): """ Write a PNG image to the output file. Most users are expected to find the :meth:`write` or :meth:`write_array` method more convenient. - + The rows should be given to this method in the order that - they appear in the output file. For straightlaced images, - this is the usual top to bottom ordering, but for interlaced - images the rows should have already been interlaced before + they appear in the output file. + For straightlaced images, this is the usual top to bottom ordering. + For interlaced images the rows should have been interlaced before passing them to this function. - `rows` should be an iterable that yields each row. When - `packed` is ``False`` the rows should be in boxed row flat pixel - format; when `packed` is ``True`` each row should be a packed - sequence of bytes. + `rows` should be an iterable that yields each row + (each row being a sequence of values). """ + # Ensure rows are scaled (to 4-/8-/16-bit), + # and packed into bytes. 
+ + if self.rescale: + rows = rescale_rows(rows, self.rescale) + + if self.bitdepth < 8: + rows = pack_rows(rows, self.bitdepth) + elif self.bitdepth == 16: + rows = unpack_rows(rows) + + return self.write_packed(outfile, rows) + + def write_packed(self, outfile, rows): + """ + Write PNG file to `outfile`. + `rows` should be an iterator that yields each packed row; + a packed row being a sequence of packed bytes. + + The rows have a filter byte prefixed and + are then compressed into one or more IDAT chunks. + They are not processed any further, + so if bitdepth is other than 1, 2, 4, 8, 16, + the pixel values should have been scaled + before passing them to this method. + + This method does work for interlaced images but it is best avoided. + For interlaced images, the rows should be + presented in the order that they appear in the file. + """ + + self.write_preamble(outfile) + + # http://www.w3.org/TR/PNG/#11IDAT + if self.compression is not None: + compressor = zlib.compressobj(self.compression) + else: + compressor = zlib.compressobj() + + # data accumulates bytes to be compressed for the IDAT chunk; + # it's compressed when sufficiently large. + data = bytearray() + + for i, row in enumerate(rows): + # Add "None" filter type. + # Currently, it's essential that this filter type be used + # for every scanline as + # we do not mark the first row of a reduced pass image; + # that means we could accidentally compute + # the wrong filtered scanline if we used + # "up", "average", or "paeth" on such a line. + data.append(0) + data.extend(row) + if len(data) > self.chunk_limit: + # :todo: bytes() only necessary in Python 2 + compressed = compressor.compress(bytes(data)) + if len(compressed): + write_chunk(outfile, b'IDAT', compressed) + data = bytearray() + + compressed = compressor.compress(bytes(data)) + flushed = compressor.flush() + if len(compressed) or len(flushed): + write_chunk(outfile, b'IDAT', compressed + flushed) + # http://www.w3.org/TR/PNG/#11IEND + write_chunk(outfile, b'IEND') + return i + 1 + + def write_preamble(self, outfile): # http://www.w3.org/TR/PNG/#5PNG-file-signature - outfile.write(_signature) + outfile.write(signature) # http://www.w3.org/TR/PNG/#11IHDR - write_chunk(outfile, 'IHDR', + write_chunk(outfile, b'IHDR', struct.pack("!2I5B", self.width, self.height, self.bitdepth, self.color_type, 0, 0, self.interlace)) @@ -689,246 +771,76 @@ class Writer: # See :chunk:order # http://www.w3.org/TR/PNG/#11gAMA if self.gamma is not None: - write_chunk(outfile, 'gAMA', - struct.pack("!L", int(round(self.gamma*1e5)))) + write_chunk(outfile, b'gAMA', + struct.pack("!L", int(round(self.gamma * 1e5)))) # See :chunk:order # http://www.w3.org/TR/PNG/#11sBIT if self.rescale: - write_chunk(outfile, 'sBIT', + write_chunk( + outfile, b'sBIT', struct.pack('%dB' % self.planes, - *[self.rescale[0]]*self.planes)) - - # :chunk:order: Without a palette (PLTE chunk), ordering is - # relatively relaxed. With one, gAMA chunk must precede PLTE - # chunk which must precede tRNS and bKGD. + * [s[0] for s in self.rescale])) + + # :chunk:order: Without a palette (PLTE chunk), + # ordering is relatively relaxed. + # With one, gAMA chunk must precede PLTE chunk + # which must precede tRNS and bKGD. # See http://www.w3.org/TR/PNG/#5ChunkOrdering if self.palette: - p,t = self.make_palette() - write_chunk(outfile, 'PLTE', p) + p, t = make_palette_chunks(self.palette) + write_chunk(outfile, b'PLTE', p) if t: - # tRNS chunk is optional. Only needed if palette entries - # have alpha. 
- write_chunk(outfile, 'tRNS', t) + # tRNS chunk is optional; + # Only needed if palette entries have alpha. + write_chunk(outfile, b'tRNS', t) # http://www.w3.org/TR/PNG/#11tRNS if self.transparent is not None: if self.greyscale: - write_chunk(outfile, 'tRNS', - struct.pack("!1H", *self.transparent)) + fmt = "!1H" else: - write_chunk(outfile, 'tRNS', - struct.pack("!3H", *self.transparent)) + fmt = "!3H" + write_chunk(outfile, b'tRNS', + struct.pack(fmt, *self.transparent)) # http://www.w3.org/TR/PNG/#11bKGD if self.background is not None: if self.greyscale: - write_chunk(outfile, 'bKGD', - struct.pack("!1H", *self.background)) + fmt = "!1H" else: - write_chunk(outfile, 'bKGD', - struct.pack("!3H", *self.background)) + fmt = "!3H" + write_chunk(outfile, b'bKGD', + struct.pack(fmt, *self.background)) # http://www.w3.org/TR/PNG/#11pHYs - if self.x_pixels_per_unit is not None and self.y_pixels_per_unit is not None: - tup = (self.x_pixels_per_unit, self.y_pixels_per_unit, int(self.unit_is_meter)) - write_chunk(outfile, 'pHYs', struct.pack("!LLB",*tup)) - - # http://www.w3.org/TR/PNG/#11IDAT - if self.compression is not None: - compressor = zlib.compressobj(self.compression) - else: - compressor = zlib.compressobj() - - # Choose an extend function based on the bitdepth. The extend - # function packs/decomposes the pixel values into bytes and - # stuffs them onto the data array. - data = array('B') - if self.bitdepth == 8 or packed: - extend = data.extend - elif self.bitdepth == 16: - # Decompose into bytes - def extend(sl): - fmt = '!%dH' % len(sl) - data.extend(array('B', struct.pack(fmt, *sl))) - else: - # Pack into bytes - assert self.bitdepth < 8 - # samples per byte - spb = int(8/self.bitdepth) - def extend(sl): - a = array('B', sl) - # Adding padding bytes so we can group into a whole - # number of spb-tuples. - l = float(len(a)) - extra = math.ceil(l / float(spb))*spb - l - a.extend([0]*int(extra)) - # Pack into bytes - l = group(a, spb) - l = map(lambda e: reduce(lambda x,y: - (x << self.bitdepth) + y, e), l) - data.extend(l) - if self.rescale: - oldextend = extend - factor = \ - float(2**self.rescale[1]-1) / float(2**self.rescale[0]-1) - def extend(sl): - oldextend(map(lambda x: int(round(factor*x)), sl)) - - # Build the first row, testing mostly to see if we need to - # changed the extend function to cope with NumPy integer types - # (they cause our ordinary definition of extend to fail, so we - # wrap it). See - # http://code.google.com/p/pypng/issues/detail?id=44 - enumrows = enumerate(rows) - del rows - - # First row's filter type. - data.append(0) - # :todo: Certain exceptions in the call to ``.next()`` or the - # following try would indicate no row data supplied. - # Should catch. - i,row = enumrows.next() - try: - # If this fails... - extend(row) - except: - # ... try a version that converts the values to int first. - # Not only does this work for the (slightly broken) NumPy - # types, there are probably lots of other, unknown, "nearly" - # int types it works for. - def wrapmapint(f): - return lambda sl: f(map(int, sl)) - extend = wrapmapint(extend) - del wrapmapint - extend(row) - - for i,row in enumrows: - # Add "None" filter type. Currently, it's essential that - # this filter type be used for every scanline as we do not - # mark the first row of a reduced pass image; that means we - # could accidentally compute the wrong filtered scanline if - # we used "up", "average", or "paeth" on such a line. 
- data.append(0) - extend(row) - if len(data) > self.chunk_limit: - compressed = compressor.compress(tostring(data)) - if len(compressed): - write_chunk(outfile, 'IDAT', compressed) - # Because of our very witty definition of ``extend``, - # above, we must re-use the same ``data`` object. Hence - # we use ``del`` to empty this one, rather than create a - # fresh one (which would be my natural FP instinct). - del data[:] - if len(data): - compressed = compressor.compress(tostring(data)) - else: - compressed = strtobytes('') - flushed = compressor.flush() - if len(compressed) or len(flushed): - write_chunk(outfile, 'IDAT', compressed + flushed) - # http://www.w3.org/TR/PNG/#11IEND - write_chunk(outfile, 'IEND') - return i+1 + if (self.x_pixels_per_unit is not None and + self.y_pixels_per_unit is not None): + tup = (self.x_pixels_per_unit, + self.y_pixels_per_unit, + int(self.unit_is_meter)) + write_chunk(outfile, b'pHYs', struct.pack("!LLB", *tup)) def write_array(self, outfile, pixels): """ - Write an array in flat row flat pixel format as a PNG file on - the output file. See also :meth:`write` method. - """ - - if self.interlace: - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.array_scanlines(pixels)) - - def write_packed(self, outfile, rows): - """ - Write PNG file to `outfile`. The pixel data comes from `rows` - which should be in boxed row packed format. Each row should be - a sequence of packed bytes. - - Technically, this method does work for interlaced images but it - is best avoided. For interlaced images, the rows should be - presented in the order that they appear in the file. - - This method should not be used when the source image bit depth - is not one naturally supported by PNG; the bit depth should be - 1, 2, 4, 8, or 16. - """ - - if self.rescale: - raise Error("write_packed method not suitable for bit depth %d" % - self.rescale[0]) - return self.write_passes(outfile, rows, packed=True) - - def convert_pnm(self, infile, outfile): - """ - Convert a PNM file containing raw pixel data into a PNG file - with the parameters set in the writer object. Works for - (binary) PGM, PPM, and PAM formats. + Write an array that holds all the image values + as a PNG file on the output file. + See also :meth:`write` method. """ if self.interlace: - pixels = array('B') - pixels.fromfile(infile, - (self.bitdepth/8) * self.color_planes * - self.width * self.height) - self.write_passes(outfile, self.array_scanlines_interlace(pixels)) - else: - self.write_passes(outfile, self.file_scanlines(infile)) - - def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile): - """ - Convert a PPM and PGM file containing raw pixel data into a - PNG outfile with the parameters set in the writer object. - """ - pixels = array('B') - pixels.fromfile(ppmfile, - (self.bitdepth/8) * self.color_planes * - self.width * self.height) - apixels = array('B') - apixels.fromfile(pgmfile, - (self.bitdepth/8) * - self.width * self.height) - pixels = interleave_planes(pixels, apixels, - (self.bitdepth/8) * self.color_planes, - (self.bitdepth/8)) - if self.interlace: + if type(pixels) != array: + # Coerce to array type + fmt = 'BH'[self.bitdepth > 8] + pixels = array(fmt, pixels) self.write_passes(outfile, self.array_scanlines_interlace(pixels)) else: self.write_passes(outfile, self.array_scanlines(pixels)) - def file_scanlines(self, infile): - """ - Generates boxed rows in flat pixel format, from the input file - `infile`. 
It assumes that the input file is in a "Netpbm-like" - binary format, and is positioned at the beginning of the first - pixel. The number of pixels to read is taken from the image - dimensions (`width`, `height`, `planes`) and the number of bytes - per value is implied by the image `bitdepth`. - """ - - # Values per row - vpr = self.width * self.planes - row_bytes = vpr - if self.bitdepth > 8: - assert self.bitdepth == 16 - row_bytes *= 2 - fmt = '>%dH' % vpr - def line(): - return array('H', struct.unpack(fmt, infile.read(row_bytes))) - else: - def line(): - scanline = array('B', infile.read(row_bytes)) - return scanline - for y in range(self.height): - yield line() - def array_scanlines(self, pixels): """ - Generates boxed rows (flat pixels) from flat rows (flat pixels) - in an array. + Generates rows (each a sequence of values) from + a single array of values. """ # Values per row @@ -941,10 +853,10 @@ class Writer: def array_scanlines_interlace(self, pixels): """ - Generator for interlaced scanlines from an array. `pixels` is - the full source image in flat row flat pixel format. The - generator yields each scanline of the reduced passes in turn, in - boxed row flat pixel format. + Generator for interlaced scanlines from an array. + `pixels` is the full source image as a single array of values. + The generator yields each scanline of the reduced passes in turn, + each scanline being a sequence of values. """ # http://www.w3.org/TR/PNG/#8InterlaceMethods @@ -952,160 +864,225 @@ class Writer: fmt = 'BH'[self.bitdepth > 8] # Value per row vpr = self.width * self.planes - for xstart, ystart, xstep, ystep in _adam7: - if xstart >= self.width: - continue - # Pixels per row (of reduced image) - ppr = int(math.ceil((self.width-xstart)/float(xstep))) - # number of values in reduced image row. - row_len = ppr*self.planes - for y in range(ystart, self.height, ystep): + + # Each iteration generates a scanline starting at (x, y) + # and consisting of every xstep pixels. + for lines in adam7_generate(self.width, self.height): + for x, y, xstep in lines: + # Pixels per row (of reduced image) + ppr = int(math.ceil((self.width - x) / float(xstep))) + # Values per row (of reduced image) + reduced_row_len = ppr * self.planes if xstep == 1: + # Easy case: line is a simple slice. offset = y * vpr - yield pixels[offset:offset+vpr] - else: - row = array(fmt) - # There's no easier way to set the length of an array - row.extend(pixels[0:row_len]) - offset = y * vpr + xstart * self.planes - end_offset = (y+1) * vpr - skip = self.planes * xstep - for i in range(self.planes): - row[i::self.planes] = \ - pixels[offset+i:end_offset:skip] - yield row - -def write_chunk(outfile, tag, data=strtobytes('')): + yield pixels[offset: offset + vpr] + continue + # We have to step by xstep, + # which we can do one plane at a time + # using the step in Python slices. + row = array(fmt) + # There's no easier way to set the length of an array + row.extend(pixels[0:reduced_row_len]) + offset = y * vpr + x * self.planes + end_offset = (y + 1) * vpr + skip = self.planes * xstep + for i in range(self.planes): + row[i::self.planes] = \ + pixels[offset + i: end_offset: skip] + yield row + + +def write_chunk(outfile, tag, data=b''): """ Write a PNG chunk to the output file, including length and checksum. 
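
    The layout is the standard one from the PNG specification:
    a 4-byte big-endian length (counting *data* only), the 4-byte
    chunk *tag*, the data itself, and a CRC-32 computed over the tag
    and data (but not the length).  As an illustrative example,
    ``write_chunk(out, b'IEND')`` emits the familiar 12-byte trailer
    ``00 00 00 00 49 45 4e 44 ae 42 60 82``.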
""" + data = bytes(data) # http://www.w3.org/TR/PNG/#5Chunk-layout outfile.write(struct.pack("!I", len(data))) - tag = strtobytes(tag) outfile.write(tag) outfile.write(data) checksum = zlib.crc32(tag) checksum = zlib.crc32(data, checksum) - checksum &= 2**32-1 + checksum &= 2 ** 32 - 1 outfile.write(struct.pack("!I", checksum)) + def write_chunks(out, chunks): """Create a PNG file by writing out the chunks.""" - out.write(_signature) + out.write(signature) for chunk in chunks: write_chunk(out, *chunk) -def filter_scanline(type, line, fo, prev=None): - """Apply a scanline filter to a scanline. `type` specifies the - filter type (0 to 4); `line` specifies the current (unfiltered) - scanline as a sequence of bytes; `prev` specifies the previous - (unfiltered) scanline as a sequence of bytes. `fo` specifies the - filter offset; normally this is size of a pixel in bytes (the number - of bytes per sample times the number of channels), but when this is - < 1 (for bit depths < 8) then the filter offset is 1. + +def rescale_rows(rows, rescale): + """ + Take each row in rows (an iterator) and yield + a fresh row with the pixels scaled according to + the rescale parameters in the list `rescale`. + Each element of `rescale` is a tuple of + (source_bitdepth, target_bitdepth), + with one element per channel. """ - assert 0 <= type < 5 - - # The output array. Which, pathetically, we extend one-byte at a - # time (fortunately this is linear). - out = array('B', [type]) - - def sub(): - ai = -fo - for x in line: - if ai >= 0: - x = (x - line[ai]) & 0xff - out.append(x) - ai += 1 - def up(): - for i,x in enumerate(line): - x = (x - prev[i]) & 0xff - out.append(x) - def average(): - ai = -fo - for i,x in enumerate(line): - if ai >= 0: - x = (x - ((line[ai] + prev[i]) >> 1)) & 0xff - else: - x = (x - (prev[i] >> 1)) & 0xff - out.append(x) - ai += 1 - def paeth(): - # http://www.w3.org/TR/PNG/#9Filter-type-4-Paeth - ai = -fo # also used for ci - for i,x in enumerate(line): - a = 0 - b = prev[i] - c = 0 - - if ai >= 0: - a = line[ai] - c = prev[ai] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - Pr = a - elif pb <= pc: - Pr = b - else: - Pr = c - - x = (x - Pr) & 0xff - out.append(x) - ai += 1 - - if not prev: - # We're on the first line. Some of the filters can be reduced - # to simpler cases which makes handling the line "off the top" - # of the image simpler. "up" becomes "none"; "paeth" becomes - # "left" (non-trivial, but true). "average" needs to be handled - # specially. - if type == 2: # "up" - type = 0 - elif type == 3: - prev = [0]*len(line) - elif type == 4: # "paeth" - type = 1 - if type == 0: - out.extend(line) - elif type == 1: - sub() - elif type == 2: - up() - elif type == 3: - average() - else: # type == 4 - paeth() - return out + # One factor for each channel + fs = [float(2 ** s[1] - 1)/float(2 ** s[0] - 1) + for s in rescale] + + # Assume all target_bitdepths are the same + target_bitdepths = set(s[1] for s in rescale) + assert len(target_bitdepths) == 1 + (target_bitdepth, ) = target_bitdepths + typecode = 'BH'[target_bitdepth > 8] + + # Number of channels + n_chans = len(rescale) + + for row in rows: + rescaled_row = array(typecode, iter(row)) + for i in range(n_chans): + channel = array( + typecode, + (int(round(fs[i] * x)) for x in row[i::n_chans])) + rescaled_row[i::n_chans] = channel + yield rescaled_row + + +def pack_rows(rows, bitdepth): + """Yield packed rows that are a byte array. 
+ Each byte is packed with the values from several pixels. + """ + + assert bitdepth < 8 + assert 8 % bitdepth == 0 + + # samples per byte + spb = int(8 / bitdepth) + + def make_byte(block): + """Take a block of (2, 4, or 8) values, + and pack them into a single byte. + """ + + res = 0 + for v in block: + res = (res << bitdepth) + v + return res + + for row in rows: + a = bytearray(row) + # Adding padding bytes so we can group into a whole + # number of spb-tuples. + n = float(len(a)) + extra = math.ceil(n / spb) * spb - n + a.extend([0] * int(extra)) + # Pack into bytes. + # Each block is the samples for one byte. + blocks = group(a, spb) + yield bytearray(make_byte(block) for block in blocks) + + +def unpack_rows(rows): + """Unpack each row from being 16-bits per value, + to being a sequence of bytes. + """ + for row in rows: + fmt = '!%dH' % len(row) + yield bytearray(struct.pack(fmt, *row)) + + +def make_palette_chunks(palette): + """ + Create the byte sequences for a ``PLTE`` and + if necessary a ``tRNS`` chunk. + Returned as a pair (*p*, *t*). + *t* will be ``None`` if no ``tRNS`` chunk is necessary. + """ + + p = bytearray() + t = bytearray() + + for x in palette: + p.extend(x[0:3]) + if len(x) > 3: + t.append(x[3]) + if t: + return p, t + return p, None + + +def check_bitdepth_rescale( + palette, bitdepth, transparent, alpha, greyscale): + """ + Returns (bitdepth, rescale) pair. + """ + + if palette: + if len(bitdepth) != 1: + raise ProtocolError( + "with palette, only a single bitdepth may be used") + (bitdepth, ) = bitdepth + if bitdepth not in (1, 2, 4, 8): + raise ProtocolError( + "with palette, bitdepth must be 1, 2, 4, or 8") + if transparent is not None: + raise ProtocolError("transparent and palette not compatible") + if alpha: + raise ProtocolError("alpha and palette not compatible") + if greyscale: + raise ProtocolError("greyscale and palette not compatible") + return bitdepth, None + + # No palette, check for sBIT chunk generation. + + if greyscale and not alpha: + # Single channel, L. + (bitdepth,) = bitdepth + if bitdepth in (1, 2, 4, 8, 16): + return bitdepth, None + if bitdepth > 8: + targetbitdepth = 16 + elif bitdepth == 3: + targetbitdepth = 4 + else: + assert bitdepth in (5, 6, 7) + targetbitdepth = 8 + return targetbitdepth, [(bitdepth, targetbitdepth)] + + assert alpha or not greyscale + + depth_set = tuple(set(bitdepth)) + if depth_set in [(8,), (16,)]: + # No sBIT required. + (bitdepth, ) = depth_set + return bitdepth, None + + targetbitdepth = (8, 16)[max(bitdepth) > 8] + return targetbitdepth, [(b, targetbitdepth) for b in bitdepth] + + +# Regex for decoding mode string +RegexModeDecode = re.compile("(LA?|RGBA?);?([0-9]*)", flags=re.IGNORECASE) def from_array(a, mode=None, info={}): - """Create a PNG :class:`Image` object from a 2- or 3-dimensional - array. One application of this function is easy PIL-style saving: + """ + Create a PNG :class:`Image` object from a 2-dimensional array. + One application of this function is easy PIL-style saving: ``png.from_array(pixels, 'L').save('foo.png')``. - .. note : - - The use of the term *3-dimensional* is for marketing purposes - only. It doesn't actually work. Please bear with us. Meanwhile - enjoy the complimentary snacks (on request) and please use a - 2-dimensional array. - - Unless they are specified using the *info* parameter, the PNG's - height and width are taken from the array size. 
For a 3 dimensional - array the first axis is the height; the second axis is the width; - and the third axis is the channel number. Thus an RGB image that is - 16 pixels high and 8 wide will use an array that is 16x8x3. For 2 - dimensional arrays the first axis is the height, but the second axis - is ``width*channels``, so an RGB image that is 16 pixels high and 8 - wide will use a 2-dimensional array that is 16x24 (each row will be - 8*3==24 sample values). + Unless they are specified using the *info* parameter, + the PNG's height and width are taken from the array size. + The first axis is the height; the second axis is the + ravelled width and channel index. + The array is treated is a sequence of rows, + each row being a sequence of values (``width*channels`` in number). + So an RGB image that is 16 pixels high and 8 wide will + occupy a 2-dimensional array that is 16x24 + (each row will be 8*3 = 24 sample values). *mode* is a string that specifies the image colour format in a PIL-style mode. It can be: @@ -1119,54 +1096,59 @@ def from_array(a, mode=None, info={}): ``'RGBA'`` colour image with alpha (4 channel) - The mode string can also specify the bit depth (overriding how this - function normally derives the bit depth, see below). Appending - ``';16'`` to the mode will cause the PNG to be 16 bits per channel; + The mode string can also specify the bit depth + (overriding how this function normally derives the bit depth, + see below). + Appending ``';16'`` to the mode will cause the PNG to be + 16 bits per channel; any decimal from 1 to 16 can be used to specify the bit depth. When a 2-dimensional array is used *mode* determines how many channels the image has, and so allows the width to be derived from the second array dimension. - The array is expected to be a ``numpy`` array, but it can be any - suitable Python sequence. For example, a list of lists can be used: - ``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``. The exact - rules are: ``len(a)`` gives the first dimension, height; - ``len(a[0])`` gives the second dimension; ``len(a[0][0])`` gives the - third dimension, unless an exception is raised in which case a - 2-dimensional array is assumed. It's slightly more complicated than - that because an iterator of rows can be used, and it all still - works. Using an iterator allows data to be streamed efficiently. - - The bit depth of the PNG is normally taken from the array element's - datatype (but if *mode* specifies a bitdepth then that is used - instead). The array element's datatype is determined in a way which + The array is expected to be a ``numpy`` array, + but it can be any suitable Python sequence. + For example, a list of lists can be used: + ``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``. + The exact rules are: ``len(a)`` gives the first dimension, height; + ``len(a[0])`` gives the second dimension. + It's slightly more complicated than that because + an iterator of rows can be used, and it all still works. + Using an iterator allows data to be streamed efficiently. + + The bit depth of the PNG is normally taken from + the array element's datatype + (but if *mode* specifies a bitdepth then that is used instead). + The array element's datatype is determined in a way which is supposed to work both for ``numpy`` arrays and for Python - ``array.array`` objects. A 1 byte datatype will give a bit depth of - 8, a 2 byte datatype will give a bit depth of 16. 
If the datatype - does not have an implicit size, for example it is a plain Python - list of lists, as above, then a default of 8 is used. - - The *info* parameter is a dictionary that can be used to specify - metadata (in the same style as the arguments to the - :class:``png.Writer`` class). For this function the keys that are - useful are: - + ``array.array`` objects. + A 1 byte datatype will give a bit depth of 8, + a 2 byte datatype will give a bit depth of 16. + If the datatype does not have an implicit size, + like the above example where it is a plain Python list of lists, + then a default of 8 is used. + + The *info* parameter is a dictionary that can + be used to specify metadata (in the same style as + the arguments to the :class:`png.Writer` class). + For this function the keys that are useful are: + height - overrides the height derived from the array dimensions and allows - *a* to be an iterable. + overrides the height derived from the array dimensions and + allows *a* to be an iterable. width overrides the width derived from the array dimensions. bitdepth - overrides the bit depth derived from the element datatype (but - must match *mode* if that also specifies a bit depth). + overrides the bit depth derived from the element datatype + (but must match *mode* if that also specifies a bit depth). - Generally anything specified in the - *info* dictionary will override any implicit choices that this - function would otherwise make, but must match any explicit ones. + Generally anything specified in the *info* dictionary will + override any implicit choices that this function would otherwise make, + but must match any explicit ones. For example, if the *info* dictionary has a ``greyscale`` key then - this must be true when mode is ``'L'`` or ``'LA'`` and false when - mode is ``'RGB'`` or ``'RGBA'``. + this must be true when mode is ``'L'`` or ``'LA'`` and + false when mode is ``'RGB'`` or ``'RGBA'``. """ # We abuse the *info* parameter by modifying it. Take a copy here. @@ -1174,69 +1156,51 @@ def from_array(a, mode=None, info={}): info = dict(info) # Syntax check mode string. - bitdepth = None - try: - # Assign the 'L' or 'RGBA' part to `gotmode`. - if mode.startswith('L'): - gotmode = 'L' - mode = mode[1:] - elif mode.startswith('RGB'): - gotmode = 'RGB' - mode = mode[3:] - else: - raise Error() - if mode.startswith('A'): - gotmode += 'A' - mode = mode[1:] + match = RegexModeDecode.match(mode) + if not match: + raise Error("mode string should be 'RGB' or 'L;16' or similar.") - # Skip any optional ';' - while mode.startswith(';'): - mode = mode[1:] + mode, bitdepth = match.groups() + if bitdepth: + bitdepth = int(bitdepth) - # Parse optional bitdepth - if mode: - try: - bitdepth = int(mode) - except (TypeError, ValueError): - raise Error() - except Error: - raise Error("mode string should be 'RGB' or 'L;16' or similar.") - mode = gotmode + # Colour format. + if 'greyscale' in info: + if bool(info['greyscale']) != ('L' in mode): + raise ProtocolError("info['greyscale'] should match mode.") + info['greyscale'] = 'L' in mode + + alpha = 'A' in mode + if 'alpha' in info: + if bool(info['alpha']) != alpha: + raise ProtocolError("info['alpha'] should match mode.") + info['alpha'] = alpha # Get bitdepth from *mode* if possible. if bitdepth: - if info.get('bitdepth') and bitdepth != info['bitdepth']: - raise Error("mode bitdepth (%d) should match info bitdepth (%d)." 
% - (bitdepth, info['bitdepth'])) + if info.get("bitdepth") and bitdepth != info['bitdepth']: + raise ProtocolError( + "bitdepth (%d) should match bitdepth of info (%d)." % + (bitdepth, info['bitdepth'])) info['bitdepth'] = bitdepth # Fill in and/or check entries in *info*. # Dimensions. - if 'size' in info: - # Check width, height, size all match where used. - for dimension,axis in [('width', 0), ('height', 1)]: - if dimension in info: - if info[dimension] != info['size'][axis]: - raise Error( - "info[%r] should match info['size'][%r]." % - (dimension, axis)) - info['width'],info['height'] = info['size'] - if 'height' not in info: + width, height = check_sizes( + info.get("size"), + info.get("width"), + info.get("height")) + if width: + info["width"] = width + if height: + info["height"] = height + + if "height" not in info: try: - l = len(a) + info['height'] = len(a) except TypeError: - raise Error( - "len(a) does not work, supply info['height'] instead.") - info['height'] = l - # Colour format. - if 'greyscale' in info: - if bool(info['greyscale']) != ('L' in mode): - raise Error("info['greyscale'] should match mode.") - info['greyscale'] = 'L' in mode - if 'alpha' in info: - if bool(info['alpha']) != ('A' in mode): - raise Error("info['alpha'] should match mode.") - info['alpha'] = 'A' in mode + raise ProtocolError( + "len(a) does not work, supply info['height'] instead.") planes = len(mode) if 'planes' in info: @@ -1246,26 +1210,15 @@ def from_array(a, mode=None, info={}): # In order to work out whether we the array is 2D or 3D we need its # first row, which requires that we take a copy of its iterator. # We may also need the first row to derive width and bitdepth. - a,t = itertools.tee(a) - row = t.next() + a, t = itertools.tee(a) + row = next(t) del t - try: - row[0][0] - threed = True - testelement = row[0] - except (IndexError, TypeError): - threed = False - testelement = row + + testelement = row if 'width' not in info: - if threed: - width = len(row) - else: - width = len(row) // planes + width = len(row) // planes info['width'] = width - # Not implemented yet - assert not threed - if 'bitdepth' not in info: try: dtype = testelement.dtype @@ -1275,25 +1228,28 @@ def from_array(a, mode=None, info={}): # Try a Python array.array. bitdepth = 8 * testelement.itemsize except AttributeError: - # We can't determine it from the array element's - # datatype, use a default of 8. + # We can't determine it from the array element's datatype, + # use a default of 8. bitdepth = 8 else: - # If we got here without exception, we now assume that - # the array is a numpy array. + # If we got here without exception, + # we now assume that the array is a numpy array. if dtype.kind == 'b': bitdepth = 1 else: bitdepth = 8 * dtype.itemsize info['bitdepth'] = bitdepth - for thing in 'width height bitdepth greyscale alpha'.split(): + for thing in ["width", "height", "bitdepth", "greyscale", "alpha"]: assert thing in info + return Image(a, info) + # So that refugee's from PIL feel more at home. Not documented. fromarray = from_array + class Image: """A PNG image. You can create an :class:`Image` object from an array of pixels by calling :meth:`png.from_array`. It can be @@ -1303,78 +1259,70 @@ class Image: def __init__(self, rows, info): """ .. note :: - + The constructor is not public. Please do not call it. """ - + self.rows = rows self.info = info def save(self, file): - """Save the image to *file*. 
If *file* looks like an open file - descriptor then it is used, otherwise it is treated as a - filename and a fresh file is opened. - - In general, you can only call this method once; after it has - been called the first time and the PNG image has been saved, the - source data will have been streamed, and cannot be streamed - again. + """Save the image to the named *file*. + + See `.write()` if you already have an open file object. + + In general, you can only call this method once; + after it has been called the first time the PNG image is written, + the source data will have been streamed, and + cannot be streamed again. """ w = Writer(**self.info) - try: - file.write - def close(): pass - except AttributeError: - file = open(file, 'wb') - def close(): file.close() + with open(file, 'wb') as fd: + w.write(fd, self.rows) - try: - w.write(file, self.rows) - finally: - close() + def write(self, file): + """Write the image to the open file object. -class _readable: - """ - A simple file-like interface for strings and arrays. - """ + See `.save()` if you have a filename. - def __init__(self, buf): - self.buf = buf - self.offset = 0 + In general, you can only call this method once; + after it has been called the first time the PNG image is written, + the source data will have been streamed, and + cannot be streamed again. + """ - def read(self, n): - r = self.buf[self.offset:self.offset+n] - if isarray(r): - r = r.tostring() - self.offset += n - return r + w = Writer(**self.info) + w.write(file, self.rows) class Reader: """ - PNG decoder in pure Python. + Pure Python PNG decoder in pure Python. """ - def __init__(self, _guess=None, **kw): + def __init__(self, _guess=None, filename=None, file=None, bytes=None): """ - Create a PNG decoder object. - - The constructor expects exactly one keyword argument. If you - supply a positional argument instead, it will guess the input - type. You can choose among the following keyword arguments: + The constructor expects exactly one keyword argument. + If you supply a positional argument instead, + it will guess the input type. + Choose from the following keyword arguments: filename Name of input file (a PNG file). file A file-like object (object with a read() method). bytes - ``array`` or ``string`` with PNG data. + ``bytes`` or ``bytearray`` with PNG data. """ - if ((_guess is not None and len(kw) != 0) or - (_guess is None and len(kw) != 1)): + keywords_supplied = ( + (_guess is not None) + + (filename is not None) + + (file is not None) + + (bytes is not None)) + if keywords_supplied != 1: raise TypeError("Reader() takes exactly 1 argument") # Will be the first 8 bytes, later on. See validate_signature. @@ -1382,80 +1330,76 @@ class Reader: self.transparent = None # A pair of (len,type) if a chunk has been read but its data and # checksum have not (in other words the file position is just - # past the 4 bytes that specify the chunk type). See preamble - # method for how this is used. + # past the 4 bytes that specify the chunk type). + # See preamble method for how this is used. 
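        # For example (illustrative): once _chunk_len_type() has read
        # the 8-byte header of the IHDR chunk, atchunk holds the pair
        # (13, b'IHDR') until chunk() consumes the 13-byte payload and
        # the 4-byte CRC that follow it.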
self.atchunk = None if _guess is not None: if isarray(_guess): - kw["bytes"] = _guess + bytes = _guess elif isinstance(_guess, str): - kw["filename"] = _guess + filename = _guess elif hasattr(_guess, 'read'): - kw["file"] = _guess - - if "filename" in kw: - self.file = open(kw["filename"], "rb") - elif "file" in kw: - self.file = kw["file"] - elif "bytes" in kw: - self.file = _readable(kw["bytes"]) + file = _guess + + if bytes is not None: + self.file = io.BytesIO(bytes) + elif filename is not None: + self.file = open(filename, "rb") + elif file is not None: + self.file = file else: - raise TypeError("expecting filename, file or bytes array") - + raise ProtocolError("expecting filename, file or bytes array") - def chunk(self, seek=None, lenient=False): + def chunk(self, lenient=False): """ - Read the next PNG chunk from the input file; returns a - (*type*,*data*) tuple. *type* is the chunk's type as a string - (all PNG chunk types are 4 characters long). *data* is the - chunk's data content, as a string. + Read the next PNG chunk from the input file; + returns a (*type*, *data*) tuple. + *type* is the chunk's type as a byte string + (all PNG chunk types are 4 bytes long). + *data* is the chunk's data content, as a byte string. - If the optional `seek` argument is - specified then it will keep reading chunks until it either runs - out of file or finds the type specified by the argument. Note - that in general the order of chunks in PNGs is unspecified, so - using `seek` can cause you to miss chunks. - - If the optional `lenient` argument evaluates to True, + If the optional `lenient` argument evaluates to `True`, checksum failures will raise warnings rather than exceptions. """ self.validate_signature() - while True: - # http://www.w3.org/TR/PNG/#5Chunk-layout - if not self.atchunk: - self.atchunk = self.chunklentype() - length,type = self.atchunk - self.atchunk = None - data = self.file.read(length) - if len(data) != length: - raise ChunkError('Chunk %s too short for required %i octets.' - % (type, length)) - checksum = self.file.read(4) - if len(checksum) != 4: - raise ChunkError('Chunk %s too short for checksum.' % type) - if seek and type != seek: - continue - verify = zlib.crc32(strtobytes(type)) - verify = zlib.crc32(data, verify) - # Whether the output from zlib.crc32 is signed or not varies - # according to hideous implementation details, see - # http://bugs.python.org/issue1202 . - # We coerce it to be positive here (in a way which works on - # Python 2.3 and older). - verify &= 2**32 - 1 - verify = struct.pack('!I', verify) - if checksum != verify: - (a, ) = struct.unpack('!I', checksum) - (b, ) = struct.unpack('!I', verify) - message = "Checksum error in %s chunk: 0x%08X != 0x%08X." % (type, a, b) - if lenient: - warnings.warn(message, RuntimeWarning) - else: - raise ChunkError(message) - return type, data + # http://www.w3.org/TR/PNG/#5Chunk-layout + if not self.atchunk: + self.atchunk = self._chunk_len_type() + if not self.atchunk: + raise ChunkError("No more chunks.") + length, type = self.atchunk + self.atchunk = None + + data = self.file.read(length) + if len(data) != length: + raise ChunkError( + 'Chunk %s too short for required %i octets.' + % (type, length)) + checksum = self.file.read(4) + if len(checksum) != 4: + raise ChunkError('Chunk %s too short for checksum.' 
% type) + verify = zlib.crc32(type) + verify = zlib.crc32(data, verify) + # Whether the output from zlib.crc32 is signed or not varies + # according to hideous implementation details, see + # http://bugs.python.org/issue1202 . + # We coerce it to be positive here (in a way which works on + # Python 2.3 and older). + verify &= 2**32 - 1 + verify = struct.pack('!I', verify) + if checksum != verify: + (a, ) = struct.unpack('!I', checksum) + (b, ) = struct.unpack('!I', verify) + message = ("Checksum error in %s chunk: 0x%08X != 0x%08X." + % (type.decode('ascii'), a, b)) + if lenient: + warnings.warn(message, RuntimeWarning) + else: + raise ChunkError(message) + return type, data def chunks(self): """Return an iterator that will yield each chunk as a @@ -1463,38 +1407,40 @@ class Reader: """ while True: - t,v = self.chunk() - yield t,v - if t == 'IEND': + t, v = self.chunk() + yield t, v + if t == b'IEND': break def undo_filter(self, filter_type, scanline, previous): - """Undo the filter for a scanline. `scanline` is a sequence of - bytes that does not include the initial filter type byte. - `previous` is decoded previous scanline (for straightlaced - images this is the previous pixel row, but for interlaced - images, it is the previous scanline in the reduced image, which - in general is not the previous pixel row in the final image). - When there is no previous scanline (the first row of a - straightlaced image, or the first row in one of the passes in an - interlaced image), then this argument should be ``None``. - - The scanline will have the effects of filtering removed, and the - result will be returned as a fresh sequence of bytes. + """ + Undo the filter for a scanline. + `scanline` is a sequence of bytes that + does not include the initial filter type byte. + `previous` is decoded previous scanline + (for straightlaced images this is the previous pixel row, + but for interlaced images, it is + the previous scanline in the reduced image, + which in general is not the previous pixel row in the final image). + When there is no previous scanline + (the first row of a straightlaced image, + or the first row in one of the passes in an interlaced image), + then this argument should be ``None``. + + The scanline will have the effects of filtering removed; + the result will be returned as a fresh sequence of bytes. """ # :todo: Would it be better to update scanline in place? - # Yes, with the Cython extension making the undo_filter fast, - # updating scanline inplace makes the code 3 times faster - # (reading 50 images of 800x800 went from 40s to 16s) result = scanline if filter_type == 0: return result - if filter_type not in (1,2,3,4): - raise FormatError('Invalid PNG Filter Type.' - ' See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters .') + if filter_type not in (1, 2, 3, 4): + raise FormatError( + 'Invalid PNG Filter Type. ' + 'See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters .') # Filter unit. The stride from one pixel to the corresponding # byte from the previous pixel. Normally this is the pixel @@ -1507,250 +1453,176 @@ class Reader: # first line 'up' is the same as 'null', 'paeth' is the same # as 'sub', with only 'average' requiring any special case. if not previous: - previous = array('B', [0]*len(scanline)) - - def sub(): - """Undo sub filter.""" - - ai = 0 - # Loop starts at index fu. Observe that the initial part - # of the result is already filled in correctly with - # scanline. 
- for i in range(fu, len(result)): - x = scanline[i] - a = result[ai] - result[i] = (x + a) & 0xff - ai += 1 - - def up(): - """Undo up filter.""" - - for i in range(len(result)): - x = scanline[i] - b = previous[i] - result[i] = (x + b) & 0xff - - def average(): - """Undo average filter.""" - - ai = -fu - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = 0 - else: - a = result[ai] - b = previous[i] - result[i] = (x + ((a + b) >> 1)) & 0xff - ai += 1 - - def paeth(): - """Undo Paeth filter.""" - - # Also used for ci. - ai = -fu - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = c = 0 - else: - a = result[ai] - c = previous[ai] - b = previous[i] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - pr = a - elif pb <= pc: - pr = b - else: - pr = c - result[i] = (x + pr) & 0xff - ai += 1 + previous = bytearray([0] * len(scanline)) # Call appropriate filter algorithm. Note that 0 has already # been dealt with. - (None, - pngfilters.undo_filter_sub, - pngfilters.undo_filter_up, - pngfilters.undo_filter_average, - pngfilters.undo_filter_paeth)[filter_type](fu, scanline, previous, result) + fn = (None, + undo_filter_sub, + undo_filter_up, + undo_filter_average, + undo_filter_paeth)[filter_type] + fn(fu, scanline, previous, result) return result - def deinterlace(self, raw): + def _deinterlace(self, raw): """ Read raw pixel data, undo filters, deinterlace, and flatten. - Return in flat row flat pixel format. + Return a single array of values. """ # Values per row (of the target image) vpr = self.width * self.planes - # Make a result array, and make it big enough. Interleaving - # writes to the output array randomly (well, not quite), so the - # entire output array must be in memory. - fmt = 'BH'[self.bitdepth > 8] - a = array(fmt, [0]*vpr*self.height) + # Values per image + vpi = vpr * self.height + # Interleaving writes to the output array randomly + # (well, not quite), so the entire output array must be in memory. + # Make a result array, and make it big enough. + if self.bitdepth > 8: + a = array('H', [0] * vpi) + else: + a = bytearray([0] * vpi) source_offset = 0 - for xstart, ystart, xstep, ystep in _adam7: - if xstart >= self.width: - continue - # The previous (reconstructed) scanline. None at the - # beginning of a pass to indicate that there is no previous - # line. + for lines in adam7_generate(self.width, self.height): + # The previous (reconstructed) scanline. + # `None` at the beginning of a pass + # to indicate that there is no previous line. recon = None - # Pixels per row (reduced pass image) - ppr = int(math.ceil((self.width-xstart)/float(xstep))) - # Row size in bytes for this pass. - row_size = int(math.ceil(self.psize * ppr)) - for y in range(ystart, self.height, ystep): + for x, y, xstep in lines: + # Pixels per row (reduced pass image) + ppr = int(math.ceil((self.width - x) / float(xstep))) + # Row size in bytes for this pass. 
+ row_size = int(math.ceil(self.psize * ppr)) + filter_type = raw[source_offset] source_offset += 1 - scanline = raw[source_offset:source_offset+row_size] + scanline = raw[source_offset: source_offset + row_size] source_offset += row_size recon = self.undo_filter(filter_type, scanline, recon) # Convert so that there is one element per pixel value - flat = self.serialtoflat(recon, ppr) + flat = self._bytes_to_values(recon, width=ppr) if xstep == 1: - assert xstart == 0 + assert x == 0 offset = y * vpr - a[offset:offset+vpr] = flat + a[offset: offset + vpr] = flat else: - offset = y * vpr + xstart * self.planes - end_offset = (y+1) * vpr + offset = y * vpr + x * self.planes + end_offset = (y + 1) * vpr skip = self.planes * xstep for i in range(self.planes): - a[offset+i:end_offset:skip] = \ - flat[i::self.planes] + a[offset + i: end_offset: skip] = \ + flat[i:: self.planes] + return a - def iterboxed(self, rows): - """Iterator that yields each scanline in boxed row flat pixel - format. `rows` should be an iterator that yields the bytes of - each row in turn. + def _iter_bytes_to_values(self, byte_rows): + """ + Iterator that yields each scanline; + each scanline being a sequence of values. + `byte_rows` should be an iterator that yields + the bytes of each row in turn. """ - def asvalues(raw): - """Convert a row of raw bytes into a flat row. Result will - be a freshly allocated object, not shared with - argument. - """ + for row in byte_rows: + yield self._bytes_to_values(row) - if self.bitdepth == 8: - return array('B', raw) - if self.bitdepth == 16: - raw = tostring(raw) - return array('H', struct.unpack('!%dH' % (len(raw)//2), raw)) - assert self.bitdepth < 8 - width = self.width - # Samples per byte - spb = 8//self.bitdepth - out = array('B') - mask = 2**self.bitdepth - 1 - shifts = list(map(self.bitdepth.__mul__, reversed(range(spb)))) - for o in raw: - out.extend(list(map(lambda i: mask&(o>>i), shifts))) - return out[:width] - return itertools.imap(asvalues, rows) - - def serialtoflat(self, bytes, width=None): - """Convert serial format (byte stream) pixel data to flat row - flat pixel. + def _bytes_to_values(self, bs, width=None): + """Convert a packed row of bytes into a row of values. + Result will be a freshly allocated object, + not shared with the argument. """ if self.bitdepth == 8: - return bytes + return bytearray(bs) if self.bitdepth == 16: - bytes = tostring(bytes) return array('H', - struct.unpack('!%dH' % (len(bytes)//2), bytes)) + struct.unpack('!%dH' % (len(bs) // 2), bs)) + assert self.bitdepth < 8 if width is None: width = self.width # Samples per byte - spb = 8//self.bitdepth - out = array('B') + spb = 8 // self.bitdepth + out = bytearray() mask = 2**self.bitdepth - 1 - shifts = map(self.bitdepth.__mul__, reversed(range(spb))) - l = width - for o in bytes: - out.extend([(mask&(o>>s)) for s in shifts][:l]) - l -= spb - if l <= 0: - l = width - return out - - def iterstraight(self, raw): - """Iterator that undoes the effect of filtering, and yields - each row in serialised format (as a sequence of bytes). - Assumes input is straightlaced. `raw` should be an iterable - that yields the raw bytes in chunks of arbitrary size. + shifts = [self.bitdepth * i + for i in reversed(list(range(spb)))] + for o in bs: + out.extend([mask & (o >> i) for i in shifts]) + return out[:width] + + def _iter_straight_packed(self, byte_blocks): + """Iterator that undoes the effect of filtering; + yields each row as a sequence of packed bytes. + Assumes input is straightlaced. 
+ `byte_blocks` should be an iterable that yields the raw bytes + in blocks of arbitrary size. """ # length of row, in bytes rb = self.row_bytes - a = array('B') - # The previous (reconstructed) scanline. None indicates first - # line of image. + a = bytearray() + # The previous (reconstructed) scanline. + # None indicates first line of image. recon = None - for some in raw: - a.extend(some) + for some_bytes in byte_blocks: + a.extend(some_bytes) while len(a) >= rb + 1: filter_type = a[0] - scanline = a[1:rb+1] - del a[:rb+1] + scanline = a[1: rb + 1] + del a[: rb + 1] recon = self.undo_filter(filter_type, scanline, recon) yield recon if len(a) != 0: # :file:format We get here with a file format error: # when the available bytes (after decompressing) do not # pack into exact rows. - raise FormatError( - 'Wrong size for decompressed IDAT chunk.') + raise FormatError('Wrong size for decompressed IDAT chunk.') assert len(a) == 0 def validate_signature(self): - """If signature (header) has not been read then read and + """ + If signature (header) has not been read then read and validate it; otherwise do nothing. """ if self.signature: return self.signature = self.file.read(8) - if self.signature != _signature: + if self.signature != signature: raise FormatError("PNG file has invalid signature.") def preamble(self, lenient=False): """ - Extract the image metadata by reading the initial part of - the PNG file up to the start of the ``IDAT`` chunk. All the - chunks that precede the ``IDAT`` chunk are read and either - processed for metadata or discarded. + Extract the image metadata by reading + the initial part of the PNG file up to + the start of the ``IDAT`` chunk. + All the chunks that precede the ``IDAT`` chunk are + read and either processed for metadata or discarded. - If the optional `lenient` argument evaluates to True, checksum - failures will raise warnings rather than exceptions. + If the optional `lenient` argument evaluates to `True`, + checksum failures will raise warnings rather than exceptions. """ self.validate_signature() while True: if not self.atchunk: - self.atchunk = self.chunklentype() + self.atchunk = self._chunk_len_type() if self.atchunk is None: - raise FormatError( - 'This PNG file has no IDAT chunks.') - if self.atchunk[1] == 'IDAT': + raise FormatError('This PNG file has no IDAT chunks.') + if self.atchunk[1] == b'IDAT': return self.process_chunk(lenient=lenient) - def chunklentype(self): - """Reads just enough of the input to determine the next - chunk's length and type, returned as a (*length*, *type*) pair - where *type* is a string. If there are no more chunks, ``None`` - is returned. + def _chunk_len_type(self): + """ + Reads just enough of the input to + determine the next chunk's length and type; + return a (*length*, *type*) pair where *type* is a byte sequence. + If there are no more chunks, ``None`` is returned. """ x = self.file.read(8) @@ -1758,24 +1630,32 @@ class Reader: return None if len(x) != 8: raise FormatError( - 'End of file whilst reading chunk length and type.') - length,type = struct.unpack('!I4s', x) - type = bytestostr(type) - if length > 2**31-1: - raise FormatError('Chunk %s is too large: %d.' % (type,length)) - return length,type + 'End of file whilst reading chunk length and type.') + length, type = struct.unpack('!I4s', x) + if length > 2 ** 31 - 1: + raise FormatError('Chunk %s is too large: %d.' % (type, length)) + # Check that all bytes are in valid ASCII range. 
+ # https://www.w3.org/TR/2003/REC-PNG-20031110/#5Chunk-layout + type_bytes = set(bytearray(type)) + if not(type_bytes <= set(range(65, 91)) | set(range(97, 123))): + raise FormatError( + 'Chunk %r has invalid Chunk Type.' + % list(type)) + return length, type def process_chunk(self, lenient=False): - """Process the next chunk and its data. This only processes the - following chunk types, all others are ignored: ``IHDR``, - ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``, ``pHYs``. + """ + Process the next chunk and its data. + This only processes the following chunk types: + ``IHDR``, ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``, ``pHYs``. + All other chunk types are ignored. - If the optional `lenient` argument evaluates to True, + If the optional `lenient` argument evaluates to `True`, checksum failures will raise warnings rather than exceptions. """ type, data = self.chunk(lenient=lenient) - method = '_process_' + type + method = '_process_' + type.decode('ascii') m = getattr(self, method, None) if m: m(data) @@ -1791,22 +1671,26 @@ class Reader: check_bitdepth_colortype(self.bitdepth, self.color_type) if self.compression != 0: - raise Error("unknown compression method %d" % self.compression) + raise FormatError( + "Unknown compression method %d" % self.compression) if self.filter != 0: - raise FormatError("Unknown filter method %d," - " see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ." - % self.filter) - if self.interlace not in (0,1): - raise FormatError("Unknown interlace method %d," - " see http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods ." - % self.interlace) + raise FormatError( + "Unknown filter method %d," + " see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ." + % self.filter) + if self.interlace not in (0, 1): + raise FormatError( + "Unknown interlace method %d, see " + "http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods" + " ." + % self.interlace) # Derived values # http://www.w3.org/TR/PNG/#6Colour-values - colormap = bool(self.color_type & 1) - greyscale = not (self.color_type & 2) + colormap = bool(self.color_type & 1) + greyscale = not(self.color_type & 2) alpha = bool(self.color_type & 4) - color_planes = (3,1)[greyscale or colormap] + color_planes = (3, 1)[greyscale or colormap] planes = color_planes + alpha self.colormap = colormap @@ -1814,7 +1698,7 @@ class Reader: self.alpha = alpha self.color_planes = color_planes self.planes = planes - self.psize = float(self.bitdepth)/float(8) * planes + self.psize = float(self.bitdepth) / float(8) * planes if int(self.psize) == self.psize: self.psize = int(self.psize) self.row_bytes = int(math.ceil(self.width * self.psize)) @@ -1824,7 +1708,7 @@ class Reader: # Stores tRNS chunk if present, and is used to check chunk # ordering constraints. self.trns = None - # Stores sbit chunk if present. + # Stores sBIT chunk if present. 
self.sbit = None def _process_PLTE(self, data): @@ -1834,8 +1718,8 @@ class Reader: self.plte = data if len(data) % 3 != 0: raise FormatError( - "PLTE chunk's length should be a multiple of 3.") - if len(data) > (2**self.bitdepth)*3: + "PLTE chunk's length should be a multiple of 3.") + if len(data) > (2 ** self.bitdepth) * 3: raise FormatError("PLTE chunk is too long.") if len(data) == 0: raise FormatError("Empty PLTE is not allowed.") @@ -1845,11 +1729,11 @@ class Reader: if self.colormap: if not self.plte: warnings.warn( - "PLTE chunk is required before bKGD chunk.") + "PLTE chunk is required before bKGD chunk.") self.background = struct.unpack('B', data) else: self.background = struct.unpack("!%dH" % self.color_planes, - data) + data) except struct.error: raise FormatError("bKGD chunk has incorrect length.") @@ -1860,15 +1744,15 @@ class Reader: if not self.plte: warnings.warn("PLTE chunk is required before tRNS chunk.") else: - if len(data) > len(self.plte)/3: + if len(data) > len(self.plte) / 3: # Was warning, but promoted to Error as it # would otherwise cause pain later on. raise FormatError("tRNS chunk is too long.") else: if self.alpha: raise FormatError( - "tRNS chunk is not valid with colour type %d." % - self.color_type) + "tRNS chunk is not valid with colour type %d." % + self.color_type) try: self.transparent = \ struct.unpack("!%dH" % self.color_planes, data) @@ -1884,7 +1768,7 @@ class Reader: def _process_sBIT(self, data): self.sbit = data if (self.colormap and len(data) != 3 or - not self.colormap and len(data) != self.planes): + not self.colormap and len(data) != self.planes): raise FormatError("sBIT chunk has incorrect length.") def _process_pHYs(self, data): @@ -1893,17 +1777,19 @@ class Reader: fmt = "!LLB" if len(data) != struct.calcsize(fmt): raise FormatError("pHYs chunk has incorrect length.") - self.x_pixels_per_unit, self.y_pixels_per_unit, unit = struct.unpack(fmt,data) + self.x_pixels_per_unit, self.y_pixels_per_unit, unit = \ + struct.unpack(fmt, data) self.unit_is_meter = bool(unit) def read(self, lenient=False): """ - Read the PNG file and decode it. Returns (`width`, `height`, - `pixels`, `metadata`). + Read the PNG file and decode it. + Returns (`width`, `height`, `rows`, `info`). May use excessive memory. - `pixels` are returned in boxed row flat pixel format. + `rows` is a sequence of rows; + each row is a sequence of values. If the optional `lenient` argument evaluates to True, checksum failures will raise warnings rather than exceptions. @@ -1912,87 +1798,83 @@ class Reader: def iteridat(): """Iterator that yields all the ``IDAT`` chunks as strings.""" while True: - try: - type, data = self.chunk(lenient=lenient) - except ValueError: - raise ChunkError(sys.exc_info()[1].args[0]) - if type == 'IEND': + type, data = self.chunk(lenient=lenient) + if type == b'IEND': # http://www.w3.org/TR/PNG/#11IEND break - if type != 'IDAT': + if type != b'IDAT': continue - # type == 'IDAT' + # type == b'IDAT' # http://www.w3.org/TR/PNG/#11IDAT if self.colormap and not self.plte: warnings.warn("PLTE chunk is required before IDAT chunk") yield data - def iterdecomp(idat): - """Iterator that yields decompressed strings. `idat` should - be an iterator that yields the ``IDAT`` chunk data. - """ - - # Currently, with no max_length parameter to decompress, - # this routine will do one yield per IDAT chunk: Not very - # incremental. - d = zlib.decompressobj() - # Each IDAT chunk is passed to the decompressor, then any - # remaining state is decompressed out. 
- for data in idat: - # :todo: add a max_length argument here to limit output - # size. - yield array('B', d.decompress(data)) - yield array('B', d.flush()) - self.preamble(lenient=lenient) - raw = iterdecomp(iteridat()) + raw = decompress(iteridat()) + if self.interlace: - raw = array('B', itertools.chain(*raw)) - arraycode = 'BH'[self.bitdepth>8] - # Like :meth:`group` but producing an array.array object for - # each row. - pixels = itertools.imap(lambda *row: array(arraycode, row), - *[iter(self.deinterlace(raw))]*self.width*self.planes) + def rows_from_interlace(): + """Yield each row from an interlaced PNG.""" + # It's important that this iterator doesn't read + # IDAT chunks until it yields the first row. + bs = bytearray(itertools.chain(*raw)) + arraycode = 'BH'[self.bitdepth > 8] + # Like :meth:`group` but + # producing an array.array object for each row. + values = self._deinterlace(bs) + vpr = self.width * self.planes + for i in range(0, len(values), vpr): + row = array(arraycode, values[i:i+vpr]) + yield row + rows = rows_from_interlace() else: - pixels = self.iterboxed(self.iterstraight(raw)) - meta = dict() + rows = self._iter_bytes_to_values(self._iter_straight_packed(raw)) + info = dict() for attr in 'greyscale alpha planes bitdepth interlace'.split(): - meta[attr] = getattr(self, attr) - meta['size'] = (self.width, self.height) + info[attr] = getattr(self, attr) + info['size'] = (self.width, self.height) for attr in 'gamma transparent background'.split(): a = getattr(self, attr, None) if a is not None: - meta[attr] = a + info[attr] = a + if getattr(self, 'x_pixels_per_unit', None): + info['physical'] = Resolution(self.x_pixels_per_unit, + self.y_pixels_per_unit, + self.unit_is_meter) if self.plte: - meta['palette'] = self.palette() - return self.width, self.height, pixels, meta - + info['palette'] = self.palette() + return self.width, self.height, rows, info def read_flat(self): """ - Read a PNG file and decode it into flat row flat pixel format. - Returns (*width*, *height*, *pixels*, *metadata*). + Read a PNG file and decode it into a single array of values. + Returns (*width*, *height*, *values*, *info*). May use excessive memory. - `pixels` are returned in flat row flat pixel format. + `values` is a single array. - See also the :meth:`read` method which returns pixels in the - more stream-friendly boxed row flat pixel format. + The :meth:`read` method is more stream-friendly than this, + because it returns a sequence of rows. """ - x, y, pixel, meta = self.read() - arraycode = 'BH'[meta['bitdepth']>8] + x, y, pixel, info = self.read() + arraycode = 'BH'[info['bitdepth'] > 8] pixel = array(arraycode, itertools.chain(*pixel)) - return x, y, pixel, meta + return x, y, pixel, info def palette(self, alpha='natural'): - """Returns a palette that is a sequence of 3-tuples or 4-tuples, - synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These - chunks should have already been processed (for example, by - calling the :meth:`preamble` method). All the tuples are the - same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when - there is a ``tRNS`` chunk. Assumes that the image is colour type + """ + Returns a palette that is a sequence of 3-tuples or 4-tuples, + synthesizing it from the ``PLTE`` and ``tRNS`` chunks. + These chunks should have already been processed (for example, + by calling the :meth:`preamble` method). + All the tuples are the same size: + 3-tuples if there is no ``tRNS`` chunk, + 4-tuples when there is a ``tRNS`` chunk. 
+ + Assumes that the image is colour type 3 and therefore a ``PLTE`` chunk is required. If the `alpha` argument is ``'force'`` then an alpha channel is @@ -2004,45 +1886,52 @@ class Reader: "Required PLTE chunk is missing in colour type 3 image.") plte = group(array('B', self.plte), 3) if self.trns or alpha == 'force': - trns = array('B', self.trns or '') - trns.extend([255]*(len(plte)-len(trns))) - plte = map(operator.add, plte, group(trns, 1)) + trns = array('B', self.trns or []) + trns.extend([255] * (len(plte) - len(trns))) + plte = list(map(operator.add, plte, group(trns, 1))) return plte def asDirect(self): - """Returns the image data as a direct representation of an - ``x * y * planes`` array. This method is intended to remove the - need for callers to deal with palettes and transparency - themselves. Images with a palette (colour type 3) - are converted to RGB or RGBA; images with transparency (a - ``tRNS`` chunk) are converted to LA or RGBA as appropriate. - When returned in this format the pixel values represent the - colour value directly without needing to refer to palettes or - transparency information. + """ + Returns the image data as a direct representation of + an ``x * y * planes`` array. + This removes the need for callers to deal with + palettes and transparency themselves. + Images with a palette (colour type 3) are converted to RGB or RGBA; + images with transparency (a ``tRNS`` chunk) are converted to + LA or RGBA as appropriate. + When returned in this format the pixel values represent + the colour value directly without needing to refer + to palettes or transparency information. Like the :meth:`read` method this method returns a 4-tuple: - (*width*, *height*, *pixels*, *meta*) - - This method normally returns pixel values with the bit depth - they have in the source image, but when the source PNG has an - ``sBIT`` chunk it is inspected and can reduce the bit depth of - the result pixels; pixel values will be reduced according to - the bit depth specified in the ``sBIT`` chunk (PNG nerds should - note a single result bit depth is used for all channels; the - maximum of the ones specified in the ``sBIT`` chunk. An RGB565 - image will be rescaled to 6-bit RGB666). - - The *meta* dictionary that is returned reflects the `direct` - format and not the original source image. For example, an RGB - source image with a ``tRNS`` chunk to represent a transparent - colour, will have ``planes=3`` and ``alpha=False`` for the - source image, but the *meta* dictionary returned by this method - will have ``planes=4`` and ``alpha=True`` because an alpha - channel is synthesized and added. - - *pixels* is the pixel data in boxed row flat pixel format (just - like the :meth:`read` method). + (*width*, *height*, *rows*, *info*) + + This method normally returns pixel values with + the bit depth they have in the source image, but + when the source PNG has an ``sBIT`` chunk it is inspected and + can reduce the bit depth of the result pixels; + pixel values will be reduced according to the bit depth + specified in the ``sBIT`` chunk. + PNG nerds should note a single result bit depth is + used for all channels: + the maximum of the ones specified in the ``sBIT`` chunk. + An RGB565 image will be rescaled to 6-bit RGB666. + + The *info* dictionary that is returned reflects + the `direct` format and not the original source image. 
+ For example, an RGB source image with a ``tRNS`` chunk + to represent a transparent colour, + will start with ``planes=3`` and ``alpha=False`` for the + source image, + but the *info* dictionary returned by this method + will have ``planes=4`` and ``alpha=True`` because + an alpha channel is synthesized and added. + + *rows* is a sequence of rows; + each row being a sequence of values + (like the :meth:`read` method). All the other aspects of the image data are not changed. """ @@ -2053,17 +1942,18 @@ class Reader: if not self.colormap and not self.trns and not self.sbit: return self.read() - x,y,pixels,meta = self.read() + x, y, pixels, info = self.read() if self.colormap: - meta['colormap'] = False - meta['alpha'] = bool(self.trns) - meta['bitdepth'] = 8 - meta['planes'] = 3 + bool(self.trns) - plte = list(self.palette()) + info['colormap'] = False + info['alpha'] = bool(self.trns) + info['bitdepth'] = 8 + info['planes'] = 3 + bool(self.trns) + plte = self.palette() + def iterpal(pixels): for row in pixels: - row = list(map(plte.__getitem__, row)) + row = [plte[x] for x in row] yield array('B', itertools.chain(*row)) pixels = iterpal(pixels) elif self.trns: @@ -2075,11 +1965,12 @@ class Reader: # perhaps go faster (all those 1-tuples!), but I still # wonder whether the code proliferation is worth it) it = self.transparent - maxval = 2**meta['bitdepth']-1 - planes = meta['planes'] - meta['alpha'] = True - meta['planes'] += 1 - typecode = 'BH'[meta['bitdepth']>8] + maxval = 2 ** info['bitdepth'] - 1 + planes = info['planes'] + info['alpha'] = True + info['planes'] += 1 + typecode = 'BH'[info['bitdepth'] > 8] + def itertrns(pixels): for row in pixels: # For each row we group it into pixels, then form a @@ -2090,142 +1981,147 @@ class Reader: row = group(row, planes) opa = map(it.__ne__, row) opa = map(maxval.__mul__, opa) - opa = zip(opa) # convert to 1-tuples - yield array(typecode, - itertools.chain(*map(operator.add, row, opa))) + opa = list(zip(opa)) # convert to 1-tuples + yield array( + typecode, + itertools.chain(*map(operator.add, row, opa))) pixels = itertrns(pixels) targetbitdepth = None if self.sbit: sbit = struct.unpack('%dB' % len(self.sbit), self.sbit) targetbitdepth = max(sbit) - if targetbitdepth > meta['bitdepth']: + if targetbitdepth > info['bitdepth']: raise Error('sBIT chunk %r exceeds bitdepth %d' % - (sbit,self.bitdepth)) + (sbit, self.bitdepth)) if min(sbit) <= 0: raise Error('sBIT chunk %r has a 0-entry' % sbit) - if targetbitdepth == meta['bitdepth']: - targetbitdepth = None if targetbitdepth: - shift = meta['bitdepth'] - targetbitdepth - meta['bitdepth'] = targetbitdepth + shift = info['bitdepth'] - targetbitdepth + info['bitdepth'] = targetbitdepth + def itershift(pixels): for row in pixels: - yield map(shift.__rrshift__, row) + yield [p >> shift for p in row] pixels = itershift(pixels) - return x,y,pixels,meta - - def asFloat(self, maxval=1.0): - """Return image pixels as per :meth:`asDirect` method, but scale - all pixel values to be floating point values between 0.0 and - *maxval*. 
- """ - - x,y,pixels,info = self.asDirect() - sourcemaxval = 2**info['bitdepth']-1 - del info['bitdepth'] - info['maxval'] = float(maxval) - factor = float(maxval)/float(sourcemaxval) - def iterfloat(): - for row in pixels: - yield map(factor.__mul__, row) - return x,y,iterfloat(),info + return x, y, pixels, info def _as_rescale(self, get, targetbitdepth): """Helper used by :meth:`asRGB8` and :meth:`asRGBA8`.""" - width,height,pixels,meta = get() - maxval = 2**meta['bitdepth'] - 1 + width, height, pixels, info = get() + maxval = 2**info['bitdepth'] - 1 targetmaxval = 2**targetbitdepth - 1 factor = float(targetmaxval) / float(maxval) - meta['bitdepth'] = targetbitdepth + info['bitdepth'] = targetbitdepth + def iterscale(): for row in pixels: - yield map(lambda x: int(round(x*factor)), row) + yield [int(round(x * factor)) for x in row] if maxval == targetmaxval: - return width, height, pixels, meta + return width, height, pixels, info else: - return width, height, iterscale(), meta + return width, height, iterscale(), info def asRGB8(self): - """Return the image data as an RGB pixels with 8-bits per - sample. This is like the :meth:`asRGB` method except that - this method additionally rescales the values so that they - are all between 0 and 255 (8-bit). In the case where the - source image has a bit depth < 8 the transformation preserves - all the information; where the source image has bit depth - > 8, then rescaling to 8-bit values loses precision. No - dithering is performed. Like :meth:`asRGB`, an alpha channel - in the source image will raise an exception. + """ + Return the image data as an RGB pixels with 8-bits per sample. + This is like the :meth:`asRGB` method except that + this method additionally rescales the values so that + they are all between 0 and 255 (8-bit). + In the case where the source image has a bit depth < 8 + the transformation preserves all the information; + where the source image has bit depth > 8, then + rescaling to 8-bit values loses precision. + No dithering is performed. + Like :meth:`asRGB`, + an alpha channel in the source image will raise an exception. This function returns a 4-tuple: - (*width*, *height*, *pixels*, *metadata*). - *width*, *height*, *metadata* are as per the - :meth:`read` method. - - *pixels* is the pixel data in boxed row flat pixel format. + (*width*, *height*, *rows*, *info*). + *width*, *height*, *info* are as per the :meth:`read` method. + + *rows* is the pixel data as a sequence of rows. """ return self._as_rescale(self.asRGB, 8) def asRGBA8(self): - """Return the image data as RGBA pixels with 8-bits per - sample. This method is similar to :meth:`asRGB8` and - :meth:`asRGBA`: The result pixels have an alpha channel, *and* - values are rescaled to the range 0 to 255. The alpha channel is - synthesized if necessary (with a small speed penalty). + """ + Return the image data as RGBA pixels with 8-bits per sample. + This method is similar to :meth:`asRGB8` and :meth:`asRGBA`: + The result pixels have an alpha channel, *and* + values are rescaled to the range 0 to 255. + The alpha channel is synthesized if necessary + (with a small speed penalty). """ return self._as_rescale(self.asRGBA, 8) def asRGB(self): - """Return image as RGB pixels. RGB colour images are passed - through unchanged; greyscales are expanded into RGB - triplets (there is a small speed overhead for doing this). + """ + Return image as RGB pixels. 
+ RGB colour images are passed through unchanged; + greyscales are expanded into RGB triplets + (there is a small speed overhead for doing this). - An alpha channel in the source image will raise an - exception. + An alpha channel in the source image will raise an exception. - The return values are as for the :meth:`read` method - except that the *metadata* reflect the returned pixels, not the - source image. In particular, for this method - ``metadata['greyscale']`` will be ``False``. + The return values are as for the :meth:`read` method except that + the *info* reflect the returned pixels, not the source image. + In particular, + for this method ``info['greyscale']`` will be ``False``. """ - width,height,pixels,meta = self.asDirect() - if meta['alpha']: + width, height, pixels, info = self.asDirect() + if info['alpha']: raise Error("will not convert image with alpha channel to RGB") - if not meta['greyscale']: - return width,height,pixels,meta - meta['greyscale'] = False - typecode = 'BH'[meta['bitdepth'] > 8] + if not info['greyscale']: + return width, height, pixels, info + info['greyscale'] = False + info['planes'] = 3 + + if info['bitdepth'] > 8: + def newarray(): + return array('H', [0]) + else: + def newarray(): + return bytearray([0]) + def iterrgb(): for row in pixels: - a = array(typecode, [0]) * 3 * width + a = newarray() * 3 * width for i in range(3): a[i::3] = row yield a - return width,height,iterrgb(),meta + return width, height, iterrgb(), info def asRGBA(self): - """Return image as RGBA pixels. Greyscales are expanded into - RGB triplets; an alpha channel is synthesized if necessary. - The return values are as for the :meth:`read` method - except that the *metadata* reflect the returned pixels, not the - source image. In particular, for this method - ``metadata['greyscale']`` will be ``False``, and - ``metadata['alpha']`` will be ``True``. + """ + Return image as RGBA pixels. + Greyscales are expanded into RGB triplets; + an alpha channel is synthesized if necessary. + The return values are as for the :meth:`read` method except that + the *info* reflect the returned pixels, not the source image. + In particular, for this method + ``info['greyscale']`` will be ``False``, and + ``info['alpha']`` will be ``True``. """ - width,height,pixels,meta = self.asDirect() - if meta['alpha'] and not meta['greyscale']: - return width,height,pixels,meta - typecode = 'BH'[meta['bitdepth'] > 8] - maxval = 2**meta['bitdepth'] - 1 + width, height, pixels, info = self.asDirect() + if info['alpha'] and not info['greyscale']: + return width, height, pixels, info + typecode = 'BH'[info['bitdepth'] > 8] + maxval = 2**info['bitdepth'] - 1 maxbuffer = struct.pack('=' + typecode, maxval) * 4 * width - def newarray(): - return array(typecode, maxbuffer) - if meta['alpha'] and meta['greyscale']: + if info['bitdepth'] > 8: + def newarray(): + return array('H', maxbuffer) + else: + def newarray(): + return bytearray(maxbuffer) + + if info['alpha'] and info['greyscale']: # LA to RGBA def convert(): for row in pixels: @@ -2233,550 +2129,227 @@ class Reader: # into first three target channels, and A channel # into fourth channel. 
a = newarray() - pngfilters.convert_la_to_rgba(row, a) + convert_la_to_rgba(row, a) yield a - elif meta['greyscale']: + elif info['greyscale']: # L to RGBA def convert(): for row in pixels: a = newarray() - pngfilters.convert_l_to_rgba(row, a) + convert_l_to_rgba(row, a) yield a else: - assert not meta['alpha'] and not meta['greyscale'] + assert not info['alpha'] and not info['greyscale'] # RGB to RGBA + def convert(): for row in pixels: a = newarray() - pngfilters.convert_rgb_to_rgba(row, a) + convert_rgb_to_rgba(row, a) yield a - meta['alpha'] = True - meta['greyscale'] = False - return width,height,convert(),meta + info['alpha'] = True + info['greyscale'] = False + info['planes'] = 4 + return width, height, convert(), info + + +def decompress(data_blocks): + """ + `data_blocks` should be an iterable that + yields the compressed data (from the ``IDAT`` chunks). + This yields decompressed byte strings. + """ + + # Currently, with no max_length parameter to decompress, + # this routine will do one yield per IDAT chunk: Not very + # incremental. + d = zlib.decompressobj() + # Each IDAT chunk is passed to the decompressor, then any + # remaining state is decompressed out. + for data in data_blocks: + # :todo: add a max_length argument here to limit output size. + yield bytearray(d.decompress(data)) + yield bytearray(d.flush()) + def check_bitdepth_colortype(bitdepth, colortype): - """Check that `bitdepth` and `colortype` are both valid, - and specified in a valid combination. Returns if valid, - raise an Exception if not valid. + """ + Check that `bitdepth` and `colortype` are both valid, + and specified in a valid combination. + Returns (None) if valid, raise an Exception if not valid. """ - if bitdepth not in (1,2,4,8,16): + if bitdepth not in (1, 2, 4, 8, 16): raise FormatError("invalid bit depth %d" % bitdepth) - if colortype not in (0,2,3,4,6): + if colortype not in (0, 2, 3, 4, 6): raise FormatError("invalid colour type %d" % colortype) # Check indexed (palettized) images have 8 or fewer bits # per pixel; check only indexed or greyscale images have # fewer than 8 bits per pixel. if colortype & 1 and bitdepth > 8: raise FormatError( - "Indexed images (colour type %d) cannot" - " have bitdepth > 8 (bit depth %d)." - " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." - % (bitdepth, colortype)) - if bitdepth < 8 and colortype not in (0,3): - raise FormatError("Illegal combination of bit depth (%d)" - " and colour type (%d)." - " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." - % (bitdepth, colortype)) - -def isinteger(x): + "Indexed images (colour type %d) cannot" + " have bitdepth > 8 (bit depth %d)." + " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." + % (bitdepth, colortype)) + if bitdepth < 8 and colortype not in (0, 3): + raise FormatError( + "Illegal combination of bit depth (%d)" + " and colour type (%d)." + " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ." + % (bitdepth, colortype)) + + +def is_natural(x): + """A non-negative integer.""" try: - return int(x) == x + is_integer = int(x) == x except (TypeError, ValueError): return False + return is_integer and x >= 0 -# === Legacy Version Support === +def undo_filter_sub(filter_unit, scanline, previous, result): + """Undo sub filter.""" -# :pyver:old: PyPNG works on Python versions 2.3 and 2.2, but not -# without some awkward problems. Really PyPNG works on Python 2.4 (and -# above); it works on Pythons 2.3 and 2.2 by virtue of fixing up -# problems here. 
It's a bit ugly (which is why it's hidden down here). -# -# Generally the strategy is one of pretending that we're running on -# Python 2.4 (or above), and patching up the library support on earlier -# versions so that it looks enough like Python 2.4. When it comes to -# Python 2.2 there is one thing we cannot patch: extended slices -# http://www.python.org/doc/2.3/whatsnew/section-slices.html. -# Instead we simply declare that features that are implemented using -# extended slices will not work on Python 2.2. -# -# In order to work on Python 2.3 we fix up a recurring annoyance involving -# the array type. In Python 2.3 an array cannot be initialised with an -# array, and it cannot be extended with a list (or other sequence). -# Both of those are repeated issues in the code. Whilst I would not -# normally tolerate this sort of behaviour, here we "shim" a replacement -# for array into place (and hope no-one notices). You never read this. -# -# In an amusing case of warty hacks on top of warty hacks... the array -# shimming we try and do only works on Python 2.3 and above (you can't -# subclass array.array in Python 2.2). So to get it working on Python -# 2.2 we go for something much simpler and (probably) way slower. -try: - array('B').extend([]) - array('B', array('B')) -# :todo:(drj) Check that TypeError is correct for Python 2.3 -except TypeError: - # Expect to get here on Python 2.3 - try: - class _array_shim(array): - true_array = array - def __new__(cls, typecode, init=None): - super_new = super(_array_shim, cls).__new__ - it = super_new(cls, typecode) - if init is None: - return it - it.extend(init) - return it - def extend(self, extension): - super_extend = super(_array_shim, self).extend - if isinstance(extension, self.true_array): - return super_extend(extension) - if not isinstance(extension, (list, str)): - # Convert to list. Allows iterators to work. - extension = list(extension) - return super_extend(self.true_array(self.typecode, extension)) - array = _array_shim - except TypeError: - # Expect to get here on Python 2.2 - def array(typecode, init=()): - if type(init) == str: - return map(ord, init) - return list(init) - -# Further hacks to get it limping along on Python 2.2 -try: - enumerate -except NameError: - def enumerate(seq): - i=0 - for x in seq: - yield i,x - i += 1 - -try: - reversed -except NameError: - def reversed(l): - l = list(l) - l.reverse() - for x in l: - yield x - -try: - itertools -except NameError: - class _dummy_itertools: - pass - itertools = _dummy_itertools() - def _itertools_imap(f, seq): - for x in seq: - yield f(x) - itertools.imap = _itertools_imap - def _itertools_chain(*iterables): - for it in iterables: - for element in it: - yield element - itertools.chain = _itertools_chain - - -# === Support for users without Cython === - -try: - pngfilters -except NameError: - class pngfilters(object): - def undo_filter_sub(filter_unit, scanline, previous, result): - """Undo sub filter.""" - - ai = 0 - # Loops starts at index fu. Observe that the initial part - # of the result is already filled in correctly with - # scanline. 
- for i in range(filter_unit, len(result)): - x = scanline[i] - a = result[ai] - result[i] = (x + a) & 0xff - ai += 1 - undo_filter_sub = staticmethod(undo_filter_sub) - - def undo_filter_up(filter_unit, scanline, previous, result): - """Undo up filter.""" - - for i in range(len(result)): - x = scanline[i] - b = previous[i] - result[i] = (x + b) & 0xff - undo_filter_up = staticmethod(undo_filter_up) - - def undo_filter_average(filter_unit, scanline, previous, result): - """Undo up filter.""" - - ai = -filter_unit - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = 0 - else: - a = result[ai] - b = previous[i] - result[i] = (x + ((a + b) >> 1)) & 0xff - ai += 1 - undo_filter_average = staticmethod(undo_filter_average) - - def undo_filter_paeth(filter_unit, scanline, previous, result): - """Undo Paeth filter.""" - - # Also used for ci. - ai = -filter_unit - for i in range(len(result)): - x = scanline[i] - if ai < 0: - a = c = 0 - else: - a = result[ai] - c = previous[ai] - b = previous[i] - p = a + b - c - pa = abs(p - a) - pb = abs(p - b) - pc = abs(p - c) - if pa <= pb and pa <= pc: - pr = a - elif pb <= pc: - pr = b - else: - pr = c - result[i] = (x + pr) & 0xff - ai += 1 - undo_filter_paeth = staticmethod(undo_filter_paeth) - - def convert_la_to_rgba(row, result): - for i in range(3): - result[i::4] = row[0::2] - result[3::4] = row[1::2] - convert_la_to_rgba = staticmethod(convert_la_to_rgba) - - def convert_l_to_rgba(row, result): - """Convert a grayscale image to RGBA. This method assumes - the alpha channel in result is already correctly - initialized. - """ - for i in range(3): - result[i::4] = row - convert_l_to_rgba = staticmethod(convert_l_to_rgba) + ai = 0 + # Loops starts at index fu. Observe that the initial part + # of the result is already filled in correctly with + # scanline. + for i in range(filter_unit, len(result)): + x = scanline[i] + a = result[ai] + result[i] = (x + a) & 0xff + ai += 1 - def convert_rgb_to_rgba(row, result): - """Convert an RGB image to RGBA. This method assumes the - alpha channel in result is already correctly initialized. - """ - for i in range(3): - result[i::4] = row[i::3] - convert_rgb_to_rgba = staticmethod(convert_rgb_to_rgba) +def undo_filter_up(filter_unit, scanline, previous, result): + """Undo up filter.""" -# === Command Line Support === + for i in range(len(result)): + x = scanline[i] + b = previous[i] + result[i] = (x + b) & 0xff -def read_pam_header(infile): - """ - Read (the rest of a) PAM header. `infile` should be positioned - immediately after the initial 'P7' line (at the beginning of the - second line). Returns are as for `read_pnm_header`. - """ - - # Unlike PBM, PGM, and PPM, we can read the header a line at a time. 
- header = dict() - while True: - l = infile.readline().strip() - if l == strtobytes('ENDHDR'): - break - if not l: - raise EOFError('PAM ended prematurely') - if l[0] == strtobytes('#'): - continue - l = l.split(None, 1) - if l[0] not in header: - header[l[0]] = l[1] + +def undo_filter_average(filter_unit, scanline, previous, result): + """Undo up filter.""" + + ai = -filter_unit + for i in range(len(result)): + x = scanline[i] + if ai < 0: + a = 0 else: - header[l[0]] += strtobytes(' ') + l[1] - - required = ['WIDTH', 'HEIGHT', 'DEPTH', 'MAXVAL'] - required = [strtobytes(x) for x in required] - WIDTH,HEIGHT,DEPTH,MAXVAL = required - present = [x for x in required if x in header] - if len(present) != len(required): - raise Error('PAM file must specify WIDTH, HEIGHT, DEPTH, and MAXVAL') - width = int(header[WIDTH]) - height = int(header[HEIGHT]) - depth = int(header[DEPTH]) - maxval = int(header[MAXVAL]) - if (width <= 0 or - height <= 0 or - depth <= 0 or - maxval <= 0): - raise Error( - 'WIDTH, HEIGHT, DEPTH, MAXVAL must all be positive integers') - return 'P7', width, height, depth, maxval - -def read_pnm_header(infile, supported=('P5','P6')): - """ - Read a PNM header, returning (format,width,height,depth,maxval). - `width` and `height` are in pixels. `depth` is the number of - channels in the image; for PBM and PGM it is synthesized as 1, for - PPM as 3; for PAM images it is read from the header. `maxval` is - synthesized (as 1) for PBM images. - """ + a = result[ai] + b = previous[i] + result[i] = (x + ((a + b) >> 1)) & 0xff + ai += 1 - # Generally, see http://netpbm.sourceforge.net/doc/ppm.html - # and http://netpbm.sourceforge.net/doc/pam.html - - supported = [strtobytes(x) for x in supported] - - # Technically 'P7' must be followed by a newline, so by using - # rstrip() we are being liberal in what we accept. I think this - # is acceptable. - type = infile.read(3).rstrip() - if type not in supported: - raise NotImplementedError('file format %s not supported' % type) - if type == strtobytes('P7'): - # PAM header parsing is completely different. - return read_pam_header(infile) - # Expected number of tokens in header (3 for P4, 4 for P6) - expected = 4 - pbm = ('P1', 'P4') - if type in pbm: - expected = 3 - header = [type] - - # We have to read the rest of the header byte by byte because the - # final whitespace character (immediately following the MAXVAL in - # the case of P6) may not be a newline. Of course all PNM files in - # the wild use a newline at this point, so it's tempting to use - # readline; but it would be wrong. - def getc(): - c = infile.read(1) - if not c: - raise Error('premature EOF reading PNM header') - return c - c = getc() - while True: - # Skip whitespace that precedes a token. - while c.isspace(): - c = getc() - # Skip comments. - while c == '#': - while c not in '\n\r': - c = getc() - if not c.isdigit(): - raise Error('unexpected character %s found in header' % c) - # According to the specification it is legal to have comments - # that appear in the middle of a token. - # This is bonkers; I've never seen it; and it's a bit awkward to - # code good lexers in Python (no goto). So we break on such - # cases. - token = strtobytes('') - while c.isdigit(): - token += c - c = getc() - # Slight hack. All "tokens" are decimal integers, so convert - # them here. 
- header.append(int(token)) - if len(header) == expected: - break - # Skip comments (again) - while c == '#': - while c not in '\n\r': - c = getc() - if not c.isspace(): - raise Error('expected header to end with whitespace, not %s' % c) - - if type in pbm: - # synthesize a MAXVAL - header.append(1) - depth = (1,3)[type == strtobytes('P6')] - return header[0], header[1], header[2], depth, header[3] - -def write_pnm(file, width, height, pixels, meta): - """Write a Netpbm PNM/PAM file. - """ +def undo_filter_paeth(filter_unit, scanline, previous, result): + """Undo Paeth filter.""" - bitdepth = meta['bitdepth'] - maxval = 2**bitdepth - 1 - # Rudely, the number of image planes can be used to determine - # whether we are L (PGM), LA (PAM), RGB (PPM), or RGBA (PAM). - planes = meta['planes'] - # Can be an assert as long as we assume that pixels and meta came - # from a PNG file. - assert planes in (1,2,3,4) - if planes in (1,3): - if 1 == planes: - # PGM - # Could generate PBM if maxval is 1, but we don't (for one - # thing, we'd have to convert the data, not just blat it - # out). - fmt = 'P5' + # Also used for ci. + ai = -filter_unit + for i in range(len(result)): + x = scanline[i] + if ai < 0: + a = c = 0 else: - # PPM - fmt = 'P6' - header = '%s %d %d %d\n' % (fmt, width, height, maxval) - if planes in (2,4): - # PAM - # See http://netpbm.sourceforge.net/doc/pam.html - if 2 == planes: - tupltype = 'GRAYSCALE_ALPHA' + a = result[ai] + c = previous[ai] + b = previous[i] + p = a + b - c + pa = abs(p - a) + pb = abs(p - b) + pc = abs(p - c) + if pa <= pb and pa <= pc: + pr = a + elif pb <= pc: + pr = b else: - tupltype = 'RGB_ALPHA' - header = ('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n' - 'TUPLTYPE %s\nENDHDR\n' % - (width, height, planes, maxval, tupltype)) - file.write(header.encode('ascii')) - # Values per row - vpr = planes * width - # struct format - fmt = '>%d' % vpr - if maxval > 0xff: - fmt = fmt + 'H' - else: - fmt = fmt + 'B' - for row in pixels: - file.write(struct.pack(fmt, *row)) - file.flush() + pr = c + result[i] = (x + pr) & 0xff + ai += 1 + -def color_triple(color): +def convert_la_to_rgba(row, result): + for i in range(3): + result[i::4] = row[0::2] + result[3::4] = row[1::2] + + +def convert_l_to_rgba(row, result): """ - Convert a command line colour value to a RGB triple of integers. - FIXME: Somewhere we need support for greyscale backgrounds etc. + Convert a grayscale image to RGBA. + This method assumes the alpha channel in result is + already correctly initialized. """ - if color.startswith('#') and len(color) == 4: - return (int(color[1], 16), - int(color[2], 16), - int(color[3], 16)) - if color.startswith('#') and len(color) == 7: - return (int(color[1:3], 16), - int(color[3:5], 16), - int(color[5:7], 16)) - elif color.startswith('#') and len(color) == 13: - return (int(color[1:5], 16), - int(color[5:9], 16), - int(color[9:13], 16)) - -def _add_common_options(parser): - """Call *parser.add_option* for each of the options that are - common between this PNG--PNM conversion tool and the gen - tool. 
+ for i in range(3): + result[i::4] = row + + +def convert_rgb_to_rgba(row, result): """ - parser.add_option("-i", "--interlace", - default=False, action="store_true", - help="create an interlaced PNG file (Adam7)") - parser.add_option("-t", "--transparent", - action="store", type="string", metavar="#RRGGBB", - help="mark the specified colour as transparent") - parser.add_option("-b", "--background", - action="store", type="string", metavar="#RRGGBB", - help="save the specified background colour") - parser.add_option("-g", "--gamma", - action="store", type="float", metavar="value", - help="save the specified gamma value") - parser.add_option("-c", "--compression", - action="store", type="int", metavar="level", - help="zlib compression level (0-9)") - return parser - -def _main(argv): + Convert an RGB image to RGBA. + This method assumes the alpha channel in result is + already correctly initialized. """ - Run the PNG encoder with options from the command line. + for i in range(3): + result[i::4] = row[i::3] + + +# Only reason to include this in this module is that +# several utilities need it, and it is small. +def binary_stdin(): + """ + A sys.stdin that returns bytes. """ - # Parse command line arguments - from optparse import OptionParser - version = '%prog ' + __version__ - parser = OptionParser(version=version) - parser.set_usage("%prog [options] [imagefile]") - parser.add_option('-r', '--read-png', default=False, - action='store_true', - help='Read PNG, write PNM') - parser.add_option("-a", "--alpha", - action="store", type="string", metavar="pgmfile", - help="alpha channel transparency (RGBA)") - _add_common_options(parser) - - (options, args) = parser.parse_args(args=argv[1:]) - - # Convert options - if options.transparent is not None: - options.transparent = color_triple(options.transparent) - if options.background is not None: - options.background = color_triple(options.background) - - # Prepare input and output files - if len(args) == 0: - infilename = '-' - infile = sys.stdin - elif len(args) == 1: - infilename = args[0] - infile = open(infilename, 'rb') - else: - parser.error("more than one input file") - outfile = sys.stdout + try: + return sys.stdin.buffer + except AttributeError: + # Probably Python 2, where bytes are strings. + return sys.stdin + + +def binary_stdout(): + """ + A sys.stdout that accepts bytes. + """ + + # First there is a Python3 issue. + try: + stdout = sys.stdout.buffer + except AttributeError: + # Probably Python 2, where bytes are strings. + stdout = sys.stdout + + # On Windows the C runtime file orientation needs changing. if sys.platform == "win32": - import msvcrt, os + import msvcrt + import os msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) - if options.read_png: - # Encode PNG to PPM - png = Reader(file=infile) - width,height,pixels,meta = png.asDirect() - write_pnm(outfile, width, height, pixels, meta) - else: - # Encode PNM to PNG - format, width, height, depth, maxval = \ - read_pnm_header(infile, ('P5','P6','P7')) - # When it comes to the variety of input formats, we do something - # rather rude. Observe that L, LA, RGB, RGBA are the 4 colour - # types supported by PNG and that they correspond to 1, 2, 3, 4 - # channels respectively. So we use the number of channels in - # the source image to determine which one we have. We do not - # care about TUPLTYPE. 
- greyscale = depth <= 2 - pamalpha = depth in (2,4) - supported = map(lambda x: 2**x-1, range(1,17)) - try: - mi = supported.index(maxval) - except ValueError: - raise NotImplementedError( - 'your maxval (%s) not in supported list %s' % - (maxval, str(supported))) - bitdepth = mi+1 - writer = Writer(width, height, - greyscale=greyscale, - bitdepth=bitdepth, - interlace=options.interlace, - transparent=options.transparent, - background=options.background, - alpha=bool(pamalpha or options.alpha), - gamma=options.gamma, - compression=options.compression) - if options.alpha: - pgmfile = open(options.alpha, 'rb') - format, awidth, aheight, adepth, amaxval = \ - read_pnm_header(pgmfile, 'P5') - if amaxval != '255': - raise NotImplementedError( - 'maxval %s not supported for alpha channel' % amaxval) - if (awidth, aheight) != (width, height): - raise ValueError("alpha channel image size mismatch" - " (%s has %sx%s but %s has %sx%s)" - % (infilename, width, height, - options.alpha, awidth, aheight)) - writer.convert_ppm_and_pgm(infile, pgmfile, outfile) - else: - writer.convert_pnm(infile, outfile) + return stdout + + +def cli_open(path): + if path == "-": + return binary_stdin() + return open(path, "rb") + + +def main(argv): + """ + Run command line PNG. + """ + print("What should the command line tool do?", file=sys.stderr) if __name__ == '__main__': try: - _main(sys.argv) - except Error: - e = sys.exc_info()[1] + main(sys.argv) + except Error as e: print(e, file=sys.stderr) diff --git a/scripts/build/png2bdc.py b/scripts/build/png2bdc.py index a1be5c552d0..adc4b185f44 100644 --- a/scripts/build/png2bdc.py +++ b/scripts/build/png2bdc.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 ## ## license:BSD-3-Clause ## copyright-holders:Aaron Giles, Andrew Gardner @@ -59,12 +59,8 @@ import os import png import sys -if sys.version_info >= (3,): - def b2p(v): - return bytes([v]) -else: - def b2p(v): - return chr(v) +def b2p(v): + return bytes([v]) ######################################## diff --git a/scripts/build/verinfo.py b/scripts/build/verinfo.py index f89956d1a53..68038b6251b 100644 --- a/scripts/build/verinfo.py +++ b/scripts/build/verinfo.py @@ -1,172 +1,137 @@ -#!/usr/bin/python +#!/usr/bin/python3 ## ## license:BSD-3-Clause -## copyright-holders:Aaron Giles, Andrew Gardner - -from __future__ import with_statement +## copyright-holders:Vas Crabb +import argparse +import io import re +import string import sys def parse_args(): - def usage(): - sys.stderr.write('Usage: verinfo.py [-b mame|mess|ume] [-r|-p] [-o <outfile>] <srcfile>\n') - sys.exit(1) + parser = argparse.ArgumentParser() + parser.add_argument('--target', '-t', metavar='<target>', default='mame', help='target name') + parser.add_argument('--subtarget', '-s', metavar='<subtarget>', default='mame', help='subtarget name') + parser.add_argument('--executable', '-e', metavar='<executable>', default='mame', help='base executable name') + parser.add_argument('--format', '-f', choices=('rc', 'plist'), metavar='<format>', default='rc', help='output format') + parser.add_argument('--resources', '-r', metavar='<resfile>', help='resource file to include') + parser.add_argument('-o', metavar='<outfile>', help='output file name') + parser.add_argument('input', metavar='<srcfile>', help='version info source file') + return parser.parse_args() - flags = True - target = 'mame' - format = 'rc' - input = None - output = None - i = 1 - while i < len(sys.argv): - if flags and (sys.argv[i] == '-r'): - format = 'rc' - elif flags and 
(sys.argv[i] == '-p'): - format = 'plist' - elif flags and (sys.argv[i] == '-b'): - i += 1 - if (i >= len(sys.argv)): - usage() - else: - target = sys.argv[i] - elif flags and (sys.argv[i] == '-o'): - i += 1 - if (i >= len(sys.argv)) or (output is not None): - usage() - else: - output = sys.argv[i] - elif flags and (sys.argv[i] == '--'): - flags = False - elif flags and sys.argv[i].startswith('-'): - usage() - elif input is not None: - usage() - else: - input = sys.argv[i] - i += 1 - if input is None: - usage() - return target, format, input, output - -def extract_version(input): - pattern = re.compile('\s+BARE_BUILD_VERSION\s+"(([^."]+)\.([^."]+))"') - for line in input.readlines(): +def extract_version(verinfo): + pattern = re.compile(r'\s+BARE_BUILD_VERSION\s+"(([^."]+)\.([^."]+))"') + for line in verinfo: match = pattern.search(line) if match: return match.group(1), match.group(2), match.group(3) return None, None, None -build, outfmt, srcfile, dstfile = parse_args() - -try: - fp = open(srcfile, 'rU') -except IOError: - sys.stderr.write("Unable to open source file '%s'\n" % srcfile) - sys.exit(1) +if __name__ == '__main__': + options = parse_args() -version_string, version_major, version_minor = extract_version(fp) -version_build = "0" -version_subbuild = "0" -if not version_string: - sys.stderr.write("Unable to extract version from source file '%s'\n" % srcfile) - sys.exit(1) -fp.close() - -if dstfile is not None: try: - fp = open(dstfile, 'w') - except IOError: - sys.stderr.write("Unable to open output file '%s'\n" % dstfile) + with io.open(options.input, 'r') as verinfo: + verfull, vermajor, verminor = extract_version(verinfo) + verbuild = '0' + except IOError as e: + sys.stderr.write("Error reading source file '%s': %s\n" % (options.input, e)) + sys.exit(1) + + if verfull is None: + sys.stderr.write("Unable to extract version from source file '%s'\n" % (options.input, )) sys.exit(1) -else: - fp = sys.stdout -if build == "mess": - # MESS - author = "MESS Team" - comments = "Multi Emulation Super System" - company_name = "MESS Team" - file_description = "MESS" - internal_name = "MESS" - original_filename = "MESS" - product_name = "MESS" - bundle_identifier = "org.mamedev.mess" -else: - # MAME - author = "Nicola Salmoria and the MAME Team" - comments = "Multi-purpose emulation framework" - company_name = "MAME Team" - file_description = "MAME" - internal_name = "MAME" if build == "mame" else build - original_filename = "MAME" if build == "mame" else build - product_name = "MAME" if build == "mame" else build - bundle_identifier = "org.mamedev." 
+    if options.format == 'plist':
+        template = string.Template(
+                '<?xml version="1.0" encoding="UTF-8"?>\n' \
+                '<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n' \
+                '<plist version="1.0">\n' \
+                '<dict>\n' \
+                '\t<key>CFBundleDisplayName</key>\n' \
+                '\t<string>${product}</string>\n' \
+                '\t<key>CFBundleIdentifier</key>\n' \
+                '\t<string>${rdns}</string>\n' \
+                '\t<key>CFBundleInfoDictionaryVersion</key>\n' \
+                '\t<string>6.0</string>\n' \
+                '\t<key>CFBundleName</key>\n' \
+                '\t<string>${product}</string>\n' \
+                '\t<key>CFBundleShortVersionString</key>\n' \
+                '\t<string>${major}.${minor}.${build}</string>\n' \
+                '\t<key>NSPrincipalClass</key>\n' \
+                '\t<string>NSApplication</string>\n' \
+                '</dict>\n' \
+                '</plist>\n')
+    else:
+        template = string.Template(
+                '#include <windows.h>\n' \
+                '#pragma code_page(65001)\n' \
+                'VS_VERSION_INFO VERSIONINFO\n' \
+                '\tFILEVERSION ${major},${minor},${build},${subbuild}\n' \
+                '\tPRODUCTVERSION ${major},${minor},${build},${subbuild}\n' \
+                '\tFILEFLAGSMASK 0x3fL\n' \
+                '\tFILEFLAGS ${winfileflags}\n' \
+                '\tFILEOS VOS_NT_WINDOWS32\n' \
+                '\tFILETYPE VFT_APP\n' \
+                '\tFILESUBTYPE VFT2_UNKNOWN\n' \
+                'BEGIN\n' \
+                '\tBLOCK "StringFileInfo"\n' \
+                '\tBEGIN\n' \
+                '#ifdef UNICODE\n' \
+                '\t\tBLOCK "040904b0"\n' \
+                '#else\n' \
+                '\t\tBLOCK "040904E4"\n' \
+                '#endif\n' \
+                '\t\tBEGIN\n' \
+                '\t\t\tVALUE "Author", "${author}\\0"\n' \
+                '\t\t\tVALUE "Comments", "${comments}\\0"\n' \
+                '\t\t\tVALUE "CompanyName", "${company}\\0"\n' \
+                '\t\t\tVALUE "FileDescription", "${filedesc}\\0"\n' \
+                '\t\t\tVALUE "FileVersion", "${major}, ${minor}, ${build}, ${subbuild}\\0"\n' \
+                '\t\t\tVALUE "InternalName", "${internal}\\0"\n' \
+                '\t\t\tVALUE "LegalCopyright", "${copyright}\\0"\n' \
+                '\t\t\tVALUE "OriginalFilename", "${original}\\0"\n' \
+                '\t\t\tVALUE "ProductName", "${product}\\0"\n' \
+                '\t\t\tVALUE "ProductVersion", "${version}\\0"\n' \
+                '\t\tEND\n' \
+                '\tEND\n' \
+                '\tBLOCK "VarFileInfo"\n' \
+                '\tBEGIN\n' \
+                '#ifdef UNICODE\n' \
+                '\t\tVALUE "Translation", 0x409, 1200\n' \
+                '#else\n' \
+                '\t\tVALUE "Translation", 0x409, 1252\n' \
+                '#endif\n' \
+                '\tEND\n' \
+                'END\n' \
+                '#include "${resources}"\n')
 
-legal_copyright = "Copyright Nicola Salmoria and the MAME team"
+    internal = options.target + '_' + options.subtarget if options.target != options.subtarget else options.target
+    text = template.substitute(
+            version=verfull,
+            major=vermajor, minor=verminor, build='0', subbuild='0',
+            author='MAMEdev and contributors',
+            comments='Multi-purpose emulation framework',
+            company='MAMEdev',
+            filedesc='MAME',
+            internal=internal,
+            original=options.executable,
+            product=('MAME' if options.target == 'mame' else options.target),
+            rdns=('org.mamedev.' + internal),
+            copyright='\u00a9 1997-2025 MAMEdev and contributors',
+            winfileflags=('0x0L' if verbuild == '0' else 'VS_FF_PRERELEASE'),
+            resources=(options.resources or 'mame.rc'))
 
-if outfmt == 'rc':
-    fp.write('VS_VERSION_INFO VERSIONINFO\n')
-    fp.write('\tFILEVERSION %s,%s,%s,%s\n' % (version_major, version_minor, version_build, version_subbuild))
-    fp.write('\tPRODUCTVERSION %s,%s,%s,%s\n' % (version_major, version_minor, version_build, version_subbuild))
-    fp.write('\tFILEFLAGSMASK 0x3fL\n')
-    if version_build == 0:
-        fp.write('\tFILEFLAGS 0x0L\n')
+    if options.o is not None:
+        try:
+            with io.open(options.o, 'w', encoding='utf-8') as out:
+                out.write(text)
+        except IOError as e:
+            sys.stderr.write("Error writing output file '%s': %s\n" % (options.o, e))
+            sys.exit(1)
     else:
-        fp.write('\tFILEFLAGS VS_FF_PRERELEASE\n')
-    fp.write('\tFILEOS VOS_NT_WINDOWS32\n')
-    fp.write('\tFILETYPE VFT_APP\n')
-    fp.write('\tFILESUBTYPE VFT2_UNKNOWN\n')
-    fp.write('BEGIN\n')
-    fp.write('\tBLOCK "StringFileInfo"\n')
-    fp.write('\tBEGIN\n')
-    fp.write('#ifdef UNICODE\n')
-    fp.write('\t\tBLOCK "040904b0"\n')
-    fp.write('#else\n')
-    fp.write('\t\tBLOCK "040904E4"\n')
-    fp.write('#endif\n')
-    fp.write('\t\tBEGIN\n')
-    fp.write('\t\t\tVALUE "Author", "%s\\0"\n' % author)
-    fp.write('\t\t\tVALUE "Comments", "%s\\0"\n' % comments)
-    fp.write('\t\t\tVALUE "CompanyName", "%s\\0"\n' % company_name)
-    fp.write('\t\t\tVALUE "FileDescription", "%s\\0"\n' % file_description)
-    fp.write('\t\t\tVALUE "FileVersion", "%s, %s, %s, %s\\0"\n' % (version_major, version_minor, version_build, version_subbuild))
-    fp.write('\t\t\tVALUE "InternalName", "%s\\0"\n' % internal_name)
-    fp.write('\t\t\tVALUE "LegalCopyright", "%s\\0"\n' % legal_copyright)
-    fp.write('\t\t\tVALUE "OriginalFilename", "%s\\0"\n' % original_filename)
-    fp.write('\t\t\tVALUE "ProductName", "%s\\0"\n' % product_name)
-    fp.write('\t\t\tVALUE "ProductVersion", "%s\\0"\n' % version_string)
-    fp.write('\t\tEND\n')
-    fp.write('\tEND\n')
-    fp.write('\tBLOCK "VarFileInfo"\n')
-    fp.write('\tBEGIN\n')
-    fp.write('#ifdef UNICODE\n')
-    fp.write('\t\tVALUE "Translation", 0x409, 1200\n')
-    fp.write('#else\n')
-    fp.write('\t\tVALUE "Translation", 0x409, 1252\n')
-    fp.write('#endif\n')
-    fp.write('\tEND\n')
-    fp.write('END\n')
-elif outfmt == 'plist':
-    fp.write('<?xml version="1.0" encoding="UTF-8"?>\n')
-    fp.write('<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n')
-    fp.write('<plist version="1.0">\n')
-    fp.write('<dict>\n')
-    fp.write('\t<key>CFBundleDisplayName</key>\n')
-    fp.write('\t<string>%s</string>\n' % product_name)
-    fp.write('\t<key>CFBundleIdentifier</key>\n')
-    fp.write('\t<string>%s</string>\n' % bundle_identifier)
-    fp.write('\t<key>CFBundleInfoDictionaryVersion</key>\n')
-    fp.write('\t<string>6.0</string>\n')
-    fp.write('\t<key>CFBundleName</key>\n')
-    fp.write('\t<string>%s</string>\n' % product_name)
-    fp.write('\t<key>CFBundleShortVersionString</key>\n')
-    fp.write('\t<string>%s.%s.%s</string>\n' % (version_major, version_minor, version_build))
-    fp.write('\t<key>NSPrincipalClass</key>\n')
-    fp.write('\t<string>NSApplication</string>\n')
-    fp.write('</dict>\n')
-    fp.write('</plist>\n')
-fp.flush()
+        sys.stdout.write(text)
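
Note: the rewritten verinfo.py above fills one string.Template per output format instead of emitting the file line by line with fp.write() calls. The snippet below is a minimal, hypothetical sketch of that substitution pattern, not part of the commit; the version numbers are placeholders, whereas the real script takes them from the BARE_BUILD_VERSION string in the <srcfile> named on the command line (something along the lines of "verinfo.py -f rc -o <outfile> <srcfile>", going by the argparse definitions in the new code).

# Illustration only: the same string.Template mechanism as the new verinfo.py,
# reduced to two resource fields with placeholder version values.
import string

template = string.Template(
        'FILEVERSION ${major},${minor},${build},${subbuild}\n'
        'VALUE "ProductVersion", "${version}\\0"\n')

print(template.substitute(
        version='0.261', major='0', minor='261', build='0', subbuild='0'), end='')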