1#Needs: fonts/DejaVuSerif.sfd
2#
3################################################################################
4########################### W3C's WOFF Validator ###############################
5################################################################################
6#
7# This software or document includes material copied from or derived from
8# http://dev.w3.org/webfonts/WOFF/tools/validator/
9# Copyright (C) 2012 W3C(R) (MIT, ERCIM, Keio, Beihang).
10#
11# This work is being provided by the copyright holders under the following
12# license.
13#
14# License
15#
16# By obtaining and/or copying this work, you (the licensee) agree that you have
17# read, understood, and will comply with the following terms and conditions.
18#
19# Permission to copy, modify, and distribute this work, with or without
20# modification, for any purpose and without fee or royalty is hereby granted,
21# provided that you include the following on ALL copies of the work or portions
22# thereof, including modifications:
23#
24#     The full text of this NOTICE in a location viewable to users of the
25# redistributed or derivative work.
26#     Any pre-existing intellectual property disclaimers, notices, or terms and
27# conditions. If none exist, the W3C Software and Document Short Notice should
28# be included.
29#     Notice of any changes or modifications, through a copyright statement on
30# the new code or document such as "This software or document includes material
31# copied from or derived from [title and URI of the W3C document].
32# Copyright (C) [YEAR] W3C(R) (MIT, ERCIM, Keio, Beihang)."
33#
34# Disclaimers
35#
36# THIS WORK IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE NO REPRESENTATIONS
37# OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO, WARRANTIES
38# OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF
39# THE SOFTWARE OR DOCUMENT WILL NOT INFRINGE ANY THIRD PARTY PATENTS,
40# COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS.
41#
42# COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR
43# CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR DOCUMENT.
44#
45# The name and trademarks of copyright holders may NOT be used in advertising
46# or publicity pertaining to the work without specific, written prior
47# permission. Title to copyright in this work will at all times remain with
48# copyright holders.
49#
50
51"""
A module for validating the file structure of WOFF files.
53*validateFont* is the only public function.
54
55This can also be used as a command line tool for validating WOFF files.
56"""
57
58# import
59
60import fontforge
61import os
62import re
63import time
64import sys
65import struct
66import zlib
67import optparse
68import codecs
69from xml.etree import ElementTree
70from xml.parsers.expat import ExpatError
71try:
72    from cStringIO import StringIO
73except ImportError:
74    from io import StringIO
75
76# ----------------------
77# Support: Metadata Spec
78# ----------------------
79
80"""
81The Extended Metadata specifications are defined as a set of
82nested Python objects. This allows for a very simple XML
83validation procedure. The common element structure is as follows:
84
85    {
86        # ----------
87        # Attributes
88        # ----------
89
90        # In all cases, the dictionary has the attribute name at the top
91        # with the possible value(s) as the value. If an attribute has
        # more than one representation (for example xml:lang and lang)
93        # the two are specified as a space separated string for example
94        # "xml:lang lang".
95
96        # Required
97        "requiredAttributes" : {
98            # empty or one or more of the following
99            "name" : "default as string, list of options or None"
100        },
101
102        # Recommended
103        "recommendedAttributes" : {
104            # empty or one or more of the following
105            "name" : "default as string, list of options or None"
106        },
107
108        # Optional
109        "optionalAttributes" : {
110            # empty or one or more of the following
111            "name" : "default as string, list of options or None"
112        },
113
114        # -------
115        # Content
116        # -------
117
118        "contentLevel" : "not allowed", "recommended" or "required",
119
120        # --------------
121        # Child Elements
122        # --------------
123
124        # In all cases, the dictionary has the element name at the top
125        # with a dictionary as the value. The value dictionary defines
        # the number of times the child-element may occur along with
127        # the specification for the child-element.
128
129        # Required
130        "requiredChildElements" : {
131            # empty or one or more of the following
132            "name" : {
133                "minimumOccurrences" : int or None,
134                "maximumOccurrences" : int or None,
135                "spec" : {}
136            }
137        },
138
139        # Recommended
140        "recommendedChildElements" : {
141            # empty or one or more of the following
142            "name" : {
143                # minimumOccurrences is implicitly 0
144                "maximumOccurrences" : int or None,
145                "spec" : {}
146            }
147        },
148
149        # Optional
150        "optionalChildElements" : {
151            # empty or one or more of the following
152            "name" : {
153                # minimumOccurrences is implicitly 0
154                "maximumOccurrences" : int or None,
155                "spec" : {}
156            }
157        }
158    }
159
160The recommendedAttributes and recommendedChildElements are optional
161but they are separated from the optionalAttributes and optionalChildElements
162to allow for more detailed reporting.
163"""
164
165# Metadata 1.0
166# ------------
167
168# Common Options
169
# Valid values for the "dir" (text direction) attribute: left-to-right
# and right-to-left.
dirOptions_1_0 = ["ltr", "rtl"]
171
172# Fourth-Level Elements
173
# Spec for the fourth-level <div> element: optional dir/class attributes,
# content recommended, and recursively nestable <div>/<span> children.
divSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "dir" : dirOptions_1_0,
        "class" : None
    },
    "content" : "recommended",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {
        "div" : {
            "maximumOccurrences" : None,
            "spec" : "recursive divSpec_1_0" # special override for recursion.
        },
        "span" : {
            "maximumOccurrences" : None,
            "spec" : "recursive spanSpec_1_0" # special override for recursion.
        }
    }
}
195
# Spec for the fourth-level <span> element; structurally identical to
# divSpec_1_0, including the recursive <div>/<span> children.
spanSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "dir" : dirOptions_1_0,
        "class" : None
    },
    "content" : "recommended",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {
        "div" : {
            "maximumOccurrences" : None,
            "spec" : "recursive divSpec_1_0" # special override for recursion.
        },
        "span" : {
            "maximumOccurrences" : None,
            "spec" : "recursive spanSpec_1_0" # special override for recursion.
        }
    }
}
217
218# Third-Level Elements
219
# Spec for a single <credit> element: "name" attribute is required;
# no content and no children are allowed.
creditSpec_1_0 = {
    "requiredAttributes" : {
        "name" : None
    },
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "url" : None,
        "role" : None,
        "dir" : dirOptions_1_0,
        "class" : None
    },
    "content" : "not allowed",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
236
# Spec for a <text> element: all attributes optional (including the
# xml:lang/lang pair), content recommended, optional <div>/<span> children.
textSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "url" : None,
        "role" : None,
        "dir" : dirOptions_1_0,
        "class" : None,
        "xml:lang lang" : None
    },
    "content" : "recommended",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {
        "div" : {
            "maximumOccurrences" : None,
            "spec" : divSpec_1_0
        },
        "span" : {
            "maximumOccurrences" : None,
            "spec" : spanSpec_1_0
        }
    }
}
261
# Spec for an extension <name> element: optional dir/class/lang attributes,
# content recommended, no children.
extensionNameSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "dir" : dirOptions_1_0,
        "class" : None,
        "xml:lang lang" : None
    },
    "content" : "recommended",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
275
# Spec for an extension <value> element; same shape as extensionNameSpec_1_0.
extensionValueSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "dir" : dirOptions_1_0,
        "class" : None,
        "xml:lang lang" : None
    },
    "content" : "recommended",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
289
# Spec for an extension <item> element: must contain at least one <name>
# and at least one <value>; direct content is not allowed.
extensionItemSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "id" : None
    },
    "content" : "not allowed",
    "requiredChildElements" : {
        "name" : {
            "minimumOccurrences" : 1,
            "maximumOccurrences" : None,
            "spec" : extensionNameSpec_1_0
        },
        "value" : {
            "minimumOccurrences" : 1,
            "maximumOccurrences" : None,
            "spec" : extensionValueSpec_1_0
        }
    },
    "recommendedChildElements" : {
    },
    "optionalChildElements" : {}
}
313
314# Second Level Elements
315
# Spec for the <uniqueid> element: "id" attribute required, nothing else.
uniqueidSpec_1_0 = {
    "requiredAttributes" : {
        "id" : None
    },
    "recommendedAttributes" : {},
    "optionalAttributes" : {},
    "content" : "not allowed",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
327
# Spec for the <vendor> element: "name" attribute required; url/dir/class
# optional; no content or children.
vendorSpec_1_0 = {
    "requiredAttributes" : {
        "name" : None
    },
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "url" : None,
        "dir" : dirOptions_1_0,
        "class" : None
    },
    "content" : "not allowed",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
343
# Spec for the <credits> element: must contain at least one <credit> child.
creditsSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {},
    "content" : "not allowed",
    "requiredChildElements" : {
        "credit" : {
            "minimumOccurrences" : 1,
            "maximumOccurrences" : None,
            "spec" : creditSpec_1_0
        }
    },
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
359
# Spec for the <description> element: requires at least one <text> child;
# an optional "url" attribute is allowed.
descriptionSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "url" : None,
    },
    "content" : "not allowed",
    "requiredChildElements" : {
        "text" : {
            "minimumOccurrences" : 1,
            "maximumOccurrences" : None,
            "spec" : textSpec_1_0
        }
    },
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
377
# Spec for the <license> element: <text> children are optional here
# (unlike <description>/<copyright>/<trademark>, which require them).
licenseSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "url" : None,
        "id" : None
    },
    "content" : "not allowed",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {
        "text" : {
            "maximumOccurrences" : None,
            "spec" : textSpec_1_0
        }
    }
}
395
# Spec for the <copyright> element: requires at least one <text> child.
copyrightSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {},
    "content" : "not allowed",
    "requiredChildElements" : {
        "text" : {
            "minimumOccurrences" : 1,
            "maximumOccurrences" : None,
            "spec" : textSpec_1_0
        }
    },
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
411
# Spec for the <trademark> element: requires at least one <text> child.
trademarkSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {},
    "content" : "not allowed",
    "requiredChildElements" : {
        "text" : {
            "minimumOccurrences" : 1,
            "maximumOccurrences" : None,
            "spec" : textSpec_1_0
        }
    },
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
427
# Spec for the <licensee> element: "name" attribute required; no content
# or children.
licenseeSpec_1_0 = {
    "requiredAttributes" : {
        "name" : None,
    },
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "dir" : dirOptions_1_0,
        "class" : None
    },
    "content" : "not allowed",
    "requiredChildElements" : {},
    "recommendedChildElements" : {},
    "optionalChildElements" : {}
}
442
# Spec for an <extension> element: requires at least one <item> child;
# optional <name> children and an optional "id" attribute.
extensionSpec_1_0 = {
    "requiredAttributes" : {},
    "recommendedAttributes" : {},
    "optionalAttributes" : {
        "id" : None
    },
    "content" : "not allowed",
    "requiredChildElements" : {
        "item" : {
            "minimumOccurrences" : 1,
            "maximumOccurrences" : None,
            "spec" : extensionItemSpec_1_0
        }
    },
    "recommendedChildElements" : {},
    "optionalChildElements" : {
        "name" : {
            "maximumOccurrences" : None,
            "spec" : extensionNameSpec_1_0
        }
    }
}
465
466# First Level Elements
467
# Spec for the top-level <metadata> element (Extended Metadata 1.0):
# version attribute required, <uniqueid> recommended, one each of the
# optional first-level elements, and any number of <extension> elements.
metadataSpec_1_0 = {
    "requiredAttributes" : {
        "version" : "1.0"
    },
    "recommendedAttributes" : {},
    "optionalAttributes" : {},
    "content" : "not allowed",
    "requiredChildElements" : {},
    "recommendedChildElements" : {
        "uniqueid" : {
            "maximumOccurrences" : 1,
            "spec" : uniqueidSpec_1_0
        }
    },
    "optionalChildElements" : {
        "vendor" : {
            "maximumOccurrences" : 1,
            "spec" : vendorSpec_1_0
        },
        "credits" : {
            "maximumOccurrences" : 1,
            "spec" : creditsSpec_1_0
        },
        "description" : {
            "maximumOccurrences" : 1,
            "spec" : descriptionSpec_1_0
        },
        "license" : {
            "maximumOccurrences" : 1,
            "spec" : licenseSpec_1_0
        },
        "copyright" : {
            "maximumOccurrences" : 1,
            "spec" : copyrightSpec_1_0
        },
        "trademark" : {
            "maximumOccurrences" : 1,
            "spec" : trademarkSpec_1_0
        },
        # NOTE: the original defined this "licensee" entry twice; the
        # second literal silently overwrote the first, so the duplicate
        # has been removed.
        "licensee" : {
            "maximumOccurrences" : 1,
            "spec" : licenseeSpec_1_0
        },
        "extension" : {
            "maximumOccurrences" : None,
            "spec" : extensionSpec_1_0
        }
    }
}
521
522# ----------------------
523# Support: struct Helper
524# ----------------------
525
526# This was inspired by Just van Rossum's sstruct module.
527# http://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk/Lib/sstruct.py
528
def structPack(format, obj):
    """
    Pack the values of *obj* (a mapping) into big-endian binary data,
    pulling fields in the order named by the *format* description.
    """
    keys, formatString = _structGetFormat(format)
    fieldValues = [obj[key] for key in keys]
    return struct.pack(formatString, *fieldValues)
536
def structUnpack(format, data):
    """
    Unpack *data* according to the named-field *format* description.
    Returns (fieldDict, remainingData).
    """
    keys, formatString = _structGetFormat(format)
    size = struct.calcsize(formatString)
    fieldValues = struct.unpack(formatString, data[:size])
    return dict(zip(keys, fieldValues)), data[size:]
546
def structCalcSize(format):
    """
    Return the packed byte size of the struct described by *format*.
    """
    # The field names are not needed here; only the format string is.
    _, formatString = _structGetFormat(format)
    return struct.calcsize(formatString)
550
551_structFormatCache = {}
552
553def _structGetFormat(format):
554    if format not in _structFormatCache:
555        keys = []
556        formatString = [">"] # always big endian
557        for line in format.strip().splitlines():
558            line = line.split("#", 1)[0].strip()
559            if not line:
560                continue
561            key, formatCharacter = line.split(":")
562            key = key.strip()
563            formatCharacter = formatCharacter.strip()
564            keys.append(key)
565            formatString.append(formatCharacter)
566        _structFormatCache[format] = (keys, "".join(formatString))
567    return _structFormatCache[format]
568
569# -------------
570# Tests: Header
571# -------------
572
def testHeader(data, reporter):
    """
    Run all WOFF header tests on *data*, logging results to *reporter*.
    Returns (stoppingError, nonStoppingError) booleans.
    """
    # NOTE(review): _testHeaderStructure is defined but not run here —
    # confirm that is intentional.
    subTests = (
        _testHeaderSignature,
        _testHeaderFlavor,
        _testHeaderLength,
        _testHeaderReserved,
        _testHeaderTotalSFNTSize,
        _testHeaderNumTables,
    )
    sawNonStopping = False
    for subTest in subTests:
        stopping, nonStopping = subTest(data, reporter)
        sawNonStopping = sawNonStopping or nonStopping
        if stopping:
            return True, sawNonStopping
    return False, sawNonStopping
593
594
# Named-field layout of the WOFF header, in the mini-language parsed by
# _structGetFormat (all values big endian).
headerFormat = """
    signature:      4s
    flavor:         4s
    length:         L
    numTables:      H
    reserved:       H
    totalSfntSize:  L
    majorVersion:   H
    minorVersion:   H
    metaOffset:     L
    metaLength:     L
    metaOrigLength: L
    privOffset:     L
    privLength:     L
"""
# Packed byte size of the WOFF header.
headerSize = structCalcSize(headerFormat)
611
def _testHeaderStructure(data, reporter):
    """
    Tests:
    - Header must be the proper structure.

    Returns (stoppingError, nonStoppingError) booleans.
    """
    try:
        structUnpack(headerFormat, data)
        reporter.logPass(message="The header structure is correct.")
    # Was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt
    # are not swallowed.
    except Exception:
        reporter.logError(message="The header is not properly structured.")
        return True, False
    return False, False
624
def _testHeaderSignature(data, reporter):
    """
    Tests:
    - The signature must be "wOFF".

    Returns (stoppingError, nonStoppingError) booleans; a bad signature
    stops all further testing.
    """
    signature = unpackHeader(data)["signature"]
    if signature == b"wOFF":
        reporter.logPass(message="The signature is correct.")
        return False, False
    reporter.logError(message="Invalid signature: %s." % signature)
    return True, False
638
def _testHeaderFlavor(data, reporter):
    """
    Tests:
    - The flavor should be OTTO, 0x00010000 or true. Warn if another value is found.
    - If the flavor is OTTO, the CFF table must be present.
    - If the flavor is not OTTO, the CFF must not be present.
    - If the directory cannot be unpacked, the flavor can not be validated. Issue a warning.

    Returns (stoppingError, nonStoppingError) booleans.
    """
    header = unpackHeader(data)
    flavor = header["flavor"]
    if flavor not in (b"OTTO", b"\000\001\000\000", b"true"):
        reporter.logWarning(message="Unknown flavor: %s." % flavor)
    else:
        try:
            tags = [table["tag"] for table in unpackDirectory(data)]
            if b"CFF " in tags and flavor != b"OTTO":
                reporter.logError(message="A \"CFF\" table is defined in the font and the flavor is not set to \"OTTO\".")
                return False, True
            elif b"CFF " not in tags and flavor == b"OTTO":
                reporter.logError(message="The flavor is set to \"OTTO\" but no \"CFF\" table is defined.")
                return False, True
            else:
                reporter.logPass(message="The flavor is a correct value.")
        # Was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt
        # are not swallowed. A malformed directory lands here.
        except Exception:
            reporter.logWarning(message="Could not validate the flavor.")
    return False, False
665
def _testHeaderLength(data, reporter):
    """
    Tests:
    - The length of the data must match the defined length.
    - The length of the data must be long enough for header and directory for defined number of tables.
    - The length of the data must be long enough to contain the table lengths defined in the directory,
      the metaLength and the privLength.
    """
    header = unpackHeader(data)
    length = header["length"]
    numTables = header["numTables"]
    # Smallest possible file: the header plus one directory entry per table.
    minLength = headerSize + (directorySize * numTables)
    if length != len(data):
        reporter.logError(message="Defined length (%d) does not match actual length of the data (%d)." % (length, len(data)))
        return False, True
    if length < minLength:
        reporter.logError(message="Invalid length defined (%d) for number of tables defined." % length)
        return False, True
    directory = unpackDirectory(data)
    for entry in directory:
        compLength = entry["compLength"]
        # Each table's compressed data is padded to a 4-byte boundary.
        if compLength % 4:
            compLength += 4 - (compLength % 4)
        minLength += compLength
    metaLength = privLength = 0
    if header["metaOffset"]:
        metaLength = header["metaLength"]
    if header["privOffset"]:
        privLength = header["privLength"]
    # The metadata block is only padded when private data follows it.
    if privLength and metaLength % 4:
        metaLength += 4 - (metaLength % 4)
    minLength += metaLength + privLength
    if length < minLength:
        reporter.logError(message="Defined length (%d) does not match the required length of the data (%d)." % (length, minLength))
        return False, True
    reporter.logPass(message="The length defined in the header is correct.")
    return False, False
703
def _testHeaderReserved(data, reporter):
    """
    Tests:
    - The reserved bit must be set to 0.

    Returns (stoppingError, nonStoppingError) booleans.
    """
    reserved = unpackHeader(data)["reserved"]
    if reserved:
        reporter.logError(message="Invalid value in reserved field (%d)." % reserved)
        return False, True
    reporter.logPass(message="The value in the reserved field is correct.")
    return False, False
717
def _testHeaderTotalSFNTSize(data, reporter):
    """
    Tests:
    - The size of the unpacked SFNT data must be a multiple of 4.
    - The origLength values in the directory, with proper padding, must sum
      to the totalSfntSize in the header.
    """
    header = unpackHeader(data)
    directory = unpackDirectory(data)
    totalSfntSize = header["totalSfntSize"]
    isValid = True
    if totalSfntSize % 4:
        reporter.logError(message="The total sfnt size (%d) is not a multiple of four." % totalSfntSize)
        isValid = False
    else:
        numTables = header["numTables"]
        # Start with the unpacked SFNT header plus its directory entries.
        requiredSize = sfntHeaderSize + (numTables * sfntDirectoryEntrySize)
        for table in directory:
            origLength = table["origLength"]
            # Each decompressed table is padded to a 4-byte boundary.
            if origLength % 4:
                origLength += 4 - (origLength % 4)
            requiredSize += origLength
        if totalSfntSize != requiredSize:
            reporter.logError(message="The total sfnt size (%d) does not match the required sfnt size (%d)." % (totalSfntSize, requiredSize))
            isValid = False
    if isValid:
        reporter.logPass(message="The total sfnt size is valid.")
    return False, not isValid
746
def _testHeaderNumTables(data, reporter):
    """
    Tests:
    - The number of tables must be at least 1.
    - The directory entries for the specified number of tables must be properly formatted.

    Returns (stoppingError, nonStoppingError) booleans.
    """
    header = unpackHeader(data)
    numTables = header["numTables"]
    if numTables < 1:
        reporter.logError(message="Invalid number of tables defined in header structure (%d)." % numTables)
        return False, True
    # Walk past the header and try to unpack each directory entry.
    data = data[headerSize:]
    for index in range(numTables):
        try:
            _, data = structUnpack(directoryFormat, data)
        # Was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt
        # are not swallowed.
        except Exception:
            reporter.logError(message="The defined number of tables in the header (%d) does not match the actual number of tables (%d)." % (numTables, index))
            return False, True
    reporter.logPass(message="The number of tables defined in the header is valid.")
    return False, False
767
768# -------------
769# Tests: Tables
770# -------------
771
def testDataBlocks(data, reporter):
    """
    Run all WOFF data-block tests on *data*, logging results to *reporter*.
    Returns (stoppingError, nonStoppingError) booleans.
    """
    subTests = (
        _testBlocksOffsetLengthZero,
        _testBlocksPositioning,
    )
    sawNonStopping = False
    for subTest in subTests:
        stopping, nonStopping = subTest(data, reporter)
        sawNonStopping = sawNonStopping or nonStopping
        if stopping:
            return True, sawNonStopping
    return False, sawNonStopping
788
def _testBlocksOffsetLengthZero(data, reporter):
    """
    - The metadata must have the offset and length set to zero consistently.
    - The private data must have the offset and length set to zero consistently.

    Returns (stoppingError, nonStoppingError) booleans.
    """
    header = unpackHeader(data)
    haveError = False
    blocks = (
        ("metadata", header["metaOffset"], header["metaLength"]),
        ("private data", header["privOffset"], header["privLength"]),
    )
    for label, offset, length in blocks:
        # Only check the zero-consistency rule when at least one is zero.
        if offset != 0 and length != 0:
            continue
        if offset == 0 and length == 0:
            reporter.logPass(message="The length and offset are appropriately set for empty %s." % label)
        else:
            reporter.logError(message="The %s offset (%d) and %s length (%d) are not properly set. If one is 0, they both must be 0." % (label, offset, label, length))
            haveError = True
    return False, haveError
815
def _testBlocksPositioning(data, reporter):
    """
    Tests:
    - The table data must start immediately after the directory.
    - The table data must end at the beginning of the metadata, the beginning of the private data or the end of the file.
    - The metadata must start immediately after the table data.
    - the metadata must end at the beginning of he private data (padded as needed) or the end of the file.
    - The private data must start immediately after the table data or metadata.
    - The private data must end at the edge of the file.
    """
    header = unpackHeader(data)
    haveError = False
    # table data start
    directory = unpackDirectory(data)
    if not directory:
        # With no directory entries there are no positions to check.
        return False, False
    # Table data must begin right after the last directory entry.
    expectedTableDataStart = headerSize + (directorySize * header["numTables"])
    offsets = [entry["offset"] for entry in directory]
    tableDataStart = min(offsets)
    if expectedTableDataStart != tableDataStart:
        reporter.logError(message="The table data does not start (%d) in the required position (%d)." % (tableDataStart, expectedTableDataStart))
        haveError = True
    else:
        reporter.logPass(message="The table data begins in the required position.")
    # table data end
    # The table data ends at whichever block comes next: metadata,
    # private data, or the end of the file.
    if header["metaOffset"]:
        definedTableDataEnd = header["metaOffset"]
    elif header["privOffset"]:
        definedTableDataEnd = header["privOffset"]
    else:
        definedTableDataEnd = header["length"]
    directory = unpackDirectory(data)
    # Each table's compressed data is padded to a 4-byte boundary.
    ends = [table["offset"] + table["compLength"] + calcPaddingLength(table["compLength"]) for table in directory]
    expectedTableDataEnd = max(ends)
    if expectedTableDataEnd != definedTableDataEnd:
        reporter.logError(message="The table data end (%d) is not in the required position (%d)." % (definedTableDataEnd, expectedTableDataEnd))
        haveError = True
    else:
        reporter.logPass(message="The table data ends in the required position.")
    # metadata
    if header["metaOffset"]:
        # start
        expectedMetaStart = expectedTableDataEnd
        definedMetaStart = header["metaOffset"]
        if expectedMetaStart != definedMetaStart:
            reporter.logError(message="The metadata does not start (%d) in the required position (%d)." % (definedMetaStart, expectedMetaStart))
            haveError = True
        else:
            reporter.logPass(message="The metadata begins in the required position.")
        # end
        # The metadata is only padded when private data follows it.
        if header["privOffset"]:
            definedMetaEnd = header["privOffset"]
            needMetaPadding = True
        else:
            definedMetaEnd = header["length"]
            needMetaPadding = False
        expectedMetaEnd = header["metaOffset"] + header["metaLength"]
        if needMetaPadding:
            expectedMetaEnd += calcPaddingLength(header["metaLength"])
        if expectedMetaEnd != definedMetaEnd:
            reporter.logError(message="The metadata end (%d) is not in the required position (%d)." % (definedMetaEnd, expectedMetaEnd))
            haveError = True
        else:
            reporter.logPass(message="The metadata ends in the required position.")
    # private data
    if header["privOffset"]:
        # start
        # Private data follows the metadata when present, otherwise the
        # table data.
        if header["metaOffset"]:
            expectedPrivateStart = expectedMetaEnd
        else:
            expectedPrivateStart = expectedTableDataEnd
        definedPrivateStart = header["privOffset"]
        if expectedPrivateStart != definedPrivateStart:
            reporter.logError(message="The private data does not start (%d) in the required position (%d)." % (definedPrivateStart, expectedPrivateStart))
            haveError = True
        else:
            reporter.logPass(message="The private data begins in the required position.")
        # end
        # Private data must run to the very end of the file.
        expectedPrivateEnd = header["length"]
        definedPrivateEnd = header["privOffset"] + header["privLength"]
        if expectedPrivateEnd != definedPrivateEnd:
            reporter.logError(message="The private data end (%d) is not in the required position (%d)." % (definedPrivateEnd, expectedPrivateEnd))
            haveError = True
        else:
            reporter.logPass(message="The private data ends in the required position.")
    return False, haveError
902
903# ----------------------
904# Tests: Table Directory
905# ----------------------
906
def testTableDirectory(data, reporter):
    """
    Run all WOFF table-directory tests on *data*, logging results to
    *reporter*. Returns (stoppingError, nonStoppingError) booleans.
    """
    subTests = (
        _testTableDirectoryStructure,
        _testTableDirectory4ByteOffsets,
        _testTableDirectoryPadding,
        _testTableDirectoryPositions,
        _testTableDirectoryCompressedLength,
        _testTableDirectoryDecompressedLength,
        _testTableDirectoryChecksums,
        _testTableDirectoryTableOrder,
    )
    sawNonStopping = False
    for subTest in subTests:
        stopping, nonStopping = subTest(data, reporter)
        sawNonStopping = sawNonStopping or nonStopping
        if stopping:
            return True, sawNonStopping
    return False, sawNonStopping
929
# Format of a single entry in the WOFF table directory:
# a 4-byte tag followed by four 32-bit unsigned integers.
directoryFormat = """
    tag:            4s
    offset:         L
    compLength:     L
    origLength:     L
    origChecksum:   L
"""
# size in bytes of one packed directory entry
directorySize = structCalcSize(directoryFormat)
938
def _testTableDirectoryStructure(data, reporter):
    """
    Tests:
    - The entries in the table directory can be unpacked.

    Returns (stoppingError, nonStoppingError). A structure failure is a
    stopping error because later directory tests cannot run.
    """
    header = unpackHeader(data)
    numTables = header["numTables"]
    # skip past the header; the directory entries follow immediately
    data = data[headerSize:]
    try:
        for _ in range(numTables):
            _entry, data = structUnpack(directoryFormat, data)
        reporter.logPass(message="The table directory structure is correct.")
    except Exception:
        # narrowed from a bare except so that KeyboardInterrupt and
        # SystemExit are not swallowed
        reporter.logError(message="The table directory is not properly structured.")
        return True, False
    return False, False
955
def _testTableDirectory4ByteOffsets(data, reporter):
    """
    Tests:
    - The font tables must each begin on a 4-byte boundary.
    """
    haveError = False
    for entry in unpackDirectory(data):
        tag = entry["tag"]
        offset = entry["offset"]
        if offset % 4 == 0:
            reporter.logPass(message="The \"%s\" table begins on a 4-byte boundary." % tag)
        else:
            reporter.logError(message="The \"%s\" table does not begin on a 4-byte boundary (%d)." % (tag, offset))
            haveError = True
    return False, haveError
972
def _testTableDirectoryPadding(data, reporter):
    """
    Tests:
    - All tables, including the final table, must be padded to a
      four byte boundary using null bytes as needed.
    """
    header = unpackHeader(data)
    directory = unpackDirectory(data)
    haveError = False
    # the sfnt data block ends where the metadata, the private data,
    # or the file itself ends, whichever comes first
    if header["metaOffset"]:
        sfntEnd = header["metaOffset"]
    elif header["privOffset"]:
        sfntEnd = header["privOffset"]
    else:
        sfntEnd = header["length"]
    if sfntEnd % 4 == 0:
        reporter.logPass(message="The sfnt data ends with proper padding.")
    else:
        reporter.logError(message="The sfnt data does not end with proper padding.")
        haveError = True
    # verify that the padding bytes of each table are null
    for entry in directory:
        tag = entry["tag"]
        compLength = entry["compLength"]
        padLength = calcPaddingLength(compLength)
        if not padLength:
            continue
        padStart = entry["offset"] + compLength
        if data[padStart:padStart + padLength] == b"\0" * padLength:
            reporter.logPass(message="The \"%s\" table is padded with null bytes." % tag)
        else:
            reporter.logError(message="The \"%s\" table is not padded with null bytes." % tag)
            haveError = True
    return False, haveError
1012
def _testTableDirectoryPositions(data, reporter):
    """
    Tests:
    - The table offsets must not be before the end of the header/directory.
    - The table offset + length must not be greater than the edge of the available space.
    - The table offsets must not be after the edge of the available space.
    - Table blocks must not overlap.
    - There must be no gaps between the tables.

    Returns (stoppingError, nonStoppingError); all problems found here
    are non-stopping.
    """
    directory = unpackDirectory(data)
    # tags with any problem are collected here so that the pass
    # messages at the end are only logged for clean tables
    tablesWithProblems = set()
    haveError = False
    # test for overlapping tables
    locations = []
    for table in directory:
        offset = table["offset"]
        length = table["compLength"]
        # a table occupies its compressed length plus the padding
        # to the next 4-byte boundary
        length = length + calcPaddingLength(length)
        locations.append((offset, offset + length, table["tag"]))
    # pairwise O(n^2) scan; the table count is small
    for start, end, tag in locations:
        for otherStart, otherEnd, otherTag in locations:
            if tag == otherTag:
                continue
            if start >= otherStart and start < otherEnd:
                reporter.logError(message="The \"%s\" table overlaps the \"%s\" table." % (tag, otherTag))
                tablesWithProblems.add(tag)
                tablesWithProblems.add(otherTag)
                haveError = True
    # test for invalid offset, length and combo
    header = unpackHeader(data)
    # the table data block ends where the metadata, the private
    # data, or the file itself ends, whichever comes first
    if header["metaOffset"] != 0:
        tableDataEnd = header["metaOffset"]
    elif header["privOffset"] != 0:
        tableDataEnd = header["privOffset"]
    else:
        tableDataEnd = header["length"]
    numTables = header["numTables"]
    # the earliest legal table offset: immediately after the
    # header and the table directory
    minOffset = headerSize + (directorySize * numTables)
    maxLength = tableDataEnd - minOffset  # NOTE(review): unused; kept as-is
    for table in directory:
        tag = table["tag"]
        offset = table["offset"]
        length = table["compLength"]
        # offset is before the beginning of the table data block
        if offset < minOffset:
            tablesWithProblems.add(tag)
            message = "The \"%s\" table directory entry offset (%d) is before the start of the table data block (%d)." % (tag, offset, minOffset)
            reporter.logError(message=message)
            haveError = True
        # offset is after the end of the table data block
        elif offset > tableDataEnd:
            tablesWithProblems.add(tag)
            message = "The \"%s\" table directory entry offset (%d) is past the end of the table data block (%d)." % (tag, offset, tableDataEnd)
            reporter.logError(message=message)
            haveError = True
        # offset + length is after the end of the table data block
        elif (offset + length) > tableDataEnd:
            tablesWithProblems.add(tag)
            message = "The \"%s\" table directory entry offset (%d) + length (%d) is past the end of the table data block (%d)." % (tag, offset, length, tableDataEnd)
            reporter.logError(message=message)
            haveError = True
    # test for gaps
    tables = []
    for table in directory:
        tag = table["tag"]
        offset = table["offset"]
        length = table["compLength"]
        length += calcPaddingLength(length)
        tables.append((offset, offset + length, tag))
    tables.sort()
    # after sorting by offset, each table should start exactly where
    # the previous table (including its padding) ended
    for index, (start, end, tag) in enumerate(tables):
        if index == 0:
            continue
        prevStart, prevEnd, prevTag = tables[index - 1]
        if prevEnd < start:
            tablesWithProblems.add(prevTag)
            tablesWithProblems.add(tag)
            reporter.logError(message="Extraneous data between the \"%s\" and \"%s\" tables." % (prevTag, tag))
            haveError = True
    # log passes
    for entry in directory:
        tag = entry["tag"]
        if tag in tablesWithProblems:
            continue
        reporter.logPass(message="The \"%s\" table directory entry has a valid offset and length." % tag)
    return False, haveError
1099
def _testTableDirectoryCompressedLength(data, reporter):
    """
    Tests:
    - The compressed length must be less than or equal to the original length.
    """
    haveError = False
    for entry in unpackDirectory(data):
        tag = entry["tag"]
        compLength = entry["compLength"]
        origLength = entry["origLength"]
        if compLength < origLength:
            reporter.logPass(message="The \"%s\" table directory entry has proper compLength and origLength values." % tag)
        elif compLength == origLength:
            # equal lengths mean the table is stored uncompressed
            reporter.logPass(message="The \"%s\" table directory entry is not compressed." % tag)
        else:
            reporter.logError(message="The \"%s\" table directory entry has a compressed length (%d) larger than the original length (%d)." % (tag, compLength, origLength))
            haveError = True
    return False, haveError
1119
def _testTableDirectoryDecompressedLength(data, reporter):
    """
    Tests:
    - The decompressed length of the data must match the defined original length.
    """
    tableData = unpackTableData(data)
    haveError = False
    for entry in unpackDirectory(data):
        # only compressed tables are checked here
        if entry["compLength"] >= entry["origLength"]:
            continue
        tag = entry["tag"]
        decompressed = tableData[tag]
        if decompressed is None:
            # couldn't be decompressed. handled elsewhere.
            continue
        origLength = entry["origLength"]
        decompressedLength = len(decompressed)
        if origLength == decompressedLength:
            reporter.logPass(message="The \"%s\" table directory entry has a proper original length compared to the actual decompressed data." % tag)
        else:
            reporter.logError(message="The \"%s\" table directory entry has an original length (%d) that does not match the actual length of the decompressed data (%d)." % (tag, origLength, decompressedLength))
            haveError = True
    return False, haveError
1146
def _testTableDirectoryChecksums(data, reporter):
    """
    Tests:
    - The checksums for the tables must match the checksums in the directory.
    - The head checksum adjustment must be correct.
    """
    # check the table directory checksums
    directory = unpackDirectory(data)
    tables = unpackTableData(data)
    haveError = False
    for entry in directory:
        tag = entry["tag"]
        origChecksum = entry["origChecksum"]
        decompressedData = tables[tag]
        # couldn't be decompressed. handled elsewhere.
        if decompressedData is None:
            continue
        newChecksum = calcChecksum(tag, decompressedData)
        if newChecksum != origChecksum:
            # strip the "L" suffix Python 2 appends to long hex values
            newChecksum = hex(newChecksum).strip("L")
            origChecksum = hex(origChecksum).strip("L")
            reporter.logError(message="The \"%s\" table directory entry original checksum (%s) does not match the checksum (%s) calculated from the data." % (tag, origChecksum, newChecksum))
            haveError = True
        else:
            reporter.logPass(message="The \"%s\" table directory entry original checksum is correct." % tag)
    # check the head checksum adjustment
    if b"head" not in tables:
        reporter.logWarning(message="The font does not contain a \"head\" table.")
    else:
        newChecksum = calcHeadChecksum(data)
        data = tables[b"head"]
        try:
            # checkSumAdjustment lives at bytes 8-11 of the head table
            checksum = struct.unpack(">L", data[8:12])[0]
            if checksum != newChecksum:
                checksum = hex(checksum).strip("L")
                newChecksum = hex(newChecksum).strip("L")
                reporter.logError(message="The \"head\" table checkSumAdjustment (%s) does not match the calculated checkSumAdjustment (%s)." % (checksum, newChecksum))
                haveError = True
            else:
                reporter.logPass(message="The \"head\" table checkSumAdjustment is valid.")
        except (struct.error, TypeError):
            # narrowed from a bare except: struct.error covers a head
            # table too short to hold the field, TypeError covers a
            # table that could not be decompressed (None).
            reporter.logError(message="The \"head\" table is not properly structured.")
            haveError = True
    return False, haveError
1191
def _testTableDirectoryTableOrder(data, reporter):
    """
    Tests:
    - The directory entries must be stored in ascending order based on their tag.
    """
    tags = [entry["tag"] for entry in unpackDirectory(data)]
    # each tag must be <= its successor
    if all(a <= b for a, b in zip(tags, tags[1:])):
        reporter.logPass(message="The table directory entries are stored in the proper order.")
        return False, False
    reporter.logError(message="The table directory entries are not stored in alphabetical order.")
    return False, True
1204
1205# -----------------
1206# Tests: Table Data
1207# -----------------
1208
def testTableData(data, reporter):
    """
    Test the table data.

    Returns (stoppingError, nonStoppingError).
    """
    sawNonStopping = False
    for subTest in (_testTableDataDecompression,):
        shouldStop, nonStopping = subTest(data, reporter)
        sawNonStopping = sawNonStopping or nonStopping
        if shouldStop:
            return True, sawNonStopping
    return False, sawNonStopping
1224
def _testTableDataDecompression(data, reporter):
    """
    Tests:
    - The table data, when the defined compressed length is less
      than the original length, must be properly compressed.
    """
    haveError = False
    for table in unpackDirectory(data):
        tag = table["tag"]
        offset = table["offset"]
        compLength = table["compLength"]
        origLength = table["origLength"]
        # equal lengths mean the table is stored uncompressed
        if origLength <= compLength:
            continue
        entryData = data[offset:offset+compLength]
        try:
            # the decompressed bytes are not needed here; this only
            # verifies that zlib accepts the stream (unused local removed)
            zlib.decompress(entryData)
            reporter.logPass(message="The \"%s\" table data can be decompressed with zlib." % tag)
        except zlib.error:
            reporter.logError(message="The \"%s\" table data can not be decompressed with zlib." % tag)
            haveError = True
    return False, haveError
1247
1248# ----------------
1249# Tests: Metadata
1250# ----------------
1251
def testMetadata(data, reporter):
    """
    Test the WOFF metadata.

    Returns (stoppingError, nonStoppingError). Skips entirely (with a
    note) when the font carries no metadata.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return False, False
    subTests = (
        _testMetadataPadding,
        _testMetadataDecompression,
        _testMetadataDecompressedLength,
        _testMetadataParse,
        _testMetadataEncoding,
        _testMetadataStructure
    )
    sawNonStopping = False
    for subTest in subTests:
        shouldStop, nonStopping = subTest(data, reporter)
        sawNonStopping = sawNonStopping or nonStopping
        if shouldStop:
            return True, sawNonStopping
    return False, sawNonStopping
1274
def _shouldSkipMetadataTest(data, reporter):
    """
    This is used at the start of metadata test functions.
    It writes a note and returns True if no metadata exists;
    otherwise it returns False.
    """
    header = unpackHeader(data)
    metaOffset = header["metaOffset"]
    metaLength = header["metaLength"]
    if metaOffset == 0 or metaLength == 0:
        reporter.logNote(message="No metadata to test.")
        return True
    # explicit return (the original fell through and returned None)
    return False
1286
def _testMetadataPadding(data, reporter):
    """
    Tests:
    - The padding must be null.
    """
    header = unpackHeader(data)
    # padding after the metadata is only required when private data follows
    if not header["metaOffset"] or not header["privOffset"]:
        return False, False
    paddingLength = calcPaddingLength(header["metaLength"])
    if not paddingLength:
        return False, False
    paddingOffset = header["metaOffset"] + header["metaLength"]
    padding = data[paddingOffset:paddingOffset + paddingLength]
    # data is a byte string, so the expected padding must be bytes;
    # the original str literal ("\0") never compares equal to a bytes
    # slice under Python 3 (sibling table-padding test already uses b"\0")
    expectedPadding = b"\0" * paddingLength
    if padding != expectedPadding:
        reporter.logError(message="The metadata is not padded with null bytes.")
        return False, True
    else:
        reporter.logPass(message="The metadata is padded with null bytes,")
        return False, False
1306
1307# does this need to be tested?
1308#
1309# def testMetadataIsCompressed(data, reporter):
1310#     """
1311#     Tests:
1312#     - The metadata must be compressed.
1313#     """
1314#     if _shouldSkipMetadataTest(data, reporter):
1315#         return
1316#     header = unpackHeader(data)
1317#     length = header["metaLength"]
1318#     origLength = header["metaOrigLength"]
1319#     if length >= origLength:
#         reporter.logError(message="The compressed metadata length (%d) is higher than or equal to the original, uncompressed length (%d)." % (length, origLength))
#         return True
#     reporter.logPass(message="The compressed metadata length is smaller than the original, uncompressed length.")
1323
def _testMetadataDecompression(data, reporter):
    """
    Tests:
    - Metadata must be compressed with zlib.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return False, False
    compressed = unpackMetadata(data, decompress=False, parse=False)
    try:
        # only the fact that zlib accepts the stream matters here
        zlib.decompress(compressed)
    except zlib.error:
        reporter.logError(message="The metadata can not be decompressed with zlib.")
        return True, False
    reporter.logPass(message="The metadata can be decompressed with zlib.")
    return False, False
1339
def _testMetadataDecompressedLength(data, reporter):
    """
    Tests:
    - The length of the decompressed metadata must match the defined original length.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return False, False
    metaOrigLength = unpackHeader(data)["metaOrigLength"]
    decompressedLength = len(unpackMetadata(data, parse=False))
    if metaOrigLength == decompressedLength:
        reporter.logPass(message="The decompressed metadata length matches the original metadata length in the header.")
        return False, False
    reporter.logError(message="The decompressed metadata length (%d) does not match the original metadata length (%d) in the header." % (decompressedLength, metaOrigLength))
    return False, True
1357
def _testMetadataParse(data, reporter):
    """
    Tests:
    - The metadata must be well-formed.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return False, False
    metadata = unpackMetadata(data, parse=False)
    try:
        ElementTree.fromstring(metadata)
    except (ExpatError, LookupError, ElementTree.ParseError):
        # ElementTree.fromstring raises ElementTree.ParseError (a
        # SyntaxError subclass, not an ExpatError) on malformed XML in
        # Python 3, so it must be caught explicitly.
        reporter.logError(message="The metadata can not be parsed.")
        return True, False
    reporter.logPass(message="The metadata can be parsed.")
    return False, False
1373
def _testMetadataEncoding(data, reporter):
    """
    Tests:
    - The metadata must be UTF-8 encoded.

    The metadata has not been decoded at this point, so all sniffing
    is done on byte strings (the BOM comparison below already requires
    bytes; the original mixed str and bytes literals, which fails
    under Python 3).
    """
    if _shouldSkipMetadataTest(data, reporter):
        return False, False
    metadata = unpackMetadata(data, parse=False)
    errorMessage = "The metadata encoding is not valid."
    encoding = None
    # check the BOM
    if not metadata.startswith(b"<"):
        if not metadata.startswith(codecs.BOM_UTF8):
            reporter.logError(message=errorMessage)
            return False, True
        else:
            encoding = "UTF-8"
    # sniff the encoding
    else:
        # quick test to ensure that the regular expression will work.
        # the string must start with <?xml. this will catch
        # other encodings such as: <\x00?\x00x\x00m\x00l
        if not metadata.startswith(b"<?xml"):
            reporter.logError(message=errorMessage)
            return False, True
        # go to the first occurrence of >
        line = metadata.split(b">", 1)[0]
        # find an encoding string (raw bytes pattern: the subject is bytes)
        pattern = re.compile(
            br"\s+"
            br"encoding"
            br"\s*"
            br"="
            br"\s*"
            br"[\"']+"
            br"([^\"']+)"
        )
        m = pattern.search(line)
        if m:
            # decode so the comparison below works on both Python 2 and 3
            encoding = m.group(1).decode("ascii", "replace")
        else:
            encoding = "UTF-8"
    # report
    if encoding != "UTF-8":
        reporter.logError(message=errorMessage)
        return False, True
    else:
        reporter.logPass(message="The metadata is properly encoded.")
        return False, False
1423
def _testMetadataStructure(data, reporter):
    """
    Test the metadata structure.
    """
    if _shouldSkipMetadataTest(data, reporter):
        return False, False
    tree = unpackMetadata(data)
    # the top element must be metadata
    if tree.tag != "metadata":
        reporter.logError("The top element is not \"metadata\".")
        return False, True
    # the version attribute selects the specification
    version = tree.attrib.get("version")
    if not version:
        reporter.logError("The \"version\" attribute is not defined.")
        return False, True
    spec = {"1.0": metadataSpec_1_0}.get(version)
    if spec is None:
        reporter.logError("Unknown version (\"%s\")." % version)
        return False, True
    haveError = _validateMetadataElement(tree, spec, reporter)
    if not haveError:
        reporter.logPass("The \"metadata\" element is properly formatted.")
    return False, haveError
1452
1453def _validateMetadataElement(element, spec, reporter, parentTree=[]):
1454    haveError = False
1455    # unknown attributes
1456    knownAttributes = []
1457    for attrib in spec["requiredAttributes"].keys() + spec["recommendedAttributes"].keys() + spec["optionalAttributes"].keys():
1458        attrib = _parseAttribute(attrib)
1459        knownAttributes.append(attrib)
1460    for attrib in sorted(element.attrib.keys()):
1461        # the search is a bit complicated because there are
1462        # attributes that have more than one name.
1463        found = False
1464        for knownAttrib in knownAttributes:
1465            if knownAttrib == attrib:
1466                found = True
1467                break
1468            elif isinstance(knownAttrib, list) and attrib in knownAttrib:
1469                found = True
1470                break
1471        if not found:
1472            _logMetadataResult(
1473                reporter,
1474                "error",
1475                "Unknown attribute (\"%s\")" % attrib,
1476                element.tag,
1477                parentTree
1478            )
1479            haveError = True
1480    # attributes
1481    s = [
1482        ("requiredAttributes", "required"),
1483        ("recommendedAttributes", "recommended"),
1484        ("optionalAttributes", "optional")
1485    ]
1486    for key, requirementLevel in s:
1487        if spec[key]:
1488            e = _validateAttributes(element, spec[key], reporter, parentTree, requirementLevel)
1489            if e:
1490                haveError = True
1491    # unknown child-elements
1492    knownChildElements = spec["requiredChildElements"].keys() + spec["recommendedChildElements"].keys() + spec["optionalChildElements"].keys()
1493    for childElement in element:
1494        if childElement.tag not in knownChildElements:
1495           _logMetadataResult(
1496               reporter,
1497               "error",
1498               "Unknown child-element (\"%s\")" % childElement.tag,
1499               element.tag,
1500               parentTree
1501           )
1502           haveError = True
1503    # child elements
1504    s = [
1505        ("requiredChildElements", "required"),
1506        ("recommendedChildElements", "recommended"),
1507        ("optionalChildElements", "optional")
1508    ]
1509    for key, requirementLevel in s:
1510        if spec[key]:
1511            for childElementTag, childElementData in sorted(spec[key].items()):
1512                e = _validateChildElements(element, childElementTag, childElementData, reporter, parentTree, requirementLevel)
1513                if e:
1514                    haveError = True
1515    # content
1516    content = element.text
1517    if content is not None:
1518        content = content.strip()
1519    if content and spec["content"] == "not allowed":
1520        _logMetadataResult(
1521            reporter,
1522            "error",
1523            "Content defined",
1524            element.tag,
1525            parentTree
1526        )
1527        haveError = True
1528    elif not content and content and spec["content"] == "required":
1529        _logMetadataResult(
1530            reporter,
1531            "error",
1532            "Content not defined",
1533            element.tag,
1534            parentTree
1535        )
1536    elif not content and spec["content"] == "recommended":
1537        _logMetadataResult(
1538            reporter,
1539            "warn",
1540            "Content not defined",
1541            element.tag,
1542            parentTree
1543        )
1544    # log the result
1545    if not haveError and parentTree == ["metadata"]:
1546        reporter.logPass("The \"%s\" element is properly formatted." % element.tag)
1547    # done
1548    return haveError
1549
1550def _parseAttribute(attrib):
1551    if " " in attrib:
1552        final = []
1553        for a in attrib.split(" "):
1554            if a.startswith("xml:"):
1555                a = "{http://www.w3.org/XML/1998/namespace}" + a[4:]
1556            final.append(a)
1557        return final
1558    return attrib
1559
1560def _unEtreeAttribute(attrib):
1561    ns = "{http://www.w3.org/XML/1998/namespace}"
1562    if attrib.startswith(ns):
1563        attrib = "xml:" + attrib[len(ns):]
1564    return attrib
1565
1566def _validateAttributes(element, spec, reporter, parentTree, requirementLevel):
1567    haveError = False
1568    for attrib, valueOptions in sorted(spec.items()):
1569        attribs = _parseAttribute(attrib)
1570        if isinstance(attribs, basestring):
1571            attribs = [attribs]
1572        found = []
1573        for attrib in attribs:
1574            if attrib in element.attrib:
1575                found.append(attrib)
1576        # make strings for reporting
1577        if len(attribs) > 1:
1578            attribString = ", ".join(["\"%s\"" % _unEtreeAttribute(i) for i in attribs])
1579        else:
1580            attribString = "\"%s\"" % attribs[0]
1581        if len(found) == 0:
1582            pass
1583        elif len(found) > 1:
1584            foundString = ", ".join(["\"%s\"" % _unEtreeAttribute(i) for i in found])
1585        else:
1586            foundString = "\"%s\"" % found[0]
1587        # more than one of the mutually exclusive attributes found
1588        if len(found) > 1:
1589            _logMetadataResult(
1590                reporter,
1591                "error",
1592                "More than one mutually exclusive attribute (%s) defined" % foundString,
1593                element.tag,
1594                parentTree
1595            )
1596            haveError = True
1597        # missing
1598        elif len(found) == 0:
1599            if requirementLevel == "optional":
1600                continue
1601            elif requirementLevel == "required":
1602                errorLevel = "error"
1603            else:
1604                errorLevel = "warn"
1605            _logMetadataResult(
1606                reporter,
1607                errorLevel,
1608                "%s \"%s\" attribute not defined" % (requirementLevel.title(), attrib),
1609                element.tag,
1610                parentTree
1611            )
1612            if requirementLevel == "required":
1613                haveError = True
1614        # incorrect value
1615        else:
1616            e = _validateAttributeValue(element, found[0], valueOptions, reporter, parentTree)
1617            if e:
1618                haveError = True
1619    # done
1620    return haveError
1621
1622def _validateAttributeValue(element, attrib, valueOptions, reporter, parentTree):
1623    haveError = False
1624    value = element.attrib[attrib]
1625    if isinstance(valueOptions, basestring):
1626        valueOptions = [valueOptions]
1627    # no defined value options
1628    if valueOptions is None:
1629        # the string is empty
1630        if not value:
1631            _logMetadataResult(
1632                reporter,
1633                "warn",
1634                "Value for the \"%s\" attribute is an empty string" % attrib,
1635                element.tag,
1636                parentTree
1637            )
1638    # illegal value
1639    elif value not in valueOptions:
1640        _logMetadataResult(
1641            reporter,
1642            "error",
1643            "Invalid value (\"%s\") for the \"%s\" attribute" % (value, attrib),
1644            element.tag,
1645            parentTree
1646        )
1647        haveError = True
1648    # return the error state
1649    return haveError
1650
1651def _validateChildElements(element, childElementTag, childElementData, reporter, parentTree, requirementLevel):
1652    haveError = False
1653    # get the valid counts
1654    minimumOccurrences = childElementData.get("minimumOccurrences", 0)
1655    maximumOccurrences = childElementData.get("maximumOccurrences", None)
1656    # find the appropriate elements
1657    found = element.findall(childElementTag)
1658    # not defined enough times
1659    if minimumOccurrences == 1 and len(found) == 0:
1660        _logMetadataResult(
1661            reporter,
1662            "error",
1663            "%s \"%s\" child-element not defined" % (requirementLevel.title(), childElementTag),
1664            element.tag,
1665            parentTree
1666        )
1667        haveError = True
1668    elif len(found) < minimumOccurrences:
1669        _logMetadataResult(
1670            reporter,
1671            "error",
1672            "%s \"%s\" child-element is defined %d times instead of the minimum %d times" % (requirementLevel.title(), childElementTag, len(found), minimumOccurrences),
1673            element.tag,
1674            parentTree
1675        )
1676        haveError = True
1677    # not defined, but not recommended
1678    elif len(found) == 0 and requirementLevel == "recommended":
1679        _logMetadataResult(
1680            reporter,
1681            "warn",
1682            "%s \"%s\" child-element is not defined" % (requirementLevel.title(), childElementTag),
1683            element.tag,
1684            parentTree
1685        )
1686    # defined too many times
1687    if maximumOccurrences is not None:
1688        if maximumOccurrences == 1 and len(found) > 1:
1689            _logMetadataResult(
1690                reporter,
1691                "error",
1692                "%s \"%s\" child-element defined more than once" % (requirementLevel.title(), childElementTag),
1693                element.tag,
1694                parentTree
1695            )
1696            haveError = True
1697        elif len(found) > maximumOccurrences:
1698            _logMetadataResult(
1699                reporter,
1700                "error",
1701                "%s \"%s\" child-element defined %d times instead of the maximum %d times" % (requirementLevel.title(), childElementTag, len(found), minimumOccurrences),
1702                element.tag,
1703                parentTree
1704            )
1705            haveError = True
1706    # validate the found elements
1707    if not haveError:
1708        for childElement in found:
1709            # handle recursive child-elements
1710            childElementSpec = childElementData["spec"]
1711            if childElementSpec == "recursive divSpec_1_0":
1712                childElementSpec = divSpec_1_0
1713            elif childElementSpec == "recursive spanSpec_1_0":
1714                childElementSpec = spanSpec_1_0
1715            # dive
1716            e = _validateMetadataElement(childElement, childElementSpec, reporter, parentTree + [element.tag])
1717            if e:
1718                haveError = True
1719    # return the error state
1720    return haveError
1721
1722# logging support
1723
def _logMetadataResult(reporter, result, message, elementTag, parentTree):
    """
    Format a metadata validation message with its element ancestry and
    route it to the appropriate reporter log method.

    result must be one of "error", "warn", "note" or "pass"; anything
    else raises KeyError.
    """
    formatted = _formatMetadataResultMessage(message, elementTag, parentTree)
    if result == "error":
        reporter.logError(formatted)
    elif result == "warn":
        reporter.logWarning(formatted)
    elif result == "note":
        reporter.logNote(formatted)
    elif result == "pass":
        reporter.logPass(formatted)
    else:
        # match the original dict-lookup behavior for unknown result kinds
        raise KeyError(result)
1733
1734def _formatMetadataResultMessage(message, elementTag, parentTree):
1735    parentTree = parentTree + [elementTag]
1736    if parentTree[0] == "metadata":
1737        parentTree = parentTree[1:]
1738    if parentTree:
1739        parentTree = ["\"%s\"" % t for t in reversed(parentTree) if t is not None]
1740        message += " in " + " in ".join(parentTree)
1741    message += "."
1742    return message
1743
1744# ----------------
1745# Metadata Display
1746# ----------------
1747
def getMetadataForDisplay(data):
    """
    Build a display tree from the WOFF metadata in *data*.

    Returns None when no metadata is present; otherwise a list of dicts,
    one per top-level element, each shaped as::

        {
            "tag": element tag,
            "attributes": {attribute: value},
            "text": stripped text or None,
            "children": [... nested dicts of the same shape ...]
        }
    """
    # bail out unless a metadata block is actually present
    if not unpackMetadata(data, parse=False):
        return None
    tree = []
    for element in unpackMetadata(data):
        _recurseMetadataElement(element, tree)
    return tree
1774
1775def _recurseMetadataElement(element, tree):
1776    # tag
1777    tag = element.tag
1778    # text
1779    text = element.text
1780    if text:
1781        text = text.strip()
1782    if not text:
1783        text = None
1784    # attributes
1785    attributes = {}
1786    ns = "{http://www.w3.org/XML/1998/namespace}"
1787    for key, value in element.attrib.items():
1788        if key.startswith(ns):
1789            key = "xml:" + key[len(ns):]
1790        attributes[key] = value
1791    # compile the dictionary
1792    d = dict(
1793        tag=tag,
1794        attributes=attributes,
1795        text=text,
1796        children=[]
1797    )
1798    tree.append(d)
1799    # run through the children
1800    for sub in element:
1801        _recurseMetadataElement(sub, d["children"])
1802
1803
1804# -------------------------
1805# Support: Misc. SFNT Stuff
1806# -------------------------
1807
1808# Some of this was adapted from fontTools.ttLib.sfnt
1809
# sfnt header: the fixed 12-byte structure at the start of an sfnt font.
sfntHeaderFormat = """
    sfntVersion:    4s
    numTables:      H
    searchRange:    H
    entrySelector:  H
    rangeShift:     H
"""
sfntHeaderSize = structCalcSize(sfntHeaderFormat)

# sfnt table directory entry: one 16-byte record per table.
sfntDirectoryEntryFormat = """
    tag:            4s
    checkSum:       L
    offset:         L
    length:         L
"""
sfntDirectoryEntrySize = structCalcSize(sfntDirectoryEntryFormat)
1826
def maxPowerOfTwo(value):
    """
    Return the exponent of the largest power of two that is <= value
    (i.e. floor(log2(value)) for positive value; 0 when value is 0).
    """
    exponent = 0
    while value:
        exponent += 1
        value >>= 1
    # a zero input never enters the loop; clamp to 0
    return max(exponent - 1, 0)
1833
def getSearchRange(numTables):
    """
    Compute the (searchRange, entrySelector, rangeShift) triple for an
    sfnt header with the given table count.
    """
    entrySelector = maxPowerOfTwo(numTables)
    searchRange = 16 * (2 ** entrySelector)
    rangeShift = 16 * numTables - searchRange
    return searchRange, entrySelector, rangeShift
1840
def calcPaddingLength(length):
    """Return the number of bytes needed to pad length to a 4-byte boundary."""
    remainder = length % 4
    return (4 - remainder) if remainder else 0
1845
def padData(data):
    """Return data padded with null bytes up to a 4-byte boundary."""
    remainder = len(data) % 4
    if remainder:
        data += b"\0" * (4 - remainder)
    return data
1849
def sumDataULongs(data):
    """
    Sum *data* as a sequence of big-endian unsigned 32-bit integers,
    modulo 2**32. data's length must be a multiple of four bytes.
    """
    # use floor division: under Python 3, "/" yields a float and the format
    # string only worked through "%d"'s silent truncation
    longs = struct.unpack(">%dL" % (len(data) // 4), data)
    return sum(longs) % (2 ** 32)
1854
def calcChecksum(tag, data):
    """
    Compute the sfnt checksum of a table's data.

    For the head table the checkSumAdjustment field (bytes 8-11) is
    zeroed before summing.
    """
    if tag == b"head":
        data = data[:8] + b"\0\0\0\0" + data[12:]
    return sumDataULongs(padData(data))
1861
def calcHeadChecksum(data):
    """
    Compute the expected head-table checkSumAdjustment for the sfnt font
    that the WOFF in *data* decodes to.

    The sfnt header and table directory are reconstructed from the WOFF
    header/directory, the directory checksum is added to the stored
    per-table checksums, and the total is subtracted from the magic
    constant 0xB1B0AFBA (mod 2**32).
    """
    header = unpackHeader(data)
    directory = unpackDirectory(data)
    numTables = header["numTables"]
    # build the sfnt directory
    searchRange, entrySelector, rangeShift = getSearchRange(numTables)
    sfntHeaderData = dict(
        sfntVersion=header["flavor"],
        numTables=numTables,
        searchRange=searchRange,
        entrySelector=entrySelector,
        rangeShift=rangeShift
    )
    sfntData = structPack(sfntHeaderFormat, sfntHeaderData)
    sfntEntries = {}
    # table data starts immediately after the header and directory entries
    offset = sfntHeaderSize + (sfntDirectoryEntrySize * numTables)
    # walk the tables in WOFF-offset order to assign their sfnt offsets
    directory = [(entry["offset"], entry) for entry in directory]
    for o, entry in sorted(directory):
        checksum = entry["origChecksum"]
        tag = entry["tag"]
        length = entry["origLength"]
        sfntEntries[tag] = dict(
            tag=tag,
            checkSum=checksum,
            offset=offset,
            length=length
        )
        # each table is padded to a four-byte boundary
        offset += length + calcPaddingLength(length)
    # directory entries are serialized sorted by tag
    for tag, sfntEntry in sorted(sfntEntries.items()):
        sfntData += structPack(sfntDirectoryEntryFormat, sfntEntry)
    # calculate
    checkSums = [entry["checkSum"] for entry in sfntEntries.values()]
    checkSums.append(sumDataULongs(sfntData))
    checkSum = sum(checkSums)
    # 0xB1B0AFBA is the checkSumAdjustment magic from the sfnt specification
    checkSum = (0xB1B0AFBA - checkSum) & 0xffffffff
    return checkSum
1898
1899# ------------------
1900# Support XML Writer
1901# ------------------
1902
class XMLWriter(object):

    """
    Minimal XML document builder on top of ElementTree.

    Elements are opened with begintag(), closed with endtag(), and the
    finished document is serialized with compile().
    """

    def __init__(self):
        self._root = None       # first element ever opened
        self._elements = []     # stack of currently open elements

    def simpletag(self, tag, **kwargs):
        # create a childless element under the currently open element
        ElementTree.SubElement(self._elements[-1], tag, **kwargs)

    def begintag(self, tag, **kwargs):
        # open a new element; it becomes the target for subsequent writes
        if not self._elements:
            element = ElementTree.Element(tag, **kwargs)
            if self._root is None:
                self._root = element
        else:
            element = ElementTree.SubElement(self._elements[-1], tag, **kwargs)
        self._elements.append(element)

    def endtag(self, tag):
        # the tag argument is a sanity check against mismatched nesting
        assert self._elements[-1].tag == tag
        self._elements.pop()

    def write(self, text):
        # append text to the currently open element
        current = self._elements[-1]
        current.text = text if current.text is None else current.text + text

    def compile(self, encoding="utf-8"):
        """Serialize the document (pretty-printed via indent) to text."""
        buffer = StringIO()
        tree = ElementTree.ElementTree(self._root)
        indent(tree.getroot())
        tree.write(buffer, encoding=encoding)
        return buffer.getvalue()
1939
def indent(elem, level=0):
    """
    Pretty-print helper: insert newline/tab whitespace into the text and
    tail attributes of an element tree, in place.

    Adapted from the effbot ElementTree indentation recipe.
    """
    pad = "\n" + "\t" * level
    children = list(elem)
    if children:
        # open the element's content on a new, deeper-indented line
        if not (elem.text and elem.text.strip()):
            elem.text = pad + "\t"
        for child in children:
            indent(child, level + 1)
        # the last child's tail brings the closing tag back to this level
        last = children[-1]
        if not (last.tail and last.tail.strip()):
            last.tail = pad
    # the root element (level 0) gets no tail
    if level and not (elem.tail and elem.tail.strip()):
        elem.tail = pad
1952
1953# ---------------------------------
1954# Support: Reporters and HTML Stuff
1955# ---------------------------------
1956
class TestResultGroup(list):

    """
    A titled list of test result dicts (each carrying a "type" key),
    with convenience queries for the presence of each result type.
    """

    def __init__(self, title):
        super(TestResultGroup, self).__init__()
        self.title = title

    def _haveType(self, tp):
        # True when any stored result dict carries the given type label
        return any(result["type"] == tp for result in self)

    def haveNote(self):
        return self._haveType("NOTE")

    def haveWarning(self):
        return self._haveType("WARNING")

    def haveError(self):
        return self._haveType("ERROR")

    def havePass(self):
        return self._haveType("PASS")

    def haveTraceback(self):
        return self._haveType("TRACEBACK")
1983
1984
class BaseReporter(object):

    """
    Base reporter. This establishes the required API for reporters.

    Results are logged into the most recently started group (created by
    logTestTitle); getReport must be implemented by subclasses.
    """

    def __init__(self):
        self.title = ""
        self.fileInfo = []
        self.metadata = None
        self.testResults = []
        # fix: logTableInfo appends to this attribute, but it was never
        # initialized, so the first call raised AttributeError
        self.tableInfo = []
        self.haveReadError = False

    def logTitle(self, title):
        self.title = title

    def logFileInfo(self, title, value):
        self.fileInfo.append((title, value))

    def logMetadata(self, data):
        self.metadata = data

    def logTableInfo(self, tag=None, offset=None, compLength=None, origLength=None, origChecksum=None):
        self.tableInfo.append((tag, offset, compLength, origLength, origChecksum))

    def logTestTitle(self, title):
        # start a new result group; subsequent results are appended to it
        self.testResults.append(TestResultGroup(title))

    def logNote(self, message, information=""):
        d = dict(type="NOTE", message=message, information=information)
        self.testResults[-1].append(d)

    def logWarning(self, message, information=""):
        d = dict(type="WARNING", message=message, information=information)
        self.testResults[-1].append(d)

    def logError(self, message, information=""):
        d = dict(type="ERROR", message=message, information=information)
        self.testResults[-1].append(d)

    def logPass(self, message, information=""):
        d = dict(type="PASS", message=message, information=information)
        self.testResults[-1].append(d)

    def logTraceback(self, text):
        d = dict(type="TRACEBACK", message=text, information="")
        self.testResults[-1].append(d)

    def getReport(self, *args, **kwargs):
        raise NotImplementedError
2035
2036
class TextReporter(BaseReporter):

    """
    Plain text reporter.
    """

    def getReport(self, reportNote=True, reportWarning=True, reportError=True, reportPass=True):
        """Return one line per (unfiltered) result, preceded by a metadata marker."""
        # map each result type to the flag that controls its visibility;
        # unknown types (e.g. TRACEBACK) are always shown
        visible = {
            "NOTE": reportNote,
            "WARNING": reportWarning,
            "ERROR": reportError,
            "PASS": reportPass,
        }
        lines = []
        if self.metadata is not None:
            lines.append("METADATA DISPLAY")
        for group in self.testResults:
            for result in group:
                if not visible.get(result["type"], True):
                    continue
                lines.append("%s - %s: %s" % (result["type"], group.title, result["message"]))
        return "\n".join(lines)
2061
2062
class HTMLReporter(BaseReporter):

    """
    Reporter that renders the collected results as a styled XHTML page.

    NOTE: "c_l_a_s_s" and "a_p_o_s_t_r_o_p_h_e" are placeholder tokens
    for the reserved word "class" and for "'"; finishHTML substitutes
    the real characters after serialization.
    """

    def getReport(self):
        """Assemble and return the full XHTML report text."""
        writer = startHTML(title=self.title)
        # write the file info
        self._writeFileInfo(writer)
        # write major error alert
        if self.haveReadError:
            self._writeMajorError(writer)
        # write the metadata
        if self.metadata:
            self._writeMetadata(writer)
        # write the test overview
        self._writeTestResultsOverview(writer)
        # write the test groups
        self._writeTestResults(writer)
        # close the html
        text = finishHTML(writer)
        # done
        return text

    def _writeFileInfo(self, writer):
        """Write the "File Information" block: one table row per (title, value) pair."""
        # write the font info
        writer.begintag("div", c_l_a_s_s="infoBlock")
        ## title
        writer.begintag("h3", c_l_a_s_s="infoBlockTitle")
        writer.write("File Information")
        writer.endtag("h3")
        ## table
        writer.begintag("table", c_l_a_s_s="report")
        ## items
        for title, value in self.fileInfo:
            # row
            writer.begintag("tr")
            # title
            writer.begintag("td", c_l_a_s_s="title")
            writer.write(title)
            writer.endtag("td")
            # message
            writer.begintag("td")
            writer.write(value)
            writer.endtag("td")
            # close row
            writer.endtag("tr")
        writer.endtag("table")
        ## close the container
        writer.endtag("div")

    def _writeMajorError(self, writer):
        """Write the prominent banner shown when the file has structural errors."""
        writer.begintag("h2", c_l_a_s_s="readError")
        writer.write("The file contains major structural errors!")
        writer.endtag("h2")

    def _writeMetadata(self, writer):
        """Write the metadata block from the tree stored by logMetadata."""
        # start the block
        writer.begintag("div", c_l_a_s_s="infoBlock")
        # title
        writer.begintag("h3", c_l_a_s_s="infoBlockTitle")
        writer.write("Metadata ")
        writer.endtag("h3")
        # content
        for element in self.metadata:
            self._writeMetadataElement(element, writer)
        # close the block
        writer.endtag("div")

    def _writeMetadataElement(self, element, writer):
        """Recursively write one metadata element dict (tag, attributes, text, children)."""
        writer.begintag("div", c_l_a_s_s="metadataElement")
        # tag
        writer.begintag("h5", c_l_a_s_s="metadata")
        writer.write(element["tag"])
        writer.endtag("h5")
        # attributes
        attributes = element["attributes"]
        if len(attributes):
            writer.begintag("h6", c_l_a_s_s="metadata")
            writer.write("Attributes:")
            writer.endtag("h6")
            # key, value pairs
            writer.begintag("table", c_l_a_s_s="metadata")
            for key, value in sorted(attributes.items()):
                writer.begintag("tr")
                writer.begintag("td", c_l_a_s_s="key")
                writer.write(key)
                writer.endtag("td")
                writer.begintag("td", c_l_a_s_s="value")
                writer.write(value)
                writer.endtag("td")
                writer.endtag("tr")
            writer.endtag("table")
        # text
        text = element["text"]
        if text is not None and text.strip():
            writer.begintag("h6", c_l_a_s_s="metadata")
            writer.write("Text:")
            writer.endtag("h6")
            writer.begintag("p", c_l_a_s_s="metadata")
            writer.write(text)
            writer.endtag("p")
        # child elements
        children = element["children"]
        if len(children):
            writer.begintag("h6", c_l_a_s_s="metadata")
            writer.write("Child Elements:")
            writer.endtag("h6")
            for child in children:
                self._writeMetadataElement(child, writer)
        # close
        writer.endtag("div")

    def _writeTestResultsOverview(self, writer):
        """Write the summary table of result counts with per-type toggle buttons."""
        ## tabulate
        notes = 0
        passes = 0
        errors = 0
        warnings = 0
        for group in self.testResults:
            for data in group:
                tp = data["type"]
                if tp == "NOTE":
                    notes += 1
                elif tp == "PASS":
                    passes += 1
                elif tp == "ERROR":
                    errors += 1
                else:
                    # anything else (WARNING, TRACEBACK, ...) counts as a warning here
                    warnings += 1
        total = sum((notes, passes, errors, warnings))
        ## container
        writer.begintag("div", c_l_a_s_s="infoBlock")
        ## header
        writer.begintag("h3", c_l_a_s_s="infoBlockTitle")
        writer.write("Results for %d Tests" % total)
        writer.endtag("h3")
        ## results
        results = [
            ("PASS", passes),
            ("WARNING", warnings),
            ("ERROR", errors),
            ("NOTE", notes),
        ]
        writer.begintag("table", c_l_a_s_s="report")
        for tp, value in results:
            # title
            writer.begintag("tr", c_l_a_s_s="testReport%s" % tp.title())
            writer.begintag("td", c_l_a_s_s="title")
            writer.write(tp)
            writer.endtag("td")
            # count
            writer.begintag("td", c_l_a_s_s="testReportResultCount")
            writer.write(str(value))
            writer.endtag("td")
            # empty
            writer.begintag("td")
            writer.endtag("td")
            # toggle button (the apostrophe placeholders are restored by finishHTML)
            buttonID = "testResult%sToggleButton" % tp
            writer.begintag("td",
                id=buttonID, c_l_a_s_s="toggleButton",
                onclick="testResultToggleButtonHit(a_p_o_s_t_r_o_p_h_e%sa_p_o_s_t_r_o_p_h_e, a_p_o_s_t_r_o_p_h_e%sa_p_o_s_t_r_o_p_h_e);" % (buttonID, "test%s" % tp.title()))
            writer.write("Hide")
            writer.endtag("td")
            # close the row
            writer.endtag("tr")
        writer.endtag("table")
        ## close the container
        writer.endtag("div")

    def _writeTestResults(self, writer):
        """Write one detail table per test group with a row for each result."""
        for infoBlock in self.testResults:
            # container
            writer.begintag("div", c_l_a_s_s="infoBlock")
            # header
            writer.begintag("h4", c_l_a_s_s="infoBlockTitle")
            writer.write(infoBlock.title)
            writer.endtag("h4")
            # individual reports
            writer.begintag("table", c_l_a_s_s="report")
            for data in infoBlock:
                tp = data["type"]
                message = data["message"]
                information = data["information"]
                # row
                writer.begintag("tr", c_l_a_s_s="test%s" % tp.title())
                # title
                writer.begintag("td", c_l_a_s_s="title")
                writer.write(tp)
                writer.endtag("td")
                # message
                writer.begintag("td")
                writer.write(message)
                ## info
                if information:
                    writer.begintag("p", c_l_a_s_s="info")
                    writer.write(information)
                    writer.endtag("p")
                writer.endtag("td")
                # close row
                writer.endtag("tr")
            writer.endtag("table")
            # close container
            writer.endtag("div")
2265
2266
# Stylesheet for the HTML report. Fix: the original stylesheet ended with
# a stray, unbalanced "}" after the p.metadata rule; it has been removed.
defaultCSS = """
body {
	background-color: #e5e5e5;
	padding: 15px 15px 0px 15px;
	margin: 0px;
	font-family: Helvetica, Verdana, Arial, sans-serif;
}

h2.readError {
	background-color: red;
	color: white;
	margin: 20px 15px 20px 15px;
	padding: 10px;
	border-radius: 5px;
	font-size: 25px;
}

/* info blocks */

.infoBlock {
	background-color: white;
	margin: 0px 0px 15px 0px;
	padding: 15px;
	border-radius: 5px;
}

h3.infoBlockTitle {
	font-size: 20px;
	margin: 0px 0px 15px 0px;
	padding: 0px 0px 10px 0px;
	border-bottom: 1px solid #e5e5e5;
}

h4.infoBlockTitle {
	font-size: 17px;
	margin: 0px 0px 15px 0px;
	padding: 0px 0px 10px 0px;
	border-bottom: 1px solid #e5e5e5;
}

table.report {
	border-collapse: collapse;
	width: 100%;
	font-size: 14px;
}

table.report tr {
	border-top: 1px solid white;
}

table.report tr.testPass, table.report tr.testReportPass {
	background-color: #c8ffaf;
}

table.report tr.testError, table.report tr.testReportError {
	background-color: #ffc3af;
}

table.report tr.testWarning, table.report tr.testReportWarning {
	background-color: #ffe1af;
}

table.report tr.testNote, table.report tr.testReportNote {
	background-color: #96e1ff;
}

table.report tr.testTraceback, table.report tr.testReportTraceback {
	background-color: red;
	color: white;
}

table.report td {
	padding: 7px 5px 7px 5px;
	vertical-align: top;
}

table.report td.title {
	width: 80px;
	text-align: right;
	font-weight: bold;
	text-transform: uppercase;
}

table.report td.testReportResultCount {
	width: 100px;
}

table.report td.toggleButton {
	text-align: center;
	width: 50px;
	border-left: 1px solid white;
	cursor: pointer;
}

.infoBlock td p.info {
	font-size: 12px;
	font-style: italic;
	margin: 5px 0px 0px 0px;
}

/* SFNT table */

table.sfntTableData {
	font-size: 14px;
	width: 100%;
	border-collapse: collapse;
	padding: 0px;
}

table.sfntTableData th {
	padding: 5px 0px 5px 0px;
	text-align: left
}

table.sfntTableData tr.uncompressed {
	background-color: #ffc3af;
}

table.sfntTableData td {
	width: 20%;
	padding: 5px 0px 5px 0px;
	border: 1px solid #e5e5e5;
	border-left: none;
	border-right: none;
	font-family: Consolas, Menlo, "Vera Mono", Monaco, monospace;
}

pre {
	font-size: 12px;
	font-family: Consolas, Menlo, "Vera Mono", Monaco, monospace;
	margin: 0px;
	padding: 0px;
}

/* Metadata */

.metadataElement {
	background: rgba(0, 0, 0, 0.03);
	margin: 10px 0px 10px 0px;
	border: 2px solid #d8d8d8;
	padding: 10px;
}

h5.metadata {
	font-size: 14px;
	margin: 5px 0px 10px 0px;
	padding: 0px 0px 5px 0px;
	border-bottom: 1px solid #d8d8d8;
}

h6.metadata {
	font-size: 12px;
	font-weight: normal;
	margin: 10px 0px 10px 0px;
	padding: 0px 0px 5px 0px;
	border-bottom: 1px solid #d8d8d8;
}

table.metadata {
	font-size: 12px;
	width: 100%;
	border-collapse: collapse;
	padding: 0px;
}

table.metadata td.key {
	width: 5em;
	padding: 5px 5px 5px 0px;
	border-right: 1px solid #d8d8d8;
	text-align: right;
	vertical-align: top;
}

table.metadata td.value {
	padding: 5px 0px 5px 5px;
	border-left: 1px solid #d8d8d8;
	text-align: left;
	vertical-align: top;
}

p.metadata {
	font-size: 12px;
	font-style: italic;
}
"""
2453
# JavaScript for the HTML report. Implements the Show/Hide toggle buttons
# in the results overview: a click flips the visibility of all result rows
# of one type, then hides any info block left with no visible rows. The
# "<" and ">" characters are replaced with placeholder tokens by startHTML
# (to survive ElementTree escaping) and restored by finishHTML.
defaultJavascript = """

//<![CDATA[
	function testResultToggleButtonHit(buttonID, className) {
		// change the button title
		var element = document.getElementById(buttonID);
		if (element.innerHTML == "Show" ) {
			element.innerHTML = "Hide";
		}
		else {
			element.innerHTML = "Show";
		}
		// toggle the elements
		var elements = getTestResults(className);
		for (var e = 0; e < elements.length; ++e) {
			toggleElement(elements[e]);
		}
		// toggle the info blocks
		toggleInfoBlocks();
	}

	function getTestResults(className) {
		var rows = document.getElementsByTagName("tr");
		var found = Array();
		for (var r = 0; r < rows.length; ++r) {
			var row = rows[r];
			if (row.className == className) {
				found[found.length] = row;
			}
		}
		return found;
	}

	function toggleElement(element) {
		if (element.style.display != "none" ) {
			element.style.display = "none";
		}
		else {
			element.style.display = "";
		}
	}

	function toggleInfoBlocks() {
		var tables = document.getElementsByTagName("table")
		for (var t = 0; t < tables.length; ++t) {
			var table = tables[t];
			if (table.className == "report") {
				var haveVisibleRow = false;
				var rows = table.rows;
				for (var r = 0; r < rows.length; ++r) {
					var row = rows[r];
					if (row.style.display == "none") {
						var i = 0;
					}
					else {
						haveVisibleRow = true;
					}
				}
				var div = table.parentNode;
				if (haveVisibleRow == true) {
					div.style.display = "";
				}
				else {
					div.style.display = "none";
				}
			}
		}
	}
//]]>
"""
2524
def startHTML(title=None, cssReplacements=None):
    """
    Create an XMLWriter primed with the report's html/head scaffolding
    and an open <body> tag, ready for report content.

    title is the document title (omitted when None); cssReplacements
    maps substrings to replacements applied to the default stylesheet.
    """
    # fix: use a None sentinel instead of a shared mutable default argument
    if cssReplacements is None:
        cssReplacements = {}
    writer = XMLWriter()
    # start the html
    writer.begintag("html", xmlns="http://www.w3.org/1999/xhtml", lang="en")
    # start the head
    writer.begintag("head")
    writer.simpletag("meta", http_equiv="Content-Type", content="text/html; charset=utf-8")
    # title
    if title is not None:
        writer.begintag("title")
        writer.write(title)
        writer.endtag("title")
    # write the css
    writer.begintag("style", type="text/css")
    css = defaultCSS
    for before, after in cssReplacements.items():
        css = css.replace(before, after)
    writer.write(css)
    writer.endtag("style")
    # write the javascript
    writer.begintag("script", type="text/javascript")
    javascript = defaultJavascript
    ## hack around some ElementTree escaping; finishHTML restores these tokens
    javascript = javascript.replace("<", "l_e_s_s")
    javascript = javascript.replace(">", "g_r_e_a_t_e_r")
    writer.write(javascript)
    writer.endtag("script")
    # close the head
    writer.endtag("head")
    # start the body
    writer.begintag("body")
    # return the writer
    return writer
2558
def finishHTML(writer):
    """
    Close the open body/html tags, serialize the document, and restore
    the placeholder tokens used to defeat ElementTree escaping.
    """
    # close the body
    writer.endtag("body")
    # close the html
    writer.endtag("html")
    # get the text
    text = "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
    text += writer.compile()
    # undo the placeholder tokens, in the same order as before
    for placeholder, replacement in (
        ("c_l_a_s_s", "class"),
        ("a_p_o_s_t_r_o_p_h_e", "'"),
        ("l_e_s_s", "<"),
        ("g_r_e_a_t_e_r", ">"),
        ("http_equiv", "http-equiv"),
    ):
        text = text.replace(placeholder, replacement)
    # return
    return text
2574
2575# ------------------
2576# Support: Unpackers
2577# ------------------
2578
def unpackHeader(data):
    """Unpack and return the WOFF header dict from the start of data."""
    header, _remainder = structUnpack(headerFormat, data)
    return header
2581
def unpackDirectory(data):
    """Unpack the WOFF table directory as a list of entry dicts."""
    numTables = unpackHeader(data)["numTables"]
    # the directory entries immediately follow the header
    remainder = data[headerSize:]
    directory = []
    for _ in range(numTables):
        entry, remainder = structUnpack(directoryFormat, remainder)
        directory.append(entry)
    return directory
2591
def unpackTableData(data):
    """
    Return a dict mapping table tag -> table data.

    Entries pointing outside the file yield empty bytes; entries whose
    compressed payload fails zlib decompression yield None.
    """
    directory = unpackDirectory(data)
    tables = {}
    for entry in directory:
        tag = entry["tag"]
        offset = entry["offset"]
        origLength = entry["origLength"]
        compLength = entry["compLength"]
        if offset > len(data) or offset < 0 or (offset + compLength) < 0:
            # fix: use empty bytes (was the str "") so the value type is
            # consistent with the slices produced by the other branches
            tableData = b""
        elif offset + compLength > len(data):
            tableData = data[offset:]
        else:
            tableData = data[offset:offset+compLength]
        if compLength < origLength:
            # compLength < origLength signals a zlib-compressed table
            try:
                td = zlib.decompress(tableData)
                tableData = td
            except zlib.error:
                tableData = None
        tables[tag] = tableData
    return tables
2614
def unpackMetadata(data, decompress=True, parse=True):
    """
    Extract the metadata block from a WOFF file.

    When decompress is True the block is zlib-decompressed; when parse
    is True the XML is parsed into an ElementTree element. An absent
    block is returned as-is (empty).
    """
    header = unpackHeader(data)
    start = header["metaOffset"]
    metadata = data[start:start + header["metaLength"]]
    if decompress and metadata:
        metadata = zlib.decompress(metadata)
    if parse and metadata:
        metadata = ElementTree.fromstring(metadata)
    return metadata
2623
def unpackPrivateData(data):
    """Return the private data block of a WOFF file (may be empty)."""
    header = unpackHeader(data)
    start = header["privOffset"]
    return data[start:start + header["privLength"]]
2628
2629# -----------------------
2630# Support: Report Helpers
2631# -----------------------
2632
def findUniqueFileName(path):
    """
    Return path if no file exists there; otherwise return a variant with
    a timestamp inserted before the extension.
    """
    if not os.path.exists(path):
        return path
    folder = os.path.dirname(path)
    base, extension = os.path.splitext(os.path.basename(path))
    stamp = time.strftime("%Y-%m-%d %H-%M-%S %Z")
    candidate = os.path.join(folder, "%s (%s)%s" % (base, stamp, extension))
    # intentionally break to prevent a file overwrite. this could happen
    # if the user has a directory full of files with future time stamped
    # file names. not likely, but avoid it all the same.
    assert not os.path.exists(candidate)
    return candidate
2648
2649
2650# ---------------
2651# Public Function
2652# ---------------
2653
def validateFont(path, options, writeFile=True):
    """Validate the WOFF file at *path* and produce a report.

    path - file system path of the WOFF file to test.
    options - an object providing outputFormat ("html" or "text"),
        outputFileName and outputDirectory attributes; the latter two
        are only consulted when writeFile is True.
    writeFile - when True, also write the report to disk under a unique
        file name derived from the font name (or from the options).

    Returns (reportPath, report). reportPath is None when no file was
    written. Raises NotImplementedError for an unknown outputFormat.
    """
    # start the reporter
    if options.outputFormat == "html":
        reporter = HTMLReporter()
    elif options.outputFormat == "text":
        reporter = TextReporter()
    else:
        raise NotImplementedError
    # log the title
    reporter.logTitle("Report: %s" % os.path.basename(path))
    # log fileinfo
    reporter.logFileInfo("FILE", os.path.basename(path))
    reporter.logFileInfo("DIRECTORY", os.path.dirname(path))
    # read the font data; the context manager closes the file even if
    # reading fails.
    with open(path, "rb") as f:
        data = f.read()
    # run tests and log results.
    # the goal here is to locate as many errors as possible in
    # one session, rather than stopping validation at the first
    # instance of an error. to do this, each test function returns
    # two booleans indicating the following:
    #   1. errors were found that should cease all further tests.
    #   2. errors were found, but further tests can proceed.
    # this is important because displaying metadata for a file
    # with errors must not happen.
    tests = (
        ("Header", testHeader),
        ("Data Blocks", testDataBlocks),
        ("Table Directory", testTableDirectory),
        ("Table Data", testTableData),
        ("Metadata", testMetadata),
    )
    haveReadError = False
    canDisplayMetadata = True
    for title, test in tests:
        reporter.logTestTitle(title)
        stoppingError, nonStoppingError = test(data, reporter)
        if nonStoppingError:
            canDisplayMetadata = False
        if stoppingError:
            haveReadError = True
            break
    reporter.haveReadError = haveReadError
    # report the metadata
    if not haveReadError and canDisplayMetadata:
        metadata = getMetadataForDisplay(data)
        reporter.logMetadata(metadata)
    # get the report
    report = reporter.getReport()
    # write
    reportPath = None
    if writeFile:
        # make the output file name
        if options.outputFileName is not None:
            fileName = options.outputFileName
        else:
            fileName = os.path.splitext(os.path.basename(path))[0]
            fileName += "_validate"
            if options.outputFormat == "html":
                fileName += ".html"
            else:
                fileName += ".txt"
        # make the output directory
        if options.outputDirectory is not None:
            directory = options.outputDirectory
        else:
            directory = os.path.dirname(path)
        # write the file, avoiding clobbering an existing one
        reportPath = os.path.join(directory, fileName)
        reportPath = findUniqueFileName(reportPath)
        with open(reportPath, "wb") as f:
            f.write(report)
    return reportPath, report
2757
2758################################################################################
2759############################### Fontforge test #################################
2760################################################################################
2761
# Generate a WOFF file with fontforge. The input font path comes from
# the command line; the output name is the input base name plus .woff.
fontname = sys.argv[1]
woffname = "%s.woff" % os.path.splitext(os.path.basename(fontname))[0]
font = fontforge.open(fontname)
try:
    # The .woff extension makes fontforge emit WOFF output.
    font.generate(woffname)
finally:
    # Release the fontforge handle even if generation fails.
    font.close()
2768
2769# Use the W3C's validator (code above) on the output WOFF file.
class defaultOptions(object):
    """Minimal stand-in for the validator's command line options.

    Provides every attribute validateFont() reads: outputFormat is
    always consulted; outputFileName and outputDirectory are consulted
    whenever a report file is written (writeFile=True), so they must
    exist even if this harness currently passes writeFile=False.
    """

    def __init__(self):
        self.outputFormat = "text"
        # None selects the defaults derived from the input font's path.
        self.outputFileName = None
        self.outputDirectory = None
# Validate the generated WOFF; remove the temporary file even if the
# validator raises.
try:
    reportPath, report = validateFont(woffname, defaultOptions(), False)
finally:
    os.remove(woffname)

# Check the validation report and raise an exception if an ERROR is found.
# splitlines() handles any newline convention; os.linesep would only
# match the host platform's separator and could fail to split at all.
for line in report.splitlines():
    if line.startswith("ERROR"):
        raise Exception(line)
    print(line)
2781