Diffstat (limited to 'Lib/fontTools')
-rw-r--r--  Lib/fontTools/__init__.py  2
-rw-r--r--  Lib/fontTools/cffLib/__init__.py  2
-rw-r--r--  Lib/fontTools/cffLib/specializer.py  37
-rw-r--r--  Lib/fontTools/cffLib/width.py  4
-rw-r--r--  Lib/fontTools/colorLib/builder.py  155
-rw-r--r--  Lib/fontTools/colorLib/unbuilder.py  12
-rw-r--r--  Lib/fontTools/designspaceLib/__init__.py  43
-rw-r--r--  Lib/fontTools/designspaceLib/split.py  21
-rw-r--r--  Lib/fontTools/designspaceLib/statNames.py  37
-rw-r--r--  Lib/fontTools/designspaceLib/types.py  29
-rw-r--r--  Lib/fontTools/feaLib/builder.py  4
-rw-r--r--  Lib/fontTools/feaLib/parser.py  36
-rw-r--r--  Lib/fontTools/fontBuilder.py  2
-rw-r--r--  Lib/fontTools/merge/cmap.py  4
-rw-r--r--  Lib/fontTools/misc/cliTools.py  9
-rw-r--r--  Lib/fontTools/misc/symfont.py  63
-rw-r--r--  Lib/fontTools/misc/treeTools.py  45
-rw-r--r--  Lib/fontTools/misc/visitor.py  143
-rw-r--r--  Lib/fontTools/mtiLib/__init__.py  4
-rw-r--r--  Lib/fontTools/otlLib/optimize/__init__.py  2
-rw-r--r--  Lib/fontTools/pens/basePen.py  3
-rw-r--r--  Lib/fontTools/pens/cairoPen.py  26
-rw-r--r--  Lib/fontTools/pens/momentsPen.py  662
-rw-r--r--  Lib/fontTools/pens/qtPen.py  4
-rw-r--r--  Lib/fontTools/pens/statisticsPen.py  12
-rw-r--r--  Lib/fontTools/pens/svgPathPen.py  79
-rw-r--r--  Lib/fontTools/subset/__init__.py  26
-rw-r--r--  Lib/fontTools/subset/cff.py  11
-rw-r--r--  Lib/fontTools/subset/svg.py  17
-rw-r--r--  Lib/fontTools/svgLib/path/parser.py  9
-rw-r--r--  Lib/fontTools/ttLib/scaleUpem.py  336
-rw-r--r--  Lib/fontTools/ttLib/tables/E_B_D_T_.py  7
-rw-r--r--  Lib/fontTools/ttLib/tables/E_B_L_C_.py  6
-rw-r--r--  Lib/fontTools/ttLib/tables/S_V_G_.py  62
-rw-r--r--  Lib/fontTools/ttLib/tables/_c_m_a_p.py  8
-rw-r--r--  Lib/fontTools/ttLib/tables/_g_l_y_f.py  4
-rw-r--r--  Lib/fontTools/ttLib/tables/_g_v_a_r.py  63
-rw-r--r--  Lib/fontTools/ttLib/tables/_k_e_r_n.py  4
-rw-r--r--  Lib/fontTools/ttLib/tables/otBase.py  214
-rw-r--r--  Lib/fontTools/ttLib/tables/otConverters.py  112
-rwxr-xr-x  Lib/fontTools/ttLib/tables/otData.py  106
-rw-r--r--  Lib/fontTools/ttLib/tables/otTables.py  60
-rw-r--r--  Lib/fontTools/ttLib/tables/otTraverse.py  137
-rw-r--r--  Lib/fontTools/ttLib/ttFont.py  132
-rw-r--r--  Lib/fontTools/ttLib/ttGlyphSet.py  221
-rw-r--r--  Lib/fontTools/ttLib/ttVisitor.py  32
-rwxr-xr-x  Lib/fontTools/ufoLib/__init__.py  5
-rwxr-xr-x  Lib/fontTools/ufoLib/glifLib.py  3
-rw-r--r--  Lib/fontTools/varLib/__init__.py  54
-rw-r--r--  Lib/fontTools/varLib/cff.py  1
-rw-r--r--  Lib/fontTools/varLib/errors.py  59
-rw-r--r--  Lib/fontTools/varLib/featureVars.py  16
-rw-r--r--  Lib/fontTools/varLib/instancer/__init__.py  154
-rw-r--r--  Lib/fontTools/varLib/interpolatable.py  71
-rw-r--r--  Lib/fontTools/varLib/iup.py  240
-rw-r--r--  Lib/fontTools/varLib/merger.py  457
-rw-r--r--  Lib/fontTools/varLib/models.py  162
-rw-r--r--  Lib/fontTools/varLib/mutator.py  5
-rw-r--r--  Lib/fontTools/varLib/varStore.py  24
59 files changed, 3282 insertions, 976 deletions
diff --git a/Lib/fontTools/__init__.py b/Lib/fontTools/__init__.py
index 9a39ea0f..5b2cca1f 100644
--- a/Lib/fontTools/__init__.py
+++ b/Lib/fontTools/__init__.py
@@ -3,6 +3,6 @@ from fontTools.misc.loggingTools import configLogger
log = logging.getLogger(__name__)
-version = __version__ = "4.33.3"
+version = __version__ = "4.37.1"
__all__ = ["version", "log", "configLogger"]
diff --git a/Lib/fontTools/cffLib/__init__.py b/Lib/fontTools/cffLib/__init__.py
index fc82bb27..3eda9ba4 100644
--- a/Lib/fontTools/cffLib/__init__.py
+++ b/Lib/fontTools/cffLib/__init__.py
@@ -1037,6 +1037,8 @@ class VarStoreData(object):
return len(self.data)
def getNumRegions(self, vsIndex):
+ if vsIndex is None:
+ vsIndex = 0
varData = self.otVarStore.VarData[vsIndex]
numRegions = varData.VarRegionCount
return numRegions
diff --git a/Lib/fontTools/cffLib/specializer.py b/Lib/fontTools/cffLib/specializer.py
index fbfefa92..677f03b7 100644
--- a/Lib/fontTools/cffLib/specializer.py
+++ b/Lib/fontTools/cffLib/specializer.py
@@ -304,7 +304,7 @@ def _convertBlendOpToArgs(blendList):
deltaArgs = args[numBlends:]
numDeltaValues = len(deltaArgs)
deltaList = [ deltaArgs[i:i + numRegions] for i in range(0, numDeltaValues, numRegions) ]
- blend_args = [ a + b for a, b in zip(defaultArgs,deltaList)]
+ blend_args = [ a + b + [1] for a, b in zip(defaultArgs,deltaList)]
return blend_args
def generalizeCommands(commands, ignoreErrors=False):
@@ -399,10 +399,10 @@ def _convertToBlendCmds(args):
else:
prev_stack_use = stack_use
# The arg is a tuple of blend values.
- # These are each (master 0,delta 1..delta n)
+ # These are each (master 0,delta 1..delta n, 1)
# Combine as many successive tuples as we can,
# up to the max stack limit.
- num_sources = len(arg)
+ num_sources = len(arg) - 1
blendlist = [arg]
i += 1
stack_use += 1 + num_sources # 1 for the num_blends arg
@@ -427,7 +427,8 @@ def _convertToBlendCmds(args):
for arg in blendlist:
blend_args.append(arg[0])
for arg in blendlist:
- blend_args.extend(arg[1:])
+ assert arg[-1] == 1
+ blend_args.extend(arg[1:-1])
blend_args.append(num_blends)
new_args.append(blend_args)
stack_use = prev_stack_use + num_blends
@@ -437,12 +438,13 @@ def _convertToBlendCmds(args):
def _addArgs(a, b):
if isinstance(b, list):
if isinstance(a, list):
- if len(a) != len(b):
+ if len(a) != len(b) or a[-1] != b[-1]:
raise ValueError()
- return [_addArgs(va, vb) for va,vb in zip(a, b)]
+ return [_addArgs(va, vb) for va,vb in zip(a[:-1], b[:-1])] + [a[-1]]
else:
a, b = b, a
if isinstance(a, list):
+ assert a[-1] == 1
return [_addArgs(a[0], b)] + a[1:]
return a + b
@@ -739,12 +741,27 @@ if __name__ == '__main__':
if len(sys.argv) == 1:
import doctest
sys.exit(doctest.testmod().failed)
- program = stringToProgram(sys.argv[1:])
+
+ import argparse
+
+ parser = argparse.ArgumentParser(
+ "fonttools cffLib.specialer", description="CFF CharString generalizer/specializer")
+ parser.add_argument(
+ "program", metavar="command", nargs="*", help="Commands.")
+ parser.add_argument(
+ "--num-regions", metavar="NumRegions", nargs="*", default=None,
+ help="Number of variable-font regions for blend opertaions.")
+
+ options = parser.parse_args(sys.argv[1:])
+
+ getNumRegions = None if options.num_regions is None else lambda vsIndex: int(options.num_regions[0 if vsIndex is None else vsIndex])
+
+ program = stringToProgram(options.program)
print("Program:"); print(programToString(program))
- commands = programToCommands(program)
+ commands = programToCommands(program, getNumRegions)
print("Commands:"); print(commands)
program2 = commandsToProgram(commands)
print("Program from commands:"); print(programToString(program2))
assert program == program2
- print("Generalized program:"); print(programToString(generalizeProgram(program)))
- print("Specialized program:"); print(programToString(specializeProgram(program)))
+ print("Generalized program:"); print(programToString(generalizeProgram(program, getNumRegions)))
+ print("Specialized program:"); print(programToString(specializeProgram(program, getNumRegions)))
diff --git a/Lib/fontTools/cffLib/width.py b/Lib/fontTools/cffLib/width.py
index 00b859bb..303c9462 100644
--- a/Lib/fontTools/cffLib/width.py
+++ b/Lib/fontTools/cffLib/width.py
@@ -135,13 +135,13 @@ def optimizeWidths(widths):
dfltC = nomnCost[nominal] - bestCost[nominal]
ends = []
if dfltC == dfltCostU[nominal]:
- starts = [nominal, nominal-108, nominal-1131]
+ starts = [nominal, nominal-108, nominal-1132]
for start in starts:
while cumMaxU[start] and cumMaxU[start] == cumMaxU[start-1]:
start -= 1
ends.append(start)
else:
- starts = [nominal, nominal+108, nominal+1131]
+ starts = [nominal, nominal+108, nominal+1132]
for start in starts:
while cumMaxD[start] and cumMaxD[start] == cumMaxD[start+1]:
start += 1
diff --git a/Lib/fontTools/colorLib/builder.py b/Lib/fontTools/colorLib/builder.py
index 2577fa76..442bc20e 100644
--- a/Lib/fontTools/colorLib/builder.py
+++ b/Lib/fontTools/colorLib/builder.py
@@ -23,6 +23,7 @@ from typing import (
)
from fontTools.misc.arrayTools import intRect
from fontTools.misc.fixedTools import fixedToFloat
+from fontTools.misc.treeTools import build_n_ary_tree
from fontTools.ttLib.tables import C_O_L_R_
from fontTools.ttLib.tables import C_P_A_L_
from fontTools.ttLib.tables import _n_a_m_e
@@ -186,10 +187,12 @@ def populateCOLRv0(
def buildCOLR(
colorGlyphs: _ColorGlyphsDict,
version: Optional[int] = None,
+ *,
glyphMap: Optional[Mapping[str, int]] = None,
varStore: Optional[ot.VarStore] = None,
varIndexMap: Optional[ot.DeltaSetIndexMap] = None,
clipBoxes: Optional[Dict[str, _ClipBoxInput]] = None,
+ allowLayerReuse: bool = True,
) -> C_O_L_R_.table_C_O_L_R_:
"""Build COLR table from color layers mapping.
@@ -231,7 +234,11 @@ def buildCOLR(
populateCOLRv0(colr, colorGlyphsV0, glyphMap)
- colr.LayerList, colr.BaseGlyphList = buildColrV1(colorGlyphsV1, glyphMap)
+ colr.LayerList, colr.BaseGlyphList = buildColrV1(
+ colorGlyphsV1,
+ glyphMap,
+ allowLayerReuse=allowLayerReuse,
+ )
if version is None:
version = 1 if (varStore or colorGlyphsV1) else 0
@@ -242,9 +249,6 @@ def buildCOLR(
if version == 0:
self.ColorLayers = self._decompileColorLayersV0(colr)
else:
- clipBoxes = {
- name: clipBoxes[name] for name in clipBoxes or {} if name in colorGlyphsV1
- }
colr.ClipList = buildClipList(clipBoxes) if clipBoxes else None
colr.VarIndexMap = varIndexMap
colr.VarStore = varStore
@@ -443,29 +447,16 @@ def _reuse_ranges(num_layers: int) -> Generator[Tuple[int, int], None, None]:
yield (lbound, ubound)
-class LayerListBuilder:
- layers: List[ot.Paint]
+class LayerReuseCache:
reusePool: Mapping[Tuple[Any, ...], int]
tuples: Mapping[int, Tuple[Any, ...]]
keepAlive: List[ot.Paint] # we need id to remain valid
def __init__(self):
- self.layers = []
self.reusePool = {}
self.tuples = {}
self.keepAlive = []
- # We need to intercept construction of PaintColrLayers
- callbacks = _buildPaintCallbacks()
- callbacks[
- (
- BuildCallback.BEFORE_BUILD,
- ot.Paint,
- ot.PaintFormat.PaintColrLayers,
- )
- ] = self._beforeBuildPaintColrLayers
- self.tableBuilder = TableBuilder(callbacks)
-
def _paint_tuple(self, paint: ot.Paint):
# start simple, who even cares about cyclic graphs or interesting field types
def _tuple_safe(value):
@@ -491,25 +482,7 @@ class LayerListBuilder:
def _as_tuple(self, paints: Sequence[ot.Paint]) -> Tuple[Any, ...]:
return tuple(self._paint_tuple(p) for p in paints)
- # COLR layers is unusual in that it modifies shared state
- # so we need a callback into an object
- def _beforeBuildPaintColrLayers(self, dest, source):
- # Sketchy gymnastics: a sequence input will have dropped it's layers
- # into NumLayers; get it back
- if isinstance(source.get("NumLayers", None), collections.abc.Sequence):
- layers = source["NumLayers"]
- else:
- layers = source["Layers"]
-
- # Convert maps seqs or whatever into typed objects
- layers = [self.buildPaint(l) for l in layers]
-
- # No reason to have a colr layers with just one entry
- if len(layers) == 1:
- return layers[0], {}
-
- # Look for reuse, with preference to longer sequences
- # This may make the layer list smaller
+ def try_reuse(self, layers: List[ot.Paint]) -> List[ot.Paint]:
found_reuse = True
while found_reuse:
found_reuse = False
@@ -532,10 +505,63 @@ class LayerListBuilder:
layers = layers[:lbound] + [new_slice] + layers[ubound:]
found_reuse = True
break
+ return layers
+
+ def add(self, layers: List[ot.Paint], first_layer_index: int):
+ for lbound, ubound in _reuse_ranges(len(layers)):
+ self.reusePool[self._as_tuple(layers[lbound:ubound])] = (
+ lbound + first_layer_index
+ )
+
+
+class LayerListBuilder:
+ layers: List[ot.Paint]
+ cache: LayerReuseCache
+ allowLayerReuse: bool
+
+ def __init__(self, *, allowLayerReuse=True):
+ self.layers = []
+ if allowLayerReuse:
+ self.cache = LayerReuseCache()
+ else:
+ self.cache = None
+
+ # We need to intercept construction of PaintColrLayers
+ callbacks = _buildPaintCallbacks()
+ callbacks[
+ (
+ BuildCallback.BEFORE_BUILD,
+ ot.Paint,
+ ot.PaintFormat.PaintColrLayers,
+ )
+ ] = self._beforeBuildPaintColrLayers
+ self.tableBuilder = TableBuilder(callbacks)
+
+ # COLR layers is unusual in that it modifies shared state
+ # so we need a callback into an object
+ def _beforeBuildPaintColrLayers(self, dest, source):
+ # Sketchy gymnastics: a sequence input will have dropped its layers
+ # into NumLayers; get it back
+ if isinstance(source.get("NumLayers", None), collections.abc.Sequence):
+ layers = source["NumLayers"]
+ else:
+ layers = source["Layers"]
+
+ # Convert maps seqs or whatever into typed objects
+ layers = [self.buildPaint(l) for l in layers]
+
+ # No reason to have a colr layers with just one entry
+ if len(layers) == 1:
+ return layers[0], {}
+
+ if self.cache is not None:
+ # Look for reuse, with preference to longer sequences
+ # This may make the layer list smaller
+ layers = self.cache.try_reuse(layers)
# The layer list is now final; if it's too big we need to tree it
is_tree = len(layers) > MAX_PAINT_COLR_LAYER_COUNT
- layers = _build_n_ary_tree(layers, n=MAX_PAINT_COLR_LAYER_COUNT)
+ layers = build_n_ary_tree(layers, n=MAX_PAINT_COLR_LAYER_COUNT)
# We now have a tree of sequences with Paint leaves.
# Convert the sequences into PaintColrLayers.
@@ -563,11 +589,8 @@ class LayerListBuilder:
# Register our parts for reuse provided we aren't a tree
# If we are a tree the leaves registered for reuse and that will suffice
- if not is_tree:
- for lbound, ubound in _reuse_ranges(len(layers)):
- self.reusePool[self._as_tuple(layers[lbound:ubound])] = (
- lbound + paint.FirstLayerIndex
- )
+ if self.cache is not None and not is_tree:
+ self.cache.add(layers, paint.FirstLayerIndex)
# we've fully built dest; empty source prevents generalized build from kicking in
return paint, {}
@@ -603,6 +626,8 @@ def _format_glyph_errors(errors: Mapping[str, Exception]) -> str:
def buildColrV1(
colorGlyphs: _ColorGlyphsDict,
glyphMap: Optional[Mapping[str, int]] = None,
+ *,
+ allowLayerReuse: bool = True,
) -> Tuple[Optional[ot.LayerList], ot.BaseGlyphList]:
if glyphMap is not None:
colorGlyphItems = sorted(
@@ -613,7 +638,7 @@ def buildColrV1(
errors = {}
baseGlyphs = []
- layerBuilder = LayerListBuilder()
+ layerBuilder = LayerListBuilder(allowLayerReuse=allowLayerReuse)
for baseGlyph, paint in colorGlyphItems:
try:
baseGlyphs.append(buildBaseGlyphPaintRecord(baseGlyph, layerBuilder, paint))
@@ -632,45 +657,3 @@ def buildColrV1(
glyphs.BaseGlyphCount = len(baseGlyphs)
glyphs.BaseGlyphPaintRecord = baseGlyphs
return (layers, glyphs)
-
-
-def _build_n_ary_tree(leaves, n):
- """Build N-ary tree from sequence of leaf nodes.
-
- Return a list of lists where each non-leaf node is a list containing
- max n nodes.
- """
- if not leaves:
- return []
-
- assert n > 1
-
- depth = ceil(log(len(leaves), n))
-
- if depth <= 1:
- return list(leaves)
-
- # Fully populate complete subtrees of root until we have enough leaves left
- root = []
- unassigned = None
- full_step = n ** (depth - 1)
- for i in range(0, len(leaves), full_step):
- subtree = leaves[i : i + full_step]
- if len(subtree) < full_step:
- unassigned = subtree
- break
- while len(subtree) > n:
- subtree = [subtree[k : k + n] for k in range(0, len(subtree), n)]
- root.append(subtree)
-
- if unassigned:
- # Recurse to fill the last subtree, which is the only partially populated one
- subtree = _build_n_ary_tree(unassigned, n)
- if len(subtree) <= n - len(root):
- # replace last subtree with its children if they can still fit
- root.extend(subtree)
- else:
- root.append(subtree)
- assert len(root) <= n
-
- return root
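With LayerListBuilder now delegating to LayerReuseCache, callers can disable layer-slice sharing through the new allowLayerReuse keyword. A hedged sketch of driving it through buildCOLR; the glyph names, paint formats and palette indices below are illustrative only:

    from fontTools.colorLib.builder import buildCOLR

    colorGlyphs = {
        "A": {
            "Format": 1,  # PaintColrLayers
            "Layers": [
                {"Format": 10,  # PaintGlyph
                 "Glyph": "A.layer0",
                 "Paint": {"Format": 2, "PaletteIndex": 0, "Alpha": 1.0}},
                {"Format": 10,
                 "Glyph": "A.layer1",
                 "Paint": {"Format": 2, "PaletteIndex": 1, "Alpha": 1.0}},
            ],
        },
    }

    # Default behaviour keeps the reuse cache; pass allowLayerReuse=False to
    # emit every glyph's layers verbatim, trading size for predictability.
    colr = buildCOLR(colorGlyphs, allowLayerReuse=False)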
diff --git a/Lib/fontTools/colorLib/unbuilder.py b/Lib/fontTools/colorLib/unbuilder.py
index 03458907..ac243550 100644
--- a/Lib/fontTools/colorLib/unbuilder.py
+++ b/Lib/fontTools/colorLib/unbuilder.py
@@ -13,12 +13,12 @@ def unbuildColrV1(layerList, baseGlyphList):
}
-def _flatten(lst):
- for el in lst:
- if isinstance(el, list):
- yield from _flatten(el)
+def _flatten_layers(lst):
+ for paint in lst:
+ if paint["Format"] == ot.PaintFormat.PaintColrLayers:
+ yield from _flatten_layers(paint["Layers"])
else:
- yield el
+ yield paint
class LayerListUnbuilder:
@@ -41,7 +41,7 @@ class LayerListUnbuilder:
assert source["Format"] == ot.PaintFormat.PaintColrLayers
layers = list(
- _flatten(
+ _flatten_layers(
[
self.unbuildPaint(childPaint)
for childPaint in self.layers[
diff --git a/Lib/fontTools/designspaceLib/__init__.py b/Lib/fontTools/designspaceLib/__init__.py
index 400e960e..c74b5509 100644
--- a/Lib/fontTools/designspaceLib/__init__.py
+++ b/Lib/fontTools/designspaceLib/__init__.py
@@ -8,7 +8,7 @@ import os
import posixpath
from io import BytesIO, StringIO
from textwrap import indent
-from typing import Any, Dict, List, MutableMapping, Optional, Tuple, Union
+from typing import Any, Dict, List, MutableMapping, Optional, Tuple, Union, cast
from fontTools.misc import etree as ET
from fontTools.misc import plistlib
@@ -22,9 +22,20 @@ from fontTools.misc.textTools import tobytes, tostr
"""
__all__ = [
- 'DesignSpaceDocumentError', 'DesignSpaceDocument', 'SourceDescriptor',
- 'InstanceDescriptor', 'AxisDescriptor', 'RuleDescriptor', 'BaseDocReader',
- 'BaseDocWriter'
+ 'AxisDescriptor',
+ 'AxisLabelDescriptor',
+ 'BaseDocReader',
+ 'BaseDocWriter',
+ 'DesignSpaceDocument',
+ 'DesignSpaceDocumentError',
+ 'DiscreteAxisDescriptor',
+ 'InstanceDescriptor',
+ 'LocationLabelDescriptor',
+ 'RangeAxisSubsetDescriptor',
+ 'RuleDescriptor',
+ 'SourceDescriptor',
+ 'ValueAxisSubsetDescriptor',
+ 'VariableFontDescriptor',
]
# ElementTree allows to find namespace-prefixed elements, but not attributes
@@ -950,6 +961,7 @@ class DiscreteAxisDescriptor(AbstractAxisDescriptor):
a2 = DiscreteAxisDescriptor()
a2.values = [0, 1]
+ a2.default = 0
a2.name = "Italic"
a2.tag = "ITAL"
a2.labelNames['fr'] = "Italique"
@@ -1352,7 +1364,7 @@ class BaseDocWriter(object):
minVersion = self.documentObject.formatTuple
if (
any(
- isinstance(axis, DiscreteAxisDescriptor) or
+ hasattr(axis, 'values') or
axis.axisOrdering is not None or
axis.axisLabels
for axis in self.documentObject.axes
@@ -1445,10 +1457,10 @@ class BaseDocWriter(object):
for label in axisObject.axisLabels:
self._addAxisLabel(labelsElement, label)
axisElement.append(labelsElement)
- if isinstance(axisObject, AxisDescriptor):
+ if hasattr(axisObject, "minimum"):
axisElement.attrib['minimum'] = self.intOrFloat(axisObject.minimum)
axisElement.attrib['maximum'] = self.intOrFloat(axisObject.maximum)
- elif isinstance(axisObject, DiscreteAxisDescriptor):
+ elif hasattr(axisObject, "values"):
axisElement.attrib['values'] = " ".join(self.intOrFloat(v) for v in axisObject.values)
axisElement.attrib['default'] = self.intOrFloat(axisObject.default)
if axisObject.hidden:
@@ -1682,14 +1694,19 @@ class BaseDocWriter(object):
for subset in vf.axisSubsets:
subsetElement = ET.Element('axis-subset')
subsetElement.attrib['name'] = subset.name
- if isinstance(subset, RangeAxisSubsetDescriptor):
+ # Mypy doesn't support narrowing union types via hasattr()
+ # https://mypy.readthedocs.io/en/stable/type_narrowing.html
+ # TODO(Python 3.10): use TypeGuard
+ if hasattr(subset, "userMinimum"):
+ subset = cast(RangeAxisSubsetDescriptor, subset)
if subset.userMinimum != -math.inf:
subsetElement.attrib['userminimum'] = self.intOrFloat(subset.userMinimum)
if subset.userMaximum != math.inf:
subsetElement.attrib['usermaximum'] = self.intOrFloat(subset.userMaximum)
if subset.userDefault is not None:
subsetElement.attrib['userdefault'] = self.intOrFloat(subset.userDefault)
- elif isinstance(subset, ValueAxisSubsetDescriptor):
+ elif hasattr(subset, "userValue"):
+ subset = cast(ValueAxisSubsetDescriptor, subset)
subsetElement.attrib['uservalue'] = self.intOrFloat(subset.userValue)
subsetsElement.append(subsetElement)
vfElement.append(subsetsElement)
@@ -2904,8 +2921,12 @@ class DesignSpaceDocument(LogMixin, AsDictMixin):
discreteAxes = []
rangeAxisSubsets: List[Union[RangeAxisSubsetDescriptor, ValueAxisSubsetDescriptor]] = []
for axis in self.axes:
- if isinstance(axis, DiscreteAxisDescriptor):
- discreteAxes.append(axis)
+ if hasattr(axis, "values"):
+ # Mypy doesn't support narrowing union types via hasattr()
+ # TODO(Python 3.10): use TypeGuard
+ # https://mypy.readthedocs.io/en/stable/type_narrowing.html
+ axis = cast(DiscreteAxisDescriptor, axis)
+ discreteAxes.append(axis) # type: ignore
else:
rangeAxisSubsets.append(RangeAxisSubsetDescriptor(name=axis.name))
valueCombinations = itertools.product(*[axis.values for axis in discreteAxes])
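The hasattr() probes plus cast() above work around mypy's inability to narrow union types from attribute checks; the TODO points at TypeGuard as the eventual replacement. A hypothetical helper along those lines, not part of this change (requires Python 3.10 or typing_extensions):

    from typing import TypeGuard, Union

    from fontTools.designspaceLib import AxisDescriptor, DiscreteAxisDescriptor

    AnyAxis = Union[AxisDescriptor, DiscreteAxisDescriptor]

    def is_discrete(axis: AnyAxis) -> TypeGuard[DiscreteAxisDescriptor]:
        """True for discrete axes; lets type checkers narrow the union."""
        return hasattr(axis, "values")

    def describe(axis: AnyAxis) -> str:
        if is_discrete(axis):
            # Narrowed to DiscreteAxisDescriptor here.
            return f"{axis.name}: values {axis.values}"
        # TypeGuard does not narrow the negative branch, so a cast (as in
        # the code above) would still be needed for strict checking.
        return f"{axis.name}: {axis.minimum}..{axis.maximum}"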
diff --git a/Lib/fontTools/designspaceLib/split.py b/Lib/fontTools/designspaceLib/split.py
index 2a09418c..408de70a 100644
--- a/Lib/fontTools/designspaceLib/split.py
+++ b/Lib/fontTools/designspaceLib/split.py
@@ -7,7 +7,7 @@ from __future__ import annotations
import itertools
import logging
import math
-from typing import Any, Callable, Dict, Iterator, List, Tuple
+from typing import Any, Callable, Dict, Iterator, List, Tuple, cast
from fontTools.designspaceLib import (
AxisDescriptor,
@@ -21,9 +21,9 @@ from fontTools.designspaceLib import (
)
from fontTools.designspaceLib.statNames import StatNames, getStatNames
from fontTools.designspaceLib.types import (
+ ConditionSet,
Range,
Region,
- ConditionSet,
getVFUserRegion,
locationInRegion,
regionInRegion,
@@ -87,11 +87,18 @@ def splitInterpolable(
discreteAxes = []
interpolableUserRegion: Region = {}
for axis in doc.axes:
- if isinstance(axis, DiscreteAxisDescriptor):
+ if hasattr(axis, "values"):
+ # Mypy doesn't support narrowing union types via hasattr()
+ # TODO(Python 3.10): use TypeGuard
+ # https://mypy.readthedocs.io/en/stable/type_narrowing.html
+ axis = cast(DiscreteAxisDescriptor, axis)
discreteAxes.append(axis)
else:
+ axis = cast(AxisDescriptor, axis)
interpolableUserRegion[axis.name] = Range(
- axis.minimum, axis.maximum, axis.default
+ axis.minimum,
+ axis.maximum,
+ axis.default,
)
valueCombinations = itertools.product(*[axis.values for axis in discreteAxes])
for values in valueCombinations:
@@ -191,7 +198,11 @@ def _extractSubSpace(
for axis in doc.axes:
range = userRegion[axis.name]
- if isinstance(range, Range) and isinstance(axis, AxisDescriptor):
+ if isinstance(range, Range) and hasattr(axis, "minimum"):
+ # Mypy doesn't support narrowing union types via hasattr()
+ # TODO(Python 3.10): use TypeGuard
+ # https://mypy.readthedocs.io/en/stable/type_narrowing.html
+ axis = cast(AxisDescriptor, axis)
subDoc.addAxis(
AxisDescriptor(
# Same info
diff --git a/Lib/fontTools/designspaceLib/statNames.py b/Lib/fontTools/designspaceLib/statNames.py
index 0a475c89..1b672703 100644
--- a/Lib/fontTools/designspaceLib/statNames.py
+++ b/Lib/fontTools/designspaceLib/statNames.py
@@ -88,21 +88,30 @@ def getStatNames(
# Then build names for all these languages, but fallback to English
# whenever a translation is missing.
labels = _getAxisLabelsForUserLocation(doc.axes, userLocation)
- languages = set(language for label in labels for language in label.labelNames)
- languages.add("en")
- for language in languages:
- styleName = " ".join(
- label.labelNames.get(language, label.defaultName)
- for label in labels
- if not label.elidable
- )
- if not styleName and doc.elidedFallbackName is not None:
- styleName = doc.elidedFallbackName
- styleNames[language] = styleName
+ if labels:
+ languages = set(language for label in labels for language in label.labelNames)
+ languages.add("en")
+ for language in languages:
+ styleName = " ".join(
+ label.labelNames.get(language, label.defaultName)
+ for label in labels
+ if not label.elidable
+ )
+ if not styleName and doc.elidedFallbackName is not None:
+ styleName = doc.elidedFallbackName
+ styleNames[language] = styleName
+
+ if "en" not in familyNames or "en" not in styleNames:
+ # Not enough information to compute PS names or styleMap names
+ return StatNames(
+ familyNames=familyNames,
+ styleNames=styleNames,
+ postScriptFontName=None,
+ styleMapFamilyNames={},
+ styleMapStyleName=None,
+ )
- postScriptFontName = None
- if "en" in familyNames and "en" in styleNames:
- postScriptFontName = f"{familyNames['en']}-{styleNames['en']}".replace(" ", "")
+ postScriptFontName = f"{familyNames['en']}-{styleNames['en']}".replace(" ", "")
styleMapStyleName, regularUserLocation = _getRibbiStyle(doc, userLocation)
diff --git a/Lib/fontTools/designspaceLib/types.py b/Lib/fontTools/designspaceLib/types.py
index 8afea96c..80ba9d6d 100644
--- a/Lib/fontTools/designspaceLib/types.py
+++ b/Lib/fontTools/designspaceLib/types.py
@@ -1,12 +1,15 @@
from __future__ import annotations
from dataclasses import dataclass
-from typing import Dict, List, Optional, Union
+from typing import Dict, List, Optional, Union, cast
from fontTools.designspaceLib import (
+ AxisDescriptor,
DesignSpaceDocument,
+ DesignSpaceDocumentError,
RangeAxisSubsetDescriptor,
SimpleLocationDict,
+ ValueAxisSubsetDescriptor,
VariableFontDescriptor,
)
@@ -89,6 +92,10 @@ def userRegionToDesignRegion(doc: DesignSpaceDocument, userRegion: Region) -> Re
designRegion = {}
for name, value in userRegion.items():
axis = doc.getAxis(name)
+ if axis is None:
+ raise DesignSpaceDocumentError(
+ f"Cannot find axis named '{name}' for region."
+ )
if isinstance(value, (float, int)):
designRegion[name] = axis.map_forward(value)
else:
@@ -107,16 +114,34 @@ def getVFUserRegion(doc: DesignSpaceDocument, vf: VariableFontDescriptor) -> Reg
# - it's a single location = use it to know which rules should apply in the VF
for axisSubset in vf.axisSubsets:
axis = doc.getAxis(axisSubset.name)
- if isinstance(axisSubset, RangeAxisSubsetDescriptor):
+ if axis is None:
+ raise DesignSpaceDocumentError(
+ f"Cannot find axis named '{axisSubset.name}' for variable font '{vf.name}'."
+ )
+ if hasattr(axisSubset, "userMinimum"):
+ # Mypy doesn't support narrowing union types via hasattr()
+ # TODO(Python 3.10): use TypeGuard
+ # https://mypy.readthedocs.io/en/stable/type_narrowing.html
+ axisSubset = cast(RangeAxisSubsetDescriptor, axisSubset)
+ if not hasattr(axis, "minimum"):
+ raise DesignSpaceDocumentError(
+ f"Cannot select a range over '{axis.name}' for variable font '{vf.name}' "
+ "because it's a discrete axis, use only 'userValue' instead."
+ )
+ axis = cast(AxisDescriptor, axis)
vfUserRegion[axis.name] = Range(
max(axisSubset.userMinimum, axis.minimum),
min(axisSubset.userMaximum, axis.maximum),
axisSubset.userDefault or axis.default,
)
else:
+ axisSubset = cast(ValueAxisSubsetDescriptor, axisSubset)
vfUserRegion[axis.name] = axisSubset.userValue
# Any axis not mentioned explicitly has a single location = default value
for axis in doc.axes:
if axis.name not in vfUserRegion:
+ assert isinstance(
+ axis.default, (int, float)
+ ), f"Axis '{axis.name}' has no valid default value."
vfUserRegion[axis.name] = axis.default
return vfUserRegion
diff --git a/Lib/fontTools/feaLib/builder.py b/Lib/fontTools/feaLib/builder.py
index a1644875..0a991761 100644
--- a/Lib/fontTools/feaLib/builder.py
+++ b/Lib/fontTools/feaLib/builder.py
@@ -230,8 +230,6 @@ class Builder(object):
self.font["GDEF"] = gdef
elif "GDEF" in self.font:
del self.font["GDEF"]
- elif self.varstorebuilder:
- raise FeatureLibError("Must save GDEF when compiling a variable font")
if "BASE" in tables:
base = self.buildBASE()
if base:
@@ -764,7 +762,7 @@ class Builder(object):
gdef.Version = 0x00010002 if gdef.MarkGlyphSetsDef else 0x00010000
if self.varstorebuilder:
store = self.varstorebuilder.finish()
- if store.VarData:
+ if store:
gdef.Version = 0x00010003
gdef.VarStore = store
varidx_map = store.optimize()
diff --git a/Lib/fontTools/feaLib/parser.py b/Lib/fontTools/feaLib/parser.py
index fd53573d..04ff6030 100644
--- a/Lib/fontTools/feaLib/parser.py
+++ b/Lib/fontTools/feaLib/parser.py
@@ -73,6 +73,7 @@ class Parser(object):
self.next_token_location_ = None
lexerClass = IncludingLexer if followIncludes else NonIncludingLexer
self.lexer_ = lexerClass(featurefile, includeDir=includeDir)
+ self.missing = {}
self.advance_lexer_(comments=True)
def parse(self):
@@ -125,6 +126,16 @@ class Parser(object):
),
self.cur_token_location_,
)
+ # Report any missing glyphs at the end of parsing
+ if self.missing:
+ error = [
+ " %s (first found at %s)" % (name, loc)
+ for name, loc in self.missing.items()
+ ]
+ raise FeatureLibError(
+ "The following glyph names are referenced but are missing from the "
+ "glyph set:\n" + ("\n".join(error)), None
+ )
return self.doc_
def parse_anchor_(self):
@@ -1242,14 +1253,6 @@ class Parser(object):
raise FeatureLibError(
"Name id value cannot be greater than 32767", self.cur_token_location_
)
- if 1 <= nameID <= 6:
- log.warning(
- "Name id %d cannot be set from the feature file. "
- "Ignoring record" % nameID
- )
- self.parse_name_() # skip to the next record
- return None
-
platformID, platEncID, langID, string = self.parse_name_()
return self.ast.NameRecord(
nameID, platformID, platEncID, langID, string, location=location
@@ -2073,19 +2076,18 @@ class Parser(object):
raise FeatureLibError("Expected a glyph name or CID", self.cur_token_location_)
def check_glyph_name_in_glyph_set(self, *names):
- """Raises if glyph name (just `start`) or glyph names of a
- range (`start` and `end`) are not in the glyph set.
+ """Adds a glyph name (just `start`) or glyph names of a
+ range (`start` and `end`) which are not in the glyph set
+ to the "missing list" for future error reporting.
If no glyph set is present, does nothing.
"""
if self.glyphNames_:
- missing = [name for name in names if name not in self.glyphNames_]
- if missing:
- raise FeatureLibError(
- "The following glyph names are referenced but are missing from the "
- f"glyph set: {', '.join(missing)}",
- self.cur_token_location_,
- )
+ for name in names:
+ if name in self.glyphNames_:
+ continue
+ if name not in self.missing:
+ self.missing[name] = self.cur_token_location_
def expect_markClass_reference_(self):
name = self.expect_class_name_()
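The parser now accumulates unknown glyph names in self.missing and raises a single FeatureLibError at the end of parse(), instead of stopping at the first unknown name. Roughly, with an illustrative feature snippet and glyph set:

    from io import StringIO

    from fontTools.feaLib.error import FeatureLibError
    from fontTools.feaLib.parser import Parser

    fea = StringIO("feature liga { sub f i by f_i; } liga;")
    try:
        # "f_i" is not in the glyph set, so it is recorded in self.missing
        # and reported only after the whole file has been parsed.
        Parser(fea, glyphNames={"f", "i"}).parse()
    except FeatureLibError as error:
        print(error)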
diff --git a/Lib/fontTools/fontBuilder.py b/Lib/fontTools/fontBuilder.py
index ad7180cb..60382683 100644
--- a/Lib/fontTools/fontBuilder.py
+++ b/Lib/fontTools/fontBuilder.py
@@ -838,6 +838,7 @@ class FontBuilder(object):
varStore=None,
varIndexMap=None,
clipBoxes=None,
+ allowLayerReuse=True,
):
"""Build new COLR table using color layers dictionary.
@@ -853,6 +854,7 @@ class FontBuilder(object):
varStore=varStore,
varIndexMap=varIndexMap,
clipBoxes=clipBoxes,
+ allowLayerReuse=allowLayerReuse,
)
def setupCPAL(
diff --git a/Lib/fontTools/merge/cmap.py b/Lib/fontTools/merge/cmap.py
index 7ade4ac9..7d98b588 100644
--- a/Lib/fontTools/merge/cmap.py
+++ b/Lib/fontTools/merge/cmap.py
@@ -18,10 +18,10 @@ def computeMegaGlyphOrder(merger, glyphOrders):
for i,glyphName in enumerate(glyphOrder):
if glyphName in megaOrder:
n = megaOrder[glyphName]
- while (glyphName + "#" + repr(n)) in megaOrder:
+ while (glyphName + "." + repr(n)) in megaOrder:
n += 1
megaOrder[glyphName] = n
- glyphName += "#" + repr(n)
+ glyphName += "." + repr(n)
glyphOrder[i] = glyphName
megaOrder[glyphName] = 1
merger.glyphOrder = megaOrder = list(megaOrder.keys())
diff --git a/Lib/fontTools/misc/cliTools.py b/Lib/fontTools/misc/cliTools.py
index e8c17677..e7dadf98 100644
--- a/Lib/fontTools/misc/cliTools.py
+++ b/Lib/fontTools/misc/cliTools.py
@@ -6,7 +6,7 @@ import re
numberAddedRE = re.compile(r"#\d+$")
-def makeOutputFileName(input, outputDir=None, extension=None, overWrite=False):
+def makeOutputFileName(input, outputDir=None, extension=None, overWrite=False, suffix=""):
"""Generates a suitable file name for writing output.
Often tools will want to take a file, do some kind of transformation to it,
@@ -14,6 +14,7 @@ def makeOutputFileName(input, outputDir=None, extension=None, overWrite=False):
output file, through one or more of the following steps:
- changing the output directory
+ - appending suffix before file extension
- replacing the file extension
- suffixing the filename with a number (``#1``, ``#2``, etc.) to avoid
overwriting an existing file.
@@ -21,6 +22,8 @@ def makeOutputFileName(input, outputDir=None, extension=None, overWrite=False):
Args:
input: Name of input file.
outputDir: Optionally, a new directory to write the file into.
+ suffix: Optionally, a string suffix appended to the file name before
+ the extension.
extension: Optionally, a replacement for the current file extension.
overWrite: Overwriting an existing file is permitted if true; if false
and the proposed filename exists, a new name will be generated by
@@ -36,11 +39,11 @@ def makeOutputFileName(input, outputDir=None, extension=None, overWrite=False):
fileName = numberAddedRE.split(fileName)[0]
if extension is None:
extension = os.path.splitext(input)[1]
- output = os.path.join(dirName, fileName + extension)
+ output = os.path.join(dirName, fileName + suffix + extension)
n = 1
if not overWrite:
while os.path.exists(output):
output = os.path.join(
- dirName, fileName + "#" + repr(n) + extension)
+ dirName, fileName + suffix + "#" + repr(n) + extension)
n += 1
return output
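A quick illustration of the new suffix parameter of makeOutputFileName (the paths are made up):

    from fontTools.misc.cliTools import makeOutputFileName

    # The suffix goes right before the extension:
    #   /fonts/Foo.ttf -> /fonts/Foo-instance.ttf
    print(makeOutputFileName("/fonts/Foo.ttf", suffix="-instance"))

    # It combines with the other options, e.g. a replacement extension:
    #   /fonts/Foo.ttf -> build/Foo-subset.woff2
    print(makeOutputFileName("/fonts/Foo.ttf", outputDir="build",
                             extension=".woff2", suffix="-subset"))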
diff --git a/Lib/fontTools/misc/symfont.py b/Lib/fontTools/misc/symfont.py
index a1a87300..3ff2b5df 100644
--- a/Lib/fontTools/misc/symfont.py
+++ b/Lib/fontTools/misc/symfont.py
@@ -108,16 +108,34 @@ MomentYYPen = partial(GreenPen, func=y*y)
MomentXYPen = partial(GreenPen, func=x*y)
-def printGreenPen(penName, funcs, file=sys.stdout):
+def printGreenPen(penName, funcs, file=sys.stdout, docstring=None):
+
+ if docstring is not None:
+ print('"""%s"""' % docstring)
print(
-'''from fontTools.pens.basePen import BasePen
+'''from fontTools.pens.basePen import BasePen, OpenContourError
+try:
+ import cython
+except ImportError:
+ # if cython not installed, use mock module with no-op decorators and types
+ from fontTools.misc import cython
+
+if cython.compiled:
+ # Yep, I'm compiled.
+ COMPILED = True
+else:
+ # Just a lowly interpreted script.
+ COMPILED = False
+
+
+__all__ = ["%s"]
class %s(BasePen):
def __init__(self, glyphset=None):
BasePen.__init__(self, glyphset)
-'''%penName, file=file)
+'''% (penName, penName), file=file)
for name,f in funcs:
print(' self.%s = 0' % name, file=file)
print('''
@@ -133,41 +151,58 @@ class %s(BasePen):
p0 = self._getCurrentPoint()
if p0 != self.__startPoint:
# Green theorem is not defined on open contours.
- raise NotImplementedError
+ raise OpenContourError(
+ "Green theorem is not defined on open contours."
+ )
''', end='', file=file)
for n in (1, 2, 3):
+
+ subs = {P[i][j]: [X, Y][j][i] for i in range(n+1) for j in range(2)}
+ greens = [green(f, BezierCurve[n]) for name,f in funcs]
+ greens = [sp.gcd_terms(f.collect(sum(P,()))) for f in greens] # Optimize
+ greens = [f.subs(subs) for f in greens] # Convert to p to x/y
+ defs, exprs = sp.cse(greens,
+ optimizations='basic',
+ symbols=(sp.Symbol('r%d'%i) for i in count()))
+
+ print()
+ for name,value in defs:
+ print(' @cython.locals(%s=cython.double)' % name, file=file)
if n == 1:
- print('''
+ print('''\
+ @cython.locals(x0=cython.double, y0=cython.double)
+ @cython.locals(x1=cython.double, y1=cython.double)
def _lineTo(self, p1):
x0,y0 = self._getCurrentPoint()
x1,y1 = p1
''', file=file)
elif n == 2:
- print('''
+ print('''\
+ @cython.locals(x0=cython.double, y0=cython.double)
+ @cython.locals(x1=cython.double, y1=cython.double)
+ @cython.locals(x2=cython.double, y2=cython.double)
def _qCurveToOne(self, p1, p2):
x0,y0 = self._getCurrentPoint()
x1,y1 = p1
x2,y2 = p2
''', file=file)
elif n == 3:
- print('''
+ print('''\
+ @cython.locals(x0=cython.double, y0=cython.double)
+ @cython.locals(x1=cython.double, y1=cython.double)
+ @cython.locals(x2=cython.double, y2=cython.double)
+ @cython.locals(x3=cython.double, y3=cython.double)
def _curveToOne(self, p1, p2, p3):
x0,y0 = self._getCurrentPoint()
x1,y1 = p1
x2,y2 = p2
x3,y3 = p3
''', file=file)
- subs = {P[i][j]: [X, Y][j][i] for i in range(n+1) for j in range(2)}
- greens = [green(f, BezierCurve[n]) for name,f in funcs]
- greens = [sp.gcd_terms(f.collect(sum(P,()))) for f in greens] # Optimize
- greens = [f.subs(subs) for f in greens] # Convert to p to x/y
- defs, exprs = sp.cse(greens,
- optimizations='basic',
- symbols=(sp.Symbol('r%d'%i) for i in count()))
for name,value in defs:
print(' %s = %s' % (name, value), file=file)
+
print(file=file)
for name,value in zip([f[0] for f in funcs], exprs):
print(' self.%s += %s' % (name, value), file=file)
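printGreenPen now emits the cython.locals decorators and the OpenContourError import that show up in the regenerated momentsPen.py further down. A hedged sketch of regenerating such a pen (the x and y sympy symbols come from symfont itself; sympy must be installed):

    from fontTools.misc.symfont import printGreenPen, x, y

    with open("momentsPen_generated.py", "w") as f:
        printGreenPen(
            "MomentsPen",
            [
                ("area", 1),
                ("momentX", x),
                ("momentY", y),
                ("momentXX", x * x),
                ("momentXY", x * y),
                ("momentYY", y * y),
            ],
            file=f,
        )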
diff --git a/Lib/fontTools/misc/treeTools.py b/Lib/fontTools/misc/treeTools.py
new file mode 100644
index 00000000..24e10ba5
--- /dev/null
+++ b/Lib/fontTools/misc/treeTools.py
@@ -0,0 +1,45 @@
+"""Generic tools for working with trees."""
+
+from math import ceil, log
+
+
+def build_n_ary_tree(leaves, n):
+ """Build N-ary tree from sequence of leaf nodes.
+
+ Return a list of lists where each non-leaf node is a list containing
+ max n nodes.
+ """
+ if not leaves:
+ return []
+
+ assert n > 1
+
+ depth = ceil(log(len(leaves), n))
+
+ if depth <= 1:
+ return list(leaves)
+
+ # Fully populate complete subtrees of root until we have enough leaves left
+ root = []
+ unassigned = None
+ full_step = n ** (depth - 1)
+ for i in range(0, len(leaves), full_step):
+ subtree = leaves[i : i + full_step]
+ if len(subtree) < full_step:
+ unassigned = subtree
+ break
+ while len(subtree) > n:
+ subtree = [subtree[k : k + n] for k in range(0, len(subtree), n)]
+ root.append(subtree)
+
+ if unassigned:
+ # Recurse to fill the last subtree, which is the only partially populated one
+ subtree = build_n_ary_tree(unassigned, n)
+ if len(subtree) <= n - len(root):
+ # replace last subtree with its children if they can still fit
+ root.extend(subtree)
+ else:
+ root.append(subtree)
+ assert len(root) <= n
+
+ return root
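build_n_ary_tree (moved here from colorLib.builder, where it used to be the private _build_n_ary_tree) groups a flat sequence into nested lists of at most n children per node. For example:

    from fontTools.misc.treeTools import build_n_ary_tree

    print(build_n_ary_tree(list(range(7)), n=3))
    # 7 leaves with n=3 need depth 2: two full subtrees of three leaves,
    # and the leftover leaf is hoisted to the root since it still fits:
    # [[0, 1, 2], [3, 4, 5], 6]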
diff --git a/Lib/fontTools/misc/visitor.py b/Lib/fontTools/misc/visitor.py
new file mode 100644
index 00000000..3d28135f
--- /dev/null
+++ b/Lib/fontTools/misc/visitor.py
@@ -0,0 +1,143 @@
+"""Generic visitor pattern implementation for Python objects."""
+
+import enum
+
+
+class Visitor(object):
+
+ defaultStop = False
+
+ @classmethod
+ def _register(celf, clazzes_attrs):
+ assert celf != Visitor, "Subclass Visitor instead."
+ if "_visitors" not in celf.__dict__:
+ celf._visitors = {}
+
+ def wrapper(method):
+ assert method.__name__ == "visit"
+ for clazzes, attrs in clazzes_attrs:
+ if type(clazzes) != tuple:
+ clazzes = (clazzes,)
+ if type(attrs) == str:
+ attrs = (attrs,)
+ for clazz in clazzes:
+ _visitors = celf._visitors.setdefault(clazz, {})
+ for attr in attrs:
+ assert attr not in _visitors, (
+ "Oops, class '%s' has visitor function for '%s' defined already."
+ % (clazz.__name__, attr)
+ )
+ _visitors[attr] = method
+ return None
+
+ return wrapper
+
+ @classmethod
+ def register(celf, clazzes):
+ if type(clazzes) != tuple:
+ clazzes = (clazzes,)
+ return celf._register([(clazzes, (None,))])
+
+ @classmethod
+ def register_attr(celf, clazzes, attrs):
+ clazzes_attrs = []
+ if type(clazzes) != tuple:
+ clazzes = (clazzes,)
+ if type(attrs) == str:
+ attrs = (attrs,)
+ for clazz in clazzes:
+ clazzes_attrs.append((clazz, attrs))
+ return celf._register(clazzes_attrs)
+
+ @classmethod
+ def register_attrs(celf, clazzes_attrs):
+ return celf._register(clazzes_attrs)
+
+ @classmethod
+ def _visitorsFor(celf, thing, _default={}):
+ typ = type(thing)
+
+ for celf in celf.mro():
+
+ _visitors = getattr(celf, "_visitors", None)
+ if _visitors is None:
+ break
+
+ m = celf._visitors.get(typ, None)
+ if m is not None:
+ return m
+
+ return _default
+
+ def visitObject(self, obj, *args, **kwargs):
+ """Called to visit an object. This function loops over all non-private
+ attributes of the objects and calls any user-registered (via
+ @register_attr() or @register_attrs()) visit() functions.
+
+ If there is no user-registered visit function, or if there is and it
+ returns True, or it returns None (or doesn't return anything) and
+ visitor.defaultStop is False (default), then the visitor will proceed
+ to call self.visitAttr()"""
+
+ keys = sorted(vars(obj).keys())
+ _visitors = self._visitorsFor(obj)
+ defaultVisitor = _visitors.get("*", None)
+ for key in keys:
+ if key[0] == "_":
+ continue
+ value = getattr(obj, key)
+ visitorFunc = _visitors.get(key, defaultVisitor)
+ if visitorFunc is not None:
+ ret = visitorFunc(self, obj, key, value, *args, **kwargs)
+ if ret == False or (ret is None and self.defaultStop):
+ continue
+ self.visitAttr(obj, key, value, *args, **kwargs)
+
+ def visitAttr(self, obj, attr, value, *args, **kwargs):
+ """Called to visit an attribute of an object."""
+ self.visit(value, *args, **kwargs)
+
+ def visitList(self, obj, *args, **kwargs):
+ """Called to visit any value that is a list."""
+ for value in obj:
+ self.visit(value, *args, **kwargs)
+
+ def visitDict(self, obj, *args, **kwargs):
+ """Called to visit any value that is a dictionary."""
+ for value in obj.values():
+ self.visit(value, *args, **kwargs)
+
+ def visitLeaf(self, obj, *args, **kwargs):
+ """Called to visit any value that is not an object, list,
+ or dictionary."""
+ pass
+
+ def visit(self, obj, *args, **kwargs):
+ """This is the main entry to the visitor. The visitor will visit object
+ obj.
+
+ The visitor will first determine if there is a registered (via
+ @register()) visit function for the type of object. If there is, it
+ will be called, and (visitor, obj, *args, **kwargs) will be passed to
+ the user visit function.
+
+ If there is no user-registered visit function, or if there is and it
+ returns True, or it returns None (or doesn't return anything) and
+ visitor.defaultStop is False (default), then the visitor will proceed
+ to dispatch to one of self.visitObject(), self.visitList(),
+ self.visitDict(), or self.visitLeaf() (any of which can be overridden in
+ a subclass)."""
+
+ visitorFunc = self._visitorsFor(obj).get(None, None)
+ if visitorFunc is not None:
+ ret = visitorFunc(self, obj, *args, **kwargs)
+ if ret == False or (ret is None and self.defaultStop):
+ return
+ if hasattr(obj, "__dict__") and not isinstance(obj, enum.Enum):
+ self.visitObject(obj, *args, **kwargs)
+ elif isinstance(obj, list):
+ self.visitList(obj, *args, **kwargs)
+ elif isinstance(obj, dict):
+ self.visitDict(obj, *args, **kwargs)
+ else:
+ self.visitLeaf(obj, *args, **kwargs)
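The new Visitor dispatches through visit functions registered with the @register/@register_attr class decorators applied after the subclass body (ttLib.ttVisitor elsewhere in this change builds on the same pattern). A small hedged example with a made-up Node class:

    from fontTools.misc.visitor import Visitor


    class NameCollector(Visitor):
        """Collects every 'name' attribute found while walking an object graph."""

        def __init__(self):
            self.names = []


    class Node:
        def __init__(self, name, children=()):
            self.name = name
            self.children = list(children)


    @NameCollector.register_attr(Node, "name")
    def visit(visitor, obj, attr, value):
        visitor.names.append(value)
        # Returning None with defaultStop=False means the generic traversal
        # still descends into this attribute afterwards.


    collector = NameCollector()
    collector.visit(Node("root", [Node("a"), Node("b")]))
    print(collector.names)  # ['a', 'b', 'root'] (attributes visit in sorted order)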
diff --git a/Lib/fontTools/mtiLib/__init__.py b/Lib/fontTools/mtiLib/__init__.py
index 667a216d..f117a742 100644
--- a/Lib/fontTools/mtiLib/__init__.py
+++ b/Lib/fontTools/mtiLib/__init__.py
@@ -121,7 +121,7 @@ def parseScriptList(lines, featureMap=None):
script = script[0].Script
else:
scriptRec = ot.ScriptRecord()
- scriptRec.ScriptTag = scriptTag
+ scriptRec.ScriptTag = scriptTag + ' '*(4 - len(scriptTag))
scriptRec.Script = ot.Script()
records.append(scriptRec)
script = scriptRec.Script
@@ -1165,7 +1165,7 @@ def build(f, font, tableTag=None):
def main(args=None, font=None):
- """Convert a FontDame OTL file to TTX XML.
+ """Convert a FontDame OTL file to TTX XML
Writes XML output to stdout.
diff --git a/Lib/fontTools/otlLib/optimize/__init__.py b/Lib/fontTools/otlLib/optimize/__init__.py
index a9512fb0..25bce9cd 100644
--- a/Lib/fontTools/otlLib/optimize/__init__.py
+++ b/Lib/fontTools/otlLib/optimize/__init__.py
@@ -4,7 +4,7 @@ from fontTools.ttLib import TTFont
def main(args=None):
- """Optimize the layout tables of an existing font."""
+ """Optimize the layout tables of an existing font"""
from argparse import ArgumentParser
from fontTools import configLogger
diff --git a/Lib/fontTools/pens/basePen.py b/Lib/fontTools/pens/basePen.py
index e06c00ef..f981f806 100644
--- a/Lib/fontTools/pens/basePen.py
+++ b/Lib/fontTools/pens/basePen.py
@@ -47,6 +47,9 @@ __all__ = ["AbstractPen", "NullPen", "BasePen", "PenError",
class PenError(Exception):
"""Represents an error during penning."""
+class OpenContourError(PenError):
+ pass
+
class AbstractPen:
diff --git a/Lib/fontTools/pens/cairoPen.py b/Lib/fontTools/pens/cairoPen.py
new file mode 100644
index 00000000..9cd5da91
--- /dev/null
+++ b/Lib/fontTools/pens/cairoPen.py
@@ -0,0 +1,26 @@
+"""Pen to draw to a Cairo graphics library context."""
+
+from fontTools.pens.basePen import BasePen
+
+
+__all__ = ["CairoPen"]
+
+
+class CairoPen(BasePen):
+ """Pen to draw to a Cairo graphics library context."""
+
+ def __init__(self, glyphSet, context):
+ BasePen.__init__(self, glyphSet)
+ self.context = context
+
+ def _moveTo(self, p):
+ self.context.move_to(*p)
+
+ def _lineTo(self, p):
+ self.context.line_to(*p)
+
+ def _curveToOne(self, p1, p2, p3):
+ self.context.curve_to(*p1, *p2, *p3)
+
+ def _closePath(self):
+ self.context.close_path()
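CairoPen simply forwards segments to a pycairo context; a hedged usage sketch, assuming pycairo is installed and "MyFont.ttf" is some local font:

    import cairo  # pycairo

    from fontTools.pens.cairoPen import CairoPen
    from fontTools.ttLib import TTFont

    font = TTFont("MyFont.ttf")
    glyphSet = font.getGlyphSet()

    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 1000, 1000)
    context = cairo.Context(surface)
    context.scale(1, -1)        # flip y: font coordinates point upwards
    context.translate(0, -1000)

    pen = CairoPen(glyphSet, context)
    glyphSet["A"].draw(pen)     # the outline is now a path in the context
    context.fill()
    surface.write_to_png("A.png")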
diff --git a/Lib/fontTools/pens/momentsPen.py b/Lib/fontTools/pens/momentsPen.py
index 8c90f70a..7cd87919 100644
--- a/Lib/fontTools/pens/momentsPen.py
+++ b/Lib/fontTools/pens/momentsPen.py
@@ -1,14 +1,19 @@
-"""Pen calculating 0th, 1st, and 2nd moments of area of glyph shapes.
-This is low-level, autogenerated pen. Use statisticsPen instead."""
-from fontTools.pens.basePen import BasePen
+from fontTools.pens.basePen import BasePen, OpenContourError
+try:
+ import cython
+except ImportError:
+ # if cython not installed, use mock module with no-op decorators and types
+ from fontTools.misc import cython
+if cython.compiled:
+ # Yep, I'm compiled.
+ COMPILED = True
+else:
+ # Just a lowly interpreted script.
+ COMPILED = False
-__all__ = ["MomentsPen"]
-
-
-class OpenContourError(NotImplementedError):
- pass
+__all__ = ["MomentsPen"]
class MomentsPen(BasePen):
@@ -33,10 +38,26 @@ class MomentsPen(BasePen):
def _endPath(self):
p0 = self._getCurrentPoint()
if p0 != self.__startPoint:
+ # Green theorem is not defined on open contours.
raise OpenContourError(
"Green theorem is not defined on open contours."
)
+ @cython.locals(r0=cython.double)
+ @cython.locals(r1=cython.double)
+ @cython.locals(r2=cython.double)
+ @cython.locals(r3=cython.double)
+ @cython.locals(r4=cython.double)
+ @cython.locals(r5=cython.double)
+ @cython.locals(r6=cython.double)
+ @cython.locals(r7=cython.double)
+ @cython.locals(r8=cython.double)
+ @cython.locals(r9=cython.double)
+ @cython.locals(r10=cython.double)
+ @cython.locals(r11=cython.double)
+ @cython.locals(r12=cython.double)
+ @cython.locals(x0=cython.double, y0=cython.double)
+ @cython.locals(x1=cython.double, y1=cython.double)
def _lineTo(self, p1):
x0,y0 = self._getCurrentPoint()
x1,y1 = p1
@@ -44,246 +65,431 @@ class MomentsPen(BasePen):
r0 = x1*y0
r1 = x1*y1
r2 = x1**2
- r3 = x0**2
- r4 = 2*y0
- r5 = y0 - y1
- r6 = r5*x0
- r7 = y0**2
- r8 = y1**2
- r9 = x1**3
- r10 = r4*y1
+ r3 = r2*y1
+ r4 = y0 - y1
+ r5 = r4*x0
+ r6 = x0**2
+ r7 = 2*y0
+ r8 = y0**2
+ r9 = y1**2
+ r10 = x1**3
r11 = y0**3
r12 = y1**3
self.area += -r0/2 - r1/2 + x0*(y0 + y1)/2
- self.momentX += -r2*y0/6 - r2*y1/3 + r3*(r4 + y1)/6 - r6*x1/6
- self.momentY += -r0*y1/6 - r7*x1/6 - r8*x1/6 + x0*(r7 + r8 + y0*y1)/6
- self.momentXX += -r2*r6/12 - r3*r5*x1/12 - r9*y0/12 - r9*y1/4 + x0**3*(3*y0 + y1)/12
- self.momentXY += -r10*r2/24 - r2*r7/24 - r2*r8/8 + r3*(r10 + 3*r7 + r8)/24 - x0*x1*(r7 - r8)/12
- self.momentYY += -r0*r8/12 - r1*r7/12 - r11*x1/12 - r12*x1/12 + x0*(r11 + r12 + r7*y1 + r8*y0)/12
+ self.momentX += -r2*y0/6 - r3/3 - r5*x1/6 + r6*(r7 + y1)/6
+ self.momentY += -r0*y1/6 - r8*x1/6 - r9*x1/6 + x0*(r8 + r9 + y0*y1)/6
+ self.momentXX += -r10*y0/12 - r10*y1/4 - r2*r5/12 - r4*r6*x1/12 + x0**3*(3*y0 + y1)/12
+ self.momentXY += -r2*r8/24 - r2*r9/8 - r3*r7/24 + r6*(r7*y1 + 3*r8 + r9)/24 - x0*x1*(r8 - r9)/12
+ self.momentYY += -r0*r9/12 - r1*r8/12 - r11*x1/12 - r12*x1/12 + x0*(r11 + r12 + r8*y1 + r9*y0)/12
+ @cython.locals(r0=cython.double)
+ @cython.locals(r1=cython.double)
+ @cython.locals(r2=cython.double)
+ @cython.locals(r3=cython.double)
+ @cython.locals(r4=cython.double)
+ @cython.locals(r5=cython.double)
+ @cython.locals(r6=cython.double)
+ @cython.locals(r7=cython.double)
+ @cython.locals(r8=cython.double)
+ @cython.locals(r9=cython.double)
+ @cython.locals(r10=cython.double)
+ @cython.locals(r11=cython.double)
+ @cython.locals(r12=cython.double)
+ @cython.locals(r13=cython.double)
+ @cython.locals(r14=cython.double)
+ @cython.locals(r15=cython.double)
+ @cython.locals(r16=cython.double)
+ @cython.locals(r17=cython.double)
+ @cython.locals(r18=cython.double)
+ @cython.locals(r19=cython.double)
+ @cython.locals(r20=cython.double)
+ @cython.locals(r21=cython.double)
+ @cython.locals(r22=cython.double)
+ @cython.locals(r23=cython.double)
+ @cython.locals(r24=cython.double)
+ @cython.locals(r25=cython.double)
+ @cython.locals(r26=cython.double)
+ @cython.locals(r27=cython.double)
+ @cython.locals(r28=cython.double)
+ @cython.locals(r29=cython.double)
+ @cython.locals(r30=cython.double)
+ @cython.locals(r31=cython.double)
+ @cython.locals(r32=cython.double)
+ @cython.locals(r33=cython.double)
+ @cython.locals(r34=cython.double)
+ @cython.locals(r35=cython.double)
+ @cython.locals(r36=cython.double)
+ @cython.locals(r37=cython.double)
+ @cython.locals(r38=cython.double)
+ @cython.locals(r39=cython.double)
+ @cython.locals(r40=cython.double)
+ @cython.locals(r41=cython.double)
+ @cython.locals(r42=cython.double)
+ @cython.locals(r43=cython.double)
+ @cython.locals(r44=cython.double)
+ @cython.locals(r45=cython.double)
+ @cython.locals(r46=cython.double)
+ @cython.locals(r47=cython.double)
+ @cython.locals(r48=cython.double)
+ @cython.locals(r49=cython.double)
+ @cython.locals(r50=cython.double)
+ @cython.locals(r51=cython.double)
+ @cython.locals(r52=cython.double)
+ @cython.locals(r53=cython.double)
+ @cython.locals(x0=cython.double, y0=cython.double)
+ @cython.locals(x1=cython.double, y1=cython.double)
+ @cython.locals(x2=cython.double, y2=cython.double)
def _qCurveToOne(self, p1, p2):
x0,y0 = self._getCurrentPoint()
x1,y1 = p1
x2,y2 = p2
- r0 = 2*x1
- r1 = r0*y2
- r2 = 2*y1
- r3 = r2*x2
- r4 = 3*y2
- r5 = r4*x2
- r6 = 3*y0
- r7 = x1**2
- r8 = 2*y2
- r9 = x2**2
- r10 = 4*y1
- r11 = 10*y2
- r12 = r0*x2
- r13 = x0**2
- r14 = 10*y0
- r15 = x2*y2
- r16 = r0*y1 + r15
- r17 = 4*x1
- r18 = x2*y0
- r19 = r10*r15
- r20 = y1**2
- r21 = 2*r20
- r22 = y2**2
- r23 = r22*x2
- r24 = 5*r23
- r25 = y0**2
- r26 = y0*y2
- r27 = 5*r25
- r28 = 8*x1**3
- r29 = x2**3
- r30 = 30*y1
- r31 = 6*y1
- r32 = 10*r9*x1
- r33 = 4*r7
- r34 = 5*y2
- r35 = 12*r7
- r36 = r5 + 20*x1*y1
- r37 = 30*x1
- r38 = 12*x1
- r39 = 20*r7
- r40 = 8*r7*y1
- r41 = r34*r9
- r42 = 60*y1
- r43 = 20*r20
- r44 = 4*r20
- r45 = 15*r22
- r46 = r38*x2
- r47 = y1*y2
- r48 = 8*r20*x1 + r24
- r49 = 6*x1
- r50 = 8*y1**3
- r51 = y2**3
- r52 = y0**3
- r53 = 10*y1
- r54 = 12*y1
- r55 = 12*r20
+ r0 = 2*y1
+ r1 = r0*x2
+ r2 = x2*y2
+ r3 = 3*r2
+ r4 = 2*x1
+ r5 = 3*y0
+ r6 = x1**2
+ r7 = x2**2
+ r8 = 4*y1
+ r9 = 10*y2
+ r10 = 2*y2
+ r11 = r4*x2
+ r12 = x0**2
+ r13 = 10*y0
+ r14 = r4*y2
+ r15 = x2*y0
+ r16 = 4*x1
+ r17 = r0*x1 + r2
+ r18 = r2*r8
+ r19 = y1**2
+ r20 = 2*r19
+ r21 = y2**2
+ r22 = r21*x2
+ r23 = 5*r22
+ r24 = y0**2
+ r25 = y0*y2
+ r26 = 5*r24
+ r27 = x1**3
+ r28 = x2**3
+ r29 = 30*y1
+ r30 = 6*y1
+ r31 = 10*r7*x1
+ r32 = 5*y2
+ r33 = 12*r6
+ r34 = 30*x1
+ r35 = x1*y1
+ r36 = r3 + 20*r35
+ r37 = 12*x1
+ r38 = 20*r6
+ r39 = 8*r6*y1
+ r40 = r32*r7
+ r41 = 60*y1
+ r42 = 20*r19
+ r43 = 4*r19
+ r44 = 15*r21
+ r45 = 12*x2
+ r46 = 12*y2
+ r47 = 6*x1
+ r48 = 8*r19*x1 + r23
+ r49 = 8*y1**3
+ r50 = y2**3
+ r51 = y0**3
+ r52 = 10*y1
+ r53 = 12*y1
- self.area += r1/6 - r3/6 - r5/6 + x0*(r2 + r6 + y2)/6 - y0*(r0 + x2)/6
- self.momentX += -r10*r9/30 - r11*r9/30 - r12*(-r8 + y1)/30 + r13*(r10 + r14 + y2)/30 + r7*r8/30 + x0*(r1 + r16 - r17*y0 - r18)/30 - y0*(r12 + 2*r7 + r9)/30
- self.momentY += r1*(r8 + y1)/30 - r19/30 - r21*x2/30 - r24/30 - r25*(r17 + x2)/30 + x0*(r10*y0 + r2*y2 + r21 + r22 + r26 + r27)/30 - y0*(r16 + r3)/30
- self.momentXX += r13*(r11*x1 - 5*r18 + r3 + r36 - r37*y0)/420 + r28*y2/420 - r29*r30/420 - r29*y2/4 - r32*(r2 - r4)/420 - r33*x2*(r2 - r34)/420 + x0**3*(r31 + 21*y0 + y2)/84 - x0*(-r15*r38 + r18*r38 + r2*r9 - r35*y2 + r39*y0 - r40 - r41 + r6*r9)/420 - y0*(r28 + 5*r29 + r32 + r35*x2)/420
- self.momentXY += r13*(r14*y2 + 3*r22 + 105*r25 + r42*y0 + r43 + 12*r47)/840 - r17*x2*(r44 - r45)/840 - r22*r9/8 - r25*(r39 + r46 + 3*r9)/840 + r33*y2*(r10 + r34)/840 - r42*r9*y2/840 - r43*r9/840 + x0*(-r10*r18 + r17*r26 + r19 + r22*r49 - r25*r37 - r27*x2 + r38*r47 + r48)/420 - y0*(r15*r17 + r31*r9 + r40 + r41 + r46*y1)/420
- self.momentYY += r1*(r11*y1 + r44 + r45)/420 - r15*r43/420 - r23*r30/420 - r25*(r1 + r36 + r53*x2)/420 - r50*x2/420 - r51*x2/12 - r52*(r49 + x2)/84 + x0*(r22*r53 + r22*r6 + r25*r30 + r25*r34 + r26*r54 + r43*y0 + r50 + 5*r51 + 35*r52 + r55*y2)/420 - y0*(-r0*r22 + r15*r54 + r48 + r55*x2)/420
+ self.area += -r1/6 - r3/6 + x0*(r0 + r5 + y2)/6 + x1*y2/3 - y0*(r4 + x2)/6
+ self.momentX += -r11*(-r10 + y1)/30 + r12*(r13 + r8 + y2)/30 + r6*y2/15 - r7*r8/30 - r7*r9/30 + x0*(r14 - r15 - r16*y0 + r17)/30 - y0*(r11 + 2*r6 + r7)/30
+ self.momentY += -r18/30 - r20*x2/30 - r23/30 - r24*(r16 + x2)/30 + x0*(r0*y2 + r20 + r21 + r25 + r26 + r8*y0)/30 + x1*y2*(r10 + y1)/15 - y0*(r1 + r17)/30
+ self.momentXX += r12*(r1 - 5*r15 - r34*y0 + r36 + r9*x1)/420 + 2*r27*y2/105 - r28*r29/420 - r28*y2/4 - r31*(r0 - 3*y2)/420 - r6*x2*(r0 - r32)/105 + x0**3*(r30 + 21*y0 + y2)/84 - x0*(r0*r7 + r15*r37 - r2*r37 - r33*y2 + r38*y0 - r39 - r40 + r5*r7)/420 - y0*(8*r27 + 5*r28 + r31 + r33*x2)/420
+ self.momentXY += r12*(r13*y2 + 3*r21 + 105*r24 + r41*y0 + r42 + r46*y1)/840 - r16*x2*(r43 - r44)/840 - r21*r7/8 - r24*(r38 + r45*x1 + 3*r7)/840 - r41*r7*y2/840 - r42*r7/840 + r6*y2*(r32 + r8)/210 + x0*(-r15*r8 + r16*r25 + r18 + r21*r47 - r24*r34 - r26*x2 + r35*r46 + r48)/420 - y0*(r16*r2 + r30*r7 + r35*r45 + r39 + r40)/420
+ self.momentYY += -r2*r42/420 - r22*r29/420 - r24*(r14 + r36 + r52*x2)/420 - r49*x2/420 - r50*x2/12 - r51*(r47 + x2)/84 + x0*(r19*r46 + r21*r5 + r21*r52 + r24*r29 + r25*r53 + r26*y2 + r42*y0 + r49 + 5*r50 + 35*r51)/420 + x1*y2*(r43 + r44 + r9*y1)/210 - y0*(r19*r45 + r2*r53 - r21*r4 + r48)/420
+ @cython.locals(r0=cython.double)
+ @cython.locals(r1=cython.double)
+ @cython.locals(r2=cython.double)
+ @cython.locals(r3=cython.double)
+ @cython.locals(r4=cython.double)
+ @cython.locals(r5=cython.double)
+ @cython.locals(r6=cython.double)
+ @cython.locals(r7=cython.double)
+ @cython.locals(r8=cython.double)
+ @cython.locals(r9=cython.double)
+ @cython.locals(r10=cython.double)
+ @cython.locals(r11=cython.double)
+ @cython.locals(r12=cython.double)
+ @cython.locals(r13=cython.double)
+ @cython.locals(r14=cython.double)
+ @cython.locals(r15=cython.double)
+ @cython.locals(r16=cython.double)
+ @cython.locals(r17=cython.double)
+ @cython.locals(r18=cython.double)
+ @cython.locals(r19=cython.double)
+ @cython.locals(r20=cython.double)
+ @cython.locals(r21=cython.double)
+ @cython.locals(r22=cython.double)
+ @cython.locals(r23=cython.double)
+ @cython.locals(r24=cython.double)
+ @cython.locals(r25=cython.double)
+ @cython.locals(r26=cython.double)
+ @cython.locals(r27=cython.double)
+ @cython.locals(r28=cython.double)
+ @cython.locals(r29=cython.double)
+ @cython.locals(r30=cython.double)
+ @cython.locals(r31=cython.double)
+ @cython.locals(r32=cython.double)
+ @cython.locals(r33=cython.double)
+ @cython.locals(r34=cython.double)
+ @cython.locals(r35=cython.double)
+ @cython.locals(r36=cython.double)
+ @cython.locals(r37=cython.double)
+ @cython.locals(r38=cython.double)
+ @cython.locals(r39=cython.double)
+ @cython.locals(r40=cython.double)
+ @cython.locals(r41=cython.double)
+ @cython.locals(r42=cython.double)
+ @cython.locals(r43=cython.double)
+ @cython.locals(r44=cython.double)
+ @cython.locals(r45=cython.double)
+ @cython.locals(r46=cython.double)
+ @cython.locals(r47=cython.double)
+ @cython.locals(r48=cython.double)
+ @cython.locals(r49=cython.double)
+ @cython.locals(r50=cython.double)
+ @cython.locals(r51=cython.double)
+ @cython.locals(r52=cython.double)
+ @cython.locals(r53=cython.double)
+ @cython.locals(r54=cython.double)
+ @cython.locals(r55=cython.double)
+ @cython.locals(r56=cython.double)
+ @cython.locals(r57=cython.double)
+ @cython.locals(r58=cython.double)
+ @cython.locals(r59=cython.double)
+ @cython.locals(r60=cython.double)
+ @cython.locals(r61=cython.double)
+ @cython.locals(r62=cython.double)
+ @cython.locals(r63=cython.double)
+ @cython.locals(r64=cython.double)
+ @cython.locals(r65=cython.double)
+ @cython.locals(r66=cython.double)
+ @cython.locals(r67=cython.double)
+ @cython.locals(r68=cython.double)
+ @cython.locals(r69=cython.double)
+ @cython.locals(r70=cython.double)
+ @cython.locals(r71=cython.double)
+ @cython.locals(r72=cython.double)
+ @cython.locals(r73=cython.double)
+ @cython.locals(r74=cython.double)
+ @cython.locals(r75=cython.double)
+ @cython.locals(r76=cython.double)
+ @cython.locals(r77=cython.double)
+ @cython.locals(r78=cython.double)
+ @cython.locals(r79=cython.double)
+ @cython.locals(r80=cython.double)
+ @cython.locals(r81=cython.double)
+ @cython.locals(r82=cython.double)
+ @cython.locals(r83=cython.double)
+ @cython.locals(r84=cython.double)
+ @cython.locals(r85=cython.double)
+ @cython.locals(r86=cython.double)
+ @cython.locals(r87=cython.double)
+ @cython.locals(r88=cython.double)
+ @cython.locals(r89=cython.double)
+ @cython.locals(r90=cython.double)
+ @cython.locals(r91=cython.double)
+ @cython.locals(r92=cython.double)
+ @cython.locals(r93=cython.double)
+ @cython.locals(r94=cython.double)
+ @cython.locals(r95=cython.double)
+ @cython.locals(r96=cython.double)
+ @cython.locals(r97=cython.double)
+ @cython.locals(r98=cython.double)
+ @cython.locals(r99=cython.double)
+ @cython.locals(r100=cython.double)
+ @cython.locals(r101=cython.double)
+ @cython.locals(r102=cython.double)
+ @cython.locals(r103=cython.double)
+ @cython.locals(r104=cython.double)
+ @cython.locals(r105=cython.double)
+ @cython.locals(r106=cython.double)
+ @cython.locals(r107=cython.double)
+ @cython.locals(r108=cython.double)
+ @cython.locals(r109=cython.double)
+ @cython.locals(r110=cython.double)
+ @cython.locals(r111=cython.double)
+ @cython.locals(r112=cython.double)
+ @cython.locals(r113=cython.double)
+ @cython.locals(r114=cython.double)
+ @cython.locals(r115=cython.double)
+ @cython.locals(r116=cython.double)
+ @cython.locals(r117=cython.double)
+ @cython.locals(r118=cython.double)
+ @cython.locals(r119=cython.double)
+ @cython.locals(r120=cython.double)
+ @cython.locals(r121=cython.double)
+ @cython.locals(r122=cython.double)
+ @cython.locals(r123=cython.double)
+ @cython.locals(r124=cython.double)
+ @cython.locals(r125=cython.double)
+ @cython.locals(r126=cython.double)
+ @cython.locals(r127=cython.double)
+ @cython.locals(r128=cython.double)
+ @cython.locals(r129=cython.double)
+ @cython.locals(r130=cython.double)
+ @cython.locals(r131=cython.double)
+ @cython.locals(r132=cython.double)
+ @cython.locals(x0=cython.double, y0=cython.double)
+ @cython.locals(x1=cython.double, y1=cython.double)
+ @cython.locals(x2=cython.double, y2=cython.double)
+ @cython.locals(x3=cython.double, y3=cython.double)
def _curveToOne(self, p1, p2, p3):
x0,y0 = self._getCurrentPoint()
x1,y1 = p1
x2,y2 = p2
x3,y3 = p3
- r0 = 6*x2
- r1 = r0*y3
- r2 = 6*y2
- r3 = 10*y3
- r4 = r3*x3
- r5 = 3*x1
- r6 = 3*y1
- r7 = 6*x1
- r8 = 3*x2
- r9 = 6*y1
- r10 = 3*y2
- r11 = x2**2
- r12 = r11*y3
- r13 = 45*r12
- r14 = x3**2
- r15 = r14*y2
- r16 = r14*y3
- r17 = x2*x3
- r18 = 15*r17
- r19 = 7*y3
- r20 = x1**2
- r21 = 9*r20
- r22 = x0**2
- r23 = 21*y1
- r24 = 9*r11
- r25 = 9*x2
- r26 = x2*y3
- r27 = 15*r26
- r28 = -r25*y1 + r27
- r29 = r25*y2
- r30 = r9*x3
- r31 = 45*x1
- r32 = x1*x3
- r33 = 45*r20
- r34 = 5*r14
- r35 = x2*y2
- r36 = 18*r35
- r37 = 5*x3
- r38 = r37*y3
- r39 = r31*y1 + r36 + r38
- r40 = x1*y0
- r41 = x1*y3
- r42 = x2*y0
- r43 = x3*y1
- r44 = r10*x3
- r45 = x3*y2*y3
- r46 = y2**2
- r47 = 45*r46
- r48 = r47*x3
- r49 = y3**2
+ r0 = 6*y2
+ r1 = r0*x3
+ r2 = 10*y3
+ r3 = r2*x3
+ r4 = 3*y1
+ r5 = 6*x1
+ r6 = 3*x2
+ r7 = 6*y1
+ r8 = 3*y2
+ r9 = x2**2
+ r10 = 45*r9
+ r11 = r10*y3
+ r12 = x3**2
+ r13 = r12*y2
+ r14 = r12*y3
+ r15 = 7*y3
+ r16 = 15*x3
+ r17 = r16*x2
+ r18 = x1**2
+ r19 = 9*r18
+ r20 = x0**2
+ r21 = 21*y1
+ r22 = 9*r9
+ r23 = r7*x3
+ r24 = 9*y2
+ r25 = r24*x2 + r3
+ r26 = 9*x2
+ r27 = x2*y3
+ r28 = -r26*y1 + 15*r27
+ r29 = 3*x1
+ r30 = 45*x1
+ r31 = 12*x3
+ r32 = 45*r18
+ r33 = 5*r12
+ r34 = r8*x3
+ r35 = 105*y0
+ r36 = 30*y0
+ r37 = r36*x2
+ r38 = 5*x3
+ r39 = 15*y3
+ r40 = 5*y3
+ r41 = r40*x3
+ r42 = x2*y2
+ r43 = 18*r42
+ r44 = 45*y1
+ r45 = r41 + r43 + r44*x1
+ r46 = y2*y3
+ r47 = r46*x3
+ r48 = y2**2
+ r49 = 45*r48
r50 = r49*x3
- r51 = y1**2
- r52 = 9*r51
- r53 = y0**2
- r54 = 21*x1
- r55 = x3*y2
- r56 = 15*r55
- r57 = 9*y2
- r58 = y2*y3
- r59 = 15*r58
- r60 = 9*r46
- r61 = 3*y3
- r62 = 45*y1
- r63 = r8*y3
- r64 = y0*y1
- r65 = y0*y2
- r66 = 30*r65
- r67 = 5*y3
- r68 = y1*y3
- r69 = 45*r51
- r70 = 5*r49
- r71 = x2**3
- r72 = x3**3
- r73 = 126*x3
- r74 = x1**3
- r75 = r14*x2
- r76 = 63*r11
- r77 = r76*x3
- r78 = 15*r35
- r79 = r19*x3
- r80 = x1*y1
- r81 = 63*r35
- r82 = r38 + 378*r80 + r81
- r83 = x1*y2
- r84 = x2*y1
- r85 = x3*y0
- r86 = x2*x3*y1
- r87 = x2*x3*y3
- r88 = r11*y2
- r89 = 27*r88
- r90 = 42*y3
- r91 = r14*r90
- r92 = 90*x1*x2
- r93 = 189*x2
- r94 = 30*x1*x3
- r95 = 14*r16 + 126*r20*y1 + 45*r88 + r94*y2
- r96 = x1*x2
- r97 = 252*r96
- r98 = x1*x2*y2
- r99 = 42*r32
- r100 = x1*x3*y1
- r101 = 30*r17
- r102 = 18*r17
- r103 = 378*r20
- r104 = 189*y2
- r105 = r20*y3
- r106 = r11*y1
- r107 = r14*y1
- r108 = 378*r46
- r109 = 252*y2
- r110 = y1*y2
- r111 = x2*x3*y2
- r112 = y0*y3
- r113 = 378*r51
- r114 = 63*r46
- r115 = 27*x2
- r116 = r115*r46 + 42*r50
- r117 = x2*y1*y3
- r118 = x3*y1*y2
- r119 = r49*x2
- r120 = r51*x3
- r121 = x3*y3
- r122 = 14*x3
- r123 = 30*r117 + r122*r49 + r47*x2 + 126*r51*x1
- r124 = x1*y1*y3
- r125 = x1*y2*y3
- r126 = x2*y1*y2
- r127 = 54*y3
- r128 = 21*r55
- r129 = 630*r53
- r130 = r46*x1
- r131 = r49*x1
- r132 = 126*r53
- r133 = y2**3
- r134 = y3**3
- r135 = 630*r49
- r136 = y1**3
- r137 = y0**3
- r138 = r114*y3 + r23*r49
- r139 = r49*y2
+ r51 = y3**2
+ r52 = r51*x3
+ r53 = y1**2
+ r54 = 9*r53
+ r55 = y0**2
+ r56 = 21*x1
+ r57 = 6*x2
+ r58 = r16*y2
+ r59 = r39*y2
+ r60 = 9*r48
+ r61 = r6*y3
+ r62 = 3*y3
+ r63 = r36*y2
+ r64 = y1*y3
+ r65 = 45*r53
+ r66 = 5*r51
+ r67 = x2**3
+ r68 = x3**3
+ r69 = 630*y2
+ r70 = 126*x3
+ r71 = x1**3
+ r72 = 126*x2
+ r73 = 63*r9
+ r74 = r73*x3
+ r75 = r15*x3 + 15*r42
+ r76 = 630*x1
+ r77 = 14*x3
+ r78 = 21*r27
+ r79 = 42*x1
+ r80 = 42*x2
+ r81 = x1*y2
+ r82 = 63*r42
+ r83 = x1*y1
+ r84 = r41 + r82 + 378*r83
+ r85 = x2*x3
+ r86 = r85*y1
+ r87 = r27*x3
+ r88 = 27*r9
+ r89 = r88*y2
+ r90 = 42*r14
+ r91 = 90*x1
+ r92 = 189*r18
+ r93 = 378*r18
+ r94 = r12*y1
+ r95 = 252*x1*x2
+ r96 = r79*x3
+ r97 = 30*r85
+ r98 = r83*x3
+ r99 = 30*x3
+ r100 = 42*x3
+ r101 = r42*x1
+ r102 = r10*y2 + 14*r14 + 126*r18*y1 + r81*r99
+ r103 = 378*r48
+ r104 = 18*y1
+ r105 = r104*y2
+ r106 = y0*y1
+ r107 = 252*y2
+ r108 = r107*y0
+ r109 = y0*y3
+ r110 = 42*r64
+ r111 = 378*r53
+ r112 = 63*r48
+ r113 = 27*x2
+ r114 = r27*y2
+ r115 = r113*r48 + 42*r52
+ r116 = x3*y3
+ r117 = 54*r42
+ r118 = r51*x1
+ r119 = r51*x2
+ r120 = r48*x1
+ r121 = 21*x3
+ r122 = r64*x1
+ r123 = r81*y3
+ r124 = 30*r27*y1 + r49*x2 + 14*r52 + 126*r53*x1
+ r125 = y2**3
+ r126 = y3**3
+ r127 = y1**3
+ r128 = y0**3
+ r129 = r51*y2
+ r130 = r112*y3 + r21*r51
+ r131 = 189*r53
+ r132 = 90*y2
- self.area += r1/20 - r2*x3/20 - r4/20 + r5*(y2 + y3)/20 - r6*(x2 + x3)/20 + x0*(r10 + r9 + 10*y0 + y3)/20 - y0*(r7 + r8 + x3)/20
- self.momentX += r13/840 - r15/8 - r16/3 - r18*(r10 - r19)/840 + r21*(r10 + 2*y3)/840 + r22*(r2 + r23 + 56*y0 + y3)/168 + r5*(r28 + r29 - r30 + r4)/840 - r6*(10*r14 + r18 + r24)/840 + x0*(12*r26 + r31*y2 - r37*y0 + r39 - 105*r40 + 15*r41 - 30*r42 - 3*r43 + r44)/840 - y0*(18*r11 + r18 + r31*x2 + 12*r32 + r33 + r34)/840
- self.momentY += r27*(r10 + r19)/840 - r45/8 - r48/840 + r5*(10*r49 + r57*y1 + r59 + r60 + r9*y3)/840 - r50/6 - r52*(r8 + 2*x3)/840 - r53*(r0 + r54 + x3)/168 - r6*(r29 + r4 + r56)/840 + x0*(18*r46 + 140*r53 + r59 + r62*y2 + 105*r64 + r66 + r67*y0 + 12*r68 + r69 + r70)/840 - y0*(r39 + 15*r43 + 12*r55 - r61*x1 + r62*x2 + r63)/840
- self.momentXX += -r11*r73*(-r61 + y2)/9240 + r21*(r28 - r37*y1 + r44 + r78 + r79)/9240 + r22*(21*r26 - 630*r40 + 42*r41 - 126*r42 + r57*x3 + r82 + 210*r83 + 42*r84 - 14*r85)/9240 - r5*(r11*r62 + r14*r23 + 14*r15 - r76*y3 + 54*r86 - 84*r87 - r89 - r91)/9240 - r6*(27*r71 + 42*r72 + 70*r75 + r77)/9240 + 3*r71*y3/220 - 3*r72*y2/44 - r72*y3/4 + 3*r74*(r57 + r67)/3080 - r75*(378*y2 - 630*y3)/9240 + x0**3*(r57 + r62 + 165*y0 + y3)/660 + x0*(-18*r100 - r101*y0 - r101*y1 + r102*y2 - r103*y0 + r104*r20 + 63*r105 - 27*r106 - 9*r107 + r13 - r34*y0 - r76*y0 + 42*r87 + r92*y3 + r94*y3 + r95 - r97*y0 + 162*r98 - r99*y0)/9240 - y0*(135*r11*x1 + r14*r54 + r20*r93 + r33*x3 + 45*r71 + 14*r72 + 126*r74 + 42*r75 + r77 + r92*x3)/9240
- self.momentXY += -r108*r14/18480 + r12*(r109 + 378*y3)/18480 - r14*r49/8 - 3*r14*r58/44 - r17*(252*r46 - 1260*r49)/18480 + r21*(18*r110 + r3*y1 + 15*r46 + 7*r49 + 18*r58)/18480 + r22*(252*r110 + 28*r112 + r113 + r114 + 2310*r53 + 30*r58 + 1260*r64 + 252*r65 + 42*r68 + r70)/18480 - r52*(r102 + 15*r11 + 7*r14)/18480 - r53*(r101 + r103 + r34 + r76 + r97 + r99)/18480 + r7*(-r115*r51 + r116 + 18*r117 - 18*r118 + 42*r119 - 15*r120 + 28*r45 + r81*y3)/18480 - r9*(63*r111 + 42*r15 + 28*r87 + r89 + r91)/18480 + x0*(r1*y0 + r104*r80 + r112*r54 + 21*r119 - 9*r120 - r122*r53 + r123 + 54*r124 + 60*r125 + 54*r126 + r127*r35 + r128*y3 - r129*x1 + 81*r130 + 15*r131 - r132*x2 - r2*r85 - r23*r85 + r30*y3 + 84*r40*y2 - 84*r42*y1 + r60*x3)/9240 - y0*(54*r100 - 9*r105 + 81*r106 + 15*r107 + 54*r111 + r121*r7 + 21*r15 + r24*y3 + 60*r86 + 21*r87 + r95 + 189*r96*y1 + 54*r98)/9240
- self.momentYY += -r108*r121/9240 - r133*r73/9240 - r134*x3/12 - r135*r55/9240 - 3*r136*(r25 + r37)/3080 - r137*(r25 + r31 + x3)/660 + r26*(r135 + 126*r46 + 378*y2*y3)/9240 + r5*(r110*r127 + 27*r133 + 42*r134 + r138 + 70*r139 + r46*r62 + 27*r51*y2 + 15*r51*y3)/9240 - r52*(r56 + r63 + r78 + r79)/9240 - r53*(r128 + r25*y3 + 42*r43 + r82 + 42*r83 + 210*r84)/9240 - r6*(r114*x3 + r116 - 14*r119 + 84*r45)/9240 + x0*(r104*r51 + r109*r64 + 90*r110*y3 + r113*y0 + r114*y0 + r129*y1 + r132*y2 + 45*r133 + 14*r134 + 126*r136 + 770*r137 + r138 + 42*r139 + 135*r46*y1 + 14*r53*y3 + r64*r90 + r66*y3 + r69*y3 + r70*y0)/9240 - y0*(90*r118 + 63*r120 + r123 - 18*r124 - 30*r125 + 162*r126 - 27*r130 - 9*r131 + r36*y3 + 30*r43*y3 + 42*r45 + r48 + r51*r93)/9240
+ self.area += -r1/20 - r3/20 - r4*(x2 + x3)/20 + x0*(r7 + r8 + 10*y0 + y3)/20 + 3*x1*(y2 + y3)/20 + 3*x2*y3/10 - y0*(r5 + r6 + x3)/20
+ self.momentX += r11/840 - r13/8 - r14/3 - r17*(-r15 + r8)/840 + r19*(r8 + 2*y3)/840 + r20*(r0 + r21 + 56*y0 + y3)/168 + r29*(-r23 + r25 + r28)/840 - r4*(10*r12 + r17 + r22)/840 + x0*(12*r27 + r30*y2 + r34 - r35*x1 - r37 - r38*y0 + r39*x1 - r4*x3 + r45)/840 - y0*(r17 + r30*x2 + r31*x1 + r32 + r33 + 18*r9)/840
+ self.momentY += -r4*(r25 + r58)/840 - r47/8 - r50/840 - r52/6 - r54*(r6 + 2*x3)/840 - r55*(r56 + r57 + x3)/168 + x0*(r35*y1 + r40*y0 + r44*y2 + 18*r48 + 140*r55 + r59 + r63 + 12*r64 + r65 + r66)/840 + x1*(r24*y1 + 10*r51 + r59 + r60 + r7*y3)/280 + x2*y3*(r15 + r8)/56 - y0*(r16*y1 + r31*y2 + r44*x2 + r45 + r61 - r62*x1)/840
+ self.momentXX += -r12*r72*(-r40 + r8)/9240 + 3*r18*(r28 + r34 - r38*y1 + r75)/3080 + r20*(r24*x3 - r72*y0 - r76*y0 - r77*y0 + r78 + r79*y3 + r80*y1 + 210*r81 + r84)/9240 - r29*(r12*r21 + 14*r13 + r44*r9 - r73*y3 + 54*r86 - 84*r87 - r89 - r90)/9240 - r4*(70*r12*x2 + 27*r67 + 42*r68 + r74)/9240 + 3*r67*y3/220 - r68*r69/9240 - r68*y3/4 - r70*r9*(-r62 + y2)/9240 + 3*r71*(r24 + r40)/3080 + x0**3*(r24 + r44 + 165*y0 + y3)/660 + x0*(r100*r27 + 162*r101 + r102 + r11 + 63*r18*y3 + r27*r91 - r33*y0 - r37*x3 + r43*x3 - r73*y0 - r88*y1 + r92*y2 - r93*y0 - 9*r94 - r95*y0 - r96*y0 - r97*y1 - 18*r98 + r99*x1*y3)/9240 - y0*(r12*r56 + r12*r80 + r32*x3 + 45*r67 + 14*r68 + 126*r71 + r74 + r85*r91 + 135*r9*x1 + r92*x2)/9240
+ self.momentXY += -r103*r12/18480 - r12*r51/8 - 3*r14*y2/44 + 3*r18*(r105 + r2*y1 + 18*r46 + 15*r48 + 7*r51)/6160 + r20*(1260*r106 + r107*y1 + r108 + 28*r109 + r110 + r111 + r112 + 30*r46 + 2310*r55 + r66)/18480 - r54*(7*r12 + 18*r85 + 15*r9)/18480 - r55*(r33 + r73 + r93 + r95 + r96 + r97)/18480 - r7*(42*r13 + r82*x3 + 28*r87 + r89 + r90)/18480 - 3*r85*(r48 - r66)/220 + 3*r9*y3*(r62 + 2*y2)/440 + x0*(-r1*y0 - 84*r106*x2 + r109*r56 + 54*r114 + r117*y1 + 15*r118 + 21*r119 + 81*r120 + r121*r46 + 54*r122 + 60*r123 + r124 - r21*x3*y0 + r23*y3 - r54*x3 - r55*r72 - r55*r76 - r55*r77 + r57*y0*y3 + r60*x3 + 84*r81*y0 + 189*r81*y1)/9240 + x1*(r104*r27 - r105*x3 - r113*r53 + 63*r114 + r115 - r16*r53 + 28*r47 + r51*r80)/3080 - y0*(54*r101 + r102 + r116*r5 + r117*x3 + 21*r13 - r19*y3 + r22*y3 + r78*x3 + 189*r83*x2 + 60*r86 + 81*r9*y1 + 15*r94 + 54*r98)/9240
+ self.momentYY += -r103*r116/9240 - r125*r70/9240 - r126*x3/12 - 3*r127*(r26 + r38)/3080 - r128*(r26 + r30 + x3)/660 - r4*(r112*x3 + r115 - 14*r119 + 84*r47)/9240 - r52*r69/9240 - r54*(r58 + r61 + r75)/9240 - r55*(r100*y1 + r121*y2 + r26*y3 + r79*y2 + r84 + 210*x2*y1)/9240 + x0*(r108*y1 + r110*y0 + r111*y0 + r112*y0 + 45*r125 + 14*r126 + 126*r127 + 770*r128 + 42*r129 + r130 + r131*y2 + r132*r64 + 135*r48*y1 + 630*r55*y1 + 126*r55*y2 + 14*r55*y3 + r63*y3 + r65*y3 + r66*y0)/9240 + x1*(27*r125 + 42*r126 + 70*r129 + r130 + r39*r53 + r44*r48 + 27*r53*y2 + 54*r64*y2)/3080 + 3*x2*y3*(r48 + r66 + r8*y3)/220 - y0*(r100*r46 + 18*r114 - 9*r118 - 27*r120 - 18*r122 - 30*r123 + r124 + r131*x2 + r132*x3*y1 + 162*r42*y1 + r50 + 63*r53*x3 + r64*r99)/9240
if __name__ == '__main__':
from fontTools.misc.symfont import x, y, printGreenPen
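The regenerated expressions above come from symfont's Green-theorem machinery: as the pen traces a closed contour it accumulates the signed area and the first and second moments of the enclosed region. A minimal sketch of driving the pen directly (the square contour is purely illustrative; the sign of the results follows the contour direction):

    from fontTools.pens.momentsPen import MomentsPen

    pen = MomentsPen()
    # trace a closed 10x10 square; the magnitude of pen.area should be 100
    pen.moveTo((0, 0))
    pen.lineTo((10, 0))
    pen.lineTo((10, 10))
    pen.lineTo((0, 10))
    pen.lineTo((0, 0))
    pen.closePath()
    print(pen.area, pen.momentX, pen.momentY, pen.momentXX, pen.momentXY, pen.momentYY)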
diff --git a/Lib/fontTools/pens/qtPen.py b/Lib/fontTools/pens/qtPen.py
index 34736453..d08a344f 100644
--- a/Lib/fontTools/pens/qtPen.py
+++ b/Lib/fontTools/pens/qtPen.py
@@ -20,10 +20,10 @@ class QtPen(BasePen):
self.path.lineTo(*p)
def _curveToOne(self, p1, p2, p3):
- self.path.cubicTo(*p1+p2+p3)
+ self.path.cubicTo(*p1, *p2, *p3)
def _qCurveToOne(self, p1, p2):
- self.path.quadTo(*p1+p2)
+ self.path.quadTo(*p1, *p2)
def _closePath(self):
self.path.closeSubpath()
diff --git a/Lib/fontTools/pens/statisticsPen.py b/Lib/fontTools/pens/statisticsPen.py
index abd6ff5e..15830672 100644
--- a/Lib/fontTools/pens/statisticsPen.py
+++ b/Lib/fontTools/pens/statisticsPen.py
@@ -61,10 +61,13 @@ class StatisticsPen(MomentsPen):
# Correlation(X,Y) = Covariance(X,Y) / ( stddev(X) * stddev(Y) )
# https://en.wikipedia.org/wiki/Pearson_product-moment_correlation_coefficient
- correlation = covariance / (stddevX * stddevY)
+ if stddevX * stddevY == 0:
+ correlation = float("NaN")
+ else:
+ correlation = covariance / (stddevX * stddevY)
self.correlation = correlation if abs(correlation) > 1e-3 else 0
- slant = covariance / varianceY
+ slant = covariance / varianceY if varianceY != 0 else float("NaN")
self.slant = slant if abs(slant) > 1e-3 else 0
@@ -82,17 +85,16 @@ def _test(glyphset, upem, glyphs):
transformer = TransformPen(pen, Scale(1./upem))
glyph.draw(transformer)
for item in ['area', 'momentX', 'momentY', 'momentXX', 'momentYY', 'momentXY', 'meanX', 'meanY', 'varianceX', 'varianceY', 'stddevX', 'stddevY', 'covariance', 'correlation', 'slant']:
- if item[0] == '_': continue
print ("%s: %g" % (item, getattr(pen, item)))
def main(args):
if not args:
return
filename, glyphs = args[0], args[1:]
- if not glyphs:
- glyphs = ['e', 'o', 'I', 'slash', 'E', 'zero', 'eight', 'minus', 'equal']
from fontTools.ttLib import TTFont
font = TTFont(filename)
+ if not glyphs:
+ glyphs = font.getGlyphOrder()
_test(font.getGlyphSet(), font['head'].unitsPerEm, glyphs)
if __name__ == '__main__':
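The guards above matter for degenerate outlines, where a zero variance used to raise ZeroDivisionError; slant and correlation now degrade to NaN (or 0 after thresholding) instead. A minimal usage sketch, with the font path and glyph name as placeholder assumptions:

    from fontTools.ttLib import TTFont
    from fontTools.pens.statisticsPen import StatisticsPen

    font = TTFont("MyFont.ttf")        # placeholder path
    glyphset = font.getGlyphSet()
    pen = StatisticsPen(glyphset)
    glyphset["o"].draw(pen)            # placeholder glyph name
    # slant is covariance/varianceY; correlation is covariance/(stddevX*stddevY)
    print(pen.meanX, pen.meanY, pen.slant, pen.correlation)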
diff --git a/Lib/fontTools/pens/svgPathPen.py b/Lib/fontTools/pens/svgPathPen.py
index e92737e3..106e33b7 100644
--- a/Lib/fontTools/pens/svgPathPen.py
+++ b/Lib/fontTools/pens/svgPathPen.py
@@ -23,6 +23,18 @@ class SVGPathPen(BasePen):
used to resolve component references in composite glyphs.
ntos: a callable that takes a number and returns a string, to
customize how numbers are formatted (default: str).
+
+ Note:
+ Fonts have a coordinate system where Y grows up, whereas in SVG,
+ Y grows down. As such, rendering path data from this pen in
+ SVG typically results in upside-down glyphs. You can fix this
+ by wrapping the data from this pen in an SVG group element with
+ transform, or by wrapping this pen in a TransformPen. For example:
+
+ spen = svgPathPen.SVGPathPen(glyphset)
+ pen = TransformPen(spen, (1, 0, 0, -1, 0, 0))
+ glyphset[glyphname].draw(pen)
+ print(spen.getCommands())
"""
def __init__(self, glyphSet, ntos: Callable[[float], str] = str):
BasePen.__init__(self, glyphSet)
@@ -193,7 +205,70 @@ class SVGPathPen(BasePen):
return "".join(self._commands)
+def main(args=None):
+ """Generate per-character SVG from font and text"""
+
+ if args is None:
+ import sys
+ args = sys.argv[1:]
+
+ from fontTools.ttLib import TTFont
+ import argparse
+
+ parser = argparse.ArgumentParser(
+ "fonttools pens.svgPathPen", description="Generate SVG from text")
+ parser.add_argument(
+ "font", metavar="font.ttf", help="Font file.")
+ parser.add_argument(
+ "text", metavar="text", help="Text string.")
+ parser.add_argument(
+ "--variations", metavar="AXIS=LOC", default='',
+ help="List of space separated locations. A location consist in "
+ "the name of a variation axis, followed by '=' and a number. E.g.: "
+ "wght=700 wdth=80. The default is the location of the base master.")
+
+ options = parser.parse_args(args)
+
+ font = TTFont(options.font)
+ text = options.text
+
+ location = {}
+ for tag_v in options.variations.split():
+ fields = tag_v.split('=')
+ tag = fields[0].strip()
+ v = int(fields[1])
+ location[tag] = v
+
+ hhea = font['hhea']
+ ascent, descent = hhea.ascent, hhea.descent
+
+ glyphset = font.getGlyphSet(location=location)
+ cmap = font['cmap'].getBestCmap()
+
+ s = ''
+ width = 0
+ for u in text:
+ g = cmap[ord(u)]
+ glyph = glyphset[g]
+
+ pen = SVGPathPen(glyphset)
+ glyph.draw(pen)
+ commands = pen.getCommands()
+
+ s += '<g transform="translate(%d %d) scale(1 -1)"><path d="%s"/></g>\n' % (width, ascent, commands)
+
+ width += glyph.width
+
+ print('<?xml version="1.0" encoding="UTF-8"?>')
+ print('<svg width="%d" height="%d" xmlns="http://www.w3.org/2000/svg">' % (width, ascent-descent))
+ print(s, end='')
+ print('</svg>')
+
+
if __name__ == "__main__":
import sys
- import doctest
- sys.exit(doctest.testmod().failed)
+ if len(sys.argv) == 1:
+ import doctest
+ sys.exit(doctest.testmod().failed)
+
+ sys.exit(main())
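For library use, the same flip trick that main() applies per character can be written in a few lines; this sketch assumes a placeholder font path and glyph name:

    from fontTools.ttLib import TTFont
    from fontTools.pens.svgPathPen import SVGPathPen

    font = TTFont("MyFont.ttf")        # placeholder path
    glyphset = font.getGlyphSet()
    pen = SVGPathPen(glyphset)
    glyphset["A"].draw(pen)            # placeholder glyph name
    d = pen.getCommands()
    ascent = font["hhea"].ascent
    # translate to the baseline and mirror Y, as in main() above
    print('<svg xmlns="http://www.w3.org/2000/svg">'
          f'<g transform="translate(0 {ascent}) scale(1 -1)"><path d="{d}"/></g>'
          '</svg>')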
diff --git a/Lib/fontTools/subset/__init__.py b/Lib/fontTools/subset/__init__.py
index 56d9c0ef..b58e6162 100644
--- a/Lib/fontTools/subset/__init__.py
+++ b/Lib/fontTools/subset/__init__.py
@@ -10,9 +10,11 @@ from fontTools.ttLib.tables.otBase import USE_HARFBUZZ_REPACKER
from fontTools.otlLib.maxContextCalc import maxCtxFont
from fontTools.pens.basePen import NullPen
from fontTools.misc.loggingTools import Timer
+from fontTools.misc.cliTools import makeOutputFileName
from fontTools.subset.util import _add_method, _uniq_sort
from fontTools.subset.cff import *
from fontTools.subset.svg import *
+from fontTools.varLib import varStore # for subset_varidxes
import sys
import struct
import array
@@ -636,10 +638,16 @@ def prune_post_subset(self, font, options):
self.Value.prune_hints()
self.ValueFormat = self.Value.getEffectiveFormat()
elif self.Format == 2:
- if not options.hinting:
- for v in self.Value:
- v.prune_hints()
- self.ValueFormat = reduce(int.__or__, [v.getEffectiveFormat() for v in self.Value], 0)
+ if None in self.Value:
+ assert self.ValueFormat == 0
+ assert all(v is None for v in self.Value)
+ else:
+ if not options.hinting:
+ for v in self.Value:
+ v.prune_hints()
+ self.ValueFormat = reduce(
+ int.__or__, [v.getEffectiveFormat() for v in self.Value], 0
+ )
# Downgrade to Format 1 if all ValueRecords are the same
if self.Format == 2 and all(v == self.Value[0] for v in self.Value):
@@ -2608,6 +2616,9 @@ class Options(object):
'vertical': ['valt', 'vert', 'vkrn', 'vpal', 'vrt2'],
'ltr': ['ltra', 'ltrm'],
'rtl': ['rtla', 'rtlm'],
+ 'rand': ['rand'],
+ 'justify': ['jalt'],
+ 'private': ['Harf', 'HARF', 'Buzz', 'BUZZ'],
# Complex shapers
'arabic': ['init', 'medi', 'fina', 'isol', 'med2', 'fin2', 'fin3',
'cswh', 'mset', 'stch'],
@@ -3180,12 +3191,7 @@ def main(args=None):
font = load_font(fontfile, options, dontLoadGlyphNames=dontLoadGlyphNames)
if outfile is None:
- basename, _ = splitext(fontfile)
- if options.flavor is not None:
- ext = "." + options.flavor.lower()
- else:
- ext = ".ttf" if font.sfntVersion == "\0\1\0\0" else ".otf"
- outfile = basename + ".subset" + ext
+ outfile = makeOutputFileName(fontfile, overWrite=True, suffix=".subset")
with timer("compile glyph list"):
if wildcard_glyphs:
diff --git a/Lib/fontTools/subset/cff.py b/Lib/fontTools/subset/cff.py
index 0dcb7975..d6872f39 100644
--- a/Lib/fontTools/subset/cff.py
+++ b/Lib/fontTools/subset/cff.py
@@ -3,7 +3,6 @@ from fontTools import ttLib
from fontTools.pens.basePen import NullPen
from fontTools.misc.roundTools import otRound
from fontTools.misc.loggingTools import deprecateFunction
-from fontTools.varLib.varStore import VarStoreInstancer
from fontTools.subset.util import _add_method, _uniq_sort
@@ -109,15 +108,7 @@ def subset_glyphs(self, s):
del csi.file, csi.offsets
if hasattr(font, "FDSelect"):
sel = font.FDSelect
- # XXX We want to set sel.format to None, such that the
- # most compact format is selected. However, OTS was
- # broken and couldn't parse a FDSelect format 0 that
- # happened before CharStrings. As such, always force
- # format 3 until we fix cffLib to always generate
- # FDSelect after CharStrings.
- # https://github.com/khaledhosny/ots/pull/31
- #sel.format = None
- sel.format = 3
+ sel.format = None
sel.gidArray = [sel.gidArray[i] for i in indices]
newCharStrings = {}
for indicesIdx, charsetIdx in enumerate(indices):
diff --git a/Lib/fontTools/subset/svg.py b/Lib/fontTools/subset/svg.py
index e25fb3e6..4ed2cbd2 100644
--- a/Lib/fontTools/subset/svg.py
+++ b/Lib/fontTools/subset/svg.py
@@ -7,13 +7,14 @@ from typing import Dict, Iterable, Iterator, List, Optional, Set, Tuple
try:
from lxml import etree
-except ModuleNotFoundError:
+except ImportError:
# lxml is required for subsetting SVG, but we prefer to delay the import error
# until subset_glyphs() is called (i.e. if font to subset has an 'SVG ' table)
etree = None
from fontTools import ttLib
from fontTools.subset.util import _add_method
+from fontTools.ttLib.tables.S_V_G_ import SVGDocument
__all__ = ["subset_glyphs"]
@@ -192,7 +193,7 @@ def ranges(ints: Iterable[int]) -> Iterator[Tuple[int, int]]:
@_add_method(ttLib.getTableClass("SVG "))
def subset_glyphs(self, s) -> bool:
if etree is None:
- raise ModuleNotFoundError("No module named 'lxml', required to subset SVG")
+ raise ImportError("No module named 'lxml', required to subset SVG")
# glyph names (before subsetting)
glyph_order: List[str] = s.orig_glyph_order
@@ -201,10 +202,12 @@ def subset_glyphs(self, s) -> bool:
# map from original to new glyph indices (after subsetting)
glyph_index_map: Dict[int, int] = s.glyph_index_map
- new_docs: List[Tuple[bytes, int, int]] = []
- for doc, start, end in self.docList:
+ new_docs: List[SVGDocument] = []
+ for doc in self.docList:
- glyphs = {glyph_order[i] for i in range(start, end + 1)}.intersection(s.glyphs)
+ glyphs = {
+ glyph_order[i] for i in range(doc.startGlyphID, doc.endGlyphID + 1)
+ }.intersection(s.glyphs)
if not glyphs:
# no intersection: we can drop the whole record
continue
@@ -212,7 +215,7 @@ def subset_glyphs(self, s) -> bool:
svg = etree.fromstring(
# encode because fromstring dislikes xml encoding decl if input is str.
# SVG xml encoding must be utf-8 as per OT spec.
- doc.encode("utf-8"),
+ doc.data.encode("utf-8"),
parser=etree.XMLParser(
# Disable libxml2 security restrictions to support very deep trees.
# Without this we would get an error like this:
@@ -241,7 +244,7 @@ def subset_glyphs(self, s) -> bool:
new_gids = (glyph_index_map[i] for i in gids)
for start, end in ranges(new_gids):
- new_docs.append((new_doc, start, end))
+ new_docs.append(SVGDocument(new_doc, start, end, doc.compressed))
self.docList = new_docs
diff --git a/Lib/fontTools/svgLib/path/parser.py b/Lib/fontTools/svgLib/path/parser.py
index 1fcf8998..e594b2b8 100644
--- a/Lib/fontTools/svgLib/path/parser.py
+++ b/Lib/fontTools/svgLib/path/parser.py
@@ -16,10 +16,13 @@ ARC_COMMANDS = set("Aa")
UPPERCASE = set('MZLHVCSQTA')
COMMAND_RE = re.compile("([MmZzLlHhVvCcSsQqTtAa])")
+
+# https://www.w3.org/TR/css-syntax-3/#number-token-diagram
+# but -6.e-5 will be tokenized as "-6" then "-5" and confuse parsing
FLOAT_RE = re.compile(
r"[-+]?" # optional sign
r"(?:"
- r"(?:0|[1-9][0-9]*)(?:\.[0-9]+(?:[eE][-+]?[0-9]+)?)?" # int/float
+ r"(?:0|[1-9][0-9]*)(?:\.[0-9]+)?(?:[eE][-+]?[0-9]+)?" # int/float
r"|"
r"(?:\.[0-9]+(?:[eE][-+]?[0-9]+)?)" # float with leading dot (e.g. '.42')
r")"
@@ -278,8 +281,8 @@ def parse_path(pathdef, pen, current_pos=(0, 0), arc_class=EllipticalArc):
last_control = control
elif command == 'A':
- rx = float(elements.pop())
- ry = float(elements.pop())
+ rx = abs(float(elements.pop()))
+ ry = abs(float(elements.pop()))
rotation = float(elements.pop())
arc_large = bool(int(elements.pop()))
arc_sweep = bool(int(elements.pop()))
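The widened FLOAT_RE accepts an exponent directly after the integer part, so a token like '1e-5' is now read as one number instead of two. A quick check with the pattern copied from the hunk above:

    import re

    FLOAT_RE = re.compile(
        r"[-+]?"                                                # optional sign
        r"(?:"
        r"(?:0|[1-9][0-9]*)(?:\.[0-9]+)?(?:[eE][-+]?[0-9]+)?"   # int/float
        r"|"
        r"(?:\.[0-9]+(?:[eE][-+]?[0-9]+)?)"                     # float with leading dot
        r")"
    )

    print(FLOAT_RE.findall("1e-5 .5e2 -6.25"))
    # ['1e-5', '.5e2', '-6.25']; the old pattern split '1e-5' into '1' and '-5'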
diff --git a/Lib/fontTools/ttLib/scaleUpem.py b/Lib/fontTools/ttLib/scaleUpem.py
new file mode 100644
index 00000000..9e0e0ade
--- /dev/null
+++ b/Lib/fontTools/ttLib/scaleUpem.py
@@ -0,0 +1,336 @@
+"""Change the units-per-EM of a font.
+
+AAT and Graphite tables are not supported. CFF/CFF2 fonts
+are de-subroutinized."""
+
+
+from fontTools.ttLib.ttVisitor import TTVisitor
+import fontTools.ttLib as ttLib
+import fontTools.ttLib.tables.otBase as otBase
+import fontTools.ttLib.tables.otTables as otTables
+from fontTools.cffLib import VarStoreData
+import fontTools.cffLib.specializer as cffSpecializer
+from fontTools.misc.fixedTools import otRound
+
+
+__all__ = ["scale_upem", "ScalerVisitor"]
+
+
+class ScalerVisitor(TTVisitor):
+ def __init__(self, scaleFactor):
+ self.scaleFactor = scaleFactor
+
+ def scale(self, v):
+ return otRound(v * self.scaleFactor)
+
+
+@ScalerVisitor.register_attrs(
+ (
+ (ttLib.getTableClass("head"), ("unitsPerEm", "xMin", "yMin", "xMax", "yMax")),
+ (ttLib.getTableClass("post"), ("underlinePosition", "underlineThickness")),
+ (ttLib.getTableClass("VORG"), ("defaultVertOriginY")),
+ (
+ ttLib.getTableClass("hhea"),
+ (
+ "ascent",
+ "descent",
+ "lineGap",
+ "advanceWidthMax",
+ "minLeftSideBearing",
+ "minRightSideBearing",
+ "xMaxExtent",
+ "caretOffset",
+ ),
+ ),
+ (
+ ttLib.getTableClass("vhea"),
+ (
+ "ascent",
+ "descent",
+ "lineGap",
+ "advanceHeightMax",
+ "minTopSideBearing",
+ "minBottomSideBearing",
+ "yMaxExtent",
+ "caretOffset",
+ ),
+ ),
+ (
+ ttLib.getTableClass("OS/2"),
+ (
+ "xAvgCharWidth",
+ "ySubscriptXSize",
+ "ySubscriptYSize",
+ "ySubscriptXOffset",
+ "ySubscriptYOffset",
+ "ySuperscriptXSize",
+ "ySuperscriptYSize",
+ "ySuperscriptXOffset",
+ "ySuperscriptYOffset",
+ "yStrikeoutSize",
+ "yStrikeoutPosition",
+ "sTypoAscender",
+ "sTypoDescender",
+ "sTypoLineGap",
+ "usWinAscent",
+ "usWinDescent",
+ "sxHeight",
+ "sCapHeight",
+ ),
+ ),
+ (
+ otTables.ValueRecord,
+ ("XAdvance", "YAdvance", "XPlacement", "YPlacement"),
+ ), # GPOS
+ (otTables.Anchor, ("XCoordinate", "YCoordinate")), # GPOS
+ (otTables.CaretValue, ("Coordinate")), # GDEF
+ (otTables.BaseCoord, ("Coordinate")), # BASE
+ (otTables.MathValueRecord, ("Value")), # MATH
+ (otTables.ClipBox, ("xMin", "yMin", "xMax", "yMax")), # COLR
+ )
+)
+def visit(visitor, obj, attr, value):
+ setattr(obj, attr, visitor.scale(value))
+
+
+@ScalerVisitor.register_attr(
+ (ttLib.getTableClass("hmtx"), ttLib.getTableClass("vmtx")), "metrics"
+)
+def visit(visitor, obj, attr, metrics):
+ for g in metrics:
+ advance, lsb = metrics[g]
+ metrics[g] = visitor.scale(advance), visitor.scale(lsb)
+
+
+@ScalerVisitor.register_attr(ttLib.getTableClass("VMTX"), "VOriginRecords")
+def visit(visitor, obj, attr, VOriginRecords):
+ for g in VOriginRecords:
+ VOriginRecords[g] = visitor.scale(VOriginRecords[g])
+
+
+@ScalerVisitor.register_attr(ttLib.getTableClass("glyf"), "glyphs")
+def visit(visitor, obj, attr, glyphs):
+ for g in glyphs.values():
+ if g.isComposite():
+ for component in g.components:
+ component.x = visitor.scale(component.x)
+ component.y = visitor.scale(component.y)
+ else:
+ for attr in ("xMin", "xMax", "yMin", "yMax"):
+ v = getattr(g, attr, None)
+ if v is not None:
+ setattr(g, attr, visitor.scale(v))
+
+ glyf = visitor.font["glyf"]
+ coordinates = g.getCoordinates(glyf)[0]
+ for i, (x, y) in enumerate(coordinates):
+ coordinates[i] = visitor.scale(x), visitor.scale(y)
+
+
+@ScalerVisitor.register_attr(ttLib.getTableClass("gvar"), "variations")
+def visit(visitor, obj, attr, variations):
+ for varlist in variations.values():
+ for var in varlist:
+ coordinates = var.coordinates
+ for i, xy in enumerate(coordinates):
+ if xy is None:
+ continue
+ coordinates[i] = visitor.scale(xy[0]), visitor.scale(xy[1])
+
+
+@ScalerVisitor.register_attr(ttLib.getTableClass("kern"), "kernTables")
+def visit(visitor, obj, attr, kernTables):
+ for table in kernTables:
+ kernTable = table.kernTable
+ for k in kernTable.keys():
+ kernTable[k] = visitor.scale(kernTable[k])
+
+
+def _cff_scale(visitor, args):
+ for i, arg in enumerate(args):
+ if not isinstance(arg, list):
+ args[i] = visitor.scale(arg)
+ else:
+ num_blends = arg[-1]
+ _cff_scale(visitor, arg)
+ arg[-1] = num_blends
+
+
+@ScalerVisitor.register_attr(
+ (ttLib.getTableClass("CFF "), ttLib.getTableClass("CFF2")), "cff"
+)
+def visit(visitor, obj, attr, cff):
+ cff.desubroutinize()
+ topDict = cff.topDictIndex[0]
+ varStore = getattr(topDict, "VarStore", None)
+ getNumRegions = varStore.getNumRegions if varStore is not None else None
+ privates = set()
+ for fontname in cff.keys():
+ font = cff[fontname]
+ cs = font.CharStrings
+ for g in font.charset:
+ c, _ = cs.getItemAndSelector(g)
+ privates.add(c.private)
+
+ commands = cffSpecializer.programToCommands(
+ c.program, getNumRegions=getNumRegions
+ )
+ for op, args in commands:
+ _cff_scale(visitor, args)
+ c.program[:] = cffSpecializer.commandsToProgram(commands)
+
+ # Annoying business of scaling numbers that do not matter whatsoever
+
+ for attr in (
+ "UnderlinePosition",
+ "UnderlineThickness",
+ "FontBBox",
+ "StrokeWidth",
+ ):
+ value = getattr(topDict, attr, None)
+ if value is None:
+ continue
+ if isinstance(value, list):
+ _cff_scale(visitor, value)
+ else:
+ setattr(topDict, attr, visitor.scale(value))
+
+ for i in range(6):
+ topDict.FontMatrix[i] /= visitor.scaleFactor
+
+ for private in privates:
+ for attr in (
+ "BlueValues",
+ "OtherBlues",
+ "FamilyBlues",
+ "FamilyOtherBlues",
+ # "BlueScale",
+ # "BlueShift",
+ # "BlueFuzz",
+ "StdHW",
+ "StdVW",
+ "StemSnapH",
+ "StemSnapV",
+ "defaultWidthX",
+ "nominalWidthX",
+ ):
+ value = getattr(private, attr, None)
+ if value is None:
+ continue
+ if isinstance(value, list):
+ _cff_scale(visitor, value)
+ else:
+ setattr(private, attr, visitor.scale(value))
+
+
+# ItemVariationStore
+
+
+@ScalerVisitor.register(otTables.VarData)
+def visit(visitor, varData):
+ for item in varData.Item:
+ for i, v in enumerate(item):
+ item[i] = visitor.scale(v)
+
+
+# COLRv1
+
+
+def _setup_scale_paint(paint, scale):
+ if -2 <= scale <= 2 - (1 >> 14):
+ paint.Format = otTables.PaintFormat.PaintScaleUniform
+ paint.scale = scale
+ return
+
+ transform = otTables.Affine2x3()
+ transform.populateDefaults()
+ transform.xy = transform.yx = transform.dx = transform.dy = 0
+ transform.xx = transform.yy = scale
+
+ paint.Format = otTables.PaintFormat.PaintTransform
+ paint.Transform = transform
+
+
+@ScalerVisitor.register(otTables.BaseGlyphPaintRecord)
+def visit(visitor, record):
+ oldPaint = record.Paint
+
+ scale = otTables.Paint()
+ _setup_scale_paint(scale, visitor.scaleFactor)
+ scale.Paint = oldPaint
+
+ record.Paint = scale
+
+ return True
+
+
+@ScalerVisitor.register(otTables.Paint)
+def visit(visitor, paint):
+ if paint.Format != otTables.PaintFormat.PaintGlyph:
+ return True
+
+ newPaint = otTables.Paint()
+ newPaint.Format = paint.Format
+ newPaint.Paint = paint.Paint
+ newPaint.Glyph = paint.Glyph
+ del paint.Paint
+ del paint.Glyph
+
+ _setup_scale_paint(paint, 1 / visitor.scaleFactor)
+ paint.Paint = newPaint
+
+ visitor.visit(newPaint.Paint)
+
+ return False
+
+
+def scale_upem(font, new_upem):
+ """Change the units-per-EM of font to the new value."""
+ upem = font["head"].unitsPerEm
+ visitor = ScalerVisitor(new_upem / upem)
+ visitor.visit(font)
+
+
+def main(args=None):
+ """Change the units-per-EM of fonts"""
+
+ if args is None:
+ import sys
+
+ args = sys.argv[1:]
+
+ from fontTools.ttLib import TTFont
+ from fontTools.misc.cliTools import makeOutputFileName
+ import argparse
+
+ parser = argparse.ArgumentParser(
+ "fonttools ttLib.scaleUpem", description="Change the units-per-EM of fonts"
+ )
+ parser.add_argument("font", metavar="font", help="Font file.")
+ parser.add_argument(
+ "new_upem", metavar="new-upem", help="New units-per-EM integer value."
+ )
+ parser.add_argument(
+ "--output-file", metavar="path", default=None, help="Output file."
+ )
+
+ options = parser.parse_args(args)
+
+ font = TTFont(options.font)
+ new_upem = int(options.new_upem)
+ output_file = (
+ options.output_file
+ if options.output_file is not None
+ else makeOutputFileName(options.font, overWrite=True, suffix="-scaled")
+ )
+
+ scale_upem(font, new_upem)
+
+ print("Writing %s" % output_file)
+ font.save(output_file)
+
+
+if __name__ == "__main__":
+ import sys
+
+ sys.exit(main())
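The module doubles as a library entry point, mirroring the `fonttools ttLib.scaleUpem` command defined above; a minimal sketch with placeholder file names:

    from fontTools.ttLib import TTFont
    from fontTools.ttLib.scaleUpem import scale_upem

    font = TTFont("MyFont-2048upm.ttf")   # placeholder input
    scale_upem(font, 1000)                # rescale all tables to 1000 units per em
    font.save("MyFont-1000upm.ttf")       # placeholder output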
diff --git a/Lib/fontTools/ttLib/tables/E_B_D_T_.py b/Lib/fontTools/ttLib/tables/E_B_D_T_.py
index 0bd2ab99..ae716512 100644
--- a/Lib/fontTools/ttLib/tables/E_B_D_T_.py
+++ b/Lib/fontTools/ttLib/tables/E_B_D_T_.py
@@ -398,12 +398,17 @@ class BitmapGlyph(object):
# Allow lazy decompile.
if attr[:2] == '__':
raise AttributeError(attr)
- if not hasattr(self, "data"):
+ if attr == "data":
raise AttributeError(attr)
self.decompile()
del self.data
return getattr(self, attr)
+ def ensureDecompiled(self, recurse=False):
+ if hasattr(self, "data"):
+ self.decompile()
+ del self.data
+
# Not a fan of this but it is needed for safer safety checking.
def getFormat(self):
return safeEval(self.__class__.__name__[len(_bitmapGlyphSubclassPrefix):])
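The __getattr__/ensureDecompiled pairing above is the lazy-decompile idiom used here: any attribute other than the raw data triggers a decompile on first access, and ensureDecompiled lets callers force it. A self-contained sketch of the pattern (an illustrative class, not BitmapGlyph itself):

    class LazyBlob:
        def __init__(self, data):
            self.data = data

        def decompile(self):
            # pretend the raw data encodes a single integer
            self.value = int(self.data)

        def __getattr__(self, attr):
            # only reached when normal attribute lookup fails
            if attr[:2] == "__" or attr == "data":
                raise AttributeError(attr)
            self.decompile()
            del self.data
            return getattr(self, attr)

        def ensureDecompiled(self, recurse=False):
            if hasattr(self, "data"):
                self.decompile()
                del self.data

    blob = LazyBlob("42")
    print(blob.value)   # first access triggers decompile -> 42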
diff --git a/Lib/fontTools/ttLib/tables/E_B_L_C_.py b/Lib/fontTools/ttLib/tables/E_B_L_C_.py
index cfdbca7b..bb3d2140 100644
--- a/Lib/fontTools/ttLib/tables/E_B_L_C_.py
+++ b/Lib/fontTools/ttLib/tables/E_B_L_C_.py
@@ -338,11 +338,15 @@ class EblcIndexSubTable(object):
# Allow lazy decompile.
if attr[:2] == '__':
raise AttributeError(attr)
- if not hasattr(self, "data"):
+ if attr == "data":
raise AttributeError(attr)
self.decompile()
return getattr(self, attr)
+ def ensureDecompiled(self, recurse=False):
+ if hasattr(self, "data"):
+ self.decompile()
+
# This method just takes care of the indexSubHeader. Implementing subclasses
# should call it to compile the indexSubHeader and then continue compiling
# the remainder of their unique format.
diff --git a/Lib/fontTools/ttLib/tables/S_V_G_.py b/Lib/fontTools/ttLib/tables/S_V_G_.py
index bc0e533d..49e98d03 100644
--- a/Lib/fontTools/ttLib/tables/S_V_G_.py
+++ b/Lib/fontTools/ttLib/tables/S_V_G_.py
@@ -17,9 +17,11 @@ The XML format is:
</SVG>
"""
-from fontTools.misc.textTools import bytesjoin, strjoin, tobytes, tostr
+from fontTools.misc.textTools import bytesjoin, safeEval, strjoin, tobytes, tostr
from fontTools.misc import sstruct
from . import DefaultTable
+from collections.abc import Sequence
+from dataclasses import dataclass, astuple
from io import BytesIO
import struct
import logging
@@ -75,15 +77,18 @@ class table_S_V_G_(DefaultTable.DefaultTable):
start = entry.svgDocOffset + subTableStart
end = start + entry.svgDocLength
doc = data[start:end]
+ compressed = False
if doc.startswith(b"\x1f\x8b"):
import gzip
bytesIO = BytesIO(doc)
with gzip.GzipFile(None, "r", fileobj=bytesIO) as gunzipper:
doc = gunzipper.read()
- self.compressed = True
del bytesIO
+ compressed = True
doc = tostr(doc, "utf_8")
- self.docList.append( [doc, entry.startGlyphID, entry.endGlyphID] )
+ self.docList.append(
+ SVGDocument(doc, entry.startGlyphID, entry.endGlyphID, compressed)
+ )
def compile(self, ttFont):
version = 0
@@ -96,12 +101,18 @@ class table_S_V_G_(DefaultTable.DefaultTable):
entryList.append(datum)
curOffset = len(datum) + doc_index_entry_format_0Size*numEntries
seenDocs = {}
- for doc, startGlyphID, endGlyphID in self.docList:
- docBytes = tobytes(doc, encoding="utf_8")
- if getattr(self, "compressed", False) and not docBytes.startswith(b"\x1f\x8b"):
+ allCompressed = getattr(self, "compressed", False)
+ for i, doc in enumerate(self.docList):
+ if isinstance(doc, (list, tuple)):
+ doc = SVGDocument(*doc)
+ self.docList[i] = doc
+ docBytes = tobytes(doc.data, encoding="utf_8")
+ if (allCompressed or doc.compressed) and not docBytes.startswith(b"\x1f\x8b"):
import gzip
bytesIO = BytesIO()
- with gzip.GzipFile(None, "w", fileobj=bytesIO) as gzipper:
+ # mtime=0 strips the useless timestamp and makes gzip output reproducible;
+ # equivalent to `gzip -n`
+ with gzip.GzipFile(None, "w", fileobj=bytesIO, mtime=0) as gzipper:
gzipper.write(docBytes)
gzipped = bytesIO.getvalue()
if len(gzipped) < len(docBytes):
@@ -115,7 +126,7 @@ class table_S_V_G_(DefaultTable.DefaultTable):
curOffset += docLength
seenDocs[docBytes] = docOffset
docList.append(docBytes)
- entry = struct.pack(">HHLL", startGlyphID, endGlyphID, docOffset, docLength)
+ entry = struct.pack(">HHLL", doc.startGlyphID, doc.endGlyphID, docOffset, docLength)
entryList.append(entry)
entryList.extend(docList)
svgDocData = bytesjoin(entryList)
@@ -127,10 +138,16 @@ class table_S_V_G_(DefaultTable.DefaultTable):
return data
def toXML(self, writer, ttFont):
- for doc, startGID, endGID in self.docList:
- writer.begintag("svgDoc", startGlyphID=startGID, endGlyphID=endGID)
+ for i, doc in enumerate(self.docList):
+ if isinstance(doc, (list, tuple)):
+ doc = SVGDocument(*doc)
+ self.docList[i] = doc
+ attrs = {"startGlyphID": doc.startGlyphID, "endGlyphID": doc.endGlyphID}
+ if doc.compressed:
+ attrs["compressed"] = 1
+ writer.begintag("svgDoc", **attrs)
writer.newline()
- writer.writecdata(doc)
+ writer.writecdata(doc.data)
writer.newline()
writer.endtag("svgDoc")
writer.newline()
@@ -143,7 +160,8 @@ class table_S_V_G_(DefaultTable.DefaultTable):
doc = doc.strip()
startGID = int(attrs["startGlyphID"])
endGID = int(attrs["endGlyphID"])
- self.docList.append( [doc, startGID, endGID] )
+ compressed = bool(safeEval(attrs.get("compressed", "0")))
+ self.docList.append(SVGDocument(doc, startGID, endGID, compressed))
else:
log.warning("Unknown %s %s", name, content)
@@ -157,3 +175,23 @@ class DocumentIndexEntry(object):
def __repr__(self):
return "startGlyphID: %s, endGlyphID: %s, svgDocOffset: %s, svgDocLength: %s" % (self.startGlyphID, self.endGlyphID, self.svgDocOffset, self.svgDocLength)
+
+
+@dataclass
+class SVGDocument(Sequence):
+ data: str
+ startGlyphID: int
+ endGlyphID: int
+ compressed: bool = False
+
+ # Previously, the SVG table's docList attribute contained lists of 3 items:
+ # [doc, startGlyphID, endGlyphID]; later, we added a `compressed` attribute.
+ # For backward compatibility with code that depends on them being sequences of
+ # fixed length=3, we subclass the Sequence abstract base class and pretend only
+ # the first three items are present. 'compressed' is only accessible via named
+ # attribute lookup like regular dataclasses: i.e. `doc.compressed`, not `doc[3]`
+ def __getitem__(self, index):
+ return astuple(self)[:3][index]
+
+ def __len__(self):
+ return 3
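In practice the compatibility shim means existing callers keep working unchanged while new code can read the extra flag by name:

    from fontTools.ttLib.tables.S_V_G_ import SVGDocument

    doc = SVGDocument('<svg xmlns="http://www.w3.org/2000/svg"/>', 1, 3)
    data, start, end = doc            # still unpacks like the old 3-item list
    print(len(doc), doc[0] == data)   # 3 True
    print(doc.compressed)             # the new flag is attribute-only: False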
diff --git a/Lib/fontTools/ttLib/tables/_c_m_a_p.py b/Lib/fontTools/ttLib/tables/_c_m_a_p.py
index 9bd59a6b..ef2b5758 100644
--- a/Lib/fontTools/ttLib/tables/_c_m_a_p.py
+++ b/Lib/fontTools/ttLib/tables/_c_m_a_p.py
@@ -164,7 +164,9 @@ class table__c_m_a_p(DefaultTable.DefaultTable):
if ttFont.lazy is False: # Be lazy for None and True
self.ensureDecompiled()
- def ensureDecompiled(self):
+ def ensureDecompiled(self, recurse=False):
+ # The recurse argument is unused, but part of the signature of
+ # ensureDecompiled across the library.
for st in self.tables:
st.ensureDecompiled()
@@ -241,7 +243,9 @@ class CmapSubtable(object):
self.platEncID = None #: The encoding ID of this subtable (interpretation depends on ``platformID``)
self.language = None #: The language ID of this subtable (Macintosh platform only)
- def ensureDecompiled(self):
+ def ensureDecompiled(self, recurse=False):
+ # The recurse argument is unused, but part of the signature of
+ # ensureDecompiled across the library.
if self.data is None:
return
self.decompile(None, None) # use saved data.
diff --git a/Lib/fontTools/ttLib/tables/_g_l_y_f.py b/Lib/fontTools/ttLib/tables/_g_l_y_f.py
index 14c4519d..745ef72b 100644
--- a/Lib/fontTools/ttLib/tables/_g_l_y_f.py
+++ b/Lib/fontTools/ttLib/tables/_g_l_y_f.py
@@ -112,7 +112,9 @@ class table__g_l_y_f(DefaultTable.DefaultTable):
if ttFont.lazy is False: # Be lazy for None and True
self.ensureDecompiled()
- def ensureDecompiled(self):
+ def ensureDecompiled(self, recurse=False):
+ # The recurse argument is unused, but part of the signature of
+ # ensureDecompiled across the library.
for glyph in self.glyphs.values():
glyph.expand(self)
diff --git a/Lib/fontTools/ttLib/tables/_g_v_a_r.py b/Lib/fontTools/ttLib/tables/_g_v_a_r.py
index bc283cfe..dd198f4b 100644
--- a/Lib/fontTools/ttLib/tables/_g_v_a_r.py
+++ b/Lib/fontTools/ttLib/tables/_g_v_a_r.py
@@ -1,3 +1,4 @@
+from functools import partial
from fontTools.misc import sstruct
from fontTools.misc.textTools import safeEval
from . import DefaultTable
@@ -36,6 +37,46 @@ GVAR_HEADER_FORMAT = """
GVAR_HEADER_SIZE = sstruct.calcsize(GVAR_HEADER_FORMAT)
+class _lazy_dict(dict):
+
+ def get(self, k, *args):
+ v = super().get(k, *args)
+ if callable(v):
+ v = v()
+ self[k] = v
+ return v
+
+ def __getitem__(self, k):
+ v = super().__getitem__(k)
+ if callable(v):
+ v = v()
+ self[k] = v
+ return v
+
+ def items(self):
+ if not hasattr(self, '_loaded'):
+ self._load()
+ return super().items()
+
+ def values(self):
+ if not hasattr(self, '_loaded'):
+ self._load()
+ return super().values()
+
+ def __eq__(self, other):
+ if not hasattr(self, '_loaded'):
+ self._load()
+ return super().__eq__(other)
+
+ def __neq__(self, other):
+ if not hasattr(self, '_loaded'):
+ self._load()
+ return super().__neq__(other)
+
+ def _load(self):
+ for k in self:
+ self[k]
+ self._loaded = True
class table__g_v_a_r(DefaultTable.DefaultTable):
dependencies = ["fvar", "glyf"]
@@ -97,23 +138,19 @@ class table__g_v_a_r(DefaultTable.DefaultTable):
offsets = self.decompileOffsets_(data[GVAR_HEADER_SIZE:], tableFormat=(self.flags & 1), glyphCount=self.glyphCount)
sharedCoords = tv.decompileSharedTuples(
axisTags, self.sharedTupleCount, data, self.offsetToSharedTuples)
- self.variations = {}
+ self.variations = _lazy_dict()
offsetToData = self.offsetToGlyphVariationData
glyf = ttFont['glyf']
- for i in range(self.glyphCount):
- glyphName = glyphs[i]
+
+ def decompileVarGlyph(glyphName, gid):
glyph = glyf[glyphName]
numPointsInGlyph = self.getNumPoints_(glyph)
- gvarData = data[offsetToData + offsets[i] : offsetToData + offsets[i + 1]]
- try:
- self.variations[glyphName] = decompileGlyph_(
- numPointsInGlyph, sharedCoords, axisTags, gvarData)
- except Exception:
- log.error(
- "Failed to decompile deltas for glyph '%s' (%d points)",
- glyphName, numPointsInGlyph,
- )
- raise
+ gvarData = data[offsetToData + offsets[gid] : offsetToData + offsets[gid + 1]]
+ return decompileGlyph_(numPointsInGlyph, sharedCoords, axisTags, gvarData)
+
+ for gid in range(self.glyphCount):
+ glyphName = glyphs[gid]
+ self.variations[glyphName] = partial(decompileVarGlyph, glyphName, gid)
@staticmethod
def decompileOffsets_(data, tableFormat, glyphCount):
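The _lazy_dict above stores each glyph's entry as a zero-argument callable and replaces it with the decoded deltas on first read, so loading a font no longer decompiles every gvar record up front. A standalone sketch of the same idea (names are illustrative, not the gvar API):

    from functools import partial

    class LazyDict(dict):
        # values may be stored as callables; they are invoked and cached
        # the first time the key is read
        def __getitem__(self, k):
            v = super().__getitem__(k)
            if callable(v):
                v = v()
                self[k] = v
            return v

    def expensive_decode(name):
        print("decoding", name)
        return name.upper()

    variations = LazyDict()
    variations["a"] = partial(expensive_decode, "a")
    variations["b"] = partial(expensive_decode, "b")
    print(variations["a"])   # only 'a' is decoded; 'b' stays a pending callable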
diff --git a/Lib/fontTools/ttLib/tables/_k_e_r_n.py b/Lib/fontTools/ttLib/tables/_k_e_r_n.py
index f3f714b2..bcad2cea 100644
--- a/Lib/fontTools/ttLib/tables/_k_e_r_n.py
+++ b/Lib/fontTools/ttLib/tables/_k_e_r_n.py
@@ -161,9 +161,11 @@ class KernTable_format_0(object):
len(data) - 6 * nPairs)
def compile(self, ttFont):
- nPairs = len(self.kernTable)
+ nPairs = min(len(self.kernTable), 0xFFFF)
searchRange, entrySelector, rangeShift = getSearchRange(nPairs, 6)
searchRange &= 0xFFFF
+ entrySelector = min(entrySelector, 0xFFFF)
+ rangeShift = min(rangeShift, 0xFFFF)
data = struct.pack(
">HHHH", nPairs, searchRange, entrySelector, rangeShift)
diff --git a/Lib/fontTools/ttLib/tables/otBase.py b/Lib/fontTools/ttLib/tables/otBase.py
index d30892f3..1bd3198d 100644
--- a/Lib/fontTools/ttLib/tables/otBase.py
+++ b/Lib/fontTools/ttLib/tables/otBase.py
@@ -1,18 +1,23 @@
from fontTools.config import OPTIONS
from fontTools.misc.textTools import Tag, bytesjoin
from .DefaultTable import DefaultTable
+from enum import IntEnum
import sys
import array
import struct
import logging
-from typing import Iterator, NamedTuple, Optional
+from functools import lru_cache
+from typing import Iterator, NamedTuple, Optional, Tuple
log = logging.getLogger(__name__)
have_uharfbuzz = False
try:
import uharfbuzz as hb
- have_uharfbuzz = True
+ # repack method added in uharfbuzz >= 0.23; if uharfbuzz *can* be
+ # imported but repack method is missing, behave as if uharfbuzz
+ # is not available (fallback to the slower Python implementation)
+ have_uharfbuzz = callable(getattr(hb, "repack", None))
except ImportError:
pass
@@ -36,6 +41,25 @@ class OTLOffsetOverflowError(Exception):
def __str__(self):
return repr(self.value)
+class RepackerState(IntEnum):
+ # Repacking control flow is implemented using a state machine. The state machine table:
+ #
+ # State | Packing Success | Packing Failed | Exception Raised |
+ # ------------+-----------------+----------------+------------------+
+ # PURE_FT | Return result | PURE_FT | Return failure |
+ # HB_FT | Return result | HB_FT | FT_FALLBACK |
+ # FT_FALLBACK | HB_FT | FT_FALLBACK | Return failure |
+
+ # Pack only with fontTools, don't allow sharing between extensions.
+ PURE_FT = 1
+
+ # Attempt to pack with harfbuzz (allowing sharing between extensions);
+ # use fontTools to attempt overflow resolution.
+ HB_FT = 2
+
+ # Fallback if HB/FT packing gets stuck. Pack only with fontTools, don't allow sharing between
+ # extensions.
+ FT_FALLBACK = 3
class BaseTTXConverter(DefaultTable):
@@ -96,62 +120,98 @@ class BaseTTXConverter(DefaultTable):
self.tableTag,
)
+ if (use_hb_repack in (None, True)
+ and have_uharfbuzz
+ and self.tableTag in ("GSUB", "GPOS")):
+ state = RepackerState.HB_FT
+ else:
+ state = RepackerState.PURE_FT
+
hb_first_error_logged = False
+ lastOverflowRecord = None
while True:
try:
writer = OTTableWriter(tableTag=self.tableTag)
self.table.compile(writer, font)
- if (
- use_hb_repack in (None, True)
- and have_uharfbuzz
- and self.tableTag in ("GSUB", "GPOS")
- ):
- try:
- log.debug("serializing '%s' with hb.repack", self.tableTag)
- return writer.getAllDataUsingHarfbuzz()
- except (ValueError, MemoryError, hb.RepackerError) as e:
- # Only log hb repacker errors the first time they occur in
- # the offset-overflow resolution loop, they are just noisy.
- # Maybe we can revisit this if/when uharfbuzz actually gives
- # us more info as to why hb.repack failed...
- if not hb_first_error_logged:
- error_msg = f"{type(e).__name__}"
- if str(e) != "":
- error_msg += f": {e}"
- log.warning(
- "hb.repack failed to serialize '%s', reverting to "
- "pure-python serializer; the error message was: %s",
- self.tableTag,
- error_msg,
- )
- hb_first_error_logged = True
- return writer.getAllData(remove_duplicate=False)
- return writer.getAllData()
+ if state == RepackerState.HB_FT:
+ return self.tryPackingHarfbuzz(writer, hb_first_error_logged)
+ elif state == RepackerState.PURE_FT:
+ return self.tryPackingFontTools(writer)
+ elif state == RepackerState.FT_FALLBACK:
+ # Run packing with FontTools only, but don't return the result as it will
+ # not be optimally packed. Once a successful packing has been found, state is
+ # changed back to harfbuzz packing to produce the final, optimal packing.
+ self.tryPackingFontTools(writer)
+ log.debug("Re-enabling sharing between extensions and switching back to "
+ "harfbuzz+fontTools packing.")
+ state = RepackerState.HB_FT
except OTLOffsetOverflowError as e:
+ hb_first_error_logged = True
+ ok = self.tryResolveOverflow(font, e, lastOverflowRecord)
+ lastOverflowRecord = e.value
- if overflowRecord == e.value:
- raise # Oh well...
-
- overflowRecord = e.value
- log.info("Attempting to fix OTLOffsetOverflowError %s", e)
- lastItem = overflowRecord
+ if ok:
+ continue
- ok = 0
- if overflowRecord.itemName is None:
- from .otTables import fixLookupOverFlows
- ok = fixLookupOverFlows(font, overflowRecord)
+ if state is RepackerState.HB_FT:
+ log.debug("Harfbuzz packing out of resolutions, disabling sharing between extensions and "
+ "switching to fontTools only packing.")
+ state = RepackerState.FT_FALLBACK
else:
- from .otTables import fixSubTableOverFlows
- ok = fixSubTableOverFlows(font, overflowRecord)
- if not ok:
- # Try upgrading lookup to Extension and hope
- # that cross-lookup sharing not happening would
- # fix overflow...
- from .otTables import fixLookupOverFlows
- ok = fixLookupOverFlows(font, overflowRecord)
- if not ok:
- raise
+ raise
+
+ def tryPackingHarfbuzz(self, writer, hb_first_error_logged):
+ try:
+ log.debug("serializing '%s' with hb.repack", self.tableTag)
+ return writer.getAllDataUsingHarfbuzz(self.tableTag)
+ except (ValueError, MemoryError, hb.RepackerError) as e:
+ # Only log hb repacker errors the first time they occur in
+ # the offset-overflow resolution loop, they are just noisy.
+ # Maybe we can revisit this if/when uharfbuzz actually gives
+ # us more info as to why hb.repack failed...
+ if not hb_first_error_logged:
+ error_msg = f"{type(e).__name__}"
+ if str(e) != "":
+ error_msg += f": {e}"
+ log.warning(
+ "hb.repack failed to serialize '%s', attempting fonttools resolutions "
+ "; the error message was: %s",
+ self.tableTag,
+ error_msg,
+ )
+ hb_first_error_logged = True
+ return writer.getAllData(remove_duplicate=False)
+
+
+ def tryPackingFontTools(self, writer):
+ return writer.getAllData()
+
+
+ def tryResolveOverflow(self, font, e, lastOverflowRecord):
+ ok = 0
+ if lastOverflowRecord == e.value:
+ # Oh well...
+ return ok
+
+ overflowRecord = e.value
+ log.info("Attempting to fix OTLOffsetOverflowError %s", e)
+
+ if overflowRecord.itemName is None:
+ from .otTables import fixLookupOverFlows
+ ok = fixLookupOverFlows(font, overflowRecord)
+ else:
+ from .otTables import fixSubTableOverFlows
+ ok = fixSubTableOverFlows(font, overflowRecord)
+
+ if ok:
+ return ok
+
+ # Try upgrading lookup to Extension and hope
+ # that cross-lookup sharing not happening would
+ # fix overflow...
+ from .otTables import fixLookupOverFlows
+ return fixLookupOverFlows(font, overflowRecord)
def toXML(self, writer, font):
self.table.toXML2(writer, font)
@@ -164,8 +224,8 @@ class BaseTTXConverter(DefaultTable):
self.table.fromXML(name, attrs, content, font)
self.table.populateDefaults()
- def ensureDecompiled(self):
- self.table.ensureDecompiled(recurse=True)
+ def ensureDecompiled(self, recurse=True):
+ self.table.ensureDecompiled(recurse=recurse)
# https://github.com/fonttools/fonttools/pull/2285#issuecomment-834652928
@@ -380,7 +440,7 @@ class OTTableWriter(object):
return NotImplemented
return self.offsetSize == other.offsetSize and self.items == other.items
- def _doneWriting(self, internedTables):
+ def _doneWriting(self, internedTables, shareExtension=False):
# Convert CountData references to data string items
# collapse duplicate table references to a unique entry
# "tables" are OTTableWriter objects.
@@ -396,7 +456,7 @@ class OTTableWriter(object):
# See: https://github.com/fonttools/fonttools/issues/518
dontShare = hasattr(self, 'DontShare')
- if isExtension:
+ if isExtension and not shareExtension:
internedTables = {}
items = self.items
@@ -405,7 +465,7 @@ class OTTableWriter(object):
if hasattr(item, "getCountData"):
items[i] = item.getCountData()
elif hasattr(item, "getData"):
- item._doneWriting(internedTables)
+ item._doneWriting(internedTables, shareExtension=shareExtension)
# At this point, all subwriters are hashable based on their items.
# (See hash and comparison magic methods above.) So the ``setdefault``
# call here will return the first writer object we've seen with
@@ -510,7 +570,7 @@ class OTTableWriter(object):
child_idx = item_idx = item._gatherGraphForHarfbuzz(tables, obj_list, done, item_idx, virtual_edges)
else:
child_idx = done[id(item)]
-
+
real_edge = (pos, item.offsetSize, child_idx)
real_links.append(real_edge)
offset_pos += item.offsetSize
@@ -524,7 +584,7 @@ class OTTableWriter(object):
return item_idx
- def getAllDataUsingHarfbuzz(self):
+ def getAllDataUsingHarfbuzz(self, tableTag):
"""The Whole table is represented as a Graph.
Assemble graph data and call Harfbuzz repacker to pack the table.
Harfbuzz repacker is faster and retain as much sub-table sharing as possible, see also:
@@ -533,7 +593,7 @@ class OTTableWriter(object):
https://github.com/harfbuzz/uharfbuzz/blob/main/src/uharfbuzz/_harfbuzz.pyx#L1149
"""
internedTables = {}
- self._doneWriting(internedTables)
+ self._doneWriting(internedTables, shareExtension=True)
tables = []
obj_list = []
done = {}
@@ -552,7 +612,10 @@ class OTTableWriter(object):
tableData = table.getDataForHarfbuzz()
data.append(tableData)
- return hb.repack(data, obj_list)
+ if hasattr(hb, "repack_with_tag"):
+ return hb.repack_with_tag(str(tableTag), data, obj_list)
+ else:
+ return hb.repack(data, obj_list)
def getAllData(self, remove_duplicate=True):
"""Assemble all data, including all subtables."""
@@ -808,6 +871,9 @@ class BaseTable(object):
#elif not conv.isCount:
# # Warn?
# pass
+ if hasattr(conv, "DEFAULT"):
+ # OptionalValue converters (e.g. VarIndex)
+ setattr(self, conv.name, conv.DEFAULT)
def decompile(self, reader, font):
self.readFormat(reader)
@@ -1042,6 +1108,10 @@ class BaseTable(object):
if isinstance(v, BaseTable)
)
+ # instance (not @class)method for consistency with FormatSwitchingBaseTable
+ def getVariableAttrs(self):
+ return getVariableAttrs(self.__class__)
+
class FormatSwitchingBaseTable(BaseTable):
@@ -1076,6 +1146,9 @@ class FormatSwitchingBaseTable(BaseTable):
def toXML(self, xmlWriter, font, attrs=None, name=None):
BaseTable.toXML(self, xmlWriter, font, attrs, name)
+ def getVariableAttrs(self):
+ return getVariableAttrs(self.__class__, self.Format)
+
class UInt8FormatSwitchingBaseTable(FormatSwitchingBaseTable):
def readFormat(self, reader):
@@ -1097,6 +1170,33 @@ def getFormatSwitchingBaseTableClass(formatType):
raise TypeError(f"Unsupported format type: {formatType!r}")
+# memoize since these are parsed from otData.py, thus stay constant
+@lru_cache()
+def getVariableAttrs(cls: BaseTable, fmt: Optional[int] = None) -> Tuple[str]:
+ """Return sequence of variable table field names (can be empty).
+
+ Attributes are deemed "variable" when their otData.py description contains
+ 'VarIndexBase + {offset}', e.g. COLRv1 PaintVar* tables.
+ """
+ if not issubclass(cls, BaseTable):
+ raise TypeError(cls)
+ if issubclass(cls, FormatSwitchingBaseTable):
+ if fmt is None:
+ raise TypeError(f"'fmt' is required for format-switching {cls.__name__}")
+ converters = cls.convertersByName[fmt]
+ else:
+ converters = cls.convertersByName
+ # assume if no 'VarIndexBase' field is present, table has no variable fields
+ if "VarIndexBase" not in converters:
+ return ()
+ varAttrs = {}
+ for name, conv in converters.items():
+ offset = conv.getVarIndexOffset()
+ if offset is not None:
+ varAttrs[name] = offset
+ return tuple(sorted(varAttrs, key=varAttrs.__getitem__))
+
+
#
# Support for ValueRecords
#
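getVariableAttrs above leans on the converters' new getVarIndexOffset (added in otConverters.py below), which just scans each field's otData description for 'VarIndexBase + {offset}'. A minimal sketch of that matching, using a made-up description string:

    import re

    varIndexBasePlusOffsetRE = re.compile(r"VarIndexBase\s*\+\s*(\d+)")

    def getVarIndexOffset(description):
        m = varIndexBasePlusOffsetRE.search(description)
        return int(m.group(1)) if m else None

    print(getVarIndexOffset("Alpha (VarIndexBase + 2)"))   # 2
    print(getVarIndexOffset("Format identifier"))          # None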
diff --git a/Lib/fontTools/ttLib/tables/otConverters.py b/Lib/fontTools/ttLib/tables/otConverters.py
index 44fcd0ab..b08f1f19 100644
--- a/Lib/fontTools/ttLib/tables/otConverters.py
+++ b/Lib/fontTools/ttLib/tables/otConverters.py
@@ -15,10 +15,13 @@ from .otTables import (lookupTypes, AATStateTable, AATState, AATAction,
ContextualMorphAction, LigatureMorphAction,
InsertionMorphAction, MorxSubtable,
ExtendMode as _ExtendMode,
- CompositeMode as _CompositeMode)
+ CompositeMode as _CompositeMode,
+ NO_VARIATION_INDEX)
from itertools import zip_longest
from functools import partial
+import re
import struct
+from typing import Optional
import logging
@@ -60,7 +63,7 @@ def buildConverters(tableSpec, tableNamespace):
else:
converterClass = eval(tp, tableNamespace, converterMapping)
- conv = converterClass(name, repeat, aux)
+ conv = converterClass(name, repeat, aux, description=descr)
if conv.tableClass:
# A "template" such as OffsetTo(AType) knowss the table class already
@@ -136,7 +139,7 @@ class BaseConverter(object):
"""Base class for converter objects. Apart from the constructor, this
is an abstract class."""
- def __init__(self, name, repeat, aux, tableClass=None):
+ def __init__(self, name, repeat, aux, tableClass=None, *, description=""):
self.name = name
self.repeat = repeat
self.aux = aux
@@ -159,6 +162,7 @@ class BaseConverter(object):
"BaseGlyphRecordCount",
"LayerRecordCount",
]
+ self.description = description
def readArray(self, reader, font, tableDict, count):
"""Read an array of values from the reader."""
@@ -211,6 +215,15 @@ class BaseConverter(object):
"""Write a value to XML."""
raise NotImplementedError(self)
+ varIndexBasePlusOffsetRE = re.compile(r"VarIndexBase\s*\+\s*(\d+)")
+
+ def getVarIndexOffset(self) -> Optional[int]:
+ """If description has `VarIndexBase + {offset}`, return the offset else None."""
+ m = self.varIndexBasePlusOffsetRE.search(self.description)
+ if not m:
+ return None
+ return int(m.group(1))
+
class SimpleValue(BaseConverter):
@staticmethod
@@ -270,7 +283,7 @@ class Flags32(ULong):
return "0x%08X" % value
class VarIndex(OptionalValue, ULong):
- DEFAULT = 0xFFFFFFFF
+ DEFAULT = NO_VARIATION_INDEX
class Short(IntValue):
staticSize = 2
@@ -402,40 +415,51 @@ class DeciPoints(FloatValue):
def write(self, writer, font, tableDict, value, repeatIndex=None):
writer.writeUShort(round(value * 10))
-class Fixed(FloatValue):
- staticSize = 4
+class BaseFixedValue(FloatValue):
+ staticSize = NotImplemented
+ precisionBits = NotImplemented
+ readerMethod = NotImplemented
+ writerMethod = NotImplemented
def read(self, reader, font, tableDict):
- return fi2fl(reader.readLong(), 16)
+ return self.fromInt(getattr(reader, self.readerMethod)())
def write(self, writer, font, tableDict, value, repeatIndex=None):
- writer.writeLong(fl2fi(value, 16))
- @staticmethod
- def fromString(value):
- return str2fl(value, 16)
- @staticmethod
- def toString(value):
- return fl2str(value, 16)
+ getattr(writer, self.writerMethod)(self.toInt(value))
+ @classmethod
+ def fromInt(cls, value):
+ return fi2fl(value, cls.precisionBits)
+ @classmethod
+ def toInt(cls, value):
+ return fl2fi(value, cls.precisionBits)
+ @classmethod
+ def fromString(cls, value):
+ return str2fl(value, cls.precisionBits)
+ @classmethod
+ def toString(cls, value):
+ return fl2str(value, cls.precisionBits)
-class F2Dot14(FloatValue):
+class Fixed(BaseFixedValue):
+ staticSize = 4
+ precisionBits = 16
+ readerMethod = "readLong"
+ writerMethod = "writeLong"
+
+class F2Dot14(BaseFixedValue):
staticSize = 2
- def read(self, reader, font, tableDict):
- return fi2fl(reader.readShort(), 14)
- def write(self, writer, font, tableDict, value, repeatIndex=None):
- writer.writeShort(fl2fi(value, 14))
- @staticmethod
- def fromString(value):
- return str2fl(value, 14)
- @staticmethod
- def toString(value):
- return fl2str(value, 14)
+ precisionBits = 14
+ readerMethod = "readShort"
+ writerMethod = "writeShort"
class Angle(F2Dot14):
# angles are specified in degrees, and encoded as F2Dot14 fractions of half
# circle: e.g. 1.0 => 180, -0.5 => -90, -2.0 => -360, etc.
+ bias = 0.0
factor = 1.0/(1<<14) * 180 # 0.010986328125
- def read(self, reader, font, tableDict):
- return super().read(reader, font, tableDict) * 180
- def write(self, writer, font, tableDict, value, repeatIndex=None):
- super().write(writer, font, tableDict, value / 180, repeatIndex=repeatIndex)
+ @classmethod
+ def fromInt(cls, value):
+ return (super().fromInt(value) + cls.bias) * 180
+ @classmethod
+ def toInt(cls, value):
+ return super().toInt((value / 180) - cls.bias)
@classmethod
def fromString(cls, value):
# quantize to nearest multiples of minimum fixed-precision angle
@@ -444,6 +468,11 @@ class Angle(F2Dot14):
def toString(cls, value):
return nearestMultipleShortestRepr(value, cls.factor)
+class BiasedAngle(Angle):
+ # A bias of 1.0 is used in the representation of start and end angles
+ # of COLRv1 PaintSweepGradients to allow for encoding +360deg
+ bias = 1.0
+
class Version(SimpleValue):
staticSize = 4
def read(self, reader, font, tableDict):
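
To make the BaseFixedValue/Angle/BiasedAngle arithmetic above concrete, here is a minimal stand-alone sketch (not using the converter classes themselves, and approximating fontTools' otRound with Python's round):

    def fl2fi(value, bits):
        # float -> fixed-point integer with `bits` fractional bits
        return round(value * (1 << bits))

    def fi2fl(value, bits):
        # fixed-point integer -> float
        return value / (1 << bits)

    def angle_to_int(degrees, bias=0.0):
        # Angle.toInt: degrees are encoded as F2Dot14 fractions of a half circle
        return fl2fi(degrees / 180 - bias, 14)

    def angle_from_int(raw, bias=0.0):
        # Angle.fromInt: the inverse mapping
        return (fi2fl(raw, 14) + bias) * 180

    assert angle_to_int(90) == 1 << 13                  # 0.5 half-circles
    assert angle_from_int(angle_to_int(90)) == 90
    # Plain Angle cannot encode +360 deg (it would need 2.0, just outside the
    # signed F2Dot14 range); BiasedAngle's bias of 1.0 shifts it back in range:
    assert angle_to_int(360, bias=1.0) == 1 << 14
    assert angle_from_int(1 << 14, bias=1.0) == 360
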
@@ -686,8 +715,10 @@ class FeatureParams(Table):
class ValueFormat(IntValue):
staticSize = 2
- def __init__(self, name, repeat, aux, tableClass=None):
- BaseConverter.__init__(self, name, repeat, aux, tableClass)
+ def __init__(self, name, repeat, aux, tableClass=None, *, description=""):
+ BaseConverter.__init__(
+ self, name, repeat, aux, tableClass, description=description
+ )
self.which = "ValueFormat" + ("2" if name[-1] == "2" else "1")
def read(self, reader, font, tableDict):
format = reader.readUShort()
@@ -720,8 +751,10 @@ class ValueRecord(ValueFormat):
class AATLookup(BaseConverter):
BIN_SEARCH_HEADER_SIZE = 10
- def __init__(self, name, repeat, aux, tableClass):
- BaseConverter.__init__(self, name, repeat, aux, tableClass)
+ def __init__(self, name, repeat, aux, tableClass, *, description=""):
+ BaseConverter.__init__(
+ self, name, repeat, aux, tableClass, description=description
+ )
if issubclass(self.tableClass, SimpleValue):
self.converter = self.tableClass(name='Value', repeat=None, aux=None)
else:
@@ -1019,8 +1052,10 @@ class MorxSubtableConverter(BaseConverter):
val: key for key, val in _PROCESSING_ORDERS.items()
}
- def __init__(self, name, repeat, aux):
- BaseConverter.__init__(self, name, repeat, aux)
+ def __init__(self, name, repeat, aux, tableClass=None, *, description=""):
+ BaseConverter.__init__(
+ self, name, repeat, aux, tableClass, description=description
+ )
def _setTextDirectionFromCoverageFlags(self, flags, subtable):
if (flags & 0x20) != 0:
@@ -1140,8 +1175,10 @@ class MorxSubtableConverter(BaseConverter):
# https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6Tables.html#ExtendedStateHeader
# TODO: Untangle the implementation of the various lookup-specific formats.
class STXHeader(BaseConverter):
- def __init__(self, name, repeat, aux, tableClass):
- BaseConverter.__init__(self, name, repeat, aux, tableClass)
+ def __init__(self, name, repeat, aux, tableClass, *, description=""):
+ BaseConverter.__init__(
+ self, name, repeat, aux, tableClass, description=description
+ )
assert issubclass(self.tableClass, AATAction)
self.classLookup = AATLookup("GlyphClasses", None, None, UShort)
if issubclass(self.tableClass, ContextualMorphAction):
@@ -1742,6 +1779,7 @@ converterMapping = {
"Fixed": Fixed,
"F2Dot14": F2Dot14,
"Angle": Angle,
+ "BiasedAngle": BiasedAngle,
"struct": Struct,
"Offset": Table,
"LOffset": LTable,
diff --git a/Lib/fontTools/ttLib/tables/otData.py b/Lib/fontTools/ttLib/tables/otData.py
index dd4033e4..2e65869f 100755
--- a/Lib/fontTools/ttLib/tables/otData.py
+++ b/Lib/fontTools/ttLib/tables/otData.py
@@ -1623,10 +1623,10 @@ otData = [
('ClipBoxFormat2', [
('uint8', 'Format', None, None, 'Format for variable ClipBox: set to 2.'),
- ('int16', 'xMin', None, None, 'Minimum x of clip box.'),
- ('int16', 'yMin', None, None, 'Minimum y of clip box.'),
- ('int16', 'xMax', None, None, 'Maximum x of clip box.'),
- ('int16', 'yMax', None, None, 'Maximum y of clip box.'),
+ ('int16', 'xMin', None, None, 'Minimum x of clip box. VarIndexBase + 0.'),
+ ('int16', 'yMin', None, None, 'Minimum y of clip box. VarIndexBase + 1.'),
+ ('int16', 'xMax', None, None, 'Maximum x of clip box. VarIndexBase + 2.'),
+ ('int16', 'yMax', None, None, 'Maximum y of clip box. VarIndexBase + 3.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1648,12 +1648,12 @@ otData = [
('Fixed', 'dy', None, None, 'Translation in y direction'),
]),
('VarAffine2x3', [
- ('Fixed', 'xx', None, None, 'x-part of x basis vector'),
- ('Fixed', 'yx', None, None, 'y-part of x basis vector'),
- ('Fixed', 'xy', None, None, 'x-part of y basis vector'),
- ('Fixed', 'yy', None, None, 'y-part of y basis vector'),
- ('Fixed', 'dx', None, None, 'Translation in x direction'),
- ('Fixed', 'dy', None, None, 'Translation in y direction'),
+ ('Fixed', 'xx', None, None, 'x-part of x basis vector. VarIndexBase + 0.'),
+ ('Fixed', 'yx', None, None, 'y-part of x basis vector. VarIndexBase + 1.'),
+ ('Fixed', 'xy', None, None, 'x-part of y basis vector. VarIndexBase + 2.'),
+ ('Fixed', 'yy', None, None, 'y-part of y basis vector. VarIndexBase + 3.'),
+ ('Fixed', 'dx', None, None, 'Translation in x direction. VarIndexBase + 4.'),
+ ('Fixed', 'dy', None, None, 'Translation in y direction. VarIndexBase + 5.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1663,9 +1663,9 @@ otData = [
('F2Dot14', 'Alpha', None, None, 'Values outsided [0.,1.] reserved'),
]),
('VarColorStop', [
- ('F2Dot14', 'StopOffset', None, None, 'VarIndexBase + 0'),
+ ('F2Dot14', 'StopOffset', None, None, 'VarIndexBase + 0.'),
('uint16', 'PaletteIndex', None, None, 'Index for a CPAL palette entry.'),
- ('F2Dot14', 'Alpha', None, None, 'Values outsided [0.,1.] reserved. VarIndexBase + 1'),
+ ('F2Dot14', 'Alpha', None, None, 'Values outside [0.,1.] reserved. VarIndexBase + 1.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1697,7 +1697,7 @@ otData = [
('PaintFormat3', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 3'),
('uint16', 'PaletteIndex', None, None, 'Index for a CPAL palette entry.'),
- ('F2Dot14', 'Alpha', None, None, 'Values outsided [0.,1.] reserved. VarIndexBase + 0'),
+ ('F2Dot14', 'Alpha', None, None, 'Values outside [0.,1.] reserved. VarIndexBase + 0.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1716,12 +1716,12 @@ otData = [
('PaintFormat5', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 5'),
('LOffset24To(VarColorLine)', 'ColorLine', None, None, 'Offset (from beginning of PaintVarLinearGradient table) to VarColorLine subtable.'),
- ('int16', 'x0', None, None, ''),
- ('int16', 'y0', None, None, ''),
- ('int16', 'x1', None, None, ''),
- ('int16', 'y1', None, None, ''),
- ('int16', 'x2', None, None, ''),
- ('int16', 'y2', None, None, ''),
+ ('int16', 'x0', None, None, 'VarIndexBase + 0.'),
+ ('int16', 'y0', None, None, 'VarIndexBase + 1.'),
+ ('int16', 'x1', None, None, 'VarIndexBase + 2.'),
+ ('int16', 'y1', None, None, 'VarIndexBase + 3.'),
+ ('int16', 'x2', None, None, 'VarIndexBase + 4.'),
+ ('int16', 'y2', None, None, 'VarIndexBase + 5.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1740,12 +1740,12 @@ otData = [
('PaintFormat7', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 7'),
('LOffset24To(VarColorLine)', 'ColorLine', None, None, 'Offset (from beginning of PaintVarRadialGradient table) to VarColorLine subtable.'),
- ('int16', 'x0', None, None, ''),
- ('int16', 'y0', None, None, ''),
- ('uint16', 'r0', None, None, ''),
- ('int16', 'x1', None, None, ''),
- ('int16', 'y1', None, None, ''),
- ('uint16', 'r1', None, None, ''),
+ ('int16', 'x0', None, None, 'VarIndexBase + 0.'),
+ ('int16', 'y0', None, None, 'VarIndexBase + 1.'),
+ ('uint16', 'r0', None, None, 'VarIndexBase + 2.'),
+ ('int16', 'x1', None, None, 'VarIndexBase + 3.'),
+ ('int16', 'y1', None, None, 'VarIndexBase + 4.'),
+ ('uint16', 'r1', None, None, 'VarIndexBase + 5.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1755,17 +1755,17 @@ otData = [
('Offset24', 'ColorLine', None, None, 'Offset (from beginning of PaintSweepGradient table) to ColorLine subtable.'),
('int16', 'centerX', None, None, 'Center x coordinate.'),
('int16', 'centerY', None, None, 'Center y coordinate.'),
- ('Angle', 'startAngle', None, None, 'Start of the angular range of the gradient.'),
- ('Angle', 'endAngle', None, None, 'End of the angular range of the gradient.'),
+ ('BiasedAngle', 'startAngle', None, None, 'Start of the angular range of the gradient.'),
+ ('BiasedAngle', 'endAngle', None, None, 'End of the angular range of the gradient.'),
]),
# PaintVarSweepGradient
('PaintFormat9', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 9'),
('LOffset24To(VarColorLine)', 'ColorLine', None, None, 'Offset (from beginning of PaintVarSweepGradient table) to VarColorLine subtable.'),
- ('int16', 'centerX', None, None, 'Center x coordinate.'),
- ('int16', 'centerY', None, None, 'Center y coordinate.'),
- ('Angle', 'startAngle', None, None, 'Start of the angular range of the gradient.'),
- ('Angle', 'endAngle', None, None, 'End of the angular range of the gradient.'),
+ ('int16', 'centerX', None, None, 'Center x coordinate. VarIndexBase + 0.'),
+ ('int16', 'centerY', None, None, 'Center y coordinate. VarIndexBase + 1.'),
+ ('BiasedAngle', 'startAngle', None, None, 'Start of the angular range of the gradient. VarIndexBase + 2.'),
+ ('BiasedAngle', 'endAngle', None, None, 'End of the angular range of the gradient. VarIndexBase + 3.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1806,8 +1806,8 @@ otData = [
('PaintFormat15', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 15'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarTranslate table) to Paint subtable.'),
- ('int16', 'dx', None, None, 'Translation in x direction.'),
- ('int16', 'dy', None, None, 'Translation in y direction.'),
+ ('int16', 'dx', None, None, 'Translation in x direction. VarIndexBase + 0.'),
+ ('int16', 'dy', None, None, 'Translation in y direction. VarIndexBase + 1.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1822,8 +1822,8 @@ otData = [
('PaintFormat17', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 17'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarScale table) to Paint subtable.'),
- ('F2Dot14', 'scaleX', None, None, ''),
- ('F2Dot14', 'scaleY', None, None, ''),
+ ('F2Dot14', 'scaleX', None, None, 'VarIndexBase + 0.'),
+ ('F2Dot14', 'scaleY', None, None, 'VarIndexBase + 1.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1840,10 +1840,10 @@ otData = [
('PaintFormat19', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 19'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarScaleAroundCenter table) to Paint subtable.'),
- ('F2Dot14', 'scaleX', None, None, ''),
- ('F2Dot14', 'scaleY', None, None, ''),
- ('int16', 'centerX', None, None, ''),
- ('int16', 'centerY', None, None, ''),
+ ('F2Dot14', 'scaleX', None, None, 'VarIndexBase + 0.'),
+ ('F2Dot14', 'scaleY', None, None, 'VarIndexBase + 1.'),
+ ('int16', 'centerX', None, None, 'VarIndexBase + 2.'),
+ ('int16', 'centerY', None, None, 'VarIndexBase + 3.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1857,7 +1857,7 @@ otData = [
('PaintFormat21', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 21'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarScaleUniform table) to Paint subtable.'),
- ('F2Dot14', 'scale', None, None, ''),
+ ('F2Dot14', 'scale', None, None, 'VarIndexBase + 0.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1873,9 +1873,9 @@ otData = [
('PaintFormat23', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 23'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarScaleUniformAroundCenter table) to Paint subtable.'),
- ('F2Dot14', 'scale', None, None, ''),
- ('int16', 'centerX', None, None, ''),
- ('int16', 'centerY', None, None, ''),
+ ('F2Dot14', 'scale', None, None, 'VarIndexBase + 0.'),
+ ('int16', 'centerX', None, None, 'VarIndexBase + 1.'),
+ ('int16', 'centerY', None, None, 'VarIndexBase + 2.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1889,7 +1889,7 @@ otData = [
('PaintFormat25', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 25'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarRotate table) to Paint subtable.'),
- ('Angle', 'angle', None, None, ''),
+ ('Angle', 'angle', None, None, 'VarIndexBase + 0.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1905,9 +1905,9 @@ otData = [
('PaintFormat27', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 27'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarRotateAroundCenter table) to Paint subtable.'),
- ('Angle', 'angle', None, None, ''),
- ('int16', 'centerX', None, None, ''),
- ('int16', 'centerY', None, None, ''),
+ ('Angle', 'angle', None, None, 'VarIndexBase + 0.'),
+ ('int16', 'centerX', None, None, 'VarIndexBase + 1.'),
+ ('int16', 'centerY', None, None, 'VarIndexBase + 2.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1922,8 +1922,8 @@ otData = [
('PaintFormat29', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 29'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarSkew table) to Paint subtable.'),
- ('Angle', 'xSkewAngle', None, None, ''),
- ('Angle', 'ySkewAngle', None, None, ''),
+ ('Angle', 'xSkewAngle', None, None, 'VarIndexBase + 0.'),
+ ('Angle', 'ySkewAngle', None, None, 'VarIndexBase + 1.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
@@ -1940,10 +1940,10 @@ otData = [
('PaintFormat31', [
('uint8', 'PaintFormat', None, None, 'Format identifier-format = 31'),
('Offset24', 'Paint', None, None, 'Offset (from beginning of PaintVarSkewAroundCenter table) to Paint subtable.'),
- ('Angle', 'xSkewAngle', None, None, ''),
- ('Angle', 'ySkewAngle', None, None, ''),
- ('int16', 'centerX', None, None, ''),
- ('int16', 'centerY', None, None, ''),
+ ('Angle', 'xSkewAngle', None, None, 'VarIndexBase + 0.'),
+ ('Angle', 'ySkewAngle', None, None, 'VarIndexBase + 1.'),
+ ('int16', 'centerX', None, None, 'VarIndexBase + 2.'),
+ ('int16', 'centerY', None, None, 'VarIndexBase + 3.'),
('VarIndex', 'VarIndexBase', None, None, 'Base index into DeltaSetIndexMap.'),
]),
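
The 'VarIndexBase + N' annotations above feed getVariableAttrs/getVarIndexOffset in otBase/otConverters; at render or merge time, the delta-set index of a given variable field is resolved roughly as sketched below (a simplified illustration of the COLRv1 lookup, not a fontTools API):

    NO_VARIATION_INDEX = 0xFFFFFFFF

    def field_var_index(var_index_base, offset, var_index_map=None):
        # offset is the N from the field's "VarIndexBase + N" description
        if var_index_base == NO_VARIATION_INDEX:
            return NO_VARIATION_INDEX          # field is not variable
        idx = var_index_base + offset
        if var_index_map is not None:          # optional COLR.VarIndexMap mapping
            idx = var_index_map[idx]
        return idx

    # e.g. PaintVarSweepGradient's startAngle is documented as "VarIndexBase + 2":
    print(field_var_index(10, 2))              # -> 12
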
diff --git a/Lib/fontTools/ttLib/tables/otTables.py b/Lib/fontTools/ttLib/tables/otTables.py
index fbd9db7b..6e7f3dfb 100644
--- a/Lib/fontTools/ttLib/tables/otTables.py
+++ b/Lib/fontTools/ttLib/tables/otTables.py
@@ -600,6 +600,11 @@ class Coverage(FormatSwitchingBaseTable):
glyphs.append(attrs["value"])
+# The special 0xFFFFFFFF delta-set index is used to indicate that there
+# is no variation data in the ItemVariationStore for a given variable field
+NO_VARIATION_INDEX = 0xFFFFFFFF
+
+
class DeltaSetIndexMap(getFormatSwitchingBaseTableClass("uint8")):
def populateDefaults(self, propagator=None):
@@ -647,12 +652,19 @@ class DeltaSetIndexMap(getFormatSwitchingBaseTableClass("uint8")):
return rawTable
def toXML2(self, xmlWriter, font):
+ # Make the XML dump less verbose by omitting no-op entries like:
+ # <Map index="..." outer="65535" inner="65535"/>
+ xmlWriter.comment(
+ "Omitted values default to 0xFFFF/0xFFFF (no variations)"
+ )
+ xmlWriter.newline()
for i, value in enumerate(getattr(self, "mapping", [])):
- attrs = (
- ('index', i),
- ('outer', value >> 16),
- ('inner', value & 0xFFFF),
- )
+ attrs = [('index', i)]
+ if value != NO_VARIATION_INDEX:
+ attrs.extend([
+ ('outer', value >> 16),
+ ('inner', value & 0xFFFF),
+ ])
xmlWriter.simpletag("Map", attrs)
xmlWriter.newline()
@@ -661,8 +673,8 @@ class DeltaSetIndexMap(getFormatSwitchingBaseTableClass("uint8")):
if mapping is None:
self.mapping = mapping = []
index = safeEval(attrs['index'])
- outer = safeEval(attrs['outer'])
- inner = safeEval(attrs['inner'])
+ outer = safeEval(attrs.get('outer', '0xFFFF'))
+ inner = safeEval(attrs.get('inner', '0xFFFF'))
assert inner <= 0xFFFF
mapping.insert(index, (outer << 16) | inner)
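
For reference, a tiny sketch of the packing that toXML2/fromXML above rely on: a delta-set index is a 32-bit value whose high half is the outer (VarData) index and whose low half is the inner (row) index, and 0xFFFF/0xFFFF packs to NO_VARIATION_INDEX, which is why those entries can be omitted from the XML dump.

    NO_VARIATION_INDEX = 0xFFFFFFFF

    def pack(outer, inner):
        return (outer << 16) | inner

    def unpack(value):
        return value >> 16, value & 0xFFFF

    assert pack(0xFFFF, 0xFFFF) == NO_VARIATION_INDEX   # the omitted default
    assert unpack(pack(3, 25)) == (3, 25)
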
@@ -1257,7 +1269,19 @@ class BaseGlyphList(BaseTable):
return self.__dict__.copy()
+class ClipBoxFormat(IntEnum):
+ Static = 1
+ Variable = 2
+
+ def is_variable(self):
+ return self is self.Variable
+
+ def as_variable(self):
+ return self.Variable
+
+
class ClipBox(getFormatSwitchingBaseTableClass("uint8")):
+ formatEnum = ClipBoxFormat
def as_tuple(self):
return tuple(getattr(self, conv.name) for conv in self.getConverters())
@@ -1492,12 +1516,24 @@ class PaintFormat(IntEnum):
PaintVarSkewAroundCenter = 31
PaintComposite = 32
+ def is_variable(self):
+ return self.name.startswith("PaintVar")
+
+ def as_variable(self):
+ if self.is_variable():
+ return self
+ try:
+ return PaintFormat.__members__[f"PaintVar{self.name[5:]}"]
+ except KeyError:
+ return None
+
class Paint(getFormatSwitchingBaseTableClass("uint8")):
+ formatEnum = PaintFormat
def getFormatName(self):
try:
- return PaintFormat(self.Format).name
+ return self.formatEnum(self.Format).name
except ValueError:
raise NotImplementedError(f"Unknown Paint format: {self.Format}")
@@ -1962,6 +1998,14 @@ def _buildClasses():
cls.DontShare = True
namespace[name] = cls
+ # link Var{Table} <-> {Table} (e.g. ColorStop <-> VarColorStop, etc.)
+ for name, _ in otData:
+ if name.startswith("Var") and len(name) > 3 and name[3:] in namespace:
+ varType = namespace[name]
+ noVarType = namespace[name[3:]]
+ varType.NoVarType = noVarType
+ noVarType.VarType = varType
+
for base, alts in _equivalents.items():
base = namespace[base]
for alt in alts:
diff --git a/Lib/fontTools/ttLib/tables/otTraverse.py b/Lib/fontTools/ttLib/tables/otTraverse.py
new file mode 100644
index 00000000..40b28b2b
--- /dev/null
+++ b/Lib/fontTools/ttLib/tables/otTraverse.py
@@ -0,0 +1,137 @@
+"""Methods for traversing trees of otData-driven OpenType tables."""
+from collections import deque
+from typing import Callable, Deque, Iterable, List, Optional, Tuple
+from .otBase import BaseTable
+
+
+__all__ = [
+ "bfs_base_table",
+ "dfs_base_table",
+ "SubTablePath",
+]
+
+
+class SubTablePath(Tuple[BaseTable.SubTableEntry, ...]):
+
+ def __str__(self) -> str:
+ path_parts = []
+ for entry in self:
+ path_part = entry.name
+ if entry.index is not None:
+ path_part += f"[{entry.index}]"
+ path_parts.append(path_part)
+ return ".".join(path_parts)
+
+
+# Given f(current frontier, new entries) add new entries to frontier
+AddToFrontierFn = Callable[[Deque[SubTablePath], List[SubTablePath]], None]
+
+
+def dfs_base_table(
+ root: BaseTable,
+ root_accessor: Optional[str] = None,
+ skip_root: bool = False,
+ predicate: Optional[Callable[[SubTablePath], bool]] = None,
+) -> Iterable[SubTablePath]:
+ """Depth-first search tree of BaseTables.
+
+ Args:
+ root (BaseTable): the root of the tree.
+ root_accessor (Optional[str]): attribute name for the root table, if any (mostly
+ useful for debugging).
+ skip_root (Optional[bool]): if True, the root itself is not visited, only its
+ children.
+ predicate (Optional[Callable[[SubTablePath], bool]]): function to filter out
+ paths. If it returns True, the path is yielded and its subtables are added
+ to the queue. If it returns False, the path is skipped and its subtables
+ are not traversed.
+
+ Yields:
+ SubTablePath: tuples of BaseTable.SubTableEntry(name, table, index) namedtuples
+ for each of the nodes in the tree. The last entry in a path is the current
+ subtable, whereas preceding ones refer to its parent tables all the way up to
+ the root.
+ """
+ yield from _traverse_ot_data(
+ root,
+ root_accessor,
+ skip_root,
+ predicate,
+ lambda frontier, new: frontier.extendleft(reversed(new)),
+ )
+
+
+def bfs_base_table(
+ root: BaseTable,
+ root_accessor: Optional[str] = None,
+ skip_root: bool = False,
+ predicate: Optional[Callable[[SubTablePath], bool]] = None,
+) -> Iterable[SubTablePath]:
+ """Breadth-first search tree of BaseTables.
+
+ Args:
+ root (BaseTable): the root of the tree.
+ root_accessor (Optional[str]): attribute name for the root table, if any (mostly
+ useful for debugging).
+ skip_root (Optional[bool]): if True, the root itself is not visited, only its
+ children.
+ predicate (Optional[Callable[[SubTablePath], bool]]): function to filter out
+ paths. If it returns True, the path is yielded and its subtables are added
+ to the queue. If it returns False, the path is skipped and its subtables
+ are not traversed.
+
+ Yields:
+ SubTablePath: tuples of BaseTable.SubTableEntry(name, table, index) namedtuples
+ for each of the nodes in the tree. The last entry in a path is the current
+ subtable, whereas preceding ones refer to its parent tables all the way up to
+ the root.
+ """
+ yield from _traverse_ot_data(
+ root,
+ root_accessor,
+ skip_root,
+ predicate,
+ lambda frontier, new: frontier.extend(new),
+ )
+
+
+def _traverse_ot_data(
+ root: BaseTable,
+ root_accessor: Optional[str],
+ skip_root: bool,
+ predicate: Optional[Callable[[SubTablePath], bool]],
+ add_to_frontier_fn: AddToFrontierFn,
+) -> Iterable[SubTablePath]:
+ # no visited set is needed because general otData cannot cycle (forward offsets only)
+ if root_accessor is None:
+ root_accessor = type(root).__name__
+
+ if predicate is None:
+
+ def predicate(path):
+ return True
+
+ frontier: Deque[SubTablePath] = deque()
+
+ root_entry = BaseTable.SubTableEntry(root_accessor, root)
+ if not skip_root:
+ frontier.append((root_entry,))
+ else:
+ add_to_frontier_fn(
+ frontier,
+ [(root_entry, subtable_entry) for subtable_entry in root.iterSubTables()],
+ )
+
+ while frontier:
+ # path is a tuple of SubTableEntry(name, value, index); the last entry is the current subtable
+ path = frontier.popleft()
+ current = path[-1].value
+
+ if not predicate(path):
+ continue
+
+ yield SubTablePath(path)
+
+ new_entries = [
+ path + (subtable_entry,) for subtable_entry in current.iterSubTables()
+ ]
+
+ add_to_frontier_fn(frontier, new_entries)
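
A small usage sketch for the new traversal helpers (the font path is hypothetical); it prints every subtable reachable from a COLRv1 table together with its dotted path:

    from fontTools.ttLib import TTFont
    from fontTools.ttLib.tables.otTraverse import dfs_base_table

    font = TTFont("MyColorFont.ttf")            # hypothetical COLRv1 font
    colr = font["COLR"].table
    for path in dfs_base_table(colr, root_accessor="COLR"):
        subtable = path[-1].value               # last entry is the current subtable
        print(f"{path}: {type(subtable).__name__}")
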
diff --git a/Lib/fontTools/ttLib/ttFont.py b/Lib/fontTools/ttLib/ttFont.py
index d7f7ef83..327d113f 100644
--- a/Lib/fontTools/ttLib/ttFont.py
+++ b/Lib/fontTools/ttLib/ttFont.py
@@ -4,6 +4,11 @@ from fontTools.misc.configTools import AbstractConfig
from fontTools.misc.textTools import Tag, byteord, tostr
from fontTools.misc.loggingTools import deprecateArgument
from fontTools.ttLib import TTLibError
+from fontTools.ttLib.ttGlyphSet import (
+ _TTGlyphSet, _TTGlyph,
+ _TTGlyphCFF, _TTGlyphGlyf,
+ _TTVarGlyphSet,
+)
from fontTools.ttLib.sfnt import SFNTReader, SFNTWriter
from io import BytesIO, StringIO
import os
@@ -381,12 +386,14 @@ class TTFont(object):
keys = sortedTagList(keys)
return ["GlyphOrder"] + keys
- def ensureDecompiled(self):
+ def ensureDecompiled(self, recurse=None):
"""Decompile all the tables, even if a TTFont was opened in 'lazy' mode."""
for tag in self.keys():
table = self[tag]
- if self.lazy is not False and hasattr(table, "ensureDecompiled"):
- table.ensureDecompiled()
+ if recurse is None:
+ recurse = self.lazy is not False
+ if recurse and hasattr(table, "ensureDecompiled"):
+ table.ensureDecompiled(recurse=recurse)
self.lazy = False
def __len__(self):
@@ -673,7 +680,7 @@ class TTFont(object):
else:
raise KeyError(tag)
- def getGlyphSet(self, preferCFF=True):
+ def getGlyphSet(self, preferCFF=True, location=None, normalized=False):
"""Return a generic GlyphSet, which is a dict-like object
mapping glyph names to glyph objects. The returned glyph objects
have a .draw() method that supports the Pen protocol, and will
@@ -684,16 +691,28 @@ class TTFont(object):
If the font contains both a 'CFF '/'CFF2' and a 'glyf' table, you can use
the 'preferCFF' argument to specify which one should be taken. If the
font contains both a 'CFF ' and a 'CFF2' table, the latter is taken.
+
+ If the 'location' parameter is set, it should be a dictionary mapping
+ four-letter variation tags to their float values, and the returned
+ glyph-set will represent an instance of a variable font at that location.
+ If the 'normalized' argument is set to True, that location is interpreted
+ as being in the normalized (-1..+1) space, otherwise it is in the font's
+ defined axes space.
"""
glyphs = None
if (preferCFF and any(tb in self for tb in ["CFF ", "CFF2"]) or
("glyf" not in self and any(tb in self for tb in ["CFF ", "CFF2"]))):
table_tag = "CFF2" if "CFF2" in self else "CFF "
+ if location:
+ raise NotImplementedError # TODO
glyphs = _TTGlyphSet(self,
list(self[table_tag].cff.values())[0].CharStrings, _TTGlyphCFF)
if glyphs is None and "glyf" in self:
- glyphs = _TTGlyphSet(self, self["glyf"], _TTGlyphGlyf)
+ if location and 'gvar' in self:
+ glyphs = _TTVarGlyphSet(self, location=location, normalized=normalized)
+ else:
+ glyphs = _TTGlyphSet(self, self["glyf"], _TTGlyphGlyf)
if glyphs is None:
raise TTLibError("Font contains no outlines")
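
A short usage sketch for the new 'location' parameter (font path and glyph name are hypothetical); the returned variable glyph wrapper fills in its metrics when drawn:

    from fontTools.ttLib import TTFont
    from fontTools.pens.recordingPen import RecordingPen

    font = TTFont("MyVariable.ttf")                      # needs glyf + gvar
    glyphset = font.getGlyphSet(location={"wght": 700})  # user-space coordinates
    glyph = glyphset["A"]
    pen = RecordingPen()
    glyph.draw(pen)              # draw() also computes width/lsb at this location
    print(glyph.width, len(pen.value))
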
@@ -726,109 +745,6 @@ class TTFont(object):
return self["cmap"].getBestCmap(cmapPreferences=cmapPreferences)
-class _TTGlyphSet(object):
-
- """Generic dict-like GlyphSet class that pulls metrics from hmtx and
- glyph shape from TrueType or CFF.
- """
-
- def __init__(self, ttFont, glyphs, glyphType):
- """Construct a new glyphset.
-
- Args:
- font (TTFont): The font object (used to get metrics).
- glyphs (dict): A dictionary mapping glyph names to ``_TTGlyph`` objects.
- glyphType (class): Either ``_TTGlyphCFF`` or ``_TTGlyphGlyf``.
- """
- self._glyphs = glyphs
- self._hmtx = ttFont['hmtx']
- self._vmtx = ttFont['vmtx'] if 'vmtx' in ttFont else None
- self._glyphType = glyphType
-
- def keys(self):
- return list(self._glyphs.keys())
-
- def has_key(self, glyphName):
- return glyphName in self._glyphs
-
- __contains__ = has_key
-
- def __getitem__(self, glyphName):
- horizontalMetrics = self._hmtx[glyphName]
- verticalMetrics = self._vmtx[glyphName] if self._vmtx else None
- return self._glyphType(
- self, self._glyphs[glyphName], horizontalMetrics, verticalMetrics)
-
- def __len__(self):
- return len(self._glyphs)
-
- def get(self, glyphName, default=None):
- try:
- return self[glyphName]
- except KeyError:
- return default
-
-class _TTGlyph(object):
-
- """Wrapper for a TrueType glyph that supports the Pen protocol, meaning
- that it has .draw() and .drawPoints() methods that take a pen object as
- their only argument. Additionally there are 'width' and 'lsb' attributes,
- read from the 'hmtx' table.
-
- If the font contains a 'vmtx' table, there will also be 'height' and 'tsb'
- attributes.
- """
-
- def __init__(self, glyphset, glyph, horizontalMetrics, verticalMetrics=None):
- """Construct a new _TTGlyph.
-
- Args:
- glyphset (_TTGlyphSet): A glyphset object used to resolve components.
- glyph (ttLib.tables._g_l_y_f.Glyph): The glyph object.
- horizontalMetrics (int, int): The glyph's width and left sidebearing.
- """
- self._glyphset = glyphset
- self._glyph = glyph
- self.width, self.lsb = horizontalMetrics
- if verticalMetrics:
- self.height, self.tsb = verticalMetrics
- else:
- self.height, self.tsb = None, None
-
- def draw(self, pen):
- """Draw the glyph onto ``pen``. See fontTools.pens.basePen for details
- how that works.
- """
- self._glyph.draw(pen)
-
- def drawPoints(self, pen):
- # drawPoints is only implemented for _TTGlyphGlyf at this time.
- raise NotImplementedError()
-
-class _TTGlyphCFF(_TTGlyph):
- pass
-
-class _TTGlyphGlyf(_TTGlyph):
-
- def draw(self, pen):
- """Draw the glyph onto Pen. See fontTools.pens.basePen for details
- how that works.
- """
- glyfTable = self._glyphset._glyphs
- glyph = self._glyph
- offset = self.lsb - glyph.xMin if hasattr(glyph, "xMin") else 0
- glyph.draw(pen, glyfTable, offset)
-
- def drawPoints(self, pen):
- """Draw the glyph onto PointPen. See fontTools.pens.pointPen
- for details how that works.
- """
- glyfTable = self._glyphset._glyphs
- glyph = self._glyph
- offset = self.lsb - glyph.xMin if hasattr(glyph, "xMin") else 0
- glyph.drawPoints(pen, glyfTable, offset)
-
-
class GlyphOrder(object):
"""A pseudo table. The glyph order isn't in the font as a separate
diff --git a/Lib/fontTools/ttLib/ttGlyphSet.py b/Lib/fontTools/ttLib/ttGlyphSet.py
new file mode 100644
index 00000000..be26215b
--- /dev/null
+++ b/Lib/fontTools/ttLib/ttGlyphSet.py
@@ -0,0 +1,221 @@
+"""GlyphSets returned by a TTFont."""
+
+from fontTools.misc.fixedTools import otRound
+from copy import copy
+
+class _TTGlyphSet(object):
+
+ """Generic dict-like GlyphSet class that pulls metrics from hmtx and
+ glyph shape from TrueType or CFF.
+ """
+
+ def __init__(self, ttFont, glyphs, glyphType):
+ """Construct a new glyphset.
+
+ Args:
+ font (TTFont): The font object (used to get metrics).
+ glyphs (dict): A dictionary mapping glyph names to ``_TTGlyph`` objects.
+ glyphType (class): Either ``_TTGlyphCFF`` or ``_TTGlyphGlyf``.
+ """
+ self._glyphs = glyphs
+ self._hmtx = ttFont['hmtx']
+ self._vmtx = ttFont['vmtx'] if 'vmtx' in ttFont else None
+ self._glyphType = glyphType
+
+ def keys(self):
+ return list(self._glyphs.keys())
+
+ def has_key(self, glyphName):
+ return glyphName in self._glyphs
+
+ __contains__ = has_key
+
+ def __getitem__(self, glyphName):
+ horizontalMetrics = self._hmtx[glyphName]
+ verticalMetrics = self._vmtx[glyphName] if self._vmtx else None
+ return self._glyphType(
+ self, self._glyphs[glyphName], horizontalMetrics, verticalMetrics)
+
+ def __len__(self):
+ return len(self._glyphs)
+
+ def get(self, glyphName, default=None):
+ try:
+ return self[glyphName]
+ except KeyError:
+ return default
+
+class _TTGlyph(object):
+
+ """Wrapper for a TrueType glyph that supports the Pen protocol, meaning
+ that it has .draw() and .drawPoints() methods that take a pen object as
+ their only argument. Additionally there are 'width' and 'lsb' attributes,
+ read from the 'hmtx' table.
+
+ If the font contains a 'vmtx' table, there will also be 'height' and 'tsb'
+ attributes.
+ """
+
+ def __init__(self, glyphset, glyph, horizontalMetrics, verticalMetrics=None):
+ """Construct a new _TTGlyph.
+
+ Args:
+ glyphset (_TTGlyphSet): A glyphset object used to resolve components.
+ glyph (ttLib.tables._g_l_y_f.Glyph): The glyph object.
+ horizontalMetrics (int, int): The glyph's width and left sidebearing.
+ """
+ self._glyphset = glyphset
+ self._glyph = glyph
+ self.width, self.lsb = horizontalMetrics
+ if verticalMetrics:
+ self.height, self.tsb = verticalMetrics
+ else:
+ self.height, self.tsb = None, None
+
+ def draw(self, pen):
+ """Draw the glyph onto ``pen``. See fontTools.pens.basePen for details
+ how that works.
+ """
+ self._glyph.draw(pen)
+
+ def drawPoints(self, pen):
+ from fontTools.pens.pointPen import SegmentToPointPen
+ self.draw(SegmentToPointPen(pen))
+
+class _TTGlyphCFF(_TTGlyph):
+ pass
+
+class _TTGlyphGlyf(_TTGlyph):
+
+ def draw(self, pen):
+ """Draw the glyph onto Pen. See fontTools.pens.basePen for details
+ how that works.
+ """
+ glyfTable = self._glyphset._glyphs
+ glyph = self._glyph
+ offset = self.lsb - glyph.xMin if hasattr(glyph, "xMin") else 0
+ glyph.draw(pen, glyfTable, offset)
+
+ def drawPoints(self, pen):
+ """Draw the glyph onto PointPen. See fontTools.pens.pointPen
+ for details how that works.
+ """
+ glyfTable = self._glyphset._glyphs
+ glyph = self._glyph
+ offset = self.lsb - glyph.xMin if hasattr(glyph, "xMin") else 0
+ glyph.drawPoints(pen, glyfTable, offset)
+
+
+
+class _TTVarGlyphSet(_TTGlyphSet):
+
+ def __init__(self, font, location, normalized=False):
+ self._ttFont = font
+ self._glyphs = font['glyf']
+
+ if not normalized:
+ from fontTools.varLib.models import normalizeLocation, piecewiseLinearMap
+
+ axes = {a.axisTag: (a.minValue, a.defaultValue, a.maxValue) for a in font['fvar'].axes}
+ location = normalizeLocation(location, axes)
+ if 'avar' in font:
+ avar = font['avar']
+ avarSegments = avar.segments
+ new_location = {}
+ for axis_tag, value in location.items():
+ avarMapping = avarSegments.get(axis_tag, None)
+ if avarMapping is not None:
+ value = piecewiseLinearMap(value, avarMapping)
+ new_location[axis_tag] = value
+ location = new_location
+ del new_location
+
+ self.location = location
+
+ def __getitem__(self, glyphName):
+ if glyphName not in self._glyphs:
+ raise KeyError(glyphName)
+ return _TTVarGlyphGlyf(self._ttFont, glyphName, self.location)
+
+
+def _setCoordinates(glyph, coord, glyfTable):
+ # Handle phantom points for (left, right, top, bottom) positions.
+ assert len(coord) >= 4
+ if not hasattr(glyph, 'xMin'):
+ glyph.recalcBounds(glyfTable)
+ leftSideX = coord[-4][0]
+ rightSideX = coord[-3][0]
+ topSideY = coord[-2][1]
+ bottomSideY = coord[-1][1]
+
+ for _ in range(4):
+ del coord[-1]
+
+ if glyph.isComposite():
+ assert len(coord) == len(glyph.components)
+ for p,comp in zip(coord, glyph.components):
+ if hasattr(comp, 'x'):
+ comp.x,comp.y = p
+ elif glyph.numberOfContours == 0:
+ assert len(coord) == 0
+ else:
+ assert len(coord) == len(glyph.coordinates)
+ glyph.coordinates = coord
+
+ glyph.recalcBounds(glyfTable)
+
+ horizontalAdvanceWidth = otRound(rightSideX - leftSideX)
+ verticalAdvanceWidth = otRound(topSideY - bottomSideY)
+ leftSideBearing = otRound(glyph.xMin - leftSideX)
+ topSideBearing = otRound(topSideY - glyph.yMax)
+ return (
+ horizontalAdvanceWidth,
+ leftSideBearing,
+ verticalAdvanceWidth,
+ topSideBearing,
+ )
+
+
+class _TTVarGlyph(_TTGlyph):
+ def __init__(self, ttFont, glyphName, location):
+ self._ttFont = ttFont
+ self._glyphName = glyphName
+ self._location = location
+ # draw() fills these in
+ self.width = self.height = self.lsb = self.tsb = None
+
+
+class _TTVarGlyphGlyf(_TTVarGlyph):
+
+ def draw(self, pen):
+ from fontTools.varLib.iup import iup_delta
+ from fontTools.ttLib.tables._g_l_y_f import GlyphCoordinates
+ from fontTools.varLib.models import supportScalar
+
+ glyf = self._ttFont['glyf']
+ hMetrics = self._ttFont['hmtx'].metrics
+ vMetrics = getattr(self._ttFont.get('vmtx'), 'metrics', None)
+
+ variations = self._ttFont['gvar'].variations[self._glyphName]
+ coordinates, _ = glyf._getCoordinatesAndControls(self._glyphName, hMetrics, vMetrics)
+ origCoords, endPts = None, None
+ for var in variations:
+ scalar = supportScalar(self._location, var.axes)
+ if not scalar:
+ continue
+ delta = var.coordinates
+ if None in delta:
+ if origCoords is None:
+ origCoords,control = glyf._getCoordinatesAndControls(self._glyphName, hMetrics, vMetrics)
+ endPts = control[1] if control[0] >= 1 else list(range(len(control[1])))
+ delta = iup_delta(delta, origCoords, endPts)
+ coordinates += GlyphCoordinates(delta) * scalar
+
+ glyph = copy(glyf[self._glyphName]) # Shallow copy
+ width, lsb, height, tsb = _setCoordinates(glyph, coordinates, glyf)
+ self.width = width
+ self.lsb = lsb
+ self.height = height
+ self.tsb = tsb
+ offset = lsb - glyph.xMin if hasattr(glyph, "xMin") else 0
+ glyph.draw(pen, glyf, offset)
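
The per-tuple weighting in _TTVarGlyphGlyf.draw comes from fontTools.varLib.models.supportScalar; a quick illustration of the values it returns for a hypothetical tuple region supported on normalized wght 0..1 with its peak at 1 (each delta set is then scaled by this factor before being added to the glyph's coordinates):

    from fontTools.varLib.models import supportScalar

    axes = {"wght": (0.0, 1.0, 1.0)}                   # (start, peak, end), normalized
    assert supportScalar({"wght": 1.0}, axes) == 1.0   # at the peak
    assert supportScalar({"wght": 0.5}, axes) == 0.5   # halfway up the ramp
    assert supportScalar({"wght": 0.0}, axes) == 0.0   # at the default
    assert supportScalar({}, axes) == 0.0              # missing axis counts as 0
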
diff --git a/Lib/fontTools/ttLib/ttVisitor.py b/Lib/fontTools/ttLib/ttVisitor.py
new file mode 100644
index 00000000..54db61b1
--- /dev/null
+++ b/Lib/fontTools/ttLib/ttVisitor.py
@@ -0,0 +1,32 @@
+"""Specialization of fontTools.misc.visitor to work with TTFont."""
+
+from fontTools.misc.visitor import Visitor
+from fontTools.ttLib import TTFont
+
+
+class TTVisitor(Visitor):
+ def visitAttr(self, obj, attr, value, *args, **kwargs):
+ if isinstance(value, TTFont):
+ return False
+ super().visitAttr(obj, attr, value, *args, **kwargs)
+
+ def visit(self, obj, *args, **kwargs):
+ if hasattr(obj, "ensureDecompiled"):
+ obj.ensureDecompiled(recurse=False)
+ super().visit(obj, *args, **kwargs)
+
+
+@TTVisitor.register(TTFont)
+def visit(visitor, font, *args, **kwargs):
+ # Some objects have links back to TTFont; even though we
+ # have a check in visitAttr to stop them from recursing
+ # onto TTFont, sometimes they still do, for example when
+ # someone overrides visitAttr.
+ if hasattr(visitor, "font"):
+ return False
+
+ visitor.font = font
+ for tag in font.keys():
+ visitor.visit(font[tag], *args, **kwargs)
+ del visitor.font
+ return False
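
A sketch of how a TTVisitor subclass might be used, following the same register() pattern as the TTFont visitor above (font path is hypothetical; it assumes default attribute traversal reaches the COLRv1 Paint subtables):

    from fontTools.ttLib import TTFont
    from fontTools.ttLib.ttVisitor import TTVisitor
    from fontTools.ttLib.tables import otTables as ot


    class PaintCounter(TTVisitor):
        """Count how many Paint tables a COLRv1 font contains."""
        def __init__(self):
            self.count = 0


    @PaintCounter.register(ot.Paint)
    def visit(visitor, paint):
        visitor.count += 1
        return True   # keep descending into the paint's own attributes


    font = TTFont("MyColorFont.ttf")   # hypothetical COLRv1 font
    counter = PaintCounter()
    counter.visit(font)
    print(counter.count)
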
diff --git a/Lib/fontTools/ufoLib/__init__.py b/Lib/fontTools/ufoLib/__init__.py
index bd04dd7a..fa6cb117 100755
--- a/Lib/fontTools/ufoLib/__init__.py
+++ b/Lib/fontTools/ufoLib/__init__.py
@@ -98,6 +98,11 @@ class UFOFormatVersion(tuple, _VersionTupleEnumMixin, enum.Enum):
FORMAT_2_0 = (2, 0)
FORMAT_3_0 = (3, 0)
+# Python 3.11 doesn't like when a mixin overrides a dunder method like __str__;
+# for some reason it keeps using Enum.__str__ instead, see
+# https://github.com/fonttools/fonttools/pull/2655
+UFOFormatVersion.__str__ = _VersionTupleEnumMixin.__str__
+
class UFOFileStructure(enum.Enum):
ZIP = "zip"
diff --git a/Lib/fontTools/ufoLib/glifLib.py b/Lib/fontTools/ufoLib/glifLib.py
index 89c9176a..7d28eaf7 100755
--- a/Lib/fontTools/ufoLib/glifLib.py
+++ b/Lib/fontTools/ufoLib/glifLib.py
@@ -79,6 +79,9 @@ class GLIFFormatVersion(tuple, _VersionTupleEnumMixin, enum.Enum):
versions.add(cls.FORMAT_2_0)
return frozenset(versions)
+# workaround for py3.11, see https://github.com/fonttools/fonttools/pull/2655
+GLIFFormatVersion.__str__ = _VersionTupleEnumMixin.__str__
+
# ------------
# Simple Glyph
diff --git a/Lib/fontTools/varLib/__init__.py b/Lib/fontTools/varLib/__init__.py
index 4029a107..f1ca99ff 100644
--- a/Lib/fontTools/varLib/__init__.py
+++ b/Lib/fontTools/varLib/__init__.py
@@ -30,13 +30,15 @@ from fontTools.ttLib.tables.TupleVariation import TupleVariation
from fontTools.ttLib.tables import otTables as ot
from fontTools.ttLib.tables.otBase import OTTableWriter
from fontTools.varLib import builder, models, varStore
-from fontTools.varLib.merger import VariationMerger
+from fontTools.varLib.merger import VariationMerger, COLRVariationMerger
from fontTools.varLib.mvar import MVAR_ENTRIES
from fontTools.varLib.iup import iup_delta_optimize
from fontTools.varLib.featureVars import addFeatureVariations
from fontTools.designspaceLib import DesignSpaceDocument, InstanceDescriptor
from fontTools.designspaceLib.split import splitInterpolable, splitVariableFonts
from fontTools.varLib.stat import buildVFStatTable
+from fontTools.colorLib.builder import buildColrV1
+from fontTools.colorLib.unbuilder import unbuildColrV1
from functools import partial
from collections import OrderedDict, namedtuple
import os.path
@@ -486,7 +488,7 @@ def _get_advance_metrics(font, masterModel, master_ttfs,
vOrigMap[glyphName] = storeBuilder.storeDeltas(deltas, round=noRound)
indirectStore = storeBuilder.finish()
- mapping2 = indirectStore.optimize()
+ mapping2 = indirectStore.optimize(use_NO_VARIATION_INDEX=False)
advMapping = [mapping2[advMapping[g]] for g in glyphOrder]
advanceMapping = builder.buildVarIdxMap(advMapping, glyphOrder)
@@ -606,7 +608,7 @@ def _add_BASE(font, masterModel, master_ttfs, axisTags):
merger.mergeTables(font, master_ttfs, ['BASE'])
store = merger.store_builder.finish()
- if not store.VarData:
+ if not store:
return
base = font['BASE'].table
assert base.Version == 0x00010000
@@ -621,7 +623,7 @@ def _merge_OTL(font, model, master_fonts, axisTags):
merger.mergeTables(font, master_fonts, ['GSUB', 'GDEF', 'GPOS'])
store = merger.store_builder.finish()
- if not store.VarData:
+ if not store:
return
try:
GDEF = font['GDEF'].table
@@ -711,6 +713,19 @@ def _add_CFF2(varFont, model, master_fonts):
merge_region_fonts(varFont, model, ordered_fonts_list, glyphOrder)
+def _add_COLR(font, model, master_fonts, axisTags, colr_layer_reuse=True):
+ merger = COLRVariationMerger(model, axisTags, font, allowLayerReuse=colr_layer_reuse)
+ merger.mergeTables(font, master_fonts)
+ store = merger.store_builder.finish()
+
+ colr = font["COLR"].table
+ if store:
+ mapping = store.optimize()
+ colr.VarStore = store
+ varIdxes = [mapping[v] for v in merger.varIdxes]
+ colr.VarIndexMap = builder.buildDeltaSetIndexMap(varIdxes)
+
+
def load_designspace(designspace):
# TODO: remove this and always assume 'designspace' is a DesignSpaceDocument,
# never a file path, as that's already handled by caller
@@ -865,7 +880,14 @@ def set_default_weight_width_slant(font, location):
font["post"].italicAngle = italicAngle
-def build_many(designspace: DesignSpaceDocument, master_finder=lambda s:s, exclude=[], optimize=True, skip_vf=lambda vf_name: False):
+def build_many(
+ designspace: DesignSpaceDocument,
+ master_finder=lambda s:s,
+ exclude=[],
+ optimize=True,
+ skip_vf=lambda vf_name: False,
+ colr_layer_reuse=True,
+):
"""
Build variable fonts from a designspace file, version 5 which can define
several VFs, or version 4 which has implicitly one VF covering the whole doc.
@@ -890,14 +912,21 @@ def build_many(designspace: DesignSpaceDocument, master_finder=lambda s:s, exclu
vfDoc,
master_finder,
exclude=list(exclude) + ["STAT"],
- optimize=optimize
+ optimize=optimize,
+ colr_layer_reuse=colr_layer_reuse,
)[0]
if "STAT" not in exclude:
buildVFStatTable(vf, designspace, name)
res[name] = vf
return res
-def build(designspace, master_finder=lambda s:s, exclude=[], optimize=True):
+def build(
+ designspace,
+ master_finder=lambda s:s,
+ exclude=[],
+ optimize=True,
+ colr_layer_reuse=True,
+):
"""
Build variation font from a designspace file.
@@ -975,6 +1004,8 @@ def build(designspace, master_finder=lambda s:s, exclude=[], optimize=True):
post.formatType = 2.0
post.extraNames = []
post.mapping = {}
+ if 'COLR' not in exclude and 'COLR' in vf and vf['COLR'].version > 0:
+ _add_COLR(vf, model, master_fonts, axisTags, colr_layer_reuse)
set_default_weight_width_slant(
vf, location={axis.axisTag: axis.defaultValue for axis in vf["fvar"].axes}
@@ -1083,6 +1114,12 @@ def main(args=None):
help='do not perform IUP optimization'
)
parser.add_argument(
+ '--no-colr-layer-reuse',
+ dest='colr_layer_reuse',
+ action='store_false',
+ help='do not rebuild variable COLR table to optimize COLR layer reuse',
+ )
+ parser.add_argument(
'--master-finder',
default='master_ttf_interpolatable/{stem}.ttf',
help=(
@@ -1120,7 +1157,8 @@ def main(args=None):
designspace_filename,
finder,
exclude=options.exclude,
- optimize=options.optimize
+ optimize=options.optimize,
+ colr_layer_reuse=options.colr_layer_reuse,
)
outfile = options.outfile
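
A usage sketch of the new knob from Python (designspace and output paths are hypothetical); the same switch is exposed on the command line as --no-colr-layer-reuse:

    from fontTools.varLib import build

    vf, model, master_ttfs = build(
        "MyFamily.designspace",   # hypothetical designspace with COLRv1 masters
        colr_layer_reuse=False,   # keep layers as-is instead of re-optimizing reuse
    )
    vf.save("MyFamily-VF.ttf")
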
diff --git a/Lib/fontTools/varLib/cff.py b/Lib/fontTools/varLib/cff.py
index 08ddfc41..727efa70 100644
--- a/Lib/fontTools/varLib/cff.py
+++ b/Lib/fontTools/varLib/cff.py
@@ -639,6 +639,7 @@ class CFF2CharStringMergePen(T2CharStringPen):
# convert to deltas
deltas = get_delta_func(coord)[1:]
coord = [coord[0]] + deltas
+ coord.append(1)
new_coords.append(coord)
cmd[1] = new_coords
lastOp = op
diff --git a/Lib/fontTools/varLib/errors.py b/Lib/fontTools/varLib/errors.py
index c5a149cb..4f30f901 100644
--- a/Lib/fontTools/varLib/errors.py
+++ b/Lib/fontTools/varLib/errors.py
@@ -30,12 +30,8 @@ class VarLibMergeError(VarLibError):
def _master_name(self, ix):
if self.merger is not None:
ttf = self.merger.ttfs[ix]
- if (
- "name" in ttf
- and ttf["name"].getDebugName(1)
- and ttf["name"].getDebugName(2)
- ):
- return ttf["name"].getDebugName(1) + " " + ttf["name"].getDebugName(2)
+ if "name" in ttf and ttf["name"].getBestFullName():
+ return ttf["name"].getBestFullName()
elif hasattr(ttf.reader, "file") and hasattr(ttf.reader.file, "name"):
return ttf.reader.file.name
return f"master number {ix}"
@@ -46,7 +42,10 @@ class VarLibMergeError(VarLibError):
index = [x == self.cause["expected"] for x in self.cause["got"]].index(
False
)
- return index, self._master_name(index)
+ master_name = self._master_name(index)
+ if "location" in self.cause:
+ master_name = f"{master_name} ({self.cause['location']})"
+ return index, master_name
return None, None
@property
@@ -54,7 +53,7 @@ class VarLibMergeError(VarLibError):
if "expected" in self.cause and "got" in self.cause:
offender_index, offender = self.offender
got = self.cause["got"][offender_index]
- return f"Expected to see {self.stack[0]}=={self.cause['expected']}, instead saw {got}\n"
+ return f"Expected to see {self.stack[0]}=={self.cause['expected']!r}, instead saw {got!r}\n"
return ""
def __str__(self):
@@ -76,11 +75,21 @@ class ShouldBeConstant(VarLibMergeError):
@property
def details(self):
+ basic_message = super().details
+
if self.stack[0] != ".FeatureCount" or self.merger is None:
- return super().details
- offender_index, offender = self.offender
+ return basic_message
+
+ assert self.stack[0] == ".FeatureCount"
+ offender_index, _ = self.offender
bad_ttf = self.merger.ttfs[offender_index]
- good_ttf = self.merger.ttfs[offender_index - 1]
+ good_ttf = next(
+ ttf
+ for ttf in self.merger.ttfs
+ if self.stack[-1] in ttf
+ and ttf[self.stack[-1]].table.FeatureList.FeatureCount
+ == self.cause["expected"]
+ )
good_features = [
x.FeatureTag
@@ -90,7 +99,7 @@ class ShouldBeConstant(VarLibMergeError):
x.FeatureTag
for x in bad_ttf[self.stack[-1]].table.FeatureList.FeatureRecord
]
- return (
+ return basic_message + (
"\nIncompatible features between masters.\n"
f"Expected: {', '.join(good_features)}.\n"
f"Got: {', '.join(bad_features)}.\n"
@@ -111,6 +120,20 @@ class FoundANone(VarLibMergeError):
return f"{stack[0]}=={cause['got']}\n"
+class NotANone(VarLibMergeError):
+ """one of the values in a list was not empty when it should have been"""
+
+ @property
+ def offender(self):
+ index = [x is not None for x in self.cause["got"]].index(True)
+ return index, self._master_name(index)
+
+ @property
+ def details(self):
+ cause, stack = self.cause, self.stack
+ return f"{stack[0]}=={cause['got']}\n"
+
+
class MismatchedTypes(VarLibMergeError):
"""data had inconsistent types"""
@@ -134,12 +157,20 @@ class InconsistentExtensions(VarLibMergeError):
class UnsupportedFormat(VarLibMergeError):
"""an OpenType subtable (%s) had a format I didn't expect"""
+ def __init__(self, merger=None, **kwargs):
+ super().__init__(merger, **kwargs)
+ if not self.stack:
+ self.stack = [".Format"]
+
@property
def reason(self):
- return self.__doc__ % self.cause["subtable"]
+ s = self.__doc__ % self.cause["subtable"]
+ if "value" in self.cause:
+ s += f" ({self.cause['value']!r})"
+ return s
-class UnsupportedFormat(UnsupportedFormat):
+class InconsistentFormats(UnsupportedFormat):
"""an OpenType subtable (%s) had inconsistent formats between masters"""
diff --git a/Lib/fontTools/varLib/featureVars.py b/Lib/fontTools/varLib/featureVars.py
index e3366327..ad47ab8e 100644
--- a/Lib/fontTools/varLib/featureVars.py
+++ b/Lib/fontTools/varLib/featureVars.py
@@ -44,6 +44,10 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag='rvrn'):
# >>> f.save(dstPath)
"""
+ _checkSubstitutionGlyphsExist(
+ glyphNames=set(font.getGlyphOrder()),
+ substitutions=conditionalSubstitutions,
+ )
substitutions = overlayFeatureVariations(conditionalSubstitutions)
@@ -66,6 +70,18 @@ def addFeatureVariations(font, conditionalSubstitutions, featureTag='rvrn'):
conditionsAndLookups,
featureTag)
+def _checkSubstitutionGlyphsExist(glyphNames, substitutions):
+ referencedGlyphNames = set()
+ for _, substitution in substitutions:
+ referencedGlyphNames |= substitution.keys()
+ referencedGlyphNames |= set(substitution.values())
+ missing = referencedGlyphNames - glyphNames
+ if missing:
+ raise VarLibValidationError(
+ "Missing glyphs are referenced in conditional substitution rules:"
+ f" {', '.join(missing)}"
+ )
+
def overlayFeatureVariations(conditionalSubstitutions):
"""Compute overlaps between all conditional substitutions.
diff --git a/Lib/fontTools/varLib/instancer/__init__.py b/Lib/fontTools/varLib/instancer/__init__.py
index 6dad393e..8f976123 100644
--- a/Lib/fontTools/varLib/instancer/__init__.py
+++ b/Lib/fontTools/varLib/instancer/__init__.py
@@ -90,12 +90,11 @@ from fontTools.varLib import builder
from fontTools.varLib.mvar import MVAR_ENTRIES
from fontTools.varLib.merger import MutatorMerger
from fontTools.varLib.instancer import names
-from contextlib import contextmanager
+from fontTools.misc.cliTools import makeOutputFileName
import collections
from copy import deepcopy
from enum import IntEnum
import logging
-from itertools import islice
import os
import re
@@ -329,7 +328,9 @@ def limitTupleVariationAxisRange(var, axisTag, axisRange):
return [var, newVar]
-def _instantiateGvarGlyph(glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=True):
+def _instantiateGvarGlyph(
+ glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=True
+):
coordinates, ctrl = glyf._getCoordinatesAndControls(glyphname, hMetrics, vMetrics)
endPts = ctrl.endPts
@@ -365,22 +366,26 @@ def _instantiateGvarGlyph(glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits,
for var in tupleVarStore:
var.optimize(coordinates, endPts, isComposite)
+
def instantiateGvarGlyph(varfont, glyphname, axisLimits, optimize=True):
"""Remove?
https://github.com/fonttools/fonttools/pull/2266"""
gvar = varfont["gvar"]
glyf = varfont["glyf"]
- hMetrics = varfont['hmtx'].metrics
- vMetrics = getattr(varfont.get('vmtx'), 'metrics', None)
- _instantiateGvarGlyph(glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=optimize)
+ hMetrics = varfont["hmtx"].metrics
+ vMetrics = getattr(varfont.get("vmtx"), "metrics", None)
+ _instantiateGvarGlyph(
+ glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=optimize
+ )
+
def instantiateGvar(varfont, axisLimits, optimize=True):
log.info("Instantiating glyf/gvar tables")
gvar = varfont["gvar"]
glyf = varfont["glyf"]
- hMetrics = varfont['hmtx'].metrics
- vMetrics = getattr(varfont.get('vmtx'), 'metrics', None)
+ hMetrics = varfont["hmtx"].metrics
+ vMetrics = getattr(varfont.get("vmtx"), "metrics", None)
# Get list of glyph names sorted by component depth.
# If a composite glyph is processed before its base glyph, the bounds may
# be calculated incorrectly because deltas haven't been applied to the
@@ -395,7 +400,9 @@ def instantiateGvar(varfont, axisLimits, optimize=True):
),
)
for glyphname in glyphnames:
- _instantiateGvarGlyph(glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=optimize)
+ _instantiateGvarGlyph(
+ glyphname, glyf, gvar, hMetrics, vMetrics, axisLimits, optimize=optimize
+ )
if not gvar.variations:
del varfont["gvar"]
@@ -485,7 +492,7 @@ def _instantiateVHVAR(varfont, axisLimits, tableFields):
# or AdvHeightMap. If a direct, implicit glyphID->VariationIndex mapping is
# used for advances, skip re-optimizing and maintain original VariationIndex.
if getattr(vhvar, tableFields.advMapping):
- varIndexMapping = varStore.optimize()
+ varIndexMapping = varStore.optimize(use_NO_VARIATION_INDEX=False)
glyphOrder = varfont.getGlyphOrder()
_remapVarIdxMap(vhvar, tableFields.advMapping, varIndexMapping, glyphOrder)
if getattr(vhvar, tableFields.sb1): # left or top sidebearings
@@ -633,6 +640,7 @@ def instantiateItemVariationStore(itemVarStore, fvarAxes, axisLimits):
for major, deltas in enumerate(defaultDeltaArray)
for minor, delta in enumerate(deltas)
}
+ defaultDeltas[itemVarStore.NO_VARIATION_INDEX] = 0
return defaultDeltas
@@ -745,23 +753,7 @@ def _limitFeatureVariationConditionRange(condition, axisRange):
values = [minValue, maxValue]
for i, value in enumerate(values):
- if value < 0:
- if axisRange.minimum == 0:
- newValue = 0
- else:
- newValue = value / abs(axisRange.minimum)
- if newValue <= -1.0:
- newValue = -1.0
- elif value > 0:
- if axisRange.maximum == 0:
- newValue = 0
- else:
- newValue = value / axisRange.maximum
- if newValue >= 1.0:
- newValue = 1.0
- else:
- newValue = 0
- values[i] = newValue
+ values[i] = normalizeValue(value, (axisRange.minimum, 0, axisRange.maximum))
return AxisRange(*values)
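
The refactor above (and the instantiateAvar change further down) lean on fontTools.varLib.models.normalizeValue; a couple of worked values for a hypothetical axis limited to the normalized range [-0.5, +1.0]:

    from fontTools.varLib.models import normalizeValue

    limits = (-0.5, 0, 1.0)                       # (minimum, default, maximum)
    assert normalizeValue(-0.25, limits) == -0.5  # scaled against the new minimum
    assert normalizeValue(0.5, limits) == 0.5     # scaled against the new maximum
    assert normalizeValue(2.0, limits) == 1.0     # values are clamped to the range
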
@@ -806,12 +798,12 @@ def _instantiateFeatureVariationRecord(
return applies, shouldKeep
-def _limitFeatureVariationRecord(record, axisRanges, fvarAxes):
+def _limitFeatureVariationRecord(record, axisRanges, axisOrder):
newConditions = []
for i, condition in enumerate(record.ConditionSet.ConditionTable):
if condition.Format == 1:
axisIdx = condition.AxisIndex
- axisTag = fvarAxes[axisIdx].axisTag
+ axisTag = axisOrder[axisIdx]
if axisTag in axisRanges:
axisRange = axisRanges[axisTag]
newRange = _limitFeatureVariationConditionRange(condition, axisRange)
@@ -855,7 +847,7 @@ def _instantiateFeatureVariations(table, fvarAxes, axisLimits):
record, i, location, fvarAxes, axisIndexMap
)
if shouldKeep:
- shouldKeep = _limitFeatureVariationRecord(record, axisRanges, fvarAxes)
+ shouldKeep = _limitFeatureVariationRecord(record, axisRanges, axisOrder)
if shouldKeep and _featureVariationRecordIsUnique(record, uniqueRecords):
newRecords.append(record)
@@ -938,24 +930,16 @@ def instantiateAvar(varfont, axisLimits):
)
newMapping = {}
for fromCoord, toCoord in mapping.items():
- if fromCoord < 0:
- if axisRange.minimum == 0 or fromCoord < axisRange.minimum:
- continue
- else:
- fromCoord /= abs(axisRange.minimum)
- elif fromCoord > 0:
- if axisRange.maximum == 0 or fromCoord > axisRange.maximum:
- continue
- else:
- fromCoord /= axisRange.maximum
- if toCoord < 0:
- assert mappedMin != 0
- assert toCoord >= mappedMin
- toCoord /= abs(mappedMin)
- elif toCoord > 0:
- assert mappedMax != 0
- assert toCoord <= mappedMax
- toCoord /= mappedMax
+
+ if fromCoord < axisRange.minimum or fromCoord > axisRange.maximum:
+ continue
+ fromCoord = normalizeValue(
+ fromCoord, (axisRange.minimum, 0, axisRange.maximum)
+ )
+
+ assert mappedMin <= toCoord <= mappedMax
+ toCoord = normalizeValue(toCoord, (mappedMin, 0, mappedMax))
+
fromCoord = floatToFixedToFloat(fromCoord, 14)
toCoord = floatToFixedToFloat(toCoord, 14)
newMapping[fromCoord] = toCoord
@@ -1199,10 +1183,10 @@ def instantiateVariableFont(
requires the skia-pathops package (available to pip install).
The overlap parameter only has effect when generating full static instances.
updateFontNames (bool): if True, update the instantiated font's name table using
- the Axis Value Tables from the STAT table. The name table will be updated so
- it conforms to the R/I/B/BI model. If the STAT table is missing or
- an Axis Value table is missing for a given axis coordinate, a ValueError will
- be raised.
+ the Axis Value Tables from the STAT table. The name table and the style bits
+ in the head and OS/2 table will be updated so they conform to the R/I/B/BI
+ model. If the STAT table is missing or an Axis Value table is missing for
+ a given axis coordinate, a ValueError will be raised.
"""
# 'overlap' used to be bool and is now enum; for backward compat keep accepting bool
overlap = OverlapMode(int(overlap))
@@ -1272,9 +1256,51 @@ def instantiateVariableFont(
},
)
+ if updateFontNames:
+ # Set Regular/Italic/Bold/Bold Italic bits as appropriate, after the
+ # name table has been updated.
+ setRibbiBits(varfont)
+
return varfont
+def setRibbiBits(font):
+ """Set the `head.macStyle` and `OS/2.fsSelection` style bits
+ appropriately."""
+
+ english_ribbi_style = font["name"].getName(names.NameID.SUBFAMILY_NAME, 3, 1, 0x409)
+ if english_ribbi_style is None:
+ return
+
+ styleMapStyleName = english_ribbi_style.toStr().lower()
+ if styleMapStyleName not in {"regular", "bold", "italic", "bold italic"}:
+ return
+
+ if styleMapStyleName == "bold":
+ font["head"].macStyle = 0b01
+ elif styleMapStyleName == "bold italic":
+ font["head"].macStyle = 0b11
+ elif styleMapStyleName == "italic":
+ font["head"].macStyle = 0b10
+
+ selection = font["OS/2"].fsSelection
+ # First clear...
+ selection &= ~(1 << 0)
+ selection &= ~(1 << 5)
+ selection &= ~(1 << 6)
+ # ...then re-set the bits.
+ if styleMapStyleName == "regular":
+ selection |= 1 << 6
+ elif styleMapStyleName == "bold":
+ selection |= 1 << 5
+ elif styleMapStyleName == "italic":
+ selection |= 1 << 0
+ elif styleMapStyleName == "bold italic":
+ selection |= 1 << 0
+ selection |= 1 << 5
+ font["OS/2"].fsSelection = selection
+
+
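For reference (not part of the patch): head.macStyle uses bit 0 for Bold and bit 1 for Italic, while OS/2.fsSelection uses bit 0 for ITALIC, bit 5 for BOLD and bit 6 for REGULAR, which is why the code clears those three bits before re-setting the ones matching the style name. The same clear-then-set dance on a bare integer:

    # Hypothetical fsSelection being switched from "Regular" to "Bold Italic".
    fsSelection = 0b1000000                           # bit 6: REGULAR
    fsSelection &= ~((1 << 0) | (1 << 5) | (1 << 6))  # clear ITALIC, BOLD, REGULAR
    fsSelection |= (1 << 0) | (1 << 5)                # set ITALIC and BOLD
    assert fsSelection == 0b0100001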
def splitAxisLocationAndRanges(axisLimits, rangeType=AxisRange):
location, axisRanges = {}, {}
for axisTag, value in axisLimits.items():
@@ -1380,6 +1406,18 @@ def parseArgs(args):
help="Update the instantiated font's `name` table. Input font must have "
"a STAT table with Axis Value Tables",
)
+ parser.add_argument(
+ "--no-recalc-timestamp",
+ dest="recalc_timestamp",
+ action="store_false",
+ help="Don't set the output font's timestamp to the current time.",
+ )
+ parser.add_argument(
+ "--no-recalc-bounds",
+ dest="recalc_bounds",
+ action="store_false",
+ help="Don't recalculate font bounding boxes",
+ )
loggingGroup = parser.add_mutually_exclusive_group(required=False)
loggingGroup.add_argument(
"-v", "--verbose", action="store_true", help="Run more verbosely."
@@ -1417,12 +1455,16 @@ def parseArgs(args):
def main(args=None):
- """Partially instantiate a variable font."""
+ """Partially instantiate a variable font"""
infile, axisLimits, options = parseArgs(args)
log.info("Restricting axes: %s", axisLimits)
log.info("Loading variable font")
- varfont = TTFont(infile)
+ varfont = TTFont(
+ infile,
+ recalcTimestamp=options.recalc_timestamp,
+ recalcBBoxes=options.recalc_bounds,
+ )
isFullInstance = {
axisTag for axisTag, limit in axisLimits.items() if not isinstance(limit, tuple)
@@ -1437,9 +1479,9 @@ def main(args=None):
updateFontNames=options.update_name_table,
)
+ suffix = "-instance" if isFullInstance else "-partial"
outfile = (
- os.path.splitext(infile)[0]
- + "-{}.ttf".format("instance" if isFullInstance else "partial")
+ makeOutputFileName(infile, overWrite=True, suffix=suffix)
if not options.output
else options.output
)
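makeOutputFileName (from fontTools.misc.cliTools) keeps the input file's directory and extension and appends the given suffix; with overWrite=True it does not probe for existing files. A quick sketch of the expected naming (file names invented):

    from fontTools.misc.cliTools import makeOutputFileName

    assert makeOutputFileName("MyFont-VF.ttf", overWrite=True, suffix="-partial") == "MyFont-VF-partial.ttf"
    assert makeOutputFileName("MyFont-VF.otf", overWrite=True, suffix="-instance") == "MyFont-VF-instance.otf"

Unlike the old string formatting, this preserves the original extension instead of always writing .ttf.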
diff --git a/Lib/fontTools/varLib/interpolatable.py b/Lib/fontTools/varLib/interpolatable.py
index a9583a18..f86b6f9b 100644
--- a/Lib/fontTools/varLib/interpolatable.py
+++ b/Lib/fontTools/varLib/interpolatable.py
@@ -16,12 +16,12 @@ import itertools
import sys
def _rot_list(l, k):
- """Rotate list by k items forward. Ie. item at position 0 will be
- at position k in returned list. Negative k is allowed."""
- n = len(l)
- k %= n
- if not k: return l
- return l[n-k:] + l[:n-k]
+ """Rotate list by k items forward. Ie. item at position 0 will be
+ at position k in returned list. Negative k is allowed."""
+ n = len(l)
+ k %= n
+ if not k: return l
+ return l[n-k:] + l[:n-k]
class PerContourPen(BasePen):
@@ -361,20 +361,69 @@ def main(args=None):
from os.path import basename
- names = [basename(filename).rsplit(".", 1)[0] for filename in args.inputs]
-
fonts = []
+ names = []
+
+ if len(args.inputs) == 1:
+ if args.inputs[0].endswith('.designspace'):
+ from fontTools.designspaceLib import DesignSpaceDocument
+ designspace = DesignSpaceDocument.fromfile(args.inputs[0])
+ args.inputs = [master.path for master in designspace.sources]
+
+ elif args.inputs[0].endswith('.glyphs'):
+ from glyphsLib import GSFont, to_ufos
+ gsfont = GSFont(args.inputs[0])
+ fonts.extend(to_ufos(gsfont))
+ names = ['%s-%s' % (f.info.familyName, f.info.styleName) for f in fonts]
+ args.inputs = []
+
+ elif args.inputs[0].endswith('.ttf'):
+ from fontTools.ttLib import TTFont
+ font = TTFont(args.inputs[0])
+ if 'gvar' in font:
+ # Is variable font
+ gvar = font['gvar']
+ # Gather all "master" locations
+ locs = set()
+ for variations in gvar.variations.values():
+ for var in variations:
+ loc = []
+ for tag,val in sorted(var.axes.items()):
+ loc.append((tag,val[1]))
+ locs.add(tuple(loc))
+ # Rebuild locs as dictionaries
+ new_locs = [{}]
+ for loc in sorted(locs, key=lambda v: (len(v), v)):
+ names.append(str(loc))
+ l = {}
+ for tag,val in loc:
+ l[tag] = val
+ new_locs.append(l)
+ locs = new_locs
+ del new_locs
+ # locs is all master locations now
+
+ for loc in locs:
+ fonts.append(font.getGlyphSet(location=loc, normalized=True))
+
+ args.inputs = []
+
+
for filename in args.inputs:
if filename.endswith(".ufo"):
from fontTools.ufoLib import UFOReader
-
fonts.append(UFOReader(filename))
else:
from fontTools.ttLib import TTFont
-
fonts.append(TTFont(filename))
- glyphsets = [font.getGlyphSet() for font in fonts]
+ names.append(basename(filename).rsplit(".", 1)[0])
+
+ if hasattr(fonts[0], 'getGlyphSet'):
+ glyphsets = [font.getGlyphSet() for font in fonts]
+ else:
+ glyphsets = fonts
+
problems = test(glyphsets, glyphs=glyphs, names=names)
if args.json:
import json
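The gvar branch above derives the set of master locations from the peak coordinates of every TupleVariation, then asks the font for a normalized glyph set at each of them. A rough standalone sketch of the same idea (the helper name and structure are ad hoc, not part of interpolatable):

    from fontTools.ttLib import TTFont

    def gvar_master_locations(path):
        """Collect the peak location of every gvar tuple, plus the default."""
        font = TTFont(path)
        locations = {()}  # the default (all-zero) location
        for variations in font["gvar"].variations.values():
            for var in variations:
                # var.axes maps tag -> (start, peak, end); the peak identifies the master
                peak = tuple(sorted((tag, triple[1]) for tag, triple in var.axes.items()))
                locations.add(peak)
        return [dict(loc) for loc in sorted(locations, key=lambda v: (len(v), v))]

Each returned dict can then be fed to font.getGlyphSet(location=..., normalized=True), exactly as the code above does.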
diff --git a/Lib/fontTools/varLib/iup.py b/Lib/fontTools/varLib/iup.py
index 45a7a5ed..9c5bc35b 100644
--- a/Lib/fontTools/varLib/iup.py
+++ b/Lib/fontTools/varLib/iup.py
@@ -1,4 +1,47 @@
-def iup_segment(coords, rc1, rd1, rc2, rd2):
+from typing import (
+ Sequence,
+ Tuple,
+ Union,
+)
+from numbers import (
+ Integral,
+ Real
+)
+
+try:
+ import cython
+except ImportError:
+ # if cython not installed, use mock module with no-op decorators and types
+ from fontTools.misc import cython
+
+if cython.compiled:
+ # Yep, I'm compiled.
+ COMPILED = True
+else:
+ # Just a lowly interpreted script.
+ COMPILED = False
+
+
+_Point = Tuple[Real, Real]
+_Delta = Tuple[Real, Real]
+_PointSegment = Sequence[_Point]
+_DeltaSegment = Sequence[_Delta]
+_DeltaOrNone = Union[_Delta, None]
+_DeltaOrNoneSegment = Sequence[_DeltaOrNone]
+_Endpoints = Sequence[Integral]
+
+
+MAX_LOOKBACK = 8
+
+def iup_segment(coords : _PointSegment,
+ rc1 : _Point,
+ rd1 : _Delta,
+ rc2 : _Point,
+ rd2 : _Delta) -> _DeltaSegment:
+ """Given two reference coordinates `rc1` & `rc2` and their respective
+ delta vectors `rd1` & `rd2`, returns interpolated deltas for the set of
+ coordinates `coords`. """
+
# rc1 = reference coord 1
# rd1 = reference delta 1
out_arrays = [None, None]
@@ -6,7 +49,6 @@ def iup_segment(coords, rc1, rd1, rc2, rd2):
out_arrays[j] = out = []
x1, x2, d1, d2 = rc1[j], rc2[j], rd1[j], rd2[j]
-
if x1 == x2:
n = len(coords)
if d1 == d2:
@@ -36,14 +78,20 @@ def iup_segment(coords, rc1, rd1, rc2, rd2):
return zip(*out_arrays)
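A worked example of the interpolation above may help: with reference points at x=0 and x=100 carrying x-deltas 10 and 20, a point at x=50 receives the linearly interpolated delta 15, while the constant y axis interpolates to 0 (values invented):

    from fontTools.varLib.iup import iup_segment

    deltas = list(iup_segment([(50, 0)],           # coords to interpolate
                              (0, 0), (10, 0),     # rc1, rd1
                              (100, 0), (20, 0)))  # rc2, rd2
    assert deltas == [(15.0, 0)]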
-def iup_contour(delta, coords):
- assert len(delta) == len(coords)
- if None not in delta:
- return delta
+def iup_contour(deltas : _DeltaOrNoneSegment,
+ coords : _PointSegment) -> _DeltaSegment:
+ """For the contour given in `coords`, interpolate any missing
+ delta values in delta vector `deltas`.
+
+ Returns fully filled-out delta vector."""
- n = len(delta)
+ assert len(deltas) == len(coords)
+ if None not in deltas:
+ return deltas
+
+ n = len(deltas)
# indices of points with explicit deltas
- indices = [i for i,v in enumerate(delta) if v is not None]
+ indices = [i for i,v in enumerate(deltas) if v is not None]
if not indices:
# All deltas are None. Return 0,0 for all.
return [(0,0)]*n
@@ -54,23 +102,31 @@ def iup_contour(delta, coords):
if start != 0:
# Initial segment that wraps around
i1, i2, ri1, ri2 = 0, start, start, indices[-1]
- out.extend(iup_segment(coords[i1:i2], coords[ri1], delta[ri1], coords[ri2], delta[ri2]))
- out.append(delta[start])
+ out.extend(iup_segment(coords[i1:i2], coords[ri1], deltas[ri1], coords[ri2], deltas[ri2]))
+ out.append(deltas[start])
for end in it:
if end - start > 1:
i1, i2, ri1, ri2 = start+1, end, start, end
- out.extend(iup_segment(coords[i1:i2], coords[ri1], delta[ri1], coords[ri2], delta[ri2]))
- out.append(delta[end])
+ out.extend(iup_segment(coords[i1:i2], coords[ri1], deltas[ri1], coords[ri2], deltas[ri2]))
+ out.append(deltas[end])
start = end
if start != n-1:
# Final segment that wraps around
i1, i2, ri1, ri2 = start+1, n, start, indices[0]
- out.extend(iup_segment(coords[i1:i2], coords[ri1], delta[ri1], coords[ri2], delta[ri2]))
+ out.extend(iup_segment(coords[i1:i2], coords[ri1], deltas[ri1], coords[ri2], deltas[ri2]))
- assert len(delta) == len(out), (len(delta), len(out))
+ assert len(deltas) == len(out), (len(deltas), len(out))
return out
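At the contour level, iup_contour fills each None from the explicit deltas surrounding the run it sits in. A toy contour (shape and values invented):

    from fontTools.varLib.iup import iup_contour

    coords = [(0, 0), (50, 0), (100, 0), (50, 100)]
    deltas = [(10, 0), None, (20, 0), None]
    assert iup_contour(deltas, coords) == [(10, 0), (15.0, 0), (20, 0), (15.0, 0)]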
-def iup_delta(delta, coords, ends):
+def iup_delta(deltas : _DeltaOrNoneSegment,
+ coords : _PointSegment,
+ ends: _Endpoints) -> _DeltaSegment:
+ """For the outline given in `coords`, with contour endpoints given
+ in sorted increasing order in `ends`, interpolate any missing
+ delta values in delta vector `deltas`.
+
+ Returns fully filled-out delta vector."""
+
assert sorted(ends) == ends and len(coords) == (ends[-1]+1 if ends else 0) + 4
n = len(coords)
ends = ends + [n-4, n-3, n-2, n-1]
@@ -78,7 +134,7 @@ def iup_delta(delta, coords, ends):
start = 0
for end in ends:
end += 1
- contour = iup_contour(delta[start:end], coords[start:end])
+ contour = iup_contour(deltas[start:end], coords[start:end])
out.extend(contour)
start = end
@@ -86,7 +142,15 @@ def iup_delta(delta, coords, ends):
# Optimizer
-def can_iup_in_between(deltas, coords, i, j, tolerance):
+def can_iup_in_between(deltas : _DeltaSegment,
+ coords : _PointSegment,
+ i : Integral,
+ j : Integral,
+ tolerance : Real) -> bool:
+    """Return True if the deltas for points at `i` and `j` (`i < j`) can be
+    successfully used to interpolate deltas for the points in between them
+    within the provided error tolerance."""
+
assert j - i >= 2
interp = list(iup_segment(coords[i+1:j], coords[i], deltas[i], coords[j], deltas[j]))
deltas = deltas[i+1:j]
@@ -95,23 +159,25 @@ def can_iup_in_between(deltas, coords, i, j, tolerance):
return all(abs(complex(x-p, y-q)) <= tolerance for (x,y),(p,q) in zip(deltas, interp))
-def _iup_contour_bound_forced_set(delta, coords, tolerance=0):
+def _iup_contour_bound_forced_set(deltas : _DeltaSegment,
+ coords : _PointSegment,
+ tolerance : Real = 0) -> set:
"""The forced set is a conservative set of points on the contour that must be encoded
explicitly (ie. cannot be interpolated). Calculating this set allows for significantly
	speeding up the dynamic-programming, as well as resolving circularity in the DP.
The set is precise; that is, if an index is in the returned set, then there is no way
- that IUP can generate delta for that point, given coords and delta.
+ that IUP can generate delta for that point, given `coords` and `deltas`.
"""
- assert len(delta) == len(coords)
+ assert len(deltas) == len(coords)
+ n = len(deltas)
forced = set()
# Track "last" and "next" points on the contour as we sweep.
- nd, nc = delta[0], coords[0]
- ld, lc = delta[-1], coords[-1]
- for i in range(len(delta)-1, -1, -1):
- d, c = ld, lc
- ld, lc = delta[i-1], coords[i-1]
+ for i in range(len(deltas)-1, -1, -1):
+ ld, lc = deltas[i-1], coords[i-1]
+ d, c = deltas[i], coords[i]
+ nd, nc = deltas[i-n+1], coords[i-n+1]
for j in (0,1): # For X and for Y
cj = c[j]
@@ -128,42 +194,48 @@ def _iup_contour_bound_forced_set(delta, coords, tolerance=0):
c1, c2 = ncj, lcj
d1, d2 = ndj, ldj
+ force = False
+
+ # If the two coordinates are the same, then the interpolation
+ # algorithm produces the same delta if both deltas are equal,
+ # and zero if they differ.
+ #
+ # This test has to be before the next one.
+ if c1 == c2:
+ if abs(d1 - d2) > tolerance and abs(dj) > tolerance:
+ force = True
+
# If coordinate for current point is between coordinate of adjacent
# points on the two sides, but the delta for current point is NOT
# between delta for those adjacent points (considering tolerance
# allowance), then there is no way that current point can be IUP-ed.
# Mark it forced.
- force = False
- if c1 <= cj <= c2:
+ elif c1 <= cj <= c2: # and c1 != c2
if not (min(d1,d2)-tolerance <= dj <= max(d1,d2)+tolerance):
force = True
+
+ # Otherwise, the delta should either match the closest, or have the
+ # same sign as the interpolation of the two deltas.
else: # cj < c1 or c2 < cj
- if c1 == c2:
- if d1 == d2:
- if abs(dj - d1) > tolerance:
- force = True
- else:
- if abs(dj) > tolerance:
- # Disabled the following because the "d1 == d2" does
- # check does not take tolerance into consideration...
- pass # force = True
- elif d1 != d2:
+ if d1 != d2:
if cj < c1:
- if dj != d1 and ((dj-tolerance < d1) != (d1 < d2)):
+ if abs(dj) > tolerance and abs(dj - d1) > tolerance and ((dj-tolerance < d1) != (d1 < d2)):
force = True
else: # c2 < cj
- if d2 != dj and ((d2 < dj+tolerance) != (d1 < d2)):
+ if abs(dj) > tolerance and abs(dj - d2) > tolerance and ((d2 < dj+tolerance) != (d1 < d2)):
force = True
if force:
forced.add(i)
break
- nd, nc = d, c
-
return forced
-def _iup_contour_optimize_dp(delta, coords, forced={}, tolerance=0, lookback=None):
+def _iup_contour_optimize_dp(deltas : _DeltaSegment,
+ coords : _PointSegment,
+ forced={},
+ tolerance : Real = 0,
+ lookback : Integral =None):
"""Straightforward Dynamic-Programming. For each index i, find least-costly encoding of
points 0 to i where i is explicitly encoded. We find this by considering all previous
explicit points j and check whether interpolation can fill points between j and i.
@@ -173,9 +245,10 @@ def _iup_contour_optimize_dp(delta, coords, forced={}, tolerance=0, lookback=Non
As major speedup, we stop looking further whenever we see a "forced" point."""
- n = len(delta)
+ n = len(deltas)
if lookback is None:
lookback = n
+ lookback = min(lookback, MAX_LOOKBACK)
costs = {-1:0}
chain = {-1:None}
for i in range(0, n):
@@ -191,7 +264,7 @@ def _iup_contour_optimize_dp(delta, coords, forced={}, tolerance=0, lookback=Non
cost = costs[j] + 1
- if cost < best_cost and can_iup_in_between(delta, coords, j, i, tolerance):
+ if cost < best_cost and can_iup_in_between(deltas, coords, j, i, tolerance):
costs[i] = best_cost = cost
chain[i] = j
@@ -200,7 +273,7 @@ def _iup_contour_optimize_dp(delta, coords, forced={}, tolerance=0, lookback=Non
return chain, costs
-def _rot_list(l, k):
+def _rot_list(l : list, k : int):
"""Rotate list by k items forward. Ie. item at position 0 will be
at position k in returned list. Negative k is allowed."""
n = len(l)
@@ -208,48 +281,62 @@ def _rot_list(l, k):
if not k: return l
return l[n-k:] + l[:n-k]
-def _rot_set(s, k, n):
+def _rot_set(s : set, k : int, n : int):
k %= n
if not k: return s
return {(v + k) % n for v in s}
-def iup_contour_optimize(delta, coords, tolerance=0.):
- n = len(delta)
+def iup_contour_optimize(deltas : _DeltaSegment,
+ coords : _PointSegment,
+ tolerance : Real = 0.) -> _DeltaOrNoneSegment:
+ """For contour with coordinates `coords`, optimize a set of delta
+ values `deltas` within error `tolerance`.
+
+    Returns a delta vector in which as many deltas as possible have been
+    replaced by None, to be used in place of the input deltas.
+ """
+
+ n = len(deltas)
# Get the easy cases out of the way:
# If all are within tolerance distance of 0, encode nothing:
- if all(abs(complex(*p)) <= tolerance for p in delta):
+ if all(abs(complex(*p)) <= tolerance for p in deltas):
return [None] * n
# If there's exactly one point, return it:
if n == 1:
- return delta
+ return deltas
# If all deltas are exactly the same, return just one (the first one):
- d0 = delta[0]
- if all(d0 == d for d in delta):
+ d0 = deltas[0]
+ if all(d0 == d for d in deltas):
return [d0] + [None] * (n-1)
# Else, solve the general problem using Dynamic Programming.
- forced = _iup_contour_bound_forced_set(delta, coords, tolerance)
+ forced = _iup_contour_bound_forced_set(deltas, coords, tolerance)
# The _iup_contour_optimize_dp() routine returns the optimal encoding
# solution given the constraint that the last point is always encoded.
# To remove this constraint, we use two different methods, depending on
# whether forced set is non-empty or not:
+    # Debugging: make the next `if` always take the second branch and check
+    # whether the font size shrinks; if it does, the forced set has members
+    # it should not have.
if forced:
# Forced set is non-empty: rotate the contour start point
# such that the last point in the list is a forced point.
k = (n-1) - max(forced)
assert k >= 0
- delta = _rot_list(delta, k)
+ deltas = _rot_list(deltas, k)
coords = _rot_list(coords, k)
forced = _rot_set(forced, k, n)
- chain, costs = _iup_contour_optimize_dp(delta, coords, forced, tolerance)
+        # Debugging: pass an empty set() instead of the forced variable to the
+        # next call to check whether the forced-set computation under-counts.
+ chain, costs = _iup_contour_optimize_dp(deltas, coords, forced, tolerance)
# Assemble solution.
solution = set()
@@ -257,18 +344,25 @@ def iup_contour_optimize(delta, coords, tolerance=0.):
while i is not None:
solution.add(i)
i = chain[i]
+ solution.remove(-1)
+
+ #if not forced <= solution:
+ # print("coord", coords)
+ # print("deltas", deltas)
+ # print("len", len(deltas))
assert forced <= solution, (forced, solution)
- delta = [delta[i] if i in solution else None for i in range(n)]
- delta = _rot_list(delta, -k)
+ deltas = [deltas[i] if i in solution else None for i in range(n)]
+
+ deltas = _rot_list(deltas, -k)
else:
- # Repeat the contour an extra time, solve the 2*n case, then look for solutions of the
- # circular n-length problem in the solution for 2*n linear case. I cannot prove that
+ # Repeat the contour an extra time, solve the new case, then look for solutions of the
+        # circular n-length problem in the solution for the new linear case. I cannot prove that
# this always produces the optimal solution...
- chain, costs = _iup_contour_optimize_dp(delta+delta, coords+coords, forced, tolerance, n)
+ chain, costs = _iup_contour_optimize_dp(deltas+deltas, coords+coords, forced, tolerance, n)
best_sol, best_cost = None, n+1
- for start in range(n-1, 2*n-1):
+ for start in range(n-1, len(costs) - 1):
# Assemble solution.
solution = set()
i = start
@@ -280,19 +374,35 @@ def iup_contour_optimize(delta, coords, tolerance=0.):
if cost <= best_cost:
best_sol, best_cost = solution, cost
- delta = [delta[i] if i in best_sol else None for i in range(n)]
+ #if not forced <= best_sol:
+ # print("coord", coords)
+ # print("deltas", deltas)
+ # print("len", len(deltas))
+ assert forced <= best_sol, (forced, best_sol)
+
+ deltas = [deltas[i] if i in best_sol else None for i in range(n)]
- return delta
+ return deltas
-def iup_delta_optimize(delta, coords, ends, tolerance=0.):
+def iup_delta_optimize(deltas : _DeltaSegment,
+ coords : _PointSegment,
+ ends : _Endpoints,
+ tolerance : Real = 0.) -> _DeltaOrNoneSegment:
+ """For the outline given in `coords`, with contour endpoints given
+ in sorted increasing order in `ends`, optimize a set of delta
+ values `deltas` within error `tolerance`.
+
+    Returns a delta vector in which as many deltas as possible have been
+    replaced by None, to be used in place of the input deltas.
+ """
assert sorted(ends) == ends and len(coords) == (ends[-1]+1 if ends else 0) + 4
n = len(coords)
ends = ends + [n-4, n-3, n-2, n-1]
out = []
start = 0
for end in ends:
- contour = iup_contour_optimize(delta[start:end+1], coords[start:end+1], tolerance)
+ contour = iup_contour_optimize(deltas[start:end+1], coords[start:end+1], tolerance)
assert len(contour) == end - start + 1
out.extend(contour)
start = end+1
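A round trip ties the two halves of the module together: iup_delta_optimize drops every delta that IUP can reconstruct, and iup_delta recovers the full vector. A minimal sketch with one square contour plus the four phantom points (values invented, default tolerance of 0):

    from fontTools.varLib.iup import iup_delta, iup_delta_optimize

    coords = [(0, 0), (100, 0), (100, 100), (0, 100)] + [(0, 0)] * 4  # contour + phantoms
    deltas = [(10, 0)] * 4 + [(0, 0)] * 4
    ends = [3]

    sparse = iup_delta_optimize(deltas, coords, ends)
    # All four contour deltas are equal, so one explicit delta suffices, and the
    # all-zero phantom deltas need not be encoded at all.
    assert sparse == [(10, 0), None, None, None, None, None, None, None]
    assert list(iup_delta(sparse, coords, ends)) == deltas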
diff --git a/Lib/fontTools/varLib/merger.py b/Lib/fontTools/varLib/merger.py
index 3e5d2a9b..c9a1d3e3 100644
--- a/Lib/fontTools/varLib/merger.py
+++ b/Lib/fontTools/varLib/merger.py
@@ -3,15 +3,20 @@ Merge OpenType Layout tables (GDEF / GPOS / GSUB).
"""
import os
import copy
+import enum
from operator import ior
import logging
+from fontTools.colorLib.builder import MAX_PAINT_COLR_LAYER_COUNT, LayerReuseCache
from fontTools.misc import classifyTools
from fontTools.misc.roundTools import otRound
+from fontTools.misc.treeTools import build_n_ary_tree
from fontTools.ttLib.tables import otTables as ot
from fontTools.ttLib.tables import otBase as otBase
+from fontTools.ttLib.tables.otConverters import BaseFixedValue
+from fontTools.ttLib.tables.otTraverse import dfs_base_table
from fontTools.ttLib.tables.DefaultTable import DefaultTable
from fontTools.varLib import builder, models, varStore
-from fontTools.varLib.models import nonNone, allNone, allEqual, allEqualTo
+from fontTools.varLib.models import nonNone, allNone, allEqual, allEqualTo, subList
from fontTools.varLib.varStore import VarStoreInstancer
from functools import reduce
from fontTools.otlLib.builder import buildSinglePos
@@ -26,11 +31,12 @@ from .errors import (
ShouldBeConstant,
FoundANone,
MismatchedTypes,
+ NotANone,
LengthsDiffer,
KeysDiffer,
InconsistentGlyphOrder,
InconsistentExtensions,
- UnsupportedFormat,
+ InconsistentFormats,
UnsupportedFormat,
VarLibMergeError,
)
@@ -39,13 +45,15 @@ class Merger(object):
def __init__(self, font=None):
self.font = font
+ # mergeTables populates this from the parent's master ttfs
+ self.ttfs = None
@classmethod
def merger(celf, clazzes, attrs=(None,)):
assert celf != Merger, 'Subclass Merger instead.'
if 'mergers' not in celf.__dict__:
celf.mergers = {}
- if type(clazzes) == type:
+ if type(clazzes) in (type, enum.EnumMeta):
clazzes = (clazzes,)
if type(attrs) == str:
attrs = (attrs,)
@@ -81,10 +89,10 @@ class Merger(object):
def mergeObjects(self, out, lst, exclude=()):
if hasattr(out, "ensureDecompiled"):
- out.ensureDecompiled()
+ out.ensureDecompiled(recurse=False)
for item in lst:
if hasattr(item, "ensureDecompiled"):
- item.ensureDecompiled()
+ item.ensureDecompiled(recurse=False)
keys = sorted(vars(out).keys())
if not all(keys == sorted(vars(v).keys()) for v in lst):
raise KeysDiffer(self, expected=keys,
@@ -122,6 +130,11 @@ class Merger(object):
mergerFunc = self.mergersFor(out).get(None, None)
if mergerFunc is not None:
mergerFunc(self, out, lst)
+ elif isinstance(out, enum.Enum):
+ # need to special-case Enums as have __dict__ but are not regular 'objects',
+ # otherwise mergeObjects/mergeThings get trapped in a RecursionError
+ if not allEqualTo(out, lst):
+ raise ShouldBeConstant(self, expected=out, got=lst)
elif hasattr(out, '__dict__'):
self.mergeObjects(out, lst)
elif isinstance(out, list):
@@ -134,9 +147,8 @@ class Merger(object):
for tag in tableTags:
if tag not in font: continue
try:
- self.ttfs = [m for m in master_ttfs if tag in m]
- self.mergeThings(font[tag], [m[tag] if tag in m else None
- for m in master_ttfs])
+ self.ttfs = master_ttfs
+ self.mergeThings(font[tag], [m.get(tag) for m in master_ttfs])
except VarLibMergeError as e:
e.stack.append(tag)
raise
@@ -216,6 +228,20 @@ def _merge_GlyphOrders(font, lst, values_lst=None, default=None):
for dict_set in dict_sets]
return order, padded
+@AligningMerger.merger(otBase.ValueRecord)
+def merge(merger, self, lst):
+ # Code below sometimes calls us with self being
+ # a new object. Copy it from lst and recurse.
+ self.__dict__ = lst[0].__dict__.copy()
+ merger.mergeObjects(self, lst)
+
+@AligningMerger.merger(ot.Anchor)
+def merge(merger, self, lst):
+ # Code below sometimes calls us with self being
+ # a new object. Copy it from lst and recurse.
+ self.__dict__ = lst[0].__dict__.copy()
+ merger.mergeObjects(self, lst)
+
def _Lookup_SinglePos_get_effective_value(merger, subtables, glyph):
for self in subtables:
if self is None or \
@@ -1036,11 +1062,19 @@ class VariationMerger(AligningMerger):
def mergeThings(self, out, lst):
masterModel = None
+ origTTFs = None
if None in lst:
if allNone(lst):
if out is not None:
raise FoundANone(self, got=lst)
return
+
+ # temporarily subset the list of master ttfs to the ones for which
+ # master values are not None
+ origTTFs = self.ttfs
+ if self.ttfs:
+ self.ttfs = subList([v is not None for v in lst], self.ttfs)
+
masterModel = self.model
model, lst = masterModel.getSubModel(lst)
self.setModel(model)
@@ -1049,6 +1083,8 @@ class VariationMerger(AligningMerger):
if masterModel:
self.setModel(masterModel)
+ if origTTFs:
+ self.ttfs = origTTFs
def buildVarDevTable(store_builder, master_values):
@@ -1099,3 +1135,408 @@ def merge(merger, self, lst):
setattr(self, name, value)
if deviceTable:
setattr(self, tableName, deviceTable)
+
+
+class COLRVariationMerger(VariationMerger):
+ """A specialized VariationMerger that takes multiple master fonts containing
+ COLRv1 tables, and builds a variable COLR font.
+
+ COLR tables are special in that variable subtables can be associated with
+ multiple delta-set indices (via VarIndexBase).
+ They also contain tables that must change their type (not simply the Format)
+ as they become variable (e.g. Affine2x3 -> VarAffine2x3) so this merger takes
+ care of that too.
+ """
+
+ def __init__(self, model, axisTags, font, allowLayerReuse=True):
+ VariationMerger.__init__(self, model, axisTags, font)
+ # maps {tuple(varIdxes): VarIndexBase} to facilitate reuse of VarIndexBase
+ # between variable tables with same varIdxes.
+ self.varIndexCache = {}
+ # flat list of all the varIdxes generated while merging
+ self.varIdxes = []
+ # set of id()s of the subtables that contain variations after merging
+ # and need to be upgraded to the associated VarType.
+ self.varTableIds = set()
+ # we keep these around for rebuilding a LayerList while merging PaintColrLayers
+ self.layers = []
+ self.layerReuseCache = None
+ if allowLayerReuse:
+ self.layerReuseCache = LayerReuseCache()
+ # flag to ensure BaseGlyphList is fully merged before LayerList gets processed
+ self._doneBaseGlyphs = False
+
+ def mergeTables(self, font, master_ttfs, tableTags=("COLR",)):
+ if "COLR" in tableTags and "COLR" in font:
+ # The merger modifies the destination COLR table in-place. If this contains
+ # multiple PaintColrLayers referencing the same layers from LayerList, it's
+ # a problem because we may risk modifying the same paint more than once, or
+ # worse, fail while attempting to do that.
+ # We don't know whether the master COLR table was built with layer reuse
+ # disabled, thus to be safe we rebuild its LayerList so that it contains only
+ # unique layers referenced from non-overlapping PaintColrLayers throughout
+ # the base paint graphs.
+ self.expandPaintColrLayers(font["COLR"].table)
+ VariationMerger.mergeTables(self, font, master_ttfs, tableTags)
+
+ def checkFormatEnum(self, out, lst, validate=lambda _: True):
+ fmt = out.Format
+ formatEnum = out.formatEnum
+ ok = False
+ try:
+ fmt = formatEnum(fmt)
+ except ValueError:
+ pass
+ else:
+ ok = validate(fmt)
+ if not ok:
+ raise UnsupportedFormat(
+ self, subtable=type(out).__name__, value=fmt
+ )
+ expected = fmt
+ got = []
+ for v in lst:
+ fmt = getattr(v, "Format", None)
+ try:
+ fmt = formatEnum(fmt)
+ except ValueError:
+ pass
+ got.append(fmt)
+ if not allEqualTo(expected, got):
+ raise InconsistentFormats(
+ self,
+ subtable=type(out).__name__,
+ expected=expected,
+ got=got,
+ )
+ return expected
+
+ def mergeSparseDict(self, out, lst):
+ for k in out.keys():
+ try:
+ self.mergeThings(out[k], [v.get(k) for v in lst])
+ except VarLibMergeError as e:
+ e.stack.append(f"[{k!r}]")
+ raise
+
+ def mergeAttrs(self, out, lst, attrs):
+ for attr in attrs:
+ value = getattr(out, attr)
+ values = [getattr(item, attr) for item in lst]
+ try:
+ self.mergeThings(value, values)
+ except VarLibMergeError as e:
+ e.stack.append(f".{attr}")
+ raise
+
+ def storeMastersForAttr(self, out, lst, attr):
+ master_values = [getattr(item, attr) for item in lst]
+
+ # VarStore treats deltas for fixed-size floats as integers, so we
+ # must convert master values to int before storing them in the builder
+ # then back to float.
+ is_fixed_size_float = False
+ conv = out.getConverterByName(attr)
+ if isinstance(conv, BaseFixedValue):
+ is_fixed_size_float = True
+ master_values = [conv.toInt(v) for v in master_values]
+
+ baseValue = master_values[0]
+ varIdx = ot.NO_VARIATION_INDEX
+ if not allEqual(master_values):
+ baseValue, varIdx = self.store_builder.storeMasters(master_values)
+
+ if is_fixed_size_float:
+ baseValue = conv.fromInt(baseValue)
+
+ return baseValue, varIdx
+
+ def storeVariationIndices(self, varIdxes) -> int:
+ # try to reuse an existing VarIndexBase for the same varIdxes, or else
+ # create a new one
+ key = tuple(varIdxes)
+ varIndexBase = self.varIndexCache.get(key)
+
+ if varIndexBase is None:
+            # scan for a full match anywhere in self.varIdxes
+ for i in range(len(self.varIdxes) - len(varIdxes) + 1):
+ if self.varIdxes[i:i+len(varIdxes)] == varIdxes:
+ self.varIndexCache[key] = varIndexBase = i
+ break
+
+ if varIndexBase is None:
+            # try to find a partial match at the end of self.varIdxes
+ for n in range(len(varIdxes)-1, 0, -1):
+ if self.varIdxes[-n:] == varIdxes[:n]:
+ varIndexBase = len(self.varIdxes) - n
+ self.varIndexCache[key] = varIndexBase
+ self.varIdxes.extend(varIdxes[n:])
+ break
+
+ if varIndexBase is None:
+ # no match found, append at the end
+ self.varIndexCache[key] = varIndexBase = len(self.varIdxes)
+ self.varIdxes.extend(varIdxes)
+
+ return varIndexBase
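The method above packs every table's varIdxes into one shared flat list so that several tables can address it through a single VarIndexBase. A toy re-statement of the three fallbacks (cache hit aside) on plain lists; the helper name is illustrative only:

    def store_indices(all_idxes, idxes):
        """Return a base offset for `idxes` inside `all_idxes`, extending it if needed."""
        n = len(idxes)
        # 1) full match anywhere in the existing list
        for i in range(len(all_idxes) - n + 1):
            if all_idxes[i:i + n] == idxes:
                return i
        # 2) partial match: the tail of the list is a prefix of `idxes`
        for k in range(n - 1, 0, -1):
            if all_idxes[-k:] == idxes[:k]:
                all_idxes.extend(idxes[k:])
                return len(all_idxes) - n
        # 3) no overlap: append at the end
        all_idxes.extend(idxes)
        return len(all_idxes) - n

    shared = [7, 8, 9]
    assert store_indices(shared, [8, 9]) == 1   # full match, nothing appended
    assert store_indices(shared, [9, 4]) == 2   # tail overlap, only the 4 appended
    assert shared == [7, 8, 9, 4]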
+
+ def mergeVariableAttrs(self, out, lst, attrs) -> int:
+ varIndexBase = ot.NO_VARIATION_INDEX
+ varIdxes = []
+ for attr in attrs:
+ baseValue, varIdx = self.storeMastersForAttr(out, lst, attr)
+ setattr(out, attr, baseValue)
+ varIdxes.append(varIdx)
+
+ if any(v != ot.NO_VARIATION_INDEX for v in varIdxes):
+ varIndexBase = self.storeVariationIndices(varIdxes)
+
+ return varIndexBase
+
+ @classmethod
+ def convertSubTablesToVarType(cls, table):
+ for path in dfs_base_table(
+ table,
+ skip_root=True,
+ predicate=lambda path: (
+ getattr(type(path[-1].value), "VarType", None) is not None
+ )
+ ):
+ st = path[-1]
+ subTable = st.value
+ varType = type(subTable).VarType
+ newSubTable = varType()
+ newSubTable.__dict__.update(subTable.__dict__)
+ newSubTable.populateDefaults()
+ parent = path[-2].value
+ if st.index is not None:
+ getattr(parent, st.name)[st.index] = newSubTable
+ else:
+ setattr(parent, st.name, newSubTable)
+
+ @staticmethod
+ def expandPaintColrLayers(colr):
+ """Rebuild LayerList without PaintColrLayers reuse.
+
+ Each base paint graph is fully DFS-traversed (with exception of PaintColrGlyph
+ which are irrelevant for this); any layers referenced via PaintColrLayers are
+ collected into a new LayerList and duplicated when reuse is detected, to ensure
+ that all paints are distinct objects at the end of the process.
+ PaintColrLayers's FirstLayerIndex/NumLayers are updated so that no overlap
+ is left. Also, any consecutively nested PaintColrLayers are flattened.
+ The COLR table's LayerList is replaced with the new unique layers.
+ A side effect is also that any layer from the old LayerList which is not
+ referenced by any PaintColrLayers is dropped.
+ """
+ if not colr.LayerList:
+ # if no LayerList, there's nothing to expand
+ return
+ uniqueLayerIDs = set()
+ newLayerList = []
+ for rec in colr.BaseGlyphList.BaseGlyphPaintRecord:
+ frontier = [rec.Paint]
+ while frontier:
+ paint = frontier.pop()
+ if paint.Format == ot.PaintFormat.PaintColrGlyph:
+ # don't traverse these, we treat them as constant for merging
+ continue
+ elif paint.Format == ot.PaintFormat.PaintColrLayers:
+ # de-treeify any nested PaintColrLayers, append unique copies to
+ # the new layer list and update PaintColrLayers index/count
+ children = list(_flatten_layers(paint, colr))
+ first_layer_index = len(newLayerList)
+ for layer in children:
+ if id(layer) in uniqueLayerIDs:
+ layer = copy.deepcopy(layer)
+ assert id(layer) not in uniqueLayerIDs
+ newLayerList.append(layer)
+ uniqueLayerIDs.add(id(layer))
+ paint.FirstLayerIndex = first_layer_index
+ paint.NumLayers = len(children)
+ else:
+ children = paint.getChildren(colr)
+ frontier.extend(reversed(children))
+ # sanity check all the new layers are distinct objects
+ assert len(newLayerList) == len(uniqueLayerIDs)
+ colr.LayerList.Paint = newLayerList
+ colr.LayerList.LayerCount = len(newLayerList)
+
+
+@COLRVariationMerger.merger(ot.BaseGlyphList)
+def merge(merger, self, lst):
+ # ignore BaseGlyphCount, allow sparse glyph sets across masters
+ out = {rec.BaseGlyph: rec for rec in self.BaseGlyphPaintRecord}
+ masters = [{rec.BaseGlyph: rec for rec in m.BaseGlyphPaintRecord} for m in lst]
+
+ for i, g in enumerate(out.keys()):
+ try:
+ # missing base glyphs don't participate in the merge
+ merger.mergeThings(out[g], [v.get(g) for v in masters])
+ except VarLibMergeError as e:
+ e.stack.append(f".BaseGlyphPaintRecord[{i}]")
+ e.cause["location"] = f"base glyph {g!r}"
+ raise
+
+ merger._doneBaseGlyphs = True
+
+
+@COLRVariationMerger.merger(ot.LayerList)
+def merge(merger, self, lst):
+ # nothing to merge for LayerList, assuming we have already merged all PaintColrLayers
+ # found while traversing the paint graphs rooted at BaseGlyphPaintRecords.
+ assert merger._doneBaseGlyphs, "BaseGlyphList must be merged before LayerList"
+ # Simply flush the final list of layers and go home.
+ self.LayerCount = len(merger.layers)
+ self.Paint = merger.layers
+
+
+def _flatten_layers(root, colr):
+ assert root.Format == ot.PaintFormat.PaintColrLayers
+ for paint in root.getChildren(colr):
+ if paint.Format == ot.PaintFormat.PaintColrLayers:
+ yield from _flatten_layers(paint, colr)
+ else:
+ yield paint
+
+
+def _merge_PaintColrLayers(self, out, lst):
+ # we only enforce that the (flat) number of layers is the same across all masters
+    # but we allow FirstLayerIndex to differ to accommodate sparse glyph sets.
+
+ out_layers = list(_flatten_layers(out, self.font["COLR"].table))
+
+ # sanity check ttfs are subset to current values (see VariationMerger.mergeThings)
+ # before matching each master PaintColrLayers to its respective COLR by position
+ assert len(self.ttfs) == len(lst)
+ master_layerses = [
+ list(_flatten_layers(lst[i], self.ttfs[i]["COLR"].table))
+ for i in range(len(lst))
+ ]
+
+ try:
+ self.mergeLists(out_layers, master_layerses)
+ except VarLibMergeError as e:
+ # NOTE: This attribute doesn't actually exist in PaintColrLayers but it's
+ # handy to have it in the stack trace for debugging.
+ e.stack.append(".Layers")
+ raise
+
+ # following block is very similar to LayerListBuilder._beforeBuildPaintColrLayers
+ # but I couldn't find a nice way to share the code between the two...
+
+ if self.layerReuseCache is not None:
+ # successful reuse can make the list smaller
+ out_layers = self.layerReuseCache.try_reuse(out_layers)
+
+ # if the list is still too big we need to tree-fy it
+ is_tree = len(out_layers) > MAX_PAINT_COLR_LAYER_COUNT
+ out_layers = build_n_ary_tree(out_layers, n=MAX_PAINT_COLR_LAYER_COUNT)
+
+ # We now have a tree of sequences with Paint leaves.
+ # Convert the sequences into PaintColrLayers.
+ def listToColrLayers(paint):
+ if isinstance(paint, list):
+ layers = [listToColrLayers(l) for l in paint]
+ paint = ot.Paint()
+ paint.Format = int(ot.PaintFormat.PaintColrLayers)
+ paint.NumLayers = len(layers)
+ paint.FirstLayerIndex = len(self.layers)
+ self.layers.extend(layers)
+ if self.layerReuseCache is not None:
+ self.layerReuseCache.add(layers, paint.FirstLayerIndex)
+ return paint
+
+ out_layers = [listToColrLayers(l) for l in out_layers]
+
+ if len(out_layers) == 1 and out_layers[0].Format == ot.PaintFormat.PaintColrLayers:
+ # special case when the reuse cache finds a single perfect PaintColrLayers match
+ # (it can only come from a successful reuse, _flatten_layers has gotten rid of
+ # all nested PaintColrLayers already); we assign it directly and avoid creating
+ # an extra table
+ out.NumLayers = out_layers[0].NumLayers
+ out.FirstLayerIndex = out_layers[0].FirstLayerIndex
+ else:
+ out.NumLayers = len(out_layers)
+ out.FirstLayerIndex = len(self.layers)
+
+ self.layers.extend(out_layers)
+
+ # Register our parts for reuse provided we aren't a tree
+    # If we are a tree, the leaves were already registered for reuse and that suffices
+ if self.layerReuseCache is not None and not is_tree:
+ self.layerReuseCache.add(out_layers, out.FirstLayerIndex)
+
+
+@COLRVariationMerger.merger((ot.Paint, ot.ClipBox))
+def merge(merger, self, lst):
+ fmt = merger.checkFormatEnum(self, lst, lambda fmt: not fmt.is_variable())
+
+ if fmt is ot.PaintFormat.PaintColrLayers:
+ _merge_PaintColrLayers(merger, self, lst)
+ return
+
+ varFormat = fmt.as_variable()
+
+ varAttrs = ()
+ if varFormat is not None:
+ varAttrs = otBase.getVariableAttrs(type(self), varFormat)
+ staticAttrs = (c.name for c in self.getConverters() if c.name not in varAttrs)
+
+ merger.mergeAttrs(self, lst, staticAttrs)
+
+ varIndexBase = merger.mergeVariableAttrs(self, lst, varAttrs)
+
+ subTables = [st.value for st in self.iterSubTables()]
+
+ # Convert table to variable if itself has variations or any subtables have
+    # Convert the table to variable if it has variations itself, or if any of its subtables do
+ varIndexBase != ot.NO_VARIATION_INDEX
+ or any(id(table) in merger.varTableIds for table in subTables)
+ )
+
+ if isVariable:
+ if varAttrs:
+ # Some PaintVar* don't have any scalar attributes that can vary,
+ # only indirect offsets to other variable subtables, thus have
+ # no VarIndexBase of their own (e.g. PaintVarTransform)
+ self.VarIndexBase = varIndexBase
+
+ if subTables:
+ # Convert Affine2x3 -> VarAffine2x3, ColorLine -> VarColorLine, etc.
+ merger.convertSubTablesToVarType(self)
+
+ assert varFormat is not None
+ self.Format = int(varFormat)
+
+
+@COLRVariationMerger.merger((ot.Affine2x3, ot.ColorStop))
+def merge(merger, self, lst):
+ varType = type(self).VarType
+
+ varAttrs = otBase.getVariableAttrs(varType)
+ staticAttrs = (c.name for c in self.getConverters() if c.name not in varAttrs)
+
+ merger.mergeAttrs(self, lst, staticAttrs)
+
+ varIndexBase = merger.mergeVariableAttrs(self, lst, varAttrs)
+
+ if varIndexBase != ot.NO_VARIATION_INDEX:
+ self.VarIndexBase = varIndexBase
+ # mark as having variations so the parent table will convert to Var{Type}
+ merger.varTableIds.add(id(self))
+
+
+@COLRVariationMerger.merger(ot.ColorLine)
+def merge(merger, self, lst):
+ merger.mergeAttrs(self, lst, (c.name for c in self.getConverters()))
+
+ if any(id(stop) in merger.varTableIds for stop in self.ColorStop):
+ merger.convertSubTablesToVarType(self)
+ merger.varTableIds.add(id(self))
+
+
+@COLRVariationMerger.merger(ot.ClipList, "clips")
+def merge(merger, self, lst):
+ # 'sparse' in that we allow non-default masters to omit ClipBox entries
+ # for some/all glyphs (i.e. they don't participate)
+ merger.mergeSparseDict(self, lst)
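Putting the pieces together, this merger is driven like the other VariationMergers: build a model from the normalized master locations, instantiate it with the default master, and merge the masters' COLR tables into it. A rough usage sketch (file names and locations invented; both masters are assumed to carry COLRv1 tables):

    from fontTools.ttLib import TTFont
    from fontTools.varLib.models import VariationModel
    from fontTools.varLib.merger import COLRVariationMerger

    axisTags = ["wght"]
    masters = [TTFont("MyColr-Regular.ttf"), TTFont("MyColr-Bold.ttf")]
    model = VariationModel([{}, {"wght": 1.0}], axisOrder=axisTags)

    vf = masters[0]  # the default master becomes the variable font
    merger = COLRVariationMerger(model, axisTags, vf)
    merger.mergeTables(vf, masters, tableTags=["COLR"])

    # merger.varIdxes and merger.store_builder now hold the material for the
    # DeltaSetIndexMap and the COLR VarStore that varLib attaches afterwards.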
diff --git a/Lib/fontTools/varLib/models.py b/Lib/fontTools/varLib/models.py
index c548fbca..a7e020b0 100644
--- a/Lib/fontTools/varLib/models.py
+++ b/Lib/fontTools/varLib/models.py
@@ -1,11 +1,6 @@
"""Variation fonts interpolation models."""
__all__ = [
- "nonNone",
- "allNone",
- "allEqual",
- "allEqualTo",
- "subList",
"normalizeValue",
"normalizeLocation",
"supportScalar",
@@ -50,12 +45,13 @@ def subList(truth, lst):
def normalizeValue(v, triple):
"""Normalizes value based on a min/default/max triple.
- >>> normalizeValue(400, (100, 400, 900))
- 0.0
- >>> normalizeValue(100, (100, 400, 900))
- -1.0
- >>> normalizeValue(650, (100, 400, 900))
- 0.5
+
+ >>> normalizeValue(400, (100, 400, 900))
+ 0.0
+ >>> normalizeValue(100, (100, 400, 900))
+ -1.0
+ >>> normalizeValue(650, (100, 400, 900))
+ 0.5
"""
lower, default, upper = triple
if not (lower <= default <= upper):
@@ -75,41 +71,42 @@ def normalizeValue(v, triple):
def normalizeLocation(location, axes):
"""Normalizes location based on axis min/default/max values from axes.
- >>> axes = {"wght": (100, 400, 900)}
- >>> normalizeLocation({"wght": 400}, axes)
- {'wght': 0.0}
- >>> normalizeLocation({"wght": 100}, axes)
- {'wght': -1.0}
- >>> normalizeLocation({"wght": 900}, axes)
- {'wght': 1.0}
- >>> normalizeLocation({"wght": 650}, axes)
- {'wght': 0.5}
- >>> normalizeLocation({"wght": 1000}, axes)
- {'wght': 1.0}
- >>> normalizeLocation({"wght": 0}, axes)
- {'wght': -1.0}
- >>> axes = {"wght": (0, 0, 1000)}
- >>> normalizeLocation({"wght": 0}, axes)
- {'wght': 0.0}
- >>> normalizeLocation({"wght": -1}, axes)
- {'wght': 0.0}
- >>> normalizeLocation({"wght": 1000}, axes)
- {'wght': 1.0}
- >>> normalizeLocation({"wght": 500}, axes)
- {'wght': 0.5}
- >>> normalizeLocation({"wght": 1001}, axes)
- {'wght': 1.0}
- >>> axes = {"wght": (0, 1000, 1000)}
- >>> normalizeLocation({"wght": 0}, axes)
- {'wght': -1.0}
- >>> normalizeLocation({"wght": -1}, axes)
- {'wght': -1.0}
- >>> normalizeLocation({"wght": 500}, axes)
- {'wght': -0.5}
- >>> normalizeLocation({"wght": 1000}, axes)
- {'wght': 0.0}
- >>> normalizeLocation({"wght": 1001}, axes)
- {'wght': 0.0}
+
+ >>> axes = {"wght": (100, 400, 900)}
+ >>> normalizeLocation({"wght": 400}, axes)
+ {'wght': 0.0}
+ >>> normalizeLocation({"wght": 100}, axes)
+ {'wght': -1.0}
+ >>> normalizeLocation({"wght": 900}, axes)
+ {'wght': 1.0}
+ >>> normalizeLocation({"wght": 650}, axes)
+ {'wght': 0.5}
+ >>> normalizeLocation({"wght": 1000}, axes)
+ {'wght': 1.0}
+ >>> normalizeLocation({"wght": 0}, axes)
+ {'wght': -1.0}
+ >>> axes = {"wght": (0, 0, 1000)}
+ >>> normalizeLocation({"wght": 0}, axes)
+ {'wght': 0.0}
+ >>> normalizeLocation({"wght": -1}, axes)
+ {'wght': 0.0}
+ >>> normalizeLocation({"wght": 1000}, axes)
+ {'wght': 1.0}
+ >>> normalizeLocation({"wght": 500}, axes)
+ {'wght': 0.5}
+ >>> normalizeLocation({"wght": 1001}, axes)
+ {'wght': 1.0}
+ >>> axes = {"wght": (0, 1000, 1000)}
+ >>> normalizeLocation({"wght": 0}, axes)
+ {'wght': -1.0}
+ >>> normalizeLocation({"wght": -1}, axes)
+ {'wght': -1.0}
+ >>> normalizeLocation({"wght": 500}, axes)
+ {'wght': -0.5}
+ >>> normalizeLocation({"wght": 1000}, axes)
+ {'wght': 0.0}
+ >>> normalizeLocation({"wght": 1001}, axes)
+ {'wght': 0.0}
"""
out = {}
for tag, triple in axes.items():
@@ -118,27 +115,32 @@ def normalizeLocation(location, axes):
return out
-def supportScalar(location, support, ot=True):
+def supportScalar(location, support, ot=True, extrapolate=False):
"""Returns the scalar multiplier at location, for a master
with support. If ot is True, then a peak value of zero
for support of an axis means "axis does not participate". That
is how OpenType Variation Font technology works.
- >>> supportScalar({}, {})
- 1.0
- >>> supportScalar({'wght':.2}, {})
- 1.0
- >>> supportScalar({'wght':.2}, {'wght':(0,2,3)})
- 0.1
- >>> supportScalar({'wght':2.5}, {'wght':(0,2,4)})
- 0.75
- >>> supportScalar({'wght':2.5, 'wdth':0}, {'wght':(0,2,4), 'wdth':(-1,0,+1)})
- 0.75
- >>> supportScalar({'wght':2.5, 'wdth':.5}, {'wght':(0,2,4), 'wdth':(-1,0,+1)}, ot=False)
- 0.375
- >>> supportScalar({'wght':2.5, 'wdth':0}, {'wght':(0,2,4), 'wdth':(-1,0,+1)})
- 0.75
- >>> supportScalar({'wght':2.5, 'wdth':.5}, {'wght':(0,2,4), 'wdth':(-1,0,+1)})
- 0.75
+
+ >>> supportScalar({}, {})
+ 1.0
+ >>> supportScalar({'wght':.2}, {})
+ 1.0
+ >>> supportScalar({'wght':.2}, {'wght':(0,2,3)})
+ 0.1
+ >>> supportScalar({'wght':2.5}, {'wght':(0,2,4)})
+ 0.75
+ >>> supportScalar({'wght':2.5, 'wdth':0}, {'wght':(0,2,4), 'wdth':(-1,0,+1)})
+ 0.75
+ >>> supportScalar({'wght':2.5, 'wdth':.5}, {'wght':(0,2,4), 'wdth':(-1,0,+1)}, ot=False)
+ 0.375
+ >>> supportScalar({'wght':2.5, 'wdth':0}, {'wght':(0,2,4), 'wdth':(-1,0,+1)})
+ 0.75
+ >>> supportScalar({'wght':2.5, 'wdth':.5}, {'wght':(0,2,4), 'wdth':(-1,0,+1)})
+ 0.75
+ >>> supportScalar({'wght':4}, {'wght':(0,2,3)}, extrapolate=True)
+ 2.0
+ >>> supportScalar({'wght':4}, {'wght':(0,2,2)}, extrapolate=True)
+ 2.0
"""
scalar = 1.0
for axis, (lower, peak, upper) in support.items():
@@ -156,9 +158,27 @@ def supportScalar(location, support, ot=True):
v = location[axis]
if v == peak:
continue
+
+ if extrapolate:
+ if v < -1 and lower <= -1:
+ if peak <= -1 and peak < upper:
+ scalar *= (v - upper) / (peak - upper)
+ continue
+ elif -1 < peak:
+ scalar *= (v - lower) / (peak - lower)
+ continue
+ elif +1 < v and +1 <= upper:
+ if +1 <= peak and lower < peak:
+ scalar *= (v - lower) / (peak - lower)
+ continue
+ elif peak < +1:
+ scalar *= (v - upper) / (peak - upper)
+ continue
+
if v <= lower or upper <= v:
scalar = 0.0
break
+
if v < peak:
scalar *= (v - lower) / (peak - lower)
else: # v > peak
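Working through the first new doctest: for wght=4 against support (0, 2, 3), v is above +1 and upper is at least +1, the peak is at least +1 with lower < peak, so the scalar grows along the lower-to-peak slope, (4 - 0) / (2 - 0) = 2.0, instead of being zeroed. The same effect at a smaller scale, assuming nothing beyond the code above:

    from fontTools.varLib.models import supportScalar

    # A master peaking at the end of the axis keeps scaling linearly past +1...
    assert supportScalar({"wght": 1.5}, {"wght": (0, 1, 1)}, extrapolate=True) == 1.5
    # ...whereas without extrapolation the same location falls outside the support.
    assert supportScalar({"wght": 1.5}, {"wght": (0, 1, 1)}) == 0.0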
@@ -167,10 +187,11 @@ def supportScalar(location, support, ot=True):
class VariationModel(object):
+ """Locations must have the base master at the origin (ie. 0).
- """
- Locations must be in normalized space. Ie. base master
- is at origin (0)::
+ If the extrapolate argument is set to True, then location values are
+	interpreted in the normalized space, i.e. in the [-1,+1] range, and
+ values are extrapolated outside this range.
>>> from pprint import pprint
>>> locations = [ \
@@ -210,14 +231,16 @@ class VariationModel(object):
5: 0.6666666666666667,
6: 0.4444444444444445,
7: 0.6666666666666667}]
- """
+ """
+
+ def __init__(self, locations, axisOrder=None, extrapolate=False):
- def __init__(self, locations, axisOrder=None):
if len(set(tuple(sorted(l.items())) for l in locations)) != len(locations):
raise VariationModelError("Locations must be unique.")
self.origLocations = locations
self.axisOrder = axisOrder if axisOrder is not None else []
+ self.extrapolate = extrapolate
locations = [{k: v for k, v in loc.items() if v != 0.0} for loc in locations]
keyFunc = self.getMasterLocationsSortKeyFunc(
@@ -416,7 +439,8 @@ class VariationModel(object):
return model.getDeltas(items, round=round), model.supports
def getScalars(self, loc):
- return [supportScalar(loc, support) for support in self.supports]
+ return [supportScalar(loc, support, extrapolate=self.extrapolate)
+ for support in self.supports]
@staticmethod
def interpolateFromDeltasAndScalars(deltas, scalars):
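End to end, the new extrapolate flag simply changes how getScalars evaluates each support, so deltas computed from in-range masters can be projected beyond them. A short sketch (locations and values invented):

    from fontTools.varLib.models import VariationModel

    # Two masters: the default and one at the top of the normalized wght axis.
    model = VariationModel([{}, {"wght": 1.0}], extrapolate=True)
    # Past the last master the scalars keep growing instead of clamping...
    assert model.getScalars({"wght": 1.5}) == [1.0, 1.5]
    # ...so interpolated values are extrapolated accordingly.
    assert model.interpolateFromMasters({"wght": 1.5}, [0, 100]) == 150.0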
diff --git a/Lib/fontTools/varLib/mutator.py b/Lib/fontTools/varLib/mutator.py
index 263c4e61..2e674798 100644
--- a/Lib/fontTools/varLib/mutator.py
+++ b/Lib/fontTools/varLib/mutator.py
@@ -412,6 +412,9 @@ def main(args=None):
parser.add_argument(
"-o", "--output", metavar="OUTPUT.ttf", default=None,
help="Output instance TTF file (default: INPUT-instance.ttf).")
+ parser.add_argument(
+ "--no-recalc-timestamp", dest="recalc_timestamp", action='store_false',
+ help="Don't set the output font's timestamp to the current time.")
logging_group = parser.add_mutually_exclusive_group(required=False)
logging_group.add_argument(
"-v", "--verbose", action="store_true", help="Run more verbosely.")
@@ -445,7 +448,7 @@ def main(args=None):
log.info("Location: %s", loc)
log.info("Loading variable font")
- varfont = TTFont(varfilename)
+ varfont = TTFont(varfilename, recalcTimestamp=options.recalc_timestamp)
instantiateVariableFont(varfont, loc, inplace=True, overlap=options.overlap)
diff --git a/Lib/fontTools/varLib/varStore.py b/Lib/fontTools/varLib/varStore.py
index bcf81b39..2ffc6b13 100644
--- a/Lib/fontTools/varLib/varStore.py
+++ b/Lib/fontTools/varLib/varStore.py
@@ -7,6 +7,10 @@ from functools import partial
from collections import defaultdict
+NO_VARIATION_INDEX = ot.NO_VARIATION_INDEX
+ot.VarStore.NO_VARIATION_INDEX = NO_VARIATION_INDEX
+
+
def _getLocationKey(loc):
return tuple(sorted(loc.items(), key=lambda kv: kv[0]))
@@ -135,6 +139,11 @@ def VarRegion_get_support(self, fvar_axes):
ot.VarRegion.get_support = VarRegion_get_support
+def VarStore___bool__(self):
+ return bool(self.VarData)
+
+ot.VarStore.__bool__ = VarStore___bool__
+
class VarStoreInstancer(object):
def __init__(self, varstore, fvar_axes, location={}):
@@ -169,6 +178,7 @@ class VarStoreInstancer(object):
def __getitem__(self, varidx):
major, minor = varidx >> 16, varidx & 0xFFFF
+ if varidx == NO_VARIATION_INDEX: return 0.
varData = self._varData
scalars = [self._getScalar(ri) for ri in varData[major].VarRegionIndex]
deltas = varData[major].Item[minor]
@@ -192,6 +202,8 @@ def VarStore_subset_varidxes(self, varIdxes, optimize=True, retainFirstMap=False
# Sort out used varIdxes by major/minor.
used = {}
for varIdx in varIdxes:
+ if varIdx == NO_VARIATION_INDEX:
+ continue
major = varIdx >> 16
minor = varIdx & 0xFFFF
d = used.get(major)
@@ -206,7 +218,7 @@ def VarStore_subset_varidxes(self, varIdxes, optimize=True, retainFirstMap=False
varData = self.VarData
newVarData = []
- varDataMap = {}
+ varDataMap = {NO_VARIATION_INDEX: NO_VARIATION_INDEX}
for major,data in enumerate(varData):
usedMinors = used.get(major)
if usedMinors is None:
@@ -431,7 +443,7 @@ class _EncodingDict(dict):
return chars
-def VarStore_optimize(self):
+def VarStore_optimize(self, use_NO_VARIATION_INDEX=True):
"""Optimize storage. Returns mapping from old VarIdxes to new ones."""
# TODO
@@ -455,6 +467,10 @@ def VarStore_optimize(self):
row[regionIdx] += v
row = tuple(row)
+ if use_NO_VARIATION_INDEX and not any(row):
+ front_mapping[(major<<16)+minor] = None
+ continue
+
encodings.add_row(row)
front_mapping[(major<<16)+minor] = row
@@ -537,9 +553,9 @@ def VarStore_optimize(self):
back_mapping[item] = (major<<16)+minor
# Compile final mapping.
- varidx_map = {}
+ varidx_map = {NO_VARIATION_INDEX:NO_VARIATION_INDEX}
for k,v in front_mapping.items():
- varidx_map[k] = back_mapping[v]
+ varidx_map[k] = back_mapping[v] if v is not None else NO_VARIATION_INDEX
# Remove unused regions.
self.prune_regions()
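The net effect for callers of the optimize routine above is that the returned varidx_map now always maps NO_VARIATION_INDEX to itself, and entries whose deltas are all zero drop out of the store, with their old indices remapped to NO_VARIATION_INDEX. A small sketch of how client code might remap its indices through that mapping (the helper is generic, not tied to any particular table):

    from fontTools.ttLib.tables import otTables as ot

    def remap_var_indices(var_idxes, varidx_map):
        """Remap variation indices through the mapping returned by the optimizer."""
        return [varidx_map[idx] for idx in var_idxes]

    varidx_map = {ot.NO_VARIATION_INDEX: ot.NO_VARIATION_INDEX, (0 << 16) + 2: (1 << 16) + 0}
    assert remap_var_indices([ot.NO_VARIATION_INDEX, 2], varidx_map) == [
        ot.NO_VARIATION_INDEX,
        (1 << 16) + 0,
    ]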