from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
from fontTools.pens.basePen import AbstractPen, BasePen, DecomposingPen
from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
from fontTools.pens.recordingPen import RecordingPen, DecomposingRecordingPen
from fontTools.misc.transform import Transform
from collections import defaultdict, deque
from math import sqrt, copysign, atan2, pi
from enum import Enum
import itertools

import logging

log = logging.getLogger("fontTools.varLib.interpolatable")


class InterpolatableProblem:
    NOTHING = "nothing"
    MISSING = "missing"
    OPEN_PATH = "open_path"
    PATH_COUNT = "path_count"
    NODE_COUNT = "node_count"
    NODE_INCOMPATIBILITY = "node_incompatibility"
    CONTOUR_ORDER = "contour_order"
    WRONG_START_POINT = "wrong_start_point"
    KINK = "kink"
    UNDERWEIGHT = "underweight"
    OVERWEIGHT = "overweight"

    # Lower value means more severe; sort_problems() below relies on this.
    severity = {
        MISSING: 1,
        OPEN_PATH: 2,
        PATH_COUNT: 3,
        NODE_COUNT: 4,
        NODE_INCOMPATIBILITY: 5,
        CONTOUR_ORDER: 6,
        WRONG_START_POINT: 7,
        KINK: 8,
        UNDERWEIGHT: 9,
        OVERWEIGHT: 10,
        NOTHING: 11,
    }


def sort_problems(problems):
    """Sort problems so that glyphs with the most severe problems come
    first; each glyph is ranked by its most severe problem, with the
    problem's tolerance added as a tie-breaker."""
    return dict(
        sorted(
            problems.items(),
            key=lambda item: min(
                InterpolatableProblem.severity[p["type"]] + p.get("tolerance", 0)
                for p in item[1]
            ),
        )
    )
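
# A minimal sketch of how the ordering comes out (hypothetical input;
# lower severity value means more severe):
#
#     problems = {
#         "a": [{"type": InterpolatableProblem.KINK}],     # severity 8
#         "b": [{"type": InterpolatableProblem.MISSING}],  # severity 1
#     }
#     sort_problems(problems)  # -> {"b": [...], "a": [...]}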


def rot_list(l, k):
    """Rotate list by k items forward.  I.e. the item at position 0 will
    be at position k in the returned list.  Negative k is allowed."""
    return l[-k:] + l[:-k]
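
# For example (illustrative values):
#
#     rot_list([1, 2, 3, 4], 1)   # -> [4, 1, 2, 3]
#     rot_list([1, 2, 3, 4], -1)  # -> [2, 3, 4, 1]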


class PerContourPen(BasePen):
    """Draws a glyph, opening a fresh instance of `Pen` for each contour;
    the per-contour pens are collected in `self.value`."""

    def __init__(self, Pen, glyphset=None):
        BasePen.__init__(self, glyphset)
        self._glyphset = glyphset
        self._Pen = Pen
        self._pen = None
        self.value = []

    def _moveTo(self, p0):
        self._newItem()
        self._pen.moveTo(p0)

    def _lineTo(self, p1):
        self._pen.lineTo(p1)

    def _qCurveToOne(self, p1, p2):
        self._pen.qCurveTo(p1, p2)

    def _curveToOne(self, p1, p2, p3):
        self._pen.curveTo(p1, p2, p3)

    def _closePath(self):
        self._pen.closePath()
        self._pen = None

    def _endPath(self):
        self._pen.endPath()
        self._pen = None

    def _newItem(self):
        self._pen = pen = self._Pen()
        self.value.append(pen)


class PerContourOrComponentPen(PerContourPen):
    """Like PerContourPen, but components also get a pen of their own."""

    def addComponent(self, glyphName, transformation):
        self._newItem()
        self.value[-1].addComponent(glyphName, transformation)
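
# A minimal usage sketch ("glyphset" is a hypothetical fontTools glyph set,
# not defined in this module):
#
#     pen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
#     glyphset["A"].draw(pen)
#     pen.value  # one RecordingPen per contour (or component)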


class SimpleRecordingPointPen(AbstractPointPen):
    """Records each point drawn as a (pt, is_on_curve) pair in `self.value`."""

    def __init__(self):
        self.value = []

    def beginPath(self, identifier=None, **kwargs):
        pass

    def endPath(self) -> None:
        pass

    def addPoint(self, pt, segmentType=None, *args, **kwargs):
        self.value.append((pt, segmentType is not None))
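
# A minimal usage sketch, feeding segment drawing through SegmentToPointPen
# with smooth-guessing disabled ("glyphset" is again hypothetical):
#
#     spen = SimpleRecordingPointPen()
#     glyphset["A"].draw(SegmentToPointPen(spen, False))
#     spen.value  # [((x, y), is_on_curve), ...]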


def vdiff_hypot2(v0, v1):
    """Squared Euclidean distance between two equal-length vectors."""
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d * d
    return s


def vdiff_hypot2_complex(v0, v1):
    """Squared Euclidean distance between two equal-length complex vectors."""
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d.real * d.real + d.imag * d.imag
        # This does the same but seems to be slower:
        # s += (d * d.conjugate()).real
    return s
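
# For example (illustrative values; note the result is the *squared*
# distance, not the distance itself):
#
#     vdiff_hypot2((0, 0), (3, 4))            # -> 25
#     vdiff_hypot2_complex((0j,), (3 + 4j,))  # -> 25.0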


def matching_cost(G, matching):
    return sum(G[i][j] for i, j in enumerate(matching))


def min_cost_perfect_bipartite_matching_scipy(G):
    n = len(G)
    rows, cols = linear_sum_assignment(G)
    assert (rows == list(range(n))).all()
    return list(cols), matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_munkres(G):
    n = len(G)
    cols = [None] * n
    for row, col in Munkres().compute(G):
        cols[row] = col
    return cols, matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_bruteforce(G):
    n = len(G)

    if n > 6:
        raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")

    # Otherwise just brute-force
    permutations = itertools.permutations(range(n))
    best = list(next(permutations))
    best_cost = matching_cost(G, best)
    for p in permutations:
        cost = matching_cost(G, p)
        if cost < best_cost:
            best, best_cost = list(p), cost
    return best, best_cost


try:
    from scipy.optimize import linear_sum_assignment

    min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
except ImportError:
    try:
        from munkres import Munkres

        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_munkres
        )
    except ImportError:
        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_bruteforce
        )
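
# For example (illustrative cost matrix): swapping beats the identity
# assignment here, whichever backend was picked above:
#
#     matching, cost = min_cost_perfect_bipartite_matching([[4, 1], [1, 4]])
#     # matching == [1, 0], cost == 2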


def contour_vector_from_stats(stats):
    # Don't change the order of items here.
    # It's okay to add to the end, but otherwise, other
    # code depends on it. Search for "covariance".
    size = sqrt(abs(stats.area))
    return (
        copysign(size, stats.area),
        stats.meanX,
        stats.meanY,
        stats.stddevX * 2,
        stats.stddevY * 2,
        stats.correlation * size,
    )
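
# A minimal sketch of producing such stats with fontTools' StatisticsPen,
# which provides the area/mean/stddev/correlation attributes used above
# ("glyphset" is hypothetical):
#
#     from fontTools.pens.statisticsPen import StatisticsPen
#
#     stats = StatisticsPen(glyphset=glyphset)
#     glyphset["O"].draw(stats)
#     vector = contour_vector_from_stats(stats)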


def matching_for_vectors(m0, m1):
    n = len(m0)

    costs = [[vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
    matching, matching_cost = min_cost_perfect_bipartite_matching(costs)
    identity_cost = sum(costs[i][i] for i in range(n))
    return matching, matching_cost, identity_cost
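
# For example (illustrative vectors): two contours whose order is swapped
# between masters get matched back up, at lower cost than the identity:
#
#     m0 = [(0, 0), (10, 10)]
#     m1 = [(10, 10), (0, 0)]
#     matching_for_vectors(m0, m1)  # -> ([1, 0], 0, 400)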


def points_characteristic_bits(points):
    """Pack the on-curve flags of `points` into an integer, with bit i
    holding the flag of points[i]."""
    bits = 0
    for pt, b in reversed(points):
        bits = (bits << 1) | b
    return bits
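
# For example: bit i of the result is the on-curve flag of points[i]:
#
#     points_characteristic_bits([((0, 0), True), ((1, 0), False), ((2, 0), True)])
#     # -> 0b101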


_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4


def points_complex_vector(points):
    vector = []
    if not points:
        return vector
    points = [complex(*pt) for pt, _ in points]
    n = len(points)
    assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
    points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
        points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    for i in range(n):
        # The weights are magic numbers.

        # The point itself
        p0 = points[i]
        vector.append(p0)

        # The vector to the next point
        p1 = points[i + 1]
        d0 = p1 - p0
        vector.append(d0 * 3)

        # The turn vector
        p2 = points[i + 2]
        d1 = p2 - p1
        vector.append(d1 - d0)

        # The angle to the next point, as a cross product;
        # square root of, to match the dimensionality of distance.
        cross = d0.real * d1.imag - d0.imag * d1.real
        cross = copysign(sqrt(abs(cross)), cross)
        vector.append(cross * 4)

    return vector
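
# A small sketch (illustrative square): every point contributes exactly
# _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR entries to the result:
#
#     pts = [((0, 0), True), ((1, 0), True), ((1, 1), True), ((0, 1), True)]
#     len(points_complex_vector(pts))  # -> 16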


def add_isomorphisms(points, isomorphisms, reverse):
    """Append to `isomorphisms` every rotation (and, if `reverse`, the
    reversal) of `points` that preserves the on-curve/off-curve pattern,
    as (vector, start_point_index, reversed) tuples."""
    reference_bits = points_characteristic_bits(points)
    n = len(points)

    # if points[0][0] == points[-1][0]:
    #   abort

    if reverse:
        points = points[::-1]
        bits = points_characteristic_bits(points)
    else:
        bits = reference_bits

    vector = points_complex_vector(points)

    assert len(vector) % n == 0
    mult = len(vector) // n
    mask = (1 << n) - 1

    for i in range(n):
        b = ((bits << (n - i)) & mask) | (bits >> i)
        if b == reference_bits:
            isomorphisms.append(
                (rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
            )
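
# A minimal usage sketch: collect all start-point/direction candidates for
# one contour ("pts" is hypothetical, as produced by SimpleRecordingPointPen):
#
#     isomorphisms = []
#     add_isomorphisms(pts, isomorphisms, reverse=False)
#     add_isomorphisms(pts, isomorphisms, reverse=True)
#     # each entry: (rotated complex vector, start point index, reversed flag)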


def find_parents_and_order(glyphsets, locations):
    parents = [None] + list(range(len(glyphsets) - 1))
    order = list(range(len(glyphsets)))
    if locations:
        # Order base master first
        base = next(
            (i for i, l in enumerate(locations) if all(v == 0 for v in l.values())),
            None,
        )
        if base is not None:
            log.info("Base master index %s, location %s", base, locations[base])
        else:
            base = 0
            log.warning("No base master location found")

        # Form a minimum spanning tree of the locations
        try:
            from scipy.sparse.csgraph import minimum_spanning_tree

            graph = [[0] * len(locations) for _ in range(len(locations))]
            axes = set()
            for l in locations:
                axes.update(l.keys())
            axes = sorted(axes)
            vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
            for i, j in itertools.combinations(range(len(locations)), 2):
                graph[i][j] = vdiff_hypot2(vectors[i], vectors[j])

            tree = minimum_spanning_tree(graph)
            rows, cols = tree.nonzero()
            graph = defaultdict(set)
            for row, col in zip(rows, cols):
                graph[row].add(col)
                graph[col].add(row)

            # Traverse the tree from the base master and assign parents
            parents = [None] * len(locations)
            order = []
            visited = set()
            queue = deque([base])
            while queue:
                i = queue.popleft()
                visited.add(i)
                order.append(i)
                for j in sorted(graph[i]):
                    if j not in visited:
                        parents[j] = i
                        queue.append(j)

        except ImportError:
            pass

        log.info("Parents: %s", parents)
        log.info("Order: %s", order)
    return parents, order
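
# A minimal sketch (hypothetical designspace locations; only the length of
# "glyphsets" matters here): master 0 is the base, and the others hang off
# the minimum-spanning-tree edges:
#
#     locations = [{"wght": 0}, {"wght": 0.5}, {"wght": 1.0}]
#     parents, order = find_parents_and_order(glyphsets, locations)
#     # with scipy installed: parents == [None, 0, 1], order == [0, 1, 2]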


def transform_from_stats(stats, inverse=False):
    # https://cookierobotics.com/007/
    a = stats.varianceX
    b = stats.covariance
    c = stats.varianceY

    delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
    lambda1 = (a + c) * 0.5 + delta  # Major eigenvalue
    lambda2 = (a + c) * 0.5 - delta  # Minor eigenvalue
    theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)
    trans = Transform()

    if lambda2 < 0:
        # XXX This is a hack.
        # The problem is that the covariance matrix is singular.
        # This happens when the contour is a line, or a circle.
        # In that case, the covariance matrix is not a good
        # representation of the contour.
        # We should probably detect this earlier and avoid
        # computing the covariance matrix in the first place.
        # But for now, we just clamp the eigenvalue to zero so that
        # sqrt() below doesn't fail on a negative number; note that
        # the inverse transform then still divides by zero.
        lambda2 = 0

    if inverse:
        trans = trans.translate(-stats.meanX, -stats.meanY)
        trans = trans.rotate(-theta)
        trans = trans.scale(1 / sqrt(lambda1), 1 / sqrt(lambda2))
    else:
        trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
        trans = trans.rotate(theta)
        trans = trans.translate(stats.meanX, stats.meanY)

    return trans
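
# A minimal sketch ("stats" as from StatisticsPen above): the forward
# transform roughly maps the unit circle onto the contour's covariance
# ellipse, and inverse=True builds the inverse used for normalizing:
#
#     trans = transform_from_stats(stats)
#     norm = transform_from_stats(stats, inverse=True)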