xref: /aosp_15_r20/external/fonttools/Lib/fontTools/varLib/varStore.py (revision e1fe3e4ad2793916b15cccdc4a7da52a7e1dd0e9)
1*e1fe3e4aSElliott Hughesfrom fontTools.misc.roundTools import noRound, otRound
2*e1fe3e4aSElliott Hughesfrom fontTools.misc.intTools import bit_count
3*e1fe3e4aSElliott Hughesfrom fontTools.ttLib.tables import otTables as ot
4*e1fe3e4aSElliott Hughesfrom fontTools.varLib.models import supportScalar
5*e1fe3e4aSElliott Hughesfrom fontTools.varLib.builder import (
6*e1fe3e4aSElliott Hughes    buildVarRegionList,
7*e1fe3e4aSElliott Hughes    buildVarStore,
8*e1fe3e4aSElliott Hughes    buildVarRegion,
9*e1fe3e4aSElliott Hughes    buildVarData,
10*e1fe3e4aSElliott Hughes)
11*e1fe3e4aSElliott Hughesfrom functools import partial
12*e1fe3e4aSElliott Hughesfrom collections import defaultdict
13*e1fe3e4aSElliott Hughesfrom heapq import heappush, heappop
14*e1fe3e4aSElliott Hughes
15*e1fe3e4aSElliott Hughes
# Sentinel VarIdx value (0xFFFFFFFF) meaning "no variation data".
NO_VARIATION_INDEX = ot.NO_VARIATION_INDEX
# Also expose the sentinel on the compiled VarStore class for convenience.
ot.VarStore.NO_VARIATION_INDEX = NO_VARIATION_INDEX
18*e1fe3e4aSElliott Hughes
19*e1fe3e4aSElliott Hughes
20*e1fe3e4aSElliott Hughesdef _getLocationKey(loc):
21*e1fe3e4aSElliott Hughes    return tuple(sorted(loc.items(), key=lambda kv: kv[0]))
22*e1fe3e4aSElliott Hughes
23*e1fe3e4aSElliott Hughes
class OnlineVarStoreBuilder(object):
    """Build an ItemVariationStore incrementally.

    Delta rows are added one at a time with storeMasters()/storeDeltas();
    rows with identical deltas within one VarData are deduplicated through
    a per-VarData cache.  Call finish() to get the completed store.
    """

    def __init__(self, axisTags):
        self._axisTags = axisTags
        self._regionMap = {}  # location key -> index into the VarRegionList
        self._regionList = buildVarRegionList([], axisTags)
        self._store = buildVarStore(self._regionList, [])
        self._data = None  # VarData currently being appended to
        self._model = None
        self._supports = None
        self._varDataIndices = {}  # region-index tuple -> VarData index
        self._varDataCaches = {}  # region-index tuple -> {delta tuple: varIdx}
        self._cache = {}  # cache of the current VarData

    def setModel(self, model):
        """Use `model` (a VariationModel) for subsequent storeMasters() calls."""
        self.setSupports(model.supports)
        self._model = model

    def setSupports(self, supports):
        """Set region supports directly; resets model, cache, and current VarData."""
        self._model = None
        self._supports = list(supports)
        if not self._supports[0]:
            del self._supports[0]  # Drop base master support
        self._cache = {}
        self._data = None

    def finish(self, optimize=True):
        """Update all counts and return the completed VarStore."""
        self._regionList.RegionCount = len(self._regionList.Region)
        self._store.VarDataCount = len(self._store.VarData)
        for data in self._store.VarData:
            data.ItemCount = len(data.Item)
            data.calculateNumShorts(optimize=optimize)
        return self._store

    def _add_VarData(self):
        """Select (or create) the VarData matching the current supports."""
        regionMap = self._regionMap
        regionList = self._regionList

        regions = self._supports
        regionIndices = []
        for region in regions:
            key = _getLocationKey(region)
            idx = regionMap.get(key)
            if idx is None:
                # First time we see this region; add it to the region list.
                varRegion = buildVarRegion(region, self._axisTags)
                idx = regionMap[key] = len(regionList.Region)
                regionList.Region.append(varRegion)
            regionIndices.append(idx)

        # Check if we have one already...
        key = tuple(regionIndices)
        varDataIdx = self._varDataIndices.get(key)
        if varDataIdx is not None:
            self._outer = varDataIdx
            self._data = self._store.VarData[varDataIdx]
            self._cache = self._varDataCaches[key]
            if len(self._data.Item) == 0xFFFF:
                # This is full.  Need new one.
                varDataIdx = None

        if varDataIdx is None:
            self._data = buildVarData(regionIndices, [], optimize=False)
            self._outer = len(self._store.VarData)
            self._store.VarData.append(self._data)
            self._varDataIndices[key] = self._outer
            if key not in self._varDataCaches:
                self._varDataCaches[key] = {}
            self._cache = self._varDataCaches[key]

    def storeMasters(self, master_values, *, round=round):
        """Compute deltas for the given master values; store them and
        return (base value, varIdx)."""
        deltas = self._model.getDeltas(master_values, round=round)
        base = deltas.pop(0)
        # getDeltas already applied rounding; don't round a second time.
        return base, self.storeDeltas(deltas, round=noRound)

    def storeDeltas(self, deltas, *, round=round):
        """Store one row of deltas; return its 32-bit varIdx.

        Accepts either len(supports) deltas, or len(supports)+1, in which
        case the leading base delta is dropped."""
        deltas = [round(d) for d in deltas]
        if len(deltas) == len(self._supports) + 1:
            deltas = tuple(deltas[1:])
        else:
            assert len(deltas) == len(self._supports)
            deltas = tuple(deltas)

        varIdx = self._cache.get(deltas)
        if varIdx is not None:
            return varIdx

        if not self._data:
            self._add_VarData()
        inner = len(self._data.Item)
        if inner == 0xFFFF:
            # Full array. Start new one.
            self._add_VarData()
            # Bugfix: deltas were already rounded above; the recursive call
            # must not re-round them with the default builtin round().
            return self.storeDeltas(deltas, round=noRound)
        self._data.addItem(deltas, round=noRound)

        varIdx = (self._outer << 16) + inner
        self._cache[deltas] = varIdx
        return varIdx
121*e1fe3e4aSElliott Hughes
122*e1fe3e4aSElliott Hughes
def VarData_addItem(self, deltas, *, round=round):
    """Append one row of deltas to this VarData, rounding each value.

    Accepts either VarRegionCount deltas, or VarRegionCount+1, in which
    case the leading base delta is dropped.
    """
    rounded = [round(d) for d in deltas]

    numRegions = self.VarRegionCount
    numDeltas = len(rounded)
    if numRegions + 1 == numDeltas:
        rounded = rounded[1:]  # Drop the base-master delta.
    else:
        assert numRegions == numDeltas, (numRegions, numDeltas)
    self.Item.append(rounded)
    self.ItemCount = len(self.Item)
135*e1fe3e4aSElliott Hughes
136*e1fe3e4aSElliott Hughes
137*e1fe3e4aSElliott Hughesot.VarData.addItem = VarData_addItem
138*e1fe3e4aSElliott Hughes
139*e1fe3e4aSElliott Hughes
def VarRegion_get_support(self, fvar_axes):
    """Return this region's support as {axisTag: (start, peak, end)}.

    Axes whose peak coordinate is 0 contribute nothing and are omitted.
    """
    support = {}
    for axisIndex, regionAxis in enumerate(self.VarRegionAxis):
        if regionAxis.PeakCoord == 0:
            continue
        tag = fvar_axes[axisIndex].axisTag
        support[tag] = (regionAxis.StartCoord, regionAxis.PeakCoord, regionAxis.EndCoord)
    return support
146*e1fe3e4aSElliott Hughes
147*e1fe3e4aSElliott Hughes
148*e1fe3e4aSElliott Hughesot.VarRegion.get_support = VarRegion_get_support
149*e1fe3e4aSElliott Hughes
150*e1fe3e4aSElliott Hughes
def VarStore___bool__(self):
    """Return True if the store contains any VarData subtables."""
    return bool(self.VarData)


# Monkey-patch: lets callers write `if varstore:` on compiled tables.
ot.VarStore.__bool__ = VarStore___bool__
156*e1fe3e4aSElliott Hughes
157*e1fe3e4aSElliott Hughes
class VarStoreInstancer(object):
    """Compute interpolated deltas from a VarStore at a given location.

    The location maps axis tags to normalized coordinates; axes not in the
    location are handled per supportScalar semantics (partial instancing).
    """

    def __init__(self, varstore, fvar_axes, location=None):
        # `location=None` replaces a mutable-default `{}`; behavior unchanged.
        self.fvar_axes = fvar_axes
        assert varstore is None or varstore.Format == 1
        self._varData = varstore.VarData if varstore else []
        self._regions = varstore.VarRegionList.Region if varstore else []
        self.setLocation(location if location is not None else {})

    def setLocation(self, location):
        """Set the normalized axis location; clears cached scalars."""
        self.location = dict(location)
        self._clearCaches()

    def _clearCaches(self):
        # regionIdx -> scalar; rebuilt lazily after every location change.
        self._scalars = {}

    def _getScalar(self, regionIdx):
        # supportScalar never returns None, so None safely marks "missing".
        scalar = self._scalars.get(regionIdx)
        if scalar is None:
            support = self._regions[regionIdx].get_support(self.fvar_axes)
            scalar = supportScalar(self.location, support)
            self._scalars[regionIdx] = scalar
        return scalar

    @staticmethod
    def interpolateFromDeltasAndScalars(deltas, scalars):
        """Return the sum of deltas weighted by scalars (zero scalars skipped)."""
        delta = 0.0
        for d, s in zip(deltas, scalars):
            if not s:
                continue
            delta += d * s
        return delta

    def __getitem__(self, varidx):
        """Return the interpolated delta for a 32-bit VarIdx."""
        if varidx == NO_VARIATION_INDEX:
            return 0.0
        # Split into VarData (major) and row (minor) indices only when needed.
        major, minor = varidx >> 16, varidx & 0xFFFF
        varData = self._varData
        scalars = [self._getScalar(ri) for ri in varData[major].VarRegionIndex]
        deltas = varData[major].Item[minor]
        return self.interpolateFromDeltasAndScalars(deltas, scalars)

    def interpolateFromDeltas(self, varDataIndex, deltas):
        """Interpolate `deltas` against the regions of VarData[varDataIndex]."""
        varData = self._varData
        scalars = [self._getScalar(ri) for ri in varData[varDataIndex].VarRegionIndex]
        return self.interpolateFromDeltasAndScalars(deltas, scalars)
203*e1fe3e4aSElliott Hughes
204*e1fe3e4aSElliott Hughes
205*e1fe3e4aSElliott Hughes#
206*e1fe3e4aSElliott Hughes# Optimizations
207*e1fe3e4aSElliott Hughes#
208*e1fe3e4aSElliott Hughes# retainFirstMap - If true, major 0 mappings are retained. Deltas for unused indices are zeroed
209*e1fe3e4aSElliott Hughes# advIdxes - Set of major 0 indices for advance deltas to be listed first. Other major 0 indices follow.
210*e1fe3e4aSElliott Hughes
211*e1fe3e4aSElliott Hughes
def VarStore_subset_varidxes(
    self, varIdxes, optimize=True, retainFirstMap=False, advIdxes=frozenset()
):
    """Subset the store to the VarIdxes in `varIdxes`.

    Returns a map from old 32-bit VarIdx to new VarIdx; NO_VARIATION_INDEX
    maps to itself.

    retainFirstMap: if true, major-0 rows keep their minor indices and
    deltas for unused major-0 rows are zeroed rather than removed.
    advIdxes: major-0 minor indices (advance deltas) to be listed first;
    other used major-0 minors follow.

    (The default for advIdxes was changed from a mutable `set()` to an
    immutable `frozenset()`; behavior is unchanged.)
    """
    # Group used minors by major.
    used = defaultdict(set)
    for varIdx in varIdxes:
        if varIdx == NO_VARIATION_INDEX:
            continue
        used[varIdx >> 16].add(varIdx & 0xFFFF)
    del varIdxes

    #
    # Subset VarData
    #

    varData = self.VarData
    newVarData = []
    varDataMap = {NO_VARIATION_INDEX: NO_VARIATION_INDEX}
    for major, data in enumerate(varData):
        usedMinors = used.get(major)
        if usedMinors is None:
            # No row of this VarData is referenced; drop it entirely.
            continue
        newMajor = len(newVarData)
        newVarData.append(data)

        items = data.Item
        newItems = []
        if major == 0 and retainFirstMap:
            # Keep all rows in place; zero out deltas of unused rows.
            for minor in range(len(items)):
                newItems.append(
                    items[minor] if minor in usedMinors else [0] * len(items[minor])
                )
                varDataMap[minor] = minor
        else:
            if major == 0:
                # Advance deltas come first, then the remaining used rows.
                minors = sorted(advIdxes) + sorted(usedMinors - advIdxes)
            else:
                minors = sorted(usedMinors)
            for minor in minors:
                newMinor = len(newItems)
                newItems.append(items[minor])
                varDataMap[(major << 16) + minor] = (newMajor << 16) + newMinor

        data.Item = newItems
        data.ItemCount = len(data.Item)

        data.calculateNumShorts(optimize=optimize)

    self.VarData = newVarData
    self.VarDataCount = len(self.VarData)

    # Regions referenced only by dropped VarData are now unused.
    self.prune_regions()

    return varDataMap
271*e1fe3e4aSElliott Hughes
272*e1fe3e4aSElliott Hughes
273*e1fe3e4aSElliott Hughesot.VarStore.subset_varidxes = VarStore_subset_varidxes
274*e1fe3e4aSElliott Hughes
275*e1fe3e4aSElliott Hughes
def VarStore_prune_regions(self):
    """Remove unused VarRegions."""
    #
    # Subset VarRegionList
    #

    # Collect the region indices still referenced by any VarData.
    referenced = set()
    for data in self.VarData:
        referenced.update(data.VarRegionIndex)

    # Compact the region list, preserving relative order of kept regions.
    regionList = self.VarRegionList
    oldRegions = regionList.Region
    keptOrder = sorted(referenced)
    remap = {old: new for new, old in enumerate(keptOrder)}
    regionList.Region = [oldRegions[old] for old in keptOrder]
    regionList.RegionCount = len(regionList.Region)

    # Rewrite each VarData's region indices through the new mapping.
    for data in self.VarData:
        data.VarRegionIndex = [remap[old] for old in data.VarRegionIndex]
299*e1fe3e4aSElliott Hughes
300*e1fe3e4aSElliott Hughes
301*e1fe3e4aSElliott Hughesot.VarStore.prune_regions = VarStore_prune_regions
302*e1fe3e4aSElliott Hughes
303*e1fe3e4aSElliott Hughes
def _visit(self, func):
    """Recurse down from self, if type of an object is ot.Device,
    call func() on it.  Works on otData-style classes."""

    # Exact type check (not isinstance) matches the original semantics.
    if type(self) == ot.Device:
        func(self)
        return

    if isinstance(self, list):
        for element in self:
            _visit(element, func)
        return

    if hasattr(self, "getConverters") and not hasattr(self, "postRead"):
        # otData-style object: walk every converter-declared attribute.
        for conv in self.getConverters():
            value = getattr(self, conv.name, None)
            if value is not None:
                _visit(value, func)
        return

    if isinstance(self, ot.ValueRecord):
        for value in self.__dict__.values():
            _visit(value, func)
324*e1fe3e4aSElliott Hughes
325*e1fe3e4aSElliott Hughes
326*e1fe3e4aSElliott Hughesdef _Device_recordVarIdx(self, s):
327*e1fe3e4aSElliott Hughes    """Add VarIdx in this Device table (if any) to the set s."""
328*e1fe3e4aSElliott Hughes    if self.DeltaFormat == 0x8000:
329*e1fe3e4aSElliott Hughes        s.add((self.StartSize << 16) + self.EndSize)
330*e1fe3e4aSElliott Hughes
331*e1fe3e4aSElliott Hughes
def Object_collect_device_varidxes(self, varidxes):
    """Walk self and collect VarIdx values from all Device tables into varidxes."""
    _visit(self, partial(_Device_recordVarIdx, s=varidxes))
335*e1fe3e4aSElliott Hughes
336*e1fe3e4aSElliott Hughes
# Monkey-patch: GDEF and GPOS are the tables that may carry Device records.
ot.GDEF.collect_device_varidxes = Object_collect_device_varidxes
ot.GPOS.collect_device_varidxes = Object_collect_device_varidxes
339*e1fe3e4aSElliott Hughes
340*e1fe3e4aSElliott Hughes
341*e1fe3e4aSElliott Hughesdef _Device_mapVarIdx(self, mapping, done):
342*e1fe3e4aSElliott Hughes    """Map VarIdx in this Device table (if any) through mapping."""
343*e1fe3e4aSElliott Hughes    if id(self) in done:
344*e1fe3e4aSElliott Hughes        return
345*e1fe3e4aSElliott Hughes    done.add(id(self))
346*e1fe3e4aSElliott Hughes    if self.DeltaFormat == 0x8000:
347*e1fe3e4aSElliott Hughes        varIdx = mapping[(self.StartSize << 16) + self.EndSize]
348*e1fe3e4aSElliott Hughes        self.StartSize = varIdx >> 16
349*e1fe3e4aSElliott Hughes        self.EndSize = varIdx & 0xFFFF
350*e1fe3e4aSElliott Hughes
351*e1fe3e4aSElliott Hughes
def Object_remap_device_varidxes(self, varidxes_map):
    """Walk self and remap VarIdx values in all Device tables through varidxes_map."""
    _visit(self, partial(_Device_mapVarIdx, mapping=varidxes_map, done=set()))
355*e1fe3e4aSElliott Hughes
356*e1fe3e4aSElliott Hughes
# Monkey-patch: GDEF and GPOS are the tables that may carry Device records.
ot.GDEF.remap_device_varidxes = Object_remap_device_varidxes
ot.GPOS.remap_device_varidxes = Object_remap_device_varidxes
359*e1fe3e4aSElliott Hughes
360*e1fe3e4aSElliott Hughes
361*e1fe3e4aSElliott Hughesclass _Encoding(object):
362*e1fe3e4aSElliott Hughes    def __init__(self, chars):
363*e1fe3e4aSElliott Hughes        self.chars = chars
364*e1fe3e4aSElliott Hughes        self.width = bit_count(chars)
365*e1fe3e4aSElliott Hughes        self.columns = self._columns(chars)
366*e1fe3e4aSElliott Hughes        self.overhead = self._characteristic_overhead(self.columns)
367*e1fe3e4aSElliott Hughes        self.items = set()
368*e1fe3e4aSElliott Hughes
369*e1fe3e4aSElliott Hughes    def append(self, row):
370*e1fe3e4aSElliott Hughes        self.items.add(row)
371*e1fe3e4aSElliott Hughes
372*e1fe3e4aSElliott Hughes    def extend(self, lst):
373*e1fe3e4aSElliott Hughes        self.items.update(lst)
374*e1fe3e4aSElliott Hughes
375*e1fe3e4aSElliott Hughes    def get_room(self):
376*e1fe3e4aSElliott Hughes        """Maximum number of bytes that can be added to characteristic
377*e1fe3e4aSElliott Hughes        while still being beneficial to merge it into another one."""
378*e1fe3e4aSElliott Hughes        count = len(self.items)
379*e1fe3e4aSElliott Hughes        return max(0, (self.overhead - 1) // count - self.width)
380*e1fe3e4aSElliott Hughes
381*e1fe3e4aSElliott Hughes    room = property(get_room)
382*e1fe3e4aSElliott Hughes
383*e1fe3e4aSElliott Hughes    def get_gain(self):
384*e1fe3e4aSElliott Hughes        """Maximum possible byte gain from merging this into another
385*e1fe3e4aSElliott Hughes        characteristic."""
386*e1fe3e4aSElliott Hughes        count = len(self.items)
387*e1fe3e4aSElliott Hughes        return max(0, self.overhead - count)
388*e1fe3e4aSElliott Hughes
389*e1fe3e4aSElliott Hughes    gain = property(get_gain)
390*e1fe3e4aSElliott Hughes
391*e1fe3e4aSElliott Hughes    def gain_sort_key(self):
392*e1fe3e4aSElliott Hughes        return self.gain, self.chars
393*e1fe3e4aSElliott Hughes
394*e1fe3e4aSElliott Hughes    def width_sort_key(self):
395*e1fe3e4aSElliott Hughes        return self.width, self.chars
396*e1fe3e4aSElliott Hughes
397*e1fe3e4aSElliott Hughes    @staticmethod
398*e1fe3e4aSElliott Hughes    def _characteristic_overhead(columns):
399*e1fe3e4aSElliott Hughes        """Returns overhead in bytes of encoding this characteristic
400*e1fe3e4aSElliott Hughes        as a VarData."""
401*e1fe3e4aSElliott Hughes        c = 4 + 6  # 4 bytes for LOffset, 6 bytes for VarData header
402*e1fe3e4aSElliott Hughes        c += bit_count(columns) * 2
403*e1fe3e4aSElliott Hughes        return c
404*e1fe3e4aSElliott Hughes
405*e1fe3e4aSElliott Hughes    @staticmethod
406*e1fe3e4aSElliott Hughes    def _columns(chars):
407*e1fe3e4aSElliott Hughes        cols = 0
408*e1fe3e4aSElliott Hughes        i = 1
409*e1fe3e4aSElliott Hughes        while chars:
410*e1fe3e4aSElliott Hughes            if chars & 0b1111:
411*e1fe3e4aSElliott Hughes                cols |= i
412*e1fe3e4aSElliott Hughes            chars >>= 4
413*e1fe3e4aSElliott Hughes            i <<= 1
414*e1fe3e4aSElliott Hughes        return cols
415*e1fe3e4aSElliott Hughes
416*e1fe3e4aSElliott Hughes    def gain_from_merging(self, other_encoding):
417*e1fe3e4aSElliott Hughes        combined_chars = other_encoding.chars | self.chars
418*e1fe3e4aSElliott Hughes        combined_width = bit_count(combined_chars)
419*e1fe3e4aSElliott Hughes        combined_columns = self.columns | other_encoding.columns
420*e1fe3e4aSElliott Hughes        combined_overhead = _Encoding._characteristic_overhead(combined_columns)
421*e1fe3e4aSElliott Hughes        combined_gain = (
422*e1fe3e4aSElliott Hughes            +self.overhead
423*e1fe3e4aSElliott Hughes            + other_encoding.overhead
424*e1fe3e4aSElliott Hughes            - combined_overhead
425*e1fe3e4aSElliott Hughes            - (combined_width - self.width) * len(self.items)
426*e1fe3e4aSElliott Hughes            - (combined_width - other_encoding.width) * len(other_encoding.items)
427*e1fe3e4aSElliott Hughes        )
428*e1fe3e4aSElliott Hughes        return combined_gain
429*e1fe3e4aSElliott Hughes
430*e1fe3e4aSElliott Hughes
431*e1fe3e4aSElliott Hughesclass _EncodingDict(dict):
432*e1fe3e4aSElliott Hughes    def __missing__(self, chars):
433*e1fe3e4aSElliott Hughes        r = self[chars] = _Encoding(chars)
434*e1fe3e4aSElliott Hughes        return r
435*e1fe3e4aSElliott Hughes
436*e1fe3e4aSElliott Hughes    def add_row(self, row):
437*e1fe3e4aSElliott Hughes        chars = self._row_characteristics(row)
438*e1fe3e4aSElliott Hughes        self[chars].append(row)
439*e1fe3e4aSElliott Hughes
440*e1fe3e4aSElliott Hughes    @staticmethod
441*e1fe3e4aSElliott Hughes    def _row_characteristics(row):
442*e1fe3e4aSElliott Hughes        """Returns encoding characteristics for a row."""
443*e1fe3e4aSElliott Hughes        longWords = False
444*e1fe3e4aSElliott Hughes
445*e1fe3e4aSElliott Hughes        chars = 0
446*e1fe3e4aSElliott Hughes        i = 1
447*e1fe3e4aSElliott Hughes        for v in row:
448*e1fe3e4aSElliott Hughes            if v:
449*e1fe3e4aSElliott Hughes                chars += i
450*e1fe3e4aSElliott Hughes            if not (-128 <= v <= 127):
451*e1fe3e4aSElliott Hughes                chars += i * 0b0010
452*e1fe3e4aSElliott Hughes            if not (-32768 <= v <= 32767):
453*e1fe3e4aSElliott Hughes                longWords = True
454*e1fe3e4aSElliott Hughes                break
455*e1fe3e4aSElliott Hughes            i <<= 4
456*e1fe3e4aSElliott Hughes
457*e1fe3e4aSElliott Hughes        if longWords:
458*e1fe3e4aSElliott Hughes            # Redo; only allow 2byte/4byte encoding
459*e1fe3e4aSElliott Hughes            chars = 0
460*e1fe3e4aSElliott Hughes            i = 1
461*e1fe3e4aSElliott Hughes            for v in row:
462*e1fe3e4aSElliott Hughes                if v:
463*e1fe3e4aSElliott Hughes                    chars += i * 0b0011
464*e1fe3e4aSElliott Hughes                if not (-32768 <= v <= 32767):
465*e1fe3e4aSElliott Hughes                    chars += i * 0b1100
466*e1fe3e4aSElliott Hughes                i <<= 4
467*e1fe3e4aSElliott Hughes
468*e1fe3e4aSElliott Hughes        return chars
469*e1fe3e4aSElliott Hughes
470*e1fe3e4aSElliott Hughes
471*e1fe3e4aSElliott Hughesdef VarStore_optimize(self, use_NO_VARIATION_INDEX=True, quantization=1):
472*e1fe3e4aSElliott Hughes    """Optimize storage. Returns mapping from old VarIdxes to new ones."""
473*e1fe3e4aSElliott Hughes
474*e1fe3e4aSElliott Hughes    # Overview:
475*e1fe3e4aSElliott Hughes    #
476*e1fe3e4aSElliott Hughes    # For each VarData row, we first extend it with zeroes to have
477*e1fe3e4aSElliott Hughes    # one column per region in VarRegionList. We then group the
478*e1fe3e4aSElliott Hughes    # rows into _Encoding objects, by their "characteristic" bitmap.
479*e1fe3e4aSElliott Hughes    # The characteristic bitmap is a binary number representing how
480*e1fe3e4aSElliott Hughes    # many bytes each column of the data takes up to encode. Each
481*e1fe3e4aSElliott Hughes    # column is encoded in four bits. For example, if a column has
482*e1fe3e4aSElliott Hughes    # only values in the range -128..127, it would only have a single
483*e1fe3e4aSElliott Hughes    # bit set in the characteristic bitmap for that column. If it has
484*e1fe3e4aSElliott Hughes    # values in the range -32768..32767, it would have two bits set.
485*e1fe3e4aSElliott Hughes    # The number of ones in the characteristic bitmap is the "width"
486*e1fe3e4aSElliott Hughes    # of the encoding.
487*e1fe3e4aSElliott Hughes    #
488*e1fe3e4aSElliott Hughes    # Each encoding as such has a number of "active" (ie. non-zero)
489*e1fe3e4aSElliott Hughes    # columns. The overhead of encoding the characteristic bitmap
490*e1fe3e4aSElliott Hughes    # is 10 bytes, plus 2 bytes per active column.
491*e1fe3e4aSElliott Hughes    #
492*e1fe3e4aSElliott Hughes    # When an encoding is merged into another one, if the characteristic
493*e1fe3e4aSElliott Hughes    # of the old encoding is a subset of the new one, then the overhead
494*e1fe3e4aSElliott Hughes    # of the old encoding is completely eliminated. However, each row
495*e1fe3e4aSElliott Hughes    # now would require more bytes to encode, to the tune of one byte
496*e1fe3e4aSElliott Hughes    # per characteristic bit that is active in the new encoding but not
497*e1fe3e4aSElliott Hughes    # in the old one. The number of bits that can be added to an encoding
498*e1fe3e4aSElliott Hughes    # while still beneficial to merge it into another encoding is called
499*e1fe3e4aSElliott Hughes    # the "room" for that encoding.
500*e1fe3e4aSElliott Hughes    #
501*e1fe3e4aSElliott Hughes    # The "gain" of an encodings is the maximum number of bytes we can
502*e1fe3e4aSElliott Hughes    # save by merging it into another encoding. The "gain" of merging
503*e1fe3e4aSElliott Hughes    # two encodings is how many bytes we save by doing so.
504*e1fe3e4aSElliott Hughes    #
505*e1fe3e4aSElliott Hughes    # High-level algorithm:
506*e1fe3e4aSElliott Hughes    #
507*e1fe3e4aSElliott Hughes    # - Each encoding has a minimal way to encode it. However, because
508*e1fe3e4aSElliott Hughes    #   of the overhead of encoding the characteristic bitmap, it may
509*e1fe3e4aSElliott Hughes    #   be beneficial to merge two encodings together, if there is
510*e1fe3e4aSElliott Hughes    #   gain in doing so. As such, we need to search for the best
511*e1fe3e4aSElliott Hughes    #   such successive merges.
512*e1fe3e4aSElliott Hughes    #
513*e1fe3e4aSElliott Hughes    # Algorithm:
514*e1fe3e4aSElliott Hughes    #
515*e1fe3e4aSElliott Hughes    # - Put all encodings into a "todo" list.
516*e1fe3e4aSElliott Hughes    #
517*e1fe3e4aSElliott Hughes    # - Sort todo list by decreasing gain (for stability).
518*e1fe3e4aSElliott Hughes    #
519*e1fe3e4aSElliott Hughes    # - Make a priority-queue of the gain from combining each two
520*e1fe3e4aSElliott Hughes    #   encodings in the todo list. The priority queue is sorted by
521*e1fe3e4aSElliott Hughes    #   decreasing gain. Only positive gains are included.
522*e1fe3e4aSElliott Hughes    #
523*e1fe3e4aSElliott Hughes    # - While priority queue is not empty:
524*e1fe3e4aSElliott Hughes    #   - Pop the first item from the priority queue,
525*e1fe3e4aSElliott Hughes    #   - Merge the two encodings it represents,
526*e1fe3e4aSElliott Hughes    #   - Remove the two encodings from the todo list,
527*e1fe3e4aSElliott Hughes    #   - Insert positive gains from combining the new encoding with
528*e1fe3e4aSElliott Hughes    #     all existing todo list items into the priority queue,
529*e1fe3e4aSElliott Hughes    #   - If a todo list item with the same characteristic bitmap as
530*e1fe3e4aSElliott Hughes    #     the new encoding exists, remove it from the todo list and
531*e1fe3e4aSElliott Hughes    #     merge it into the new encoding.
532*e1fe3e4aSElliott Hughes    #   - Insert the new encoding into the todo list,
533*e1fe3e4aSElliott Hughes    #
534*e1fe3e4aSElliott Hughes    # - Encode all remaining items in the todo list.
535*e1fe3e4aSElliott Hughes    #
536*e1fe3e4aSElliott Hughes    # The output is then sorted for stability, in the following way:
537*e1fe3e4aSElliott Hughes    # - The VarRegionList of the input is kept intact.
538*e1fe3e4aSElliott Hughes    # - All encodings are sorted before the main algorithm, by
539*e1fe3e4aSElliott Hughes    #   gain_key_sort(), which is a tuple of the following items:
540*e1fe3e4aSElliott Hughes    #   * The gain of the encoding.
541*e1fe3e4aSElliott Hughes    #   * The characteristic bitmap of the encoding, with higher-numbered
542*e1fe3e4aSElliott Hughes    #     columns compared first.
543*e1fe3e4aSElliott Hughes    # - The VarData is sorted by width_sort_key(), which is a tuple
544*e1fe3e4aSElliott Hughes    #   of the following items:
545*e1fe3e4aSElliott Hughes    #   * The "width" of the encoding.
546*e1fe3e4aSElliott Hughes    #   * The characteristic bitmap of the encoding, with higher-numbered
547*e1fe3e4aSElliott Hughes    #     columns compared first.
548*e1fe3e4aSElliott Hughes    # - Within each VarData, the items are sorted as vectors of numbers.
549*e1fe3e4aSElliott Hughes    #
550*e1fe3e4aSElliott Hughes    # Finally, each VarData is optimized to remove the empty columns and
551*e1fe3e4aSElliott Hughes    # reorder columns as needed.
552*e1fe3e4aSElliott Hughes
553*e1fe3e4aSElliott Hughes    # TODO
554*e1fe3e4aSElliott Hughes    # Check that no two VarRegions are the same; if they are, fold them.
555*e1fe3e4aSElliott Hughes
556*e1fe3e4aSElliott Hughes    n = len(self.VarRegionList.Region)  # Number of columns
557*e1fe3e4aSElliott Hughes    zeroes = [0] * n
558*e1fe3e4aSElliott Hughes
559*e1fe3e4aSElliott Hughes    front_mapping = {}  # Map from old VarIdxes to full row tuples
560*e1fe3e4aSElliott Hughes
561*e1fe3e4aSElliott Hughes    encodings = _EncodingDict()
562*e1fe3e4aSElliott Hughes
563*e1fe3e4aSElliott Hughes    # Collect all items into a set of full rows (with lots of zeroes.)
564*e1fe3e4aSElliott Hughes    for major, data in enumerate(self.VarData):
565*e1fe3e4aSElliott Hughes        regionIndices = data.VarRegionIndex
566*e1fe3e4aSElliott Hughes
567*e1fe3e4aSElliott Hughes        for minor, item in enumerate(data.Item):
568*e1fe3e4aSElliott Hughes            row = list(zeroes)
569*e1fe3e4aSElliott Hughes
570*e1fe3e4aSElliott Hughes            if quantization == 1:
571*e1fe3e4aSElliott Hughes                for regionIdx, v in zip(regionIndices, item):
572*e1fe3e4aSElliott Hughes                    row[regionIdx] += v
573*e1fe3e4aSElliott Hughes            else:
574*e1fe3e4aSElliott Hughes                for regionIdx, v in zip(regionIndices, item):
575*e1fe3e4aSElliott Hughes                    row[regionIdx] += (
576*e1fe3e4aSElliott Hughes                        round(v / quantization) * quantization
577*e1fe3e4aSElliott Hughes                    )  # TODO https://github.com/fonttools/fonttools/pull/3126#discussion_r1205439785
578*e1fe3e4aSElliott Hughes
579*e1fe3e4aSElliott Hughes            row = tuple(row)
580*e1fe3e4aSElliott Hughes
581*e1fe3e4aSElliott Hughes            if use_NO_VARIATION_INDEX and not any(row):
582*e1fe3e4aSElliott Hughes                front_mapping[(major << 16) + minor] = None
583*e1fe3e4aSElliott Hughes                continue
584*e1fe3e4aSElliott Hughes
585*e1fe3e4aSElliott Hughes            encodings.add_row(row)
586*e1fe3e4aSElliott Hughes            front_mapping[(major << 16) + minor] = row
587*e1fe3e4aSElliott Hughes
588*e1fe3e4aSElliott Hughes    # Prepare for the main algorithm.
589*e1fe3e4aSElliott Hughes    todo = sorted(encodings.values(), key=_Encoding.gain_sort_key)
590*e1fe3e4aSElliott Hughes    del encodings
591*e1fe3e4aSElliott Hughes
592*e1fe3e4aSElliott Hughes    # Repeatedly pick two best encodings to combine, and combine them.
593*e1fe3e4aSElliott Hughes
594*e1fe3e4aSElliott Hughes    heap = []
595*e1fe3e4aSElliott Hughes    for i, encoding in enumerate(todo):
596*e1fe3e4aSElliott Hughes        for j in range(i + 1, len(todo)):
597*e1fe3e4aSElliott Hughes            other_encoding = todo[j]
598*e1fe3e4aSElliott Hughes            combining_gain = encoding.gain_from_merging(other_encoding)
599*e1fe3e4aSElliott Hughes            if combining_gain > 0:
600*e1fe3e4aSElliott Hughes                heappush(heap, (-combining_gain, i, j))
601*e1fe3e4aSElliott Hughes
602*e1fe3e4aSElliott Hughes    while heap:
603*e1fe3e4aSElliott Hughes        _, i, j = heappop(heap)
604*e1fe3e4aSElliott Hughes        if todo[i] is None or todo[j] is None:
605*e1fe3e4aSElliott Hughes            continue
606*e1fe3e4aSElliott Hughes
607*e1fe3e4aSElliott Hughes        encoding, other_encoding = todo[i], todo[j]
608*e1fe3e4aSElliott Hughes        todo[i], todo[j] = None, None
609*e1fe3e4aSElliott Hughes
610*e1fe3e4aSElliott Hughes        # Combine the two encodings
611*e1fe3e4aSElliott Hughes        combined_chars = other_encoding.chars | encoding.chars
612*e1fe3e4aSElliott Hughes        combined_encoding = _Encoding(combined_chars)
613*e1fe3e4aSElliott Hughes        combined_encoding.extend(encoding.items)
614*e1fe3e4aSElliott Hughes        combined_encoding.extend(other_encoding.items)
615*e1fe3e4aSElliott Hughes
616*e1fe3e4aSElliott Hughes        for k, enc in enumerate(todo):
617*e1fe3e4aSElliott Hughes            if enc is None:
618*e1fe3e4aSElliott Hughes                continue
619*e1fe3e4aSElliott Hughes
620*e1fe3e4aSElliott Hughes            # In the unlikely event that the same encoding exists already,
621*e1fe3e4aSElliott Hughes            # combine it.
622*e1fe3e4aSElliott Hughes            if enc.chars == combined_chars:
623*e1fe3e4aSElliott Hughes                combined_encoding.extend(enc.items)
624*e1fe3e4aSElliott Hughes                todo[k] = None
625*e1fe3e4aSElliott Hughes                continue
626*e1fe3e4aSElliott Hughes
627*e1fe3e4aSElliott Hughes            combining_gain = combined_encoding.gain_from_merging(enc)
628*e1fe3e4aSElliott Hughes            if combining_gain > 0:
629*e1fe3e4aSElliott Hughes                heappush(heap, (-combining_gain, k, len(todo)))
630*e1fe3e4aSElliott Hughes
631*e1fe3e4aSElliott Hughes        todo.append(combined_encoding)
632*e1fe3e4aSElliott Hughes
633*e1fe3e4aSElliott Hughes    encodings = [encoding for encoding in todo if encoding is not None]
634*e1fe3e4aSElliott Hughes
635*e1fe3e4aSElliott Hughes    # Assemble final store.
636*e1fe3e4aSElliott Hughes    back_mapping = {}  # Mapping from full rows to new VarIdxes
637*e1fe3e4aSElliott Hughes    encodings.sort(key=_Encoding.width_sort_key)
638*e1fe3e4aSElliott Hughes    self.VarData = []
639*e1fe3e4aSElliott Hughes    for encoding in encodings:
640*e1fe3e4aSElliott Hughes        items = sorted(encoding.items)
641*e1fe3e4aSElliott Hughes
642*e1fe3e4aSElliott Hughes        while items:
643*e1fe3e4aSElliott Hughes            major = len(self.VarData)
644*e1fe3e4aSElliott Hughes            data = ot.VarData()
645*e1fe3e4aSElliott Hughes            self.VarData.append(data)
646*e1fe3e4aSElliott Hughes            data.VarRegionIndex = range(n)
647*e1fe3e4aSElliott Hughes            data.VarRegionCount = len(data.VarRegionIndex)
648*e1fe3e4aSElliott Hughes
649*e1fe3e4aSElliott Hughes            # Each major can only encode up to 0xFFFF entries.
650*e1fe3e4aSElliott Hughes            data.Item, items = items[:0xFFFF], items[0xFFFF:]
651*e1fe3e4aSElliott Hughes
652*e1fe3e4aSElliott Hughes            for minor, item in enumerate(data.Item):
653*e1fe3e4aSElliott Hughes                back_mapping[item] = (major << 16) + minor
654*e1fe3e4aSElliott Hughes
655*e1fe3e4aSElliott Hughes    # Compile final mapping.
656*e1fe3e4aSElliott Hughes    varidx_map = {NO_VARIATION_INDEX: NO_VARIATION_INDEX}
657*e1fe3e4aSElliott Hughes    for k, v in front_mapping.items():
658*e1fe3e4aSElliott Hughes        varidx_map[k] = back_mapping[v] if v is not None else NO_VARIATION_INDEX
659*e1fe3e4aSElliott Hughes
660*e1fe3e4aSElliott Hughes    # Recalculate things and go home.
661*e1fe3e4aSElliott Hughes    self.VarRegionList.RegionCount = len(self.VarRegionList.Region)
662*e1fe3e4aSElliott Hughes    self.VarDataCount = len(self.VarData)
663*e1fe3e4aSElliott Hughes    for data in self.VarData:
664*e1fe3e4aSElliott Hughes        data.ItemCount = len(data.Item)
665*e1fe3e4aSElliott Hughes        data.optimize()
666*e1fe3e4aSElliott Hughes
667*e1fe3e4aSElliott Hughes    # Remove unused regions.
668*e1fe3e4aSElliott Hughes    self.prune_regions()
669*e1fe3e4aSElliott Hughes
670*e1fe3e4aSElliott Hughes    return varidx_map
671*e1fe3e4aSElliott Hughes
672*e1fe3e4aSElliott Hughes
673*e1fe3e4aSElliott Hughesot.VarStore.optimize = VarStore_optimize
674*e1fe3e4aSElliott Hughes
675*e1fe3e4aSElliott Hughes
676*e1fe3e4aSElliott Hughesdef main(args=None):
677*e1fe3e4aSElliott Hughes    """Optimize a font's GDEF variation store"""
678*e1fe3e4aSElliott Hughes    from argparse import ArgumentParser
679*e1fe3e4aSElliott Hughes    from fontTools import configLogger
680*e1fe3e4aSElliott Hughes    from fontTools.ttLib import TTFont
681*e1fe3e4aSElliott Hughes    from fontTools.ttLib.tables.otBase import OTTableWriter
682*e1fe3e4aSElliott Hughes
683*e1fe3e4aSElliott Hughes    parser = ArgumentParser(prog="varLib.varStore", description=main.__doc__)
684*e1fe3e4aSElliott Hughes    parser.add_argument("--quantization", type=int, default=1)
685*e1fe3e4aSElliott Hughes    parser.add_argument("fontfile")
686*e1fe3e4aSElliott Hughes    parser.add_argument("outfile", nargs="?")
687*e1fe3e4aSElliott Hughes    options = parser.parse_args(args)
688*e1fe3e4aSElliott Hughes
689*e1fe3e4aSElliott Hughes    # TODO: allow user to configure logging via command-line options
690*e1fe3e4aSElliott Hughes    configLogger(level="INFO")
691*e1fe3e4aSElliott Hughes
692*e1fe3e4aSElliott Hughes    quantization = options.quantization
693*e1fe3e4aSElliott Hughes    fontfile = options.fontfile
694*e1fe3e4aSElliott Hughes    outfile = options.outfile
695*e1fe3e4aSElliott Hughes
696*e1fe3e4aSElliott Hughes    font = TTFont(fontfile)
697*e1fe3e4aSElliott Hughes    gdef = font["GDEF"]
698*e1fe3e4aSElliott Hughes    store = gdef.table.VarStore
699*e1fe3e4aSElliott Hughes
700*e1fe3e4aSElliott Hughes    writer = OTTableWriter()
701*e1fe3e4aSElliott Hughes    store.compile(writer, font)
702*e1fe3e4aSElliott Hughes    size = len(writer.getAllData())
703*e1fe3e4aSElliott Hughes    print("Before: %7d bytes" % size)
704*e1fe3e4aSElliott Hughes
705*e1fe3e4aSElliott Hughes    varidx_map = store.optimize(quantization=quantization)
706*e1fe3e4aSElliott Hughes
707*e1fe3e4aSElliott Hughes    writer = OTTableWriter()
708*e1fe3e4aSElliott Hughes    store.compile(writer, font)
709*e1fe3e4aSElliott Hughes    size = len(writer.getAllData())
710*e1fe3e4aSElliott Hughes    print("After:  %7d bytes" % size)
711*e1fe3e4aSElliott Hughes
712*e1fe3e4aSElliott Hughes    if outfile is not None:
713*e1fe3e4aSElliott Hughes        gdef.table.remap_device_varidxes(varidx_map)
714*e1fe3e4aSElliott Hughes        if "GPOS" in font:
715*e1fe3e4aSElliott Hughes            font["GPOS"].table.remap_device_varidxes(varidx_map)
716*e1fe3e4aSElliott Hughes
717*e1fe3e4aSElliott Hughes        font.save(outfile)
718*e1fe3e4aSElliott Hughes
719*e1fe3e4aSElliott Hughes
720*e1fe3e4aSElliott Hughesif __name__ == "__main__":
721*e1fe3e4aSElliott Hughes    import sys
722*e1fe3e4aSElliott Hughes
723*e1fe3e4aSElliott Hughes    if len(sys.argv) > 1:
724*e1fe3e4aSElliott Hughes        sys.exit(main())
725*e1fe3e4aSElliott Hughes    import doctest
726*e1fe3e4aSElliott Hughes
727*e1fe3e4aSElliott Hughes    sys.exit(doctest.testmod().failed)
728