varStore.py
from fontTools.misc.roundTools import noRound, otRound
from fontTools.misc.intTools import bit_count
from fontTools.ttLib.tables import otTables as ot
from fontTools.varLib.models import supportScalar
from fontTools.varLib.builder import (
    buildVarRegionList,
    buildVarStore,
    buildVarRegion,
    buildVarData,
)
from functools import partial
from collections import defaultdict
from heapq import heappush, heappop

NO_VARIATION_INDEX = ot.NO_VARIATION_INDEX
ot.VarStore.NO_VARIATION_INDEX = NO_VARIATION_INDEX


def _getLocationKey(loc):
    return tuple(sorted(loc.items(), key=lambda kv: kv[0]))
class OnlineVarStoreBuilder(object):
    def __init__(self, axisTags):
        self._axisTags = axisTags
        self._regionMap = {}
        self._regionList = buildVarRegionList([], axisTags)
        self._store = buildVarStore(self._regionList, [])
        self._data = None
        self._model = None
        self._supports = None
        self._varDataIndices = {}
        self._varDataCaches = {}
        self._cache = None

    def setModel(self, model):
        self.setSupports(model.supports)
        self._model = model

    def setSupports(self, supports):
        self._model = None
        self._supports = list(supports)
        if not self._supports[0]:
            del self._supports[0]  # Drop base master support
        self._cache = None
        self._data = None

    def finish(self, optimize=True):
        self._regionList.RegionCount = len(self._regionList.Region)
        self._store.VarDataCount = len(self._store.VarData)
        for data in self._store.VarData:
            data.ItemCount = len(data.Item)
            data.calculateNumShorts(optimize=optimize)
        return self._store

    def _add_VarData(self, num_items=1):
        regionMap = self._regionMap
        regionList = self._regionList

        regions = self._supports
        regionIndices = []
        for region in regions:
            key = _getLocationKey(region)
            idx = regionMap.get(key)
            if idx is None:
                varRegion = buildVarRegion(region, self._axisTags)
                idx = regionMap[key] = len(regionList.Region)
                regionList.Region.append(varRegion)
            regionIndices.append(idx)

        # Check if we have one already...
        key = tuple(regionIndices)
        varDataIdx = self._varDataIndices.get(key)
        if varDataIdx is not None:
            self._outer = varDataIdx
            self._data = self._store.VarData[varDataIdx]
            self._cache = self._varDataCaches[key]
            if len(self._data.Item) + num_items > 0xFFFF:
                # This is full. Need new one.
                varDataIdx = None

        if varDataIdx is None:
            self._data = buildVarData(regionIndices, [], optimize=False)
            self._outer = len(self._store.VarData)
            self._store.VarData.append(self._data)
            self._varDataIndices[key] = self._outer
            if key not in self._varDataCaches:
                self._varDataCaches[key] = {}
            self._cache = self._varDataCaches[key]

    def storeMasters(self, master_values, *, round=round):
        deltas = self._model.getDeltas(master_values, round=round)
        base = deltas.pop(0)
        return base, self.storeDeltas(deltas, round=noRound)

    def storeMastersMany(self, master_values_list, *, round=round):
        deltas_list = [
            self._model.getDeltas(master_values, round=round)
            for master_values in master_values_list
        ]
        base_list = [deltas.pop(0) for deltas in deltas_list]
        return base_list, self.storeDeltasMany(deltas_list, round=noRound)

    def storeDeltas(self, deltas, *, round=round):
        deltas = [round(d) for d in deltas]
        if len(deltas) == len(self._supports) + 1:
            deltas = tuple(deltas[1:])
        else:
            assert len(deltas) == len(self._supports)
            deltas = tuple(deltas)

        if not self._data:
            self._add_VarData()
        varIdx = self._cache.get(deltas)
        if varIdx is not None:
            return varIdx

        inner = len(self._data.Item)
        if inner == 0xFFFF:
            # Full array. Start new one.
            self._add_VarData()
            return self.storeDeltas(deltas, round=noRound)
        self._data.addItem(deltas, round=noRound)

        varIdx = (self._outer << 16) + inner
        self._cache[deltas] = varIdx
        return varIdx

    def storeDeltasMany(self, deltas_list, *, round=round):
        deltas_list = [[round(d) for d in deltas] for deltas in deltas_list]
        deltas_list = tuple(tuple(deltas) for deltas in deltas_list)

        if not self._data:
            self._add_VarData(len(deltas_list))
        varIdx = self._cache.get(deltas_list)
        if varIdx is not None:
            return varIdx

        inner = len(self._data.Item)
        if inner + len(deltas_list) > 0xFFFF:
            # Full array. Start new one.
            self._add_VarData(len(deltas_list))
            return self.storeDeltasMany(deltas_list, round=noRound)
        for i, deltas in enumerate(deltas_list):
            self._data.addItem(deltas, round=noRound)
            varIdx = (self._outer << 16) + inner + i
            self._cache[deltas] = varIdx

        varIdx = (self._outer << 16) + inner
        self._cache[deltas_list] = varIdx
        return varIdx
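# Example usage (an illustrative sketch, not part of the module): building a
# store online while merging masters. VariationModel comes from
# fontTools.varLib.models; the axis tag and master values below are made up.
#
#     from fontTools.varLib.models import VariationModel
#
#     model = VariationModel([{}, {"wght": 1.0}])  # default + one master
#     builder = OnlineVarStoreBuilder(["wght"])
#     builder.setModel(model)
#     base, varIdx = builder.storeMasters([100, 180])  # base=100; delta row stored
#     store = builder.finish()  # ot.VarStore with counts recalculated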
def VarData_addItem(self, deltas, *, round=round):
    deltas = [round(d) for d in deltas]

    countUs = self.VarRegionCount
    countThem = len(deltas)
    if countUs + 1 == countThem:
        deltas = list(deltas[1:])
    else:
        assert countUs == countThem, (countUs, countThem)
        deltas = list(deltas)
    self.Item.append(deltas)
    self.ItemCount = len(self.Item)


ot.VarData.addItem = VarData_addItem


def VarRegion_get_support(self, fvar_axes):
    return {
        fvar_axes[i].axisTag: (reg.StartCoord, reg.PeakCoord, reg.EndCoord)
        for i, reg in enumerate(self.VarRegionAxis)
        if reg.PeakCoord != 0
    }


ot.VarRegion.get_support = VarRegion_get_support


def VarStore___bool__(self):
    return bool(self.VarData)


ot.VarStore.__bool__ = VarStore___bool__


class VarStoreInstancer(object):
    def __init__(self, varstore, fvar_axes, location={}):
        self.fvar_axes = fvar_axes
        assert varstore is None or varstore.Format == 1
        self._varData = varstore.VarData if varstore else []
        self._regions = varstore.VarRegionList.Region if varstore else []
        self.setLocation(location)

    def setLocation(self, location):
        self.location = dict(location)
        self._clearCaches()

    def _clearCaches(self):
        self._scalars = {}

    def _getScalar(self, regionIdx):
        scalar = self._scalars.get(regionIdx)
        if scalar is None:
            support = self._regions[regionIdx].get_support(self.fvar_axes)
            scalar = supportScalar(self.location, support)
            self._scalars[regionIdx] = scalar
        return scalar

    @staticmethod
    def interpolateFromDeltasAndScalars(deltas, scalars):
        delta = 0.0
        for d, s in zip(deltas, scalars):
            if not s:
                continue
            delta += d * s
        return delta

    def __getitem__(self, varidx):
        major, minor = varidx >> 16, varidx & 0xFFFF
        if varidx == NO_VARIATION_INDEX:
            return 0.0
        varData = self._varData
        scalars = [self._getScalar(ri) for ri in varData[major].VarRegionIndex]
        deltas = varData[major].Item[minor]
        return self.interpolateFromDeltasAndScalars(deltas, scalars)

    def interpolateFromDeltas(self, varDataIndex, deltas):
        varData = self._varData
        scalars = [self._getScalar(ri) for ri in varData[varDataIndex].VarRegionIndex]
        return self.interpolateFromDeltasAndScalars(deltas, scalars)
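# Example usage (an illustrative sketch): resolving a stored delta at a
# normalized design-space location. `font` is assumed to be a TTFont with an
# fvar table and a GDEF VarStore; `varIdx` is a (major << 16) + minor index.
#
#     instancer = VarStoreInstancer(
#         font["GDEF"].table.VarStore, font["fvar"].axes, {"wght": 0.5}
#     )
#     delta = instancer[varIdx]  # deltas weighted by per-region scalars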
#
# Optimizations
#
# retainFirstMap - If true, major 0 mappings are retained. Deltas for unused indices are zeroed
# advIdxes - Set of major 0 indices for advance deltas to be listed first. Other major 0 indices follow.


def VarStore_subset_varidxes(
    self,
    varIdxes,
    optimize=True,
    retainFirstMap=False,
    advIdxes=set(),
    *,
    VarData="VarData",
):
    # Sort out used varIdxes by major/minor.
    used = defaultdict(set)
    for varIdx in varIdxes:
        if varIdx == NO_VARIATION_INDEX:
            continue
        major = varIdx >> 16
        minor = varIdx & 0xFFFF
        used[major].add(minor)
    del varIdxes

    #
    # Subset VarData
    #
    varData = getattr(self, VarData)
    newVarData = []
    varDataMap = {NO_VARIATION_INDEX: NO_VARIATION_INDEX}
    for major, data in enumerate(varData):
        usedMinors = used.get(major)
        if usedMinors is None:
            continue
        newMajor = len(newVarData)
        newVarData.append(data)
        items = data.Item
        newItems = []
        if major == 0 and retainFirstMap:
            for minor in range(len(items)):
                newItems.append(
                    items[minor] if minor in usedMinors else [0] * len(items[minor])
                )
                varDataMap[minor] = minor
        else:
            if major == 0:
                minors = sorted(advIdxes) + sorted(usedMinors - advIdxes)
            else:
                minors = sorted(usedMinors)
            for minor in minors:
                newMinor = len(newItems)
                newItems.append(items[minor])
                varDataMap[(major << 16) + minor] = (newMajor << 16) + newMinor

        data.Item = newItems
        data.ItemCount = len(data.Item)

        if VarData == "VarData":
            data.calculateNumShorts(optimize=optimize)

    setattr(self, VarData, newVarData)
    setattr(self, VarData + "Count", len(newVarData))

    self.prune_regions()

    return varDataMap


ot.VarStore.subset_varidxes = VarStore_subset_varidxes
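# Example usage (an illustrative sketch): subsetting a store to the indices a
# GPOS table actually uses, then rewriting its Device tables through the
# returned map. `font` and `store` are assumed to exist.
#
#     varidxes = set()
#     font["GPOS"].table.collect_device_varidxes(varidxes)
#     varidx_map = store.subset_varidxes(varidxes)
#     font["GPOS"].table.remap_device_varidxes(varidx_map)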
def VarStore_prune_regions(self, *, VarData="VarData", VarRegionList="VarRegionList"):
    """Remove unused VarRegions."""
    #
    # Subset VarRegionList
    #

    # Collect.
    usedRegions = set()
    for data in getattr(self, VarData):
        usedRegions.update(data.VarRegionIndex)
    # Subset.
    regionList = getattr(self, VarRegionList)
    regions = regionList.Region
    newRegions = []
    regionMap = {}
    for i in sorted(usedRegions):
        regionMap[i] = len(newRegions)
        newRegions.append(regions[i])
    regionList.Region = newRegions
    regionList.RegionCount = len(regionList.Region)
    # Map.
    for data in getattr(self, VarData):
        data.VarRegionIndex = [regionMap[i] for i in data.VarRegionIndex]


ot.VarStore.prune_regions = VarStore_prune_regions


def _visit(self, func):
    """Recurse down from self; if an object's type is ot.Device,
    call func() on it.  Works on otData-style classes."""

    if type(self) == ot.Device:
        func(self)

    elif isinstance(self, list):
        for that in self:
            _visit(that, func)

    elif hasattr(self, "getConverters") and not hasattr(self, "postRead"):
        for conv in self.getConverters():
            that = getattr(self, conv.name, None)
            if that is not None:
                _visit(that, func)

    elif isinstance(self, ot.ValueRecord):
        for that in self.__dict__.values():
            _visit(that, func)


def _Device_recordVarIdx(self, s):
    """Add VarIdx in this Device table (if any) to the set s."""
    if self.DeltaFormat == 0x8000:
        s.add((self.StartSize << 16) + self.EndSize)


def Object_collect_device_varidxes(self, varidxes):
    adder = partial(_Device_recordVarIdx, s=varidxes)
    _visit(self, adder)


ot.GDEF.collect_device_varidxes = Object_collect_device_varidxes
ot.GPOS.collect_device_varidxes = Object_collect_device_varidxes


def _Device_mapVarIdx(self, mapping, done):
    """Map VarIdx in this Device table (if any) through mapping."""
    if id(self) in done:
        return
    done.add(id(self))
    if self.DeltaFormat == 0x8000:
        varIdx = mapping[(self.StartSize << 16) + self.EndSize]
        self.StartSize = varIdx >> 16
        self.EndSize = varIdx & 0xFFFF


def Object_remap_device_varidxes(self, varidxes_map):
    mapper = partial(_Device_mapVarIdx, mapping=varidxes_map, done=set())
    _visit(self, mapper)


ot.GDEF.remap_device_varidxes = Object_remap_device_varidxes
ot.GPOS.remap_device_varidxes = Object_remap_device_varidxes
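# Note on the helpers above: for DeltaFormat 0x8000 (VariationIndex) Device
# tables, the OpenType spec reuses the StartSize/EndSize fields as the
# delta-set outer/inner indices, which is why both helpers pack them as
# (StartSize << 16) + EndSize.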
class _Encoding(object):
    def __init__(self, chars):
        self.chars = chars
        self.width = bit_count(chars)
        self.columns = self._columns(chars)
        self.overhead = self._characteristic_overhead(self.columns)
        self.items = set()

    def append(self, row):
        self.items.add(row)

    def extend(self, lst):
        self.items.update(lst)

    def get_room(self):
        """Maximum number of bytes that can be added to characteristic
        while still being beneficial to merge it into another one."""
        count = len(self.items)
        return max(0, (self.overhead - 1) // count - self.width)

    room = property(get_room)

    def get_gain(self):
        """Maximum possible byte gain from merging this into another
        characteristic."""
        count = len(self.items)
        return max(0, self.overhead - count)

    gain = property(get_gain)
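    # Worked example (assumed numbers): an encoding with three one-byte
    # columns has width 3 and overhead 10 + 3 * 2 = 16 bytes. With 5 rows,
    # its gain is max(0, 16 - 5) = 11 bytes and its room is
    # (16 - 1) // 5 - 3 = 0 bytes.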
    def gain_sort_key(self):
        return self.gain, self.chars

    def width_sort_key(self):
        return self.width, self.chars

    @staticmethod
    def _characteristic_overhead(columns):
        """Returns overhead in bytes of encoding this characteristic
        as a VarData."""
        c = 4 + 6  # 4 bytes for LOffset, 6 bytes for VarData header
        c += bit_count(columns) * 2
        return c
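    # Worked example: a characteristic whose active columns are 0 and 1 has
    # columns == 0b11, so the overhead is 4 + 6 + bit_count(0b11) * 2 == 14
    # bytes (LOffset + VarData header + two VarRegionIndex entries).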
    @staticmethod
    def _columns(chars):
        cols = 0
        i = 1
        while chars:
            if chars & 0b1111:
                cols |= i
            chars >>= 4
            i <<= 1
        return cols

    def gain_from_merging(self, other_encoding):
        combined_chars = other_encoding.chars | self.chars
        combined_width = bit_count(combined_chars)
        combined_columns = self.columns | other_encoding.columns
        combined_overhead = _Encoding._characteristic_overhead(combined_columns)
        combined_gain = (
            +self.overhead
            + other_encoding.overhead
            - combined_overhead
            - (combined_width - self.width) * len(self.items)
            - (combined_width - other_encoding.width) * len(other_encoding.items)
        )
        return combined_gain
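# Worked example for gain_from_merging (assumed numbers): merging encoding A
# (chars == 0x01, width 1, overhead 12, 4 rows) with encoding B
# (chars == 0x10, width 1, overhead 12, 2 rows) yields combined width 2 and
# overhead 14, so the gain is 12 + 12 - 14 - (2 - 1) * 4 - (2 - 1) * 2 = 4
# bytes; a non-positive result means the merge is not worth it.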
class _EncodingDict(dict):
    def __missing__(self, chars):
        r = self[chars] = _Encoding(chars)
        return r

    def add_row(self, row):
        chars = self._row_characteristics(row)
        self[chars].append(row)

    @staticmethod
    def _row_characteristics(row):
        """Returns encoding characteristics for a row."""
        longWords = False

        chars = 0
        i = 1
        for v in row:
            if v:
                chars += i
            if not (-128 <= v <= 127):
                chars += i * 0b0010
            if not (-32768 <= v <= 32767):
                longWords = True
                break
            i <<= 4

        if longWords:
            # Redo; only allow 2byte/4byte encoding
            chars = 0
            i = 1
            for v in row:
                if v:
                    chars += i * 0b0011
                if not (-32768 <= v <= 32767):
                    chars += i * 0b1100
                i <<= 4

        return chars
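# Worked example: for the row (1, 300, 0), column 0 fits in one byte
# (nibble 0b0001) and column 1 needs two bytes (nibble 0b0011), so
# _row_characteristics returns 0b0011_0001 == 0x31; column 2 is zero and
# contributes nothing.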
def VarStore_optimize(self, use_NO_VARIATION_INDEX=True, quantization=1):
    """Optimize storage. Returns mapping from old VarIdxes to new ones."""

    # Overview:
    #
    # For each VarData row, we first extend it with zeroes to have
    # one column per region in VarRegionList. We then group the
    # rows into _Encoding objects, by their "characteristic" bitmap.
    # The characteristic bitmap is a binary number representing how
    # many bytes each column of the data takes up to encode. Each
    # column is encoded in four bits. For example, if a column has
    # only values in the range -128..127, it would only have a single
    # bit set in the characteristic bitmap for that column. If it has
    # values in the range -32768..32767, it would have two bits set.
    # The number of ones in the characteristic bitmap is the "width"
    # of the encoding.
    #
    # Each encoding as such has a number of "active" (i.e. non-zero)
    # columns. The overhead of encoding the characteristic bitmap
    # is 10 bytes, plus 2 bytes per active column.
    #
    # When an encoding is merged into another one, if the characteristic
    # of the old encoding is a subset of the new one, then the overhead
    # of the old encoding is completely eliminated. However, each row
    # now requires more bytes to encode, to the tune of one byte
    # per characteristic bit that is active in the new encoding but not
    # in the old one. The number of bits that can be added to an encoding
    # while it still remains beneficial to merge it into another encoding
    # is called the "room" for that encoding.
    #
    # The "gain" of an encoding is the maximum number of bytes we can
    # save by merging it into another encoding. The "gain" of merging
    # two encodings is how many bytes we save by doing so.
    #
    # High-level algorithm:
    #
    # - Each encoding has a minimal way to encode it. However, because
    #   of the overhead of encoding the characteristic bitmap, it may
    #   be beneficial to merge two encodings together, if there is
    #   gain in doing so. As such, we need to search for the best
    #   such successive merges.
    #
    # Algorithm:
    #
    # - Put all encodings into a "todo" list.
    #
    # - Sort the todo list by decreasing gain (for stability).
    #
    # - Make a priority queue of the gain from combining each two
    #   encodings in the todo list. The priority queue is sorted by
    #   decreasing gain. Only positive gains are included.
    #
    # - While the priority queue is not empty:
    #   - Pop the first item from the priority queue,
    #   - Merge the two encodings it represents,
    #   - Remove the two encodings from the todo list,
    #   - Insert positive gains from combining the new encoding with
    #     all existing todo list items into the priority queue,
    #   - If a todo list item with the same characteristic bitmap as
    #     the new encoding exists, remove it from the todo list and
    #     merge it into the new encoding.
    #   - Insert the new encoding into the todo list.
    #
    # - Encode all remaining items in the todo list.
    #
    # The output is then sorted for stability, in the following way:
    # - The VarRegionList of the input is kept intact.
    # - All encodings are sorted before the main algorithm, by
    #   gain_sort_key(), which is a tuple of the following items:
    #   * The gain of the encoding.
    #   * The characteristic bitmap of the encoding, with higher-numbered
    #     columns compared first.
    # - The VarData is sorted by width_sort_key(), which is a tuple
    #   of the following items:
    #   * The "width" of the encoding.
    #   * The characteristic bitmap of the encoding, with higher-numbered
    #     columns compared first.
    # - Within each VarData, the items are sorted as vectors of numbers.
    #
    # Finally, each VarData is optimized to remove the empty columns and
    # reorder columns as needed.
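    # Illustration of the trade-off (assumed numbers): two one-byte columns
    # each in their own VarData cost 2 * (10 + 2) = 24 bytes of overhead;
    # merged into a single two-column VarData the overhead drops to
    # 10 + 4 = 14 bytes, at the cost of one extra zero byte per row that
    # only used one of the columns.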
    # TODO
    # Check that no two VarRegions are the same; if they are, fold them.

    n = len(self.VarRegionList.Region)  # Number of columns
    zeroes = [0] * n

    front_mapping = {}  # Map from old VarIdxes to full row tuples

    encodings = _EncodingDict()

    # Collect all items into a set of full rows (with lots of zeroes.)
    for major, data in enumerate(self.VarData):
        regionIndices = data.VarRegionIndex

        for minor, item in enumerate(data.Item):
            row = list(zeroes)

            if quantization == 1:
                for regionIdx, v in zip(regionIndices, item):
                    row[regionIdx] += v
            else:
                for regionIdx, v in zip(regionIndices, item):
                    row[regionIdx] += (
                        round(v / quantization) * quantization
                    )  # TODO https://github.com/fonttools/fonttools/pull/3126#discussion_r1205439785

            row = tuple(row)

            if use_NO_VARIATION_INDEX and not any(row):
                front_mapping[(major << 16) + minor] = None
                continue

            encodings.add_row(row)
            front_mapping[(major << 16) + minor] = row

    # Prepare for the main algorithm.
    todo = sorted(encodings.values(), key=_Encoding.gain_sort_key)
    del encodings

    # Repeatedly pick two best encodings to combine, and combine them.
    heap = []
    for i, encoding in enumerate(todo):
        for j in range(i + 1, len(todo)):
            other_encoding = todo[j]
            combining_gain = encoding.gain_from_merging(other_encoding)
            if combining_gain > 0:
                heappush(heap, (-combining_gain, i, j))

    while heap:
        _, i, j = heappop(heap)
        if todo[i] is None or todo[j] is None:
            continue

        encoding, other_encoding = todo[i], todo[j]
        todo[i], todo[j] = None, None

        # Combine the two encodings.
        combined_chars = other_encoding.chars | encoding.chars
        combined_encoding = _Encoding(combined_chars)
        combined_encoding.extend(encoding.items)
        combined_encoding.extend(other_encoding.items)

        for k, enc in enumerate(todo):
            if enc is None:
                continue

            # In the unlikely event that the same encoding exists already,
            # combine it.
            if enc.chars == combined_chars:
                combined_encoding.extend(enc.items)
                todo[k] = None
                continue

            combining_gain = combined_encoding.gain_from_merging(enc)
            if combining_gain > 0:
                heappush(heap, (-combining_gain, k, len(todo)))

        todo.append(combined_encoding)

    encodings = [encoding for encoding in todo if encoding is not None]

    # Assemble final store.
    back_mapping = {}  # Mapping from full rows to new VarIdxes
    encodings.sort(key=_Encoding.width_sort_key)
    self.VarData = []
    for encoding in encodings:
        items = sorted(encoding.items)
        while items:
            major = len(self.VarData)
            data = ot.VarData()
            self.VarData.append(data)
            data.VarRegionIndex = range(n)
            data.VarRegionCount = len(data.VarRegionIndex)

            # Each major can only encode up to 0xFFFF entries.
            data.Item, items = items[:0xFFFF], items[0xFFFF:]

            for minor, item in enumerate(data.Item):
                back_mapping[item] = (major << 16) + minor

    # Compile final mapping.
    varidx_map = {NO_VARIATION_INDEX: NO_VARIATION_INDEX}
    for k, v in front_mapping.items():
        varidx_map[k] = back_mapping[v] if v is not None else NO_VARIATION_INDEX

    # Recalculate things and go home.
    self.VarRegionList.RegionCount = len(self.VarRegionList.Region)
    self.VarDataCount = len(self.VarData)
    for data in self.VarData:
        data.ItemCount = len(data.Item)
        data.optimize()

    # Remove unused regions.
    self.prune_regions()

    return varidx_map


ot.VarStore.optimize = VarStore_optimize
def main(args=None):
    """Optimize a font's GDEF variation store"""
    from argparse import ArgumentParser
    from fontTools import configLogger
    from fontTools.ttLib import TTFont
    from fontTools.ttLib.tables.otBase import OTTableWriter

    parser = ArgumentParser(prog="varLib.varStore", description=main.__doc__)
    parser.add_argument("--quantization", type=int, default=1)
    parser.add_argument("fontfile")
    parser.add_argument("outfile", nargs="?")
    options = parser.parse_args(args)

    # TODO: allow user to configure logging via command-line options
    configLogger(level="INFO")

    quantization = options.quantization
    fontfile = options.fontfile
    outfile = options.outfile

    font = TTFont(fontfile)
    gdef = font["GDEF"]
    store = gdef.table.VarStore

    writer = OTTableWriter()
    store.compile(writer, font)
    size = len(writer.getAllData())
    print("Before: %7d bytes" % size)

    varidx_map = store.optimize(quantization=quantization)

    writer = OTTableWriter()
    store.compile(writer, font)
    size = len(writer.getAllData())
    print("After:  %7d bytes" % size)

    if outfile is not None:
        gdef.table.remap_device_varidxes(varidx_map)
        if "GPOS" in font:
            font["GPOS"].table.remap_device_varidxes(varidx_map)

        font.save(outfile)
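# Command-line sketch (assuming this module is installed as
# fontTools.varLib.varStore):
#
#     python -m fontTools.varLib.varStore --quantization=4 font.ttf font.out.ttf
#
# This prints the GDEF VarStore size before and after optimization and, when
# an output file is given, saves the font with remapped variation indices.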
if __name__ == "__main__":
    import sys

    if len(sys.argv) > 1:
        sys.exit(main())
    import doctest

    sys.exit(doctest.testmod().failed)