interpolatableHelpers.py

from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
from fontTools.pens.basePen import AbstractPen, BasePen, DecomposingPen
from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
from fontTools.pens.recordingPen import RecordingPen, DecomposingRecordingPen
from fontTools.misc.transform import Transform
from collections import defaultdict, deque
from math import sqrt, copysign, atan2, pi
from enum import Enum
import itertools
import logging

log = logging.getLogger("fontTools.varLib.interpolatable")


class InterpolatableProblem:
    NOTHING = "nothing"
    MISSING = "missing"
    OPEN_PATH = "open_path"
    PATH_COUNT = "path_count"
    NODE_COUNT = "node_count"
    NODE_INCOMPATIBILITY = "node_incompatibility"
    CONTOUR_ORDER = "contour_order"
    WRONG_START_POINT = "wrong_start_point"
    KINK = "kink"
    UNDERWEIGHT = "underweight"
    OVERWEIGHT = "overweight"

    severity = {
        MISSING: 1,
        OPEN_PATH: 2,
        PATH_COUNT: 3,
        NODE_COUNT: 4,
        NODE_INCOMPATIBILITY: 5,
        CONTOUR_ORDER: 6,
        WRONG_START_POINT: 7,
        KINK: 8,
        UNDERWEIGHT: 9,
        OVERWEIGHT: 10,
        NOTHING: 11,
    }


def sort_problems(problems):
    """Sort problems by severity, then by glyph name, then by problem message."""
    return dict(
        sorted(
            problems.items(),
            key=lambda _: -min(
                (
                    (InterpolatableProblem.severity[p["type"]] + p.get("tolerance", 0))
                    for p in _[1]
                ),
            ),
            reverse=True,
        )
    )
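
# For example (hypothetical problems dict): glyphs whose worst problem has a
# lower severity number sort first, so a MISSING glyph outranks a KINK:
#
#   sort_problems({
#       "a": [{"type": InterpolatableProblem.KINK}],
#       "b": [{"type": InterpolatableProblem.MISSING}],
#   })
#   # -> {"b": [...], "a": [...]}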


def rot_list(l, k):
    """Rotate list by k items forward.  I.e., the item at position 0 will be
    at position k in the returned list.  Negative k is allowed."""
    return l[-k:] + l[:-k]
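
# A quick illustration of the rotation semantics:
#
#   rot_list([0, 1, 2, 3], 1)   # -> [3, 0, 1, 2]; item 0 is now at index 1
#   rot_list([0, 1, 2, 3], -1)  # -> [1, 2, 3, 0]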


class PerContourPen(BasePen):
    """A pen that records a glyph as a list of per-contour sub-pens."""

    def __init__(self, Pen, glyphset=None):
        BasePen.__init__(self, glyphset)
        self._glyphset = glyphset
        self._Pen = Pen
        self._pen = None
        self.value = []

    def _moveTo(self, p0):
        self._newItem()
        self._pen.moveTo(p0)

    def _lineTo(self, p1):
        self._pen.lineTo(p1)

    def _qCurveToOne(self, p1, p2):
        self._pen.qCurveTo(p1, p2)

    def _curveToOne(self, p1, p2, p3):
        self._pen.curveTo(p1, p2, p3)

    def _closePath(self):
        self._pen.closePath()
        self._pen = None

    def _endPath(self):
        self._pen.endPath()
        self._pen = None

    def _newItem(self):
        self._pen = pen = self._Pen()
        self.value.append(pen)


class PerContourOrComponentPen(PerContourPen):
    def addComponent(self, glyphName, transformation):
        self._newItem()
        self.value[-1].addComponent(glyphName, transformation)


class SimpleRecordingPointPen(AbstractPointPen):
    def __init__(self):
        self.value = []

    def beginPath(self, identifier=None, **kwargs):
        pass

    def endPath(self) -> None:
        pass

    def addPoint(self, pt, segmentType=None):
        # Record (point, is-on-curve); segmentType is None for off-curve points.
        self.value.append((pt, segmentType is not None))


def vdiff_hypot2(v0, v1):
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d * d
    return s


def vdiff_hypot2_complex(v0, v1):
    s = 0
    for x0, x1 in zip(v0, v1):
        d = x1 - x0
        s += d.real * d.real + d.imag * d.imag
        # This does the same but seems to be slower:
        # s += (d * d.conjugate()).real
    return s
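
# Both helpers return the *squared* Euclidean distance (no sqrt), e.g.:
#
#   vdiff_hypot2((0, 0), (3, 4))            # -> 25
#   vdiff_hypot2_complex((0j,), (3 + 4j,))  # -> 25.0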


def matching_cost(G, matching):
    return sum(G[i][j] for i, j in enumerate(matching))


def min_cost_perfect_bipartite_matching_scipy(G):
    n = len(G)
    rows, cols = linear_sum_assignment(G)
    assert (rows == list(range(n))).all()
    return list(cols), matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_munkres(G):
    n = len(G)
    cols = [None] * n
    for row, col in Munkres().compute(G):
        cols[row] = col
    return cols, matching_cost(G, cols)


def min_cost_perfect_bipartite_matching_bruteforce(G):
    n = len(G)

    if n > 6:
        raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")

    # Otherwise just brute-force
    permutations = itertools.permutations(range(n))
    best = list(next(permutations))
    best_cost = matching_cost(G, best)
    for p in permutations:
        cost = matching_cost(G, p)
        if cost < best_cost:
            best, best_cost = list(p), cost
    return best, best_cost


try:
    from scipy.optimize import linear_sum_assignment

    min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
except ImportError:
    try:
        from munkres import Munkres

        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_munkres
        )
    except ImportError:
        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_bruteforce
        )
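
# Whichever backend is selected, the interface is the same: given a square
# cost matrix, return (matching, cost) where matching[row] == column, e.g.:
#
#   matching, cost = min_cost_perfect_bipartite_matching([[10, 1], [1, 10]])
#   # matching == [1, 0], cost == 2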


def contour_vector_from_stats(stats):
    # Don't change the order of items here.
    # It's okay to add to the end, but otherwise, other
    # code depends on it. Search for "covariance".
    size = sqrt(abs(stats.area))
    return (
        copysign(size, stats.area),
        stats.meanX,
        stats.meanY,
        stats.stddevX * 2,
        stats.stddevY * 2,
        stats.correlation * size,
    )
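
# The resulting 6-tuple puts all components in comparable, distance-like
# units: signed size (the sign encodes contour winding direction), centroid,
# doubled spread, and a size-scaled correlation term. matching_for_vectors()
# below compares these vectors with vdiff_hypot2().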


def matching_for_vectors(m0, m1):
    n = len(m0)
    identity_matching = list(range(n))
    costs = [[vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
    (
        matching,
        matching_cost,
    ) = min_cost_perfect_bipartite_matching(costs)
    identity_cost = sum(costs[i][i] for i in range(n))
    return matching, matching_cost, identity_cost


def points_characteristic_bits(points):
    bits = 0
    for pt, b in reversed(points):
        bits = (bits << 1) | b
    return bits
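
# The characteristic bits pack each point's on-curve flag into an integer,
# first point in the lowest bit. For example:
#
#   points_characteristic_bits([((0, 0), True), ((1, 0), False), ((1, 1), True)])
#   # -> 0b101 == 5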


_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4


def points_complex_vector(points):
    vector = []
    if not points:
        return vector
    points = [complex(*pt) for pt, _ in points]
    n = len(points)
    assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
    points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
        points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    for i in range(n):
        # The weights are magic numbers.

        # The point itself
        p0 = points[i]
        vector.append(p0)

        # The vector to the next point
        p1 = points[i + 1]
        d0 = p1 - p0
        vector.append(d0 * 3)

        # The turn vector
        p2 = points[i + 2]
        d1 = p2 - p1
        vector.append(d1 - d0)

        # The angle to the next point, as a cross product;
        # square root of, to match the dimensionality of distance.
        cross = d0.real * d1.imag - d0.imag * d1.real
        cross = copysign(sqrt(abs(cross)), cross)
        vector.append(cross * 4)

    return vector
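
# Each input point thus contributes exactly
# _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4 complex entries (position,
# scaled tangent, turn, signed-sqrt cross product), so the output length
# is always 4 * len(points).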


def add_isomorphisms(points, isomorphisms, reverse):
    reference_bits = points_characteristic_bits(points)
    n = len(points)

    # if points[0][0] == points[-1][0]:
    #   abort

    if reverse:
        points = points[::-1]
        bits = points_characteristic_bits(points)
    else:
        bits = reference_bits

    vector = points_complex_vector(points)

    assert len(vector) % n == 0
    mult = len(vector) // n
    mask = (1 << n) - 1

    for i in range(n):
        # Rotate the n-bit characteristic pattern right by i; a rotation is
        # only a candidate if it maps the on-/off-curve pattern onto itself.
        b = ((bits << (n - i)) & mask) | (bits >> i)
        if b == reference_bits:
            isomorphisms.append(
                (rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
            )


def find_parents_and_order(glyphsets, locations):
    parents = [None] + list(range(len(glyphsets) - 1))
    order = list(range(len(glyphsets)))
    if locations:
        # Order base master first
        bases = [
            i for i, l in enumerate(locations) if all(v == 0 for v in l.values())
        ]
        if bases:
            base = bases[0]
            log.info("Base master index %s, location %s", base, locations[base])
        else:
            base = 0
            log.warning("No base master location found")

        # Form a minimum spanning tree of the locations
        try:
            from scipy.sparse.csgraph import minimum_spanning_tree

            graph = [[0] * len(locations) for _ in range(len(locations))]
            axes = set()
            for l in locations:
                axes.update(l.keys())
            axes = sorted(axes)
            vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
            for i, j in itertools.combinations(range(len(locations)), 2):
                graph[i][j] = vdiff_hypot2(vectors[i], vectors[j])

            tree = minimum_spanning_tree(graph)
            rows, cols = tree.nonzero()
            graph = defaultdict(set)
            for row, col in zip(rows, cols):
                graph[row].add(col)
                graph[col].add(row)

            # Traverse graph from the base and assign parents
            parents = [None] * len(locations)
            order = []
            visited = set()
            queue = deque([base])
            while queue:
                i = queue.popleft()
                visited.add(i)
                order.append(i)
                for j in sorted(graph[i]):
                    if j not in visited:
                        parents[j] = i
                        queue.append(j)
        except ImportError:
            pass

    log.info("Parents: %s", parents)
    log.info("Order: %s", order)
    return parents, order
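
# A sketch of the expected behavior (hypothetical three-master setup on a
# single wght axis, scipy available):
#
#   parents, order = find_parents_and_order(
#       glyphsets, [{}, {"wght": 0.5}, {"wght": 1.0}]
#   )
#   # order == [0, 1, 2]; parents == [None, 0, 1]
#
# i.e. each master is compared against its nearest neighbor along the
# spanning tree, not necessarily against the base master directly.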


def transform_from_stats(stats, inverse=False):
    # https://cookierobotics.com/007/
    a = stats.varianceX
    b = stats.covariance
    c = stats.varianceY

    delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
    lambda1 = (a + c) * 0.5 + delta  # Major eigenvalue
    lambda2 = (a + c) * 0.5 - delta  # Minor eigenvalue
    theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)
    trans = Transform()
    if lambda2 < 0:
        # XXX This is a hack.
        # The problem is that the covariance matrix is singular.
        # This happens when the contour is a line, or a circle.
        # In that case, the covariance matrix is not a good
        # representation of the contour.
        # We should probably detect this earlier and avoid
        # computing the covariance matrix in the first place.
        # But for now, we just avoid the division by zero.
        lambda2 = 0

    if inverse:
        trans = trans.translate(-stats.meanX, -stats.meanY)
        trans = trans.rotate(-theta)
        trans = trans.scale(1 / sqrt(lambda1), 1 / sqrt(lambda2))
    else:
        trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
        trans = trans.rotate(theta)
        trans = trans.translate(stats.meanX, stats.meanY)

    return trans
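
# A rough sanity check of the two modes (assumes a `stats` object exposing
# the attributes read above, e.g. a fontTools StatisticsPen, with
# lambda2 > 0): the inverse transform normalizes a contour, and composing
# the forward transform with it recovers the identity up to floating-point
# error:
#
#   forward = transform_from_stats(stats)
#   backward = transform_from_stats(stats, inverse=True)
#   # forward.transform(backward) ~= Transform()  (identity)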