interpolatable.py 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211
  1. """
  2. Tool to find wrong contour order between different masters, and
  3. other interpolatability (or lack thereof) issues.
  4. Call as:
  5. $ fonttools varLib.interpolatable font1 font2 ...
  6. """
  7. from fontTools.pens.basePen import AbstractPen, BasePen
  8. from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
  9. from fontTools.pens.recordingPen import RecordingPen
  10. from fontTools.pens.statisticsPen import StatisticsPen, StatisticsControlPen
  11. from fontTools.pens.momentsPen import OpenContourError
  12. from fontTools.varLib.models import piecewiseLinearMap, normalizeLocation
  13. from fontTools.misc.fixedTools import floatToFixedToStr
  14. from fontTools.misc.transform import Transform
  15. from collections import defaultdict, deque
  16. from functools import wraps
  17. from pprint import pformat
  18. from math import sqrt, copysign, atan2, pi
  19. import itertools
  20. import logging
# Module-level logger; the CLI configures its level in main() via configLogger.
log = logging.getLogger("fontTools.varLib.interpolatable")
  22. def _rot_list(l, k):
  23. """Rotate list by k items forward. Ie. item at position 0 will be
  24. at position k in returned list. Negative k is allowed."""
  25. return l[-k:] + l[:-k]
  26. class PerContourPen(BasePen):
  27. def __init__(self, Pen, glyphset=None):
  28. BasePen.__init__(self, glyphset)
  29. self._glyphset = glyphset
  30. self._Pen = Pen
  31. self._pen = None
  32. self.value = []
  33. def _moveTo(self, p0):
  34. self._newItem()
  35. self._pen.moveTo(p0)
  36. def _lineTo(self, p1):
  37. self._pen.lineTo(p1)
  38. def _qCurveToOne(self, p1, p2):
  39. self._pen.qCurveTo(p1, p2)
  40. def _curveToOne(self, p1, p2, p3):
  41. self._pen.curveTo(p1, p2, p3)
  42. def _closePath(self):
  43. self._pen.closePath()
  44. self._pen = None
  45. def _endPath(self):
  46. self._pen.endPath()
  47. self._pen = None
  48. def _newItem(self):
  49. self._pen = pen = self._Pen()
  50. self.value.append(pen)
  51. class PerContourOrComponentPen(PerContourPen):
  52. def addComponent(self, glyphName, transformation):
  53. self._newItem()
  54. self.value[-1].addComponent(glyphName, transformation)
  55. class SimpleRecordingPointPen(AbstractPointPen):
  56. def __init__(self):
  57. self.value = []
  58. def beginPath(self, identifier=None, **kwargs):
  59. pass
  60. def endPath(self) -> None:
  61. pass
  62. def addPoint(self, pt, segmentType=None):
  63. self.value.append((pt, False if segmentType is None else True))
  64. def _vdiff_hypot2(v0, v1):
  65. s = 0
  66. for x0, x1 in zip(v0, v1):
  67. d = x1 - x0
  68. s += d * d
  69. return s
  70. def _vdiff_hypot2_complex(v0, v1):
  71. s = 0
  72. for x0, x1 in zip(v0, v1):
  73. d = x1 - x0
  74. s += d.real * d.real + d.imag * d.imag
  75. # This does the same but seems to be slower:
  76. # s += (d * d.conjugate()).real
  77. return s
  78. def _hypot2_complex(d):
  79. return d.real * d.real + d.imag * d.imag
  80. def _matching_cost(G, matching):
  81. return sum(G[i][j] for i, j in enumerate(matching))
  82. def min_cost_perfect_bipartite_matching_scipy(G):
  83. n = len(G)
  84. rows, cols = linear_sum_assignment(G)
  85. assert (rows == list(range(n))).all()
  86. return list(cols), _matching_cost(G, cols)
  87. def min_cost_perfect_bipartite_matching_munkres(G):
  88. n = len(G)
  89. cols = [None] * n
  90. for row, col in Munkres().compute(G):
  91. cols[row] = col
  92. return cols, _matching_cost(G, cols)
  93. def min_cost_perfect_bipartite_matching_bruteforce(G):
  94. n = len(G)
  95. if n > 6:
  96. raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")
  97. # Otherwise just brute-force
  98. permutations = itertools.permutations(range(n))
  99. best = list(next(permutations))
  100. best_cost = _matching_cost(G, best)
  101. for p in permutations:
  102. cost = _matching_cost(G, p)
  103. if cost < best_cost:
  104. best, best_cost = list(p), cost
  105. return best, best_cost
# Pick the best available bipartite-matching implementation at import time:
# scipy (fastest), then munkres, then the O(n!) brute force as a last resort.
try:
    from scipy.optimize import linear_sum_assignment

    min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
except ImportError:
    try:
        from munkres import Munkres

        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_munkres
        )
    except ImportError:
        # Neither optional dependency is installed; the brute-force fallback
        # raises for more than 6 contours (see its n > 6 guard).
        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_bruteforce
        )
  119. def _contour_vector_from_stats(stats):
  120. # Don't change the order of items here.
  121. # It's okay to add to the end, but otherwise, other
  122. # code depends on it. Search for "covariance".
  123. size = sqrt(abs(stats.area))
  124. return (
  125. copysign((size), stats.area),
  126. stats.meanX,
  127. stats.meanY,
  128. stats.stddevX * 2,
  129. stats.stddevY * 2,
  130. stats.correlation * size,
  131. )
  132. def _points_characteristic_bits(points):
  133. bits = 0
  134. for pt, b in reversed(points):
  135. bits = (bits << 1) | b
  136. return bits
# Number of entries appended per contour point by _points_complex_vector
# (the point itself, edge vector, turn vector, and cross term).
_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4
  138. def _points_complex_vector(points):
  139. vector = []
  140. if not points:
  141. return vector
  142. points = [complex(*pt) for pt, _ in points]
  143. n = len(points)
  144. assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
  145. points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
  146. while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
  147. points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
  148. for i in range(n):
  149. # The weights are magic numbers.
  150. # The point itself
  151. p0 = points[i]
  152. vector.append(p0)
  153. # The vector to the next point
  154. p1 = points[i + 1]
  155. d0 = p1 - p0
  156. vector.append(d0 * 3)
  157. # The turn vector
  158. p2 = points[i + 2]
  159. d1 = p2 - p1
  160. vector.append(d1 - d0)
  161. # The angle to the next point, as a cross product;
  162. # Square root of, to match dimentionality of distance.
  163. cross = d0.real * d1.imag - d0.imag * d1.real
  164. cross = copysign(sqrt(abs(cross)), cross)
  165. vector.append(cross * 4)
  166. return vector
def _add_isomorphisms(points, isomorphisms, reverse):
    """Append to ``isomorphisms`` every rotation of ``points`` (optionally
    reversed) whose on/off-curve bit pattern matches the original contour.

    points: list of (pt, is_on_curve) pairs.
    isomorphisms: output list; each entry is (rotated complex vector,
        proposed starting-point index in original orientation, reverse flag).
    reverse: consider the mirrored (reversed) contour instead.
    """
    reference_bits = _points_characteristic_bits(points)
    n = len(points)

    # if points[0][0] == points[-1][0]:
    #   abort

    if reverse:
        points = points[::-1]
        bits = _points_characteristic_bits(points)
    else:
        bits = reference_bits

    vector = _points_complex_vector(points)

    # The feature vector holds a fixed number of items per point.
    assert len(vector) % n == 0
    mult = len(vector) // n
    mask = (1 << n) - 1

    for i in range(n):
        # Rotate the n-bit on/off-curve pattern forward by i and compare to
        # the reference; only matching rotations can be isomorphic starts.
        b = ((bits << (n - i)) & mask) | (bits >> i)
        if b == reference_bits:
            isomorphisms.append(
                (_rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
            )
  187. def _find_parents_and_order(glyphsets, locations):
  188. parents = [None] + list(range(len(glyphsets) - 1))
  189. order = list(range(len(glyphsets)))
  190. if locations:
  191. # Order base master first
  192. bases = (i for i, l in enumerate(locations) if all(v == 0 for v in l.values()))
  193. if bases:
  194. base = next(bases)
  195. logging.info("Base master index %s, location %s", base, locations[base])
  196. else:
  197. base = 0
  198. logging.warning("No base master location found")
  199. # Form a minimum spanning tree of the locations
  200. try:
  201. from scipy.sparse.csgraph import minimum_spanning_tree
  202. graph = [[0] * len(locations) for _ in range(len(locations))]
  203. axes = set()
  204. for l in locations:
  205. axes.update(l.keys())
  206. axes = sorted(axes)
  207. vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
  208. for i, j in itertools.combinations(range(len(locations)), 2):
  209. graph[i][j] = _vdiff_hypot2(vectors[i], vectors[j])
  210. tree = minimum_spanning_tree(graph)
  211. rows, cols = tree.nonzero()
  212. graph = defaultdict(set)
  213. for row, col in zip(rows, cols):
  214. graph[row].add(col)
  215. graph[col].add(row)
  216. # Traverse graph from the base and assign parents
  217. parents = [None] * len(locations)
  218. order = []
  219. visited = set()
  220. queue = deque([base])
  221. while queue:
  222. i = queue.popleft()
  223. visited.add(i)
  224. order.append(i)
  225. for j in sorted(graph[i]):
  226. if j not in visited:
  227. parents[j] = i
  228. queue.append(j)
  229. except ImportError:
  230. pass
  231. log.info("Parents: %s", parents)
  232. log.info("Order: %s", order)
  233. return parents, order
  234. def test_gen(
  235. glyphsets,
  236. glyphs=None,
  237. names=None,
  238. ignore_missing=False,
  239. *,
  240. locations=None,
  241. tolerance=0.95,
  242. show_all=False,
  243. ):
  244. if names is None:
  245. names = glyphsets
  246. if glyphs is None:
  247. # `glyphs = glyphsets[0].keys()` is faster, certainly, but doesn't allow for sparse TTFs/OTFs given out of order
  248. # ... risks the sparse master being the first one, and only processing a subset of the glyphs
  249. glyphs = {g for glyphset in glyphsets for g in glyphset.keys()}
  250. parents, order = _find_parents_and_order(glyphsets, locations)
  251. def grand_parent(i, glyphname):
  252. if i is None:
  253. return None
  254. i = parents[i]
  255. if i is None:
  256. return None
  257. while parents[i] is not None and glyphsets[i][glyphname] is None:
  258. i = parents[i]
  259. return i
  260. for glyph_name in glyphs:
  261. log.info("Testing glyph %s", glyph_name)
  262. allGreenVectors = []
  263. allControlVectors = []
  264. allNodeTypes = []
  265. allContourIsomorphisms = []
  266. allContourPoints = []
  267. allGlyphs = [glyphset[glyph_name] for glyphset in glyphsets]
  268. if len([1 for glyph in allGlyphs if glyph is not None]) <= 1:
  269. continue
  270. for master_idx, (glyph, glyphset, name) in enumerate(
  271. zip(allGlyphs, glyphsets, names)
  272. ):
  273. if glyph is None:
  274. if not ignore_missing:
  275. yield (
  276. glyph_name,
  277. {"type": "missing", "master": name, "master_idx": master_idx},
  278. )
  279. allNodeTypes.append(None)
  280. allControlVectors.append(None)
  281. allGreenVectors.append(None)
  282. allContourIsomorphisms.append(None)
  283. allContourPoints.append(None)
  284. continue
  285. perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
  286. try:
  287. glyph.draw(perContourPen, outputImpliedClosingLine=True)
  288. except TypeError:
  289. glyph.draw(perContourPen)
  290. contourPens = perContourPen.value
  291. del perContourPen
  292. contourControlVectors = []
  293. contourGreenVectors = []
  294. contourIsomorphisms = []
  295. contourPoints = []
  296. nodeTypes = []
  297. allNodeTypes.append(nodeTypes)
  298. allControlVectors.append(contourControlVectors)
  299. allGreenVectors.append(contourGreenVectors)
  300. allContourIsomorphisms.append(contourIsomorphisms)
  301. allContourPoints.append(contourPoints)
  302. for ix, contour in enumerate(contourPens):
  303. contourOps = tuple(op for op, arg in contour.value)
  304. nodeTypes.append(contourOps)
  305. greenStats = StatisticsPen(glyphset=glyphset)
  306. controlStats = StatisticsControlPen(glyphset=glyphset)
  307. try:
  308. contour.replay(greenStats)
  309. contour.replay(controlStats)
  310. except OpenContourError as e:
  311. yield (
  312. glyph_name,
  313. {
  314. "master": name,
  315. "master_idx": master_idx,
  316. "contour": ix,
  317. "type": "open_path",
  318. },
  319. )
  320. continue
  321. contourGreenVectors.append(_contour_vector_from_stats(greenStats))
  322. contourControlVectors.append(_contour_vector_from_stats(controlStats))
  323. # Check starting point
  324. if contourOps[0] == "addComponent":
  325. continue
  326. assert contourOps[0] == "moveTo"
  327. assert contourOps[-1] in ("closePath", "endPath")
  328. points = SimpleRecordingPointPen()
  329. converter = SegmentToPointPen(points, False)
  330. contour.replay(converter)
  331. # points.value is a list of pt,bool where bool is true if on-curve and false if off-curve;
  332. # now check all rotations and mirror-rotations of the contour and build list of isomorphic
  333. # possible starting points.
  334. isomorphisms = []
  335. contourIsomorphisms.append(isomorphisms)
  336. # Add rotations
  337. _add_isomorphisms(points.value, isomorphisms, False)
  338. # Add mirrored rotations
  339. _add_isomorphisms(points.value, isomorphisms, True)
  340. contourPoints.append(points.value)
  341. matchings = [None] * len(allControlVectors)
  342. for m1idx in order:
  343. if allNodeTypes[m1idx] is None:
  344. continue
  345. m0idx = grand_parent(m1idx, glyph_name)
  346. if m0idx is None:
  347. continue
  348. if allNodeTypes[m0idx] is None:
  349. continue
  350. showed = False
  351. m1 = allNodeTypes[m1idx]
  352. m0 = allNodeTypes[m0idx]
  353. if len(m0) != len(m1):
  354. showed = True
  355. yield (
  356. glyph_name,
  357. {
  358. "type": "path_count",
  359. "master_1": names[m0idx],
  360. "master_2": names[m1idx],
  361. "master_1_idx": m0idx,
  362. "master_2_idx": m1idx,
  363. "value_1": len(m0),
  364. "value_2": len(m1),
  365. },
  366. )
  367. continue
  368. if m0 != m1:
  369. for pathIx, (nodes1, nodes2) in enumerate(zip(m0, m1)):
  370. if nodes1 == nodes2:
  371. continue
  372. if len(nodes1) != len(nodes2):
  373. showed = True
  374. yield (
  375. glyph_name,
  376. {
  377. "type": "node_count",
  378. "path": pathIx,
  379. "master_1": names[m0idx],
  380. "master_2": names[m1idx],
  381. "master_1_idx": m0idx,
  382. "master_2_idx": m1idx,
  383. "value_1": len(nodes1),
  384. "value_2": len(nodes2),
  385. },
  386. )
  387. continue
  388. for nodeIx, (n1, n2) in enumerate(zip(nodes1, nodes2)):
  389. if n1 != n2:
  390. showed = True
  391. yield (
  392. glyph_name,
  393. {
  394. "type": "node_incompatibility",
  395. "path": pathIx,
  396. "node": nodeIx,
  397. "master_1": names[m0idx],
  398. "master_2": names[m1idx],
  399. "master_1_idx": m0idx,
  400. "master_2_idx": m1idx,
  401. "value_1": n1,
  402. "value_2": n2,
  403. },
  404. )
  405. continue
  406. m1Control = allControlVectors[m1idx]
  407. m1Green = allGreenVectors[m1idx]
  408. m0Control = allControlVectors[m0idx]
  409. m0Green = allGreenVectors[m0idx]
  410. if len(m1Control) > 1:
  411. identity_matching = list(range(len(m0Control)))
  412. # We try matching both the StatisticsControlPen vector
  413. # and the StatisticsPen vector.
  414. # If either method found a identity matching, accept it.
  415. # This is crucial for fonts like Kablammo[MORF].ttf and
  416. # Nabla[EDPT,EHLT].ttf, since they really confuse the
  417. # StatisticsPen vector because of their area=0 contours.
  418. #
  419. # TODO: Optimize by only computing the StatisticsPen vector
  420. # and then checking if it is the identity vector. Only if
  421. # not, compute the StatisticsControlPen vector and check both.
  422. costsControl = [
  423. [_vdiff_hypot2(v0, v1) for v1 in m1Control] for v0 in m0Control
  424. ]
  425. (
  426. matching_control,
  427. matching_cost_control,
  428. ) = min_cost_perfect_bipartite_matching(costsControl)
  429. identity_cost_control = sum(
  430. costsControl[i][i] for i in range(len(m0Control))
  431. )
  432. done = matching_cost_control == identity_cost_control
  433. if not done:
  434. costsGreen = [
  435. [_vdiff_hypot2(v0, v1) for v1 in m1Green] for v0 in m0Green
  436. ]
  437. (
  438. matching_green,
  439. matching_cost_green,
  440. ) = min_cost_perfect_bipartite_matching(costsGreen)
  441. identity_cost_green = sum(
  442. costsGreen[i][i] for i in range(len(m0Control))
  443. )
  444. done = matching_cost_green == identity_cost_green
  445. if not done:
  446. # Otherwise, use the worst of the two matchings.
  447. if (
  448. matching_cost_control / identity_cost_control
  449. < matching_cost_green / identity_cost_green
  450. ):
  451. matching = matching_control
  452. matching_cost = matching_cost_control
  453. identity_cost = identity_cost_control
  454. else:
  455. matching = matching_green
  456. matching_cost = matching_cost_green
  457. identity_cost = identity_cost_green
  458. if matching_cost < identity_cost * tolerance:
  459. # print(matching_cost_control / identity_cost_control, matching_cost_green / identity_cost_green)
  460. showed = True
  461. yield (
  462. glyph_name,
  463. {
  464. "type": "contour_order",
  465. "master_1": names[m0idx],
  466. "master_2": names[m1idx],
  467. "master_1_idx": m0idx,
  468. "master_2_idx": m1idx,
  469. "value_1": list(range(len(m0Control))),
  470. "value_2": matching,
  471. },
  472. )
  473. matchings[m1idx] = matching
  474. m1 = allContourIsomorphisms[m1idx]
  475. m0 = allContourIsomorphisms[m0idx]
  476. # If contour-order is wrong, adjust it
  477. if matchings[m1idx] is not None and m1: # m1 is empty for composite glyphs
  478. m1 = [m1[i] for i in matchings[m1idx]]
  479. for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
  480. if len(contour0) == 0 or len(contour0) != len(contour1):
  481. # We already reported this; or nothing to do; or not compatible
  482. # after reordering above.
  483. continue
  484. c0 = contour0[0]
  485. # Next few lines duplicated below.
  486. costs = [_vdiff_hypot2_complex(c0[0], c1[0]) for c1 in contour1]
  487. min_cost_idx, min_cost = min(enumerate(costs), key=lambda x: x[1])
  488. first_cost = costs[0]
  489. if min_cost < first_cost * tolerance:
  490. # c0 is the first isomorphism of the m0 master
  491. # contour1 is list of all isomorphisms of the m1 master
  492. #
  493. # If the two shapes are both circle-ish and slightly
  494. # rotated, we detect wrong start point. This is for
  495. # example the case hundreds of times in
  496. # RobotoSerif-Italic[GRAD,opsz,wdth,wght].ttf
  497. #
  498. # If the proposed point is only one off from the first
  499. # point (and not reversed), try harder:
  500. #
  501. # Find the major eigenvector of the covariance matrix,
  502. # and rotate the contours by that angle. Then find the
  503. # closest point again. If it matches this time, let it
  504. # pass.
  505. proposed_point = contour1[min_cost_idx][1]
  506. reverse = contour1[min_cost_idx][2]
  507. num_points = len(allContourPoints[m1idx][ix])
  508. leeway = 3
  509. okay = False
  510. if not reverse and (
  511. proposed_point <= leeway
  512. or proposed_point >= num_points - leeway
  513. ):
  514. # Try harder
  515. m0Vectors = allGreenVectors[m1idx][ix]
  516. m1Vectors = allGreenVectors[m1idx][ix]
  517. # Recover the covariance matrix from the GreenVectors.
  518. # This is a 2x2 matrix.
  519. transforms = []
  520. for vector in (m0Vectors, m1Vectors):
  521. meanX = vector[1]
  522. meanY = vector[2]
  523. stddevX = vector[3] / 2
  524. stddevY = vector[4] / 2
  525. correlation = vector[5] / abs(vector[0])
  526. # https://cookierobotics.com/007/
  527. a = stddevX * stddevX # VarianceX
  528. c = stddevY * stddevY # VarianceY
  529. b = correlation * stddevX * stddevY # Covariance
  530. delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
  531. lambda1 = (a + c) * 0.5 + delta # Major eigenvalue
  532. lambda2 = (a + c) * 0.5 - delta # Minor eigenvalue
  533. theta = (
  534. atan2(lambda1 - a, b)
  535. if b != 0
  536. else (pi * 0.5 if a < c else 0)
  537. )
  538. trans = Transform()
  539. trans = trans.translate(meanX, meanY)
  540. trans = trans.rotate(theta)
  541. trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
  542. transforms.append(trans)
  543. trans = transforms[0]
  544. new_c0 = (
  545. [
  546. complex(*trans.transformPoint((pt.real, pt.imag)))
  547. for pt in c0[0]
  548. ],
  549. ) + c0[1:]
  550. trans = transforms[1]
  551. new_contour1 = []
  552. for c1 in contour1:
  553. new_c1 = (
  554. [
  555. complex(*trans.transformPoint((pt.real, pt.imag)))
  556. for pt in c1[0]
  557. ],
  558. ) + c1[1:]
  559. new_contour1.append(new_c1)
  560. # Next few lines duplicate from above.
  561. costs = [
  562. _vdiff_hypot2_complex(new_c0[0], new_c1[0])
  563. for new_c1 in new_contour1
  564. ]
  565. min_cost_idx, min_cost = min(
  566. enumerate(costs), key=lambda x: x[1]
  567. )
  568. first_cost = costs[0]
  569. # Only accept a perfect match
  570. if min_cost_idx == 0:
  571. okay = True
  572. if not okay:
  573. showed = True
  574. yield (
  575. glyph_name,
  576. {
  577. "type": "wrong_start_point",
  578. "contour": ix,
  579. "master_1": names[m0idx],
  580. "master_2": names[m1idx],
  581. "master_1_idx": m0idx,
  582. "master_2_idx": m1idx,
  583. "value_1": 0,
  584. "value_2": proposed_point,
  585. "reversed": reverse,
  586. },
  587. )
  588. else:
  589. # If first_cost is Too Large™, do further inspection.
  590. # This can happen specially in the case of TrueType
  591. # fonts, where the original contour had wrong start point,
  592. # but because of the cubic->quadratic conversion, we don't
  593. # have many isomorphisms to work with.
  594. # The threshold here is all black magic. It's just here to
  595. # speed things up so we don't end up doing a full matching
  596. # on every contour that is correct.
  597. threshold = (
  598. len(c0[0]) * (allControlVectors[m0idx][ix][0] * 0.5) ** 2 / 4
  599. ) # Magic only
  600. c1 = contour1[min_cost_idx]
  601. # If point counts are different it's because of the contour
  602. # reordering above. We can in theory still try, but our
  603. # bipartite-matching implementations currently assume
  604. # equal number of vertices on both sides. I'm lazy to update
  605. # all three different implementations!
  606. if len(c0[0]) == len(c1[0]) and first_cost > threshold:
  607. # Do a quick(!) matching between the points. If it's way off,
  608. # flag it. This can happen specially in the case of TrueType
  609. # fonts, where the original contour had wrong start point, but
  610. # because of the cubic->quadratic conversion, we don't have many
  611. # isomorphisms.
  612. points0 = c0[0][::_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR]
  613. points1 = c1[0][::_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR]
  614. graph = [
  615. [_hypot2_complex(p0 - p1) for p1 in points1]
  616. for p0 in points0
  617. ]
  618. matching, matching_cost = min_cost_perfect_bipartite_matching(
  619. graph
  620. )
  621. identity_cost = sum(graph[i][i] for i in range(len(graph)))
  622. if matching_cost < identity_cost / 8: # Heuristic
  623. # print(matching_cost, identity_cost, matching)
  624. showed = True
  625. yield (
  626. glyph_name,
  627. {
  628. "type": "wrong_structure",
  629. "contour": ix,
  630. "master_1": names[m0idx],
  631. "master_2": names[m1idx],
  632. "master_1_idx": m0idx,
  633. "master_2_idx": m1idx,
  634. },
  635. )
  636. if show_all and not showed:
  637. yield (
  638. glyph_name,
  639. {
  640. "type": "nothing",
  641. "master_1": names[m0idx],
  642. "master_2": names[m1idx],
  643. "master_1_idx": m0idx,
  644. "master_2_idx": m1idx,
  645. },
  646. )
  647. @wraps(test_gen)
  648. def test(*args, **kwargs):
  649. problems = defaultdict(list)
  650. for glyphname, problem in test_gen(*args, **kwargs):
  651. problems[glyphname].append(problem)
  652. return problems
  653. def recursivelyAddGlyph(glyphname, glyphset, ttGlyphSet, glyf):
  654. if glyphname in glyphset:
  655. return
  656. glyphset[glyphname] = ttGlyphSet[glyphname]
  657. for component in getattr(glyf[glyphname], "components", []):
  658. recursivelyAddGlyph(component.glyphName, glyphset, ttGlyphSet, glyf)
  659. def main(args=None):
  660. """Test for interpolatability issues between fonts"""
  661. import argparse
  662. import sys
  663. parser = argparse.ArgumentParser(
  664. "fonttools varLib.interpolatable",
  665. description=main.__doc__,
  666. )
  667. parser.add_argument(
  668. "--glyphs",
  669. action="store",
  670. help="Space-separate name of glyphs to check",
  671. )
  672. parser.add_argument(
  673. "--show-all",
  674. action="store_true",
  675. help="Show all glyph pairs, even if no problems are found",
  676. )
  677. parser.add_argument(
  678. "--tolerance",
  679. action="store",
  680. type=float,
  681. help="Error tolerance. Default 0.95",
  682. )
  683. parser.add_argument(
  684. "--json",
  685. action="store_true",
  686. help="Output report in JSON format",
  687. )
  688. parser.add_argument(
  689. "--pdf",
  690. action="store",
  691. help="Output report in PDF format",
  692. )
  693. parser.add_argument(
  694. "--html",
  695. action="store",
  696. help="Output report in HTML format",
  697. )
  698. parser.add_argument(
  699. "--quiet",
  700. action="store_true",
  701. help="Only exit with code 1 or 0, no output",
  702. )
  703. parser.add_argument(
  704. "--output",
  705. action="store",
  706. help="Output file for the problem report; Default: stdout",
  707. )
  708. parser.add_argument(
  709. "--ignore-missing",
  710. action="store_true",
  711. help="Will not report glyphs missing from sparse masters as errors",
  712. )
  713. parser.add_argument(
  714. "inputs",
  715. metavar="FILE",
  716. type=str,
  717. nargs="+",
  718. help="Input a single variable font / DesignSpace / Glyphs file, or multiple TTF/UFO files",
  719. )
  720. parser.add_argument(
  721. "--name",
  722. metavar="NAME",
  723. type=str,
  724. action="append",
  725. help="Name of the master to use in the report. If not provided, all are used.",
  726. )
  727. parser.add_argument("-v", "--verbose", action="store_true", help="Run verbosely.")
  728. args = parser.parse_args(args)
  729. from fontTools import configLogger
  730. configLogger(level=("INFO" if args.verbose else "ERROR"))
  731. glyphs = args.glyphs.split() if args.glyphs else None
  732. from os.path import basename
  733. fonts = []
  734. names = []
  735. locations = []
  736. original_args_inputs = tuple(args.inputs)
  737. if len(args.inputs) == 1:
  738. designspace = None
  739. if args.inputs[0].endswith(".designspace"):
  740. from fontTools.designspaceLib import DesignSpaceDocument
  741. designspace = DesignSpaceDocument.fromfile(args.inputs[0])
  742. args.inputs = [master.path for master in designspace.sources]
  743. locations = [master.location for master in designspace.sources]
  744. axis_triples = {
  745. a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
  746. }
  747. axis_mappings = {a.name: a.map for a in designspace.axes}
  748. axis_triples = {
  749. k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
  750. for k, vv in axis_triples.items()
  751. }
  752. elif args.inputs[0].endswith(".glyphs"):
  753. from glyphsLib import GSFont, to_designspace
  754. gsfont = GSFont(args.inputs[0])
  755. designspace = to_designspace(gsfont)
  756. fonts = [source.font for source in designspace.sources]
  757. names = ["%s-%s" % (f.info.familyName, f.info.styleName) for f in fonts]
  758. args.inputs = []
  759. locations = [master.location for master in designspace.sources]
  760. axis_triples = {
  761. a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
  762. }
  763. axis_mappings = {a.name: a.map for a in designspace.axes}
  764. axis_triples = {
  765. k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
  766. for k, vv in axis_triples.items()
  767. }
  768. elif args.inputs[0].endswith(".ttf"):
  769. from fontTools.ttLib import TTFont
  770. font = TTFont(args.inputs[0])
  771. if "gvar" in font:
  772. # Is variable font
  773. axisMapping = {}
  774. fvar = font["fvar"]
  775. for axis in fvar.axes:
  776. axisMapping[axis.axisTag] = {
  777. -1: axis.minValue,
  778. 0: axis.defaultValue,
  779. 1: axis.maxValue,
  780. }
  781. if "avar" in font:
  782. avar = font["avar"]
  783. for axisTag, segments in avar.segments.items():
  784. fvarMapping = axisMapping[axisTag].copy()
  785. for location, value in segments.items():
  786. axisMapping[axisTag][value] = piecewiseLinearMap(
  787. location, fvarMapping
  788. )
  789. gvar = font["gvar"]
  790. glyf = font["glyf"]
  791. # Gather all glyphs at their "master" locations
  792. ttGlyphSets = {}
  793. glyphsets = defaultdict(dict)
  794. if glyphs is None:
  795. glyphs = sorted(gvar.variations.keys())
  796. for glyphname in glyphs:
  797. for var in gvar.variations[glyphname]:
  798. locDict = {}
  799. loc = []
  800. for tag, val in sorted(var.axes.items()):
  801. locDict[tag] = val[1]
  802. loc.append((tag, val[1]))
  803. locTuple = tuple(loc)
  804. if locTuple not in ttGlyphSets:
  805. ttGlyphSets[locTuple] = font.getGlyphSet(
  806. location=locDict, normalized=True, recalcBounds=False
  807. )
  808. recursivelyAddGlyph(
  809. glyphname, glyphsets[locTuple], ttGlyphSets[locTuple], glyf
  810. )
  811. names = ["''"]
  812. fonts = [font.getGlyphSet()]
  813. locations = [{}]
  814. axis_triples = {a: (-1, 0, +1) for a in sorted(axisMapping.keys())}
  815. for locTuple in sorted(glyphsets.keys(), key=lambda v: (len(v), v)):
  816. name = (
  817. "'"
  818. + " ".join(
  819. "%s=%s"
  820. % (
  821. k,
  822. floatToFixedToStr(
  823. piecewiseLinearMap(v, axisMapping[k]), 14
  824. ),
  825. )
  826. for k, v in locTuple
  827. )
  828. + "'"
  829. )
  830. names.append(name)
  831. fonts.append(glyphsets[locTuple])
  832. locations.append(dict(locTuple))
  833. args.ignore_missing = True
  834. args.inputs = []
  835. if not locations:
  836. locations = [{} for _ in fonts]
  837. for filename in args.inputs:
  838. if filename.endswith(".ufo"):
  839. from fontTools.ufoLib import UFOReader
  840. fonts.append(UFOReader(filename))
  841. else:
  842. from fontTools.ttLib import TTFont
  843. fonts.append(TTFont(filename))
  844. names.append(basename(filename).rsplit(".", 1)[0])
  845. glyphsets = []
  846. for font in fonts:
  847. if hasattr(font, "getGlyphSet"):
  848. glyphset = font.getGlyphSet()
  849. else:
  850. glyphset = font
  851. glyphsets.append({k: glyphset[k] for k in glyphset.keys()})
  852. if args.name:
  853. accepted_names = set(args.name)
  854. glyphsets = [
  855. glyphset
  856. for name, glyphset in zip(names, glyphsets)
  857. if name in accepted_names
  858. ]
  859. locations = [
  860. location
  861. for name, location in zip(names, locations)
  862. if name in accepted_names
  863. ]
  864. names = [name for name in names if name in accepted_names]
  865. if not glyphs:
  866. glyphs = sorted(set([gn for glyphset in glyphsets for gn in glyphset.keys()]))
  867. glyphsSet = set(glyphs)
  868. for glyphset in glyphsets:
  869. glyphSetGlyphNames = set(glyphset.keys())
  870. diff = glyphsSet - glyphSetGlyphNames
  871. if diff:
  872. for gn in diff:
  873. glyphset[gn] = None
  874. # Normalize locations
  875. locations = [normalizeLocation(loc, axis_triples) for loc in locations]
  876. try:
  877. log.info("Running on %d glyphsets", len(glyphsets))
  878. log.info("Locations: %s", pformat(locations))
  879. problems_gen = test_gen(
  880. glyphsets,
  881. glyphs=glyphs,
  882. names=names,
  883. locations=locations,
  884. ignore_missing=args.ignore_missing,
  885. tolerance=args.tolerance or 0.95,
  886. show_all=args.show_all,
  887. )
  888. problems = defaultdict(list)
  889. f = sys.stdout if args.output is None else open(args.output, "w")
  890. if not args.quiet:
  891. if args.json:
  892. import json
  893. for glyphname, problem in problems_gen:
  894. problems[glyphname].append(problem)
  895. print(json.dumps(problems), file=f)
  896. else:
  897. last_glyphname = None
  898. for glyphname, p in problems_gen:
  899. problems[glyphname].append(p)
  900. if glyphname != last_glyphname:
  901. print(f"Glyph {glyphname} was not compatible:", file=f)
  902. last_glyphname = glyphname
  903. last_master_idxs = None
  904. master_idxs = (
  905. (p["master_idx"])
  906. if "master_idx" in p
  907. else (p["master_1_idx"], p["master_2_idx"])
  908. )
  909. if master_idxs != last_master_idxs:
  910. master_names = (
  911. (p["master"])
  912. if "master" in p
  913. else (p["master_1"], p["master_2"])
  914. )
  915. print(f" Masters: %s:" % ", ".join(master_names), file=f)
  916. last_master_idxs = master_idxs
  917. if p["type"] == "missing":
  918. print(
  919. " Glyph was missing in master %s" % p["master"], file=f
  920. )
  921. elif p["type"] == "open_path":
  922. print(
  923. " Glyph has an open path in master %s" % p["master"],
  924. file=f,
  925. )
  926. elif p["type"] == "path_count":
  927. print(
  928. " Path count differs: %i in %s, %i in %s"
  929. % (
  930. p["value_1"],
  931. p["master_1"],
  932. p["value_2"],
  933. p["master_2"],
  934. ),
  935. file=f,
  936. )
  937. elif p["type"] == "node_count":
  938. print(
  939. " Node count differs in path %i: %i in %s, %i in %s"
  940. % (
  941. p["path"],
  942. p["value_1"],
  943. p["master_1"],
  944. p["value_2"],
  945. p["master_2"],
  946. ),
  947. file=f,
  948. )
  949. elif p["type"] == "node_incompatibility":
  950. print(
  951. " Node %o incompatible in path %i: %s in %s, %s in %s"
  952. % (
  953. p["node"],
  954. p["path"],
  955. p["value_1"],
  956. p["master_1"],
  957. p["value_2"],
  958. p["master_2"],
  959. ),
  960. file=f,
  961. )
  962. elif p["type"] == "contour_order":
  963. print(
  964. " Contour order differs: %s in %s, %s in %s"
  965. % (
  966. p["value_1"],
  967. p["master_1"],
  968. p["value_2"],
  969. p["master_2"],
  970. ),
  971. file=f,
  972. )
  973. elif p["type"] == "wrong_start_point":
  974. print(
  975. " Contour %d start point differs: %s in %s, %s in %s; reversed: %s"
  976. % (
  977. p["contour"],
  978. p["value_1"],
  979. p["master_1"],
  980. p["value_2"],
  981. p["master_2"],
  982. p["reversed"],
  983. ),
  984. file=f,
  985. )
  986. elif p["type"] == "wrong_structure":
  987. print(
  988. " Contour %d structures differ: %s, %s"
  989. % (
  990. p["contour"],
  991. p["master_1"],
  992. p["master_2"],
  993. ),
  994. file=f,
  995. )
  996. elif p["type"] == "nothing":
  997. print(
  998. " Nothing wrong between %s and %s"
  999. % (
  1000. p["master_1"],
  1001. p["master_2"],
  1002. ),
  1003. file=f,
  1004. )
  1005. else:
  1006. for glyphname, problem in problems_gen:
  1007. problems[glyphname].append(problem)
  1008. if args.pdf:
  1009. log.info("Writing PDF to %s", args.pdf)
  1010. from .interpolatablePlot import InterpolatablePDF
  1011. with InterpolatablePDF(args.pdf, glyphsets=glyphsets, names=names) as pdf:
  1012. pdf.add_problems(problems)
  1013. if not problems and not args.quiet:
  1014. pdf.draw_cupcake()
  1015. if args.html:
  1016. log.info("Writing HTML to %s", args.html)
  1017. from .interpolatablePlot import InterpolatableSVG
  1018. svgs = []
  1019. with InterpolatableSVG(svgs, glyphsets=glyphsets, names=names) as svg:
  1020. svg.add_problems(problems)
  1021. if not problems and not args.quiet:
  1022. svg.draw_cupcake()
  1023. import base64
  1024. with open(args.html, "wb") as f:
  1025. f.write(b"<!DOCTYPE html>\n")
  1026. f.write(b"<html><body align=center>\n")
  1027. for svg in svgs:
  1028. f.write("<img src='data:image/svg+xml;base64,".encode("utf-8"))
  1029. f.write(base64.b64encode(svg))
  1030. f.write(b"' />\n")
  1031. f.write(b"<hr>\n")
  1032. f.write(b"</body></html>\n")
  1033. except Exception as e:
  1034. e.args += original_args_inputs
  1035. log.error(e)
  1036. raise
  1037. if problems:
  1038. return problems
  1039. if __name__ == "__main__":
  1040. import sys
  1041. problems = main()
  1042. sys.exit(int(bool(problems)))