# interpolatable.py
  1. """
  2. Tool to find wrong contour order between different masters, and
  3. other interpolatability (or lack thereof) issues.
  4. Call as:
  5. $ fonttools varLib.interpolatable font1 font2 ...
  6. """
  7. from .interpolatableHelpers import *
  8. from .interpolatableTestContourOrder import test_contour_order
  9. from .interpolatableTestStartingPoint import test_starting_point
  10. from fontTools.pens.recordingPen import RecordingPen, DecomposingRecordingPen
  11. from fontTools.pens.transformPen import TransformPen
  12. from fontTools.pens.statisticsPen import StatisticsPen, StatisticsControlPen
  13. from fontTools.pens.momentsPen import OpenContourError
  14. from fontTools.varLib.models import piecewiseLinearMap, normalizeLocation
  15. from fontTools.misc.fixedTools import floatToFixedToStr
  16. from fontTools.misc.transform import Transform
  17. from collections import defaultdict
  18. from types import SimpleNamespace
  19. from functools import wraps
  20. from pprint import pformat
  21. from math import sqrt, atan2, pi
  22. import logging
  23. log = logging.getLogger("fontTools.varLib.interpolatable")
  24. DEFAULT_TOLERANCE = 0.95
  25. DEFAULT_KINKINESS = 0.5
  26. DEFAULT_KINKINESS_LENGTH = 0.002 # ratio of UPEM
  27. DEFAULT_UPEM = 1000
class Glyph:
    """Per-master cache of one glyph's per-contour data.

    For every contour of the glyph, parallel lists (one entry per contour,
    named in ITEMS) hold the recorded outline, a normalized copy, Green's
    theorem and control-point statistics, derived characteristic vectors,
    node types, point lists, starting-point isomorphisms, and an open-contour
    flag.  Entries that cannot be computed for a contour are filled with
    None so all lists stay index-aligned (see _fill_in).
    """

    # Names of the per-contour parallel lists kept on each instance.
    ITEMS = (
        "recordings",
        "recordingsNormalized",
        "greenStats",
        "controlStats",
        "greenVectors",
        "greenVectorsNormalized",
        "controlVectors",
        "nodeTypes",
        "isomorphisms",
        "points",
        "openContours",
    )

    def __init__(self, glyphname, glyphset):
        """Record and analyze glyph `glyphname` from mapping `glyphset`."""
        self.name = glyphname
        for item in self.ITEMS:
            setattr(self, item, [])
        self._populate(glyphset)

    def _fill_in(self, ix):
        """Pad any per-contour list that stopped at index `ix` with None,
        keeping all ITEMS lists the same length."""
        for item in self.ITEMS:
            if len(getattr(self, item)) == ix:
                getattr(self, item).append(None)

    def _populate(self, glyphset):
        """Draw the glyph and compute all per-contour data.

        Sets self.doesnt_exist and returns early when the glyphset maps the
        name to None (sparse master).
        """
        glyph = glyphset[self.name]
        self.doesnt_exist = glyph is None
        if self.doesnt_exist:
            return

        # Record each contour (or component) into its own RecordingPen.
        perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
        try:
            # Newer pen protocol supports outputImpliedClosingLine.
            glyph.draw(perContourPen, outputImpliedClosingLine=True)
        except TypeError:
            glyph.draw(perContourPen)
        self.recordings = perContourPen.value
        del perContourPen

        for ix, contour in enumerate(self.recordings):
            nodeTypes = [op for op, arg in contour.value]
            self.nodeTypes.append(nodeTypes)

            greenStats = StatisticsPen(glyphset=glyphset)
            controlStats = StatisticsControlPen(glyphset=glyphset)
            try:
                contour.replay(greenStats)
                contour.replay(controlStats)
                self.openContours.append(False)
            except OpenContourError as e:
                # Open contours have no meaningful statistics; pad and move on.
                self.openContours.append(True)
                self._fill_in(ix)
                continue
            self.greenStats.append(greenStats)
            self.controlStats.append(controlStats)
            self.greenVectors.append(contour_vector_from_stats(greenStats))
            self.controlVectors.append(contour_vector_from_stats(controlStats))

            # Save a "normalized" version of the outlines
            try:
                rpen = DecomposingRecordingPen(glyphset)
                tpen = TransformPen(
                    rpen, transform_from_stats(greenStats, inverse=True)
                )
                contour.replay(tpen)
                self.recordingsNormalized.append(rpen)
            except ZeroDivisionError:
                # Degenerate contour (zero area); no normalized form.
                self.recordingsNormalized.append(None)

            greenStats = StatisticsPen(glyphset=glyphset)
            rpen.replay(greenStats)
            self.greenVectorsNormalized.append(contour_vector_from_stats(greenStats))

            # Check starting point
            if nodeTypes[0] == "addComponent":
                # Components have no starting point of their own; pad.
                self._fill_in(ix)
                continue
            assert nodeTypes[0] == "moveTo"
            assert nodeTypes[-1] in ("closePath", "endPath")
            points = SimpleRecordingPointPen()
            converter = SegmentToPointPen(points, False)
            contour.replay(converter)
            # points.value is a list of pt,bool where bool is true if on-curve and false if off-curve;
            # now check all rotations and mirror-rotations of the contour and build list of isomorphic
            # possible starting points.
            self.points.append(points.value)

            isomorphisms = []
            self.isomorphisms.append(isomorphisms)
            # Add rotations
            add_isomorphisms(points.value, isomorphisms, False)
            # Add mirrored rotations
            add_isomorphisms(points.value, isomorphisms, True)

    def draw(self, pen, countor_idx=None):
        """Replay the recorded outline into `pen`; all contours, or only
        the one at index `countor_idx` (sic) when given."""
        if countor_idx is None:
            for contour in self.recordings:
                contour.draw(pen)
        else:
            self.recordings[countor_idx].draw(pen)
  118. def test_gen(
  119. glyphsets,
  120. glyphs=None,
  121. names=None,
  122. ignore_missing=False,
  123. *,
  124. locations=None,
  125. tolerance=DEFAULT_TOLERANCE,
  126. kinkiness=DEFAULT_KINKINESS,
  127. upem=DEFAULT_UPEM,
  128. show_all=False,
  129. ):
  130. if tolerance >= 10:
  131. tolerance *= 0.01
  132. assert 0 <= tolerance <= 1
  133. if kinkiness >= 10:
  134. kinkiness *= 0.01
  135. assert 0 <= kinkiness
  136. names = names or [repr(g) for g in glyphsets]
  137. if glyphs is None:
  138. # `glyphs = glyphsets[0].keys()` is faster, certainly, but doesn't allow for sparse TTFs/OTFs given out of order
  139. # ... risks the sparse master being the first one, and only processing a subset of the glyphs
  140. glyphs = {g for glyphset in glyphsets for g in glyphset.keys()}
  141. parents, order = find_parents_and_order(glyphsets, locations)
  142. def grand_parent(i, glyphname):
  143. if i is None:
  144. return None
  145. i = parents[i]
  146. if i is None:
  147. return None
  148. while parents[i] is not None and glyphsets[i][glyphname] is None:
  149. i = parents[i]
  150. return i
  151. for glyph_name in glyphs:
  152. log.info("Testing glyph %s", glyph_name)
  153. allGlyphs = [Glyph(glyph_name, glyphset) for glyphset in glyphsets]
  154. if len([1 for glyph in allGlyphs if glyph is not None]) <= 1:
  155. continue
  156. for master_idx, (glyph, glyphset, name) in enumerate(
  157. zip(allGlyphs, glyphsets, names)
  158. ):
  159. if glyph.doesnt_exist:
  160. if not ignore_missing:
  161. yield (
  162. glyph_name,
  163. {"type": "missing", "master": name, "master_idx": master_idx},
  164. )
  165. continue
  166. has_open = False
  167. for ix, open in enumerate(glyph.openContours):
  168. if not open:
  169. continue
  170. has_open = True
  171. yield (
  172. glyph_name,
  173. {
  174. "master": name,
  175. "master_idx": master_idx,
  176. "contour": ix,
  177. "type": "open_path",
  178. },
  179. )
  180. if has_open:
  181. continue
  182. matchings = [None] * len(glyphsets)
  183. for m1idx in order:
  184. glyph1 = allGlyphs[m1idx]
  185. if glyph1 is None or not glyph1.nodeTypes:
  186. continue
  187. m0idx = grand_parent(m1idx, glyph_name)
  188. if m0idx is None:
  189. continue
  190. glyph0 = allGlyphs[m0idx]
  191. if glyph0 is None or not glyph0.nodeTypes:
  192. continue
  193. #
  194. # Basic compatibility checks
  195. #
  196. m1 = glyph0.nodeTypes
  197. m0 = glyph1.nodeTypes
  198. if len(m0) != len(m1):
  199. yield (
  200. glyph_name,
  201. {
  202. "type": "path_count",
  203. "master_1": names[m0idx],
  204. "master_2": names[m1idx],
  205. "master_1_idx": m0idx,
  206. "master_2_idx": m1idx,
  207. "value_1": len(m0),
  208. "value_2": len(m1),
  209. },
  210. )
  211. continue
  212. if m0 != m1:
  213. for pathIx, (nodes1, nodes2) in enumerate(zip(m0, m1)):
  214. if nodes1 == nodes2:
  215. continue
  216. if len(nodes1) != len(nodes2):
  217. yield (
  218. glyph_name,
  219. {
  220. "type": "node_count",
  221. "path": pathIx,
  222. "master_1": names[m0idx],
  223. "master_2": names[m1idx],
  224. "master_1_idx": m0idx,
  225. "master_2_idx": m1idx,
  226. "value_1": len(nodes1),
  227. "value_2": len(nodes2),
  228. },
  229. )
  230. continue
  231. for nodeIx, (n1, n2) in enumerate(zip(nodes1, nodes2)):
  232. if n1 != n2:
  233. yield (
  234. glyph_name,
  235. {
  236. "type": "node_incompatibility",
  237. "path": pathIx,
  238. "node": nodeIx,
  239. "master_1": names[m0idx],
  240. "master_2": names[m1idx],
  241. "master_1_idx": m0idx,
  242. "master_2_idx": m1idx,
  243. "value_1": n1,
  244. "value_2": n2,
  245. },
  246. )
  247. continue
  248. #
  249. # "contour_order" check
  250. #
  251. matching, matching_cost, identity_cost = test_contour_order(glyph0, glyph1)
  252. if matching_cost < identity_cost * tolerance:
  253. log.debug(
  254. "matching_ratio %g",
  255. matching_cost / identity_cost,
  256. )
  257. this_tolerance = matching_cost / identity_cost
  258. log.debug("tolerance: %g", this_tolerance)
  259. yield (
  260. glyph_name,
  261. {
  262. "type": "contour_order",
  263. "master_1": names[m0idx],
  264. "master_2": names[m1idx],
  265. "master_1_idx": m0idx,
  266. "master_2_idx": m1idx,
  267. "value_1": list(range(len(matching))),
  268. "value_2": matching,
  269. "tolerance": this_tolerance,
  270. },
  271. )
  272. matchings[m1idx] = matching
  273. #
  274. # "wrong_start_point" / weight check
  275. #
  276. m0Isomorphisms = glyph0.isomorphisms
  277. m1Isomorphisms = glyph1.isomorphisms
  278. m0Vectors = glyph0.greenVectors
  279. m1Vectors = glyph1.greenVectors
  280. m0VectorsNormalized = glyph0.greenVectorsNormalized
  281. m1VectorsNormalized = glyph1.greenVectorsNormalized
  282. recording0 = glyph0.recordings
  283. recording1 = glyph1.recordings
  284. recording0Normalized = glyph0.recordingsNormalized
  285. recording1Normalized = glyph1.recordingsNormalized
  286. # If contour-order is wrong, adjust it
  287. matching = matchings[m1idx]
  288. if (
  289. matching is not None and m1Isomorphisms
  290. ): # m1 is empty for composite glyphs
  291. m1Isomorphisms = [m1Isomorphisms[i] for i in matching]
  292. m1Vectors = [m1Vectors[i] for i in matching]
  293. m1VectorsNormalized = [m1VectorsNormalized[i] for i in matching]
  294. recording1 = [recording1[i] for i in matching]
  295. recording1Normalized = [recording1Normalized[i] for i in matching]
  296. midRecording = []
  297. for c0, c1 in zip(recording0, recording1):
  298. try:
  299. midRecording.append(lerp_recordings(c0, c1))
  300. except ValueError:
  301. # Mismatch because of the reordering above
  302. midRecording.append(None)
  303. for ix, (contour0, contour1) in enumerate(
  304. zip(m0Isomorphisms, m1Isomorphisms)
  305. ):
  306. if (
  307. contour0 is None
  308. or contour1 is None
  309. or len(contour0) == 0
  310. or len(contour0) != len(contour1)
  311. ):
  312. # We already reported this; or nothing to do; or not compatible
  313. # after reordering above.
  314. continue
  315. proposed_point, reverse, min_cost, first_cost = test_starting_point(
  316. glyph0, glyph1, ix, tolerance, matching
  317. )
  318. if proposed_point or reverse:
  319. this_tolerance = min_cost / first_cost
  320. log.debug("tolerance: %g", this_tolerance)
  321. if min_cost < first_cost * tolerance:
  322. yield (
  323. glyph_name,
  324. {
  325. "type": "wrong_start_point",
  326. "contour": ix,
  327. "master_1": names[m0idx],
  328. "master_2": names[m1idx],
  329. "master_1_idx": m0idx,
  330. "master_2_idx": m1idx,
  331. "value_1": 0,
  332. "value_2": proposed_point,
  333. "reversed": reverse,
  334. "tolerance": this_tolerance,
  335. },
  336. )
  337. else:
  338. # Weight check.
  339. #
  340. # If contour could be mid-interpolated, and the two
  341. # contours have the same area sign, proceeed.
  342. #
  343. # The sign difference can happen if it's a werido
  344. # self-intersecting contour; ignore it.
  345. contour = midRecording[ix]
  346. normalized = False
  347. if contour and (m0Vectors[ix][0] < 0) == (m1Vectors[ix][0] < 0):
  348. if normalized:
  349. midStats = StatisticsPen(glyphset=None)
  350. tpen = TransformPen(
  351. midStats, transform_from_stats(midStats, inverse=True)
  352. )
  353. contour.replay(tpen)
  354. else:
  355. midStats = StatisticsPen(glyphset=None)
  356. contour.replay(midStats)
  357. midVector = contour_vector_from_stats(midStats)
  358. m0Vec = (
  359. m0Vectors[ix] if not normalized else m0VectorsNormalized[ix]
  360. )
  361. m1Vec = (
  362. m1Vectors[ix] if not normalized else m1VectorsNormalized[ix]
  363. )
  364. size0 = m0Vec[0] * m0Vec[0]
  365. size1 = m1Vec[0] * m1Vec[0]
  366. midSize = midVector[0] * midVector[0]
  367. power = 1
  368. t = tolerance**power
  369. for overweight, problem_type in enumerate(
  370. ("underweight", "overweight")
  371. ):
  372. if overweight:
  373. expectedSize = sqrt(size0 * size1)
  374. expectedSize = (size0 + size1) - expectedSize
  375. expectedSize = size1 + (midSize - size1)
  376. continue
  377. else:
  378. expectedSize = sqrt(size0 * size1)
  379. log.debug(
  380. "%s: actual size %g; threshold size %g, master sizes: %g, %g",
  381. problem_type,
  382. midSize,
  383. expectedSize,
  384. size0,
  385. size1,
  386. )
  387. size0, size1 = sorted((size0, size1))
  388. if (
  389. not overweight
  390. and expectedSize * tolerance > midSize + 1e-5
  391. ) or (
  392. overweight and 1e-5 + expectedSize / tolerance < midSize
  393. ):
  394. try:
  395. if overweight:
  396. this_tolerance = (expectedSize / midSize) ** (
  397. 1 / power
  398. )
  399. else:
  400. this_tolerance = (midSize / expectedSize) ** (
  401. 1 / power
  402. )
  403. except ZeroDivisionError:
  404. this_tolerance = 0
  405. log.debug("tolerance %g", this_tolerance)
  406. yield (
  407. glyph_name,
  408. {
  409. "type": problem_type,
  410. "contour": ix,
  411. "master_1": names[m0idx],
  412. "master_2": names[m1idx],
  413. "master_1_idx": m0idx,
  414. "master_2_idx": m1idx,
  415. "tolerance": this_tolerance,
  416. },
  417. )
  418. #
  419. # "kink" detector
  420. #
  421. m0 = glyph0.points
  422. m1 = glyph1.points
  423. # If contour-order is wrong, adjust it
  424. if matchings[m1idx] is not None and m1: # m1 is empty for composite glyphs
  425. m1 = [m1[i] for i in matchings[m1idx]]
  426. t = 0.1 # ~sin(radian(6)) for tolerance 0.95
  427. deviation_threshold = (
  428. upem * DEFAULT_KINKINESS_LENGTH * DEFAULT_KINKINESS / kinkiness
  429. )
  430. for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
  431. if (
  432. contour0 is None
  433. or contour1 is None
  434. or len(contour0) == 0
  435. or len(contour0) != len(contour1)
  436. ):
  437. # We already reported this; or nothing to do; or not compatible
  438. # after reordering above.
  439. continue
  440. # Walk the contour, keeping track of three consecutive points, with
  441. # middle one being an on-curve. If the three are co-linear then
  442. # check for kinky-ness.
  443. for i in range(len(contour0)):
  444. pt0 = contour0[i]
  445. pt1 = contour1[i]
  446. if not pt0[1] or not pt1[1]:
  447. # Skip off-curves
  448. continue
  449. pt0_prev = contour0[i - 1]
  450. pt1_prev = contour1[i - 1]
  451. pt0_next = contour0[(i + 1) % len(contour0)]
  452. pt1_next = contour1[(i + 1) % len(contour1)]
  453. if pt0_prev[1] and pt1_prev[1]:
  454. # At least one off-curve is required
  455. continue
  456. if pt0_prev[1] and pt1_prev[1]:
  457. # At least one off-curve is required
  458. continue
  459. pt0 = complex(*pt0[0])
  460. pt1 = complex(*pt1[0])
  461. pt0_prev = complex(*pt0_prev[0])
  462. pt1_prev = complex(*pt1_prev[0])
  463. pt0_next = complex(*pt0_next[0])
  464. pt1_next = complex(*pt1_next[0])
  465. # We have three consecutive points. Check whether
  466. # they are colinear.
  467. d0_prev = pt0 - pt0_prev
  468. d0_next = pt0_next - pt0
  469. d1_prev = pt1 - pt1_prev
  470. d1_next = pt1_next - pt1
  471. sin0 = d0_prev.real * d0_next.imag - d0_prev.imag * d0_next.real
  472. sin1 = d1_prev.real * d1_next.imag - d1_prev.imag * d1_next.real
  473. try:
  474. sin0 /= abs(d0_prev) * abs(d0_next)
  475. sin1 /= abs(d1_prev) * abs(d1_next)
  476. except ZeroDivisionError:
  477. continue
  478. if abs(sin0) > t or abs(sin1) > t:
  479. # Not colinear / not smooth.
  480. continue
  481. # Check the mid-point is actually, well, in the middle.
  482. dot0 = d0_prev.real * d0_next.real + d0_prev.imag * d0_next.imag
  483. dot1 = d1_prev.real * d1_next.real + d1_prev.imag * d1_next.imag
  484. if dot0 < 0 or dot1 < 0:
  485. # Sharp corner.
  486. continue
  487. # Fine, if handle ratios are similar...
  488. r0 = abs(d0_prev) / (abs(d0_prev) + abs(d0_next))
  489. r1 = abs(d1_prev) / (abs(d1_prev) + abs(d1_next))
  490. r_diff = abs(r0 - r1)
  491. if abs(r_diff) < t:
  492. # Smooth enough.
  493. continue
  494. mid = (pt0 + pt1) / 2
  495. mid_prev = (pt0_prev + pt1_prev) / 2
  496. mid_next = (pt0_next + pt1_next) / 2
  497. mid_d0 = mid - mid_prev
  498. mid_d1 = mid_next - mid
  499. sin_mid = mid_d0.real * mid_d1.imag - mid_d0.imag * mid_d1.real
  500. try:
  501. sin_mid /= abs(mid_d0) * abs(mid_d1)
  502. except ZeroDivisionError:
  503. continue
  504. # ...or if the angles are similar.
  505. if abs(sin_mid) * (tolerance * kinkiness) <= t:
  506. # Smooth enough.
  507. continue
  508. # How visible is the kink?
  509. cross = sin_mid * abs(mid_d0) * abs(mid_d1)
  510. arc_len = abs(mid_d0 + mid_d1)
  511. deviation = abs(cross / arc_len)
  512. if deviation < deviation_threshold:
  513. continue
  514. deviation_ratio = deviation / arc_len
  515. if deviation_ratio > t:
  516. continue
  517. this_tolerance = t / (abs(sin_mid) * kinkiness)
  518. log.debug(
  519. "deviation %g; deviation_ratio %g; sin_mid %g; r_diff %g",
  520. deviation,
  521. deviation_ratio,
  522. sin_mid,
  523. r_diff,
  524. )
  525. log.debug("tolerance %g", this_tolerance)
  526. yield (
  527. glyph_name,
  528. {
  529. "type": "kink",
  530. "contour": ix,
  531. "master_1": names[m0idx],
  532. "master_2": names[m1idx],
  533. "master_1_idx": m0idx,
  534. "master_2_idx": m1idx,
  535. "value": i,
  536. "tolerance": this_tolerance,
  537. },
  538. )
  539. #
  540. # --show-all
  541. #
  542. if show_all:
  543. yield (
  544. glyph_name,
  545. {
  546. "type": "nothing",
  547. "master_1": names[m0idx],
  548. "master_2": names[m1idx],
  549. "master_1_idx": m0idx,
  550. "master_2_idx": m1idx,
  551. },
  552. )
  553. @wraps(test_gen)
  554. def test(*args, **kwargs):
  555. problems = defaultdict(list)
  556. for glyphname, problem in test_gen(*args, **kwargs):
  557. problems[glyphname].append(problem)
  558. return problems
  559. def recursivelyAddGlyph(glyphname, glyphset, ttGlyphSet, glyf):
  560. if glyphname in glyphset:
  561. return
  562. glyphset[glyphname] = ttGlyphSet[glyphname]
  563. for component in getattr(glyf[glyphname], "components", []):
  564. recursivelyAddGlyph(component.glyphName, glyphset, ttGlyphSet, glyf)
  565. def main(args=None):
  566. """Test for interpolatability issues between fonts"""
  567. import argparse
  568. import sys
  569. parser = argparse.ArgumentParser(
  570. "fonttools varLib.interpolatable",
  571. description=main.__doc__,
  572. )
  573. parser.add_argument(
  574. "--glyphs",
  575. action="store",
  576. help="Space-separate name of glyphs to check",
  577. )
  578. parser.add_argument(
  579. "--show-all",
  580. action="store_true",
  581. help="Show all glyph pairs, even if no problems are found",
  582. )
  583. parser.add_argument(
  584. "--tolerance",
  585. action="store",
  586. type=float,
  587. help="Error tolerance. Between 0 and 1. Default %s" % DEFAULT_TOLERANCE,
  588. )
  589. parser.add_argument(
  590. "--kinkiness",
  591. action="store",
  592. type=float,
  593. help="How aggressively report kinks. Default %s" % DEFAULT_KINKINESS,
  594. )
  595. parser.add_argument(
  596. "--json",
  597. action="store_true",
  598. help="Output report in JSON format",
  599. )
  600. parser.add_argument(
  601. "--pdf",
  602. action="store",
  603. help="Output report in PDF format",
  604. )
  605. parser.add_argument(
  606. "--ps",
  607. action="store",
  608. help="Output report in PostScript format",
  609. )
  610. parser.add_argument(
  611. "--html",
  612. action="store",
  613. help="Output report in HTML format",
  614. )
  615. parser.add_argument(
  616. "--quiet",
  617. action="store_true",
  618. help="Only exit with code 1 or 0, no output",
  619. )
  620. parser.add_argument(
  621. "--output",
  622. action="store",
  623. help="Output file for the problem report; Default: stdout",
  624. )
  625. parser.add_argument(
  626. "--ignore-missing",
  627. action="store_true",
  628. help="Will not report glyphs missing from sparse masters as errors",
  629. )
  630. parser.add_argument(
  631. "inputs",
  632. metavar="FILE",
  633. type=str,
  634. nargs="+",
  635. help="Input a single variable font / DesignSpace / Glyphs file, or multiple TTF/UFO files",
  636. )
  637. parser.add_argument(
  638. "--name",
  639. metavar="NAME",
  640. type=str,
  641. action="append",
  642. help="Name of the master to use in the report. If not provided, all are used.",
  643. )
  644. parser.add_argument("-v", "--verbose", action="store_true", help="Run verbosely.")
  645. parser.add_argument("--debug", action="store_true", help="Run with debug output.")
  646. args = parser.parse_args(args)
  647. from fontTools import configLogger
  648. configLogger(level=("INFO" if args.verbose else "ERROR"))
  649. if args.debug:
  650. configLogger(level="DEBUG")
  651. glyphs = args.glyphs.split() if args.glyphs else None
  652. from os.path import basename
  653. fonts = []
  654. names = []
  655. locations = []
  656. upem = DEFAULT_UPEM
  657. original_args_inputs = tuple(args.inputs)
  658. if len(args.inputs) == 1:
  659. designspace = None
  660. if args.inputs[0].endswith(".designspace"):
  661. from fontTools.designspaceLib import DesignSpaceDocument
  662. designspace = DesignSpaceDocument.fromfile(args.inputs[0])
  663. args.inputs = [master.path for master in designspace.sources]
  664. locations = [master.location for master in designspace.sources]
  665. axis_triples = {
  666. a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
  667. }
  668. axis_mappings = {a.name: a.map for a in designspace.axes}
  669. axis_triples = {
  670. k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
  671. for k, vv in axis_triples.items()
  672. }
  673. elif args.inputs[0].endswith(".glyphs"):
  674. from glyphsLib import GSFont, to_designspace
  675. gsfont = GSFont(args.inputs[0])
  676. upem = gsfont.upm
  677. designspace = to_designspace(gsfont)
  678. fonts = [source.font for source in designspace.sources]
  679. names = ["%s-%s" % (f.info.familyName, f.info.styleName) for f in fonts]
  680. args.inputs = []
  681. locations = [master.location for master in designspace.sources]
  682. axis_triples = {
  683. a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
  684. }
  685. axis_mappings = {a.name: a.map for a in designspace.axes}
  686. axis_triples = {
  687. k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
  688. for k, vv in axis_triples.items()
  689. }
  690. elif args.inputs[0].endswith(".ttf"):
  691. from fontTools.ttLib import TTFont
  692. font = TTFont(args.inputs[0])
  693. upem = font["head"].unitsPerEm
  694. if "gvar" in font:
  695. # Is variable font
  696. axisMapping = {}
  697. fvar = font["fvar"]
  698. for axis in fvar.axes:
  699. axisMapping[axis.axisTag] = {
  700. -1: axis.minValue,
  701. 0: axis.defaultValue,
  702. 1: axis.maxValue,
  703. }
  704. if "avar" in font:
  705. avar = font["avar"]
  706. for axisTag, segments in avar.segments.items():
  707. fvarMapping = axisMapping[axisTag].copy()
  708. for location, value in segments.items():
  709. axisMapping[axisTag][value] = piecewiseLinearMap(
  710. location, fvarMapping
  711. )
  712. gvar = font["gvar"]
  713. glyf = font["glyf"]
  714. # Gather all glyphs at their "master" locations
  715. ttGlyphSets = {}
  716. glyphsets = defaultdict(dict)
  717. if glyphs is None:
  718. glyphs = sorted(gvar.variations.keys())
  719. for glyphname in glyphs:
  720. for var in gvar.variations[glyphname]:
  721. locDict = {}
  722. loc = []
  723. for tag, val in sorted(var.axes.items()):
  724. locDict[tag] = val[1]
  725. loc.append((tag, val[1]))
  726. locTuple = tuple(loc)
  727. if locTuple not in ttGlyphSets:
  728. ttGlyphSets[locTuple] = font.getGlyphSet(
  729. location=locDict, normalized=True, recalcBounds=False
  730. )
  731. recursivelyAddGlyph(
  732. glyphname, glyphsets[locTuple], ttGlyphSets[locTuple], glyf
  733. )
  734. names = ["''"]
  735. fonts = [font.getGlyphSet()]
  736. locations = [{}]
  737. axis_triples = {a: (-1, 0, +1) for a in sorted(axisMapping.keys())}
  738. for locTuple in sorted(glyphsets.keys(), key=lambda v: (len(v), v)):
  739. name = (
  740. "'"
  741. + " ".join(
  742. "%s=%s"
  743. % (
  744. k,
  745. floatToFixedToStr(
  746. piecewiseLinearMap(v, axisMapping[k]), 14
  747. ),
  748. )
  749. for k, v in locTuple
  750. )
  751. + "'"
  752. )
  753. names.append(name)
  754. fonts.append(glyphsets[locTuple])
  755. locations.append(dict(locTuple))
  756. args.ignore_missing = True
  757. args.inputs = []
  758. if not locations:
  759. locations = [{} for _ in fonts]
  760. for filename in args.inputs:
  761. if filename.endswith(".ufo"):
  762. from fontTools.ufoLib import UFOReader
  763. font = UFOReader(filename)
  764. info = SimpleNamespace()
  765. font.readInfo(info)
  766. upem = info.unitsPerEm
  767. fonts.append(font)
  768. else:
  769. from fontTools.ttLib import TTFont
  770. font = TTFont(filename)
  771. upem = font["head"].unitsPerEm
  772. fonts.append(font)
  773. names.append(basename(filename).rsplit(".", 1)[0])
  774. glyphsets = []
  775. for font in fonts:
  776. if hasattr(font, "getGlyphSet"):
  777. glyphset = font.getGlyphSet()
  778. else:
  779. glyphset = font
  780. glyphsets.append({k: glyphset[k] for k in glyphset.keys()})
  781. if args.name:
  782. accepted_names = set(args.name)
  783. glyphsets = [
  784. glyphset
  785. for name, glyphset in zip(names, glyphsets)
  786. if name in accepted_names
  787. ]
  788. locations = [
  789. location
  790. for name, location in zip(names, locations)
  791. if name in accepted_names
  792. ]
  793. names = [name for name in names if name in accepted_names]
  794. if not glyphs:
  795. glyphs = sorted(set([gn for glyphset in glyphsets for gn in glyphset.keys()]))
  796. glyphsSet = set(glyphs)
  797. for glyphset in glyphsets:
  798. glyphSetGlyphNames = set(glyphset.keys())
  799. diff = glyphsSet - glyphSetGlyphNames
  800. if diff:
  801. for gn in diff:
  802. glyphset[gn] = None
  803. # Normalize locations
  804. locations = [normalizeLocation(loc, axis_triples) for loc in locations]
  805. tolerance = args.tolerance or DEFAULT_TOLERANCE
  806. kinkiness = args.kinkiness if args.kinkiness is not None else DEFAULT_KINKINESS
  807. try:
  808. log.info("Running on %d glyphsets", len(glyphsets))
  809. log.info("Locations: %s", pformat(locations))
  810. problems_gen = test_gen(
  811. glyphsets,
  812. glyphs=glyphs,
  813. names=names,
  814. locations=locations,
  815. upem=upem,
  816. ignore_missing=args.ignore_missing,
  817. tolerance=tolerance,
  818. kinkiness=kinkiness,
  819. show_all=args.show_all,
  820. )
  821. problems = defaultdict(list)
  822. f = sys.stdout if args.output is None else open(args.output, "w")
  823. if not args.quiet:
  824. if args.json:
  825. import json
  826. for glyphname, problem in problems_gen:
  827. problems[glyphname].append(problem)
  828. print(json.dumps(problems), file=f)
  829. else:
  830. last_glyphname = None
  831. for glyphname, p in problems_gen:
  832. problems[glyphname].append(p)
  833. if glyphname != last_glyphname:
  834. print(f"Glyph {glyphname} was not compatible:", file=f)
  835. last_glyphname = glyphname
  836. last_master_idxs = None
  837. master_idxs = (
  838. (p["master_idx"])
  839. if "master_idx" in p
  840. else (p["master_1_idx"], p["master_2_idx"])
  841. )
  842. if master_idxs != last_master_idxs:
  843. master_names = (
  844. (p["master"])
  845. if "master" in p
  846. else (p["master_1"], p["master_2"])
  847. )
  848. print(f" Masters: %s:" % ", ".join(master_names), file=f)
  849. last_master_idxs = master_idxs
  850. if p["type"] == "missing":
  851. print(
  852. " Glyph was missing in master %s" % p["master"], file=f
  853. )
  854. elif p["type"] == "open_path":
  855. print(
  856. " Glyph has an open path in master %s" % p["master"],
  857. file=f,
  858. )
  859. elif p["type"] == "path_count":
  860. print(
  861. " Path count differs: %i in %s, %i in %s"
  862. % (
  863. p["value_1"],
  864. p["master_1"],
  865. p["value_2"],
  866. p["master_2"],
  867. ),
  868. file=f,
  869. )
  870. elif p["type"] == "node_count":
  871. print(
  872. " Node count differs in path %i: %i in %s, %i in %s"
  873. % (
  874. p["path"],
  875. p["value_1"],
  876. p["master_1"],
  877. p["value_2"],
  878. p["master_2"],
  879. ),
  880. file=f,
  881. )
  882. elif p["type"] == "node_incompatibility":
  883. print(
  884. " Node %o incompatible in path %i: %s in %s, %s in %s"
  885. % (
  886. p["node"],
  887. p["path"],
  888. p["value_1"],
  889. p["master_1"],
  890. p["value_2"],
  891. p["master_2"],
  892. ),
  893. file=f,
  894. )
  895. elif p["type"] == "contour_order":
  896. print(
  897. " Contour order differs: %s in %s, %s in %s"
  898. % (
  899. p["value_1"],
  900. p["master_1"],
  901. p["value_2"],
  902. p["master_2"],
  903. ),
  904. file=f,
  905. )
  906. elif p["type"] == "wrong_start_point":
  907. print(
  908. " Contour %d start point differs: %s in %s, %s in %s; reversed: %s"
  909. % (
  910. p["contour"],
  911. p["value_1"],
  912. p["master_1"],
  913. p["value_2"],
  914. p["master_2"],
  915. p["reversed"],
  916. ),
  917. file=f,
  918. )
  919. elif p["type"] == "underweight":
  920. print(
  921. " Contour %d interpolation is underweight: %s, %s"
  922. % (
  923. p["contour"],
  924. p["master_1"],
  925. p["master_2"],
  926. ),
  927. file=f,
  928. )
  929. elif p["type"] == "overweight":
  930. print(
  931. " Contour %d interpolation is overweight: %s, %s"
  932. % (
  933. p["contour"],
  934. p["master_1"],
  935. p["master_2"],
  936. ),
  937. file=f,
  938. )
  939. elif p["type"] == "kink":
  940. print(
  941. " Contour %d has a kink at %s: %s, %s"
  942. % (
  943. p["contour"],
  944. p["value"],
  945. p["master_1"],
  946. p["master_2"],
  947. ),
  948. file=f,
  949. )
  950. elif p["type"] == "nothing":
  951. print(
  952. " Showing %s and %s"
  953. % (
  954. p["master_1"],
  955. p["master_2"],
  956. ),
  957. file=f,
  958. )
  959. else:
  960. for glyphname, problem in problems_gen:
  961. problems[glyphname].append(problem)
  962. if args.pdf:
  963. log.info("Writing PDF to %s", args.pdf)
  964. from .interpolatablePlot import InterpolatablePDF
  965. with InterpolatablePDF(args.pdf, glyphsets=glyphsets, names=names) as pdf:
  966. pdf.add_title_page(
  967. original_args_inputs, tolerance=tolerance, kinkiness=kinkiness
  968. )
  969. pdf.add_problems(problems)
  970. if not problems and not args.quiet:
  971. pdf.draw_cupcake()
  972. if args.ps:
  973. log.info("Writing PS to %s", args.pdf)
  974. from .interpolatablePlot import InterpolatablePS
  975. with InterpolatablePS(args.ps, glyphsets=glyphsets, names=names) as ps:
  976. ps.add_title_page(
  977. original_args_inputs, tolerance=tolerance, kinkiness=kinkiness
  978. )
  979. ps.add_problems(problems)
  980. if not problems and not args.quiet:
  981. ps.draw_cupcake()
  982. if args.html:
  983. log.info("Writing HTML to %s", args.html)
  984. from .interpolatablePlot import InterpolatableSVG
  985. svgs = []
  986. glyph_starts = {}
  987. with InterpolatableSVG(svgs, glyphsets=glyphsets, names=names) as svg:
  988. svg.add_title_page(
  989. original_args_inputs,
  990. show_tolerance=False,
  991. tolerance=tolerance,
  992. kinkiness=kinkiness,
  993. )
  994. for glyph, glyph_problems in problems.items():
  995. glyph_starts[len(svgs)] = glyph
  996. svg.add_problems(
  997. {glyph: glyph_problems},
  998. show_tolerance=False,
  999. show_page_number=False,
  1000. )
  1001. if not problems and not args.quiet:
  1002. svg.draw_cupcake()
  1003. import base64
  1004. with open(args.html, "wb") as f:
  1005. f.write(b"<!DOCTYPE html>\n")
  1006. f.write(
  1007. b'<html><body align="center" style="font-family: sans-serif; text-color: #222">\n'
  1008. )
  1009. f.write(b"<title>fonttools varLib.interpolatable report</title>\n")
  1010. for i, svg in enumerate(svgs):
  1011. if i in glyph_starts:
  1012. f.write(f"<h1>Glyph {glyph_starts[i]}</h1>\n".encode("utf-8"))
  1013. f.write("<img src='data:image/svg+xml;base64,".encode("utf-8"))
  1014. f.write(base64.b64encode(svg))
  1015. f.write(b"' />\n")
  1016. f.write(b"<hr>\n")
  1017. f.write(b"</body></html>\n")
  1018. except Exception as e:
  1019. e.args += original_args_inputs
  1020. log.error(e)
  1021. raise
  1022. if problems:
  1023. return problems
  1024. if __name__ == "__main__":
  1025. import sys
  1026. problems = main()
  1027. sys.exit(int(bool(problems)))