I am done

2024-10-30 22:14:35 +01:00
parent 720dc28c09
commit 40e2a747cf
36901 changed files with 5011519 additions and 0 deletions


@@ -0,0 +1,5 @@
"""fontTools.voltLib -- a package for dealing with Visual OpenType Layout Tool
(VOLT) files."""
# See
# http://www.microsoft.com/typography/VOLT.mspx
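
# Illustrative sketch (not part of the original file): the typical entry point
# is fontTools.voltLib.parser.Parser, added in this same commit; "font.vtp" is
# a placeholder path.
#
#     from fontTools.voltLib.parser import Parser
#
#     doc = Parser("font.vtp").parse()
#     for statement in doc.statements:
#         print(type(statement).__name__)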


@@ -0,0 +1,448 @@
from fontTools.voltLib.error import VoltLibError
from typing import NamedTuple
class Pos(NamedTuple):
adv: int
dx: int
dy: int
adv_adjust_by: dict
dx_adjust_by: dict
dy_adjust_by: dict
def __str__(self):
res = " POS"
for attr in ("adv", "dx", "dy"):
value = getattr(self, attr)
if value is not None:
res += f" {attr.upper()} {value}"
adjust_by = getattr(self, f"{attr}_adjust_by", {})
for size, adjustment in adjust_by.items():
res += f" ADJUST_BY {adjustment} AT {size}"
res += " END_POS"
return res
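
# Illustrative sketch (not part of the original file): how Pos serializes.
# A Pos with only an advance and one per-ppem adjustment renders as shown.
_example_pos = Pos(adv=100, dx=None, dy=None,
                   adv_adjust_by={12: 2}, dx_adjust_by={}, dy_adjust_by={})
assert str(_example_pos) == " POS ADV 100 ADJUST_BY 2 AT 12 END_POS"
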
class Element(object):
def __init__(self, location=None):
self.location = location
def build(self, builder):
pass
def __str__(self):
raise NotImplementedError
class Statement(Element):
pass
class Expression(Element):
pass
class VoltFile(Statement):
def __init__(self):
Statement.__init__(self, location=None)
self.statements = []
def build(self, builder):
for s in self.statements:
s.build(builder)
def __str__(self):
return "\n" + "\n".join(str(s) for s in self.statements) + " END\n"
class GlyphDefinition(Statement):
def __init__(self, name, gid, gunicode, gtype, components, location=None):
Statement.__init__(self, location)
self.name = name
self.id = gid
self.unicode = gunicode
self.type = gtype
self.components = components
def __str__(self):
res = f'DEF_GLYPH "{self.name}" ID {self.id}'
if self.unicode is not None:
if len(self.unicode) > 1:
unicodes = ",".join(f"U+{u:04X}" for u in self.unicode)
res += f' UNICODEVALUES "{unicodes}"'
else:
res += f" UNICODE {self.unicode[0]}"
if self.type is not None:
res += f" TYPE {self.type}"
if self.components is not None:
res += f" COMPONENTS {self.components}"
res += " END_GLYPH"
return res
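
# Illustrative sketch (not part of the original file): round-tripping a glyph
# definition to VOLT syntax via __str__; the values below are made up.
_example_glyph = GlyphDefinition("f_i", 300, [0xFB01], "LIGATURE", 2)
assert str(_example_glyph) == (
    'DEF_GLYPH "f_i" ID 300 UNICODE 64257 TYPE LIGATURE COMPONENTS 2 END_GLYPH'
)
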
class GroupDefinition(Statement):
def __init__(self, name, enum, location=None):
Statement.__init__(self, location)
self.name = name
self.enum = enum
self.glyphs_ = None
def glyphSet(self, groups=None):
if groups is not None and self.name in groups:
raise VoltLibError(
'Group "%s" contains itself.' % (self.name), self.location
)
if self.glyphs_ is None:
if groups is None:
groups = set({self.name})
else:
groups.add(self.name)
self.glyphs_ = self.enum.glyphSet(groups)
return self.glyphs_
def __str__(self):
enum = self.enum and str(self.enum) or ""
return f'DEF_GROUP "{self.name}"\n{enum}\nEND_GROUP'
class GlyphName(Expression):
"""A single glyph name, such as cedilla."""
def __init__(self, glyph, location=None):
Expression.__init__(self, location)
self.glyph = glyph
def glyphSet(self):
return (self.glyph,)
def __str__(self):
return f' GLYPH "{self.glyph}"'
class Enum(Expression):
"""An enum"""
def __init__(self, enum, location=None):
Expression.__init__(self, location)
self.enum = enum
def __iter__(self):
for e in self.glyphSet():
yield e
def glyphSet(self, groups=None):
glyphs = []
for element in self.enum:
if isinstance(element, (GroupName, Enum)):
glyphs.extend(element.glyphSet(groups))
else:
glyphs.extend(element.glyphSet())
return tuple(glyphs)
def __str__(self):
enum = "".join(str(e) for e in self.enum)
return f" ENUM{enum} END_ENUM"
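
# Illustrative sketch (not part of the original file): an Enum built from two
# GlyphName expressions flattens to a tuple of glyph names and serializes back
# to VOLT syntax.
_example_enum = Enum([GlyphName("a"), GlyphName("b")])
assert _example_enum.glyphSet() == ("a", "b")
assert str(_example_enum) == ' ENUM GLYPH "a" GLYPH "b" END_ENUM'
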
class GroupName(Expression):
"""A glyph group"""
def __init__(self, group, parser, location=None):
Expression.__init__(self, location)
self.group = group
self.parser_ = parser
def glyphSet(self, groups=None):
group = self.parser_.resolve_group(self.group)
if group is not None:
self.glyphs_ = group.glyphSet(groups)
return self.glyphs_
else:
raise VoltLibError(
'Group "%s" is used but undefined.' % (self.group), self.location
)
def __str__(self):
return f' GROUP "{self.group}"'
class Range(Expression):
"""A glyph range"""
def __init__(self, start, end, parser, location=None):
Expression.__init__(self, location)
self.start = start
self.end = end
self.parser = parser
def glyphSet(self):
return tuple(self.parser.glyph_range(self.start, self.end))
def __str__(self):
return f' RANGE "{self.start}" TO "{self.end}"'
class ScriptDefinition(Statement):
def __init__(self, name, tag, langs, location=None):
Statement.__init__(self, location)
self.name = name
self.tag = tag
self.langs = langs
def __str__(self):
res = "DEF_SCRIPT"
if self.name is not None:
res += f' NAME "{self.name}"'
res += f' TAG "{self.tag}"\n\n'
for lang in self.langs:
res += f"{lang}"
res += "END_SCRIPT"
return res
class LangSysDefinition(Statement):
def __init__(self, name, tag, features, location=None):
Statement.__init__(self, location)
self.name = name
self.tag = tag
self.features = features
def __str__(self):
res = "DEF_LANGSYS"
if self.name is not None:
res += f' NAME "{self.name}"'
res += f' TAG "{self.tag}"\n\n'
for feature in self.features:
res += f"{feature}"
res += "END_LANGSYS\n"
return res
class FeatureDefinition(Statement):
def __init__(self, name, tag, lookups, location=None):
Statement.__init__(self, location)
self.name = name
self.tag = tag
self.lookups = lookups
def __str__(self):
res = f'DEF_FEATURE NAME "{self.name}" TAG "{self.tag}"\n'
res += " " + " ".join(f'LOOKUP "{l}"' for l in self.lookups) + "\n"
res += "END_FEATURE\n"
return res
class LookupDefinition(Statement):
def __init__(
self,
name,
process_base,
process_marks,
mark_glyph_set,
direction,
reversal,
comments,
context,
sub,
pos,
location=None,
):
Statement.__init__(self, location)
self.name = name
self.process_base = process_base
self.process_marks = process_marks
self.mark_glyph_set = mark_glyph_set
self.direction = direction
self.reversal = reversal
self.comments = comments
self.context = context
self.sub = sub
self.pos = pos
def __str__(self):
res = f'DEF_LOOKUP "{self.name}"'
res += f' {self.process_base and "PROCESS_BASE" or "SKIP_BASE"}'
if self.process_marks:
res += " PROCESS_MARKS "
if self.mark_glyph_set:
res += f'MARK_GLYPH_SET "{self.mark_glyph_set}"'
elif isinstance(self.process_marks, str):
res += f'"{self.process_marks}"'
else:
res += "ALL"
else:
res += " SKIP_MARKS"
if self.direction is not None:
res += f" DIRECTION {self.direction}"
if self.reversal:
res += " REVERSAL"
if self.comments is not None:
comments = self.comments.replace("\n", r"\n")
res += f'\nCOMMENTS "{comments}"'
if self.context:
res += "\n" + "\n".join(str(c) for c in self.context)
else:
res += "\nIN_CONTEXT\nEND_CONTEXT"
if self.sub:
res += f"\n{self.sub}"
if self.pos:
res += f"\n{self.pos}"
return res
class SubstitutionDefinition(Statement):
def __init__(self, mapping, location=None):
Statement.__init__(self, location)
self.mapping = mapping
def __str__(self):
res = "AS_SUBSTITUTION\n"
for src, dst in self.mapping.items():
src = "".join(str(s) for s in src)
dst = "".join(str(d) for d in dst)
res += f"SUB{src}\nWITH{dst}\nEND_SUB\n"
res += "END_SUBSTITUTION"
return res
class SubstitutionSingleDefinition(SubstitutionDefinition):
pass
class SubstitutionMultipleDefinition(SubstitutionDefinition):
pass
class SubstitutionLigatureDefinition(SubstitutionDefinition):
pass
class SubstitutionReverseChainingSingleDefinition(SubstitutionDefinition):
pass
class PositionAttachDefinition(Statement):
def __init__(self, coverage, coverage_to, location=None):
Statement.__init__(self, location)
self.coverage = coverage
self.coverage_to = coverage_to
def __str__(self):
coverage = "".join(str(c) for c in self.coverage)
res = f"AS_POSITION\nATTACH{coverage}\nTO"
for coverage, anchor in self.coverage_to:
coverage = "".join(str(c) for c in coverage)
res += f'{coverage} AT ANCHOR "{anchor}"'
res += "\nEND_ATTACH\nEND_POSITION"
return res
class PositionAttachCursiveDefinition(Statement):
def __init__(self, coverages_exit, coverages_enter, location=None):
Statement.__init__(self, location)
self.coverages_exit = coverages_exit
self.coverages_enter = coverages_enter
def __str__(self):
res = "AS_POSITION\nATTACH_CURSIVE"
for coverage in self.coverages_exit:
coverage = "".join(str(c) for c in coverage)
res += f"\nEXIT {coverage}"
for coverage in self.coverages_enter:
coverage = "".join(str(c) for c in coverage)
res += f"\nENTER {coverage}"
res += "\nEND_ATTACH\nEND_POSITION"
return res
class PositionAdjustPairDefinition(Statement):
def __init__(self, coverages_1, coverages_2, adjust_pair, location=None):
Statement.__init__(self, location)
self.coverages_1 = coverages_1
self.coverages_2 = coverages_2
self.adjust_pair = adjust_pair
def __str__(self):
res = "AS_POSITION\nADJUST_PAIR\n"
for coverage in self.coverages_1:
coverage = " ".join(str(c) for c in coverage)
res += f" FIRST {coverage}"
res += "\n"
for coverage in self.coverages_2:
coverage = " ".join(str(c) for c in coverage)
res += f" SECOND {coverage}"
res += "\n"
for (id_1, id_2), (pos_1, pos_2) in self.adjust_pair.items():
res += f" {id_1} {id_2} BY{pos_1}{pos_2}\n"
res += "\nEND_ADJUST\nEND_POSITION"
return res
class PositionAdjustSingleDefinition(Statement):
def __init__(self, adjust_single, location=None):
Statement.__init__(self, location)
self.adjust_single = adjust_single
def __str__(self):
res = "AS_POSITION\nADJUST_SINGLE"
for coverage, pos in self.adjust_single:
coverage = "".join(str(c) for c in coverage)
res += f"{coverage} BY{pos}"
res += "\nEND_ADJUST\nEND_POSITION"
return res
class ContextDefinition(Statement):
def __init__(self, ex_or_in, left=None, right=None, location=None):
Statement.__init__(self, location)
self.ex_or_in = ex_or_in
self.left = left if left is not None else []
self.right = right if right is not None else []
def __str__(self):
res = self.ex_or_in + "\n"
for coverage in self.left:
coverage = "".join(str(c) for c in coverage)
res += f" LEFT{coverage}\n"
for coverage in self.right:
coverage = "".join(str(c) for c in coverage)
res += f" RIGHT{coverage}\n"
res += "END_CONTEXT"
return res
class AnchorDefinition(Statement):
def __init__(self, name, gid, glyph_name, component, locked, pos, location=None):
Statement.__init__(self, location)
self.name = name
self.gid = gid
self.glyph_name = glyph_name
self.component = component
self.locked = locked
self.pos = pos
def __str__(self):
locked = self.locked and " LOCKED" or ""
return (
f'DEF_ANCHOR "{self.name}"'
f" ON {self.gid}"
f" GLYPH {self.glyph_name}"
f" COMPONENT {self.component}"
f"{locked}"
f" AT {self.pos} END_ANCHOR"
)
class SettingDefinition(Statement):
def __init__(self, name, value, location=None):
Statement.__init__(self, location)
self.name = name
self.value = value
def __str__(self):
if self.value is True:
return f"{self.name}"
if isinstance(self.value, (tuple, list)):
value = " ".join(str(v) for v in self.value)
return f"{self.name} {value}"
return f"{self.name} {self.value}"
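
# Illustrative sketch (not part of the original file): settings serialize to
# either a bare name, a name with a single value, or a space-joined tuple.
assert str(SettingDefinition("DO_NOT_TOUCH_CMAP", True)) == "DO_NOT_TOUCH_CMAP"
assert str(SettingDefinition("GRID_PPEM", 20)) == "GRID_PPEM 20"
assert str(SettingDefinition("CMAP_FORMAT", (0, 3, 4))) == "CMAP_FORMAT 0 3 4"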


@@ -0,0 +1,12 @@
class VoltLibError(Exception):
def __init__(self, message, location):
Exception.__init__(self, message)
self.location = location
def __str__(self):
message = Exception.__str__(self)
if self.location:
path, line, column = self.location
return "%s:%d:%d: %s" % (path, line, column, message)
else:
return message
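
# Illustrative sketch (not part of the original file): errors render with a
# path:line:column prefix when a location tuple is given ("font.vtp" is a
# placeholder path).
_example_error = VoltLibError("Expected a number", ("font.vtp", 3, 7))
assert str(_example_error) == "font.vtp:3:7: Expected a number"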


@@ -0,0 +1,99 @@
from fontTools.voltLib.error import VoltLibError
class Lexer(object):
NUMBER = "NUMBER"
STRING = "STRING"
NAME = "NAME"
NEWLINE = "NEWLINE"
CHAR_WHITESPACE_ = " \t"
CHAR_NEWLINE_ = "\r\n"
CHAR_DIGIT_ = "0123456789"
CHAR_UC_LETTER_ = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
CHAR_LC_LETTER_ = "abcdefghijklmnopqrstuvwxyz"
CHAR_UNDERSCORE_ = "_"
CHAR_PERIOD_ = "."
CHAR_NAME_START_ = (
CHAR_UC_LETTER_ + CHAR_LC_LETTER_ + CHAR_PERIOD_ + CHAR_UNDERSCORE_
)
CHAR_NAME_CONTINUATION_ = CHAR_NAME_START_ + CHAR_DIGIT_
def __init__(self, text, filename):
self.filename_ = filename
self.line_ = 1
self.pos_ = 0
self.line_start_ = 0
self.text_ = text
self.text_length_ = len(text)
def __iter__(self):
return self
def next(self): # Python 2
return self.__next__()
def __next__(self): # Python 3
while True:
token_type, token, location = self.next_()
if token_type not in {Lexer.NEWLINE}:
return (token_type, token, location)
def location_(self):
column = self.pos_ - self.line_start_ + 1
return (self.filename_ or "<volt>", self.line_, column)
def next_(self):
self.scan_over_(Lexer.CHAR_WHITESPACE_)
location = self.location_()
start = self.pos_
text = self.text_
limit = len(text)
if start >= limit:
raise StopIteration()
cur_char = text[start]
next_char = text[start + 1] if start + 1 < limit else None
if cur_char == "\n":
self.pos_ += 1
self.line_ += 1
self.line_start_ = self.pos_
return (Lexer.NEWLINE, None, location)
if cur_char == "\r":
self.pos_ += 2 if next_char == "\n" else 1
self.line_ += 1
self.line_start_ = self.pos_
return (Lexer.NEWLINE, None, location)
if cur_char == '"':
self.pos_ += 1
self.scan_until_('"\r\n')
if self.pos_ < self.text_length_ and self.text_[self.pos_] == '"':
self.pos_ += 1
return (Lexer.STRING, text[start + 1 : self.pos_ - 1], location)
else:
raise VoltLibError("Expected '\"' to terminate string", location)
if cur_char in Lexer.CHAR_NAME_START_:
self.pos_ += 1
self.scan_over_(Lexer.CHAR_NAME_CONTINUATION_)
token = text[start : self.pos_]
return (Lexer.NAME, token, location)
if cur_char in Lexer.CHAR_DIGIT_:
self.scan_over_(Lexer.CHAR_DIGIT_)
return (Lexer.NUMBER, int(text[start : self.pos_], 10), location)
if cur_char == "-" and next_char in Lexer.CHAR_DIGIT_:
self.pos_ += 1
self.scan_over_(Lexer.CHAR_DIGIT_)
return (Lexer.NUMBER, int(text[start : self.pos_], 10), location)
raise VoltLibError("Unexpected character: '%s'" % cur_char, location)
def scan_over_(self, valid):
p = self.pos_
while p < self.text_length_ and self.text_[p] in valid:
p += 1
self.pos_ = p
def scan_until_(self, stop_at):
p = self.pos_
while p < self.text_length_ and self.text_[p] not in stop_at:
p += 1
self.pos_ = p
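
# Illustrative sketch (not part of the original file): tokenizing a one-line
# VOLT fragment. NEWLINE tokens are consumed internally and never yielded.
_example_lexer = Lexer('DEF_GLYPH "a" ID 1 END_GLYPH', "<example>")
_example_tokens = [(kind, value) for kind, value, _location in _example_lexer]
assert _example_tokens == [
    (Lexer.NAME, "DEF_GLYPH"),
    (Lexer.STRING, "a"),
    (Lexer.NAME, "ID"),
    (Lexer.NUMBER, 1),
    (Lexer.NAME, "END_GLYPH"),
]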


@@ -0,0 +1,656 @@
import fontTools.voltLib.ast as ast
from fontTools.voltLib.lexer import Lexer
from fontTools.voltLib.error import VoltLibError
from io import open
PARSE_FUNCS = {
"DEF_GLYPH": "parse_def_glyph_",
"DEF_GROUP": "parse_def_group_",
"DEF_SCRIPT": "parse_def_script_",
"DEF_LOOKUP": "parse_def_lookup_",
"DEF_ANCHOR": "parse_def_anchor_",
"GRID_PPEM": "parse_ppem_",
"PRESENTATION_PPEM": "parse_ppem_",
"PPOSITIONING_PPEM": "parse_ppem_",
"COMPILER_USEEXTENSIONLOOKUPS": "parse_noarg_option_",
"COMPILER_USEPAIRPOSFORMAT2": "parse_noarg_option_",
"CMAP_FORMAT": "parse_cmap_format",
"DO_NOT_TOUCH_CMAP": "parse_noarg_option_",
}
class Parser(object):
def __init__(self, path):
self.doc_ = ast.VoltFile()
self.glyphs_ = OrderedSymbolTable()
self.groups_ = SymbolTable()
self.anchors_ = {} # dictionary of SymbolTable() keyed by glyph
self.scripts_ = SymbolTable()
self.langs_ = SymbolTable()
self.lookups_ = SymbolTable()
self.next_token_type_, self.next_token_ = (None, None)
self.next_token_location_ = None
self.make_lexer_(path)
self.advance_lexer_()
def make_lexer_(self, file_or_path):
if hasattr(file_or_path, "read"):
filename = getattr(file_or_path, "name", None)
data = file_or_path.read()
else:
filename = file_or_path
with open(file_or_path, "r") as f:
data = f.read()
self.lexer_ = Lexer(data, filename)
def parse(self):
statements = self.doc_.statements
while self.next_token_type_ is not None:
self.advance_lexer_()
if self.cur_token_ in PARSE_FUNCS.keys():
func = getattr(self, PARSE_FUNCS[self.cur_token_])
statements.append(func())
elif self.is_cur_keyword_("END"):
break
else:
raise VoltLibError(
"Expected " + ", ".join(sorted(PARSE_FUNCS.keys())),
self.cur_token_location_,
)
return self.doc_
def parse_def_glyph_(self):
assert self.is_cur_keyword_("DEF_GLYPH")
location = self.cur_token_location_
name = self.expect_string_()
self.expect_keyword_("ID")
gid = self.expect_number_()
if gid < 0:
raise VoltLibError("Invalid glyph ID", self.cur_token_location_)
gunicode = None
if self.next_token_ == "UNICODE":
self.expect_keyword_("UNICODE")
gunicode = [self.expect_number_()]
if gunicode[0] < 0:
raise VoltLibError("Invalid glyph UNICODE", self.cur_token_location_)
elif self.next_token_ == "UNICODEVALUES":
self.expect_keyword_("UNICODEVALUES")
gunicode = self.parse_unicode_values_()
gtype = None
if self.next_token_ == "TYPE":
self.expect_keyword_("TYPE")
gtype = self.expect_name_()
assert gtype in ("BASE", "LIGATURE", "MARK", "COMPONENT")
components = None
if self.next_token_ == "COMPONENTS":
self.expect_keyword_("COMPONENTS")
components = self.expect_number_()
self.expect_keyword_("END_GLYPH")
if self.glyphs_.resolve(name) is not None:
raise VoltLibError(
'Glyph "%s" (gid %i) already defined' % (name, gid), location
)
def_glyph = ast.GlyphDefinition(
name, gid, gunicode, gtype, components, location=location
)
self.glyphs_.define(name, def_glyph)
return def_glyph
def parse_def_group_(self):
assert self.is_cur_keyword_("DEF_GROUP")
location = self.cur_token_location_
name = self.expect_string_()
enum = None
if self.next_token_ == "ENUM":
enum = self.parse_enum_()
self.expect_keyword_("END_GROUP")
if self.groups_.resolve(name) is not None:
raise VoltLibError(
'Glyph group "%s" already defined, '
"group names are case insensitive" % name,
location,
)
def_group = ast.GroupDefinition(name, enum, location=location)
self.groups_.define(name, def_group)
return def_group
def parse_def_script_(self):
assert self.is_cur_keyword_("DEF_SCRIPT")
location = self.cur_token_location_
name = None
if self.next_token_ == "NAME":
self.expect_keyword_("NAME")
name = self.expect_string_()
self.expect_keyword_("TAG")
tag = self.expect_string_()
if self.scripts_.resolve(tag) is not None:
raise VoltLibError(
'Script "%s" already defined, '
"script tags are case insensitive" % tag,
location,
)
self.langs_.enter_scope()
langs = []
while self.next_token_ != "END_SCRIPT":
self.advance_lexer_()
lang = self.parse_langsys_()
self.expect_keyword_("END_LANGSYS")
if self.langs_.resolve(lang.tag) is not None:
raise VoltLibError(
'Language "%s" already defined in script "%s", '
"language tags are case insensitive" % (lang.tag, tag),
location,
)
self.langs_.define(lang.tag, lang)
langs.append(lang)
self.expect_keyword_("END_SCRIPT")
self.langs_.exit_scope()
def_script = ast.ScriptDefinition(name, tag, langs, location=location)
self.scripts_.define(tag, def_script)
return def_script
def parse_langsys_(self):
assert self.is_cur_keyword_("DEF_LANGSYS")
location = self.cur_token_location_
name = None
if self.next_token_ == "NAME":
self.expect_keyword_("NAME")
name = self.expect_string_()
self.expect_keyword_("TAG")
tag = self.expect_string_()
features = []
while self.next_token_ != "END_LANGSYS":
self.advance_lexer_()
feature = self.parse_feature_()
self.expect_keyword_("END_FEATURE")
features.append(feature)
def_langsys = ast.LangSysDefinition(name, tag, features, location=location)
return def_langsys
def parse_feature_(self):
assert self.is_cur_keyword_("DEF_FEATURE")
location = self.cur_token_location_
self.expect_keyword_("NAME")
name = self.expect_string_()
self.expect_keyword_("TAG")
tag = self.expect_string_()
lookups = []
while self.next_token_ != "END_FEATURE":
# self.advance_lexer_()
self.expect_keyword_("LOOKUP")
lookup = self.expect_string_()
lookups.append(lookup)
feature = ast.FeatureDefinition(name, tag, lookups, location=location)
return feature
def parse_def_lookup_(self):
assert self.is_cur_keyword_("DEF_LOOKUP")
location = self.cur_token_location_
name = self.expect_string_()
if not name[0].isalpha():
raise VoltLibError(
'Lookup name "%s" must start with a letter' % name, location
)
if self.lookups_.resolve(name) is not None:
raise VoltLibError(
'Lookup "%s" already defined, '
"lookup names are case insensitive" % name,
location,
)
process_base = True
if self.next_token_ == "PROCESS_BASE":
self.advance_lexer_()
elif self.next_token_ == "SKIP_BASE":
self.advance_lexer_()
process_base = False
process_marks = True
mark_glyph_set = None
if self.next_token_ == "PROCESS_MARKS":
self.advance_lexer_()
if self.next_token_ == "MARK_GLYPH_SET":
self.advance_lexer_()
mark_glyph_set = self.expect_string_()
elif self.next_token_ == "ALL":
self.advance_lexer_()
elif self.next_token_ == "NONE":
self.advance_lexer_()
process_marks = False
elif self.next_token_type_ == Lexer.STRING:
process_marks = self.expect_string_()
else:
raise VoltLibError(
"Expected ALL, NONE, MARK_GLYPH_SET or an ID. "
"Got %s" % (self.next_token_type_),
location,
)
elif self.next_token_ == "SKIP_MARKS":
self.advance_lexer_()
process_marks = False
direction = None
if self.next_token_ == "DIRECTION":
self.expect_keyword_("DIRECTION")
direction = self.expect_name_()
assert direction in ("LTR", "RTL")
reversal = None
if self.next_token_ == "REVERSAL":
self.expect_keyword_("REVERSAL")
reversal = True
comments = None
if self.next_token_ == "COMMENTS":
self.expect_keyword_("COMMENTS")
comments = self.expect_string_().replace(r"\n", "\n")
context = []
while self.next_token_ in ("EXCEPT_CONTEXT", "IN_CONTEXT"):
context = self.parse_context_()
as_pos_or_sub = self.expect_name_()
sub = None
pos = None
if as_pos_or_sub == "AS_SUBSTITUTION":
sub = self.parse_substitution_(reversal)
elif as_pos_or_sub == "AS_POSITION":
pos = self.parse_position_()
else:
raise VoltLibError(
"Expected AS_SUBSTITUTION or AS_POSITION. " "Got %s" % (as_pos_or_sub),
location,
)
def_lookup = ast.LookupDefinition(
name,
process_base,
process_marks,
mark_glyph_set,
direction,
reversal,
comments,
context,
sub,
pos,
location=location,
)
self.lookups_.define(name, def_lookup)
return def_lookup
def parse_context_(self):
location = self.cur_token_location_
contexts = []
while self.next_token_ in ("EXCEPT_CONTEXT", "IN_CONTEXT"):
side = None
coverage = None
ex_or_in = self.expect_name_()
# side_contexts = [] # XXX
if self.next_token_ != "END_CONTEXT":
left = []
right = []
while self.next_token_ in ("LEFT", "RIGHT"):
side = self.expect_name_()
coverage = self.parse_coverage_()
if side == "LEFT":
left.append(coverage)
else:
right.append(coverage)
self.expect_keyword_("END_CONTEXT")
context = ast.ContextDefinition(
ex_or_in, left, right, location=location
)
contexts.append(context)
else:
self.expect_keyword_("END_CONTEXT")
return contexts
def parse_substitution_(self, reversal):
assert self.is_cur_keyword_("AS_SUBSTITUTION")
location = self.cur_token_location_
src = []
dest = []
if self.next_token_ != "SUB":
raise VoltLibError("Expected SUB", location)
while self.next_token_ == "SUB":
self.expect_keyword_("SUB")
src.append(self.parse_coverage_())
self.expect_keyword_("WITH")
dest.append(self.parse_coverage_())
self.expect_keyword_("END_SUB")
self.expect_keyword_("END_SUBSTITUTION")
max_src = max([len(cov) for cov in src])
max_dest = max([len(cov) for cov in dest])
# many to many or mixed is invalid
if (max_src > 1 and max_dest > 1) or (
reversal and (max_src > 1 or max_dest > 1)
):
raise VoltLibError("Invalid substitution type", location)
mapping = dict(zip(tuple(src), tuple(dest)))
if max_src == 1 and max_dest == 1:
if reversal:
sub = ast.SubstitutionReverseChainingSingleDefinition(
mapping, location=location
)
else:
sub = ast.SubstitutionSingleDefinition(mapping, location=location)
elif max_src == 1 and max_dest > 1:
sub = ast.SubstitutionMultipleDefinition(mapping, location=location)
elif max_src > 1 and max_dest == 1:
sub = ast.SubstitutionLigatureDefinition(mapping, location=location)
return sub
def parse_position_(self):
assert self.is_cur_keyword_("AS_POSITION")
location = self.cur_token_location_
pos_type = self.expect_name_()
if pos_type not in ("ATTACH", "ATTACH_CURSIVE", "ADJUST_PAIR", "ADJUST_SINGLE"):
raise VoltLibError(
"Expected ATTACH, ATTACH_CURSIVE, ADJUST_PAIR, ADJUST_SINGLE", location
)
if pos_type == "ATTACH":
position = self.parse_attach_()
elif pos_type == "ATTACH_CURSIVE":
position = self.parse_attach_cursive_()
elif pos_type == "ADJUST_PAIR":
position = self.parse_adjust_pair_()
elif pos_type == "ADJUST_SINGLE":
position = self.parse_adjust_single_()
self.expect_keyword_("END_POSITION")
return position
def parse_attach_(self):
assert self.is_cur_keyword_("ATTACH")
location = self.cur_token_location_
coverage = self.parse_coverage_()
coverage_to = []
self.expect_keyword_("TO")
while self.next_token_ != "END_ATTACH":
cov = self.parse_coverage_()
self.expect_keyword_("AT")
self.expect_keyword_("ANCHOR")
anchor_name = self.expect_string_()
coverage_to.append((cov, anchor_name))
self.expect_keyword_("END_ATTACH")
position = ast.PositionAttachDefinition(
coverage, coverage_to, location=location
)
return position
def parse_attach_cursive_(self):
assert self.is_cur_keyword_("ATTACH_CURSIVE")
location = self.cur_token_location_
coverages_exit = []
coverages_enter = []
while self.next_token_ != "ENTER":
self.expect_keyword_("EXIT")
coverages_exit.append(self.parse_coverage_())
while self.next_token_ != "END_ATTACH":
self.expect_keyword_("ENTER")
coverages_enter.append(self.parse_coverage_())
self.expect_keyword_("END_ATTACH")
position = ast.PositionAttachCursiveDefinition(
coverages_exit, coverages_enter, location=location
)
return position
def parse_adjust_pair_(self):
assert self.is_cur_keyword_("ADJUST_PAIR")
location = self.cur_token_location_
coverages_1 = []
coverages_2 = []
adjust_pair = {}
while self.next_token_ == "FIRST":
self.advance_lexer_()
coverage_1 = self.parse_coverage_()
coverages_1.append(coverage_1)
while self.next_token_ == "SECOND":
self.advance_lexer_()
coverage_2 = self.parse_coverage_()
coverages_2.append(coverage_2)
while self.next_token_ != "END_ADJUST":
id_1 = self.expect_number_()
id_2 = self.expect_number_()
self.expect_keyword_("BY")
pos_1 = self.parse_pos_()
pos_2 = self.parse_pos_()
adjust_pair[(id_1, id_2)] = (pos_1, pos_2)
self.expect_keyword_("END_ADJUST")
position = ast.PositionAdjustPairDefinition(
coverages_1, coverages_2, adjust_pair, location=location
)
return position
def parse_adjust_single_(self):
assert self.is_cur_keyword_("ADJUST_SINGLE")
location = self.cur_token_location_
adjust_single = []
while self.next_token_ != "END_ADJUST":
coverages = self.parse_coverage_()
self.expect_keyword_("BY")
pos = self.parse_pos_()
adjust_single.append((coverages, pos))
self.expect_keyword_("END_ADJUST")
position = ast.PositionAdjustSingleDefinition(adjust_single, location=location)
return position
def parse_def_anchor_(self):
assert self.is_cur_keyword_("DEF_ANCHOR")
location = self.cur_token_location_
name = self.expect_string_()
self.expect_keyword_("ON")
gid = self.expect_number_()
self.expect_keyword_("GLYPH")
glyph_name = self.expect_name_()
self.expect_keyword_("COMPONENT")
component = self.expect_number_()
# check for duplicate anchor names on this glyph
if glyph_name in self.anchors_:
anchor = self.anchors_[glyph_name].resolve(name)
if anchor is not None and anchor.component == component:
raise VoltLibError(
'Anchor "%s" already defined, '
"anchor names are case insensitive" % name,
location,
)
if self.next_token_ == "LOCKED":
locked = True
self.advance_lexer_()
else:
locked = False
self.expect_keyword_("AT")
pos = self.parse_pos_()
self.expect_keyword_("END_ANCHOR")
anchor = ast.AnchorDefinition(
name, gid, glyph_name, component, locked, pos, location=location
)
if glyph_name not in self.anchors_:
self.anchors_[glyph_name] = SymbolTable()
self.anchors_[glyph_name].define(name, anchor)
return anchor
def parse_adjust_by_(self):
self.advance_lexer_()
assert self.is_cur_keyword_("ADJUST_BY")
adjustment = self.expect_number_()
self.expect_keyword_("AT")
size = self.expect_number_()
return adjustment, size
def parse_pos_(self):
# VOLT syntax doesn't seem to take device Y advance
self.advance_lexer_()
location = self.cur_token_location_
assert self.is_cur_keyword_("POS"), location
adv = None
dx = None
dy = None
adv_adjust_by = {}
dx_adjust_by = {}
dy_adjust_by = {}
if self.next_token_ == "ADV":
self.advance_lexer_()
adv = self.expect_number_()
while self.next_token_ == "ADJUST_BY":
adjustment, size = self.parse_adjust_by_()
adv_adjust_by[size] = adjustment
if self.next_token_ == "DX":
self.advance_lexer_()
dx = self.expect_number_()
while self.next_token_ == "ADJUST_BY":
adjustment, size = self.parse_adjust_by_()
dx_adjust_by[size] = adjustment
if self.next_token_ == "DY":
self.advance_lexer_()
dy = self.expect_number_()
while self.next_token_ == "ADJUST_BY":
adjustment, size = self.parse_adjust_by_()
dy_adjust_by[size] = adjustment
self.expect_keyword_("END_POS")
return ast.Pos(adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by)
def parse_unicode_values_(self):
location = self.cur_token_location_
try:
unicode_values = self.expect_string_().split(",")
unicode_values = [int(uni[2:], 16) for uni in unicode_values if uni != ""]
except ValueError as err:
raise VoltLibError(str(err), location)
return unicode_values if unicode_values != [] else None
def parse_enum_(self):
self.expect_keyword_("ENUM")
location = self.cur_token_location_
enum = ast.Enum(self.parse_coverage_(), location=location)
self.expect_keyword_("END_ENUM")
return enum
def parse_coverage_(self):
coverage = []
location = self.cur_token_location_
while self.next_token_ in ("GLYPH", "GROUP", "RANGE", "ENUM"):
if self.next_token_ == "ENUM":
enum = self.parse_enum_()
coverage.append(enum)
elif self.next_token_ == "GLYPH":
self.expect_keyword_("GLYPH")
name = self.expect_string_()
coverage.append(ast.GlyphName(name, location=location))
elif self.next_token_ == "GROUP":
self.expect_keyword_("GROUP")
name = self.expect_string_()
coverage.append(ast.GroupName(name, self, location=location))
elif self.next_token_ == "RANGE":
self.expect_keyword_("RANGE")
start = self.expect_string_()
self.expect_keyword_("TO")
end = self.expect_string_()
coverage.append(ast.Range(start, end, self, location=location))
return tuple(coverage)
def resolve_group(self, group_name):
return self.groups_.resolve(group_name)
def glyph_range(self, start, end):
return self.glyphs_.range(start, end)
def parse_ppem_(self):
location = self.cur_token_location_
ppem_name = self.cur_token_
value = self.expect_number_()
setting = ast.SettingDefinition(ppem_name, value, location=location)
return setting
def parse_noarg_option_(self):
location = self.cur_token_location_
name = self.cur_token_
value = True
setting = ast.SettingDefinition(name, value, location=location)
return setting
def parse_cmap_format(self):
location = self.cur_token_location_
name = self.cur_token_
value = (self.expect_number_(), self.expect_number_(), self.expect_number_())
setting = ast.SettingDefinition(name, value, location=location)
return setting
def is_cur_keyword_(self, k):
return (self.cur_token_type_ is Lexer.NAME) and (self.cur_token_ == k)
def expect_string_(self):
self.advance_lexer_()
if self.cur_token_type_ is not Lexer.STRING:
raise VoltLibError("Expected a string", self.cur_token_location_)
return self.cur_token_
def expect_keyword_(self, keyword):
self.advance_lexer_()
if self.cur_token_type_ is Lexer.NAME and self.cur_token_ == keyword:
return self.cur_token_
raise VoltLibError('Expected "%s"' % keyword, self.cur_token_location_)
def expect_name_(self):
self.advance_lexer_()
if self.cur_token_type_ is Lexer.NAME:
return self.cur_token_
raise VoltLibError("Expected a name", self.cur_token_location_)
def expect_number_(self):
self.advance_lexer_()
if self.cur_token_type_ is not Lexer.NUMBER:
raise VoltLibError("Expected a number", self.cur_token_location_)
return self.cur_token_
def advance_lexer_(self):
self.cur_token_type_, self.cur_token_, self.cur_token_location_ = (
self.next_token_type_,
self.next_token_,
self.next_token_location_,
)
try:
if self.is_cur_keyword_("END"):
raise StopIteration
(
self.next_token_type_,
self.next_token_,
self.next_token_location_,
) = self.lexer_.next()
except StopIteration:
self.next_token_type_, self.next_token_ = (None, None)
class SymbolTable(object):
def __init__(self):
self.scopes_ = [{}]
def enter_scope(self):
self.scopes_.append({})
def exit_scope(self):
self.scopes_.pop()
def define(self, name, item):
self.scopes_[-1][name] = item
def resolve(self, name, case_insensitive=True):
for scope in reversed(self.scopes_):
item = scope.get(name)
if item:
return item
if case_insensitive:
for key in scope:
if key.lower() == name.lower():
return scope[key]
return None
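
# Illustrative sketch (not part of the original file): resolution walks scopes
# innermost-first and falls back to a case-insensitive match; exiting a scope
# discards its definitions.
_example_table = SymbolTable()
_example_table.define("GroupA", "outer")
_example_table.enter_scope()
_example_table.define("GroupB", "inner")
assert _example_table.resolve("groupa") == "outer"  # case-insensitive fallback
_example_table.exit_scope()
assert _example_table.resolve("GroupB") is None  # inner scope was popped
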
class OrderedSymbolTable(SymbolTable):
def __init__(self):
self.scopes_ = [{}]
def enter_scope(self):
self.scopes_.append({})
def resolve(self, name, case_insensitive=False):
        return SymbolTable.resolve(self, name, case_insensitive=case_insensitive)
def range(self, start, end):
for scope in reversed(self.scopes_):
if start in scope and end in scope:
start_idx = list(scope.keys()).index(start)
end_idx = list(scope.keys()).index(end)
return list(scope.keys())[start_idx : end_idx + 1]
return None
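
# Illustrative sketch (not part of the original file): parsing an in-memory
# VOLT fragment; make_lexer_ accepts file-like objects as well as paths.
from io import StringIO

_example_doc = Parser(
    StringIO('DEF_GLYPH "a" ID 1 UNICODE 97 TYPE BASE END_GLYPH\nEND\n')
).parse()
_example_glyph = _example_doc.statements[0]
assert (_example_glyph.name, _example_glyph.id, _example_glyph.unicode,
        _example_glyph.type) == ("a", 1, [97], "BASE")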


@@ -0,0 +1,730 @@
"""\
MS VOLT ``.vtp`` to AFDKO ``.fea`` OpenType Layout converter.
Usage
-----
To convert a VTP project file:
.. code-block:: sh
$ fonttools voltLib.voltToFea input.vtp output.fea
It is also possible to convert font files with a `TSIV` table (as saved from
Volt); in this case the glyph names used in the Volt project will be mapped to
the actual glyph names in the font files when written to the feature file:
.. code-block:: sh
$ fonttools voltLib.voltToFea input.ttf output.fea
The ``--quiet`` option can be used to suppress warnings.
The ``--traceback`` option can be used to get a Python traceback in case of
exceptions, instead of suppressing it.
Limitations
-----------
* Not all VOLT features are supported; the script will error if it encounters
  something it does not understand. Please report an issue if this happens.
* AFDKO feature file syntax for mark positioning is awkward and does not allow
  setting the mark coverage. It also defines mark anchors globally; as a result,
  some mark positioning lookups might cover more marks than what was in the VOLT
  file. This should not be an issue in practice, but if it is, the only fix is
  to modify the VOLT file or the generated feature file manually to use unique
  mark anchors for each lookup.
* VOLT allows subtable breaks in any lookup type, but AFDKO feature file
  implementations vary in their support; currently AFDKO's makeOTF supports
  subtable breaks in pair positioning lookups only, while fontTools feaLib
  supports them for most substitution lookups and only some positioning lookups.
"""
import logging
import re
from io import StringIO
from fontTools.feaLib import ast
from fontTools.ttLib import TTFont, TTLibError
from fontTools.voltLib import ast as VAst
from fontTools.voltLib.parser import Parser as VoltParser
log = logging.getLogger("fontTools.voltLib.voltToFea")
TABLES = ["GDEF", "GSUB", "GPOS"]
class MarkClassDefinition(ast.MarkClassDefinition):
def asFea(self, indent=""):
res = ""
if not getattr(self, "used", False):
res += "#"
res += ast.MarkClassDefinition.asFea(self, indent)
return res
# For sorting voltLib.ast.GroupDefinition, see its use below.
class Group:
def __init__(self, group):
self.name = group.name.lower()
self.groups = [
x.group.lower() for x in group.enum.enum if isinstance(x, VAst.GroupName)
]
def __lt__(self, other):
if self.name in other.groups:
return True
if other.name in self.groups:
return False
if self.groups and not other.groups:
return False
if not self.groups and other.groups:
return True
class VoltToFea:
_NOT_LOOKUP_NAME_RE = re.compile(r"[^A-Za-z_0-9.]")
_NOT_CLASS_NAME_RE = re.compile(r"[^A-Za-z_0-9.\-]")
def __init__(self, file_or_path, font=None):
self._file_or_path = file_or_path
self._font = font
self._glyph_map = {}
self._glyph_order = None
self._gdef = {}
self._glyphclasses = {}
self._features = {}
self._lookups = {}
self._marks = set()
self._ligatures = {}
self._markclasses = {}
self._anchors = {}
self._settings = {}
self._lookup_names = {}
self._class_names = {}
def _lookupName(self, name):
if name not in self._lookup_names:
res = self._NOT_LOOKUP_NAME_RE.sub("_", name)
while res in self._lookup_names.values():
res += "_"
self._lookup_names[name] = res
return self._lookup_names[name]
def _className(self, name):
if name not in self._class_names:
res = self._NOT_CLASS_NAME_RE.sub("_", name)
while res in self._class_names.values():
res += "_"
self._class_names[name] = res
return self._class_names[name]
def _collectStatements(self, doc, tables):
# Collect and sort group definitions first, to make sure a group
# definition that references other groups comes after them since VOLT
        # does not enforce such ordering, and feature files require it.
groups = [s for s in doc.statements if isinstance(s, VAst.GroupDefinition)]
for statement in sorted(groups, key=lambda x: Group(x)):
self._groupDefinition(statement)
for statement in doc.statements:
if isinstance(statement, VAst.GlyphDefinition):
self._glyphDefinition(statement)
elif isinstance(statement, VAst.AnchorDefinition):
if "GPOS" in tables:
self._anchorDefinition(statement)
elif isinstance(statement, VAst.SettingDefinition):
self._settingDefinition(statement)
elif isinstance(statement, VAst.GroupDefinition):
pass # Handled above
elif isinstance(statement, VAst.ScriptDefinition):
self._scriptDefinition(statement)
elif not isinstance(statement, VAst.LookupDefinition):
raise NotImplementedError(statement)
# Lookup definitions need to be handled last as they reference glyph
# and mark classes that might be defined after them.
for statement in doc.statements:
if isinstance(statement, VAst.LookupDefinition):
if statement.pos and "GPOS" not in tables:
continue
if statement.sub and "GSUB" not in tables:
continue
self._lookupDefinition(statement)
def _buildFeatureFile(self, tables):
doc = ast.FeatureFile()
statements = doc.statements
if self._glyphclasses:
statements.append(ast.Comment("# Glyph classes"))
statements.extend(self._glyphclasses.values())
if self._markclasses:
statements.append(ast.Comment("\n# Mark classes"))
statements.extend(c[1] for c in sorted(self._markclasses.items()))
if self._lookups:
statements.append(ast.Comment("\n# Lookups"))
for lookup in self._lookups.values():
statements.extend(getattr(lookup, "targets", []))
statements.append(lookup)
# Prune features
features = self._features.copy()
for ftag in features:
scripts = features[ftag]
for stag in scripts:
langs = scripts[stag]
for ltag in langs:
langs[ltag] = [l for l in langs[ltag] if l.lower() in self._lookups]
scripts[stag] = {t: l for t, l in langs.items() if l}
features[ftag] = {t: s for t, s in scripts.items() if s}
features = {t: f for t, f in features.items() if f}
if features:
statements.append(ast.Comment("# Features"))
for ftag, scripts in features.items():
feature = ast.FeatureBlock(ftag)
stags = sorted(scripts, key=lambda k: 0 if k == "DFLT" else 1)
for stag in stags:
feature.statements.append(ast.ScriptStatement(stag))
ltags = sorted(scripts[stag], key=lambda k: 0 if k == "dflt" else 1)
for ltag in ltags:
include_default = True if ltag == "dflt" else False
feature.statements.append(
ast.LanguageStatement(ltag, include_default=include_default)
)
for name in scripts[stag][ltag]:
lookup = self._lookups[name.lower()]
lookupref = ast.LookupReferenceStatement(lookup)
feature.statements.append(lookupref)
statements.append(feature)
if self._gdef and "GDEF" in tables:
classes = []
for name in ("BASE", "MARK", "LIGATURE", "COMPONENT"):
if name in self._gdef:
classname = "GDEF_" + name.lower()
glyphclass = ast.GlyphClassDefinition(classname, self._gdef[name])
statements.append(glyphclass)
classes.append(ast.GlyphClassName(glyphclass))
else:
classes.append(None)
gdef = ast.TableBlock("GDEF")
gdef.statements.append(ast.GlyphClassDefStatement(*classes))
statements.append(gdef)
return doc
def convert(self, tables=None):
doc = VoltParser(self._file_or_path).parse()
if tables is None:
tables = TABLES
if self._font is not None:
self._glyph_order = self._font.getGlyphOrder()
self._collectStatements(doc, tables)
fea = self._buildFeatureFile(tables)
return fea.asFea()
def _glyphName(self, glyph):
try:
name = glyph.glyph
except AttributeError:
name = glyph
return ast.GlyphName(self._glyph_map.get(name, name))
def _groupName(self, group):
try:
name = group.group
except AttributeError:
name = group
return ast.GlyphClassName(self._glyphclasses[name.lower()])
def _coverage(self, coverage):
items = []
for item in coverage:
if isinstance(item, VAst.GlyphName):
items.append(self._glyphName(item))
elif isinstance(item, VAst.GroupName):
items.append(self._groupName(item))
elif isinstance(item, VAst.Enum):
items.append(self._enum(item))
elif isinstance(item, VAst.Range):
items.append((item.start, item.end))
else:
raise NotImplementedError(item)
return items
def _enum(self, enum):
return ast.GlyphClass(self._coverage(enum.enum))
def _context(self, context):
out = []
for item in context:
coverage = self._coverage(item)
if not isinstance(coverage, (tuple, list)):
coverage = [coverage]
out.extend(coverage)
return out
def _groupDefinition(self, group):
name = self._className(group.name)
glyphs = self._enum(group.enum)
glyphclass = ast.GlyphClassDefinition(name, glyphs)
self._glyphclasses[group.name.lower()] = glyphclass
def _glyphDefinition(self, glyph):
try:
self._glyph_map[glyph.name] = self._glyph_order[glyph.id]
except TypeError:
pass
if glyph.type in ("BASE", "MARK", "LIGATURE", "COMPONENT"):
if glyph.type not in self._gdef:
self._gdef[glyph.type] = ast.GlyphClass()
self._gdef[glyph.type].glyphs.append(self._glyphName(glyph.name))
if glyph.type == "MARK":
self._marks.add(glyph.name)
elif glyph.type == "LIGATURE":
self._ligatures[glyph.name] = glyph.components
def _scriptDefinition(self, script):
stag = script.tag
for lang in script.langs:
ltag = lang.tag
for feature in lang.features:
lookups = {l.split("\\")[0]: True for l in feature.lookups}
ftag = feature.tag
if ftag not in self._features:
self._features[ftag] = {}
if stag not in self._features[ftag]:
self._features[ftag][stag] = {}
assert ltag not in self._features[ftag][stag]
self._features[ftag][stag][ltag] = lookups.keys()
def _settingDefinition(self, setting):
if setting.name.startswith("COMPILER_"):
self._settings[setting.name] = setting.value
else:
log.warning(f"Unsupported setting ignored: {setting.name}")
def _adjustment(self, adjustment):
adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment
adv_device = adv_adjust_by and adv_adjust_by.items() or None
dx_device = dx_adjust_by and dx_adjust_by.items() or None
dy_device = dy_adjust_by and dy_adjust_by.items() or None
return ast.ValueRecord(
xPlacement=dx,
yPlacement=dy,
xAdvance=adv,
xPlaDevice=dx_device,
yPlaDevice=dy_device,
xAdvDevice=adv_device,
)
def _anchor(self, adjustment):
adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment
assert not adv_adjust_by
dx_device = dx_adjust_by and dx_adjust_by.items() or None
dy_device = dy_adjust_by and dy_adjust_by.items() or None
return ast.Anchor(
dx or 0,
dy or 0,
xDeviceTable=dx_device or None,
yDeviceTable=dy_device or None,
)
def _anchorDefinition(self, anchordef):
anchorname = anchordef.name
glyphname = anchordef.glyph_name
anchor = self._anchor(anchordef.pos)
if anchorname.startswith("MARK_"):
name = "_".join(anchorname.split("_")[1:])
markclass = ast.MarkClass(self._className(name))
glyph = self._glyphName(glyphname)
markdef = MarkClassDefinition(markclass, anchor, glyph)
self._markclasses[(glyphname, anchorname)] = markdef
else:
if glyphname not in self._anchors:
self._anchors[glyphname] = {}
if anchorname not in self._anchors[glyphname]:
self._anchors[glyphname][anchorname] = {}
self._anchors[glyphname][anchorname][anchordef.component] = anchor
def _gposLookup(self, lookup, fealookup):
statements = fealookup.statements
pos = lookup.pos
if isinstance(pos, VAst.PositionAdjustPairDefinition):
for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items():
coverage_1 = pos.coverages_1[idx1 - 1]
coverage_2 = pos.coverages_2[idx2 - 1]
                # Unless both sides are groups, use “enum pos”; otherwise
                # makeotf will fail.
enumerated = False
for item in coverage_1 + coverage_2:
if not isinstance(item, VAst.GroupName):
enumerated = True
glyphs1 = self._coverage(coverage_1)
glyphs2 = self._coverage(coverage_2)
record1 = self._adjustment(pos1)
record2 = self._adjustment(pos2)
assert len(glyphs1) == 1
assert len(glyphs2) == 1
statements.append(
ast.PairPosStatement(
glyphs1[0], record1, glyphs2[0], record2, enumerated=enumerated
)
)
elif isinstance(pos, VAst.PositionAdjustSingleDefinition):
for a, b in pos.adjust_single:
glyphs = self._coverage(a)
record = self._adjustment(b)
assert len(glyphs) == 1
statements.append(
ast.SinglePosStatement([(glyphs[0], record)], [], [], False)
)
elif isinstance(pos, VAst.PositionAttachDefinition):
anchors = {}
for marks, classname in pos.coverage_to:
for mark in marks:
# Set actually used mark classes. Basically a hack to get
# around the feature file syntax limitation of making mark
# classes global and not allowing mark positioning to
# specify mark coverage.
for name in mark.glyphSet():
key = (name, "MARK_" + classname)
self._markclasses[key].used = True
markclass = ast.MarkClass(self._className(classname))
for base in pos.coverage:
for name in base.glyphSet():
if name not in anchors:
anchors[name] = []
if classname not in anchors[name]:
anchors[name].append(classname)
for name in anchors:
components = 1
if name in self._ligatures:
components = self._ligatures[name]
marks = []
for mark in anchors[name]:
markclass = ast.MarkClass(self._className(mark))
for component in range(1, components + 1):
if len(marks) < component:
marks.append([])
anchor = None
if component in self._anchors[name][mark]:
anchor = self._anchors[name][mark][component]
marks[component - 1].append((anchor, markclass))
base = self._glyphName(name)
if name in self._marks:
mark = ast.MarkMarkPosStatement(base, marks[0])
elif name in self._ligatures:
mark = ast.MarkLigPosStatement(base, marks)
else:
mark = ast.MarkBasePosStatement(base, marks[0])
statements.append(mark)
elif isinstance(pos, VAst.PositionAttachCursiveDefinition):
# Collect enter and exit glyphs
enter_coverage = []
for coverage in pos.coverages_enter:
for base in coverage:
for name in base.glyphSet():
enter_coverage.append(name)
exit_coverage = []
for coverage in pos.coverages_exit:
for base in coverage:
for name in base.glyphSet():
exit_coverage.append(name)
# Write enter anchors, also check if the glyph has exit anchor and
# write it, too.
for name in enter_coverage:
glyph = self._glyphName(name)
entry = self._anchors[name]["entry"][1]
exit = None
if name in exit_coverage:
exit = self._anchors[name]["exit"][1]
exit_coverage.pop(exit_coverage.index(name))
statements.append(ast.CursivePosStatement(glyph, entry, exit))
# Write any remaining exit anchors.
for name in exit_coverage:
glyph = self._glyphName(name)
exit = self._anchors[name]["exit"][1]
statements.append(ast.CursivePosStatement(glyph, None, exit))
else:
raise NotImplementedError(pos)
def _gposContextLookup(
self, lookup, prefix, suffix, ignore, fealookup, targetlookup
):
statements = fealookup.statements
assert not lookup.reversal
pos = lookup.pos
if isinstance(pos, VAst.PositionAdjustPairDefinition):
for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items():
glyphs1 = self._coverage(pos.coverages_1[idx1 - 1])
glyphs2 = self._coverage(pos.coverages_2[idx2 - 1])
assert len(glyphs1) == 1
assert len(glyphs2) == 1
glyphs = (glyphs1[0], glyphs2[0])
if ignore:
statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])
else:
lookups = (targetlookup, targetlookup)
statement = ast.ChainContextPosStatement(
prefix, glyphs, suffix, lookups
)
statements.append(statement)
elif isinstance(pos, VAst.PositionAdjustSingleDefinition):
glyphs = [ast.GlyphClass()]
for a, b in pos.adjust_single:
glyph = self._coverage(a)
glyphs[0].extend(glyph)
if ignore:
statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])
else:
statement = ast.ChainContextPosStatement(
prefix, glyphs, suffix, [targetlookup]
)
statements.append(statement)
elif isinstance(pos, VAst.PositionAttachDefinition):
glyphs = [ast.GlyphClass()]
for coverage, _ in pos.coverage_to:
glyphs[0].extend(self._coverage(coverage))
if ignore:
statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])
else:
statement = ast.ChainContextPosStatement(
prefix, glyphs, suffix, [targetlookup]
)
statements.append(statement)
else:
raise NotImplementedError(pos)
def _gsubLookup(self, lookup, prefix, suffix, ignore, chain, fealookup):
statements = fealookup.statements
sub = lookup.sub
for key, val in sub.mapping.items():
if not key or not val:
path, line, column = sub.location
log.warning(f"{path}:{line}:{column}: Ignoring empty substitution")
continue
statement = None
glyphs = self._coverage(key)
replacements = self._coverage(val)
if ignore:
chain_context = (prefix, glyphs, suffix)
statement = ast.IgnoreSubstStatement([chain_context])
elif isinstance(sub, VAst.SubstitutionSingleDefinition):
assert len(glyphs) == 1
assert len(replacements) == 1
statement = ast.SingleSubstStatement(
glyphs, replacements, prefix, suffix, chain
)
elif isinstance(sub, VAst.SubstitutionReverseChainingSingleDefinition):
assert len(glyphs) == 1
assert len(replacements) == 1
statement = ast.ReverseChainSingleSubstStatement(
prefix, suffix, glyphs, replacements
)
elif isinstance(sub, VAst.SubstitutionMultipleDefinition):
assert len(glyphs) == 1
statement = ast.MultipleSubstStatement(
prefix, glyphs[0], suffix, replacements, chain
)
elif isinstance(sub, VAst.SubstitutionLigatureDefinition):
assert len(replacements) == 1
statement = ast.LigatureSubstStatement(
prefix, glyphs, suffix, replacements[0], chain
)
else:
raise NotImplementedError(sub)
statements.append(statement)
def _lookupDefinition(self, lookup):
mark_attachement = None
mark_filtering = None
flags = 0
if lookup.direction == "RTL":
flags |= 1
if not lookup.process_base:
flags |= 2
# FIXME: Does VOLT support this?
# if not lookup.process_ligatures:
# flags |= 4
if not lookup.process_marks:
flags |= 8
elif isinstance(lookup.process_marks, str):
mark_attachement = self._groupName(lookup.process_marks)
elif lookup.mark_glyph_set is not None:
mark_filtering = self._groupName(lookup.mark_glyph_set)
lookupflags = None
if flags or mark_attachement is not None or mark_filtering is not None:
lookupflags = ast.LookupFlagStatement(
flags, mark_attachement, mark_filtering
)
if "\\" in lookup.name:
# Merge sub lookups as subtables (lookups named “base\sub”),
# makeotf/feaLib will issue a warning and ignore the subtable
# statement if it is not a pairpos lookup, though.
name = lookup.name.split("\\")[0]
if name.lower() not in self._lookups:
fealookup = ast.LookupBlock(self._lookupName(name))
if lookupflags is not None:
fealookup.statements.append(lookupflags)
fealookup.statements.append(ast.Comment("# " + lookup.name))
else:
fealookup = self._lookups[name.lower()]
fealookup.statements.append(ast.SubtableStatement())
fealookup.statements.append(ast.Comment("# " + lookup.name))
self._lookups[name.lower()] = fealookup
else:
fealookup = ast.LookupBlock(self._lookupName(lookup.name))
if lookupflags is not None:
fealookup.statements.append(lookupflags)
self._lookups[lookup.name.lower()] = fealookup
if lookup.comments is not None:
fealookup.statements.append(ast.Comment("# " + lookup.comments))
contexts = []
if lookup.context:
for context in lookup.context:
prefix = self._context(context.left)
suffix = self._context(context.right)
ignore = context.ex_or_in == "EXCEPT_CONTEXT"
contexts.append([prefix, suffix, ignore, False])
                # It seems that VOLT will create contextual substitution using
                # only the input if there are no other contexts in this lookup.
if ignore and len(lookup.context) == 1:
contexts.append([[], [], False, True])
else:
contexts.append([[], [], False, False])
targetlookup = None
for prefix, suffix, ignore, chain in contexts:
if lookup.sub is not None:
self._gsubLookup(lookup, prefix, suffix, ignore, chain, fealookup)
if lookup.pos is not None:
if self._settings.get("COMPILER_USEEXTENSIONLOOKUPS"):
fealookup.use_extension = True
if prefix or suffix or chain or ignore:
if not ignore and targetlookup is None:
targetname = self._lookupName(lookup.name + " target")
targetlookup = ast.LookupBlock(targetname)
fealookup.targets = getattr(fealookup, "targets", [])
fealookup.targets.append(targetlookup)
self._gposLookup(lookup, targetlookup)
self._gposContextLookup(
lookup, prefix, suffix, ignore, fealookup, targetlookup
)
else:
self._gposLookup(lookup, fealookup)
def main(args=None):
"""Convert MS VOLT to AFDKO feature files."""
import argparse
from pathlib import Path
from fontTools import configLogger
parser = argparse.ArgumentParser(
"fonttools voltLib.voltToFea", description=main.__doc__
)
parser.add_argument(
"input", metavar="INPUT", type=Path, help="input font/VTP file to process"
)
parser.add_argument(
"featurefile", metavar="OUTPUT", type=Path, help="output feature file"
)
parser.add_argument(
"-t",
"--table",
action="append",
choices=TABLES,
dest="tables",
help="List of tables to write, by default all tables are written",
)
parser.add_argument(
"-q", "--quiet", action="store_true", help="Suppress non-error messages"
)
parser.add_argument(
"--traceback", action="store_true", help="Dont catch exceptions"
)
options = parser.parse_args(args)
configLogger(level=("ERROR" if options.quiet else "INFO"))
file_or_path = options.input
font = None
try:
font = TTFont(file_or_path)
if "TSIV" in font:
file_or_path = StringIO(font["TSIV"].data.decode("utf-8"))
else:
log.error('"TSIV" table is missing, font was not saved from VOLT?')
return 1
except TTLibError:
pass
converter = VoltToFea(file_or_path, font)
try:
fea = converter.convert(options.tables)
except NotImplementedError as e:
if options.traceback:
raise
location = getattr(e.args[0], "location", None)
message = f'"{e}" is not supported'
if location:
path, line, column = location
log.error(f"{path}:{line}:{column}: {message}")
else:
log.error(message)
return 1
with open(options.featurefile, "w") as feafile:
feafile.write(fea)
if __name__ == "__main__":
import sys
sys.exit(main())
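
# Illustrative sketch (not part of the original file): the converter can also
# be driven directly from Python without the CLI; "input.vtp" and "output.fea"
# are placeholder paths.
#
#     converter = VoltToFea("input.vtp")  # optionally pass a TTFont as `font`
#     fea_text = converter.convert(["GDEF", "GSUB", "GPOS"])
#     with open("output.fea", "w") as feafile:
#         feafile.write(fea_text)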