Clean up code

germax26 2024-09-15 16:10:41 +10:00
parent adfd9b1ba4
commit 9ed1bfcadb
Signed by: germax26
SSH Key Fingerprint: SHA256:N3w+8798IMWBt7SYH8G1C0iJlIa2HIIcRCXwILT5FvM
5 changed files with 446 additions and 313 deletions


@ -0,0 +1,109 @@
from datetime import datetime, timedelta, timezone
from types import TracebackType
from typing import BinaryIO, Callable, List, Optional, Tuple, Type, TypeVar
from io_utils import Parser, read_fixed_point, read_u16, read_u64
def read_fixed(f: BinaryIO) -> float: # 16.16
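# e.g. the bytes 0x0001_8000 decode to 1.5, assuming read_fixed_point does the usual 16.16 split (16 integer bits, 16 fractional bits)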
return read_fixed_point(f, 16, 16)
def read_fixed_version(f: BinaryIO) -> float: # Not the same as parse_fixed
majorVersion = read_u16(f)
minorVersion = read_u16(f)
assert minorVersion in [0x0000, 0x1000, 0x5000], f"Invalid fixed minorVersion: {hex(minorVersion)}"
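# e.g. majorVersion 1 with minorVersion 0x5000 decodes to 1.5, since 0x5000/0xa000 == 0.5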
return majorVersion + minorVersion/0xa000 # will need to change if there are ever any versions with 2 decimal digits
def read_F2DOT14(f: BinaryIO) -> float: # F2DOT14 (2.14)
return read_fixed_point(f, 2, 14)
EPOCH = datetime(1904, 1, 1, tzinfo=timezone.utc)
def read_long_datetime(f: BinaryIO) -> datetime:
return EPOCH+timedelta(seconds=read_u64(f))
T = TypeVar('T')
"""
The following `parse_at_...` functions all modify the tell of `f`, so it is recommended to wrap calls to them with `SaveTell`.
"""
def parse_at_offset(f: BinaryIO, start_tell: int, offset: int, parser: Parser[T], *, zero_is_null:bool=True) -> T:
"""
Parses a `T` using `parser` at an offset of `offset` from `start_tell`.
If `zero_is_null` is True, then `offset` cannot be 0.
Only set `zero_is_null` to False when you are sure that 0 is a valid offset
"""
if zero_is_null: assert offset != 0, "Offset was NULL"
f.seek(start_tell+offset)
return parser(f)
def parse_at_optional_offset(f: BinaryIO, start_tell: int, offset: int, parser: Parser[T]) -> Optional[T]:
"""
Same as `parse_at_offset`; however, if the offset is NULL (0), None is returned.
Should not be used when 0 is a valid offset to something.
"""
if offset == 0: return None
return parse_at_offset(f, start_tell, offset, parser, zero_is_null=True)
def parse_at_offsets(f: BinaryIO, start_tell: int, offsets: List[int], parser: Parser[T], *, zero_is_null:bool=True) -> List[T]:
return [parse_at_offset(f, start_tell, offset, parser, zero_is_null=zero_is_null) for offset in offsets]
def parse_at_optional_offsets(f: BinaryIO, start_tell: int, offsets: List[int], parser: Parser[T]) -> List[Optional[T]]:
return [parse_at_optional_offset(f, start_tell, offset, parser) for offset in offsets]
def parse_at_offsets_using_length(f: BinaryIO, start_tell: int, offsets: List[int], parser: Callable[[BinaryIO, int, int], T], *, zero_is_null:bool=True) -> List[Optional[T]]:
"""
The length of the returned list will be one less than that of `offsets`, as the last offset is used to calculate the length of the final element.
`parser` is of the form `(f: BinaryIO, index: int, length: int) -> T`
"""
elements: List[Optional[T]] = []
for i, offset in enumerate(offsets[:-1]):
length = offsets[i+1]-offset
if length == 0:
elements.append(None)
continue
elements.append(parse_at_offset(f, start_tell, offset, lambda f: parser(f, i, length), zero_is_null=zero_is_null))
return elements
def parse_list_at_offset(f: BinaryIO, start_tell: int, offset: int, count: int, parser: Parser[T], *, zero_is_null:bool=True) -> List[T]:
"""
Parses a contiguous list of `T`s with `count` elements.
"""
return parse_at_offset(f, start_tell, offset, lambda f: [parser(f) for _ in range(count)], zero_is_null=zero_is_null)
def parse_list_and_use_offsets_into(f: BinaryIO, start_tell: int, offsets: List[int], count: int, parser: Parser[T]) -> Tuple[List[T], List[T]]:
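"""
Parses `count` elements sequentially from the current tell, remembering the tell at which each element starts.
Each offset in `offsets` (relative to `start_tell`) is then resolved to the element that starts at that tell; an offset that does not land on the start of an element is an error.
Returns a tuple of (elements ordered by `offsets`, all elements in parse order).
"""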
elements = [(f.tell(), parser(f)) for _ in range(count)]
elements_by_offset_into: List[T] = []
for offset in offsets:
for (offset_into, element) in elements:
if offset + start_tell == offset_into:
elements_by_offset_into.append(element)
break
else:
assert False, (f"No element with offset {offset} into this list of elements", start_tell, offsets, elements)
return elements_by_offset_into, [element for (_, element) in elements]
class SaveTell:
"""
A context manager that allows operations to be done on a BinaryIO without affecting the tell.
"""
def __init__(self, f: BinaryIO):
self.f = f
def __enter__(self) -> int:
self.tell = self.f.tell()
return self.tell
def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], exc_tb: Optional[TracebackType]):
self.f.seek(self.tell)
def null_if_zero(n: int) -> Optional[int]: return n if n else None
def nulls_if_zero(ns: List[int]) -> List[Optional[int]]: return list(map(null_if_zero, ns))
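A rough sketch of how these helpers are meant to compose (a minimal illustration only; `parse_example_record` and the table layout here are made up for the example, not part of the real parser):

def parse_example_record(f: BinaryIO) -> Tuple[int, int]:
    # hypothetical record: two u16 fields
    return read_u16(f), read_u16(f)

def parse_example_table(f: BinaryIO) -> Tuple[Tuple[int, int], int]:
    start_tell = f.tell()                # offsets in this table are relative to its start
    recordOffset = read_u16(f)           # u16 offset to the record, relative to start_tell
    with SaveTell(f):                    # jump to the record without losing our place
        record = parse_at_offset(f, start_tell, recordOffset, parse_example_record)
    nextField = read_u16(f)              # resumes right after recordOffset
    return record, nextField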


@ -1,29 +1,15 @@
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from datetime import datetime
from enum import Enum, EnumMeta
from io import BytesIO
from math import floor, log2
from types import TracebackType
from typing import Callable, Generic, List, Optional, Tuple, Type, TypeVar, BinaryIO, Self
from typing import Callable, Generic, List, Optional, Tuple, TypeVar, BinaryIO
from OFF_io_utils import SaveTell, null_if_zero, parse_at_offset, parse_at_offsets, parse_at_offsets_using_length, parse_at_optional_offset, parse_at_optional_offsets, parse_list_and_use_offsets_into, parse_list_at_offset, read_F2DOT14, read_fixed, read_fixed_version, read_long_datetime
from abcde import ABD, ABE
from io_utils import read_ascii, read_fixed_point, read_i16, read_i32, read_i8, read_int, read_pascal_string, read_u16, read_u24, read_u32, read_u64, read_u8
from io_utils import Parser, is_at_end, len_to_end, read_ascii, read_i16, read_i32, read_i8, read_int, read_pascal_string, read_u16, read_u24, read_u32, read_u64, read_u8
T = TypeVar('T')
def read_fixed(f: BinaryIO) -> float: # 16.16
return read_fixed_point(f, 16, 16)
def read_fixed_version(f: BinaryIO) -> float: # Not the same as parse_fixed
majorVersion = read_u16(f)
minorVersion = read_u16(f)
assert minorVersion in [0x0000, 0x1000, 0x5000], f"Invalid fixed minorVersion: {hex(minorVersion)}"
return majorVersion + minorVersion/0x1000/10 # will need to change if there are ever any versions with 2 decimal digits
def read_F2DOT14(f: BinaryIO) -> float: # F2DOT14 (2.14)
return read_fixed_point(f, 2, 14)
def read_long_datetime(f: BinaryIO) -> datetime:
return datetime(1904, 1, 1, tzinfo=timezone.utc)+timedelta(seconds=read_u64(f))
Tag_ = TypeVar('Tag_') Tag_ = TypeVar('Tag_')
SomeTag = Callable[[str], Tag_] # If SomeTag is not an EnumMeta, it should throw a ValueError to indicate an invalid tag SomeTag = Callable[[str], Tag_] # If SomeTag is not an EnumMeta, it should throw a ValueError to indicate an invalid tag
@ -38,11 +24,12 @@ def read_tag_with_conditions(f: BinaryIO, *conditions: Tuple[Callable[[str], boo
else:
assert False, f"Invalid {umbrellaTagCls.__name__}: '{tag}'"
always: Callable[[str], bool] = lambda _: True
def read_tag_from_tags(f: BinaryIO, *tagClss: SomeTag[Tag_], umbrellaTagCls: type | SomeTag[Tag_], strict:bool=True) -> Tag_:
"""
This is meant to be used for when some instances of an Enum are just CC01, CC02, CC03, ...
"""
return read_tag_with_conditions(f, *[(lambda _: True, tagCls) for tagCls in tagClss], umbrellaTagCls=umbrellaTagCls, strict=strict)
return read_tag_with_conditions(f, *[(always, tagCls) for tagCls in tagClss], umbrellaTagCls=umbrellaTagCls, strict=strict)
def read_tag(f: BinaryIO, tagCls: SomeTag[Tag_], *, strict:bool=True) -> Tag_:
return read_tag_from_tags(f, tagCls, umbrellaTagCls=tagCls, strict=strict)
@ -50,9 +37,6 @@ def read_tag(f: BinaryIO, tagCls: SomeTag[Tag_], *, strict:bool=True) -> Tag_:
ID_ = TypeVar('ID_')
SomeID = Callable[[int], ID_]
T = TypeVar('T')
Parser = Callable[[BinaryIO], T]
def read_id_from_ranges(f: BinaryIO, *ranges: Tuple[Optional[int], SomeID[ID_]], umbrellaIdCls: SomeID[ID_], reader: Parser[int]=read_u16) -> ID_: # must be in ascending order
assert len(ranges) > 0, f"Must have at least one range"
id = reader(f)
@ -60,67 +44,11 @@ def read_id_from_ranges(f: BinaryIO, *ranges: Tuple[Optional[int], SomeID[ID_]],
if num is not None and id > num: continue
try: return idCls(id)
except ValueError: pass
assert False, f"Invalid {umbrellaIdCls.__name__}: {id}"
assert False, f"Invalid {umbrellaIdCls.__name__}: {id} (hex: {repr_hex(id, 2)})"
def read_id(f: BinaryIO, idCls: SomeID[ID_], *, reader: Parser[int]=read_u16) -> ID_:
return read_id_from_ranges(f, (None, idCls), umbrellaIdCls=idCls, reader=reader)
class SaveTell:
def __init__(self, f: BinaryIO):
self.f = f
def __enter__(self) -> int:
self.tell = self.f.tell()
return self.tell
def __exit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None:
self.f.seek(self.tell)
# The following `parse_at_...` functions all move the BinaryIO away from wherever it was, so use `with SaveTell(f): ...` to save the tell
def parse_at_offset(f: BinaryIO, start_tell: int, offset: int, parser: Parser[T], *, zero_is_null:bool=True) -> T:
if zero_is_null: # Some tables have 0 being a valid offset
assert offset, f"Offset was NULL"
f.seek(start_tell+offset)
return parser(f)
def parse_at_optional_offset(f: BinaryIO, start_tell: int, offset: Optional[int], parser: Parser[T], *, zero_is_null:bool=True) -> Optional[T]:
if zero_is_null:
if not offset: return None
else:
if offset is None: return None
return parse_at_offset(f, start_tell, offset, parser, zero_is_null=zero_is_null)
def parse_at_offsets(f: BinaryIO, start_tell: int, offsets: List[int], parser: Parser[T], *, zero_is_null:bool=True) -> List[T]:
return [parse_at_offset(f, start_tell, offset, parser, zero_is_null=zero_is_null) for offset in offsets]
def parse_at_optional_offsets(f: BinaryIO, start_tell: int, offsets: List[int], parser: Parser[T], *, zero_is_null:bool=True) -> List[Optional[T]]:
return [parse_at_optional_offset(f, start_tell, offset, parser, zero_is_null=zero_is_null) for offset in offsets]
def parse_at_offsets_using_length(f: BinaryIO, start_tell: int, offsets: List[int], parser: Callable[[BinaryIO, int, int], T], *, zero_is_null:bool=True) -> List[Optional[T]]:
"""
The length of the returned list will be one less than that of `offsets`, as the last offset is used to calculate the length of the final element.
`parser` is of the form `(f: BinaryIO, index: int, length: int) -> T`
"""
elements: List[Optional[T]] = []
for i, offset in enumerate(offsets[:-1]):
length = offsets[i+1]-offset
if length == 0:
elements.append(None)
continue
elements.append(parse_at_offset(f, start_tell, offset, lambda f: parser(f, i, length), zero_is_null=zero_is_null))
return elements
def parse_list_at_offset(f: BinaryIO, start_tell: int, offset: int, count: int, parser: Parser[T], *, zero_is_null:bool=True) -> List[T]:
return parse_at_offset(f, start_tell, offset, lambda f: [parser(f) for _ in range(count)], zero_is_null=zero_is_null)
def null_if_zero(n: int) -> Optional[int]: return n if n else None
def nulls_if_zero(ns: List[int]) -> List[Optional[int]]: return list(map(null_if_zero, ns))
@dataclass @dataclass
class Table(ABD): pass class Table(ABD): pass
@ -165,9 +93,12 @@ class OffsetTable(Table):
entrySelector: int
rangeShift: int
def repr_hex(value: int, length: int=8) -> str:
return f"0x{hex(value)[2:]:0>{length}}"
def parse_offset_table(f: BinaryIO) -> OffsetTable:
sfntVersion = read_u32(f)
assert sfntVersion in [0x00010000, 0x4F54544F], f"Invalid sfntVersion: 0x{hex(sfntVersion)[2:]:0>8}. Expected 0x00010000 or 0x4F54544F."
assert sfntVersion in [0x00010000, 0x4F54544F], f"Invalid sfntVersion: {repr_hex(sfntVersion)}. Expected 0x00010000 or 0x4F54544F."
numTables = read_u16(f)
searchRange = read_u16(f)
entrySelector = read_u16(f)
@ -175,7 +106,7 @@ def parse_offset_table(f: BinaryIO) -> OffsetTable:
assert bin(searchRange).count('1') == 1 # ensure searchRange is a power of 2 assert bin(searchRange).count('1') == 1 # ensure searchRange is a power of 2
assert searchRange//16 <= numTables < searchRange//8 # ensure searchRange//16 is largest power of two less than num_tables assert searchRange//16 <= numTables < searchRange//8 # ensure searchRange//16 is largest power of two less than num_tables
assert entrySelector == len(bin(searchRange))-7 # ensure entrySelector is the logarithm of searchRange//16 assert entrySelector == len(bin(searchRange))-7 # ensure entrySelector is the logarithm of searchRange//16
assert rangeShift == numTables*16-searchRange
return OffsetTable(sfntVersion, numTables, searchRange, entrySelector, rangeShift) return OffsetTable(sfntVersion, numTables, searchRange, entrySelector, rangeShift)
class TableTag(Enum): class TableTag(Enum):
@ -197,7 +128,6 @@ class TableTag(Enum):
Prep = 'prep' Prep = 'prep'
Gasp = 'gasp' # :O Gasp = 'gasp' # :O
# 5.4 (CFF) # 5.4 (CFF)
... ...
@ -208,7 +138,7 @@ class TableTag(Enum):
# 5.6 (Optional)
DSIG = 'DSIG'
Hdmx = 'hdmx'
Kern = 'Kern'
Kern = 'kern'
LTSH = 'LTSH'
PCLT = 'PCLT'
VDMX = 'VDMX'
@ -254,119 +184,25 @@ def parse_table_directory_entry(f: BinaryIO) -> TableDirectoryEntry:
length = read_u32(f) length = read_u32(f)
return TableDirectoryEntry(tableTag, checkSum, offset, length) return TableDirectoryEntry(tableTag, checkSum, offset, length)
# TODO: This should just be a dict[TableTag, TableDirectoryEntry]
@dataclass
class FontDirectoryByTable:
cmap: Optional[TableDirectoryEntry] = None
head: Optional[TableDirectoryEntry] = None
hhea: Optional[TableDirectoryEntry] = None
hmtx: Optional[TableDirectoryEntry] = None
maxp: Optional[TableDirectoryEntry] = None
name: Optional[TableDirectoryEntry] = None
os2 : Optional[TableDirectoryEntry] = None
post: Optional[TableDirectoryEntry] = None
cvt : Optional[TableDirectoryEntry] = None
fpgm: Optional[TableDirectoryEntry] = None
glyf: Optional[TableDirectoryEntry] = None
loca: Optional[TableDirectoryEntry] = None
prep: Optional[TableDirectoryEntry] = None
gasp: Optional[TableDirectoryEntry] = None
svg : Optional[TableDirectoryEntry] = None
DSIG: Optional[TableDirectoryEntry] = None
hdmx: Optional[TableDirectoryEntry] = None
Kern: Optional[TableDirectoryEntry] = None
LTSH: Optional[TableDirectoryEntry] = None
PCLT: Optional[TableDirectoryEntry] = None
VDMX: Optional[TableDirectoryEntry] = None
vhea: Optional[TableDirectoryEntry] = None
vmtx: Optional[TableDirectoryEntry] = None
COLR: Optional[TableDirectoryEntry] = None
CPAL: Optional[TableDirectoryEntry] = None
BASE: Optional[TableDirectoryEntry] = None
GDEF: Optional[TableDirectoryEntry] = None
GPOS: Optional[TableDirectoryEntry] = None
GSUB: Optional[TableDirectoryEntry] = None
JSTF: Optional[TableDirectoryEntry] = None
MATH: Optional[TableDirectoryEntry] = None
avar: Optional[TableDirectoryEntry] = None
cvar: Optional[TableDirectoryEntry] = None
fvar: Optional[TableDirectoryEntry] = None
gvar: Optional[TableDirectoryEntry] = None
HVAR: Optional[TableDirectoryEntry] = None
MVAR: Optional[TableDirectoryEntry] = None
STAT: Optional[TableDirectoryEntry] = None
VVAR: Optional[TableDirectoryEntry] = None
@dataclass
class FontDirectory:
offset_table: OffsetTable
table_directory: List[TableDirectoryEntry]
by_table: FontDirectoryByTable
def get_entry(self, table_tag: TableTag) -> Optional[TableDirectoryEntry]:
for entry in self.table_directory:
if entry.tableTag == table_tag:
return entry
return None
def has_entry(self, table_tag: TableTag) -> bool:
return self.get_entry(table_tag) is not None
def parse_font_directory(f: BinaryIO) -> FontDirectory:
offset_table = parse_offset_table(f)
table_directory_entries = [parse_table_directory_entry(f) for _ in range(offset_table.numTables)]
return FontDirectory(offset_table, table_directory_entries)
by_table = FontDirectoryByTable()
for table_directory_entry in table_directory_entries:
match table_directory_entry.tableTag:
case TableTag.Cmap: by_table.cmap = table_directory_entry
case TableTag.Head: by_table.head = table_directory_entry
case TableTag.Hhea: by_table.hhea = table_directory_entry
case TableTag.Hmtx: by_table.hmtx = table_directory_entry
case TableTag.Maxp: by_table.maxp = table_directory_entry
case TableTag.Name: by_table.name = table_directory_entry
case TableTag.OS2 : by_table.os2 = table_directory_entry
case TableTag.Post: by_table.post = table_directory_entry
case TableTag.Cvt : by_table.cvt = table_directory_entry
case TableTag.Fpgm: by_table.fpgm = table_directory_entry
case TableTag.Glyf: by_table.glyf = table_directory_entry
case TableTag.Loca: by_table.loca = table_directory_entry
case TableTag.Prep: by_table.prep = table_directory_entry
case TableTag.Gasp: by_table.gasp = table_directory_entry
case TableTag.Svg : by_table.svg = table_directory_entry
case TableTag.DSIG: by_table.DSIG = table_directory_entry
case TableTag.Hdmx: by_table.hdmx = table_directory_entry
case TableTag.Kern: by_table.Kern = table_directory_entry
case TableTag.LTSH: by_table.LTSH = table_directory_entry
case TableTag.PCLT: by_table.PCLT = table_directory_entry
case TableTag.VDMX: by_table.VDMX = table_directory_entry
case TableTag.Vhea: by_table.vhea = table_directory_entry
case TableTag.Vmtx: by_table.vmtx = table_directory_entry
case TableTag.COLR: by_table.COLR = table_directory_entry
case TableTag.CPAL: by_table.CPAL = table_directory_entry
case TableTag.BASE: by_table.BASE = table_directory_entry
case TableTag.GDEF: by_table.GDEF = table_directory_entry
case TableTag.GPOS: by_table.GPOS = table_directory_entry
case TableTag.GSUB: by_table.GSUB = table_directory_entry
case TableTag.JSTF: by_table.JSTF = table_directory_entry
case TableTag.MATH: by_table.MATH = table_directory_entry
case TableTag.Avar: by_table.avar = table_directory_entry
case TableTag.Cvar: by_table.cvar = table_directory_entry
case TableTag.Fvar: by_table.fvar = table_directory_entry
case TableTag.Gvar: by_table.gvar = table_directory_entry
case TableTag.HVAR: by_table.HVAR = table_directory_entry
case TableTag.MVAR: by_table.MVAR = table_directory_entry
case TableTag.STAT: by_table.STAT = table_directory_entry
case TableTag.VVAR: by_table.VVAR = table_directory_entry
case _:
assert False, f"Unimplemented: tableTag: {table_directory_entry.tableTag}"
return FontDirectory(offset_table, table_directory_entries, by_table)
@dataclass @dataclass
class CmapSubtable(Table, ABD): class CmapSubtable(Table, ABD):
@ -390,7 +226,7 @@ def parse_sub_header(f: BinaryIO) -> SubHeader:
@dataclass
class CmapSubtable_Format_2(CmapSubtable):
length: int
language: int # TODO: Make this an optional int
subHeaderKeys: List[int] # 256 elements
subHeaders: List[SubHeader]
glyphIndexArray: List[int]
@ -566,7 +402,9 @@ def parse_cmap_subtable(f: BinaryIO, platformID: PlatformID) -> CmapSubtable:
entryCount = read_u16(f)
glyphIdArray = [read_u16(f) for _ in range(entryCount)]
assert f.tell()-start_tell == length, (f.tell()-start_tell, length)
assert length-4<f.tell()-start_tell<=length, (f.tell()-start_tell, length)
f.seek(length-(f.tell()-start_tell))
return CmapSubtable_Format_6(format, length, language, firstCode, entryCount, glyphIdArray)
case 12:
assert read_u16(f) == 0, "Reserved"
@ -643,6 +481,7 @@ def encoding_ID_cls_from_platform_ID(platformID: PlatformID) -> Callable[[int],
def parse_encoding_ID(f: BinaryIO, platformID: PlatformID) -> EncodingID: def parse_encoding_ID(f: BinaryIO, platformID: PlatformID) -> EncodingID:
return read_id(f, encoding_ID_cls_from_platform_ID(platformID)) return read_id(f, encoding_ID_cls_from_platform_ID(platformID))
# TODO: Finish this
def parse_string_with_encoding_ID(f: BinaryIO, length: int, encodingID: EncodingID) -> str: def parse_string_with_encoding_ID(f: BinaryIO, length: int, encodingID: EncodingID) -> str:
bytes = f.read(length) bytes = f.read(length)
match encodingID: match encodingID:
@ -687,6 +526,7 @@ def parse_cmap_table(f: BinaryIO) -> CmapTable:
return CmapTable(version, numTables, encodingRecords) return CmapTable(version, numTables, encodingRecords)
HEAD_TABLE_MAGIC = 0x5F0F3CF5
@dataclass @dataclass
class HeadTable(Table): class HeadTable(Table):
majorVersion: int majorVersion: int
@ -715,7 +555,7 @@ def parse_head_table(f: BinaryIO) -> HeadTable:
fontRevision = read_fixed(f)
checkSumAdjustment = read_u32(f)
assert read_u32(f) == 0x5F0F3CF5, "magicNumber"
assert read_u32(f) == HEAD_TABLE_MAGIC, "magicNumber"
flags = read_u16(f)
unitsPerEm = read_u16(f)
created = read_long_datetime(f)
@ -867,6 +707,123 @@ class LanguageID(ABE): pass
class MacintoshLanguageID(LanguageID, Enum): class MacintoshLanguageID(LanguageID, Enum):
English = 0 English = 0
French = 1
German = 2
Italian = 3
Dutch = 4
Swedish = 5
Spanish = 6
Danish = 7
Portuguese = 8
Norwegian = 9
Hebrew = 10
Japanese = 11
Arabic = 12
Finnish = 13
Greek = 14
Icelandic = 15
Maltese = 16
Turkish = 17
Croatian = 18
Chinese_Traditional = 19
Urdu = 20
Hindi = 21
Thai = 22
Korean = 23
Lithuanian = 24
Polish = 25
Hungarian = 26
Estonian = 27
Latvian = 28
Sami = 29
Faroese = 30
FarsiPersian = 31
Russian = 32
Chinese_Simplified = 33
Flemish = 34
IrishGaelic = 35
Albanian = 36
Romanian = 37
Czech = 38
Slovak = 39
Slovenian = 40
Yiddish = 41
Serbian = 42
Macedonian = 43
Bulgarian = 44
Ukrainian = 45
Byelorussian = 46
Uzbek = 47
Kazakh = 48
Azerbaijani_CyrillicScript = 49
Azerbaijani_ArabicScript = 50
Armenian = 51
Georgian = 52
Moldavian = 53
Kirghiz = 54
Tajiki = 55
Turkmen = 56
Mongolian_MongolianScript = 57
Mongolian_CyrillicScript = 58
Pashto = 59
Kurdish = 60
Kashmiri = 61
Sindhi = 62
Tibetan = 63
Nepali = 64
Sanskrit = 65
Marathi = 66
Bengali = 67
Assamese = 68
Gujarati = 69
Punjabi = 70
Oriya = 71
Malayalam = 72
Kannada = 73
Tamil = 74
Telugu = 75
Sinhalese = 76
Burmese = 77
Khmer = 78
Lao = 79
Vietnamese = 80
Indonesian = 81
Tagalong = 82
Malay_RomanScript = 83
Malay_ArabicScript = 84
Amharic = 85
Tigrinya = 86
Galla = 87
Somali = 88
Swahili = 89
Rundi = 91
KinyarwandaRuanda = 90
NyanjaChewa = 92
Malagasy = 93
Esperanto = 94
Welsh = 128
Basque = 129
Catalan = 130
Latin = 131
Quenchua = 132
Guarani = 133
Aymara = 134
Tatar = 135
Uighur = 136
Dzongkha = 137
Javanese_RomanScript = 138
Sundanese_RomanScript = 139
Galician = 140
Afrikaans = 141
Breton = 142
Inuktitut = 143
ScottishGaelic = 144
ManxGaelic = 145
IrishGaelic_WithDotAbove = 146
Tongan = 147
Greek_Polytonic = 148
Greenlandic = 149
Azerbaijani_RomanScript = 150
def __str__(self) -> str: return self._name_ def __str__(self) -> str: return self._name_
@ -1175,7 +1132,7 @@ def parse_name_table(f: BinaryIO, length: int) -> NameTable:
case _:
assert False, f"Unimplemented: format: {format}"
assert False
assert False, format
@dataclass
class VendorTag:
@ -1218,6 +1175,10 @@ class OS2Table(Table, ABD):
usWinAscent: int usWinAscent: int
usWinDescent: int usWinDescent: int
def __post_init__(self):
if self.fsSelection & 0x40: assert self.fsSelection & (0b100001) == 0 # bit 6 indicates that the font is regular, and therefore cannot be bold or italic.
assert self.fsSelection & 0xfc00 == 0, "reserved"
@dataclass @dataclass
class OS2Table_Ver_0(OS2Table): pass class OS2Table_Ver_0(OS2Table): pass
@ -1263,9 +1224,6 @@ def parse_OS2_table(f: BinaryIO) -> OS2Table:
sTypoAscender, sTypoDescender, sTypoLineGap = read_i16(f), read_i16(f), read_i16(f) sTypoAscender, sTypoDescender, sTypoLineGap = read_i16(f), read_i16(f), read_i16(f)
usWinAscent, usWinDescent = read_u16(f), read_u16(f) usWinAscent, usWinDescent = read_u16(f), read_u16(f)
if fsSelection & 0x40: assert fsSelection & (0b100001) == 0 # bit 6 indicates that the font is regular, and therefore cannot be bold or italic.
assert fsSelection & 0xfc00 == 0, "reserved"
if version == 0: if version == 0:
return OS2Table_Ver_0(version, xAvgCharWidth, usWeightClass, usWidthClass, fsType, ySubscriptXSize, ySubscriptYSize, ySubscriptXOffset, ySubscriptYOffset, ySuperscriptXSize, ySuperscriptYSize, ySuperscriptXOffset, ySuperscriptYOffset, yStrikeoutSize, yStrikeoutPosition, sFamilyClass, panose, ulUnicodeRange1, ulUnicodeRange2, ulUnicodeRange3, ulUnicodeRange4, achVendID, fsSelection, usFirstCharIndex, usLastCharIndex, sTypoAscender, sTypoDescender, sTypoLineGap, usWinAscent, usWinDescent) return OS2Table_Ver_0(version, xAvgCharWidth, usWeightClass, usWidthClass, fsType, ySubscriptXSize, ySubscriptYSize, ySubscriptXOffset, ySubscriptYOffset, ySuperscriptXSize, ySuperscriptYSize, ySuperscriptXOffset, ySuperscriptYOffset, yStrikeoutSize, yStrikeoutPosition, sFamilyClass, panose, ulUnicodeRange1, ulUnicodeRange2, ulUnicodeRange3, ulUnicodeRange4, achVendID, fsSelection, usFirstCharIndex, usLastCharIndex, sTypoAscender, sTypoDescender, sTypoLineGap, usWinAscent, usWinDescent)
@ -1346,6 +1304,8 @@ def parse_post_table(f: BinaryIO, length: int) -> PostTable:
while length - (f.tell() - start_tell) > 0: # kinda dangerous, but I think it's the only way to make it work? number of strings is not necessarily equal to numGlyphs. I think that you could probably figure out the number of strings by filtering elements in the glyphNameIndex array while length - (f.tell() - start_tell) > 0: # kinda dangerous, but I think it's the only way to make it work? number of strings is not necessarily equal to numGlyphs. I think that you could probably figure out the number of strings by filtering elements in the glyphNameIndex array
names.append(read_pascal_string(f)) names.append(read_pascal_string(f))
assert f.tell()-start_tell == length
return PostTable_Ver_2_0(version, italicAngle, underlinePosition, underlineThickness, isFixedPitch, minMemType42, maxMemType42, minMemType1, maxMemType1, numGlyphs, glyphNameIndex, names) return PostTable_Ver_2_0(version, italicAngle, underlinePosition, underlineThickness, isFixedPitch, minMemType42, maxMemType42, minMemType1, maxMemType1, numGlyphs, glyphNameIndex, names)
case 3.0: case 3.0:
return PostTable_Ver_3_0(version, italicAngle, underlinePosition, underlineThickness, isFixedPitch, minMemType42, maxMemType42, minMemType1, maxMemType1) return PostTable_Ver_3_0(version, italicAngle, underlinePosition, underlineThickness, isFixedPitch, minMemType42, maxMemType42, minMemType1, maxMemType1)
@ -1414,6 +1374,9 @@ class DSIGTable(Table, ABD):
flags: int # It's u16 but only bits 0-7 are documented? flags: int # It's u16 but only bits 0-7 are documented?
signatureRecords: List[SignatureRecord] # there's a typo in the ISO documentation. signatureRecords: List[SignatureRecord] # there's a typo in the ISO documentation.
def __post_init__(self):
assert self.flags & 0xfe == 0, "Reserved"
@dataclass @dataclass
class DSIGTable_Ver_1(DSIGTable): pass class DSIGTable_Ver_1(DSIGTable): pass
@ -1425,7 +1388,6 @@ def parse_DSIG_table(f: BinaryIO) -> DSIGTable:
numSignatures = read_u16(f) numSignatures = read_u16(f)
flags = read_u16(f) flags = read_u16(f)
assert flags & 0xfe == 0, "Reserved"
signatureRecords = [parse_signature_record(f, start_tell) for _ in range(numSignatures)] signatureRecords = [parse_signature_record(f, start_tell) for _ in range(numSignatures)]
if version == 1: if version == 1:
@ -1678,14 +1640,18 @@ class CCXXTag(ABE):
self.num = int(tag[2:])
if not (self.__range__[0] <= self.num <= self.__range__[1]): raise ValueError(f"Invalid {self.__class__.__name__}: {self.num}. Expected number between {self.__range__[0]} and {self.__range__[1]}.")
@classmethod
def from_num(cls, num: int) -> Self:
assert 0 <= num <= 99, f"Invalid num: {num}. Must be two digits"
return cls(f"{cls.__CC__}{num:0>2}") # don't need to check the range because the __init__ will check
# @classmethod
# def from_num(cls, num: int) -> Self:
# assert 0 <= num <= 99, f"Invalid num: {num}. Must be two digits"
# return cls(f"{cls.__CC__}{num:0>2}") # don't need to check the range because the __init__ will check
def __str__(self) -> str:
return f"'{self.__CC__}{self.num:0>2}'"
@property
def name(self) -> str:
return str(self)
class ScriptTag(Enum): class ScriptTag(Enum):
Adlam = 'adlm' Adlam = 'adlm'
Ahom = 'ahom' Ahom = 'ahom'
@ -2475,8 +2441,12 @@ class ValidLangSysTag(LangSysTag, Enum):
def __str__(self) -> str: return self._name_
@dataclass
class InvalidLangSysTag(LangSysTag):
tag: str
def parse_lang_sys_tag(f: BinaryIO) -> LangSysTag:
return read_tag_from_tags(f, ValidLangSysTag, MS_VOLT_Tag, umbrellaTagCls=LangSysTag)
return read_tag_from_tags(f, ValidLangSysTag, InvalidLangSysTag, MS_VOLT_Tag, umbrellaTagCls=LangSysTag)
@dataclass
class LangSysRecord:
@ -2682,8 +2652,18 @@ class MS_VOLT_Tag(CCXXTag, LangSysTag, FeatureTag):
__CC__ = 'zz' __CC__ = 'zz'
__range__ = (0, 99) # I don't know if zz00 is valid or not, but I am letting it be, so that it can be caught, because zzXX is not a valid tag for anything __range__ = (0, 99) # I don't know if zz00 is valid or not, but I am letting it be, so that it can be caught, because zzXX is not a valid tag for anything
def is_vendor_feature_tag(tag: str) -> bool:
return all(map(is_upper, tag))
@dataclass
class VendorFeatureTag(FeatureTag):
tag: str
def __post_init__(self):
if not is_vendor_feature_tag(self.tag): raise ValueError
def parse_feature_tag(f: BinaryIO) -> FeatureTag:
return read_tag_from_tags(f, SimpleFeatureTag, CvXXFeatureTag, SsXXFeatureTag, MS_VOLT_Tag, umbrellaTagCls=FeatureTag)
return read_tag_with_conditions(f, (always, SimpleFeatureTag), (always, CvXXFeatureTag), (always, SsXXFeatureTag), (is_vendor_feature_tag, VendorFeatureTag), (lambda s: is_CCXX('zz', s), MS_VOLT_Tag), umbrellaTagCls=FeatureTag)
@dataclass
class FeatureParamsTable(Table, ABD): pass
@ -2828,7 +2808,7 @@ class LookupFlag: # TODO: Do this like the other flags
def parse_lookup_flag(f: BinaryIO) -> LookupFlag:
lookupFlag = read_u16(f)
rightToLeft, ignoreBaseGlyphs, ignoreLigatures, ignoreMarks, useMarkFilteringSet = [bool(lookupFlag&(1<<i)) for i in range(5)]
assert lookupFlag & 0x00e0 == 0, "Reserved" # TODO: Once you do this like the other flags, put this in the __post_init__
markAttachmentType = (lookupFlag & 0xff00) >> 8
return LookupFlag(rightToLeft, ignoreBaseGlyphs, ignoreLigatures, ignoreMarks, useMarkFilteringSet, markAttachmentType)
@ -3091,9 +3071,11 @@ class ValueFormatFlags:
def x_advance_device(self) -> bool: return (self.bytes & 0x0040)!=0 def x_advance_device(self) -> bool: return (self.bytes & 0x0040)!=0
def y_advance_device(self) -> bool: return (self.bytes & 0x0080)!=0 def y_advance_device(self) -> bool: return (self.bytes & 0x0080)!=0
def __post_init__(self):
assert self.bytes & 0xFF00 == 0, "Reserved"
def parse_value_format(f: BinaryIO) -> ValueFormatFlags: def parse_value_format(f: BinaryIO) -> ValueFormatFlags:
valueFormat = read_u16(f) valueFormat = read_u16(f)
assert valueFormat & 0xFF00 == 0, "Reserved"
return ValueFormatFlags(valueFormat) return ValueFormatFlags(valueFormat)
@ -3104,12 +3086,18 @@ class DeviceTable(Table):
deltaFormat: int deltaFormat: int
deltaValue: List[int] deltaValue: List[int]
def __post_init__(self):
assert self.deltaFormat & 0x7ffc == 0, "Reserved"
@dataclass @dataclass
class VariationIndexTable(Table): class VariationIndexTable(Table):
deltaSetOuterIndex: int deltaSetOuterIndex: int
deltaSetInnerIndex: int deltaSetInnerIndex: int
deltaFormat: int deltaFormat: int
def __post_init__(self):
assert self.deltaFormat == 0x8000
DeviceTable_ = DeviceTable | VariationIndexTable DeviceTable_ = DeviceTable | VariationIndexTable
def parse_device_table(f: BinaryIO) -> DeviceTable_: def parse_device_table(f: BinaryIO) -> DeviceTable_:
@ -3117,7 +3105,6 @@ def parse_device_table(f: BinaryIO) -> DeviceTable_:
second = read_u16(f) second = read_u16(f)
deltaFormat = read_u16(f) deltaFormat = read_u16(f)
assert deltaFormat & 0x7ffc == 0, "Reserved"
assert deltaFormat in [1, 2, 3, 0x8000], f"Invalid deltaFormat: {deltaFormat}" assert deltaFormat in [1, 2, 3, 0x8000], f"Invalid deltaFormat: {deltaFormat}"
match deltaFormat: match deltaFormat:
@ -3476,10 +3463,10 @@ def parse_value_record(f: BinaryIO, start_tell: int, valueFormat: ValueFormatFla
xAdvance = read_i16(f) if valueFormat.x_advance() else None
yAdvance = read_i16(f) if valueFormat.y_advance() else None
xPlaDeviceOffset = read_u16(f) if valueFormat.x_placement_device() else None
yPlaDeviceOffset = read_u16(f) if valueFormat.y_placement_device() else None
xAdvDeviceOffset = read_u16(f) if valueFormat.x_advance_device() else None
yAdvDeviceOffset = read_u16(f) if valueFormat.y_advance_device() else None
xPlaDeviceOffset = read_u16(f) if valueFormat.x_placement_device() else 0
yPlaDeviceOffset = read_u16(f) if valueFormat.y_placement_device() else 0
xAdvDeviceOffset = read_u16(f) if valueFormat.x_advance_device() else 0
yAdvDeviceOffset = read_u16(f) if valueFormat.y_advance_device() else 0
with SaveTell(f):
xPlaDevice = parse_at_optional_offset(f, start_tell, xPlaDeviceOffset, parse_device_table)
yPlaDevice = parse_at_optional_offset(f, start_tell, yPlaDeviceOffset, parse_device_table)
@ -4222,7 +4209,7 @@ def parse_GPOS_table(f: BinaryIO) -> GPOSTable:
featureVariationsOffset = read_u32(f)
with SaveTell(f):
featureVariations = parse_at_optional_offset(f, start_tell, featureVariationsOffset, lambda f: parse_feature_variations_table(f, featureList))
featureVariations = parse_at_offset(f, start_tell, featureVariationsOffset, lambda f: parse_feature_variations_table(f, featureList))
if minorVersion == 1:
return GPOSTable_Ver_1_1(majorVersion, minorVersion, scriptList, featureList, lookupList, featureVariations)
@ -4616,7 +4603,7 @@ def parse_GSUB_lookup_subtable(f: BinaryIO, lookupType: GSUBLookupType) -> GSUBL
ligatureSetOffsets = [read_u16(f) for _ in range(ligatureSetCount)]
with SaveTell(f):
coverage = parse_at_offset(f, start_tell, coverageOffset, parse_coverage_table)
ligatureSets = parse_at_offsets(f, start_tell, ligatureSetOffsets, lambda f: parse_set_table(f, parse_ligature_table))
ligatureSets = parse_at_offsets(f, start_tell, ligatureSetOffsets, parse_ligature_set_table)
return LigatureSubstSubtable_Format_1(substFormat, coverage, ligatureSetCount, ligatureSets)
case _:
@ -4764,7 +4751,7 @@ def parse_GSUB_lookup_subtable(f: BinaryIO, lookupType: GSUBLookupType) -> GSUBL
assert False, lookupType
@dataclass
class GSUBTable(Table, ABD): # TODO: Maybe make a generic class for this, because this is the same as GPOSTable
majorVersion: int
minorVersion: int
# See: https://github.com/MicrosoftDocs/typography-issues/issues/79
@ -4869,12 +4856,15 @@ class SimpleGlyphFlag:
def y_is_same_or_positive_short(self) -> bool: return (self.byte & 0x20)!=0
def overlap_simple(self) -> bool: return (self.byte & 0x40)!=0
def __str__(self) -> str:
return '0x'+hex(self.byte)[2:].rjust(2, '0')
def __repr__(self) -> str:
return repr_hex(self.byte, 2)
def __post_init__(self):
assert self.byte & 0x80 == 0, self
def parse_simple_glyph_flag(f: BinaryIO) -> SimpleGlyphFlag:
flags = read_u8(f)
assert flags & 0x80 == 0, "reserved"
return SimpleGlyphFlag(flags)
@dataclass
@ -4887,27 +4877,30 @@ class SimpleGlyph(Glyph):
@dataclass
class CompoundGlyphFlag:
byte: int
def arg_1_and_2_are_words(self) -> bool: return (self.byte & 0x0001)!=0
def args_are_xy_values(self) -> bool: return (self.byte & 0x0002)!=0
def round_xy_to_grid(self) -> bool: return (self.byte & 0x0004)!=0
def we_have_a_scale(self) -> bool: return (self.byte & 0x0008)!=0
def more_components(self) -> bool: return (self.byte & 0x0020)!=0
def we_have_an_x_and_y_scale(self) -> bool: return (self.byte & 0x0040)!=0
def we_have_a_two_by_two(self) -> bool: return (self.byte & 0x0080)!=0
def we_have_instructions(self) -> bool: return (self.byte & 0x0100)!=0
def use_my_metrics(self) -> bool: return (self.byte & 0x0200)!=0
def overlap_compound(self) -> bool: return (self.byte & 0x0400)!=0
def scaled_component_offset(self) -> bool: return (self.byte & 0x0800)!=0
def unscaled_component_offset(self) -> bool: return (self.byte & 0x1000)!=0
bytes: int
def arg_1_and_2_are_words(self) -> bool: return (self.bytes & 0x0001)!=0
def args_are_xy_values(self) -> bool: return (self.bytes & 0x0002)!=0
def round_xy_to_grid(self) -> bool: return (self.bytes & 0x0004)!=0
def we_have_a_scale(self) -> bool: return (self.bytes & 0x0008)!=0
def more_components(self) -> bool: return (self.bytes & 0x0020)!=0
def we_have_an_x_and_y_scale(self) -> bool: return (self.bytes & 0x0040)!=0
def we_have_a_two_by_two(self) -> bool: return (self.bytes & 0x0080)!=0
def we_have_instructions(self) -> bool: return (self.bytes & 0x0100)!=0
def use_my_metrics(self) -> bool: return (self.bytes & 0x0200)!=0
def overlap_compound(self) -> bool: return (self.bytes & 0x0400)!=0
def scaled_component_offset(self) -> bool: return (self.bytes & 0x0800)!=0
def unscaled_component_offset(self) -> bool: return (self.bytes & 0x1000)!=0
def __repr__(self) -> str:
return '0x'+hex(self.byte)[2:].rjust(4, '0')
return repr_hex(self.bytes, 4)
def __post_init__(self):
assert self.bytes & 0xE010 == 0, "Reserved"
def parse_compound_glyph_flag(f: BinaryIO) -> CompoundGlyphFlag:
flags = read_u16(f)
assert flags & 0xE010 == 0, "reserved"
return CompoundGlyphFlag(flags)
@dataclass
@ -4977,7 +4970,6 @@ def parse_simple_glyph_flags(f: BinaryIO, total_points: int) -> Tuple[List[Simpl
Returns the logical list (i.e., expanding repeating flags), as well as the total xCoordinates and yCoordinates lengths Returns the logical list (i.e., expanding repeating flags), as well as the total xCoordinates and yCoordinates lengths
""" """
# Mostly just ported from https://github.com/RazrFalcon/ttf-parser/blob/2192d3e496201ab2ff39d5437f88d62e70083f0e/src/tables/glyf.rs#L521 # Mostly just ported from https://github.com/RazrFalcon/ttf-parser/blob/2192d3e496201ab2ff39d5437f88d62e70083f0e/src/tables/glyf.rs#L521
x_len = 0 x_len = 0
y_len = 0 y_len = 0
@ -5033,8 +5025,9 @@ def parse_glyph(f: BinaryIO, length: int) -> Glyph:
case False, False: yDelta = read_i16(yCoordinates) case False, False: yDelta = read_i16(yCoordinates)
coordinates.append((coordinates[-1][0]+xDelta, coordinates[-1][1]+yDelta)) coordinates.append((coordinates[-1][0]+xDelta, coordinates[-1][1]+yDelta))
# TODO: Do I need to read the padding bytes?
assert length-4<f.tell()-start_tell<=length # there might be padding bytes assert length-4<f.tell()-start_tell<=length # there might be padding bytes
assert is_at_end(xCoordinates) and is_at_end(yCoordinates), (len_to_end(xCoordinates), len_to_end(yCoordinates))
return SimpleGlyph(numberOfContours, xMin, yMin, xMax, yMax, endPtsOfContours, instructionLength, instructions, flags, coordinates[1:]) return SimpleGlyph(numberOfContours, xMin, yMin, xMax, yMax, endPtsOfContours, instructionLength, instructions, flags, coordinates[1:])
else: else:
components = [parse_component(f)] components = [parse_component(f)]
@ -5091,12 +5084,14 @@ class GaspRange:
rangeMaxPPEM: int rangeMaxPPEM: int
rangeGaspBehavior: int rangeGaspBehavior: int
def __post_init__(self):
assert self.rangeGaspBehavior & 0xFFF0 == 0, "Reserved"
def parse_gasp_range(f: BinaryIO, version: int) -> GaspRange: def parse_gasp_range(f: BinaryIO, version: int) -> GaspRange:
rangeMaxPPEM = read_u16(f) rangeMaxPPEM = read_u16(f)
rangeGaspBehavior = read_u16(f) rangeGaspBehavior = read_u16(f)
if version == 0: assert rangeGaspBehavior & 0x000C == 0, "Only supported in version 1" if version == 0: assert rangeGaspBehavior & 0x000C == 0, "Only supported in version 1"
assert rangeGaspBehavior & 0xFFF0 == 0, "Reserved"
return GaspRange(rangeMaxPPEM, rangeGaspBehavior) return GaspRange(rangeMaxPPEM, rangeGaspBehavior)
@ -5120,7 +5115,7 @@ class TrueTypeOutlines:
control_value_table: Optional[CvtTable]
font_program: Optional[FpgmTable]
glyph_data: GlyfTable
index_to_location: LocaTable # Parsing only
CV_program: Optional[PrepTable]
grid_fitting_and_scan_conversion: Optional[GaspTable]
@ -5152,9 +5147,11 @@ class TupleIndex:
def tuple_index(self) -> int: return self.bytes & 0x0fff def tuple_index(self) -> int: return self.bytes & 0x0fff
def __post_init__(self):
assert self.bytes & 0x1000 == 0, "Reserved"
def parse_tuple_index(f: BinaryIO) -> TupleIndex: def parse_tuple_index(f: BinaryIO) -> TupleIndex:
tupleIndex = read_u16(f) tupleIndex = read_u16(f)
assert tupleIndex & 0x1000 == 0, "Reserved"
return TupleIndex(tupleIndex) return TupleIndex(tupleIndex)
@ -5269,7 +5266,7 @@ def parse_cvar_per_tuple_variation_data(f: BinaryIO, tupleVariationHeader: Tuple
assert f.tell()-start_tell == tupleVariationHeader.variationDataSize, (f.tell()-start_tell, tupleVariationHeader)
return CvarPerTupleVariationData(private_point_numbers, CVT_value_deltas)
def parse_tuple_variation_count(f: BinaryIO) -> Tuple[bool, int]: # TODO: Maybe do this like the other flags?
SHARED_POINT_NUMBERS = 0x8000
COUNT_MASK = 0x0fff
@ -5498,6 +5495,9 @@ class VariationAxisRecord:
flags: int flags: int
axisNameID: int axisNameID: int
def __post_init__(self):
assert self.flags & 0xfffe == 0, "Reserved"
def parse_variation_axis_record(f: BinaryIO, axisSize: int) -> VariationAxisRecord: def parse_variation_axis_record(f: BinaryIO, axisSize: int) -> VariationAxisRecord:
start_tell = f.tell() start_tell = f.tell()
@ -5506,7 +5506,6 @@ def parse_variation_axis_record(f: BinaryIO, axisSize: int) -> VariationAxisReco
defaultValue = read_fixed(f) defaultValue = read_fixed(f)
maxValue = read_fixed(f) maxValue = read_fixed(f)
flags = read_u16(f) flags = read_u16(f)
assert flags & 0xfffe == 0, "Reserved"
axisNameID = read_u16(f) axisNameID = read_u16(f)
assert f.tell() - start_tell == axisSize, (f.tell() - start_tell, axisSize) assert f.tell() - start_tell == axisSize, (f.tell() - start_tell, axisSize)
@ -5608,6 +5607,9 @@ class GvarTable(Table, ABD):
flags: int flags: int
glyphVariationData: List[Optional[GlyphVariationDataTable]] glyphVariationData: List[Optional[GlyphVariationDataTable]]
def __post_init__(self):
assert self.flags & 0xfffe == 0, "Reserved?" # Maybe not?
@dataclass @dataclass
class GvarTable_Ver_1_0(GvarTable): pass class GvarTable_Ver_1_0(GvarTable): pass
@ -5623,8 +5625,7 @@ def parse_gvar_table(f: BinaryIO, glyph_data: GlyfTable) -> GvarTable:
sharedTupleCount = read_u16(f)
sharedTuplesOffset = read_u32(f)
glyphCount = read_u16(f)
flags = read_u16(f) # TODO: Maybe make a method parse_gvar_flags
assert flags & 0xfffe == 0, f"Reserved?" # Maybe not?
long = (flags & 0x0001) != 0
glyphVariationDataArrayOffset = read_u32(f)
glyphVariationDataOffsets = [read_u16(f)*2 for _ in range(glyphCount+1)] if not long else [read_u32(f) for _ in range(glyphCount+1)] # TODO: Some of these point to the same GlyphVariationDataTables. Maybe don't reparse each one if it's the same?
@ -5652,12 +5653,14 @@ class DeltaSetIndexMapTable(Table):
mapCount: int mapCount: int
mapData: List[Tuple[int, int]] # (outerIndex, innerIndex) mapData: List[Tuple[int, int]] # (outerIndex, innerIndex)
def __post_init__(self):
assert self.entryFormat & 0xffc0 == 0, "Reserved"
def parse_delta_set_index_map_table(f: BinaryIO) -> DeltaSetIndexMapTable:
INNER_INDEX_BIT_COUNT_MASK = 0x000f
MAP_ENTRY_SIZE_MASK = 0x0030
entryFormat = read_u16(f) # TODO: Maybe make all flags like this? If something is reserved, it could just be future things
assert entryFormat & 0xffc0 == 0, "Reserved"
map_entry_size = ((entryFormat & MAP_ENTRY_SIZE_MASK) >> 4) + 1
mapCount = read_u16(f)
@ -5917,78 +5920,80 @@ def possibly_parse_at_table_directory_entry_with_length(f: BinaryIO, table: Opti
def parse_open_font_file(f: BinaryIO) -> OpenFontFile: def parse_open_font_file(f: BinaryIO) -> OpenFontFile:
font_directory = parse_font_directory(f) font_directory = parse_font_directory(f)
font_header = parse_at_table_directory_entry(f, font_directory.by_table.head, parse_head_table)
font_header = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Head), parse_head_table)
horizontal_header = parse_at_table_directory_entry(f, font_directory.by_table.hhea, parse_hhea_table)
horizontal_header = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Hhea), parse_hhea_table)
maximum_profile = parse_at_table_directory_entry(f, font_directory.by_table.maxp, parse_maxp_table)
maximum_profile = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Maxp), parse_maxp_table)
horizontal_metrics = parse_at_table_directory_entry(f, font_directory.by_table.hmtx, lambda f: parse_hmtx_table(f, horizontal_header.numberOfHMetrics, maximum_profile.numGlyphs))
horizontal_metrics = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Hmtx), lambda f: parse_hmtx_table(f, horizontal_header.numberOfHMetrics, maximum_profile.numGlyphs))
naming_table = parse_at_table_directory_entry_with_length(f, font_directory.by_table.name, parse_name_table)
naming_table = parse_at_table_directory_entry_with_length(f, font_directory.get_entry(TableTag.Name), parse_name_table)
OS2_and_Windows_specific_metrics = parse_at_table_directory_entry(f, font_directory.by_table.os2, parse_OS2_table)
OS2_and_Windows_specific_metrics = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.OS2), parse_OS2_table)
character_to_glyph_mapping = parse_at_table_directory_entry(f, font_directory.by_table.cmap, parse_cmap_table)
character_to_glyph_mapping = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Cmap), parse_cmap_table)
PostScript_information = parse_at_table_directory_entry_with_length(f, font_directory.by_table.post, parse_post_table)
PostScript_information = parse_at_table_directory_entry_with_length(f, font_directory.get_entry(TableTag.Post), parse_post_table)
# optional
digital_signature = possibly_parse_at_table_directory_entry(f, font_directory.by_table.DSIG, parse_DSIG_table)
digital_signature = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.DSIG), parse_DSIG_table)
horizontal_device_metrics = possibly_parse_at_table_directory_entry(f, font_directory.by_table.hdmx, lambda f: parse_hdmx_table(f, maximum_profile.numGlyphs))
horizontal_device_metrics = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Hdmx), lambda f: parse_hdmx_table(f, maximum_profile.numGlyphs))
kerning = possibly_parse_at_table_directory_entry(f, font_directory.by_table.Kern, parse_Kern_table)
kerning = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Kern), parse_Kern_table)
linear_threshold_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.LTSH, parse_LTSH_table)
linear_threshold_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.LTSH), parse_LTSH_table)
PCL5_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.PCLT, parse_PCLT_table)
PCL5_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.PCLT), parse_PCLT_table)
vertical_device_metrics = possibly_parse_at_table_directory_entry(f, font_directory.by_table.VDMX, parse_VDMX_table)
vertical_device_metrics = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.VDMX), parse_VDMX_table)
vertical_metrics_header = possibly_parse_at_table_directory_entry(f, font_directory.by_table.vhea, parse_vhea_table)
vertical_metrics_header = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Vhea), parse_vhea_table)
if font_directory.by_table.vmtx:
if font_directory.get_entry(TableTag.Vmtx):
assert vertical_metrics_header, f"Must have vertical_metrics_header to parse vertical_metrics"
vertical_metrics = parse_at_table_directory_entry(f, font_directory.by_table.vmtx, lambda f: parse_vmtx_table(f, vertical_metrics_header.numOfLongVerMetris, maximum_profile.numGlyphs))
vertical_metrics = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Vmtx), lambda f: parse_vmtx_table(f, vertical_metrics_header.numOfLongVerMetris, maximum_profile.numGlyphs))
else:
vertical_metrics = None
colour_table = possibly_parse_at_table_directory_entry(f, font_directory.by_table.COLR, parse_COLR_table)
colour_table = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.COLR), parse_COLR_table)
colour_palette_table = possibly_parse_at_table_directory_entry(f, font_directory.by_table.CPAL, parse_CPAL_table)
colour_palette_table = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.CPAL), parse_CPAL_table)
# TTF / CFF
match font_directory.offset_table.sfntVersion:
case 0x00010000: # TTF
index_to_location = parse_at_table_directory_entry(f, font_directory.by_table.loca, lambda f: parse_loca_table(f, font_header.indexToLocFormat, maximum_profile.numGlyphs))
index_to_location = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Loca), lambda f: parse_loca_table(f, font_header.indexToLocFormat, maximum_profile.numGlyphs))
glyph_data = parse_at_table_directory_entry(f, font_directory.by_table.glyf, lambda f: parse_glyf_table(f, index_to_location.offsets))
glyph_data = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Glyf), lambda f: parse_glyf_table(f, index_to_location.offsets))
control_value_table = possibly_parse_at_table_directory_entry_with_length(f, font_directory.by_table.cvt, parse_cvt_table)
control_value_table = possibly_parse_at_table_directory_entry_with_length(f, font_directory.get_entry(TableTag.Cvt), parse_cvt_table)
font_program = possibly_parse_at_table_directory_entry_with_length(f, font_directory.by_table.fpgm, parse_fpgm_table)
font_program = possibly_parse_at_table_directory_entry_with_length(f, font_directory.get_entry(TableTag.Fpgm), parse_fpgm_table)
CV_program = possibly_parse_at_table_directory_entry_with_length(f, font_directory.by_table.prep, parse_prep_table)
CV_program = possibly_parse_at_table_directory_entry_with_length(f, font_directory.get_entry(TableTag.Prep), parse_prep_table)
grid_fitting_and_scan_conversion = possibly_parse_at_table_directory_entry(f, font_directory.by_table.gasp, parse_gasp_table)
grid_fitting_and_scan_conversion = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Gasp), parse_gasp_table)
outlines = TrueTypeOutlines(control_value_table, font_program, glyph_data, index_to_location, CV_program, grid_fitting_and_scan_conversion)
case _:
assert False, f"Unimplemented: sfntVersion: {hex(font_directory.offset_table.sfntVersion)}"
# SVG
scalar_vector_graphics = possibly_parse_at_table_directory_entry(f, font_directory.by_table.svg, parse_svg_table)
scalar_vector_graphics = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Svg), parse_svg_table)
# Advanced
baseline_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.BASE, parse_BASE_table)
baseline_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.BASE), parse_BASE_table)
glyph_definition_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.GDEF, parse_GDEF_table)
glyph_definition_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.GDEF), parse_GDEF_table)
glyph_positioning_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.GPOS, parse_GPOS_table)
glyph_positioning_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.GPOS), parse_GPOS_table)
glyph_substitution_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.GSUB, parse_GSUB_table)
glyph_substitution_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.GSUB), parse_GSUB_table)
justification_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.JSTF, parse_JSTF_table)
justification_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.JSTF), parse_JSTF_table)
math_layout_data = possibly_parse_at_table_directory_entry(f, font_directory.by_table.MATH, parse_MATH_table)
math_layout_data = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.MATH), parse_MATH_table)
advanced_features = AdvancedFeatures(baseline_data, glyph_definition_data, glyph_positioning_data, glyph_substitution_data, justification_data, math_layout_data)
font_variations: Optional[FontVariations] = None
if font_directory.by_table.fvar:
if font_directory.has_entry(TableTag.Fvar):
font_variations_ = parse_at_table_directory_entry(f, font_directory.by_table.fvar, parse_fvar_table)
font_variations_ = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Fvar), parse_fvar_table)
style_attributes = parse_at_table_directory_entry(f, font_directory.by_table.STAT, parse_STAT_table)
style_attributes = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.STAT), parse_STAT_table)
axis_variations = possibly_parse_at_table_directory_entry(f, font_directory.by_table.avar, parse_avar_table)
axis_variations = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Avar), parse_avar_table)
if font_directory.by_table.cvar:
if font_directory.has_entry(TableTag.Cvar):
assert control_value_table, f"Must have control_value_table in order to have CVT_variations!"
CVT_variations = parse_at_table_directory_entry(f, font_directory.by_table.cvar, lambda f: parse_cvar_table(f, font_variations_.axisCount, control_value_table))
assert isinstance(outlines, TrueTypeOutlines)
cvt = outlines.control_value_table
assert cvt, f"Must have control_value_table in order to have CVT_variations!"
CVT_variations = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Cvar), lambda f: parse_cvar_table(f, font_variations_.axisCount, cvt))
else:
CVT_variations = None
if font_directory.by_table.gvar:
if font_directory.has_entry(TableTag.Gvar):
assert isinstance(outlines, TrueTypeOutlines)
glyph_variations = parse_at_table_directory_entry(f, font_directory.by_table.gvar, lambda f: parse_gvar_table(f, outlines.glyph_data))
glyph_variations = parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.Gvar), lambda f: parse_gvar_table(f, outlines.glyph_data))
else:
glyph_variations = None
horizontal_metrics_variations = possibly_parse_at_table_directory_entry(f, font_directory.by_table.HVAR, parse_HVAR_table)
horizontal_metrics_variations = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.HVAR), parse_HVAR_table)
metrics_variations = possibly_parse_at_table_directory_entry(f, font_directory.by_table.MVAR, parse_MVAR_table) metrics_variations = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.MVAR), parse_MVAR_table)
vertical_metrics_variations = possibly_parse_at_table_directory_entry(f, font_directory.by_table.VVAR, parse_VVAR_table) vertical_metrics_variations = possibly_parse_at_table_directory_entry(f, font_directory.get_entry(TableTag.VVAR), parse_VVAR_table)
font_variations = FontVariations(axis_variations, CVT_variations, font_variations_, glyph_variations, horizontal_metrics_variations, metrics_variations, style_attributes, vertical_metrics_variations) font_variations = FontVariations(axis_variations, CVT_variations, font_variations_, glyph_variations, horizontal_metrics_variations, metrics_variations, style_attributes, vertical_metrics_variations)
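Note: the changes in this file replace direct attribute access (font_directory.by_table.vmtx and friends) with font_directory.has_entry(TableTag.X) / font_directory.get_entry(TableTag.X). The sketch below shows one plausible shape for that directory API, purely for orientation; the real TableTag and directory classes are defined elsewhere in this repository, and the member values and entry fields here are assumptions.

# Illustrative sketch only: a minimal directory API consistent with the calls above.
# TableDirectoryEntry fields and TableTag values are assumptions, not the repo's definitions.
from dataclasses import dataclass
from enum import Enum
from typing import Dict

@dataclass
class TableDirectoryEntry:  # hypothetical minimal record
    offset: int
    length: int

class TableTag(Enum):  # assumed: one member per table, value = the table's 4-byte tag
    Vmtx = 'vmtx'
    Fvar = 'fvar'
    Cvar = 'cvar'
    Gvar = 'gvar'

class FontDirectorySketch:
    def __init__(self, entries: Dict[TableTag, TableDirectoryEntry]):
        self.entries = entries
    def has_entry(self, tag: TableTag) -> bool:
        return tag in self.entries
    def get_entry(self, tag: TableTag) -> TableDirectoryEntry:
        assert self.has_entry(tag), f"Missing required table: {tag.value}"
        return self.entries[tag]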

View File

@@ -1,4 +1,6 @@
-class ABD:
+from abc import ABC
+
+class ABD(ABC):
    """
    #### Abstract Base Dataclass
@@ -9,11 +11,14 @@ class ABD:
        msg = f"Cannot instantiate an Abstract Base Dataclass: {self.__class__.__name__}"
        raise TypeError(msg)

+# TODO: Make a subclass of EnumMeta to do this
class ABE:
    """
    #### Abstract Base Enum
-    This is for classes that will have an Enum subclass them
+    This is for classes that will have an Enum subclass them.
+    Do not implement an __init__ method for the class directly inheriting from ABE
    """
    def __init__(self, *args, **kwargs):
        if ABE in self.__class__.__bases__ or ABE == self:
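For context, the docstrings above describe a two-step pattern: a concrete dataclass subclasses a direct ABD child, and an Enum subclasses a direct ABE child. A hedged sketch of that pattern follows; the module name and every Example* name are invented for illustration and are not part of this commit.

# Hedged usage sketch for the ABD / ABE pattern described above.
from dataclasses import dataclass
from enum import Enum

from abstract import ABD, ABE  # assumed import path; the real module name is not shown here

class ExampleTable(ABD):            # direct ABD child: intended to stay abstract
    pass

@dataclass
class ExampleHead(ExampleTable):    # concrete dataclass one level down
    version: float

class ExampleID(ABE):               # direct ABE child: no __init__ defined here
    pass

class ExamplePredefinedID(ExampleID, Enum):  # the Enum that finally subclasses it
    First = 0
    Second = 1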

View File

@@ -1,9 +1,23 @@
-from typing import BinaryIO
+from io import SEEK_END
+from typing import BinaryIO, Callable, Literal, TypeVar

-ENDIANNESS = 'big'
+def len_to_end(f: BinaryIO) -> int:
+    curr_tell = f.tell()
+    f.seek(0, SEEK_END)
+    end_tell = f.tell()
+    f.seek(curr_tell)
+    return end_tell - curr_tell
+
+def is_at_end(f: BinaryIO) -> bool:
+    return len_to_end(f) == 0
+
+ENDIANNESS: Literal['little', 'big'] = 'big'

-def read_int(f: BinaryIO, number: int, signed:bool=False) -> int: return int.from_bytes(f.read(number), ENDIANNESS, signed=signed)
-def write_int(f: BinaryIO, value: int, number: int, signed:bool=False) -> int: return f.write(value.to_bytes(number, ENDIANNESS, signed=signed))
+def read_int_from_bytes(s: bytes, *, signed:bool=False) -> int: return int.from_bytes(s, ENDIANNESS, signed=signed)
+def bytes_from_int(value: int, number: int, *, signed:bool=False) -> bytes: return value.to_bytes(number, ENDIANNESS, signed=signed)
+
+def read_int(f: BinaryIO, number: int, *, signed:bool=False) -> int: return read_int_from_bytes(f.read(number), signed=signed)
+def write_int(f: BinaryIO, value: int, number: int, signed:bool=False) -> int: return f.write(bytes_from_int(value, number, signed=signed))

def read_u64(f: BinaryIO) -> int: return read_int(f, 8)
def read_u32(f: BinaryIO) -> int: return read_int(f, 4)
@@ -11,7 +25,7 @@ def read_u24(f: BinaryIO) -> int: return read_int(f, 3)
def read_u16(f: BinaryIO) -> int: return read_int(f, 2)
def read_u8(f: BinaryIO) -> int: return read_int(f, 1)

-def write_u16(f: BinaryIO, value: int) -> int: return f.write(value)
+def write_u16(f: BinaryIO, value: int) -> int: return write_int(f, value, 2)

def read_i32(f: BinaryIO) -> int: return read_int(f, 4, signed=True)
def read_i16(f: BinaryIO) -> int: return read_int(f, 2, signed=True)
@@ -23,9 +37,12 @@ def read_ascii(f: BinaryIO, number: int) -> str: return f.read(number).decode(en
def read_fixed_point(f: BinaryIO, preradix_bits: int, postradix_bits:int, *, signed:bool=True) -> float:
    assert (preradix_bits+postradix_bits)%8 == 0
    raw = read_int(f, (preradix_bits+postradix_bits)//8, signed=signed)
-    return raw/(1<<(postradix_bits))
+    return raw/(1<<postradix_bits)

def read_pascal_string(f: BinaryIO) -> str:
-    string_size = read_int(f, 1)
+    string_size = read_u8(f)
    pascal_string = read_ascii(f, string_size)
    return pascal_string
+
+T = TypeVar('T')
+Parser = Callable[[BinaryIO], T]
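Because these helpers are plain functions over a BinaryIO, they are easy to exercise against an in-memory buffer. A small sketch follows, assuming this file is the io_utils module imported elsewhere in the repository; the byte values are made up for the example.

# Quick sketch exercising the helpers above on an in-memory stream.
from io import BytesIO

from io_utils import is_at_end, read_fixed_point, read_pascal_string  # module name assumed

buf = BytesIO(
    bytes([0x00, 0x01, 0x80, 0x00])   # 0x00018000 -> 1.5 as a 16.16 fixed-point value
    + bytes([0x03]) + b'abc'          # length-prefixed (Pascal) string "abc"
)

print(read_fixed_point(buf, 16, 16))  # 1.5
print(read_pascal_string(buf))        # abc
print(is_at_end(buf))                 # True: the buffer is exhausted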

View File

@@ -1,9 +1,9 @@
#!/usr/bin/env python3
import os
import sys

-assert len(sys.argv) == 2, "usage: python3 test.py <test>"
+assert len(sys.argv) >= 2, "usage: python3 test.py <test> [OPTIONS]"

-from OpenFont import FontSpecificNameID, NameID, NameTable_Format_0, OpenFontFile, PredefinedNameID, TrueTypeOutlines, open_font_file, write_font_file
+from OpenFont import FontSpecificNameID, NameID, NameTable_Format_0, OpenFontFile, PredefinedNameID, TrueTypeOutlines, open_font_file

def search_names(font: OpenFontFile, nameID: NameID) -> str:
    assert isinstance(font.naming_table, NameTable_Format_0)
@@ -15,7 +15,7 @@ def search_names(font: OpenFontFile, nameID: NameID) -> str:
    assert False, f"Name not found: {nameID}"

-_, test = sys.argv
+_, test, *options = sys.argv

match test:
    case "names":
@@ -30,11 +30,6 @@ match test:
            axis_names = [search_names(font, FontSpecificNameID(axis.axisNameID)) for axis in font.font_variations.font_variations.axes]
            num_instances = font.font_variations.font_variations.instanceCount
            print(f"\tAxes: [{', '.join(axis_names)}] ({num_instances} instances)")
-    case "rewrite":
-        def test_font(font: OpenFontFile):
-            PATH = "out.ttf"
-            write_font_file(font, PATH)
-            open_font_file(PATH)
    case _:
        assert False, f"Invalid test: '{test}'"
@@ -44,13 +39,17 @@ if not os.path.exists(COMPLETED_PATH):
with open(COMPLETED_PATH, "r") as f: completed = f.read().split('\n')

def do_font(file: str):
+    file = file.strip()
    if file in completed: return
    try:
        font = open_font_file(file)
        test_font(font)
    except AssertionError as err:
-        err.add_note(f"Failed: {file}")
-        raise err
+        if '--raise' in options:
+            err.add_note(f"Failed: {file}")
+            raise err
+        print(f"{file}{':' if '--no-colon' not in options else ''} {err}")
+        return
    with open(COMPLETED_PATH, 'a') as f: f.write(file+'\n')
    completed.append(file)
@@ -58,5 +57,3 @@ assert not sys.stdin.isatty(), f"Do not run this program directly, instead pipe
for line in sys.stdin:
    file = line.rstrip('\n')
    do_font(file)
-
-print("Done!")