Compare commits

No commits in common. "acacae70ff1f8cde967e64f738fdf14f75e5e7d4" and "8cdba7112af6b5b3fd62cb182b197669a4151800" have entirely different histories.

19 changed files with 0 additions and 6558 deletions

.gitignore vendored

@@ -1,3 +0,0 @@
**.DS_Store
**/__pycache__
**/.mypy_cache

@@ -1,3 +0,0 @@
examples
mypy.ini
**/*.ttf

File diff suppressed because it is too large

@@ -1,21 +0,0 @@
class ABD:
    """
    #### Abstract Base Dataclass
    This will only work on dataclasses
    """
    def __post_init__(self, *args, **kwargs):
        if ABD in self.__class__.__bases__ or ABD == self.__class__:
            msg = f"Cannot instantiate an Abstract Base Dataclass: {self.__class__.__name__}"
            raise TypeError(msg)


class ABE:
    """
    #### Abstract Base Enum
    This is for classes that will have an Enum subclass them
    """
    def __init__(self, *args, **kwargs):
        if ABE in self.__class__.__bases__ or ABE == self.__class__:
            msg = f"Cannot instantiate an Abstract Base Enum: {self.__class__.__name__}"
            raise TypeError(msg)
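
Note that the `__post_init__` guard fires both for ABD itself and for its *direct* subclasses, so only classes at least two levels below ABD are instantiable, and only dataclasses are covered, since only dataclass-generated `__init__` methods call `__post_init__`. A minimal usage sketch (the Outline/TrueTypeOutline names are hypothetical, not taken from this repository):

from dataclasses import dataclass

@dataclass
class Outline(ABD):              # direct subclass of ABD: still "abstract"
    units_per_em: int

@dataclass
class TrueTypeOutline(Outline):  # two levels below ABD: concrete
    num_glyphs: int

TrueTypeOutline(units_per_em=1000, num_glyphs=42)  # fine
# Outline(units_per_em=1000) would raise:
# TypeError: Cannot instantiate an Abstract Base Dataclass: Outline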

@@ -1,91 +0,0 @@
-module(font_directory).
-export([parse_font_directory/1]).
-include("sizes.hrl").
-define(OFFSET_TABLE_SIZE, (?U32+4*?U16)).
parse_offset_table(
    <<SfntVersion:32,
      NumTables:16,
      SearchRange:16,
      EntrySelector:16,
      RangeShift:16>>) ->
    case lists:member(SfntVersion, [16#00010000]) of false -> throw({invalid_sfnt_version, SfntVersion}); true -> ok end,
    [
        {sfnt_version, SfntVersion},
        {num_tables, NumTables},
        {search_range, SearchRange},
        {entry_selector, EntrySelector},
        {range_shift, RangeShift}
    ].
%% 5.2 (Mandatory)
read_table_tag(<<"cmap">>) -> cmap;
read_table_tag(<<"head">>) -> head;
read_table_tag(<<"hhea">>) -> hhea;
read_table_tag(<<"hmtx">>) -> hmtx;
read_table_tag(<<"maxp">>) -> maxp;
read_table_tag(<<"name">>) -> name;
read_table_tag(<<"OS/2">>) -> os2 ;
read_table_tag(<<"post">>) -> post;
%% 5.3 (TTF)
read_table_tag(<<"cvt ">>) -> cvt ;
read_table_tag(<<"fpgm">>) -> fpgm;
read_table_tag(<<"glyf">>) -> glyf;
read_table_tag(<<"loca">>) -> loca;
read_table_tag(<<"prep">>) -> prep;
read_table_tag(<<"gasp">>) -> gasp;
%% TODO: 5.4 (CFF)
%% 5.5 (SVG)
read_table_tag(<<"SVG ">>) -> svg ;
%% 5.6 (Optional)
read_table_tag(<<"DSIG">>) -> dsig;
read_table_tag(<<"hdmx">>) -> hdmx;
read_table_tag(<<"Kern">>) -> kern;
read_table_tag(<<"LTSH">>) -> ltsh;
read_table_tag(<<"PCLT">>) -> pclt;
read_table_tag(<<"VDMX">>) -> vdmx;
read_table_tag(<<"vhea">>) -> vhea;
read_table_tag(<<"vmtx">>) -> vmtx;
read_table_tag(<<"COLR">>) -> colr;
read_table_tag(<<"CPAL">>) -> cpal;
%% 6.3 (Advanced Features)
read_table_tag(<<"BASE">>) -> base;
read_table_tag(<<"GDEF">>) -> gdef;
read_table_tag(<<"GPOS">>) -> gpos;
read_table_tag(<<"GSUB">>) -> gsub;
read_table_tag(<<"JSTF">>) -> jstf;
read_table_tag(<<"MATH">>) -> math;
%% 7.3 (Variable Fonts)
read_table_tag(<<"avar">>) -> avar;
read_table_tag(<<"cvar">>) -> cvar;
read_table_tag(<<"fvar">>) -> fvar;
read_table_tag(<<"gvar">>) -> gvar;
read_table_tag(<<"HVAR">>) -> hvar;
read_table_tag(<<"MVAR">>) -> mvar;
read_table_tag(<<"STAT">>) -> stat;
read_table_tag(<<"VVAR">>) -> vvar;
read_table_tag(<<TableTag:4/bytes>>) -> throw({invalid_table_tag, TableTag}).
-define(TABLE_DIRECTORY_SIZE(Count), ((?TAG_SIZE+3*?U32)*Count)).
parse_table_directory(
    <<TableTag:?TAG_SIZE/binary,
      CheckSum:32,
      Offset:32,
      Length:32,
      Tail/binary>>) ->
    [{read_table_tag(TableTag), [
        {check_sum, CheckSum},
        {offset, Offset},
        {length, Length}
    ]} | parse_table_directory(Tail)];
parse_table_directory(<<>>) -> [].
parse_font_directory(<<OffsetTable_:?OFFSET_TABLE_SIZE/binary, TableDirectoryAndRest/binary>>) ->
    OffsetTable = parse_offset_table(OffsetTable_),
    NumTables = proplists:get_value(num_tables, OffsetTable),
    <<TableDirectory_:?TABLE_DIRECTORY_SIZE(NumTables)/binary, _/binary>> = TableDirectoryAndRest,
    TableDirectory = parse_table_directory(TableDirectory_),
    [
        {offset_table, OffsetTable},
        {table_directory, TableDirectory}
    ].
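
The layout parsed here is the 12-byte offset table (a u32 sfnt version followed by four u16 fields) followed by one 16-byte record per table (a 4-byte tag plus three u32s), which is exactly what OFFSET_TABLE_SIZE and TABLE_DIRECTORY_SIZE encode. For readers more at home in Python, roughly the same parse can be sketched with the standard struct module (a sketch only; parse_font_directory_py is a hypothetical name):

import struct

def parse_font_directory_py(data: bytes) -> dict:
    # Offset table: uint32 sfntVersion + 4 * uint16 = 12 bytes, big-endian.
    sfnt_version, num_tables, search_range, entry_selector, range_shift = struct.unpack_from(">IHHHH", data, 0)
    if sfnt_version != 0x00010000:
        raise ValueError(f"invalid sfnt version: {sfnt_version:#010x}")
    tables = {}
    for i in range(num_tables):
        # Table directory record: 4-byte tag + checkSum + offset + length = 16 bytes.
        tag, check_sum, offset, length = struct.unpack_from(">4sIII", data, 12 + 16 * i)
        tables[tag.decode("ascii")] = {"check_sum": check_sum, "offset": offset, "length": length}
    return tables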

@@ -1,12 +0,0 @@
-module(read_utils).
-compile(nowarn_export_all).
-compile(export_all).
read_fixed(<<Int:16, Frac:16>>) -> Int + Frac / 65536.0.
read_fixed_version(<<Int:16, Frac:16>>) -> Int + Frac / 16#1000 / 10.
read_long_datetime(<<SecondsSince1904:64>>) ->
    SecondsTo1904 = calendar:datetime_to_gregorian_seconds({{1904,1,1}, {0,0,0}}),
    Seconds = SecondsTo1904+SecondsSince1904,
    calendar:gregorian_seconds_to_datetime(Seconds).
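
read_long_datetime interprets LONGDATETIME values: a 64-bit count of seconds since midnight, 1904-01-01 (the classic Macintosh epoch), while read_fixed simply divides the fractional 16 bits by 65536. For reference, a Python sketch of the datetime conversion (long_datetime is a hypothetical helper, not part of this repository):

from datetime import datetime, timedelta

def long_datetime(seconds_since_1904: int) -> datetime:
    # Same conversion as read_long_datetime/1: the OpenType epoch is 1904-01-01 00:00:00.
    return datetime(1904, 1, 1) + timedelta(seconds=seconds_since_1904)

# e.g. 24 hours after the epoch:
assert long_datetime(86400) == datetime(1904, 1, 2)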

@@ -1,15 +0,0 @@
-define(U8, 1).
-define(U16, 2).
-define(U24, 3).
-define(U32, 4).
-define(U64, 8).
-define(I8, 1).
-define(I16, 2).
-define(I24, 3).
-define(I32, 4).
-define(FIXED_SIZE, 4).
-define(DATETIME_SIZE, 8).
-define(TAG_SIZE, 4).

@@ -1,15 +0,0 @@
-module(table).
-export([parse_table/4]).
-import(head, [parse_head_table/1]).
-import(hhea, [parse_hhea_table/1]).
-import(maxp, [parse_maxp_table/1]).
-import(hmtx, [parse_hmtx_table/1]).
parse_table(head, Table, _, _) -> parse_head_table(Table);
parse_table(hhea, Table, _, _) -> parse_hhea_table(Table);
parse_table(maxp, Table, _, _) -> parse_maxp_table(Table);
parse_table(hmtx, Table, _, Info) ->
    [{number_of_h_metrics, NumberOfHMetrics}, {num_glyphs, NumGlyphs}] = Info,
    parse_hmtx_table(<<NumberOfHMetrics:16, NumGlyphs:16, Table/binary>>);
parse_table(TableTag, _, _, _) -> throw({unimplemented_table, TableTag}).

@@ -1,46 +0,0 @@
-module(head).
-export([parse_head_table/1]).
-include("sizes.hrl").
-import(read_utils, [read_fixed/1, read_long_datetime/1]).
parse_head_table(
    <<1:16, 0:16,
      FontRevision:?FIXED_SIZE/binary,
      CheckSumAdjustment:32,
      MagicNumber:32,
      Flags:16/bits,
      UnitsPerEm:16,
      Created:?DATETIME_SIZE/binary,
      Modified:?DATETIME_SIZE/binary,
      XMin:16/signed,
      YMin:16/signed,
      XMax:16/signed,
      YMax:16/signed,
      MacStyle:16/bits,
      LowestRecPPEM:16,
      FontDirectionHint:16/signed,
      IndexToLocFormat:16/signed,
      GlyphDataFormat:16/signed>>) ->
    case MagicNumber == 16#5F0F3CF5 of false -> throw({invalid_magic_number, MagicNumber}); true -> ok end,
    case lists:member(IndexToLocFormat, [0, 1]) of false -> throw({invalid_index_to_loc_format, IndexToLocFormat}); true -> ok end,
    case GlyphDataFormat == 0 of false -> throw({invalid_glyph_data_format, GlyphDataFormat}); true -> ok end,
    [
        {major_version, 1},
        {minor_version, 0},
        {font_revision, read_fixed(FontRevision)},
        {check_sum_adjustment, CheckSumAdjustment},
        {flags, Flags},
        {units_per_em, UnitsPerEm},
        {created, read_long_datetime(Created)},
        {modified, read_long_datetime(Modified)},
        {x_min, XMin},
        {y_min, YMin},
        {x_max, XMax},
        {y_max, YMax},
        {mac_style, MacStyle},
        {lowest_rec_ppem, LowestRecPPEM},
        {font_direction_hint, FontDirectionHint},
        {index_to_loc_format, IndexToLocFormat},
        {glyph_data_format, GlyphDataFormat}
    ].

@@ -1,37 +0,0 @@
-module(hhea).
-export([parse_hhea_table/1]).
-include("sizes.hrl").
-import(read_utils, [read_fixed/1, read_long_datetime/1]).
parse_hhea_table(
    <<1:16, 0:16,
      Ascender:16/signed,
      Descender:16/signed,
      LineGap:16/signed,
      AdvanceWidthMax:16,
      MinLeftSideBearing:16/signed,
      MinRightSideBearing:16/signed,
      XMaxExtent:16/signed,
      CaretSlopeRise:16/signed,
      CaretSlopeRun:16/signed,
      CaretOffset:16/signed,
      0:16/signed, 0:16/signed, 0:16/signed, 0:16/signed, %% reserved
      MetricDataFormat:16/signed, NumberOfHMetrics:16>>) ->
    case MetricDataFormat == 0 of false -> throw(invalid_metric_data_format); true -> ok end,
    [
        {major_version, 1},
        {minor_version, 0},
        {ascender, Ascender},
        {descender, Descender},
        {line_gap, LineGap},
        {advance_width_max, AdvanceWidthMax},
        {min_left_side_bearing, MinLeftSideBearing},
        {min_right_side_bearing, MinRightSideBearing},
        {x_max_extent, XMaxExtent},
        {caret_slope_rise, CaretSlopeRise},
        {caret_slope_run, CaretSlopeRun},
        {caret_offset, CaretOffset},
        {metric_data_format, MetricDataFormat},
        {number_of_h_metrics, NumberOfHMetrics}
    ].

@@ -1,26 +0,0 @@
-module(hmtx).
-export([parse_hmtx_table/1]).
-include("sizes.hrl").
-define(LONG_HOR_METRIC_SIZE, (?U16+?I16)).
parse_long_hor_metric(<<AdvanceWidth:16, Lsb:16/signed, Rest/binary>>) ->
    [[
        {advance_width, AdvanceWidth},
        {lsb, Lsb}
    ] | parse_long_hor_metric(Rest)];
parse_long_hor_metric(<<>>) -> [].
%% leftSideBearings are signed FWORDs (int16), like the Lsb field above
parse_left_side_bearing(<<LeftSideBearing:16/signed, Rest/binary>>) ->
    [LeftSideBearing | parse_left_side_bearing(Rest)];
parse_left_side_bearing(<<>>) -> [].
parse_hmtx_table(
    <<NumberOfHMetrics:16, NumGlyphs:16, %% prepended by table:parse_table/4 so the counts travel with the table data
      HMetrics:(NumberOfHMetrics*?LONG_HOR_METRIC_SIZE)/binary,
      LeftSideBearing:((NumGlyphs-NumberOfHMetrics)*?I16)/binary
    >>) -> [
        {h_metrics, parse_long_hor_metric(HMetrics)},
        {left_side_bearing, parse_left_side_bearing(LeftSideBearing)}
    ].
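
The split between h_metrics and left_side_bearing mirrors the hmtx layout: only the first numberOfHMetrics glyphs carry an (advanceWidth, lsb) pair, while the remaining numGlyphs - numberOfHMetrics glyphs keep only a left side bearing and reuse the last advance width. A hedged Python sketch of the lookup (advance_width and the list-of-dicts shape are hypothetical, not this repository's API):

def advance_width(h_metrics: list, glyph_id: int) -> int:
    # h_metrics: [{"advance_width": ..., "lsb": ...}, ...] for the first numberOfHMetrics glyphs.
    if glyph_id < len(h_metrics):
        return h_metrics[glyph_id]["advance_width"]
    # Trailing glyphs (often a monospaced tail) reuse the final advance width.
    return h_metrics[-1]["advance_width"]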

@@ -1,52 +0,0 @@
-module(maxp).
-export([parse_maxp_table/1]).
-import(read_utils, [read_fixed_version/1]).
-include("sizes.hrl").
parse_maxp_table(
    <<FixedVersion:?FIXED_SIZE/binary,
      NumGlyphs:16, Tail/binary>>) ->
    Version = read_fixed_version(FixedVersion),
    case lists:member(Version, [0.5, 1.0]) of
        false -> throw({invalid_version, Version});
        true ->
            if Version == 0.5 -> [
                {version, Version},
                {num_glyphs, NumGlyphs}
            ]; true ->
                <<
                    MaxPoints:16,
                    MaxContours:16,
                    MaxCompositePoints:16,
                    MaxCompositeContours:16,
                    MaxZones:16,
                    MaxTwilightPoints:16,
                    MaxStorage:16,
                    MaxFunctionDefs:16,
                    MaxInstructionDefs:16,
                    MaxStackElements:16,
                    MaxSizeOfInstructions:16,
                    MaxComponentElements:16,
                    MaxComponentDepth:16,
                    _/binary
                >> = Tail,
                [
                    {version, Version},
                    {num_glyphs, NumGlyphs},
                    {max_points, MaxPoints},
                    {max_contours, MaxContours},
                    {max_composite_points, MaxCompositePoints},
                    {max_composite_contours, MaxCompositeContours},
                    {max_zones, MaxZones},
                    {max_twilight_points, MaxTwilightPoints},
                    {max_storage, MaxStorage},
                    {max_function_defs, MaxFunctionDefs},
                    {max_instruction_defs, MaxInstructionDefs},
                    {max_stack_elements, MaxStackElements},
                    {max_size_of_instructions, MaxSizeOfInstructions},
                    {max_component_elements, MaxComponentElements},
                    {max_component_depth, MaxComponentDepth}
                ]
            end
    end.

@@ -1,7 +0,0 @@
-module(name).
-export([parse_name_table/1]).
parse_name_table(
    <<0:16, Count:16,
      Offset:16, _:(Offset-3*16)/binary,
      Strings/binary>> = Table) -> throw(unimplemented).

@@ -1,32 +0,0 @@
-module(ttf_parser).
-export([read/1]).
-import(font_directory, [parse_font_directory/1]).
-import(table, [parse_table/4]).
parse_at_table_directory(File, TableDirectory, TableTag, Info) ->
    TableDirectoryEntry = proplists:get_value(TableTag, TableDirectory),
    Offset = proplists:get_value(offset, TableDirectoryEntry),
    Length = proplists:get_value(length, TableDirectoryEntry),
    <<_:Offset/binary, Table:Length/binary, _/binary>> = File,
    parse_table(TableTag, Table, Length, Info).
parse_at_table_directory(File, TableDirectory, TableTag) -> parse_at_table_directory(File, TableDirectory, TableTag, {}).
read(Filename) ->
    {ok, File} = file:read_file(Filename),
    FontDirectory = parse_font_directory(File),
    TableDirectory = proplists:get_value(table_directory, FontDirectory),
    FontHeader = parse_at_table_directory(File, TableDirectory, head),
    HorizontalHeader = parse_at_table_directory(File, TableDirectory, hhea),
    MaximumProfile = parse_at_table_directory(File, TableDirectory, maxp),
    HorizontalMetrics = parse_at_table_directory(File, TableDirectory, hmtx, [
        {number_of_h_metrics, proplists:get_value(number_of_h_metrics, HorizontalHeader)},
        {num_glyphs, proplists:get_value(num_glyphs, MaximumProfile)}
    ]),
    [
        {font_header, FontHeader},
        {horizontal_header, HorizontalHeader},
        {maximum_profile, MaximumProfile},
        {horizontal_metrics, HorizontalMetrics}
    ].

@@ -1,2 +0,0 @@
find "$1" -type f -name "*.tt[$2]"
# TODO: Parse other fonts that are not tt[fc]

@@ -1,31 +0,0 @@
from typing import BinaryIO
ENDIANNESS = 'big'
def read_int(f: BinaryIO, number: int, signed:bool=False) -> int: return int.from_bytes(f.read(number), ENDIANNESS, signed=signed)
def write_int(f: BinaryIO, value: int, number: int, signed:bool=False) -> int: return f.write(value.to_bytes(number, ENDIANNESS, signed=signed))
def read_u64(f: BinaryIO) -> int: return read_int(f, 8)
def read_u32(f: BinaryIO) -> int: return read_int(f, 4)
def read_u24(f: BinaryIO) -> int: return read_int(f, 3)
def read_u16(f: BinaryIO) -> int: return read_int(f, 2)
def read_u8(f: BinaryIO) -> int: return read_int(f, 1)
def write_u16(f: BinaryIO, value: int) -> int: return write_int(f, value, 2)
def read_i32(f: BinaryIO) -> int: return read_int(f, 4, signed=True)
def read_i16(f: BinaryIO) -> int: return read_int(f, 2, signed=True)
def read_i8(f: BinaryIO) -> int: return read_int(f, 1, signed=True)
def read_utf8(f: BinaryIO, number: int) -> str: return f.read(number).decode(encoding='utf8')
def read_ascii(f: BinaryIO, number: int) -> str: return f.read(number).decode(encoding='ascii')
def read_fixed_point(f: BinaryIO, preradix_bits: int, postradix_bits:int, *, signed:bool=True) -> float:
    assert (preradix_bits+postradix_bits)%8 == 0
    raw = read_int(f, (preradix_bits+postradix_bits)//8, signed=signed)
    return raw/(1<<(postradix_bits))
def read_pascal_string(f: BinaryIO) -> str:
    string_size = read_int(f, 1)
    pascal_string = read_ascii(f, string_size)
    return pascal_string
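
read_fixed_point divides the raw (by default signed, two's-complement) integer by 2^postradix_bits; with (16, 16) it reads an OpenType Fixed value and with (2, 14) an F2Dot14. A quick sketch of the arithmetic using io.BytesIO, assuming these helpers are importable:

import io

# 0x00018000 as 16.16 Fixed: 0x18000 / 2**16 = 1.5
assert read_fixed_point(io.BytesIO(bytes.fromhex("00018000")), 16, 16) == 1.5
# 0x7000 as F2Dot14: 0x7000 / 2**14 = 1.75
assert read_fixed_point(io.BytesIO(bytes.fromhex("7000")), 2, 14) == 1.75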

@@ -1,46 +0,0 @@
import os

from OpenFont import FontSpecificNameID, NameID, NameTable_Format_0, OpenFontFile, PredefinedNameID, TrueTypeOutlines, open_font


def search_names(font: OpenFontFile, nameID: NameID) -> str:
    assert isinstance(font.naming_table, NameTable_Format_0)
    for nameRecord in font.naming_table.nameRecord:
        if nameRecord.nameID == nameID:
            return nameRecord.string
    assert False, f"Name not found: {nameID}"


def print_font(font: OpenFontFile):
    assert isinstance(font.naming_table, NameTable_Format_0)
    assert isinstance(font.outlines, TrueTypeOutlines)
    name = search_names(font, PredefinedNameID.FULL_NAME)
    print(name, f"({font.maximum_profile.numGlyphs} glyphs, {font.naming_table.count} names)")
    if font.font_variations:
        axis_names = [search_names(font, FontSpecificNameID(axis.axisNameID)) for axis in font.font_variations.font_variations.axes]
        num_instances = font.font_variations.font_variations.instanceCount
        print(f"\tAxes: [{', '.join(axis_names)}] ({num_instances} instances)")


path = "examples/"
if os.path.isfile(path):
    try:
        font = open_font(path)
    except AssertionError as err:
        print("Failed:", path)
        raise err
    print_font(font)
else:
    for root, dirs, files in os.walk(path):
        for file in files:
            _, ext = os.path.splitext(file)
            match ext:
                case '.ttf':
                    file_path = os.path.join(root, file)
                    try:
                        font = open_font(file_path)
                    except AssertionError as err:
                        print("Failed:", file_path)
                        raise err
                    print_font(font)

@@ -1,3 +0,0 @@
*.txt
*.svg
*.dot

@@ -1,117 +0,0 @@
#!/usr/bin/env python3
from os import system as run
import os
from subprocess import check_output as run_and_read
import sys
from typing import Dict, Set

from OpenFont import parse_font_directory


def get_vendor(file: str) -> str:
    command = f"otfinfo -i {file} | grep 'Vendor ID' | awk '{{print $3}}'"
    return run_and_read(command, shell=True).decode(encoding='ascii').rstrip('\n')


def get_tables(file: str) -> list[str]:
    USE_OTFINFO = '--no-otfinfo' not in sys.argv
    if USE_OTFINFO:
        command = f"otfinfo -t {file} | awk '{{print $2}}'"
        return run_and_read(command, shell=True).decode(encoding='ascii').rstrip('\n').split('\n')
    else:
        with open(file, 'rb') as f:
            font_directory = parse_font_directory(f) # Since the font directory is always at the start of the file
        sorted_entries = sorted(font_directory.table_directory, key=lambda entry: entry.offset)
        return [entry.tableTag._value_ for entry in sorted_entries]


FILENAME = os.path.join(os.path.dirname(__file__), "tables")

graph: Dict[str, Set[str]] = {}
def verify_node(tag: str):
    if tag not in graph: graph[tag] = set()
def add_edge(tag1: str, tag2: str):
    verify_node(tag1)
    verify_node(tag2)
    graph[tag1].add(tag2)

MODE = None
if not sys.stdin.isatty():
    MODE = 'direct' if '--direct' in sys.argv else 'before'
    accumulator: Dict[tuple[str, str], int] = {} # acc[(tag1, tag2)] is number of times tag1 occurred before tag2
    for file in sys.stdin:
        file = file.rstrip('\n')
        print(f"{get_vendor(file):<4} {file}")
        tables = get_tables(file)
        def add_to_acc(tag1: str, tag2: str):
            accumulator[(tag1, tag2)] = accumulator.get((tag1, tag2), 0)+1
        match MODE:
            case 'before':
                for i, tag1 in enumerate(tables):
                    for tag2 in tables[i+1:]:
                        add_to_acc(tag1, tag2)
            case 'direct':
                for i, tag1 in enumerate(tables[:-1]):
                    tag2 = tables[i+1]
                    add_to_acc(tag1, tag2)
            case _:
                assert False, f"Invalid mode: '{MODE}'"
    with open(f"{FILENAME}.txt", 'w') as f:
        f.write(f"mode: {MODE}\n")
        for (tag1, tag2) in accumulator:
            f.write(f"'{tag1:<4}', '{tag2:<4}', {accumulator[(tag1, tag2)]}, {accumulator.get((tag2, tag1), 0)}\n")
    for (tag1, tag2) in accumulator: add_edge(tag1, tag2)
else:
    with open(f"{FILENAME}.txt", 'r') as f:
        for i, line in enumerate(f.readlines()):
            if i == 0:
                MODE = line[6:-1]
                continue
            tag1, tag2 = line[1:5], line[9:13]
            add_edge(tag1, tag2)
assert MODE, "Unreachable"

UNTRANSITIVITY = '--untrans' in sys.argv
def untransitivity() -> None:
    to_remove: Dict[str, Set[str]] = {tag: set() for tag in graph}
    for tag1 in graph:
        for tag2 in graph[tag1]:
            for tag3 in graph[tag2]:
                if tag3 in graph[tag1]: # find a->b, b->c where a->c
                    to_remove[tag1].add(tag3)
    # to_remove = {
    #     tag1: {tag3 for tag3 in graph[tag2] for tag2 in graph[tag1] if tag3 in graph[tag1]}
    #     for tag1 in graph
    # }
    for tag1 in to_remove:
        for tag3 in to_remove[tag1]:
            graph[tag1].remove(tag3)
if UNTRANSITIVITY: untransitivity()

GENERATE = '--svg' in sys.argv
def generate_svg() -> int:
    def node_name(tag: str) -> str:
        return '"'+tag+'"'
    with open(f"{FILENAME}.dot", 'w') as f:
        f.write("digraph {\n")
        f.write(f"\tlayout=dot\n")
        for node in graph:
            f.write(f"\t{node_name(node)}\n")
            for neighbour in graph[node]:
                f.write(f"\t{node_name(node)} -> {node_name(neighbour)};\n")
        f.write("}")
    return run(f"dot -Tsvg {FILENAME}.dot > {FILENAME}.svg")
if GENERATE: generate_svg()

print(f"{MODE = }\n{UNTRANSITIVITY = }\n{GENERATE = }")