[layout] Cache subtable coverages in hb_map_t

Proof-of-concept. Going to revert. Memory consumption is more
than I like. It does speed up Roboto shaping another 15% though.
Perhaps if we could add logic to choose which subtables to
cache, this might be a useful approach.
Behdad Esfahbod 2023-05-04 16:28:11 -06:00
parent 975980d368
commit 7a715d74e0
17 changed files with 65 additions and 99 deletions
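
The gist of the approach: hb_applicable_t currently keeps an hb_set_digest_t and lets each subtable's apply() re-run the Coverage binary search for every glyph. This commit instead walks each subtable's Coverage once at acceleration time, records every covered glyph's coverage index in an hb_map_t, and passes the cached index down as the new coverage_index parameter, so apply() skips both the digest test and the per-glyph Coverage lookup. Below is a minimal standalone sketch of that idea in plain C++/STL; Coverage, CoverageCache, and the rest are illustrative stand-ins, not the HarfBuzz API:

    // Hypothetical standalone sketch -- plain STL, not the HarfBuzz API.
    #include <cstdint>
    #include <cstdio>
    #include <algorithm>
    #include <unordered_map>
    #include <vector>

    using glyph_id_t = uint32_t;

    // Stand-in for an OpenType Coverage table (format 1: sorted glyph array).
    struct Coverage
    {
      std::vector<glyph_id_t> glyphs; // sorted ascending

      // O(log n) per call, like Coverage::get_coverage() in HarfBuzz.
      int get_coverage (glyph_id_t g) const
      {
        auto it = std::lower_bound (glyphs.begin (), glyphs.end (), g);
        return (it != glyphs.end () && *it == g) ? int (it - glyphs.begin ()) : -1;
      }
    };

    // Stand-in for the hb_map_t member this commit adds to hb_applicable_t:
    // built once per subtable, it answers "is this glyph covered, and at
    // which coverage index?" with a single hash probe.
    struct CoverageCache
    {
      std::unordered_map<glyph_id_t, unsigned> map;

      void init (const Coverage &coverage)
      {
        // One pass over the coverage; trades memory for O(1) lookups later.
        for (unsigned i = 0; i < coverage.glyphs.size (); i++)
          map.emplace (coverage.glyphs[i], i);
      }

      // Replaces both the digest may_have() test and the binary search.
      bool get (glyph_id_t g, unsigned *index) const
      {
        auto it = map.find (g);
        if (it == map.end ()) return false;
        *index = it->second;
        return true;
      }
    };

    int main ()
    {
      Coverage cov { { 3, 7, 42, 100 } };
      CoverageCache cache;
      cache.init (cov);

      unsigned index;
      if (cache.get (42, &index))
        printf ("glyph 42 -> coverage index %u\n", index); // prints 2
      if (!cache.get (5, &index))
        printf ("glyph 5 not covered; subtable skipped\n");
      return 0;
    }

The trade-off flagged in the message above falls out directly: the map stores one entry per covered glyph per subtable, so memory grows with total coverage size, while each lookup drops from a digest probe plus an O(log n) search to one hash probe.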

@@ -113,12 +113,12 @@ struct CursivePosFormat1
   const Coverage &get_coverage () const { return this+coverage; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
+    const EntryExitRecord &this_record = entryExitRecord[coverage_index];
     if (!this_record.entryAnchor) return_trace (false);

     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;

@@ -109,12 +109,11 @@ struct MarkBasePosFormat1_2
     );
   }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
-    if (likely (mark_index == NOT_COVERED)) return_trace (false);
+    unsigned int mark_index = coverage_index;

     /* Now we search backwards for a non-mark glyph.
      * We don't use skippy_iter.prev() to avoid O(n^2) behavior. */

@@ -92,12 +92,11 @@ struct MarkLigPosFormat1_2
   const Coverage &get_coverage () const { return this+markCoverage; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
-    if (likely (mark_index == NOT_COVERED)) return_trace (false);
+    unsigned int mark_index = coverage_index;

     /* Now we search backwards for a non-mark glyph */

@@ -91,12 +91,11 @@ struct MarkMarkPosFormat1_2
   const Coverage &get_coverage () const { return this+mark1Coverage; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
-    if (likely (mark1_index == NOT_COVERED)) return_trace (false);
+    unsigned int mark1_index = coverage_index;

     /* now we search backwards for a suitable mark glyph until a non-mark glyph */
     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;

@@ -102,12 +102,10 @@ struct PairPosFormat1_3
   const Coverage &get_coverage () const { return this+coverage; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
     skippy_iter.reset_fast (buffer->idx, 1);
@@ -118,7 +116,7 @@ struct PairPosFormat1_3
       return_trace (false);
     }

-    return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
+    return_trace ((this+pairSet[coverage_index]).apply (c, valueFormat, skippy_iter.idx));
   }

   bool subset (hb_subset_context_t *c) const

@@ -122,12 +122,10 @@ struct PairPosFormat2_4
   const Coverage &get_coverage () const { return this+coverage; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
     skippy_iter.reset_fast (buffer->idx, 1);

@@ -20,10 +20,10 @@ struct PosLookup : Lookup
     return false;
   }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
-    return_trace (dispatch (c));
+    return_trace (dispatch (c, coverage_index));
   }

   bool intersects (const hb_set_t *glyphs) const

@@ -61,12 +61,10 @@ struct SinglePosFormat1
   ValueFormat get_value_format () const { return valueFormat; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
     {

@@ -61,14 +61,12 @@ struct SinglePosFormat2
   ValueFormat get_value_format () const { return valueFormat; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_buffer_t *buffer = c->buffer;
-    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

-    if (unlikely (index >= valueCount)) return_trace (false);
+    if (unlikely (coverage_index >= valueCount)) return_trace (false);

     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
     {
@@ -78,7 +76,7 @@ struct SinglePosFormat2
     }

     valueFormat.apply_value (c, this,
-                             &values[index * valueFormat.get_len ()],
+                             &values[coverage_index * valueFormat.get_len ()],
                              buffer->cur_pos());

     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())

@@ -69,14 +69,10 @@ struct AlternateSubstFormat1_2
   { return (this+alternateSet[(this+coverage).get_coverage (gid)])
     .get_alternates (start_offset, alternate_count, alternate_glyphs); }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);

-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);
-
-    return_trace ((this+alternateSet[index]).apply (c));
+    return_trace ((this+alternateSet[coverage_index]).apply (c));
   }

   bool serialize (hb_serialize_context_t *c,

@@ -78,14 +78,10 @@ struct LigatureSubstFormat1_2
     return lig_set.would_apply (c);
   }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);

-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);
-
-    const auto &lig_set = this+ligatureSet[index];
+    const auto &lig_set = this+ligatureSet[coverage_index];
     return_trace (lig_set.apply (c));
   }

@@ -61,14 +61,10 @@ struct MultipleSubstFormat1_2
   bool would_apply (hb_would_apply_context_t *c) const
   { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);

-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);
-
-    return_trace ((this+sequence[index]).apply (c));
+    return_trace ((this+sequence[coverage_index]).apply (c));
   }

   template<typename Iterator,

@@ -106,19 +106,16 @@ struct ReverseChainSingleSubstFormat1
   bool would_apply (hb_would_apply_context_t *c) const
   { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
       return_trace (false); /* No chaining to this type */

-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
     const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);

-    if (unlikely (index >= substitute.len)) return_trace (false);
+    if (unlikely (coverage_index >= substitute.len)) return_trace (false);

     unsigned int start_index = 0, end_index = 0;
     if (match_backtrack (c,
@@ -139,7 +136,7 @@ struct ReverseChainSingleSubstFormat1
                          c->buffer->idx);
     }

-    c->replace_glyph_inplace (substitute[index]);
+    c->replace_glyph_inplace (substitute[coverage_index]);

     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
     {

@@ -123,12 +123,10 @@ struct SingleSubstFormat1_3
     return 1;
   }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
-    unsigned int index = (this+coverage).get_coverage (glyph_id);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     hb_codepoint_t d = deltaGlyphID;
     hb_codepoint_t mask = get_mask ();

@@ -100,13 +100,11 @@ struct SingleSubstFormat2_4
     return 1;
   }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

-    if (unlikely (index >= substitute.len)) return_trace (false);
+    if (unlikely (coverage_index >= substitute.len)) return_trace (false);

     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
     {
@@ -116,7 +114,7 @@ struct SingleSubstFormat2_4
                          c->buffer->idx);
     }

-    c->replace_glyph (substitute[index]);
+    c->replace_glyph (substitute[coverage_index]);

     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
     {

@@ -35,10 +35,10 @@ struct SubstLookup : Lookup
     return dispatch (&c);
   }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
-    return_trace (dispatch (c));
+    return_trace (dispatch (c, coverage_index));
   }

   bool intersects (const hb_set_t *glyphs) const

@@ -680,7 +680,7 @@ struct hb_ot_apply_context_t :
   const char *get_name () { return "APPLY"; }
   typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
   template <typename T>
-  return_t dispatch (const T &obj) { return obj.apply (this); }
+  return_t dispatch (const T &obj, unsigned coverage_index) { return obj.apply (this, coverage_index); }
   static return_t default_return_value () { return false; }
   bool stop_sublookup_iteration (return_t r) const { return r; }
   return_t recurse (unsigned int sub_lookup_index)
@@ -893,22 +893,24 @@ struct hb_accelerate_subtables_context_t :
        hb_dispatch_context_t<hb_accelerate_subtables_context_t>
 {
   template <typename Type>
-  static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c)
+  static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c, unsigned coverage_index)
   {
     const Type *typed_obj = (const Type *) obj;
-    return typed_obj->apply (c);
+    return typed_obj->apply (c, coverage_index);
   }

 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
   template <typename T>
-  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply_cached (c) )
+  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, unsigned coverage_index, hb_priority<1>)
+  HB_RETURN (bool, obj->apply_cached (c, coverage_index) )
   template <typename T>
-  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
+  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, unsigned coverage_index, hb_priority<0>)
+  HB_RETURN (bool, obj->apply (c, coverage_index) )
   template <typename Type>
-  static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c)
+  static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c, unsigned coverage_index)
   {
     const Type *typed_obj = (const Type *) obj;
-    return apply_cached_ (typed_obj, c, hb_prioritize);
+    return apply_cached_ (typed_obj, c, coverage_index, hb_prioritize);
   }

   template <typename T>
@@ -923,7 +925,7 @@ struct hb_accelerate_subtables_context_t :
   }
 #endif

-  typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
+  typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c, unsigned coverage_index);
   typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);

   struct hb_applicable_t
@@ -948,16 +950,22 @@ struct hb_accelerate_subtables_context_t :
 #endif
       digest.init ();
       obj_.get_coverage ().collect_coverage (&digest);
+
+      coverage_map.init ();
+      auto &coverage = obj_.get_coverage ();
+      for (hb_codepoint_t g : hb_iter (coverage))
+        coverage_map.set (g, coverage.get_coverage (g));
     }

     bool apply (hb_ot_apply_context_t *c) const
     {
-      return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
+      unsigned *v;
+      return coverage_map.has (c->buffer->cur().codepoint, &v) && apply_func (obj, c, *v);
     }
 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
     bool apply_cached (hb_ot_apply_context_t *c) const
     {
-      return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
+      unsigned *v;
+      return coverage_map.has (c->buffer->cur().codepoint, &v) && apply_cached_func (obj, c, *v);
     }
     bool cache_enter (hb_ot_apply_context_t *c) const
     {
@@ -977,6 +985,7 @@ struct hb_accelerate_subtables_context_t :
     hb_cache_func_t cache_func;
 #endif
     hb_set_digest_t digest;
+    hb_map_t coverage_map;
   };

 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
@@ -2233,14 +2242,11 @@ struct ContextFormat1_4
   const Coverage &get_coverage () const { return this+coverage; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED))
-      return_trace (false);
-
-    const RuleSet &rule_set = this+ruleSet[index];
+    const RuleSet &rule_set = this+ruleSet[coverage_index];
     struct ContextApplyLookupContext lookup_context = {
       {match_glyph},
       nullptr
@@ -2451,13 +2457,11 @@ struct ContextFormat2_5
     }
   }

-  bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
-  bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
-  bool _apply (hb_ot_apply_context_t *c, bool cached) const
+  bool apply_cached (hb_ot_apply_context_t *c, unsigned coverage_index) const { return _apply (c, coverage_index, true); }
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const { return _apply (c, coverage_index, false); }
+  bool _apply (hb_ot_apply_context_t *c, unsigned coverage_index, bool cached) const
   {
     TRACE_APPLY (this);
-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     const ClassDef &class_def = this+classDef;
@@ -2466,6 +2470,7 @@ struct ContextFormat2_5
       &class_def
     };

+    unsigned index;
     if (cached && c->buffer->cur().syllable() < 255)
       index = c->buffer->cur().syllable ();
     else
@@ -2643,11 +2648,9 @@ struct ContextFormat3
   const Coverage &get_coverage () const { return this+coverageZ[0]; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
-    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
     struct ContextApplyLookupContext lookup_context = {
@@ -3317,13 +3320,11 @@ struct ChainContextFormat1_4
   const Coverage &get_coverage () const { return this+coverage; }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

-    const ChainRuleSet &rule_set = this+ruleSet[index];
+    const ChainRuleSet &rule_set = this+ruleSet[coverage_index];
     struct ChainContextApplyLookupContext lookup_context = {
       {{match_glyph, match_glyph, match_glyph}},
       {nullptr, nullptr, nullptr}
@@ -3556,13 +3557,11 @@ struct ChainContextFormat2_5
     }
   }

-  bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
-  bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
-  bool _apply (hb_ot_apply_context_t *c, bool cached) const
+  bool apply_cached (hb_ot_apply_context_t *c, unsigned coverage_index) const { return _apply (c, coverage_index, true); }
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const { return _apply (c, coverage_index, false); }
+  bool _apply (hb_ot_apply_context_t *c, unsigned coverage_index, bool cached) const
   {
     TRACE_APPLY (this);
-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     const ClassDef &backtrack_class_def = this+backtrackClassDef;
     const ClassDef &input_class_def = this+inputClassDef;
@@ -3587,7 +3586,7 @@ struct ChainContextFormat2_5
        &lookahead_class_def}
     };

-    index = input_class_def.get_class (c->buffer->cur().codepoint);
+    unsigned index = input_class_def.get_class (c->buffer->cur().codepoint);
     const ChainRuleSet &rule_set = this+ruleSet[index];
     return_trace (rule_set.apply (c, lookup_context));
   }
@@ -3805,14 +3804,11 @@ struct ChainContextFormat3
     return this+input[0];
   }

-  bool apply (hb_ot_apply_context_t *c) const
+  bool apply (hb_ot_apply_context_t *c, unsigned coverage_index) const
   {
     TRACE_APPLY (this);
     const auto &input = StructAfter<decltype (inputX)> (backtrack);

-    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
-    if (likely (index == NOT_COVERED)) return_trace (false);

     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
     struct ChainContextApplyLookupContext lookup_context = {