[PairPos] Cache coverage as well

Another 3% improvement measured on Roboto-Regular.
This commit is contained in:
Behdad Esfahbod 2025-02-05 18:51:11 +00:00
parent 726af2e4e5
commit f22943a2a2
6 changed files with 47 additions and 8 deletions

View file

@ -96,6 +96,15 @@ struct Coverage
default:return NOT_COVERED;
}
}
/* Cached variant of get_coverage(): consult the per-lookup cache first and
 * fall back to the uncached lookup on a miss, memoizing the result.
 * Passing a null cache degrades to a plain uncached lookup. */
unsigned int get_coverage (hb_codepoint_t glyph_id,
			   hb_ot_lookup_cache_t *cache) const
{
  if (!cache)
    return get_coverage (glyph_id);

  unsigned cached_value;
  if (cache->get (glyph_id, &cached_value))
    return cached_value;

  cached_value = get_coverage (glyph_id);
  cache->set (glyph_id, cached_value);
  return cached_value;
}
unsigned get_population () const
{
@ -201,6 +210,19 @@ struct Coverage
}
}
/* Relative per-lookup cost of get_coverage() for the active subtable
 * format; callers use the sum of these to decide whether a cache is
 * worth allocating.  NOTE(review): hb_barrier() before each variant
 * access presumably orders sanitization against use — confirm against
 * hb-sanitize. */
unsigned cost () const
{
switch (u.format) {
case 1: hb_barrier (); return u.format1.cost ();
case 2: hb_barrier (); return u.format2.cost ();
#ifndef HB_NO_BEYOND_64K
case 3: hb_barrier (); return u.format3.cost ();
case 4: hb_barrier (); return u.format4.cost ();
#endif
/* Unknown format: report zero cost (such a subtable also covers nothing). */
default:return 0u;
}
}
/* Might return false if array looks unsorted.
* Used for faster rejection of corrupt data. */
template <typename set_t>

View file

@ -103,6 +103,8 @@ struct CoverageFormat1_3
intersect_glyphs << glyphArray[i];
}
/* Format 1 lookups are treated as unit cost. */
unsigned cost () const { return 1; }
/* Add every glyph covered by this table to `glyphs`.  May return false
 * if glyphArray looks unsorted — used for fast rejection of corrupt
 * data (see add_sorted_array). */
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{ return glyphs->add_sorted_array (glyphArray.as_array ()); }

View file

@ -157,6 +157,8 @@ struct CoverageFormat2_4
}
}
/* Format 2 lookup cost scales with binary-search depth over the ranges. */
unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ }
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{

View file

@ -123,12 +123,16 @@ struct PairPosFormat2_4 : ValueBase
const Coverage &get_coverage () const { return this+coverage; }
using pair_pos_cache_t = hb_pair_t<hb_ot_class_cache_t, hb_ot_class_cache_t>;
/* Per-accelerator cache bundle for PairPosFormat2: one small cache per
 * table consulted on every application of the lookup. */
struct pair_pos_cache_t
{
hb_ot_lookup_cache_t coverage; /* memoizes Coverage::get_coverage() results */
hb_ot_lookup_cache_t first;    /* memoizes classDef1 get_class() results */
hb_ot_lookup_cache_t second;   /* memoizes classDef2 get_class() results */
};
/* Estimated per-glyph cost of the lookups this subtable performs, used to
 * decide whether allocating a pair_pos_cache_t pays off.  The diff
 * extraction had left the pre-commit implementation (a local `c` with a
 * `>= 4` threshold) dead in front of the committed return; only the
 * committed statement belongs here. */
unsigned cache_cost () const
{
  return (this+coverage).cost () + (this+classDef1).cost () + (this+classDef2).cost ();
}
static void * cache_func (void *p, hb_ot_lookup_cache_op_t op)
{
@ -139,6 +143,7 @@ struct PairPosFormat2_4 : ValueBase
pair_pos_cache_t *cache = (pair_pos_cache_t *) hb_malloc (sizeof (pair_pos_cache_t));
if (likely (cache))
{
cache->coverage.clear ();
cache->first.clear ();
cache->second.clear ();
}
@ -162,8 +167,17 @@ struct PairPosFormat2_4 : ValueBase
bool _apply (hb_ot_apply_context_t *c, bool cached) const
{
TRACE_APPLY (this);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
pair_pos_cache_t *cache = cached ? (pair_pos_cache_t *) c->lookup_accel->cache : nullptr;
#endif
hb_buffer_t *buffer = c->buffer;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint, cache ? &cache->coverage : nullptr);
#else
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
#endif
if (likely (index == NOT_COVERED)) return_trace (false);
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
@ -176,7 +190,6 @@ struct PairPosFormat2_4 : ValueBase
}
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
pair_pos_cache_t *cache = cached ? (pair_pos_cache_t *) c->lookup_accel->cache : nullptr;
unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint, cache ? &cache->first : nullptr);
unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint, cache ? &cache->second : nullptr);
#else

View file

@ -29,6 +29,9 @@
#ifndef OT_LAYOUT_TYPES_HH
#define OT_LAYOUT_TYPES_HH
/* Small fixed-size cache mapping a glyph id to a small value, shared by
 * coverage and class-def lookups.  NOTE(review): the <15, 8, 7> template
 * arguments presumably configure key bits, value bits, and log2 slot
 * count — confirm against hb_cache_t. */
using hb_ot_lookup_cache_t = hb_cache_t<15, 8, 7>;
static_assert (sizeof (hb_ot_lookup_cache_t) == 256, ""); /* pin the footprint at 256 bytes */
namespace OT {
namespace Layout {

View file

@ -48,9 +48,6 @@ using OT::Layout::MediumTypes;
namespace OT {
using hb_ot_class_cache_t = hb_cache_t<15, 8, 7>;
static_assert (sizeof (hb_ot_class_cache_t) == 256, "");
/* Forward declaration so ClassDef methods below can call it; presumably
 * defined later in this file. */
template<typename Iterator>
static inline bool ClassDef_serialize (hb_serialize_context_t *c,
Iterator it);
@ -2081,7 +2078,7 @@ struct ClassDef
}
}
unsigned int get_class (hb_codepoint_t glyph_id,
hb_ot_class_cache_t *cache) const
hb_ot_lookup_cache_t *cache) const
{
unsigned klass;
if (cache && cache->get (glyph_id, &klass)) return klass;