Mirror of https://github.com/harfbuzz/harfbuzz.git, synced 2025-04-16 02:04:44 +00:00
Merge pull request #5043 from harfbuzz/pairpos-cache
[OT] PairPos cache
Commit 9a15e0e69a
8 changed files with 232 additions and 44 deletions
@@ -96,6 +96,15 @@ struct Coverage
     default:return NOT_COVERED;
     }
   }
+  unsigned int get_coverage (hb_codepoint_t glyph_id,
+                             hb_ot_lookup_cache_t *cache) const
+  {
+    unsigned coverage;
+    if (cache && cache->get (glyph_id, &coverage)) return coverage;
+    coverage = get_coverage (glyph_id);
+    if (cache) cache->set (glyph_id, coverage);
+    return coverage;
+  }
 
   unsigned get_population () const
   {
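The cached get_coverage overload added above is a plain check/compute/store wrapper around hb_cache_t's get and set. The standalone C++ sketch below illustrates the same pattern with a hypothetical direct-mapped cache; the type name and its 128-slot, 16-bits-per-slot layout are assumptions for illustration, not HarfBuzz's actual hb_cache_t code.

// Hypothetical sketch of a tiny direct-mapped glyph cache with the same
// get/set/clear surface used by the hunk above.  Assumed layout: 128 slots,
// each packing an upper-key tag with an 8-bit value.
#include <cstdint>
#include <cstdio>

struct tiny_lookup_cache_t
{
  uint16_t slots[128];

  void clear () { for (auto &s : slots) s = 0xFFFFu; }      // 0xFFFF marks "empty"

  bool get (uint32_t key, unsigned *value) const
  {
    uint16_t v = slots[key & 127u];
    if (v == 0xFFFFu || (v >> 8) != ((key >> 7) & 0xFFu))   // empty slot or tag mismatch
      return false;
    *value = v & 0xFFu;
    return true;
  }

  void set (uint32_t key, unsigned value)
  {
    if (key >= (1u << 15) || value > 0xFFu) return;         // outside the representable range
    slots[key & 127u] = (uint16_t) ((((key >> 7) & 0xFFu) << 8) | value);
  }
};

// The check/compute/store pattern from the diff, wrapped around a stand-in
// for the uncached lookup.
static unsigned slow_lookup (uint32_t glyph) { return glyph % 5; }

static unsigned cached_lookup (uint32_t glyph, tiny_lookup_cache_t *cache)
{
  unsigned v;
  if (cache && cache->get (glyph, &v)) return v;  // cache hit
  v = slow_lookup (glyph);                        // fall back to the real work
  if (cache) cache->set (glyph, v);               // remember it for next time
  return v;
}

int main ()
{
  tiny_lookup_cache_t cache;
  cache.clear ();
  unsigned first  = cached_lookup (66, &cache);   // computes and stores
  unsigned second = cached_lookup (66, &cache);   // served from the cache
  printf ("%u %u\n", first, second);
}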
@@ -201,6 +210,19 @@ struct Coverage
     }
   }
 
+  unsigned cost () const
+  {
+    switch (u.format) {
+    case 1: hb_barrier (); return u.format1.cost ();
+    case 2: hb_barrier (); return u.format2.cost ();
+#ifndef HB_NO_BEYOND_64K
+    case 3: hb_barrier (); return u.format3.cost ();
+    case 4: hb_barrier (); return u.format4.cost ();
+#endif
+    default:return 0u;
+    }
+  }
+
   /* Might return false if array looks unsorted.
    * Used for faster rejection of corrupt data. */
   template <typename set_t>
@@ -103,6 +103,8 @@ struct CoverageFormat1_3
       intersect_glyphs << glyphArray[i];
   }
 
+  unsigned cost () const { return 1; }
+
   template <typename set_t>
   bool collect_coverage (set_t *glyphs) const
   { return glyphs->add_sorted_array (glyphArray.as_array ()); }
@@ -157,6 +157,8 @@ struct CoverageFormat2_4
     }
   }
 
+  unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ }
+
   template <typename set_t>
   bool collect_coverage (set_t *glyphs) const
   {
@@ -103,11 +103,50 @@ struct PairPosFormat1_3
 
   const Coverage &get_coverage () const { return this+coverage; }
 
-  bool apply (hb_ot_apply_context_t *c) const
+  unsigned cache_cost () const
   {
+    return (this+coverage).cost ();
+  }
+  static void * cache_func (void *p, hb_ot_lookup_cache_op_t op)
+  {
+    switch (op)
+    {
+      case hb_ot_lookup_cache_op_t::CREATE:
+      {
+        hb_ot_lookup_cache_t *cache = (hb_ot_lookup_cache_t *) hb_malloc (sizeof (hb_ot_lookup_cache_t));
+        if (likely (cache))
+          cache->clear ();
+        return cache;
+      }
+      case hb_ot_lookup_cache_op_t::ENTER:
+        return (void *) true;
+      case hb_ot_lookup_cache_op_t::LEAVE:
+        return nullptr;
+      case hb_ot_lookup_cache_op_t::DESTROY:
+      {
+        hb_ot_lookup_cache_t *cache = (hb_ot_lookup_cache_t *) p;
+        hb_free (cache);
+        return nullptr;
+      }
+    }
+  }
+
+  bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
+  bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
+  bool _apply (hb_ot_apply_context_t *c, bool cached) const
+  {
     TRACE_APPLY (this);
+
+#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
+    hb_ot_lookup_cache_t *cache = cached ? (hb_ot_lookup_cache_t *) c->lookup_accel->cache : nullptr;
+#endif
+
     hb_buffer_t *buffer = c->buffer;
+#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
+    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint, cache);
+#else
     unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
+#endif
     if (likely (index == NOT_COVERED)) return_trace (false);
 
     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
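The static cache_func above collapses the whole cache lifecycle into one callback keyed by an op value, so the accelerator can hold a single function pointer per subtable: CREATE allocates the per-lookup cache when the accelerator is built, ENTER/LEAVE bracket applying the lookup, and DESTROY frees it. Below is a minimal standalone sketch of that dispatch with hypothetical names (cache_op_t, demo_cache_t); it mirrors the shape of the code above rather than HarfBuzz's API.

// Hypothetical sketch of the op-dispatched lifecycle callback; names are
// illustrative, not HarfBuzz API.
#include <cstdio>

enum class cache_op_t { CREATE, ENTER, LEAVE, DESTROY };

struct demo_cache_t { unsigned hits = 0; };

static void * demo_cache_func (void *p, cache_op_t op)
{
  switch (op)
  {
    case cache_op_t::CREATE:  return new demo_cache_t ();          // allocate the per-lookup cache
    case cache_op_t::ENTER:   return (void *) true;                // per-run setup; non-null means "ok"
    case cache_op_t::LEAVE:   return nullptr;                      // per-run teardown
    case cache_op_t::DESTROY: delete (demo_cache_t *) p; return nullptr;
  }
  return nullptr;
}

int main ()
{
  void *cache = demo_cache_func (nullptr, cache_op_t::CREATE);     // when the accelerator is built
  demo_cache_func (cache, cache_op_t::ENTER);                      // before applying the lookup
  demo_cache_func (cache, cache_op_t::LEAVE);                      // after applying the lookup
  demo_cache_func (cache, cache_op_t::DESTROY);                    // when the accelerator is freed
  puts ("lifecycle done");
}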
@@ -156,7 +195,7 @@ struct PairPosFormat1_3
         strip = true;
       newFormats = compute_effective_value_formats (glyphset, strip, true);
     }
 
     out->valueFormat[0] = newFormats.first;
     out->valueFormat[1] = newFormats.second;
 
@@ -123,11 +123,61 @@ struct PairPosFormat2_4 : ValueBase
 
   const Coverage &get_coverage () const { return this+coverage; }
 
-  bool apply (hb_ot_apply_context_t *c) const
+  struct pair_pos_cache_t
   {
+    hb_ot_lookup_cache_t coverage;
+    hb_ot_lookup_cache_t first;
+    hb_ot_lookup_cache_t second;
+  };
+
+  unsigned cache_cost () const
+  {
+    return (this+coverage).cost () + (this+classDef1).cost () + (this+classDef2).cost ();
+  }
+  static void * cache_func (void *p, hb_ot_lookup_cache_op_t op)
+  {
+    switch (op)
+    {
+      case hb_ot_lookup_cache_op_t::CREATE:
+      {
+        pair_pos_cache_t *cache = (pair_pos_cache_t *) hb_malloc (sizeof (pair_pos_cache_t));
+        if (likely (cache))
+        {
+          cache->coverage.clear ();
+          cache->first.clear ();
+          cache->second.clear ();
+        }
+        return cache;
+      }
+      case hb_ot_lookup_cache_op_t::ENTER:
+        return (void *) true;
+      case hb_ot_lookup_cache_op_t::LEAVE:
+        return nullptr;
+      case hb_ot_lookup_cache_op_t::DESTROY:
+      {
+        pair_pos_cache_t *cache = (pair_pos_cache_t *) p;
+        hb_free (cache);
+        return nullptr;
+      }
+    }
+  }
+
+  bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
+  bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
+  bool _apply (hb_ot_apply_context_t *c, bool cached) const
+  {
     TRACE_APPLY (this);
+
+#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
+    pair_pos_cache_t *cache = cached ? (pair_pos_cache_t *) c->lookup_accel->cache : nullptr;
+#endif
+
     hb_buffer_t *buffer = c->buffer;
+#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
+    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint, cache ? &cache->coverage : nullptr);
+#else
     unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
+#endif
     if (likely (index == NOT_COVERED)) return_trace (false);
 
     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
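PairPosFormat2 caches three independent lookups per glyph pair: the coverage search plus the two ClassDef searches, which is why pair_pos_cache_t bundles three hb_ot_lookup_cache_t members. Assuming each of those is the 256-byte cache asserted in types.hh, a cached PairPos2 subtable costs roughly 768 bytes; the sketch below only spells out that arithmetic with placeholder byte arrays.

// Rough footprint check for the composite cache above, assuming each
// hb_ot_lookup_cache_t is the 256-byte cache declared in types.hh.
struct pair_pos_cache_sketch_t
{
  unsigned char coverage[256];
  unsigned char first[256];    // classDef1 cache
  unsigned char second[256];   // classDef2 cache
};
static_assert (sizeof (pair_pos_cache_sketch_t) == 768, "three 256-byte caches per PairPos2 subtable");

int main () { return 0; }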
@@ -139,8 +189,13 @@ struct PairPosFormat2_4 : ValueBase
       return_trace (false);
     }
 
+#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
+    unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint, cache ? &cache->first : nullptr);
+    unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint, cache ? &cache->second : nullptr);
+#else
     unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
     unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
+#endif
     if (unlikely (klass1 >= class1Count || klass2 >= class2Count))
     {
       buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
@@ -29,6 +29,9 @@
 #ifndef OT_LAYOUT_TYPES_HH
 #define OT_LAYOUT_TYPES_HH
 
+using hb_ot_lookup_cache_t = hb_cache_t<15, 8, 7>;
+static_assert (sizeof (hb_ot_lookup_cache_t) == 256, "");
+
 namespace OT {
 namespace Layout {
 
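Reading the hb_cache_t<15, 8, 7> arguments as key bits, value bits and cache-size bits (an assumption about the template's parameter order, not something this diff states), the alias describes a 128-slot direct-mapped cache whose slots pack an 8-bit key tag with an 8-bit value, which is exactly the 256 bytes the static_assert checks. The small sketch below spells out that arithmetic.

// Back-of-the-envelope check of the 256-byte figure, under the assumed
// <key_bits, value_bits, cache_bits> reading of the template arguments.
constexpr unsigned key_bits   = 15;               // glyph ids up to 2^15
constexpr unsigned value_bits = 8;                // cached coverage index / class value
constexpr unsigned cache_bits = 7;                // 2^7 = 128 slots
constexpr unsigned slots         = 1u << cache_bits;
constexpr unsigned bits_per_slot = (key_bits - cache_bits) + value_bits;   // 8-bit tag + 8-bit value
static_assert (slots * bits_per_slot / 8 == 256, "agrees with the static_assert above");

int main () { return 0; }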
@@ -34,6 +34,7 @@
 #include "hb-open-type.hh"
 #include "hb-set.hh"
 #include "hb-bimap.hh"
+#include "hb-cache.hh"
 
 #include "OT/Layout/Common/Coverage.hh"
 #include "OT/Layout/types.hh"
@@ -2076,6 +2077,15 @@ struct ClassDef
     default:return 0;
     }
   }
+  unsigned int get_class (hb_codepoint_t glyph_id,
+                          hb_ot_lookup_cache_t *cache) const
+  {
+    unsigned klass;
+    if (cache && cache->get (glyph_id, &klass)) return klass;
+    klass = get_class (glyph_id);
+    if (cache) cache->set (glyph_id, klass);
+    return klass;
+  }
 
   unsigned get_population () const
   {
@@ -713,6 +713,7 @@ struct hb_ot_apply_context_t :
   recurse_func_t recurse_func = nullptr;
   const GDEF &gdef;
   const GDEF::accelerator_t &gdef_accel;
+  const hb_ot_layout_lookup_accelerator_t *lookup_accel = nullptr;
   const ItemVariationStore &var_store;
   ItemVariationStore::cache_t *var_store_cache;
   hb_set_digest_t digest;
@@ -899,6 +900,13 @@ struct hb_ot_apply_context_t :
   }
 };
 
+enum class hb_ot_lookup_cache_op_t
+{
+  CREATE,
+  ENTER,
+  LEAVE,
+  DESTROY,
+};
 
 struct hb_accelerate_subtables_context_t :
        hb_dispatch_context_t<hb_accelerate_subtables_context_t>
@@ -923,19 +931,23 @@ struct hb_accelerate_subtables_context_t :
   }
 
   template <typename T>
-  static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
-  template <typename T>
-  static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
+  static inline auto cache_func_ (void *p,
+                                  hb_ot_lookup_cache_op_t op,
+                                  hb_priority<1>) HB_RETURN (void *, T::cache_func (p, op) )
+  template <typename T=void>
+  static inline void * cache_func_ (void *p,
+                                    hb_ot_lookup_cache_op_t op HB_UNUSED,
+                                    hb_priority<0>) { return (void *) false; }
   template <typename Type>
-  static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter)
+  static inline void * cache_func_to (void *p,
+                                      hb_ot_lookup_cache_op_t op)
   {
-    const Type *typed_obj = (const Type *) obj;
-    return cache_func_ (typed_obj, c, enter, hb_prioritize);
+    return cache_func_<Type> (p, op, hb_prioritize);
   }
 #endif
 
   typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
-  typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);
+  typedef void * (*hb_cache_func_t) (void *p, hb_ot_lookup_cache_op_t op);
 
   struct hb_applicable_t
   {
@@ -972,11 +984,11 @@ struct hb_accelerate_subtables_context_t :
   }
   bool cache_enter (hb_ot_apply_context_t *c) const
   {
-    return cache_func (obj, c, true);
+    return (bool) cache_func (c, hb_ot_lookup_cache_op_t::ENTER);
   }
   void cache_leave (hb_ot_apply_context_t *c) const
   {
-    cache_func (obj, c, false);
+    cache_func (c, hb_ot_lookup_cache_op_t::LEAVE);
   }
 #endif
 
@@ -2623,24 +2635,33 @@ struct ContextFormat2_5
     unsigned c = (this+classDef).cost () * ruleSet.len;
     return c >= 4 ? c : 0;
   }
-  bool cache_func (hb_ot_apply_context_t *c, bool enter) const
+  static void * cache_func (void *p, hb_ot_lookup_cache_op_t op)
   {
-    if (enter)
+    switch (op)
     {
-      if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
-        return false;
-      auto &info = c->buffer->info;
-      unsigned count = c->buffer->len;
-      for (unsigned i = 0; i < count; i++)
-        info[i].syllable() = 255;
-      c->new_syllables = 255;
-      return true;
-    }
-    else
-    {
-      c->new_syllables = (unsigned) -1;
-      HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
-      return true;
+      case hb_ot_lookup_cache_op_t::CREATE:
+        return (void *) true;
+      case hb_ot_lookup_cache_op_t::ENTER:
+      {
+        hb_ot_apply_context_t *c = (hb_ot_apply_context_t *) p;
+        if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
+          return (void *) false;
+        auto &info = c->buffer->info;
+        unsigned count = c->buffer->len;
+        for (unsigned i = 0; i < count; i++)
+          info[i].syllable() = 255;
+        c->new_syllables = 255;
+        return (void *) true;
+      }
+      case hb_ot_lookup_cache_op_t::LEAVE:
+      {
+        hb_ot_apply_context_t *c = (hb_ot_apply_context_t *) p;
+        c->new_syllables = (unsigned) -1;
+        HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
+        return nullptr;
+      }
+      case hb_ot_lookup_cache_op_t::DESTROY:
+        return nullptr;
     }
   }
 
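Unlike the PairPos caches, ContextFormat2_5's cache_func allocates nothing: CREATE just returns a dummy non-null value, and ENTER repurposes the buffer's per-glyph syllable byte as the cache, initializing every glyph to 255 to mean "class not computed yet". The standalone sketch below shows that spare-byte memo idea with hypothetical names; since 255 is reserved as the sentinel, cached class values have to stay below it.

// Hypothetical sketch of the "spare per-item byte as memo" idea used above:
// every item carries a one-byte field initialized to 255 ("not computed"),
// and the class is computed lazily the first time it is needed.
#include <cstdint>
#include <cstdio>
#include <vector>

struct item_t
{
  uint32_t codepoint;
  uint8_t  memo;          // 255 = class not computed yet
};

static uint8_t compute_class (uint32_t cp) { return (uint8_t) (cp % 3); }   // stand-in for a ClassDef lookup

static uint8_t class_of (item_t &item)
{
  if (item.memo == 255)                 // first touch: do the real lookup once
    item.memo = compute_class (item.codepoint);
  return item.memo;                     // later touches are free
}

int main ()
{
  std::vector<item_t> buffer = {{65, 255}, {66, 255}, {65, 255}};
  for (auto &item : buffer)
    printf ("%u -> %u\n", (unsigned) item.codepoint, (unsigned) class_of (item));
}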
@@ -3864,24 +3885,33 @@ struct ChainContextFormat2_5
     unsigned c = (this+lookaheadClassDef).cost () * ruleSet.len;
     return c >= 4 ? c : 0;
   }
-  bool cache_func (hb_ot_apply_context_t *c, bool enter) const
+  static void * cache_func (void *p, hb_ot_lookup_cache_op_t op)
   {
-    if (enter)
+    switch (op)
    {
-      if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
-        return false;
-      auto &info = c->buffer->info;
-      unsigned count = c->buffer->len;
-      for (unsigned i = 0; i < count; i++)
-        info[i].syllable() = 255;
-      c->new_syllables = 255;
-      return true;
-    }
-    else
-    {
-      c->new_syllables = (unsigned) -1;
-      HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
-      return true;
+      case hb_ot_lookup_cache_op_t::CREATE:
+        return (void *) true;
+      case hb_ot_lookup_cache_op_t::ENTER:
+      {
+        hb_ot_apply_context_t *c = (hb_ot_apply_context_t *) p;
+        if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
+          return (void *) false;
+        auto &info = c->buffer->info;
+        unsigned count = c->buffer->len;
+        for (unsigned i = 0; i < count; i++)
+          info[i].syllable() = 255;
+        c->new_syllables = 255;
+        return (void *) true;
+      }
+      case hb_ot_lookup_cache_op_t::LEAVE:
+      {
+        hb_ot_apply_context_t *c = (hb_ot_apply_context_t *) p;
+        c->new_syllables = (unsigned) -1;
+        HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
+        return nullptr;
+      }
+      case hb_ot_lookup_cache_op_t::DESTROY:
+        return nullptr;
     }
   }
 
@@ -4409,6 +4439,14 @@ struct hb_ot_layout_lookup_accelerator_t
 
 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
     thiz->cache_user_idx = c_accelerate_subtables.cache_user_idx;
+
+    if (thiz->cache_user_idx != (unsigned) -1)
+    {
+      thiz->cache = thiz->subtables[thiz->cache_user_idx].cache_func (nullptr, hb_ot_lookup_cache_op_t::CREATE);
+      if (!thiz->cache)
+        thiz->cache_user_idx = (unsigned) -1;
+    }
+
     for (unsigned i = 0; i < count; i++)
       if (i != thiz->cache_user_idx)
         thiz->subtables[i].apply_cached_func = thiz->subtables[i].apply_func;
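The loop above leaves a distinct cached entry point only on the one subtable that owns the cache (cache_user_idx); every other subtable gets its plain apply installed into the cached slot, so the caller can always go through apply_cached_func when caching is enabled. A minimal sketch of that wiring, with hypothetical names, is below.

// Hypothetical sketch of the dispatch idea above: each subtable carries an
// apply and an apply_cached function pointer; only the cache-owning subtable
// keeps a distinct cached entry point, the rest are aliased to plain apply.
#include <cstdio>

struct subtable_t
{
  bool (*apply_func) (int glyph);
  bool (*apply_cached_func) (int glyph);
};

static bool apply_plain (int)  { puts ("plain apply");  return false; }
static bool apply_cached (int) { puts ("cached apply"); return false; }

int main ()
{
  subtable_t subtables[3] = {{apply_plain, apply_cached},
                             {apply_plain, apply_cached},
                             {apply_plain, apply_cached}};
  unsigned cache_user_idx = 1;                        // the subtable that owns the cache
  for (unsigned i = 0; i < 3; i++)
    if (i != cache_user_idx)
      subtables[i].apply_cached_func = subtables[i].apply_func;

  for (auto &s : subtables) s.apply_cached_func (42); // cached path taken only for index 1
}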
@@ -4417,6 +4455,17 @@ struct hb_ot_layout_lookup_accelerator_t
     return thiz;
   }
 
+  void fini ()
+  {
+#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
+    if (cache)
+    {
+      assert (cache_user_idx != (unsigned) -1);
+      subtables[cache_user_idx].cache_func (cache, hb_ot_lookup_cache_op_t::DESTROY);
+    }
+#endif
+  }
+
   bool may_have (hb_codepoint_t g) const
   { return digest.may_have (g); }
 
@@ -4425,6 +4474,7 @@ struct hb_ot_layout_lookup_accelerator_t
 #endif
   bool apply (hb_ot_apply_context_t *c, unsigned subtables_count, bool use_cache) const
   {
+    c->lookup_accel = this;
 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
     if (use_cache)
     {
@@ -4464,6 +4514,7 @@ struct hb_ot_layout_lookup_accelerator_t
 
 
   hb_set_digest_t digest;
+  void *cache = nullptr;
   private:
 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
   unsigned cache_user_idx = (unsigned) -1;
@@ -4852,7 +4903,11 @@ struct GSUBGPOS
   ~accelerator_t ()
   {
     for (unsigned int i = 0; i < this->lookup_count; i++)
+    {
+      if (this->accels[i])
+        this->accels[i]->fini ();
       hb_free (this->accels[i]);
+    }
     hb_free (this->accels);
     this->table.destroy ();
   }