mirror of
https://github.com/harfbuzz/harfbuzz.git
synced 2025-04-15 01:18:13 +00:00
[instancer] add serialize() for VariationStore
Input region_list and encoding_rows are from item_variations_t
This commit is contained in:
parent
2326879229
commit
e4e1ac44f0
1 changed files with 106 additions and 0 deletions
|
@ -2735,6 +2735,81 @@ struct VarData
|
|||
get_row_size ()));
|
||||
}
|
||||
|
||||
/* Serialize this VarData subtable from raw delta rows.
 * @c         serializer context; the table is extended in place.
 * @has_long  if true, "word" deltas are 32-bit (LONG_WORDS) and the
 *            non-word threshold widens from int8 to int16 range.
 * @rows      one delta row per item; every row must have the same
 *            length (one delta per region/column).
 * Returns false on allocation/serialization failure or empty input.
 *
 * Regions whose column is entirely zero are dropped; the remaining
 * regions are reordered so that all word-sized columns come first,
 * as required by the VarData format. */
bool serialize (hb_serialize_context_t *c,
                bool has_long,
                const hb_vector_t<const hb_vector_t<int>*>& rows)
{
  TRACE_SERIALIZE (this);
  if (unlikely (!c->extend_min (this))) return_trace (false);
  unsigned row_count = rows.length;
  /* Guard: with no rows, rows[0] below would read out of bounds. */
  if (unlikely (!row_count)) return_trace (false);
  itemCount = row_count;

  /* Classify each region (column) by the widest delta it holds:
   * kZero (all zeros, droppable), kNonWord (fits the narrow size),
   * kWord (needs the wide size). */
  int min_threshold = has_long ? -65536 : -128;
  int max_threshold = has_long ? +65535 : +127;
  enum delta_size_t { kZero=0, kNonWord, kWord };
  hb_vector_t<delta_size_t> delta_sz;
  unsigned num_regions = rows[0]->length;
  if (!delta_sz.resize (num_regions))
    return_trace (false);

  unsigned word_count = 0;
  for (unsigned r = 0; r < num_regions; r++)
  {
    for (unsigned i = 0; i < row_count; i++)
    {
      int delta = rows[i]->arrayZ[r];
      if (delta < min_threshold || delta > max_threshold)
      {
        delta_sz[r] = kWord;
        word_count++;
        break;   /* column is word-sized; no need to scan further */
      }
      else if (delta != 0)
      {
        delta_sz[r] = kNonWord;
      }
    }
  }

  /* Reorder regions: words first, then non-words; all-zero columns
   * are skipped entirely.  ri_map: new column index -> old column. */
  unsigned word_index = 0;
  unsigned non_word_index = word_count;
  hb_map_t ri_map;
  for (unsigned r = 0; r < num_regions; r++)
  {
    if (!delta_sz[r]) continue;   /* drop all-zero columns */
    unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
    if (!ri_map.set (new_r, r))
      return_trace (false);
  }

  /* High bit of wordSizeCount is the LONG_WORDS flag. */
  wordSizeCount = word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);

  unsigned ri_count = ri_map.get_population ();
  regionIndices.len = ri_count;
  if (unlikely (!c->extend (this))) return_trace (false);

  /* Region indices in the new (reordered) column order. */
  for (unsigned r = 0; r < ri_count; r++)
  {
    hb_codepoint_t *idx;
    if (!ri_map.has (r, &idx))
      return_trace (false);
    regionIndices[r] = *idx;
  }

  /* Emit the delta matrix row by row, remapping each output column
   * back to its source column via ri_map. */
  HBUINT8 *delta_bytes = get_delta_bytes ();
  unsigned row_size = get_row_size ();
  for (unsigned int i = 0; i < row_count; i++)
  {
    for (unsigned int r = 0; r < ri_count; r++)
    {
      int delta = rows[i]->arrayZ[ri_map[r]];
      set_item_delta_fast (i, r, delta, delta_bytes, row_size);
    }
  }
  return_trace (true);
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
const VarData *src,
|
||||
const hb_inc_bimap_t &inner_map,
|
||||
|
@ -2894,6 +2969,7 @@ struct VarData
|
|||
get_row_size ());
|
||||
}
|
||||
|
||||
protected:
|
||||
void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
|
||||
HBUINT8 *delta_bytes, unsigned row_size)
|
||||
{
|
||||
|
@ -3007,6 +3083,36 @@ struct VariationStore
|
|||
dataSets.sanitize (c, this));
|
||||
}
|
||||
|
||||
/* Serialize a format-1 VariationStore (Item Variation Store).
 * @c                  serializer context.
 * @has_long           forwarded to each VarData: use 32-bit word deltas.
 * @axis_tags          axis order used to serialize the region list.
 * @region_list        regions (axis-tag -> Triple maps) for the
 *                     VariationRegionList subtable.
 * @vardata_encodings  one delta_row_encoding_t per VarData subtable;
 *                     its `items` rows are serialized via VarData.
 * Returns false under HB_NO_VAR, on empty input, or on any
 * serialization failure.
 * NOTE(review): statement order matters — extend_min before field
 * writes, and subtable serialization order fixes object packing. */
bool serialize (hb_serialize_context_t *c,
                bool has_long,
                const hb_vector_t<hb_tag_t>& axis_tags,
                const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& region_list,
                const hb_vector_t<delta_row_encoding_t>& vardata_encodings)
{
  TRACE_SERIALIZE (this);
#ifdef HB_NO_VAR
  return_trace (false);
#endif
  if (unlikely (!c->extend_min (this))) return_trace (false);

  format = 1;
  /* Serialize the region list as a separate child object and link it. */
  if (!regions.serialize_serialize (c, axis_tags, region_list))
    return_trace (false);

  unsigned num_var_data = vardata_encodings.length;
  /* An empty store is not useful; treat it as a failure. */
  if (!num_var_data) return_trace (false);
  /* check_assign guards the HBUINT16 count against overflow. */
  if (unlikely (!c->check_assign (dataSets.len, num_var_data,
                                  HB_SERIALIZE_ERROR_INT_OVERFLOW)))
    return_trace (false);

  /* Reserve the offset array, then serialize each VarData child. */
  if (unlikely (!c->extend (dataSets))) return_trace (false);
  for (unsigned i = 0; i < num_var_data; i++)
    if (!dataSets[i].serialize_serialize (c, has_long, vardata_encodings[i].items))
      return_trace (false);

  return_trace (true);
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
const VariationStore *src,
|
||||
const hb_array_t <const hb_inc_bimap_t> &inner_maps)
|
||||
|
|
Loading…
Add table
Reference in a new issue