Diffstat (limited to 'src/3rdparty/harfbuzz-ng/src/hb-ot-var-hvar-table.hh')
-rw-r--r--  src/3rdparty/harfbuzz-ng/src/hb-ot-var-hvar-table.hh  330
1 file changed, 160 insertions(+), 170 deletions(-)
diff --git a/src/3rdparty/harfbuzz-ng/src/hb-ot-var-hvar-table.hh b/src/3rdparty/harfbuzz-ng/src/hb-ot-var-hvar-table.hh
index fdcc88d674..53a4642d38 100644
--- a/src/3rdparty/harfbuzz-ng/src/hb-ot-var-hvar-table.hh
+++ b/src/3rdparty/harfbuzz-ng/src/hb-ot-var-hvar-table.hh
@@ -28,97 +28,11 @@
#define HB_OT_VAR_HVAR_TABLE_HH
#include "hb-ot-layout-common.hh"
-
+#include "hb-ot-var-common.hh"
namespace OT {
-struct DeltaSetIndexMap
-{
- bool sanitize (hb_sanitize_context_t *c) const
- {
- TRACE_SANITIZE (this);
- return_trace (c->check_struct (this) &&
- c->check_range (mapDataZ.arrayZ,
- mapCount,
- get_width ()));
- }
-
- template <typename T>
- bool serialize (hb_serialize_context_t *c, const T &plan)
- {
- unsigned int width = plan.get_width ();
- unsigned int inner_bit_count = plan.get_inner_bit_count ();
- const hb_array_t<const unsigned int> output_map = plan.get_output_map ();
-
- TRACE_SERIALIZE (this);
- if (unlikely (output_map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
- return_trace (false);
- if (unlikely (!c->extend_min (*this))) return_trace (false);
-
- format = ((width-1)<<4)|(inner_bit_count-1);
- mapCount = output_map.length;
- HBUINT8 *p = c->allocate_size<HBUINT8> (width * output_map.length);
- if (unlikely (!p)) return_trace (false);
- for (unsigned int i = 0; i < output_map.length; i++)
- {
- unsigned int v = output_map[i];
- unsigned int outer = v >> 16;
- unsigned int inner = v & 0xFFFF;
- unsigned int u = (outer << inner_bit_count) | inner;
- for (unsigned int w = width; w > 0;)
- {
- p[--w] = u;
- u >>= 8;
- }
- p += width;
- }
- return_trace (true);
- }
-
- unsigned int map (unsigned int v) const /* Returns 16.16 outer.inner. */
- {
- /* If count is zero, pass value unchanged. This takes
- * care of direct mapping for advance map. */
- if (!mapCount)
- return v;
-
- if (v >= mapCount)
- v = mapCount - 1;
-
- unsigned int u = 0;
- { /* Fetch it. */
- unsigned int w = get_width ();
- const HBUINT8 *p = mapDataZ.arrayZ + w * v;
- for (; w; w--)
- u = (u << 8) + *p++;
- }
-
- { /* Repack it. */
- unsigned int n = get_inner_bit_count ();
- unsigned int outer = u >> n;
- unsigned int inner = u & ((1 << n) - 1);
- u = (outer<<16) | inner;
- }
-
- return u;
- }
-
- unsigned int get_map_count () const { return mapCount; }
- unsigned int get_width () const { return ((format >> 4) & 3) + 1; }
- unsigned int get_inner_bit_count () const { return (format & 0xF) + 1; }
-
- protected:
- HBUINT16 format; /* A packed field that describes the compressed
- * representation of delta-set indices. */
- HBUINT16 mapCount; /* The number of mapping entries. */
- UnsizedArrayOf<HBUINT8>
- mapDataZ; /* The delta-set index mapping data. */
-
- public:
- DEFINE_SIZE_ARRAY (4, mapDataZ);
-};
-
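Note: the DeltaSetIndexMap struct removed above is not dropped from the library; given the new #include "hb-ot-var-common.hh" at the top of the file, it presumably moves to that shared header so the other variation tables can reuse it. For reference, a minimal standalone sketch (not HarfBuzz API; names are illustrative) of the byte packing its removed serialize()/map() pair implement — each map entry is `width` big-endian bytes holding (outer << inner_bit_count) | inner, and map() repacks that into 16.16 outer.inner; serialize() rejects widths over 4 bytes and inner bit counts over 16 because format stores them as ((width-1)<<4)|(inner_bit_count-1):

  #include <cstdint>

  // What serialize() writes for one entry (before splitting into `width` bytes).
  static uint32_t pack_entry (uint32_t varidx, unsigned inner_bit_count)
  {
    uint32_t outer = varidx >> 16;
    uint32_t inner = varidx & 0xFFFF;
    return (outer << inner_bit_count) | inner;
  }

  // What map() does with the bytes it fetched: repack into 16.16 outer.inner.
  static uint32_t unpack_entry (uint32_t stored, unsigned inner_bit_count)
  {
    uint32_t outer = stored >> inner_bit_count;
    uint32_t inner = stored & ((1u << inner_bit_count) - 1);
    return (outer << 16) | inner;
  }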
struct index_map_subset_plan_t
{
enum index_map_index_t {
@@ -131,7 +45,8 @@ struct index_map_subset_plan_t
void init (const DeltaSetIndexMap &index_map,
hb_inc_bimap_t &outer_map,
hb_vector_t<hb_set_t *> &inner_sets,
- const hb_subset_plan_t *plan)
+ const hb_subset_plan_t *plan,
+ bool bypass_empty = true)
{
map_count = 0;
outer_bit_count = 0;
@@ -139,55 +54,51 @@ struct index_map_subset_plan_t
max_inners.init ();
output_map.init ();
- if (&index_map == &Null (DeltaSetIndexMap)) return;
+ if (bypass_empty && !index_map.get_map_count ()) return;
unsigned int last_val = (unsigned int)-1;
- hb_codepoint_t last_gid = (hb_codepoint_t)-1;
- hb_codepoint_t gid = (hb_codepoint_t) hb_min (index_map.get_map_count (), plan->num_output_glyphs ());
+ hb_codepoint_t last_gid = HB_CODEPOINT_INVALID;
outer_bit_count = (index_map.get_width () * 8) - index_map.get_inner_bit_count ();
max_inners.resize (inner_sets.length);
for (unsigned i = 0; i < inner_sets.length; i++) max_inners[i] = 0;
/* Search backwards for a map value different from the last map value */
- for (; gid > 0; gid--)
+ auto &new_to_old_gid_list = plan->new_to_old_gid_list;
+ unsigned count = new_to_old_gid_list.length;
+ for (unsigned j = count; j; j--)
{
- hb_codepoint_t old_gid;
- if (!plan->old_gid_for_new_gid (gid - 1, &old_gid))
- {
- if (last_gid == (hb_codepoint_t) -1)
- continue;
- else
- break;
- }
+ hb_codepoint_t gid = new_to_old_gid_list.arrayZ[j - 1].first;
+ hb_codepoint_t old_gid = new_to_old_gid_list.arrayZ[j - 1].second;
unsigned int v = index_map.map (old_gid);
- if (last_gid == (hb_codepoint_t) -1)
+ if (last_gid == HB_CODEPOINT_INVALID)
{
last_val = v;
last_gid = gid;
continue;
}
- if (v != last_val) break;
+ if (v != last_val)
+ break;
last_gid = gid;
}
if (unlikely (last_gid == (hb_codepoint_t)-1)) return;
- map_count = last_gid;
- for (gid = 0; gid < map_count; gid++)
+ map_count = last_gid + 1;
+ for (auto _ : plan->new_to_old_gid_list)
{
- hb_codepoint_t old_gid;
- if (plan->old_gid_for_new_gid (gid, &old_gid))
- {
- unsigned int v = index_map.map (old_gid);
- unsigned int outer = v >> 16;
- unsigned int inner = v & 0xFFFF;
- outer_map.add (outer);
- if (inner > max_inners[outer]) max_inners[outer] = inner;
- if (outer >= inner_sets.length) return;
- inner_sets[outer]->add (inner);
- }
+ hb_codepoint_t gid = _.first;
+ if (gid >= map_count) break;
+
+ hb_codepoint_t old_gid = _.second;
+ unsigned int v = index_map.map (old_gid);
+ unsigned int outer = v >> 16;
+ unsigned int inner = v & 0xFFFF;
+ outer_map.add (outer);
+ if (inner > max_inners[outer]) max_inners[outer] = inner;
+ if (outer >= inner_sets.length) return;
+ inner_sets[outer]->add (inner);
}
}
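A worked example of the tail trimming in init() above (assuming, for illustration, that the plan keeps new gids 0..6 whose old gids map to the values shown):

  mapped value per new gid:  [5, 5, 9, 9, 3, 3, 3]
  backward scan:             gid 6 -> 3, gid 5 -> 3, gid 4 -> 3, gid 3 -> 9 (differs, stop)
  result:                    last_gid = 4, map_count = 5

The trailing run of identical values is dropped because DeltaSetIndexMap::map() clamps any gid >= mapCount to the last entry, so gids 5 and 6 still resolve to value 3.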
@@ -202,8 +113,6 @@ struct index_map_subset_plan_t
const hb_vector_t<hb_inc_bimap_t> &inner_maps,
const hb_subset_plan_t *plan)
{
- if (input_map == &Null (DeltaSetIndexMap)) return;
-
for (unsigned int i = 0; i < max_inners.length; i++)
{
if (inner_maps[i].get_population () == 0) continue;
@@ -211,19 +120,48 @@ struct index_map_subset_plan_t
if (bit_count > inner_bit_count) inner_bit_count = bit_count;
}
- output_map.resize (map_count);
- for (hb_codepoint_t gid = 0; gid < output_map.length; gid++)
+ if (unlikely (!output_map.resize (map_count))) return;
+ for (const auto &_ : plan->new_to_old_gid_list)
{
- hb_codepoint_t old_gid;
- if (plan->old_gid_for_new_gid (gid, &old_gid))
- {
- unsigned int v = input_map->map (old_gid);
- unsigned int outer = v >> 16;
- output_map[gid] = (outer_map[outer] << 16) | (inner_maps[outer][v & 0xFFFF]);
- }
- else
- output_map[gid] = 0; /* Map unused glyph to outer/inner=0/0 */
+ hb_codepoint_t new_gid = _.first;
+ hb_codepoint_t old_gid = _.second;
+
+ if (unlikely (new_gid >= map_count)) break;
+
+ uint32_t v = input_map->map (old_gid);
+ unsigned int outer = v >> 16;
+ output_map.arrayZ[new_gid] = (outer_map[outer] << 16) | (inner_maps[outer][v & 0xFFFF]);
+ }
+ }
+
+ bool remap_after_instantiation (const hb_subset_plan_t *plan,
+ const hb_map_t& varidx_map)
+ {
+ /* recalculate bit_count after remapping */
+ outer_bit_count = 1;
+ inner_bit_count = 1;
+
+ for (const auto &_ : plan->new_to_old_gid_list)
+ {
+ hb_codepoint_t new_gid = _.first;
+ if (unlikely (new_gid >= map_count)) break;
+
+ uint32_t v = output_map.arrayZ[new_gid];
+ uint32_t *new_varidx;
+ if (!varidx_map.has (v, &new_varidx))
+ return false;
+
+ output_map.arrayZ[new_gid] = *new_varidx;
+
+ unsigned outer = (*new_varidx) >> 16;
+ unsigned bit_count = (outer == 0) ? 1 : hb_bit_storage (outer);
+ outer_bit_count = hb_max (bit_count, outer_bit_count);
+
+ unsigned inner = (*new_varidx) & 0xFFFF;
+ bit_count = (inner == 0) ? 1 : hb_bit_storage (inner);
+ inner_bit_count = hb_max (bit_count, inner_bit_count);
}
+ return true;
}
unsigned int get_inner_bit_count () const { return inner_bit_count; }
@@ -234,14 +172,14 @@ struct index_map_subset_plan_t
{ return (map_count? (DeltaSetIndexMap::min_size + get_width () * map_count): 0); }
bool is_identity () const { return get_output_map ().length == 0; }
- hb_array_t<const unsigned int> get_output_map () const { return output_map.as_array (); }
+ hb_array_t<const uint32_t> get_output_map () const { return output_map.as_array (); }
protected:
unsigned int map_count;
hb_vector_t<unsigned int> max_inners;
unsigned int outer_bit_count;
unsigned int inner_bit_count;
- hb_vector_t<unsigned int> output_map;
+ hb_vector_t<uint32_t> output_map;
};
struct hvarvvar_subset_plan_t
@@ -263,23 +201,16 @@ struct hvarvvar_subset_plan_t
inner_maps.resize (var_store->get_sub_table_count ());
- for (unsigned int i = 0; i < inner_maps.length; i++)
- inner_maps[i].init ();
-
if (unlikely (!index_map_plans.length || !inner_sets.length || !inner_maps.length)) return;
bool retain_adv_map = false;
- index_map_plans[0].init (*index_maps[0], outer_map, inner_sets, plan);
+ index_map_plans[0].init (*index_maps[0], outer_map, inner_sets, plan, false);
if (index_maps[0] == &Null (DeltaSetIndexMap))
{
- retain_adv_map = plan->retain_gids;
+ retain_adv_map = plan->flags & HB_SUBSET_FLAGS_RETAIN_GIDS;
outer_map.add (0);
- for (hb_codepoint_t gid = 0; gid < plan->num_output_glyphs (); gid++)
- {
- hb_codepoint_t old_gid;
- if (plan->old_gid_for_new_gid (gid, &old_gid))
- inner_sets[0]->add (old_gid);
- }
+ for (hb_codepoint_t old_gid : plan->glyphset()->iter())
+ inner_sets[0]->add (old_gid);
hb_set_union (adv_set, inner_sets[0]);
}
@@ -290,11 +221,11 @@ struct hvarvvar_subset_plan_t
if (retain_adv_map)
{
- for (hb_codepoint_t gid = 0; gid < plan->num_output_glyphs (); gid++)
- if (inner_sets[0]->has (gid))
- inner_maps[0].add (gid);
- else
- inner_maps[0].skip ();
+ for (const auto &_ : plan->new_to_old_gid_list)
+ {
+ hb_codepoint_t old_gid = _.second;
+ inner_maps[0].add (old_gid);
+ }
}
else
{
@@ -310,13 +241,23 @@ struct hvarvvar_subset_plan_t
index_map_plans[i].remap (index_maps[i], outer_map, inner_maps, plan);
}
+ /* remap */
+ bool remap_index_map_plans (const hb_subset_plan_t *plan,
+ const hb_map_t& varidx_map)
+ {
+ for (unsigned i = 0; i < index_map_plans.length; i++)
+ if (!index_map_plans[i].remap_after_instantiation (plan, varidx_map))
+ return false;
+ return true;
+ }
+
void fini ()
{
for (unsigned int i = 0; i < inner_sets.length; i++)
hb_set_destroy (inner_sets[i]);
hb_set_destroy (adv_set);
- inner_maps.fini_deep ();
- index_map_plans.fini_deep ();
+ inner_maps.fini ();
+ index_map_plans.fini ();
}
hb_inc_bimap_t outer_map;
@@ -347,6 +288,7 @@ struct HVARVVAR
{
TRACE_SANITIZE (this);
return_trace (version.sanitize (c) &&
+ hb_barrier () &&
likely (version.major == 1) &&
varStore.sanitize (c, this) &&
advMap.sanitize (c, this) &&
@@ -354,6 +296,9 @@ struct HVARVVAR
rsbMap.sanitize (c, this));
}
+ const VariationStore& get_var_store () const
+ { return this+varStore; }
+
void listup_index_maps (hb_vector_t<const DeltaSetIndexMap *> &index_maps) const
{
index_maps.push (&(this+advMap));
@@ -367,15 +312,15 @@ struct HVARVVAR
TRACE_SERIALIZE (this);
if (im_plans[index_map_subset_plan_t::ADV_INDEX].is_identity ())
advMap = 0;
- else if (unlikely (!advMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::ADV_INDEX])))
+ else if (unlikely (!advMap.serialize_serialize (c, im_plans[index_map_subset_plan_t::ADV_INDEX])))
return_trace (false);
if (im_plans[index_map_subset_plan_t::LSB_INDEX].is_identity ())
lsbMap = 0;
- else if (unlikely (!lsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::LSB_INDEX])))
+ else if (unlikely (!lsbMap.serialize_serialize (c, im_plans[index_map_subset_plan_t::LSB_INDEX])))
return_trace (false);
if (im_plans[index_map_subset_plan_t::RSB_INDEX].is_identity ())
rsbMap = 0;
- else if (unlikely (!rsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::RSB_INDEX])))
+ else if (unlikely (!rsbMap.serialize_serialize (c, im_plans[index_map_subset_plan_t::RSB_INDEX])))
return_trace (false);
return_trace (true);
@@ -385,6 +330,9 @@ struct HVARVVAR
bool _subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
+ if (c->plan->all_axes_pinned)
+ return_trace (false);
+
hvarvvar_subset_plan_t hvar_plan;
hb_vector_t<const DeltaSetIndexMap *>
index_maps;
@@ -398,40 +346,72 @@ struct HVARVVAR
out->version.major = 1;
out->version.minor = 0;
- if (unlikely (!out->varStore.serialize (c->serializer, out)
- .serialize (c->serializer, hvar_plan.var_store, hvar_plan.inner_maps.as_array ())))
+ if (c->plan->normalized_coords)
+ {
+ item_variations_t item_vars;
+ if (!item_vars.instantiate (this+varStore, c->plan,
+ advMap == 0 ? false : true,
+ false, /* use_no_variation_idx = false */
+ hvar_plan.inner_maps.as_array ()))
+ return_trace (false);
+
+ if (!out->varStore.serialize_serialize (c->serializer,
+ item_vars.has_long_word (),
+ c->plan->axis_tags,
+ item_vars.get_region_list (),
+ item_vars.get_vardata_encodings ()))
+ return_trace (false);
+
+ /* if varstore is optimized, remap output_map */
+ if (advMap)
+ {
+ if (!hvar_plan.remap_index_map_plans (c->plan, item_vars.get_varidx_map ()))
+ return_trace (false);
+ }
+ }
+ else
+ {
+ if (unlikely (!out->varStore
+ .serialize_serialize (c->serializer,
+ hvar_plan.var_store,
+ hvar_plan.inner_maps.as_array ())))
return_trace (false);
+ }
return_trace (out->T::serialize_index_maps (c->serializer,
hvar_plan.index_map_plans.as_array ()));
}
- float get_advance_var (hb_codepoint_t glyph, hb_font_t *font) const
+ float get_advance_delta_unscaled (hb_codepoint_t glyph,
+ const int *coords, unsigned int coord_count,
+ VariationStore::cache_t *store_cache = nullptr) const
{
- unsigned int varidx = (this+advMap).map (glyph);
- return (this+varStore).get_delta (varidx, font->coords, font->num_coords);
+ uint32_t varidx = (this+advMap).map (glyph);
+ return (this+varStore).get_delta (varidx,
+ coords, coord_count,
+ store_cache);
}
- float get_side_bearing_var (hb_codepoint_t glyph,
- const int *coords, unsigned int coord_count) const
+ bool get_lsb_delta_unscaled (hb_codepoint_t glyph,
+ const int *coords, unsigned int coord_count,
+ float *lsb) const
{
- if (!has_side_bearing_deltas ()) return 0.f;
- unsigned int varidx = (this+lsbMap).map (glyph);
- return (this+varStore).get_delta (varidx, coords, coord_count);
+ if (!lsbMap) return false;
+ uint32_t varidx = (this+lsbMap).map (glyph);
+ *lsb = (this+varStore).get_delta (varidx, coords, coord_count);
+ return true;
}
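With this change the advance/side-bearing getters no longer take an hb_font_t: the caller supplies the normalized coordinates directly (the removed get_advance_var read them from font->coords / font->num_coords), and the returned deltas are unscaled font units. A hedged caller sketch, assuming a glyph id, the font's normalized coords, and a reference `hvar` to the parsed HVAR table (how that reference and the optional VariationStore cache are obtained is outside this diff):

  // store_cache defaults to nullptr, so it can be omitted here.
  float adv_delta = hvar.get_advance_delta_unscaled (glyph,
                                                     font->coords, font->num_coords);
  float lsb_delta = 0.f;
  bool has_lsb_delta = hvar.get_lsb_delta_unscaled (glyph,
                                                    font->coords, font->num_coords,
                                                    &lsb_delta);
  // Both deltas are unscaled font units; scaling to font space and the fallback
  // when has_lsb_delta is false are left to the caller.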
- bool has_side_bearing_deltas () const { return lsbMap && rsbMap; }
-
- protected:
+ public:
FixedVersion<>version; /* Version of the metrics variation table
* initially set to 0x00010000u */
- LOffsetTo<VariationStore>
+ Offset32To<VariationStore>
varStore; /* Offset to item variation store table. */
- LOffsetTo<DeltaSetIndexMap>
+ Offset32To<DeltaSetIndexMap>
advMap; /* Offset to advance var-idx mapping. */
- LOffsetTo<DeltaSetIndexMap>
+ Offset32To<DeltaSetIndexMap>
lsbMap; /* Offset to lsb/tsb var-idx mapping. */
- LOffsetTo<DeltaSetIndexMap>
+ Offset32To<DeltaSetIndexMap>
rsbMap; /* Offset to rsb/bsb var-idx mapping. */
public:
@@ -466,7 +446,7 @@ struct VVAR : HVARVVAR {
return_trace (false);
if (!im_plans[index_map_subset_plan_t::VORG_INDEX].get_map_count ())
vorgMap = 0;
- else if (unlikely (!vorgMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::VORG_INDEX])))
+ else if (unlikely (!vorgMap.serialize_serialize (c, im_plans[index_map_subset_plan_t::VORG_INDEX])))
return_trace (false);
return_trace (true);
@@ -474,8 +454,18 @@ struct VVAR : HVARVVAR {
bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset<VVAR> (c); }
+ bool get_vorg_delta_unscaled (hb_codepoint_t glyph,
+ const int *coords, unsigned int coord_count,
+ float *delta) const
+ {
+ if (!vorgMap) return false;
+ uint32_t varidx = (this+vorgMap).map (glyph);
+ *delta = (this+varStore).get_delta (varidx, coords, coord_count);
+ return true;
+ }
+
protected:
- LOffsetTo<DeltaSetIndexMap>
+ Offset32To<DeltaSetIndexMap>
vorgMap; /* Offset to vertical-origin var-idx mapping. */
public: