Diffstat (limited to 'src/3rdparty/harfbuzz-ng/src/hb-ot-var-gvar-table.hh')
-rw-r--r--  src/3rdparty/harfbuzz-ng/src/hb-ot-var-gvar-table.hh | 467
1 file changed, 120 insertions(+), 347 deletions(-)
diff --git a/src/3rdparty/harfbuzz-ng/src/hb-ot-var-gvar-table.hh b/src/3rdparty/harfbuzz-ng/src/hb-ot-var-gvar-table.hh
index 1eae6a3532..4752a08fbe 100644
--- a/src/3rdparty/harfbuzz-ng/src/hb-ot-var-gvar-table.hh
+++ b/src/3rdparty/harfbuzz-ng/src/hb-ot-var-gvar-table.hh
@@ -29,6 +29,7 @@
#define HB_OT_VAR_GVAR_TABLE_HH
#include "hb-open-type.hh"
+#include "hb-ot-var-common.hh"
/*
* gvar -- Glyph Variation Table
@@ -90,311 +91,8 @@ struct contour_point_vector_t : hb_vector_t<contour_point_t>
}
};
-/* https://docs.microsoft.com/en-us/typography/opentype/spec/otvarcommonformats#tuplevariationheader */
-struct TupleVariationHeader
-{
- unsigned get_size (unsigned axis_count) const
- { return min_size + get_all_tuples (axis_count).get_size (); }
-
- unsigned get_data_size () const { return varDataSize; }
-
- const TupleVariationHeader &get_next (unsigned axis_count) const
- { return StructAtOffset<TupleVariationHeader> (this, get_size (axis_count)); }
-
- float calculate_scalar (hb_array_t<int> coords, unsigned int coord_count,
- const hb_array_t<const F2DOT14> shared_tuples) const
- {
- hb_array_t<const F2DOT14> peak_tuple;
-
- if (has_peak ())
- peak_tuple = get_peak_tuple (coord_count);
- else
- {
- unsigned int index = get_index ();
- if (unlikely (index * coord_count >= shared_tuples.length))
- return 0.f;
- peak_tuple = shared_tuples.sub_array (coord_count * index, coord_count);
- }
-
- hb_array_t<const F2DOT14> start_tuple;
- hb_array_t<const F2DOT14> end_tuple;
- if (has_intermediate ())
- {
- start_tuple = get_start_tuple (coord_count);
- end_tuple = get_end_tuple (coord_count);
- }
-
- float scalar = 1.f;
- for (unsigned int i = 0; i < coord_count; i++)
- {
- int v = coords[i];
- int peak = peak_tuple[i].to_int ();
- if (!peak || v == peak) continue;
-
- if (has_intermediate ())
- {
- int start = start_tuple[i].to_int ();
- int end = end_tuple[i].to_int ();
- if (unlikely (start > peak || peak > end ||
- (start < 0 && end > 0 && peak))) continue;
- if (v < start || v > end) return 0.f;
- if (v < peak)
- { if (peak != start) scalar *= (float) (v - start) / (peak - start); }
- else
- { if (peak != end) scalar *= (float) (end - v) / (end - peak); }
- }
- else if (!v || v < hb_min (0, peak) || v > hb_max (0, peak)) return 0.f;
- else
- scalar *= (float) v / peak;
- }
- return scalar;
- }
-
- bool has_peak () const { return tupleIndex & TuppleIndex::EmbeddedPeakTuple; }
- bool has_intermediate () const { return tupleIndex & TuppleIndex::IntermediateRegion; }
- bool has_private_points () const { return tupleIndex & TuppleIndex::PrivatePointNumbers; }
- unsigned get_index () const { return tupleIndex & TuppleIndex::TupleIndexMask; }
-
- protected:
- struct TuppleIndex : HBUINT16
- {
- enum Flags {
- EmbeddedPeakTuple = 0x8000u,
- IntermediateRegion = 0x4000u,
- PrivatePointNumbers = 0x2000u,
- TupleIndexMask = 0x0FFFu
- };
-
- DEFINE_SIZE_STATIC (2);
- };
-
- hb_array_t<const F2DOT14> get_all_tuples (unsigned axis_count) const
- { return StructAfter<UnsizedArrayOf<F2DOT14>> (tupleIndex).as_array ((has_peak () + has_intermediate () * 2) * axis_count); }
- hb_array_t<const F2DOT14> get_peak_tuple (unsigned axis_count) const
- { return get_all_tuples (axis_count).sub_array (0, axis_count); }
- hb_array_t<const F2DOT14> get_start_tuple (unsigned axis_count) const
- { return get_all_tuples (axis_count).sub_array (has_peak () * axis_count, axis_count); }
- hb_array_t<const F2DOT14> get_end_tuple (unsigned axis_count) const
- { return get_all_tuples (axis_count).sub_array (has_peak () * axis_count + axis_count, axis_count); }
-
- HBUINT16 varDataSize; /* The size in bytes of the serialized
- * data for this tuple variation table. */
- TuppleIndex tupleIndex; /* A packed field. The high 4 bits are flags (see below).
- The low 12 bits are an index into a shared tuple
- records array. */
- /* UnsizedArrayOf<F2DOT14> peakTuple - optional */
- /* Peak tuple record for this tuple variation table — optional,
- * determined by flags in the tupleIndex value.
- *
- * Note that this must always be included in the 'cvar' table. */
- /* UnsizedArrayOf<F2DOT14> intermediateStartTuple - optional */
- /* Intermediate start tuple record for this tuple variation table — optional,
- determined by flags in the tupleIndex value. */
- /* UnsizedArrayOf<F2DOT14> intermediateEndTuple - optional */
- /* Intermediate end tuple record for this tuple variation table — optional,
- * determined by flags in the tupleIndex value. */
- public:
- DEFINE_SIZE_MIN (4);
-};
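
[Editor's note: calculate_scalar() moves out of this file into hb-ot-var-common.hh in this commit. Since its per-axis math is easy to lose in the deleted lines above, here is a minimal standalone sketch of it, using plain floats in place of F2DOT14; axis_scalar() is a hypothetical name, not a HarfBuzz API. The tuple's overall scalar is the product of this factor over every axis, so any axis returning 0 disables the whole tuple.]

    // Sketch of the per-axis factor computed by the removed calculate_scalar().
    #include <algorithm>

    static float axis_scalar (float v, float peak, bool has_intermediate,
                              float start, float end)
    {
      if (peak == 0.f || v == peak) return 1.f;   // axis contributes nothing
      if (has_intermediate)
      {
        // Malformed regions (start > peak, peak > end, or a region straddling
        // zero with a non-zero peak) are skipped in the original code.
        if (start > peak || peak > end || (start < 0.f && end > 0.f)) return 1.f;
        if (v < start || v > end) return 0.f;     // outside the region entirely
        if (v < peak)
          return peak != start ? (v - start) / (peak - start) : 1.f;
        return peak != end ? (end - v) / (end - peak) : 1.f;
      }
      if (v == 0.f || v < std::min (0.f, peak) || v > std::max (0.f, peak))
        return 0.f;                               // outside the [0, peak] ramp
      return v / peak;                            // linear ramp toward the peak
    }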
-
-struct GlyphVariationData
-{
- const TupleVariationHeader &get_tuple_var_header (void) const
- { return StructAfter<TupleVariationHeader> (data); }
-
- struct tuple_iterator_t
- {
- void init (hb_bytes_t var_data_bytes_, unsigned int axis_count_)
- {
- var_data_bytes = var_data_bytes_;
- var_data = var_data_bytes_.as<GlyphVariationData> ();
- index = 0;
- axis_count = axis_count_;
- current_tuple = &var_data->get_tuple_var_header ();
- data_offset = 0;
- }
-
- bool get_shared_indices (hb_vector_t<unsigned int> &shared_indices /* OUT */)
- {
- if (var_data->has_shared_point_numbers ())
- {
- const HBUINT8 *base = &(var_data+var_data->data);
- const HBUINT8 *p = base;
- if (!unpack_points (p, shared_indices, (const HBUINT8 *) (var_data_bytes.arrayZ + var_data_bytes.length))) return false;
- data_offset = p - base;
- }
- return true;
- }
-
- bool is_valid () const
- {
- return (index < var_data->tupleVarCount.get_count ()) &&
- var_data_bytes.check_range (current_tuple, TupleVariationHeader::min_size) &&
- var_data_bytes.check_range (current_tuple, hb_max (current_tuple->get_data_size (),
- current_tuple->get_size (axis_count)));
- }
-
- bool move_to_next ()
- {
- data_offset += current_tuple->get_data_size ();
- current_tuple = &current_tuple->get_next (axis_count);
- index++;
- return is_valid ();
- }
-
- const HBUINT8 *get_serialized_data () const
- { return &(var_data+var_data->data) + data_offset; }
-
- private:
- const GlyphVariationData *var_data;
- unsigned int index;
- unsigned int axis_count;
- unsigned int data_offset;
-
- public:
- hb_bytes_t var_data_bytes;
- const TupleVariationHeader *current_tuple;
- };
-
- static bool get_tuple_iterator (hb_bytes_t var_data_bytes, unsigned axis_count,
- hb_vector_t<unsigned int> &shared_indices /* OUT */,
- tuple_iterator_t *iterator /* OUT */)
- {
- iterator->init (var_data_bytes, axis_count);
- if (!iterator->get_shared_indices (shared_indices))
- return false;
- return iterator->is_valid ();
- }
-
- bool has_shared_point_numbers () const { return tupleVarCount.has_shared_point_numbers (); }
-
- static bool unpack_points (const HBUINT8 *&p /* IN/OUT */,
- hb_vector_t<unsigned int> &points /* OUT */,
- const HBUINT8 *end)
- {
- enum packed_point_flag_t
- {
- POINTS_ARE_WORDS = 0x80,
- POINT_RUN_COUNT_MASK = 0x7F
- };
-
- if (unlikely (p + 1 > end)) return false;
-
- unsigned count = *p++;
- if (count & POINTS_ARE_WORDS)
- {
- if (unlikely (p + 1 > end)) return false;
- count = ((count & POINT_RUN_COUNT_MASK) << 8) | *p++;
- }
- if (unlikely (!points.resize (count, false))) return false;
-
- unsigned n = 0;
- unsigned i = 0;
- while (i < count)
- {
- if (unlikely (p + 1 > end)) return false;
- unsigned control = *p++;
- unsigned run_count = (control & POINT_RUN_COUNT_MASK) + 1;
- if (unlikely (i + run_count > count)) return false;
- unsigned j;
- if (control & POINTS_ARE_WORDS)
- {
- if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
- for (j = 0; j < run_count; j++, i++)
- {
- n += *(const HBUINT16 *)p;
- points.arrayZ[i] = n;
- p += HBUINT16::static_size;
- }
- }
- else
- {
- if (unlikely (p + run_count > end)) return false;
- for (j = 0; j < run_count; j++, i++)
- {
- n += *p++;
- points.arrayZ[i] = n;
- }
- }
- }
- return true;
- }
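
[Editor's note: a worked example of the packed point-number encoding consumed by unpack_points() above. The decoder below is a simplified standalone sketch; decode_points() is a hypothetical name, and the 16-bit count extension (a set high bit in the count byte) is only noted, not implemented.]

    // Packed point numbers: a count byte, then runs whose control byte gives
    // the run length (low 7 bits, minus one) and the delta width (0x80 set =
    // 16-bit big-endian, clear = 8-bit).  Deltas accumulate into absolute
    // point indices.
    #include <cassert>
    #include <cstdint>
    #include <vector>

    static std::vector<unsigned> decode_points (const uint8_t *p, const uint8_t *end)
    {
      if (p >= end) return {};
      unsigned count = *p++;       // a set high bit would extend this to 16 bits
      std::vector<unsigned> points;
      unsigned n = 0;
      while (points.size () < count)
      {
        if (p >= end) break;
        unsigned control = *p++;
        unsigned run = (control & 0x7F) + 1;
        for (unsigned j = 0; j < run; j++)
        {
          if (control & 0x80)                    // POINTS_ARE_WORDS
          {
            if (p + 2 > end) return points;
            n += (unsigned (p[0]) << 8) | p[1];
            p += 2;
          }
          else
          {
            if (p >= end) return points;
            n += *p++;
          }
          points.push_back (n);
        }
      }
      return points;
    }

    int main ()
    {
      // Count 2, one byte-sized run of length 2 with deltas 3 and 5.
      const uint8_t data[] = { 0x02, 0x01, 0x03, 0x05 };
      auto pts = decode_points (data, data + sizeof data);
      assert (pts.size () == 2 && pts[0] == 3 && pts[1] == 8);
      return 0;
    }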
-
- static bool unpack_deltas (const HBUINT8 *&p /* IN/OUT */,
- hb_vector_t<int> &deltas /* IN/OUT */,
- const HBUINT8 *end)
- {
- enum packed_delta_flag_t
- {
- DELTAS_ARE_ZERO = 0x80,
- DELTAS_ARE_WORDS = 0x40,
- DELTA_RUN_COUNT_MASK = 0x3F
- };
-
- unsigned i = 0;
- unsigned count = deltas.length;
- while (i < count)
- {
- if (unlikely (p + 1 > end)) return false;
- unsigned control = *p++;
- unsigned run_count = (control & DELTA_RUN_COUNT_MASK) + 1;
- if (unlikely (i + run_count > count)) return false;
- unsigned j;
- if (control & DELTAS_ARE_ZERO)
- {
- for (j = 0; j < run_count; j++, i++)
- deltas.arrayZ[i] = 0;
- }
- else if (control & DELTAS_ARE_WORDS)
- {
- if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
- for (j = 0; j < run_count; j++, i++)
- {
- deltas.arrayZ[i] = * (const HBINT16 *) p;
- p += HBUINT16::static_size;
- }
- }
- else
- {
- if (unlikely (p + run_count > end)) return false;
- for (j = 0; j < run_count; j++, i++)
- {
- deltas.arrayZ[i] = * (const HBINT8 *) p++;
- }
- }
- }
- return true;
- }
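
[Editor's note: the packed-delta encoding handled by unpack_deltas() above is similar but adds a zero-run form and stores signed values that do not accumulate. A simplified standalone sketch; decode_deltas() is a hypothetical name.]

    // Control byte: 0x80 = run of zeros (no payload), 0x40 = 16-bit signed
    // big-endian words, otherwise 8-bit signed bytes; the low 6 bits give the
    // run length minus one.
    #include <cassert>
    #include <cstdint>
    #include <vector>

    static bool decode_deltas (const uint8_t *p, const uint8_t *end,
                               std::vector<int> &out)
    {
      for (size_t i = 0; i < out.size (); )
      {
        if (p >= end) return false;
        unsigned control = *p++;
        unsigned run = (control & 0x3F) + 1;
        if (i + run > out.size ()) return false;
        for (unsigned j = 0; j < run; j++, i++)
        {
          if (control & 0x80)        out[i] = 0;                  // zero run
          else if (control & 0x40)                                // word run
          {
            if (p + 2 > end) return false;
            out[i] = (int16_t) ((p[0] << 8) | p[1]);
            p += 2;
          }
          else                                                    // byte run
          {
            if (p >= end) return false;
            out[i] = (int8_t) *p++;
          }
        }
      }
      return true;
    }

    int main ()
    {
      // 0x81: zero run of 2; 0x01: byte run of 2 (+10, -10).
      const uint8_t data[] = { 0x81, 0x01, 0x0A, 0xF6 };
      std::vector<int> deltas (4);
      assert (decode_deltas (data, data + sizeof data, deltas));
      assert (deltas[0] == 0 && deltas[1] == 0 && deltas[2] == 10 && deltas[3] == -10);
      return 0;
    }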
-
- bool has_data () const { return tupleVarCount; }
-
- protected:
- struct TupleVarCount : HBUINT16
- {
- bool has_shared_point_numbers () const { return ((*this) & SharedPointNumbers); }
- unsigned int get_count () const { return (*this) & CountMask; }
-
- protected:
- enum Flags
- {
- SharedPointNumbers= 0x8000u,
- CountMask = 0x0FFFu
- };
- public:
- DEFINE_SIZE_STATIC (2);
- };
-
- TupleVarCount tupleVarCount; /* A packed field. The high 4 bits are flags, and the
- * low 12 bits are the number of tuple variation tables
- * for this glyph. The number of tuple variation tables
- * can be any number between 1 and 4095. */
- Offset16To<HBUINT8>
- data; /* Offset from the start of the GlyphVariationData table
- * to the serialized data. */
- /* TupleVariationHeader tupleVariationHeaders[] *//* Array of tuple variation headers. */
- public:
- DEFINE_SIZE_MIN (4);
-};
+struct GlyphVariationData : TupleVariationData
+{};
struct gvar
{
@@ -406,8 +104,8 @@ struct gvar
return_trace (c->check_struct (this) && (version.major == 1) &&
sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
(is_long_offset () ?
- c->check_array (get_long_offset_array (), glyphCount+1) :
- c->check_array (get_short_offset_array (), glyphCount+1)));
+ c->check_array (get_long_offset_array (), c->get_num_glyphs () + 1) :
+ c->check_array (get_short_offset_array (), c->get_num_glyphs () + 1)));
}
/* GlyphVariationData not sanitized here; must be checked while accessing each glyph variation data */
@@ -418,6 +116,8 @@ struct gvar
{
TRACE_SUBSET (this);
+ unsigned glyph_count = version.to_int () ? c->plan->source->get_num_glyphs () : 0;
+
gvar *out = c->serializer->allocate_min<gvar> ();
if (unlikely (!out)) return_trace (false);
@@ -427,7 +127,7 @@ struct gvar
out->sharedTupleCount = sharedTupleCount;
unsigned int num_glyphs = c->plan->num_output_glyphs ();
- out->glyphCount = num_glyphs;
+ out->glyphCountX = hb_min (0xFFFFu, num_glyphs);
unsigned int subset_data_size = 0;
for (hb_codepoint_t gid = (c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE) ? 0 : 1;
@@ -436,7 +136,7 @@ struct gvar
{
hb_codepoint_t old_gid;
if (!c->plan->old_gid_for_new_gid (gid, &old_gid)) continue;
- subset_data_size += get_glyph_var_data_bytes (c->source_blob, old_gid).length;
+ subset_data_size += get_glyph_var_data_bytes (c->source_blob, glyph_count, old_gid).length;
}
bool long_offset = subset_data_size & ~0xFFFFu;
@@ -468,7 +168,9 @@ struct gvar
{
hb_codepoint_t old_gid;
hb_bytes_t var_data_bytes = c->plan->old_gid_for_new_gid (gid, &old_gid)
- ? get_glyph_var_data_bytes (c->source_blob, old_gid)
+ ? get_glyph_var_data_bytes (c->source_blob,
+ glyph_count,
+ old_gid)
: hb_bytes_t ();
if (long_offset)
@@ -490,10 +192,12 @@ struct gvar
}
protected:
- const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob, hb_codepoint_t glyph) const
+ const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob,
+ unsigned glyph_count,
+ hb_codepoint_t glyph) const
{
- unsigned start_offset = get_offset (glyph);
- unsigned end_offset = get_offset (glyph+1);
+ unsigned start_offset = get_offset (glyph_count, glyph);
+ unsigned end_offset = get_offset (glyph_count, glyph+1);
if (unlikely (end_offset < start_offset)) return hb_bytes_t ();
unsigned length = end_offset - start_offset;
hb_bytes_t var_data = blob->as_bytes ().sub_array (((unsigned) dataZ) + start_offset, length);
@@ -502,9 +206,9 @@ struct gvar
bool is_long_offset () const { return flags & 1; }
- unsigned get_offset (unsigned i) const
+ unsigned get_offset (unsigned glyph_count, unsigned i) const
{
- if (unlikely (i > glyphCount)) return 0;
+ if (unlikely (i > glyph_count)) return 0;
_hb_compiler_memory_r_barrier ();
return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2;
}
@@ -516,7 +220,38 @@ struct gvar
struct accelerator_t
{
accelerator_t (hb_face_t *face)
- { table = hb_sanitize_context_t ().reference_table<gvar> (face); }
+ {
+ table = hb_sanitize_context_t ().reference_table<gvar> (face);
+ /* If sanitize failed, set glyphCount to 0. */
+ glyphCount = table->version.to_int () ? face->get_num_glyphs () : 0;
+
+ /* For shared tuples that only have one axis active, share the index of
+ * that axis as a cache. This will speed up calculate_scalar() a lot
+ * for fonts with lots of axes and many "monovar" tuples. */
+ hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * table->axisCount);
+ unsigned count = table->sharedTupleCount;
+ if (unlikely (!shared_tuple_active_idx.resize (count, false))) return;
+ unsigned axis_count = table->axisCount;
+ for (unsigned i = 0; i < count; i++)
+ {
+ hb_array_t<const F2DOT14> tuple = shared_tuples.sub_array (axis_count * i, axis_count);
+ int idx = -1;
+ for (unsigned j = 0; j < axis_count; j++)
+ {
+ F2DOT14 peak = tuple.arrayZ[j];
+ if (peak.to_int () != 0)
+ {
+ if (idx != -1)
+ {
+ idx = -1;
+ break;
+ }
+ idx = j;
+ }
+ }
+ shared_tuple_active_idx[i] = idx;
+ }
+ }
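
[Editor's note: the cache built above records, per shared tuple, the index of its single non-zero axis, or -1 when more than one axis is active. The consumer is the new calculate_scalar() overload in hb-ot-var-common.hh, which is not shown in this diff; the sketch below shows roughly how such a cache pays off. tuple_scalar() and axis_factor() are hypothetical names. For a font with many axes and many single-axis shared tuples, this can turn an O(axisCount) scan per tuple into a single test.]

    // When only one peak is non-zero, every other axis contributes a factor
    // of 1, so only that one axis needs testing.
    #include <vector>

    static float axis_factor (float v, float peak)   // non-intermediate case
    {
      if (peak == 0.f || v == peak) return 1.f;
      if (v == 0.f || (peak > 0.f ? (v < 0.f || v > peak)
                                  : (v > 0.f || v < peak))) return 0.f;
      return v / peak;
    }

    static float tuple_scalar (const std::vector<float> &coords,
                               const std::vector<float> &peaks,
                               int active_idx)       // -1: no usable cache entry
    {
      if (active_idx >= 0)                           // fast path: one axis only
        return axis_factor (coords[active_idx], peaks[active_idx]);
      float scalar = 1.f;                            // slow path: scan them all
      for (size_t i = 0; i < coords.size (); i++)
        scalar *= axis_factor (coords[i], peaks[i]);
      return scalar;
    }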
~accelerator_t () { table.destroy (); }
private:
@@ -554,30 +289,26 @@ struct gvar
{
if (!coords) return true;
- if (unlikely (glyph >= table->glyphCount)) return true;
+ if (unlikely (glyph >= glyphCount)) return true;
- hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyph);
+ hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyphCount, glyph);
if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true;
hb_vector_t<unsigned int> shared_indices;
GlyphVariationData::tuple_iterator_t iterator;
if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount,
+ var_data_bytes.arrayZ,
shared_indices, &iterator))
return true; /* so isn't applied at all */
/* Save original points for inferred delta calculation */
- contour_point_vector_t orig_points_vec;
- orig_points_vec.extend (points);
- if (unlikely (orig_points_vec.in_error ())) return false;
+ contour_point_vector_t orig_points_vec; // Populated lazily
auto orig_points = orig_points_vec.as_array ();
- contour_point_vector_t deltas_vec; /* flag is used to indicate referenced point */
- if (unlikely (!deltas_vec.resize (points.length, false))) return false;
+ /* flag is used to indicate referenced point */
+ contour_point_vector_t deltas_vec; // Populated lazily
auto deltas = deltas_vec.as_array ();
- hb_vector_t<unsigned> end_points;
- for (unsigned i = 0; i < points.length; ++i)
- if (points.arrayZ[i].is_end_point)
- end_points.push (i);
+ hb_vector_t<unsigned> end_points; // Populated lazily
unsigned num_coords = table->axisCount;
hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * table->axisCount);
@@ -585,15 +316,23 @@ struct gvar
hb_vector_t<unsigned int> private_indices;
hb_vector_t<int> x_deltas;
hb_vector_t<int> y_deltas;
+ bool flush = false;
do
{
- float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples);
+ float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples,
+ shared_tuple_active_idx.in_error () ? nullptr : &shared_tuple_active_idx);
if (scalar == 0.f) continue;
const HBUINT8 *p = iterator.get_serialized_data ();
unsigned int length = iterator.current_tuple->get_data_size ();
if (unlikely (!iterator.var_data_bytes.check_range (p, length)))
return false;
+ if (!deltas)
+ {
+ if (unlikely (!deltas_vec.resize (points.length))) return false;
+ deltas = deltas_vec.as_array ();
+ }
+
const HBUINT8 *end = p + length;
bool has_private_points = iterator.current_tuple->has_private_points ();
@@ -609,16 +348,37 @@ struct gvar
if (unlikely (!y_deltas.resize (num_deltas, false))) return false;
if (unlikely (!GlyphVariationData::unpack_deltas (p, y_deltas, end))) return false;
- hb_memset (deltas.arrayZ, 0, deltas.get_size ());
+ if (!apply_to_all)
+ {
+ if (!orig_points)
+ {
+ orig_points_vec.extend (points);
+ if (unlikely (orig_points_vec.in_error ())) return false;
+ orig_points = orig_points_vec.as_array ();
+ }
+
+ if (flush)
+ {
+ for (unsigned int i = 0; i < points.length; i++)
+ points.arrayZ[i].translate (deltas.arrayZ[i]);
+ flush = false;
+
+ }
+ hb_memset (deltas.arrayZ, 0, deltas.get_size ());
+ }
- unsigned ref_points = 0;
if (scalar != 1.0f)
for (unsigned int i = 0; i < num_deltas; i++)
{
- unsigned int pt_index = apply_to_all ? i : indices[i];
- if (unlikely (pt_index >= deltas.length)) continue;
+ unsigned int pt_index;
+ if (apply_to_all)
+ pt_index = i;
+ else
+ {
+ pt_index = indices[i];
+ if (unlikely (pt_index >= deltas.length)) continue;
+ }
auto &delta = deltas.arrayZ[pt_index];
- ref_points += !delta.flag;
delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
delta.x += x_deltas.arrayZ[i] * scalar;
delta.y += y_deltas.arrayZ[i] * scalar;
@@ -626,23 +386,34 @@ struct gvar
else
for (unsigned int i = 0; i < num_deltas; i++)
{
- unsigned int pt_index = apply_to_all ? i : indices[i];
- if (unlikely (pt_index >= deltas.length)) continue;
+ unsigned int pt_index;
+ if (apply_to_all)
+ pt_index = i;
+ else
+ {
+ pt_index = indices[i];
+ if (unlikely (pt_index >= deltas.length)) continue;
+ }
auto &delta = deltas.arrayZ[pt_index];
- ref_points += !delta.flag;
delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
delta.x += x_deltas.arrayZ[i];
delta.y += y_deltas.arrayZ[i];
}
/* infer deltas for unreferenced points */
- if (ref_points && ref_points < orig_points.length)
+ if (!apply_to_all)
{
- unsigned start_point = 0;
- for (unsigned c = 0; c < end_points.length; c++)
+ if (!end_points)
{
- unsigned end_point = end_points.arrayZ[c];
+ for (unsigned i = 0; i < points.length; ++i)
+ if (points.arrayZ[i].is_end_point)
+ end_points.push (i);
+ if (unlikely (end_points.in_error ())) return false;
+ }
+ unsigned start_point = 0;
+ for (unsigned end_point : end_points)
+ {
/* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */
unsigned unref_count = 0;
for (unsigned i = start_point; i < end_point + 1; i++)
@@ -689,14 +460,14 @@ struct gvar
}
}
- /* apply specified / inferred deltas to points */
- for (unsigned int i = 0; i < points.length; i++)
- {
- points.arrayZ[i].x += deltas.arrayZ[i].x;
- points.arrayZ[i].y += deltas.arrayZ[i].y;
- }
+ flush = true;
+
} while (iterator.move_to_next ());
+ if (flush)
+ for (unsigned int i = 0; i < points.length; i++)
+ points.arrayZ[i].translate (deltas.arrayZ[i]);
+
return true;
}
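
[Editor's note: this hunk also changes buffer management. orig_points_vec, deltas_vec, and end_points are now filled only once some tuple survives the scalar test, and accumulated deltas are pushed into the points lazily via the new flush flag: before re-zeroing for the next tuple on the private-points path, and once after the loop. A minimal sketch of that pattern in isolation, following the private-points behavior; apply_tuples() and the Tuple type are hypothetical, and each tuple's raw_deltas is assumed to match points in length.]

    #include <algorithm>
    #include <vector>

    struct Point { float x = 0.f, y = 0.f; };
    struct Tuple { float scalar; std::vector<Point> raw_deltas; };

    static void apply_tuples (std::vector<Point> &points,
                              const std::vector<Tuple> &tuples)
    {
      std::vector<Point> deltas;                 // allocated lazily
      bool flush = false;
      for (const Tuple &t : tuples)
      {
        if (t.scalar == 0.f) continue;           // no applicable tuple: no buffers
        if (deltas.empty ()) deltas.resize (points.size ());
        if (flush)                                // flush the previous tuple first
        {
          for (size_t i = 0; i < points.size (); i++)
          { points[i].x += deltas[i].x; points[i].y += deltas[i].y; }
          std::fill (deltas.begin (), deltas.end (), Point {});
          flush = false;
        }
        for (size_t i = 0; i < deltas.size (); i++)   // accumulate scaled deltas
        {
          deltas[i].x += t.raw_deltas[i].x * t.scalar;
          deltas[i].y += t.raw_deltas[i].y * t.scalar;
        }
        flush = true;
      }
      if (flush)                                  // final deferred flush
        for (size_t i = 0; i < points.size (); i++)
        { points[i].x += deltas[i].x; points[i].y += deltas[i].y; }
    }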
@@ -704,6 +475,8 @@ struct gvar
private:
hb_blob_ptr_t<gvar> table;
+ unsigned glyphCount;
+ hb_vector_t<signed> shared_tuple_active_idx;
};
protected:
@@ -719,7 +492,7 @@ struct gvar
NNOffset32To<UnsizedArrayOf<F2DOT14>>
sharedTuples; /* Offset from the start of this table to the shared tuple records.
* Array of tuple records shared across all glyph variation data tables. */
- HBUINT16 glyphCount; /* The number of glyphs in this font. This must match the number of
+ HBUINT16 glyphCountX; /* The number of glyphs in this font. This must match the number of
* glyphs stored elsewhere in the font. */
HBUINT16 flags; /* Bit-field that gives the format of the offset array that follows.
* If bit 0 is clear, the offsets are uint16; if bit 0 is set, the