author     Eskil Abrahamsen Blomfeldt <eskil.abrahamsen-blomfeldt@qt.io>   2022-05-19 09:58:31 +0200
committer  Eskil Abrahamsen Blomfeldt <eskil.abrahamsen-blomfeldt@qt.io>   2022-05-23 19:25:46 +0000
commit     d88da0b2b0a8abaaa521bef864378fe1dbf86709 (patch)
tree       4795f260c1ceaaeeb2cbe64f8314d604e0a7cbe7 /src/3rdparty/harfbuzz-ng/src/hb-buffer.hh
parent     0d97723ee2778203af0cfd13599572110fb69dd5 (diff)
Update Harfbuzz to version 4.2.1
[ChangeLog][QtGui][Text] Updated the Harfbuzz code included with Qt to version 4.2.1.

Pick-to: 6.2 6.3
Fixes: QTBUG-103603
Change-Id: I45fdde8fd0772e4470304c5f6f5a876666356d04
Reviewed-by: Konstantin Ritt <ritt.ks@gmail.com>
Reviewed-by: Lars Knoll <lars.knoll@qt.io>
Diffstat (limited to 'src/3rdparty/harfbuzz-ng/src/hb-buffer.hh')
-rw-r--r--  src/3rdparty/harfbuzz-ng/src/hb-buffer.hh  267
1 file changed, 201 insertions, 66 deletions
diff --git a/src/3rdparty/harfbuzz-ng/src/hb-buffer.hh b/src/3rdparty/harfbuzz-ng/src/hb-buffer.hh
index 8635ebd35f..bc6992905e 100644
--- a/src/3rdparty/harfbuzz-ng/src/hb-buffer.hh
+++ b/src/3rdparty/harfbuzz-ng/src/hb-buffer.hh
@@ -67,8 +67,8 @@ enum hb_buffer_scratch_flags_t {
HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES = 0x00000002u,
HB_BUFFER_SCRATCH_FLAG_HAS_SPACE_FALLBACK = 0x00000004u,
HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT = 0x00000008u,
- HB_BUFFER_SCRATCH_FLAG_HAS_UNSAFE_TO_BREAK = 0x00000010u,
- HB_BUFFER_SCRATCH_FLAG_HAS_CGJ = 0x00000020u,
+ HB_BUFFER_SCRATCH_FLAG_HAS_CGJ = 0x00000010u,
+ HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS = 0x00000020u,
/* Reserved for complex shapers' internal use. */
HB_BUFFER_SCRATCH_FLAG_COMPLEX0 = 0x01000000u,
@@ -87,17 +87,21 @@ struct hb_buffer_t
{
hb_object_header_t header;
- /* Information about how the text in the buffer should be treated */
+ /*
+ * Information about how the text in the buffer should be treated.
+ */
+
hb_unicode_funcs_t *unicode; /* Unicode functions */
hb_buffer_flags_t flags; /* BOT / EOT / etc. */
hb_buffer_cluster_level_t cluster_level;
hb_codepoint_t replacement; /* U+FFFD or something else. */
hb_codepoint_t invisible; /* 0 or something else. */
- hb_buffer_scratch_flags_t scratch_flags; /* Have space-fallback, etc. */
- unsigned int max_len; /* Maximum allowed len. */
- int max_ops; /* Maximum allowed operations. */
+ hb_codepoint_t not_found; /* 0 or something else. */
+
+ /*
+ * Buffer contents
+ */
- /* Buffer contents */
hb_buffer_content_type_t content_type;
hb_segment_properties_t props; /* Script, language, direction */
@@ -114,8 +118,6 @@ struct hb_buffer_t
hb_glyph_info_t *out_info;
hb_glyph_position_t *pos;
- unsigned int serial;
-
/* Text before / after the main buffer contents.
* Always in Unicode, and ordered outward.
* Index 0 is for "pre-context", 1 for "post-context". */
@@ -123,7 +125,25 @@ struct hb_buffer_t
hb_codepoint_t context[2][CONTEXT_LENGTH];
unsigned int context_len[2];
- /* Debugging API */
+
+ /*
+ * Managed by enter / leave
+ */
+
+#ifndef HB_NDEBUG
+ uint8_t allocated_var_bits;
+#endif
+ uint8_t serial;
+ hb_buffer_scratch_flags_t scratch_flags; /* Have space-fallback, etc. */
+ unsigned int max_len; /* Maximum allowed len. */
+ int max_ops; /* Maximum allowed operations. */
+ /* The bits here reflect current allocations of the bytes in glyph_info_t's var1 and var2. */
+
+
+ /*
+ * Messaging callback
+ */
+
#ifndef HB_NO_BUFFER_MESSAGE
hb_buffer_message_func_t message_func;
void *message_data;
@@ -133,11 +153,6 @@ struct hb_buffer_t
static constexpr unsigned message_depth = 0u;
#endif
- /* Internal debugging. */
- /* The bits here reflect current allocations of the bytes in glyph_info_t's var1 and var2. */
-#ifndef HB_NDEBUG
- uint8_t allocated_var_bits;
-#endif
/* Methods */
@@ -189,23 +204,88 @@ struct hb_buffer_t
hb_glyph_info_t &prev () { return out_info[out_len ? out_len - 1 : 0]; }
hb_glyph_info_t prev () const { return out_info[out_len ? out_len - 1 : 0]; }
+ HB_INTERNAL void similar (const hb_buffer_t &src);
HB_INTERNAL void reset ();
HB_INTERNAL void clear ();
+ /* Called around shape() */
+ HB_INTERNAL void enter ();
+ HB_INTERNAL void leave ();
+
+#ifndef HB_NO_BUFFER_VERIFY
+ HB_INTERNAL
+#endif
+ bool verify (hb_buffer_t *text_buffer,
+ hb_font_t *font,
+ const hb_feature_t *features,
+ unsigned int num_features,
+ const char * const *shapers)
+#ifndef HB_NO_BUFFER_VERIFY
+ ;
+#else
+ { return true; }
+#endif
+
unsigned int backtrack_len () const { return have_output ? out_len : idx; }
unsigned int lookahead_len () const { return len - idx; }
- unsigned int next_serial () { return serial++; }
+ uint8_t next_serial () { return ++serial ? serial : ++serial; }
HB_INTERNAL void add (hb_codepoint_t codepoint,
unsigned int cluster);
HB_INTERNAL void add_info (const hb_glyph_info_t &glyph_info);
- HB_INTERNAL void reverse_range (unsigned int start, unsigned int end);
- HB_INTERNAL void reverse ();
- HB_INTERNAL void reverse_clusters ();
+ void reverse_range (unsigned start, unsigned end)
+ {
+ hb_array_t<hb_glyph_info_t> (info, len).reverse (start, end);
+ if (have_positions)
+ hb_array_t<hb_glyph_position_t> (pos, len).reverse (start, end);
+ }
+ void reverse () { reverse_range (0, len); }
+
+ template <typename FuncType>
+ void reverse_groups (const FuncType& group,
+ bool merge_clusters = false)
+ {
+ if (unlikely (!len))
+ return;
+
+ unsigned start = 0;
+ unsigned i;
+ for (i = 1; i < len; i++)
+ {
+ if (!group (info[i - 1], info[i]))
+ {
+ if (merge_clusters)
+ this->merge_clusters (start, i);
+ reverse_range (start, i);
+ start = i;
+ }
+ }
+ if (merge_clusters)
+ this->merge_clusters (start, i);
+ reverse_range (start, i);
+
+ reverse ();
+ }
+
+ template <typename FuncType>
+ unsigned group_end (unsigned start, const FuncType& group) const
+ {
+ while (++start < len && group (info[start - 1], info[start]))
+ ;
+
+ return start;
+ }
+
+ static bool _cluster_group_func (const hb_glyph_info_t& a,
+ const hb_glyph_info_t& b)
+ { return a.cluster == b.cluster; }
+
+ void reverse_clusters () { reverse_groups (_cluster_group_func); }
+
HB_INTERNAL void guess_segment_properties ();
- HB_INTERNAL void swap_buffers ();
+ HB_INTERNAL void sync ();
HB_INTERNAL void clear_output ();
HB_INTERNAL void clear_positions ();
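
For reference, the new reverse_groups () / reverse_clusters () helpers reverse the buffer group by group: each run of equal clusters is reversed in place, then the whole array is reversed, so the groups come out in reverse order while the order inside each group is preserved. A minimal standalone sketch of that idea (illustrative only, not HarfBuzz code; a simplified Info struct stands in for hb_glyph_info_t):

// Standalone sketch of the grouped-reversal idea behind reverse_groups().
#include <algorithm>
#include <cstdio>
#include <vector>

struct Info { unsigned cluster; unsigned glyph; };  // simplified stand-in for hb_glyph_info_t

static void reverse_groups (std::vector<Info> &v)
{
  if (v.empty ()) return;
  unsigned start = 0, i;
  for (i = 1; i < v.size (); i++)
    if (v[i - 1].cluster != v[i].cluster)           // group boundary, as in _cluster_group_func
    {
      std::reverse (v.begin () + start, v.begin () + i);
      start = i;
    }
  std::reverse (v.begin () + start, v.begin () + i); // last group
  std::reverse (v.begin (), v.end ());               // reverse the sequence of groups
}

int main ()
{
  std::vector<Info> v = {{0, 10}, {0, 11}, {3, 12}, {5, 13}, {5, 14}};
  reverse_groups (v);
  for (const Info &inf : v)
    std::printf ("cluster %u glyph %u\n", inf.cluster, inf.glyph);
  // Prints clusters 5 5 3 0 0 with glyphs 13 14 12 10 11: groups reversed,
  // order within each group preserved.
  return 0;
}
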
@@ -319,15 +399,87 @@ struct hb_buffer_t
/* Merge clusters for deleting current glyph, and skip it. */
HB_INTERNAL void delete_glyph ();
- void unsafe_to_break (unsigned int start,
- unsigned int end)
+
+ /* Adds glyph flags in mask to infos with clusters between start and end.
+ * The start index will be from out-buffer if from_out_buffer is true.
+ * If interior is true, then the cluster having the minimum value is skipped. */
+ void _set_glyph_flags (hb_mask_t mask,
+ unsigned start = 0,
+ unsigned end = (unsigned) -1,
+ bool interior = false,
+ bool from_out_buffer = false)
{
- if (end - start < 2)
+ end = hb_min (end, len);
+
+ if (interior && !from_out_buffer && end - start < 2)
return;
- unsafe_to_break_impl (start, end);
+
+ scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS;
+
+ if (!from_out_buffer || !have_output)
+ {
+ if (!interior)
+ {
+ for (unsigned i = start; i < end; i++)
+ info[i].mask |= mask;
+ }
+ else
+ {
+ unsigned cluster = _infos_find_min_cluster (info, start, end);
+ _infos_set_glyph_flags (info, start, end, cluster, mask);
+ }
+ }
+ else
+ {
+ assert (start <= out_len);
+ assert (idx <= end);
+
+ if (!interior)
+ {
+ for (unsigned i = start; i < out_len; i++)
+ out_info[i].mask |= mask;
+ for (unsigned i = idx; i < end; i++)
+ info[i].mask |= mask;
+ }
+ else
+ {
+ unsigned cluster = _infos_find_min_cluster (info, idx, end);
+ cluster = _infos_find_min_cluster (out_info, start, out_len, cluster);
+
+ _infos_set_glyph_flags (out_info, start, out_len, cluster, mask);
+ _infos_set_glyph_flags (info, idx, end, cluster, mask);
+ }
+ }
+ }
+
+ void unsafe_to_break (unsigned int start = 0, unsigned int end = -1)
+ {
+ _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK | HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
+ start, end,
+ true);
+ }
+ void unsafe_to_concat (unsigned int start = 0, unsigned int end = -1)
+ {
+ if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0))
+ return;
+ _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
+ start, end,
+ true);
+ }
+ void unsafe_to_break_from_outbuffer (unsigned int start = 0, unsigned int end = -1)
+ {
+ _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK | HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
+ start, end,
+ true, true);
+ }
+ void unsafe_to_concat_from_outbuffer (unsigned int start = 0, unsigned int end = -1)
+ {
+ if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0))
+ return;
+ _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
+ start, end,
+ false, true);
}
- HB_INTERNAL void unsafe_to_break_impl (unsigned int start, unsigned int end);
- HB_INTERNAL void unsafe_to_break_from_outbuffer (unsigned int start, unsigned int end);
/* Internal methods */
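
For reference, the unsafe_to_concat () helpers above only do work when the buffer carries HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT. A hedged usage sketch of the public-API side of this change (the font path is a placeholder; the calls are the standard hb.h entry points of the 4.x series):

// Sketch: request UNSAFE_TO_CONCAT production and read the glyph flags back.
#include <hb.h>
#include <cstdio>

int main ()
{
  hb_blob_t *blob = hb_blob_create_from_file ("/usr/share/fonts/some-font.ttf"); /* placeholder path */
  hb_face_t *face = hb_face_create (blob, 0);
  hb_font_t *font = hb_font_create (face);

  hb_buffer_t *buf = hb_buffer_create ();
  /* Without this flag the unsafe_to_concat() calls return early and only
   * UNSAFE_TO_BREAK is produced. */
  hb_buffer_set_flags (buf, HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT);
  hb_buffer_add_utf8 (buf, "example", -1, 0, -1);
  hb_buffer_guess_segment_properties (buf);

  hb_shape (font, buf, nullptr, 0);

  unsigned int len = 0;
  hb_glyph_info_t *info = hb_buffer_get_glyph_infos (buf, &len);
  for (unsigned int i = 0; i < len; i++)
  {
    hb_glyph_flags_t glyph_flags = hb_glyph_info_get_glyph_flags (&info[i]);
    std::printf ("glyph %u: unsafe-to-break=%d unsafe-to-concat=%d\n",
                 info[i].codepoint,
                 !!(glyph_flags & HB_GLYPH_FLAG_UNSAFE_TO_BREAK),
                 !!(glyph_flags & HB_GLYPH_FLAG_UNSAFE_TO_CONCAT));
  }

  hb_buffer_destroy (buf);
  hb_font_destroy (font);
  hb_face_destroy (face);
  hb_blob_destroy (blob);
  return 0;
}
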
@@ -418,67 +570,50 @@ struct hb_buffer_t
set_cluster (hb_glyph_info_t &inf, unsigned int cluster, unsigned int mask = 0)
{
if (inf.cluster != cluster)
- {
- if (mask & HB_GLYPH_FLAG_UNSAFE_TO_BREAK)
- inf.mask |= HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
- else
- inf.mask &= ~HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
- }
+ inf.mask = (inf.mask & ~HB_GLYPH_FLAG_DEFINED) | (mask & HB_GLYPH_FLAG_DEFINED);
inf.cluster = cluster;
}
-
- unsigned int
- _unsafe_to_break_find_min_cluster (const hb_glyph_info_t *infos,
- unsigned int start, unsigned int end,
- unsigned int cluster) const
- {
- for (unsigned int i = start; i < end; i++)
- cluster = hb_min (cluster, infos[i].cluster);
- return cluster;
- }
void
- _unsafe_to_break_set_mask (hb_glyph_info_t *infos,
- unsigned int start, unsigned int end,
- unsigned int cluster)
+ _infos_set_glyph_flags (hb_glyph_info_t *infos,
+ unsigned int start, unsigned int end,
+ unsigned int cluster,
+ hb_mask_t mask)
{
for (unsigned int i = start; i < end; i++)
if (cluster != infos[i].cluster)
{
- scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_UNSAFE_TO_BREAK;
- infos[i].mask |= HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
+ scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS;
+ infos[i].mask |= mask;
}
}
+ static unsigned
+ _infos_find_min_cluster (const hb_glyph_info_t *infos,
+ unsigned start, unsigned end,
+ unsigned cluster = UINT_MAX)
+ {
+ for (unsigned int i = start; i < end; i++)
+ cluster = hb_min (cluster, infos[i].cluster);
+ return cluster;
+ }
- void unsafe_to_break_all () { unsafe_to_break_impl (0, len); }
- void safe_to_break_all ()
+ void clear_glyph_flags (hb_mask_t mask = 0)
{
for (unsigned int i = 0; i < len; i++)
- info[i].mask &= ~HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
+ info[i].mask = (info[i].mask & ~HB_GLYPH_FLAG_DEFINED) | (mask & HB_GLYPH_FLAG_DEFINED);
}
};
DECLARE_NULL_INSTANCE (hb_buffer_t);
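
The mask-merge expression used by set_cluster () and clear_glyph_flags () above keeps every non-glyph-flag bit from the existing mask and takes only the bits inside HB_GLYPH_FLAG_DEFINED from the new value. A tiny standalone check of that arithmetic (the DEFINED constant below is a stand-in for HB_GLYPH_FLAG_DEFINED as of the 4.2 series, where it covers UNSAFE_TO_BREAK | UNSAFE_TO_CONCAT):

// Standalone check of the (mask & ~DEFINED) | (flags & DEFINED) merge.
#include <cassert>
#include <stdint.h>

int main ()
{
  const uint32_t DEFINED = 0x00000003u; /* stand-in for HB_GLYPH_FLAG_DEFINED in 4.2.x */
  uint32_t old_mask  = 0xF0000001u;     /* some feature bits set, UNSAFE_TO_BREAK set */
  uint32_t new_flags = 0x00000002u;     /* only UNSAFE_TO_CONCAT requested */

  uint32_t merged = (old_mask & ~DEFINED) | (new_flags & DEFINED);
  assert (merged == 0xF0000002u);       /* feature bits preserved, glyph flags replaced */
  return 0;
}
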
-/* Loop over clusters. Duplicated in foreach_syllable(). */
-#define foreach_cluster(buffer, start, end) \
+#define foreach_group(buffer, start, end, group_func) \
for (unsigned int \
_count = buffer->len, \
- start = 0, end = _count ? _next_cluster (buffer, 0) : 0; \
+ start = 0, end = _count ? buffer->group_end (0, group_func) : 0; \
start < _count; \
- start = end, end = _next_cluster (buffer, start))
-
-static inline unsigned int
-_next_cluster (hb_buffer_t *buffer, unsigned int start)
-{
- hb_glyph_info_t *info = buffer->info;
- unsigned int count = buffer->len;
-
- unsigned int cluster = info[start].cluster;
- while (++start < count && cluster == info[start].cluster)
- ;
+ start = end, end = buffer->group_end (start, group_func))
- return start;
-}
+#define foreach_cluster(buffer, start, end) \
+ foreach_group (buffer, start, end, hb_buffer_t::_cluster_group_func)
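
A hedged sketch of how shaper-internal code walks the buffer with the rewritten foreach_cluster macro (count_clusters is a hypothetical helper and assumes compilation inside the HarfBuzz source tree where hb-buffer.hh is available):

// Hypothetical internal helper: within each iteration, info[start..end) all
// share one cluster value, because _cluster_group_func groups on
// a.cluster == b.cluster.
#include "hb-buffer.hh"

static unsigned
count_clusters (hb_buffer_t *buffer)
{
  unsigned n = 0;
  foreach_cluster (buffer, start, end)
    n++;  /* start and end are declared by the macro's for-loop header */
  return n;
}
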
#define HB_BUFFER_XALLOCATE_VAR(b, func, var) \