HarfBuzz: Update to version 3.2.0
This commit is contained in:
parent e664d195cb
commit b63aced768
35 changed files with 731 additions and 415 deletions
@@ -206,11 +206,11 @@ Copyright: 2010-2020, Google, Inc.
2019-2020, Facebook, Inc.
2012, Mozilla Foundation
2011, Codethink Limited
2008, 2010 Nokia Corporation and/or its subsidiary(-ies)
2008, 2010, Nokia Corporation and/or its subsidiary(-ies)
2009, Keith Stribley
2009, Martin Hosken and SIL International
2007, Chris Wilson
2006, Behdad Esfahbod
2005-2006, 2020-2021, Behdad Esfahbod
2005, David Turner
2004, 2007-2010, Red Hat, Inc.
1998-2004, David Turner and Werner Lemberg
thirdparty/README.md (vendored, 2 changes)
@@ -201,7 +201,7 @@ Files extracted from upstream source:
## harfbuzz

- Upstream: https://github.com/harfbuzz/harfbuzz
- Version: 3.1.2 (8aed5c21a31eece6a9f3cd775fda8facb6c28b9b, 2021)
- Version: 3.2.0 (be91d2917d9860326cb5fd1d03ffe1042a72f6d3, 2021)
- License: MIT

Files extracted from upstream source:
@@ -1038,12 +1038,12 @@ struct Chain
goto skip;
if (reverse)
c->buffer->reverse ();
_hb_ot_layout_reverse_graphemes (c->buffer);
subtable->apply (c);
if (reverse)
c->buffer->reverse ();
_hb_ot_layout_reverse_graphemes (c->buffer);
(void) c->buffer->message (c->font, "end chainsubtable %d", c->lookup_index);
thirdparty/harfbuzz/src/hb-buffer.cc (vendored, 53 changes)
@@ -395,52 +395,6 @@ hb_buffer_t::set_masks (hb_mask_t value,
info[i].mask = (info[i].mask & not_mask) | value;
}
void
hb_buffer_t::reverse_range (unsigned int start,
unsigned int end)
{
if (end - start < 2)
return;
hb_array_t<hb_glyph_info_t> (info, len).reverse (start, end);
if (have_positions) {
hb_array_t<hb_glyph_position_t> (pos, len).reverse (start, end);
}
}
void
hb_buffer_t::reverse ()
{
if (unlikely (!len))
return;
reverse_range (0, len);
}
void
hb_buffer_t::reverse_clusters ()
{
unsigned int i, start, count, last_cluster;
if (unlikely (!len))
return;
reverse ();
count = len;
start = 0;
last_cluster = info[0].cluster;
for (i = 1; i < count; i++) {
if (last_cluster != info[i].cluster) {
reverse_range (start, i);
start = i;
last_cluster = info[i].cluster;
}
}
reverse_range (start, i);
}
void
hb_buffer_t::merge_clusters_impl (unsigned int start,
unsigned int end)
@@ -543,7 +497,7 @@ void
hb_buffer_t::unsafe_to_break_impl (unsigned int start, unsigned int end)
{
unsigned int cluster = UINT_MAX;
cluster = _unsafe_to_break_find_min_cluster (info, start, end, cluster);
cluster = _infos_find_min_cluster (info, start, end, cluster);
_unsafe_to_break_set_mask (info, start, end, cluster);
}
void
@@ -559,8 +513,9 @@ hb_buffer_t::unsafe_to_break_from_outbuffer (unsigned int start, unsigned int en
assert (idx <= end);
unsigned int cluster = UINT_MAX;
cluster = _unsafe_to_break_find_min_cluster (out_info, start, out_len, cluster);
cluster = _unsafe_to_break_find_min_cluster (info, idx, end, cluster);
cluster = _infos_find_min_cluster (out_info, start, out_len, cluster);
cluster = _infos_find_min_cluster (info, idx, end, cluster);
_unsafe_to_break_set_mask (out_info, start, out_len, cluster);
_unsafe_to_break_set_mask (info, idx, end, cluster);
}
thirdparty/harfbuzz/src/hb-buffer.hh (vendored, 86 changes)
@@ -201,9 +201,55 @@ struct hb_buffer_t
unsigned int cluster);
HB_INTERNAL void add_info (const hb_glyph_info_t &glyph_info);
HB_INTERNAL void reverse_range (unsigned int start, unsigned int end);
HB_INTERNAL void reverse ();
HB_INTERNAL void reverse_clusters ();
void reverse_range (unsigned start, unsigned end)
{
hb_array_t<hb_glyph_info_t> (info, len).reverse (start, end);
if (have_positions)
hb_array_t<hb_glyph_position_t> (pos, len).reverse (start, end);
}
void reverse () { reverse_range (0, len); }
template <typename FuncType>
void reverse_groups (const FuncType& group,
bool merge_clusters = false)
{
if (unlikely (!len))
return;
unsigned start = 0;
unsigned i;
for (i = 1; i < len; i++)
{
if (!group (info[i - 1], info[i]))
{
if (merge_clusters)
this->merge_clusters (start, i);
reverse_range (start, i);
start = i;
}
}
if (merge_clusters)
this->merge_clusters (start, i);
reverse_range (start, i);
reverse ();
}
template <typename FuncType>
unsigned group_end (unsigned start, const FuncType& group) const
{
while (++start < len && group (info[start - 1], info[start]))
;
return start;
}
static bool _cluster_group_func (const hb_glyph_info_t& a,
const hb_glyph_info_t& b)
{ return a.cluster == b.cluster; }
void reverse_clusters () { reverse_groups (_cluster_group_func); }
HB_INTERNAL void guess_segment_properties ();
HB_INTERNAL void swap_buffers ();
@@ -428,10 +474,10 @@ struct hb_buffer_t
inf.cluster = cluster;
}
unsigned int
_unsafe_to_break_find_min_cluster (const hb_glyph_info_t *infos,
unsigned int start, unsigned int end,
unsigned int cluster) const
static unsigned
_infos_find_min_cluster (const hb_glyph_info_t *infos,
unsigned start, unsigned end,
unsigned cluster)
{
for (unsigned int i = start; i < end; i++)
cluster = hb_min (cluster, infos[i].cluster);
@@ -450,36 +496,24 @@ struct hb_buffer_t
}
}
void unsafe_to_break_all () { unsafe_to_break_impl (0, len); }
void safe_to_break_all ()
void clear_glyph_flags (hb_mask_t mask = 0)
{
for (unsigned int i = 0; i < len; i++)
info[i].mask &= ~HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
info[i].mask = (info[i].mask & ~HB_GLYPH_FLAG_DEFINED) | (mask & HB_GLYPH_FLAG_DEFINED);
}
};
DECLARE_NULL_INSTANCE (hb_buffer_t);
/* Loop over clusters. Duplicated in foreach_syllable(). */
#define foreach_cluster(buffer, start, end) \
#define foreach_group(buffer, start, end, group_func) \
for (unsigned int \
_count = buffer->len, \
start = 0, end = _count ? _next_cluster (buffer, 0) : 0; \
start = 0, end = _count ? buffer->group_end (0, group_func) : 0; \
start < _count; \
start = end, end = _next_cluster (buffer, start))
start = end, end = buffer->group_end (start, group_func))
static inline unsigned int
_next_cluster (hb_buffer_t *buffer, unsigned int start)
{
hb_glyph_info_t *info = buffer->info;
unsigned int count = buffer->len;
unsigned int cluster = info[start].cluster;
while (++start < count && cluster == info[start].cluster)
;
return start;
}
#define foreach_cluster(buffer, start, end) \
foreach_group (buffer, start, end, hb_buffer_t::_cluster_group_func)
#define HB_BUFFER_XALLOCATE_VAR(b, func, var) \
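The reverse_clusters() shown above is now just reverse_groups() with _cluster_group_func. A minimal standalone sketch of the same two-pass idea, using std::vector instead of the internal glyph-info arrays (illustrative only, not HarfBuzz API): reverse each group in place, then reverse the whole sequence, so groups come out in reverse order while the items inside each group keep their original order.

// Standalone sketch (illustrative, not HarfBuzz API) of the reverse_groups() idea.
#include <algorithm>
#include <vector>

template <typename T, typename SameGroup>
void reverse_groups_sketch (std::vector<T> &v, SameGroup same_group)
{
  if (v.empty ()) return;
  size_t start = 0;
  for (size_t i = 1; i < v.size (); i++)
    if (!same_group (v[i - 1], v[i]))
    {
      std::reverse (v.begin () + start, v.begin () + i);  // reverse one group
      start = i;
    }
  std::reverse (v.begin () + start, v.end ());            // last group
  std::reverse (v.begin (), v.end ());                    // reverse group order
}
// Grouping by equal value mirrors _cluster_group_func (equal cluster):
// {1,1,2,3,3} becomes {3,3,2,1,1}.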
thirdparty/harfbuzz/src/hb-config.hh (vendored, 7 changes)
@@ -86,8 +86,11 @@
#define HB_NO_LEGACY
#endif
#ifdef HAVE_CONFIG_OVERRIDE_H
#include "config-override.h"
#if defined(HAVE_CONFIG_OVERRIDE_H) || defined(HB_CONFIG_OVERRIDE_H)
#ifndef HB_CONFIG_OVERRIDE_H
#define HB_CONFIG_OVERRIDE_H "config-override.h"
#endif
#include HB_CONFIG_OVERRIDE_H
#endif
/* Closure of options. */
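The hunk above generalizes the override mechanism: instead of hard-coding config-override.h behind HAVE_CONFIG_OVERRIDE_H, the header name itself can now be supplied through HB_CONFIG_OVERRIDE_H. A hedged sketch of how a build might use it; the header name and the feature macros chosen here are illustrative, not part of this commit.

// Hypothetical override header, e.g. my-hb-config.h (name and contents are
// illustrative only):
//
//   #define HB_NO_VERTICAL   /* drop vertical-layout support */
//   #define HB_NO_LEGACY     /* drop deprecated entry points  */
//
// selected at compile time by pointing the new macro at it:
//
//   c++ -DHB_CONFIG_OVERRIDE_H='"my-hb-config.h"' -c hb-buffer.cc
//
// Defining only HAVE_CONFIG_OVERRIDE_H keeps the old behaviour and still
// includes "config-override.h".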
thirdparty/harfbuzz/src/hb-coretext.cc (vendored, 2 changes)
@@ -1213,7 +1213,7 @@ resize_and_retry:
}
}
buffer->unsafe_to_break_all ();
buffer->clear_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK);
#undef FAIL
thirdparty/harfbuzz/src/hb-directwrite.cc (vendored, 36 changes)
@@ -43,14 +43,6 @@
* Functions for using HarfBuzz with DirectWrite fonts.
**/
/* Declare object creator for dynamic support of DWRITE */
typedef HRESULT (* WINAPI t_DWriteCreateFactory)(
DWRITE_FACTORY_TYPE factoryType,
REFIID iid,
IUnknown **factory
);
/*
* DirectWrite font stream helpers
*/
@@ -145,7 +137,6 @@ public:
struct hb_directwrite_face_data_t
{
HMODULE dwrite_dll;
IDWriteFactory *dwriteFactory;
IDWriteFontFile *fontFile;
DWriteFontFileStream *fontFileStream;
@@ -167,32 +158,11 @@ _hb_directwrite_shaper_face_data_create (hb_face_t *face)
return nullptr; \
} HB_STMT_END
data->dwrite_dll = LoadLibrary (TEXT ("DWRITE"));
if (unlikely (!data->dwrite_dll))
FAIL ("Cannot find DWrite.DLL");
t_DWriteCreateFactory p_DWriteCreateFactory;
#if defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wcast-function-type"
#endif
p_DWriteCreateFactory = (t_DWriteCreateFactory)
GetProcAddress (data->dwrite_dll, "DWriteCreateFactory");
#if defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
if (unlikely (!p_DWriteCreateFactory))
FAIL ("Cannot find DWriteCreateFactory().");
HRESULT hr;
// TODO: factory and fontFileLoader should be cached separately
IDWriteFactory* dwriteFactory;
hr = p_DWriteCreateFactory (DWRITE_FACTORY_TYPE_SHARED, __uuidof (IDWriteFactory),
hr = DWriteCreateFactory (DWRITE_FACTORY_TYPE_SHARED, __uuidof (IDWriteFactory),
(IUnknown**) &dwriteFactory);
if (unlikely (hr != S_OK))
@@ -257,8 +227,6 @@ _hb_directwrite_shaper_face_data_destroy (hb_directwrite_face_data_t *data)
delete data->fontFileStream;
if (data->faceBlob)
hb_blob_destroy (data->faceBlob);
if (data->dwrite_dll)
FreeLibrary (data->dwrite_dll);
if (data)
delete data;
}
@@ -794,6 +762,8 @@ retry_getglyphs:
if (isRightToLeft) hb_buffer_reverse (buffer);
buffer->clear_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK);
delete [] clusterMap;
delete [] glyphIndices;
delete [] textProperties;
thirdparty/harfbuzz/src/hb-fallback-shape.cc (vendored, 2 changes)
@@ -117,7 +117,7 @@ _hb_fallback_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
if (HB_DIRECTION_IS_BACKWARD (direction))
hb_buffer_reverse (buffer);
buffer->safe_to_break_all ();
buffer->clear_glyph_flags ();
return true;
}
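The CoreText, DirectWrite, Graphite and fallback shapers in this commit now funnel through the new clear_glyph_flags(): the first three mark every glyph HB_GLYPH_FLAG_UNSAFE_TO_BREAK, the fallback shaper clears all defined flags. On the client side this remains visible through the existing public hb_glyph_info_get_glyph_flags() API. A hedged sketch; font creation and error handling are omitted.

// Client-side sketch: after shaping, read the per-glyph flags these shapers set.
#include <hb.h>
#include <stdio.h>

static void print_break_safety (hb_font_t *font, const char *text)
{
  hb_buffer_t *buf = hb_buffer_create ();
  hb_buffer_add_utf8 (buf, text, -1, 0, -1);
  hb_buffer_guess_segment_properties (buf);
  hb_shape (font, buf, nullptr, 0);

  unsigned count = 0;
  hb_glyph_info_t *info = hb_buffer_get_glyph_infos (buf, &count);
  for (unsigned i = 0; i < count; i++)
  {
    hb_glyph_flags_t flags = hb_glyph_info_get_glyph_flags (&info[i]);
    printf ("glyph %u cluster %u unsafe-to-break %d\n",
            info[i].codepoint, info[i].cluster,
            (flags & HB_GLYPH_FLAG_UNSAFE_TO_BREAK) != 0);
  }
  hb_buffer_destroy (buf);
}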
thirdparty/harfbuzz/src/hb-ft.cc (vendored, 15 changes)
@@ -361,6 +361,7 @@ hb_ft_get_glyph_h_advances (hb_font_t* font, void* font_data,
}
}
#ifndef HB_NO_VERTICAL
static hb_position_t
hb_ft_get_glyph_v_advance (hb_font_t *font,
void *font_data,
@@ -381,7 +382,9 @@ hb_ft_get_glyph_v_advance (hb_font_t *font,
* have a Y growing upward. Hence the extra negation. */
return (-v + (1<<9)) >> 10;
}
#endif
#ifndef HB_NO_VERTICAL
static hb_bool_t
hb_ft_get_glyph_v_origin (hb_font_t *font,
void *font_data,
@@ -409,6 +412,7 @@ hb_ft_get_glyph_v_origin (hb_font_t *font,
return true;
}
#endif
#ifndef HB_NO_OT_SHAPE_FALLBACK
static hb_position_t
@@ -569,15 +573,20 @@ static struct hb_ft_font_funcs_lazy_loader_t : hb_font_funcs_lazy_loader_t<hb_ft
{
hb_font_funcs_t *funcs = hb_font_funcs_create ();
hb_font_funcs_set_font_h_extents_func (funcs, hb_ft_get_font_h_extents, nullptr, nullptr);
//hb_font_funcs_set_font_v_extents_func (funcs, hb_ft_get_font_v_extents, nullptr, nullptr);
hb_font_funcs_set_nominal_glyph_func (funcs, hb_ft_get_nominal_glyph, nullptr, nullptr);
hb_font_funcs_set_nominal_glyphs_func (funcs, hb_ft_get_nominal_glyphs, nullptr, nullptr);
hb_font_funcs_set_variation_glyph_func (funcs, hb_ft_get_variation_glyph, nullptr, nullptr);
hb_font_funcs_set_font_h_extents_func (funcs, hb_ft_get_font_h_extents, nullptr, nullptr);
hb_font_funcs_set_glyph_h_advances_func (funcs, hb_ft_get_glyph_h_advances, nullptr, nullptr);
hb_font_funcs_set_glyph_v_advance_func (funcs, hb_ft_get_glyph_v_advance, nullptr, nullptr);
//hb_font_funcs_set_glyph_h_origin_func (funcs, hb_ft_get_glyph_h_origin, nullptr, nullptr);
#ifndef HB_NO_VERTICAL
//hb_font_funcs_set_font_v_extents_func (funcs, hb_ft_get_font_v_extents, nullptr, nullptr);
hb_font_funcs_set_glyph_v_advance_func (funcs, hb_ft_get_glyph_v_advance, nullptr, nullptr);
hb_font_funcs_set_glyph_v_origin_func (funcs, hb_ft_get_glyph_v_origin, nullptr, nullptr);
#endif
#ifndef HB_NO_OT_SHAPE_FALLBACK
hb_font_funcs_set_glyph_h_kerning_func (funcs, hb_ft_get_glyph_h_kerning, nullptr, nullptr);
#endif
thirdparty/harfbuzz/src/hb-graphite2.cc (vendored, 2 changes)
@@ -439,7 +439,7 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
if (feats) gr_featureval_destroy (feats);
gr_seg_destroy (seg);
buffer->unsafe_to_break_all ();
buffer->clear_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK);
return true;
}
thirdparty/harfbuzz/src/hb-iter.hh (vendored, 85 changes)
@@ -581,6 +581,91 @@ struct
}
HB_FUNCOBJ (hb_zip);
/* hb_concat() */
template <typename A, typename B>
struct hb_concat_iter_t :
hb_iter_t<hb_concat_iter_t<A, B>, typename A::item_t>
{
hb_concat_iter_t () {}
hb_concat_iter_t (A& a, B& b) : a (a), b (b) {}
hb_concat_iter_t (const A& a, const B& b) : a (a), b (b) {}
typedef typename A::item_t __item_t__;
static constexpr bool is_random_access_iterator =
A::is_random_access_iterator &&
B::is_random_access_iterator;
static constexpr bool is_sorted_iterator = false;
__item_t__ __item__ () const
{
if (!a)
return *b;
return *a;
}
__item_t__ __item_at__ (unsigned i) const
{
unsigned a_len = a.len ();
if (i < a_len)
return a[i];
return b[i - a_len];
}
bool __more__ () const { return bool (a) || bool (b); }
unsigned __len__ () const { return a.len () + b.len (); }
void __next__ ()
{
if (a)
++a;
else
++b;
}
void __forward__ (unsigned n)
{
if (!n) return;
if (!is_random_access_iterator) {
while (n-- && *this) {
(*this)++;
}
return;
}
unsigned a_len = a.len ();
if (n > a_len) {
n -= a_len;
a.__forward__ (a_len);
b.__forward__ (n);
} else {
a.__forward__ (n);
}
}
hb_concat_iter_t __end__ () const { return hb_concat_iter_t (a.end (), b.end ()); }
bool operator != (const hb_concat_iter_t& o) const
{
return a != o.a
|| b != o.b;
}
private:
A a;
B b;
};
struct
{ HB_PARTIALIZE(2);
template <typename A, typename B,
hb_requires (hb_is_iterable (A) && hb_is_iterable (B))>
hb_concat_iter_t<hb_iter_type<A>, hb_iter_type<B>>
operator () (A&& a, B&& b) const
{ return hb_concat_iter_t<hb_iter_type<A>, hb_iter_type<B>> (hb_iter (a), hb_iter (b)); }
}
HB_FUNCOBJ (hb_concat);
/* hb_apply() */
template <typename Appl>
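hb_concat(), added above, chains two iterables into a single iterator that is random-access when both inputs are. A hedged usage sketch against the internal iterator protocol from hb-iter.hh/hb-array.hh (operator bool, operator++, operator*); not public HarfBuzz API, and the helper itself is illustrative only.

// Hedged sketch, assuming the internal hb_iter_t protocol; illustrative only.
static inline unsigned
sum_both (hb_array_t<const unsigned> first, hb_array_t<const unsigned> second)
{
  unsigned total = 0;
  // hb_concat (first, second) yields all of `first`, then all of `second`.
  for (auto it = hb_concat (first, second); it; ++it)
    total += *it;
  return total;
}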
thirdparty/harfbuzz/src/hb-ot-cmap-table.hh (vendored, 270 changes)
@@ -93,120 +93,192 @@ struct CmapSubtableFormat0
struct CmapSubtableFormat4
{
template<typename Iterator,
typename Writer,
hb_requires (hb_is_iterator (Iterator))>
HBUINT16* serialize_endcode_array (hb_serialize_context_t *c,
Iterator it)
void to_ranges (Iterator it, Writer& range_writer)
{
HBUINT16 *endCode = c->start_embed<HBUINT16> ();
hb_codepoint_t prev_endcp = 0xFFFF;
hb_codepoint_t start_cp = 0, prev_run_start_cp = 0, run_start_cp = 0, end_cp = 0, last_gid = 0;
int run_length = 0 , delta = 0, prev_delta = 0;
for (const auto& _ : +it)
{
if (prev_endcp != 0xFFFF && prev_endcp + 1u != _.first)
{
HBUINT16 end_code;
end_code = prev_endcp;
c->copy<HBUINT16> (end_code);
}
prev_endcp = _.first;
enum {
FIRST_SUB_RANGE,
FOLLOWING_SUB_RANGE,
} mode;
while (it) {
// Start a new range
start_cp = (*it).first;
prev_run_start_cp = (*it).first;
run_start_cp = (*it).first;
end_cp = (*it).first;
last_gid = (*it).second;
run_length = 1;
prev_delta = 0;
delta = (*it).second - (*it).first;
mode = FIRST_SUB_RANGE;
it++;
while (it) {
// Process range
hb_codepoint_t next_cp = (*it).first;
hb_codepoint_t next_gid = (*it).second;
if (next_cp != end_cp + 1) {
// Current range is over, stop processing.
break;
}
{
// last endCode
HBUINT16 endcode;
endcode = prev_endcp;
if (unlikely (!c->copy<HBUINT16> (endcode))) return nullptr;
// There must be a final entry with end_code == 0xFFFF.
if (prev_endcp != 0xFFFF)
{
HBUINT16 finalcode;
finalcode = 0xFFFF;
if (unlikely (!c->copy<HBUINT16> (finalcode))) return nullptr;
if (next_gid == last_gid + 1) {
// The current run continues.
end_cp = next_cp;
run_length++;
last_gid = next_gid;
it++;
continue;
}
// A new run is starting, decide if we want to commit the current run.
int split_cost = (mode == FIRST_SUB_RANGE) ? 8 : 16;
int run_cost = run_length * 2;
if (run_cost >= split_cost) {
commit_current_range(start_cp,
prev_run_start_cp,
run_start_cp,
end_cp,
delta,
prev_delta,
split_cost,
range_writer);
start_cp = next_cp;
}
// Start the new run
mode = FOLLOWING_SUB_RANGE;
prev_run_start_cp = run_start_cp;
run_start_cp = next_cp;
end_cp = next_cp;
prev_delta = delta;
delta = next_gid - run_start_cp;
run_length = 1;
last_gid = next_gid;
it++;
}
// Finalize range
commit_current_range (start_cp,
prev_run_start_cp,
run_start_cp,
end_cp,
delta,
prev_delta,
8,
range_writer);
}
if (likely (end_cp != 0xFFFF)) {
range_writer (0xFFFF, 0xFFFF, 1);
}
}
return endCode;
/*
* Writes the current range as either one or two ranges depending on what is most efficient.
*/
template<typename Writer>
void commit_current_range (hb_codepoint_t start,
hb_codepoint_t prev_run_start,
hb_codepoint_t run_start,
hb_codepoint_t end,
int run_delta,
int previous_run_delta,
int split_cost,
Writer& range_writer) {
bool should_split = false;
if (start < run_start && run_start < end) {
int run_cost = (end - run_start + 1) * 2;
if (run_cost >= split_cost) {
should_split = true;
}
}
// TODO(grieger): handle case where delta is legitimately 0, mark range offset array instead?
if (should_split) {
if (start == prev_run_start)
range_writer (start, run_start - 1, previous_run_delta);
else
range_writer (start, run_start - 1, 0);
range_writer (run_start, end, run_delta);
return;
}
if (start == run_start) {
// Range is only a run
range_writer (start, end, run_delta);
return;
}
// Write only a single non-run range.
range_writer (start, end, 0);
}
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
HBUINT16* serialize_startcode_array (hb_serialize_context_t *c,
Iterator it)
{
HBUINT16 *startCode = c->start_embed<HBUINT16> ();
hb_codepoint_t prev_cp = 0xFFFF;
unsigned serialize_find_segcount (Iterator it) {
struct Counter {
unsigned segcount = 0;
for (const auto& _ : +it)
{
if (prev_cp == 0xFFFF || prev_cp + 1u != _.first)
{
HBUINT16 start_code;
start_code = _.first;
c->copy<HBUINT16> (start_code);
void operator() (hb_codepoint_t start,
hb_codepoint_t end,
int delta) {
segcount++;
}
} counter;
to_ranges (+it, counter);
return counter.segcount;
}
prev_cp = _.first;
}
// There must be a final entry with end_code == 0xFFFF.
if (it.len () == 0 || prev_cp != 0xFFFF)
{
HBUINT16 finalcode;
finalcode = 0xFFFF;
if (unlikely (!c->copy<HBUINT16> (finalcode))) return nullptr;
}
return startCode;
}
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
HBINT16* serialize_idDelta_array (hb_serialize_context_t *c,
bool serialize_start_end_delta_arrays (hb_serialize_context_t *c,
Iterator it,
HBUINT16 *endCode,
HBUINT16 *startCode,
unsigned segcount)
int segcount)
{
unsigned i = 0;
hb_codepoint_t last_gid = 0, start_gid = 0, last_cp = 0xFFFF;
bool use_delta = true;
struct Writer {
hb_serialize_context_t *serializer_;
HBUINT16* end_code_;
HBUINT16* start_code_;
HBINT16* id_delta_;
int index_;
HBINT16 *idDelta = c->start_embed<HBINT16> ();
if ((char *)idDelta - (char *)startCode != (int) segcount * (int) HBINT16::static_size)
return nullptr;
for (const auto& _ : +it)
{
if (_.first == startCode[i])
{
use_delta = true;
start_gid = _.second;
Writer(hb_serialize_context_t *serializer)
: serializer_(serializer),
end_code_(nullptr),
start_code_(nullptr),
id_delta_(nullptr),
index_ (0) {}
void operator() (hb_codepoint_t start,
hb_codepoint_t end,
int delta) {
start_code_[index_] = start;
end_code_[index_] = end;
id_delta_[index_] = delta;
index_++;
}
else if (_.second != last_gid + 1) use_delta = false;
} writer(c);
if (_.first == endCode[i])
{
HBINT16 delta;
if (use_delta) delta = (int)start_gid - (int)startCode[i];
else delta = 0;
c->copy<HBINT16> (delta);
writer.end_code_ = c->allocate_size<HBUINT16> (HBUINT16::static_size * segcount);
c->allocate_size<HBUINT16> (2); // padding
writer.start_code_ = c->allocate_size<HBUINT16> (HBUINT16::static_size * segcount);
writer.id_delta_ = c->allocate_size<HBINT16> (HBINT16::static_size * segcount);
i++;
}
if (unlikely (!writer.end_code_ || !writer.start_code_ || !writer.id_delta_)) return false;
last_gid = _.second;
last_cp = _.first;
}
if (it.len () == 0 || last_cp != 0xFFFF)
{
HBINT16 delta;
delta = 1;
if (unlikely (!c->copy<HBINT16> (delta))) return nullptr;
}
return idDelta;
to_ranges (+it, writer);
return true;
}
template<typename Iterator,
@@ -257,22 +329,14 @@ struct CmapSubtableFormat4
if (unlikely (!c->extend_min (this))) return;
this->format = 4;
//serialize endCode[]
HBUINT16 *endCode = serialize_endcode_array (c, format4_iter);
if (unlikely (!endCode)) return;
//serialize endCode[], startCode[], idDelta[]
HBUINT16* endCode = c->start_embed<HBUINT16> ();
unsigned segcount = serialize_find_segcount (format4_iter);
if (unlikely (!serialize_start_end_delta_arrays (c, format4_iter, segcount)))
return;
unsigned segcount = (c->length () - min_size) / HBUINT16::static_size;
// 2 bytes of padding.
if (unlikely (!c->allocate_size<HBUINT16> (HBUINT16::static_size))) return; // 2 bytes of padding.
// serialize startCode[]
HBUINT16 *startCode = serialize_startcode_array (c, format4_iter);
if (unlikely (!startCode)) return;
//serialize idDelta[]
HBINT16 *idDelta = serialize_idDelta_array (c, format4_iter, endCode, startCode, segcount);
if (unlikely (!idDelta)) return;
HBUINT16 *startCode = endCode + segcount + 1;
HBINT16 *idDelta = ((HBINT16*)startCode) + segcount;
HBUINT16 *idRangeOffset = serialize_rangeoffset_glyid (c, format4_iter, endCode, startCode, idDelta, segcount);
if (unlikely (!c->check_success (idRangeOffset))) return;
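A hedged worked example of the run_cost >= split_cost test in to_ranges() above. The byte figures are my reading of the standard cmap format 4 layout, not stated in this hunk: each extra segment adds one 16-bit entry to endCode[], startCode[], idDelta[] and idRangeOffset[] (8 bytes), while a codepoint that cannot be delta-mapped costs 2 bytes in glyphIdArray[].

// Hedged sketch of the segment-splitting arithmetic used by to_ranges().
#include <cstdio>

static bool worth_splitting (int run_length, bool first_sub_range)
{
  const int split_cost = first_sub_range ? 8 : 16;  // mirrors to_ranges()
  const int run_cost   = run_length * 2;            // glyphIdArray bytes saved
  return run_cost >= split_cost;
}

int main ()
{
  // A run of 3 consecutive glyph IDs saves 6 bytes: keep it merged.
  // A run of 4 saves 8 bytes: worth committing as its own delta-mapped segment.
  std::printf ("%d %d\n", worth_splitting (3, true), worth_splitting (4, true));
  return 0;
}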
@@ -1025,7 +1025,7 @@ struct ClipList
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
if (!c->serializer->check_assign (out->format, format, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);
const hb_set_t& glyphset = *c->plan->_glyphset;
const hb_set_t& glyphset = *c->plan->_glyphset_colred;
const hb_map_t &glyph_map = *c->plan->glyph_map;
hb_map_t new_gid_offset_map;
@@ -1193,7 +1193,7 @@ struct BaseGlyphList : SortedArray32Of<BaseGlyphPaintRecord>
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
const hb_set_t* glyphset = c->plan->_glyphset;
const hb_set_t* glyphset = c->plan->_glyphset_colred;
for (const auto& _ : as_array ())
{
@@ -1411,10 +1411,9 @@ struct COLR
const BaseGlyphRecord* get_base_glyph_record (hb_codepoint_t gid) const
{
if ((unsigned int) gid == 0) // Ignore notdef.
return nullptr;
const BaseGlyphRecord* record = &(this+baseGlyphsZ).bsearch (numBaseGlyphs, (unsigned int) gid);
if ((record && (hb_codepoint_t) record->glyphId != gid))
if (record == &Null (BaseGlyphRecord) ||
(record && (hb_codepoint_t) record->glyphId != gid))
record = nullptr;
return record;
}
@@ -1432,9 +1431,16 @@ struct COLR
TRACE_SUBSET (this);
const hb_map_t &reverse_glyph_map = *c->plan->reverse_glyph_map;
const hb_set_t& glyphset = *c->plan->_glyphset_colred;
auto base_it =
+ hb_range (c->plan->num_output_glyphs ())
| hb_filter ([&](hb_codepoint_t new_gid)
{
hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
if (glyphset.has (old_gid)) return true;
return false;
})
| hb_map_retains_sorting ([&](hb_codepoint_t new_gid)
{
hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
@@ -1442,7 +1448,6 @@ struct COLR
const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid);
if (unlikely (!old_record))
return hb_pair_t<bool, BaseGlyphRecord> (false, Null (BaseGlyphRecord));
BaseGlyphRecord new_record = {};
new_record.glyphId = new_gid;
new_record.numLayers = old_record->numLayers;
@@ -1455,6 +1460,7 @@ struct COLR
auto layer_it =
+ hb_range (c->plan->num_output_glyphs ())
| hb_map (reverse_glyph_map)
| hb_filter (glyphset)
| hb_map_retains_sorting ([&](hb_codepoint_t old_gid)
{
const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid);
@@ -67,8 +67,11 @@ HB_OT_ACCELERATOR (OT, meta)
#endif
/* Vertical layout. */
#ifndef HB_NO_VERTICAL
HB_OT_TABLE (OT, vhea)
HB_OT_ACCELERATOR (OT, vmtx)
HB_OT_TABLE (OT, VORG)
#endif
/* TrueType outlines. */
HB_OT_ACCELERATOR (OT, glyf)
@@ -77,7 +80,6 @@ HB_OT_ACCELERATOR (OT, glyf)
#ifndef HB_NO_CFF
HB_OT_ACCELERATOR (OT, cff1)
HB_OT_ACCELERATOR (OT, cff2)
HB_OT_TABLE (OT, VORG)
#endif
/* OpenType variations. */
thirdparty/harfbuzz/src/hb-ot-font.cc (vendored, 20 changes)
@@ -118,6 +118,7 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
}
}
#ifndef HB_NO_VERTICAL
static void
hb_ot_get_glyph_v_advances (hb_font_t* font, void* font_data,
unsigned count,
@@ -137,7 +138,9 @@ hb_ot_get_glyph_v_advances (hb_font_t* font, void* font_data,
first_advance = &StructAtOffsetUnaligned<hb_position_t> (first_advance, advance_stride);
}
}
#endif
#ifndef HB_NO_VERTICAL
static hb_bool_t
hb_ot_get_glyph_v_origin (hb_font_t *font,
void *font_data,
@@ -150,14 +153,12 @@ hb_ot_get_glyph_v_origin (hb_font_t *font,
*x = font->get_glyph_h_advance (glyph) / 2;
#ifndef HB_NO_OT_FONT_CFF
const OT::VORG &VORG = *ot_face->VORG;
if (VORG.has_data ())
{
*y = font->em_scale_y (VORG.get_y_origin (glyph));
return true;
}
#endif
hb_glyph_extents_t extents = {0};
if (ot_face->glyf->get_extents (font, glyph, &extents))
@@ -174,6 +175,7 @@ hb_ot_get_glyph_v_origin (hb_font_t *font,
return true;
}
#endif
static hb_bool_t
hb_ot_get_glyph_extents (hb_font_t *font,
@@ -242,6 +244,7 @@ hb_ot_get_font_h_extents (hb_font_t *font,
_hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_HORIZONTAL_LINE_GAP, &metrics->line_gap);
}
#ifndef HB_NO_VERTICAL
static hb_bool_t
hb_ot_get_font_v_extents (hb_font_t *font,
void *font_data HB_UNUSED,
@@ -252,6 +255,7 @@ hb_ot_get_font_v_extents (hb_font_t *font,
_hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_VERTICAL_DESCENDER, &metrics->descender) &&
_hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_VERTICAL_LINE_GAP, &metrics->line_gap);
}
#endif
static inline void free_static_ot_funcs ();
@@ -261,17 +265,23 @@ static struct hb_ot_font_funcs_lazy_loader_t : hb_font_funcs_lazy_loader_t<hb_ot
{
hb_font_funcs_t *funcs = hb_font_funcs_create ();
hb_font_funcs_set_font_h_extents_func (funcs, hb_ot_get_font_h_extents, nullptr, nullptr);
hb_font_funcs_set_font_v_extents_func (funcs, hb_ot_get_font_v_extents, nullptr, nullptr);
hb_font_funcs_set_nominal_glyph_func (funcs, hb_ot_get_nominal_glyph, nullptr, nullptr);
hb_font_funcs_set_nominal_glyphs_func (funcs, hb_ot_get_nominal_glyphs, nullptr, nullptr);
hb_font_funcs_set_variation_glyph_func (funcs, hb_ot_get_variation_glyph, nullptr, nullptr);
hb_font_funcs_set_font_h_extents_func (funcs, hb_ot_get_font_h_extents, nullptr, nullptr);
hb_font_funcs_set_glyph_h_advances_func (funcs, hb_ot_get_glyph_h_advances, nullptr, nullptr);
hb_font_funcs_set_glyph_v_advances_func (funcs, hb_ot_get_glyph_v_advances, nullptr, nullptr);
//hb_font_funcs_set_glyph_h_origin_func (funcs, hb_ot_get_glyph_h_origin, nullptr, nullptr);
#ifndef HB_NO_VERTICAL
hb_font_funcs_set_font_v_extents_func (funcs, hb_ot_get_font_v_extents, nullptr, nullptr);
hb_font_funcs_set_glyph_v_advances_func (funcs, hb_ot_get_glyph_v_advances, nullptr, nullptr);
hb_font_funcs_set_glyph_v_origin_func (funcs, hb_ot_get_glyph_v_origin, nullptr, nullptr);
#endif
hb_font_funcs_set_glyph_extents_func (funcs, hb_ot_get_glyph_extents, nullptr, nullptr);
//hb_font_funcs_set_glyph_contour_point_func (funcs, hb_ot_get_glyph_contour_point, nullptr, nullptr);
#ifndef HB_NO_OT_FONT_GLYPH_NAMES
hb_font_funcs_set_glyph_name_func (funcs, hb_ot_get_glyph_name, nullptr, nullptr);
hb_font_funcs_set_glyph_from_name_func (funcs, hb_ot_get_glyph_from_name, nullptr, nullptr);
thirdparty/harfbuzz/src/hb-ot-glyf-table.hh (vendored, 73 changes)
@@ -93,22 +93,16 @@ struct glyf
template<typename Iterator,
hb_requires (hb_is_source_of (Iterator, unsigned int))>
static bool
_add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets)
_add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets, bool use_short_loca)
{
unsigned max_offset =
+ padded_offsets
| hb_reduce (hb_add, 0)
;
unsigned num_offsets = padded_offsets.len () + 1;
bool use_short_loca = max_offset < 0x1FFFF;
unsigned entry_size = use_short_loca ? 2 : 4;
char *loca_prime_data = (char *) hb_calloc (entry_size, num_offsets);
if (unlikely (!loca_prime_data)) return false;
DEBUG_MSG (SUBSET, nullptr, "loca entry_size %d num_offsets %d "
"max_offset %d size %d",
entry_size, num_offsets, max_offset, entry_size * num_offsets);
DEBUG_MSG (SUBSET, nullptr, "loca entry_size %d num_offsets %d size %d",
entry_size, num_offsets, entry_size * num_offsets);
if (use_short_loca)
_write_loca (padded_offsets, 1, hb_array ((HBUINT16 *) loca_prime_data, num_offsets));
@@ -151,11 +145,12 @@ struct glyf
template <typename Iterator>
bool serialize (hb_serialize_context_t *c,
Iterator it,
bool use_short_loca,
const hb_subset_plan_t *plan)
{
TRACE_SERIALIZE (this);
unsigned init_len = c->length ();
for (const auto &_ : it) _.serialize (c, plan);
for (const auto &_ : it) _.serialize (c, use_short_loca, plan);
/* As a special case when all glyph in the font are empty, add a zero byte
* to the table, so that OTS doesn’t reject it, and to make the table work
@@ -183,16 +178,28 @@ struct glyf
hb_vector_t<SubsetGlyph> glyphs;
_populate_subset_glyphs (c->plan, &glyphs);
glyf_prime->serialize (c->serializer, hb_iter (glyphs), c->plan);
auto padded_offsets =
+ hb_iter (glyphs)
| hb_map (&SubsetGlyph::padded_size)
;
unsigned max_offset = + padded_offsets | hb_reduce (hb_add, 0);
bool use_short_loca = max_offset < 0x1FFFF;
glyf_prime->serialize (c->serializer, hb_iter (glyphs), use_short_loca, c->plan);
if (!use_short_loca) {
padded_offsets =
+ hb_iter (glyphs)
| hb_map (&SubsetGlyph::length)
;
}
if (unlikely (c->serializer->in_error ())) return_trace (false);
return_trace (c->serializer->check_success (_add_loca_and_head (c->plan,
padded_offsets)));
padded_offsets,
use_short_loca)));
}
template <typename SubsetGlyph>
@@ -792,10 +799,23 @@ struct glyf
hb_array_t<contour_point_t> phantoms = points.sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
{
for (unsigned i = 0; i < PHANTOM_COUNT; ++i) phantoms[i].init ();
int h_delta = (int) header->xMin - glyf_accelerator.hmtx->get_side_bearing (gid);
int v_orig = (int) header->yMax + glyf_accelerator.vmtx->get_side_bearing (gid);
int h_delta = (int) header->xMin -
glyf_accelerator.hmtx->get_side_bearing (gid);
int v_orig = (int) header->yMax +
#ifndef HB_NO_VERTICAL
glyf_accelerator.vmtx->get_side_bearing (gid)
#else
0
#endif
;
unsigned h_adv = glyf_accelerator.hmtx->get_advance (gid);
unsigned v_adv = glyf_accelerator.vmtx->get_advance (gid);
unsigned v_adv =
#ifndef HB_NO_VERTICAL
glyf_accelerator.vmtx->get_advance (gid)
#else
- font->face->get_upem ()
#endif
;
phantoms[PHANTOM_LEFT].x = h_delta;
phantoms[PHANTOM_RIGHT].x = h_adv + h_delta;
phantoms[PHANTOM_TOP].y = v_orig;
@@ -910,7 +930,9 @@ struct glyf
gvar = nullptr;
#endif
hmtx = nullptr;
#ifndef HB_NO_VERTICAL
vmtx = nullptr;
#endif
face = face_;
const OT::head &head = *face->table.head;
if (head.indexToLocFormat > 1 || head.glyphDataFormat > 0)
@@ -924,7 +946,9 @@ struct glyf
gvar = face->table.gvar;
#endif
hmtx = face->table.hmtx;
#ifndef HB_NO_VERTICAL
vmtx = face->table.vmtx;
#endif
num_glyphs = hb_max (1u, loca_table.get_length () / (short_offset ? 2 : 4)) - 1;
num_glyphs = hb_min (num_glyphs, face->get_num_glyphs ());
@@ -1037,7 +1061,11 @@ struct glyf
success = get_points (font, gid, points_aggregator_t (font, nullptr, phantoms));
if (unlikely (!success))
return is_vertical ? vmtx->get_advance (gid) : hmtx->get_advance (gid);
return
#ifndef HB_NO_VERTICAL
is_vertical ? vmtx->get_advance (gid) :
#endif
hmtx->get_advance (gid);
float result = is_vertical
? phantoms[PHANTOM_TOP].y - phantoms[PHANTOM_BOTTOM].y
@@ -1053,7 +1081,11 @@ struct glyf
contour_point_t phantoms[PHANTOM_COUNT];
if (unlikely (!get_points (font, gid, points_aggregator_t (font, &extents, phantoms))))
return is_vertical ? vmtx->get_side_bearing (gid) : hmtx->get_side_bearing (gid);
return
#ifndef HB_NO_VERTICAL
is_vertical ? vmtx->get_side_bearing (gid) :
#endif
hmtx->get_side_bearing (gid);
return is_vertical
? ceilf (phantoms[PHANTOM_TOP].y) - extents.y_bearing
@@ -1250,7 +1282,9 @@ struct glyf
const gvar_accelerator_t *gvar;
#endif
const hmtx_accelerator_t *hmtx;
#ifndef HB_NO_VERTICAL
const vmtx_accelerator_t *vmtx;
#endif
private:
bool short_offset;
@@ -1269,13 +1303,14 @@ struct glyf
hb_bytes_t dest_end; /* region of source_glyph to copy second */
bool serialize (hb_serialize_context_t *c,
bool use_short_loca,
const hb_subset_plan_t *plan) const
{
TRACE_SERIALIZE (this);
hb_bytes_t dest_glyph = dest_start.copy (c);
dest_glyph = hb_bytes_t (&dest_glyph, dest_glyph.length + dest_end.copy (c).length);
unsigned int pad_length = padding ();
unsigned int pad_length = use_short_loca ? padding () : 0;
DEBUG_MSG (SUBSET, nullptr, "serialize %d byte glyph, width %d pad %d", dest_glyph.length, dest_glyph.length + pad_length, pad_length);
HBUINT8 pad;
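The subsetter above now decides use_short_loca once (max_offset < 0x1FFFF) and threads it through serialize() instead of recomputing it inside _add_loca_and_head(). A hedged reminder of why that bound works, per the OpenType loca specification: the short format stores byteOffset / 2 in a uint16, so the largest expressible offset is 0xFFFF * 2 = 0x1FFFE, and offsets must stay even, which is why the padded glyph sizes are the quantity being summed.

// Hedged sketch of the use_short_loca decision above.
#include <cstdint>

static bool can_use_short_loca (uint64_t total_padded_glyf_size)
{
  // Short 'loca' stores byteOffset / 2 in a uint16, so the largest offset it
  // can express is 0xFFFF * 2 = 0x1FFFE; same test as max_offset < 0x1FFFF.
  return total_padded_glyf_size < 0x1FFFFu;
}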
thirdparty/harfbuzz/src/hb-ot-hmtx-table.hh (vendored, 9 changes)
@@ -165,7 +165,14 @@ struct hmtxvmtx
{
default_advance = default_advance_ ? default_advance_ : hb_face_get_upem (face);
num_advances = T::is_horizontal ? face->table.hhea->numberOfLongMetrics : face->table.vhea->numberOfLongMetrics;
num_advances = T::is_horizontal ?
face->table.hhea->numberOfLongMetrics :
#ifndef HB_NO_VERTICAL
face->table.vhea->numberOfLongMetrics
#else
0
#endif
;
table = hb_sanitize_context_t ().reference_table<hmtxvmtx> (face, T::tableTag);
@@ -68,8 +68,8 @@
#define HB_MAX_FEATURE_INDICES 1500
#endif
#ifndef HB_MAX_LOOKUP_INDICES
#define HB_MAX_LOOKUP_INDICES 20000
#ifndef HB_MAX_LOOKUP_VISIT_COUNT
#define HB_MAX_LOOKUP_VISIT_COUNT 35000
#endif
@@ -173,7 +173,7 @@ struct hb_subset_layout_context_t :
bool visitLookupIndex()
{
lookup_index_count++;
return lookup_index_count < HB_MAX_LOOKUP_INDICES;
return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT;
}
hb_subset_context_t *subset_context;
@@ -118,7 +118,13 @@ struct ValueFormat : HBUINT16
if (!format) return ret;
hb_font_t *font = c->font;
bool horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);
bool horizontal =
#ifndef HB_NO_VERTICAL
HB_DIRECTION_IS_HORIZONTAL (c->direction)
#else
true
#endif
;
if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++, &ret));
if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++, &ret));
@@ -566,7 +566,7 @@ struct AlternateSet
{
/* Maybe we can do better than unsafe-to-break all; but since we are
* changing random state, it would be hard to track that. Good 'nough. */
c->buffer->unsafe_to_break_all ();
c->buffer->unsafe_to_break (0, c->buffer->len);
alt_index = c->random_number () % count + 1;
}
thirdparty/harfbuzz/src/hb-ot-layout-gsubgpos.hh (vendored, 31 changes)
@@ -81,12 +81,15 @@ struct hb_closure_context_t :
nesting_level_left++;
}
void reset_lookup_visit_count ()
{ lookup_count = 0; }
bool lookup_limit_exceeded ()
{ return lookup_count > HB_MAX_LOOKUP_INDICES; }
{ return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }
bool should_visit_lookup (unsigned int lookup_index)
{
if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
return false;
if (is_lookup_done (lookup_index))
@@ -211,7 +214,11 @@ struct hb_closure_lookups_context_t :
return;
/* Return if new lookup was recursed to before. */
if (is_lookup_visited (lookup_index))
if (lookup_limit_exceeded ()
|| visited_lookups->in_error ()
|| visited_lookups->has (lookup_index))
// Don't increment lookup count here, that will be done in the call to closure_lookups()
// made by recurse_func.
return;
nesting_level_left--;
@@ -226,12 +233,20 @@ struct hb_closure_lookups_context_t :
{ inactive_lookups->add (lookup_index); }
bool lookup_limit_exceeded ()
{ return lookup_count > HB_MAX_LOOKUP_INDICES; }
{
bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
if (ret)
DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
return ret; }
bool is_lookup_visited (unsigned lookup_index)
{
if (unlikely (lookup_count++ > HB_MAX_LOOKUP_INDICES))
if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
{
DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
lookup_count, lookup_index);
return true;
}
if (unlikely (visited_lookups->in_error ()))
return true;
@@ -1303,8 +1318,7 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
}
hb_set_add (covered_seq_indicies, seqIndex);
if (pos_glyphs)
c->push_cur_active_glyphs (pos_glyphs);
c->push_cur_active_glyphs (pos_glyphs ? pos_glyphs : c->glyphs);
unsigned endIndex = inputCount;
if (context_format == ContextFormat::CoverageBasedContext)
@@ -1312,11 +1326,10 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);
if (pos_glyphs) {
c->pop_cur_done_glyphs ();
if (pos_glyphs)
hb_set_destroy (pos_glyphs);
}
}
hb_set_destroy (covered_seq_indicies);
}
thirdparty/harfbuzz/src/hb-ot-layout.cc (vendored, 1 change)
@@ -1530,6 +1530,7 @@ hb_ot_layout_lookups_substitute_closure (hb_face_t *face,
unsigned int glyphs_length;
do
{
c.reset_lookup_visit_count ();
glyphs_length = glyphs->get_population ();
if (lookups)
{
thirdparty/harfbuzz/src/hb-ot-layout.hh (vendored, 26 changes)
@@ -350,24 +350,20 @@ _hb_glyph_info_is_continuation (const hb_glyph_info_t *info)
{
return info->unicode_props() & UPROPS_MASK_CONTINUATION;
}
/* Loop over grapheme. Based on foreach_cluster(). */
static inline bool
_hb_grapheme_group_func (const hb_glyph_info_t& a HB_UNUSED,
const hb_glyph_info_t& b)
{ return _hb_glyph_info_is_continuation (&b); }
#define foreach_grapheme(buffer, start, end) \
for (unsigned int \
_count = buffer->len, \
start = 0, end = _count ? _hb_next_grapheme (buffer, 0) : 0; \
start < _count; \
start = end, end = _hb_next_grapheme (buffer, start))
foreach_group (buffer, start, end, _hb_grapheme_group_func)
static inline unsigned int
_hb_next_grapheme (hb_buffer_t *buffer, unsigned int start)
static inline void
_hb_ot_layout_reverse_graphemes (hb_buffer_t *buffer)
{
hb_glyph_info_t *info = buffer->info;
unsigned int count = buffer->len;
while (++start < count && _hb_glyph_info_is_continuation (&info[start]))
;
return start;
buffer->reverse_groups (_hb_grapheme_group_func,
buffer->cluster_level == HB_BUFFER_CLUSTER_LEVEL_MONOTONE_CHARACTERS);
}
static inline bool
thirdparty/harfbuzz/src/hb-ot-math-table.hh (vendored, 6 changes)
@@ -511,7 +511,8 @@ struct MathGlyphInfo
| hb_map_retains_sorting (glyph_map)
;
out->extendedShapeCoverage.serialize_serialize (c->serializer, it);
if (it) out->extendedShapeCoverage.serialize_serialize (c->serializer, it);
else out->extendedShapeCoverage = 0;
out->mathKernInfo.serialize_subset (c, mathKernInfo, this);
return_trace (true);
@@ -885,7 +886,10 @@ struct MathVariants
o->serialize_subset (c, glyphConstruction[i], this);
}
if (new_vert_coverage)
out->vertGlyphCoverage.serialize_serialize (c->serializer, new_vert_coverage.iter ());
if (new_hori_coverage)
out->horizGlyphCoverage.serialize_serialize (c->serializer, new_hori_coverage.iter ());
return_trace (true);
}
thirdparty/harfbuzz/src/hb-ot-metrics.cc (vendored, 7 changes)
@@ -77,6 +77,7 @@ _hb_ot_metrics_get_position_common (hb_font_t *font,
(face->table.TABLE->has_data () && \
(position && (*position = font->em_scalef_y (_fix_ascender_descender ( \
face->table.TABLE->ATTR + GET_VAR, metrics_tag))), true))
case HB_OT_METRICS_TAG_HORIZONTAL_ASCENDER:
return (face->table.OS2->use_typo_metrics () && GET_METRIC_Y (OS2, sTypoAscender)) ||
GET_METRIC_Y (hhea, ascender);
@@ -86,9 +87,13 @@ _hb_ot_metrics_get_position_common (hb_font_t *font,
case HB_OT_METRICS_TAG_HORIZONTAL_LINE_GAP:
return (face->table.OS2->use_typo_metrics () && GET_METRIC_Y (OS2, sTypoLineGap)) ||
GET_METRIC_Y (hhea, lineGap);
#ifndef HB_NO_VERTICAL
case HB_OT_METRICS_TAG_VERTICAL_ASCENDER: return GET_METRIC_X (vhea, ascender);
case HB_OT_METRICS_TAG_VERTICAL_DESCENDER: return GET_METRIC_X (vhea, descender);
case HB_OT_METRICS_TAG_VERTICAL_LINE_GAP: return GET_METRIC_X (vhea, lineGap);
#endif
#undef GET_METRIC_Y
#undef GET_METRIC_X
#undef GET_VAR
@@ -158,9 +163,11 @@ hb_ot_metrics_get_position (hb_font_t *font,
case HB_OT_METRICS_TAG_HORIZONTAL_CARET_RISE: return GET_METRIC_Y (hhea, caretSlopeRise);
case HB_OT_METRICS_TAG_HORIZONTAL_CARET_RUN: return GET_METRIC_X (hhea, caretSlopeRun);
case HB_OT_METRICS_TAG_HORIZONTAL_CARET_OFFSET: return GET_METRIC_X (hhea, caretOffset);
#ifndef HB_NO_VERTICAL
case HB_OT_METRICS_TAG_VERTICAL_CARET_RISE: return GET_METRIC_X (vhea, caretSlopeRise);
case HB_OT_METRICS_TAG_VERTICAL_CARET_RUN: return GET_METRIC_Y (vhea, caretSlopeRun);
case HB_OT_METRICS_TAG_VERTICAL_CARET_OFFSET: return GET_METRIC_Y (vhea, caretOffset);
#endif
case HB_OT_METRICS_TAG_X_HEIGHT: return GET_METRIC_Y (OS2->v2 (), sxHeight);
case HB_OT_METRICS_TAG_CAP_HEIGHT: return GET_METRIC_Y (OS2->v2 (), sCapHeight);
case HB_OT_METRICS_TAG_SUBSCRIPT_EM_X_SIZE: return GET_METRIC_X (OS2, ySubscriptXSize);
thirdparty/harfbuzz/src/hb-ot-shape.cc (vendored, 19 changes)
@@ -628,20 +628,7 @@ hb_ensure_native_direction (hb_buffer_t *buffer)
(HB_DIRECTION_IS_VERTICAL (direction) &&
direction != HB_DIRECTION_TTB))
{
if (buffer->cluster_level == HB_BUFFER_CLUSTER_LEVEL_MONOTONE_CHARACTERS)
foreach_grapheme (buffer, start, end)
{
buffer->merge_clusters (start, end);
buffer->reverse_range (start, end);
}
else
foreach_grapheme (buffer, start, end)
/* form_clusters() merged clusters already, we don't merge. */
buffer->reverse_range (start, end);
buffer->reverse ();
_hb_ot_layout_reverse_graphemes (buffer);
buffer->props.direction = HB_DIRECTION_REVERSE (buffer->props.direction);
}
}
@@ -651,6 +638,7 @@ hb_ensure_native_direction (hb_buffer_t *buffer)
* Substitute
*/
#ifndef HB_NO_VERTICAL
static hb_codepoint_t
hb_vert_char_for (hb_codepoint_t u)
{
@@ -701,6 +689,7 @@ hb_vert_char_for (hb_codepoint_t u)
return u;
}
#endif
static inline void
hb_ot_rotate_chars (const hb_ot_shape_context_t *c)
@@ -723,6 +712,7 @@ hb_ot_rotate_chars (const hb_ot_shape_context_t *c)
}
}
#ifndef HB_NO_VERTICAL
if (HB_DIRECTION_IS_VERTICAL (c->target_direction) && !c->plan->has_vert)
{
for (unsigned int i = 0; i < count; i++) {
@@ -731,6 +721,7 @@ hb_ot_rotate_chars (const hb_ot_shape_context_t *c)
info[i].codepoint = codepoint;
}
}
#endif
}
static inline void
thirdparty/harfbuzz/src/hb-ot-tag-table.hh (vendored, 14 changes)
@@ -6,7 +6,7 @@
*
* on files with these headers:
*
* <meta name="updated_at" content="2021-09-02 09:40 PM" />
* <meta name="updated_at" content="2021-12-09 12:01 AM" />
* File-Date: 2021-08-06
*/
@@ -933,6 +933,7 @@ static const LangTag ot_languages[] = {
{"mnp", HB_TAG('Z','H','S',' ')}, /* Min Bei Chinese -> Chinese, Simplified */
{"mns", HB_TAG('M','A','N',' ')}, /* Mansi */
{"mnw", HB_TAG('M','O','N',' ')}, /* Mon */
{"mnw", HB_TAG('M','O','N','T')}, /* Mon -> Thailand Mon */
{"mnx", HB_TAG_NONE }, /* Manikion != Manx */
{"mo", HB_TAG('M','O','L',' ')}, /* Moldavian (retired code) */
{"mod", HB_TAG('C','P','P',' ')}, /* Mobilian -> Creoles */
@@ -1422,6 +1423,7 @@ static const LangTag ot_languages[] = {
{"tia", HB_TAG('B','B','R',' ')}, /* Tidikelt Tamazight -> Berber */
{"tig", HB_TAG('T','G','R',' ')}, /* Tigre */
/*{"tiv", HB_TAG('T','I','V',' ')},*/ /* Tiv */
/*{"tjl", HB_TAG('T','J','L',' ')},*/ /* Tai Laing */
{"tjo", HB_TAG('B','B','R',' ')}, /* Temacine Tamazight -> Berber */
{"tk", HB_TAG('T','K','M',' ')}, /* Turkmen */
{"tkg", HB_TAG('M','L','G',' ')}, /* Tesaka Malagasy -> Malagasy */
@@ -2521,6 +2523,14 @@ hb_ot_tags_from_complex_language (const char *lang_str,
*count = 1;
return true;
}
if (0 == strncmp (&lang_str[1], "nw-", 3)
&& subtag_matches (lang_str, limit, "-th"))
{
/* Mon; Thailand */
tags[0] = HB_TAG('M','O','N','T'); /* Thailand Mon */
*count = 1;
return true;
}
break;
case 'n':
if (lang_matches (&lang_str[1], "an-hant-hk"))
@@ -2884,6 +2894,8 @@ hb_ot_ambiguous_tag_to_language (hb_tag_t tag)
return hb_language_from_string ("man", -1); /* Mandingo [macrolanguage] */
case HB_TAG('M','O','L',' '): /* Moldavian */
return hb_language_from_string ("ro-MD", -1); /* Romanian; Moldova */
case HB_TAG('M','O','N','T'): /* Thailand Mon */
return hb_language_from_string ("mnw-TH", -1); /* Mon; Thailand */
case HB_TAG('M','Y','N',' '): /* Mayan */
return hb_language_from_string ("myn", -1); /* Mayan [family] */
case HB_TAG('N','A','H',' '): /* Nahuatl */
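With the new ot_languages entry and the hb_ot_tags_from_complex_language() case above, BCP 47 "mnw-TH" resolves to the 'MONT' (Thailand Mon) language system, and 'MONT' maps back to "mnw-TH". A hedged client-side sketch using existing public API; the printed result is the expected outcome, not a captured run.

// Hedged sketch: ask HarfBuzz which OpenType language tags it picks for "mnw-TH".
#include <hb.h>
#include <hb-ot.h>
#include <stdio.h>

int main ()
{
  hb_tag_t script_tags[4], language_tags[4];
  unsigned script_count = 4, language_count = 4;
  hb_ot_tags_from_script_and_language (HB_SCRIPT_MYANMAR,
                                       hb_language_from_string ("mnw-TH", -1),
                                       &script_count, script_tags,
                                       &language_count, language_tags);
  for (unsigned i = 0; i < language_count; i++)
  {
    char buf[5] = {0};
    hb_tag_to_string (language_tags[i], buf);
    printf ("%s\n", buf);  // expected to include MONT with HarfBuzz >= 3.2.0
  }
  return 0;
}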
226
thirdparty/harfbuzz/src/hb-repacker.hh
vendored
226
thirdparty/harfbuzz/src/hb-repacker.hh
vendored
|
@ -100,12 +100,18 @@ struct graph_t
bool is_leaf () const
{
return !obj.links.length;
return !obj.real_links.length && !obj.virtual_links.length;
}

void raise_priority ()
bool raise_priority ()
{
if (has_max_priority ()) return false;
priority++;
return true;
}

bool has_max_priority () const {
return priority >= 3;
}

int64_t modified_distance (unsigned order) const

@ -115,15 +121,22 @@ struct graph_t
// it's parent where possible.

int64_t modified_distance =
hb_min (hb_max(distance + distance_modifier (), 0), 0x7FFFFFFFFF);
return (modified_distance << 22) | (0x003FFFFF & order);
hb_min (hb_max(distance + distance_modifier (), 0), 0x7FFFFFFFFFF);
if (has_max_priority ()) {
modified_distance = 0;
}
return (modified_distance << 18) | (0x003FFFF & order);
}

int64_t distance_modifier () const
{
if (!priority) return 0;
int64_t table_size = obj.tail - obj.head;
return -(table_size - table_size / (1 << hb_min(priority, 16u)));

if (priority == 1)
return -table_size / 2;

return -table_size;
}
};

@ -164,9 +177,10 @@ struct graph_t
if (check_success (!vertices_.in_error ()))
v->obj = *objects[i];
if (!removed_nil) continue;
for (unsigned i = 0; i < v->obj.links.length; i++)
// Fix indices to account for removed nil object.
v->obj.links[i].objidx--;
for (auto& l : v->obj.all_links_writer ()) {
l.objidx--;
}
}
}

@ -203,26 +217,46 @@ struct graph_t
/*
* serialize graph into the provided serialization buffer.
*/
void serialize (hb_serialize_context_t* c) const
hb_blob_t* serialize () const
{
c->start_serialize<void> ();
hb_vector_t<char> buffer;
size_t size = serialized_length ();
if (!buffer.alloc (size)) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Unable to allocate output buffer.");
return nullptr;
}
hb_serialize_context_t c((void *) buffer, size);

c.start_serialize<void> ();
for (unsigned i = 0; i < vertices_.length; i++) {
c->push ();
c.push ();

size_t size = vertices_[i].obj.tail - vertices_[i].obj.head;
char* start = c->allocate_size <char> (size);
if (!start) return;
char* start = c.allocate_size <char> (size);
if (!start) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Buffer out of space.");
return nullptr;
}

memcpy (start, vertices_[i].obj.head, size);

for (const auto& link : vertices_[i].obj.links)
serialize_link (link, start, c);
// Only real links needs to be serialized.
for (const auto& link : vertices_[i].obj.real_links)
serialize_link (link, start, &c);

// All duplications are already encoded in the graph, so don't
// enable sharing during packing.
c->pop_pack (false);
c.pop_pack (false);
}
c->end_serialize ();
c.end_serialize ();

if (c.in_error ()) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Error during serialization. Err flag: %d",
c.errors);
return nullptr;
}

return c.copy_blob ();
}

/*

@ -260,7 +294,7 @@ struct graph_t
sorted_graph[new_id] = next;
id_map[next_id] = new_id--;

for (const auto& link : next.obj.links) {
for (const auto& link : next.obj.all_links ()) {
removed_edges[link.objidx]++;
if (!(vertices_[link.objidx].incoming_edges () - removed_edges[link.objidx]))
queue.push (link.objidx);

@ -314,7 +348,7 @@ struct graph_t
sorted_graph[new_id] = next;
id_map[next_id] = new_id--;

for (const auto& link : next.obj.links) {
for (const auto& link : next.obj.all_links ()) {
removed_edges[link.objidx]++;
if (!(vertices_[link.objidx].incoming_edges () - removed_edges[link.objidx]))
// Add the order that the links were encountered to the priority.

@ -348,7 +382,8 @@ struct graph_t
hb_set_t roots;
for (unsigned i = 0; i <= root_index; i++)
{
for (auto& l : vertices_[i].obj.links)
// Only real links can form 32 bit spaces
for (auto& l : vertices_[i].obj.real_links)
{
if (l.width == 4 && !l.is_signed)
{

@ -466,7 +501,7 @@ struct graph_t

void find_subgraph (unsigned node_idx, hb_hashmap_t<unsigned, unsigned>& subgraph)
{
for (const auto& link : vertices_[node_idx].obj.links)
for (const auto& link : vertices_[node_idx].obj.all_links ())
{
if (subgraph.has (link.objidx))
{

@ -482,7 +517,7 @@ struct graph_t
{
if (subgraph.has (node_idx)) return;
subgraph.add (node_idx);
for (const auto& link : vertices_[node_idx].obj.links)
for (const auto& link : vertices_[node_idx].obj.all_links ())
find_subgraph (link.objidx, subgraph);
}

@ -497,7 +532,7 @@ struct graph_t
return;

index_map.set (node_idx, duplicate (node_idx));
for (const auto& l : object (node_idx).links) {
for (const auto& l : object (node_idx).all_links ()) {
duplicate_subgraph (l.objidx, index_map);
}
}

@ -523,13 +558,19 @@ struct graph_t
clone->parents.reset ();

unsigned clone_idx = vertices_.length - 2;
for (const auto& l : child.obj.links)
for (const auto& l : child.obj.real_links)
{
clone->obj.links.push (l);
clone->obj.real_links.push (l);
vertices_[l.objidx].parents.push (clone_idx);
}
for (const auto& l : child.obj.virtual_links)
{
clone->obj.virtual_links.push (l);
vertices_[l.objidx].parents.push (clone_idx);
}

check_success (!clone->obj.links.in_error ());
check_success (!clone->obj.real_links.in_error ());
check_success (!clone->obj.virtual_links.in_error ());

// The last object is the root of the graph, so swap back the root to the end.
// The root's obj idx does change, however since it's root nothing else refers to it.

@ -539,7 +580,7 @@ struct graph_t
vertices_[vertices_.length - 1] = root;

// Since the root moved, update the parents arrays of all children on the root.
for (const auto& l : root.obj.links)
for (const auto& l : root.obj.all_links ())
vertices_[l.objidx].remap_parent (root_idx () - 1, root_idx ());

return clone_idx;

@ -555,7 +596,7 @@ struct graph_t
update_parents ();

unsigned links_to_child = 0;
for (const auto& l : vertices_[parent_idx].obj.links)
for (const auto& l : vertices_[parent_idx].obj.all_links ())
{
if (l.objidx == child_idx) links_to_child++;
}

@ -578,9 +619,8 @@ struct graph_t
if (parent_idx == clone_idx) parent_idx++;

auto& parent = vertices_[parent_idx];
for (unsigned i = 0; i < parent.obj.links.length; i++)
for (auto& l : parent.obj.all_links_writer ())
{
auto& l = parent.obj.links[i];
if (l.objidx != child_idx)
continue;

@ -593,7 +633,7 @@ struct graph_t
/*
* Raises the sorting priority of all children.
*/
void raise_childrens_priority (unsigned parent_idx)
bool raise_childrens_priority (unsigned parent_idx)
{
DEBUG_MSG (SUBSET_REPACK, nullptr, " Raising priority of all children of %d",
parent_idx);

@ -601,8 +641,10 @@ struct graph_t
// to invalidate positions. It does not change graph structure so no need
// to update distances or edge counts.
auto& parent = vertices_[parent_idx].obj;
for (unsigned i = 0; i < parent.links.length; i++)
vertices_[parent.links[i].objidx].raise_priority ();
bool made_change = false;
for (auto& l : parent.all_links_writer ())
made_change |= vertices_[l.objidx].raise_priority ();
return made_change;
}

/*

@ -615,7 +657,8 @@ struct graph_t

for (int parent_idx = vertices_.length - 1; parent_idx >= 0; parent_idx--)
{
for (const auto& link : vertices_[parent_idx].obj.links)
// Don't need to check virtual links for overflow
for (const auto& link : vertices_[parent_idx].obj.real_links)
{
int64_t offset = compute_offset (parent_idx, link);
if (is_valid_offset (offset, link))

@ -655,8 +698,10 @@ struct graph_t
if (!DEBUG_ENABLED(SUBSET_REPACK)) return;

update_parents ();
int limit = 10;
for (const auto& o : overflows)
{
if (!limit--) break;
const auto& parent = vertices_[o.parent];
const auto& child = vertices_[o.child];
DEBUG_MSG (SUBSET_REPACK, nullptr,

@ -665,13 +710,16 @@ struct graph_t
"%4d (%4d in, %4d out, space %2d)",
o.parent,
parent.incoming_edges (),
parent.obj.links.length,
parent.obj.real_links.length + parent.obj.virtual_links.length,
space_for (o.parent),
o.child,
child.incoming_edges (),
child.obj.links.length,
child.obj.real_links.length + child.obj.virtual_links.length,
space_for (o.child));
}
if (overflows.length > 10) {
DEBUG_MSG (SUBSET_REPACK, nullptr, " ... plus %d more overflows.", overflows.length - 10);
}
}

unsigned num_roots_for_space (unsigned space) const

@ -684,12 +732,19 @@ struct graph_t
return num_roots_for_space_.length;
}

void move_to_new_space (unsigned index)
void move_to_new_space (const hb_set_t& indices)
{
num_roots_for_space_.push (0);
unsigned new_space = num_roots_for_space_.length - 1;

for (unsigned index : indices) {
auto& node = vertices_[index];
num_roots_for_space_.push (1);
num_roots_for_space_[node.space] = num_roots_for_space_[node.space] - 1;
node.space = num_roots_for_space_.length - 1;
num_roots_for_space_[new_space] = num_roots_for_space_[new_space] + 1;
node.space = new_space;
distance_invalid = true;
positions_invalid = true;
}
}

unsigned space_for (unsigned index, unsigned* root = nullptr) const

@ -716,6 +771,15 @@ struct graph_t

private:

size_t serialized_length () const {
size_t total_size = 0;
for (unsigned i = 0; i < vertices_.length; i++) {
size_t size = vertices_[i].obj.tail - vertices_[i].obj.head;
total_size += size;
}
return total_size;
}

/*
* Returns the numbers of incoming edges that are 32bits wide.
*/

@ -728,7 +792,8 @@ struct graph_t
if (visited.has (p)) continue;
visited.add (p);

for (const auto& l : vertices_[p].obj.links)
// Only real links can be wide
for (const auto& l : vertices_[p].obj.real_links)
{
if (l.objidx == node_idx && l.width == 4 && !l.is_signed)
{

@ -755,7 +820,7 @@ struct graph_t

for (unsigned p = 0; p < vertices_.length; p++)
{
for (auto& l : vertices_[p].obj.links)
for (auto& l : vertices_[p].obj.all_links ())
{
vertices_[l.objidx].parents.push (p);
}

@ -823,7 +888,7 @@ struct graph_t
int64_t next_distance = vertices_[next_idx].distance;
visited[next_idx] = true;

for (const auto& link : next.obj.links)
for (const auto& link : next.obj.all_links ())
{
if (visited[link.objidx]) continue;

@ -922,9 +987,8 @@ struct graph_t
if (!id_map) return;
for (unsigned i : subgraph)
{
for (unsigned j = 0; j < vertices_[i].obj.links.length; j++)
for (auto& link : vertices_[i].obj.all_links_writer ())
{
auto& link = vertices_[i].obj.links[j];
if (!id_map.has (link.objidx)) continue;
if (only_wide && !(link.width == 4 && !link.is_signed)) continue;

@ -942,9 +1006,8 @@ struct graph_t
for (unsigned i = 0; i < sorted_graph->length; i++)
{
(*sorted_graph)[i].remap_parents (id_map);
for (unsigned j = 0; j < (*sorted_graph)[i].obj.links.length; j++)
for (auto& link : (*sorted_graph)[i].obj.all_links_writer ())
{
auto& link = (*sorted_graph)[i].obj.links[j];
link.objidx = id_map[link.objidx];
}
}

@ -1023,7 +1086,7 @@ struct graph_t
const auto& v = vertices_[start_idx];

// Graph is treated as undirected so search children and parents of start_idx
for (const auto& l : v.obj.links)
for (const auto& l : v.obj.all_links ())
find_connected_nodes (l.objidx, targets, visited, connected);

for (unsigned p : v.parents)

@ -1044,28 +1107,51 @@ struct graph_t
static bool _try_isolating_subgraphs (const hb_vector_t<graph_t::overflow_record_t>& overflows,
graph_t& sorted_graph)
{
unsigned space = 0;
hb_set_t roots_to_isolate;

for (int i = overflows.length - 1; i >= 0; i--)
{
const graph_t::overflow_record_t& r = overflows[i];
unsigned root = 0;
unsigned space = sorted_graph.space_for (r.parent, &root);
if (!space) continue;
if (sorted_graph.num_roots_for_space (space) <= 1) continue;

DEBUG_MSG (SUBSET_REPACK, nullptr, "Overflow in space %d moving subgraph %d to space %d.",
unsigned root;
unsigned overflow_space = sorted_graph.space_for (r.parent, &root);
if (!overflow_space) continue;
if (sorted_graph.num_roots_for_space (overflow_space) <= 1) continue;

if (!space) {
space = overflow_space;
}

if (space == overflow_space)
roots_to_isolate.add(root);
}

if (!roots_to_isolate) return false;

unsigned maximum_to_move = hb_max ((sorted_graph.num_roots_for_space (space) / 2u), 1u);
if (roots_to_isolate.get_population () > maximum_to_move) {
// Only move at most half of the roots in a space at a time.
unsigned extra = roots_to_isolate.get_population () - maximum_to_move;
while (extra--) {
unsigned root = HB_SET_VALUE_INVALID;
roots_to_isolate.previous (&root);
roots_to_isolate.del (root);
}
}

DEBUG_MSG (SUBSET_REPACK, nullptr,
"Overflow in space %d (%d roots). Moving %d roots to space %d.",
space,
root,
sorted_graph.num_roots_for_space (space),
roots_to_isolate.get_population (),
sorted_graph.next_space ());

hb_set_t roots;
roots.add (root);
sorted_graph.isolate_subgraph (roots);
for (unsigned new_root : roots)
sorted_graph.move_to_new_space (new_root);
sorted_graph.isolate_subgraph (roots_to_isolate);
sorted_graph.move_to_new_space (roots_to_isolate);

return true;
}
return false;
}

static bool _process_overflows (const hb_vector_t<graph_t::overflow_record_t>& overflows,
hb_set_t& priority_bumped_parents,

@ -1093,16 +1179,16 @@ static bool _process_overflows (const hb_vector_t<graph_t::overflow_record_t>& o
// TODO(garretrieger): initially limiting this to leaf's since they can be
// moved closer with fewer consequences. However, this can
// likely can be used for non-leafs as well.
// TODO(garretrieger): add a maximum priority, don't try to raise past this.
// TODO(garretrieger): also try lowering priority of the parent. Make it
// get placed further up in the ordering, closer to it's children.
// this is probably preferable if the total size of the parent object
// is < then the total size of the children (and the parent can be moved).
// Since in that case moving the parent will cause a smaller increase in
// the length of other offsets.
sorted_graph.raise_childrens_priority (r.parent);
if (sorted_graph.raise_childrens_priority (r.parent)) {
priority_bumped_parents.add (r.parent);
resolution_attempted = true;
}
continue;
}

@ -1127,19 +1213,17 @@ static bool _process_overflows (const hb_vector_t<graph_t::overflow_record_t>& o
* For a detailed writeup describing how the algorithm operates see:
* docs/repacker.md
*/
inline void
inline hb_blob_t*
hb_resolve_overflows (const hb_vector_t<hb_serialize_context_t::object_t *>& packed,
hb_tag_t table_tag,
hb_serialize_context_t* c,
unsigned max_rounds = 10) {
unsigned max_rounds = 20) {
// Kahn sort is ~twice as fast as shortest distance sort and works for many fonts
// so try it first to save time.
graph_t sorted_graph (packed);
sorted_graph.sort_kahn ();
if (!sorted_graph.will_overflow ())
{
sorted_graph.serialize (c);
return;
return sorted_graph.serialize ();
}

sorted_graph.sort_shortest_distance ();

@ -1178,17 +1262,17 @@ hb_resolve_overflows (const hb_vector_t<hb_serialize_context_t::object_t *>& pac

if (sorted_graph.in_error ())
{
c->err (HB_SERIALIZE_ERROR_OTHER);
return;
DEBUG_MSG (SUBSET_REPACK, nullptr, "Sorted graph in error state.");
return nullptr;
}

if (sorted_graph.will_overflow ())
{
c->err (HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
DEBUG_MSG (SUBSET_REPACK, nullptr, "Offset overflow resolution failed.");
return;
return nullptr;
}
sorted_graph.serialize (c);

return sorted_graph.serialize ();
}

#endif /* HB_REPACKER_HH */

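The central change in hb-repacker.hh is that hb_resolve_overflows () now owns its output: it sizes a buffer from serialized_length (), serializes into a local hb_serialize_context_t, and returns an hb_blob_t* (nullptr on failure) instead of writing into a caller-supplied context. A hedged sketch of a caller, mirroring _repack () in hb-subset.cc further below; the wrapper name is made up, and hb-repacker.hh is an internal header rather than public API:

// Sketch of a caller for the new return-by-blob repacker API.
#include "hb-repacker.hh"

static hb_blob_t *
repack_or_null (hb_tag_t tag, const hb_serialize_context_t &c)
{
  if (!c.offset_overflow ())
    return c.copy_blob ();           // no overflow: reuse the packed bytes

  // The repacker allocates its own buffer and returns a blob, or nullptr
  // if allocation, serialization, or overflow resolution fails.
  return hb_resolve_overflows (c.object_graph (), tag);
}
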
55 thirdparty/harfbuzz/src/hb-serialize.hh vendored

@ -65,19 +65,26 @@ struct hb_serialize_context_t

struct object_t
{
void fini () { links.fini (); }
void fini () {
real_links.fini ();
virtual_links.fini ();
}

bool operator == (const object_t &o) const
{
// Virtual links aren't considered for equality since they don't affect the functionality
// of the object.
return (tail - head == o.tail - o.head)
&& (links.length == o.links.length)
&& (real_links.length == o.real_links.length)
&& 0 == hb_memcmp (head, o.head, tail - head)
&& links.as_bytes () == o.links.as_bytes ();
&& real_links.as_bytes () == o.real_links.as_bytes ();
}
uint32_t hash () const
{
// Virtual links aren't considered for equality since they don't affect the functionality
// of the object.
return hb_bytes_t (head, tail - head).hash () ^
links.as_bytes ().hash ();
real_links.as_bytes ().hash ();
}

struct link_t

@ -92,8 +99,14 @@ struct hb_serialize_context_t

char *head;
char *tail;
hb_vector_t<link_t> links;
hb_vector_t<link_t> real_links;
hb_vector_t<link_t> virtual_links;
object_t *next;

auto all_links () const HB_AUTO_RETURN
(( hb_concat (this->real_links, this->virtual_links) ));
auto all_links_writer () HB_AUTO_RETURN
(( hb_concat (this->real_links.writer (), this->virtual_links.writer ()) ));
};

struct snapshot_t

@ -101,12 +114,14 @@ struct hb_serialize_context_t
char *head;
char *tail;
object_t *current; // Just for sanity check
unsigned num_links;
unsigned num_real_links;
unsigned num_virtual_links;
hb_serialize_error_t errors;
};

snapshot_t snapshot ()
{ return snapshot_t { head, tail, current, current->links.length, errors }; }
{ return snapshot_t {
head, tail, current, current->real_links.length, current->virtual_links.length, errors }; }

hb_serialize_context_t (void *start_, unsigned int size) :
start ((char *) start_),

@ -282,7 +297,8 @@ struct hb_serialize_context_t

if (!len)
{
assert (!obj->links.length);
assert (!obj->real_links.length);
assert (!obj->virtual_links.length);
return 0;
}

@ -292,6 +308,7 @@ struct hb_serialize_context_t
objidx = packed_map.get (obj);
if (objidx)
{
merge_virtual_links (obj, objidx);
obj->fini ();
return objidx;
}

@ -327,7 +344,8 @@ struct hb_serialize_context_t
// Overflows that happened after the snapshot will be erased by the revert.
if (unlikely (in_error () && !only_overflow ())) return;
assert (snap.current == current);
current->links.shrink (snap.num_links);
current->real_links.shrink (snap.num_real_links);
current->virtual_links.shrink (snap.num_virtual_links);
errors = snap.errors;
revert (snap.head, snap.tail);
}

@ -375,8 +393,8 @@ struct hb_serialize_context_t

assert (current);

auto& link = *current->links.push ();
if (current->links.in_error ())
auto& link = *current->virtual_links.push ();
if (current->virtual_links.in_error ())
err (HB_SERIALIZE_ERROR_OTHER);

link.width = 0;

@ -400,8 +418,8 @@ struct hb_serialize_context_t
assert (current);
assert (current->head <= (const char *) &ofs);

auto& link = *current->links.push ();
if (current->links.in_error ())
auto& link = *current->real_links.push ();
if (current->real_links.in_error ())
err (HB_SERIALIZE_ERROR_OTHER);

link.width = sizeof (T);

@ -440,10 +458,8 @@ struct hb_serialize_context_t
assert (packed.length > 1);

for (const object_t* parent : ++hb_iter (packed))
for (const object_t::link_t &link : parent->links)
for (const object_t::link_t &link : parent->real_links)
{
if (unlikely (!link.width)) continue; // Don't need to resolve virtual offsets

const object_t* child = packed[link.objidx];
if (unlikely (!child)) { err (HB_SERIALIZE_ERROR_OTHER); return; }
unsigned offset = 0;

@ -642,6 +658,13 @@ struct hb_serialize_context_t

private:

void merge_virtual_links (const object_t* from, objidx_t to_idx) {
object_t* to = packed[to_idx];
for (const auto& l : from->virtual_links) {
to->virtual_links.push (l);
}
}

/* Object memory pool. */
hb_pool_t<object_t> object_pool;

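In hb-serialize.hh the single links vector is split into real_links (actual offsets) and virtual_links, which have zero width, are never serialized, and are ignored by operator == and hash () so that otherwise-identical objects still deduplicate; merge_virtual_links () then folds the dropped copy's virtual links into the surviving one. A conceptual illustration in plain C++ (std::vector stands in for hb_vector_t; this is not HarfBuzz code):

// Conceptual illustration only: identity ignores ordering-only metadata,
// while merging preserves it.
#include <cstdint>
#include <vector>

struct obj_t
{
  std::vector<uint8_t> bytes;          // serialized body
  std::vector<unsigned> real_links;    // real offsets: part of identity
  std::vector<unsigned> virtual_links; // offset-free: ignored for identity

  bool operator== (const obj_t &o) const
  { return bytes == o.bytes && real_links == o.real_links; }
};

// When a duplicate is dropped, its virtual links are folded into the copy
// that survives, so none of the constraints they encode are lost.
static void merge_virtual_links (const obj_t &from, obj_t &to)
{
  to.virtual_links.insert (to.virtual_links.end (),
                           from.virtual_links.begin (),
                           from.virtual_links.end ());
}
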
4 thirdparty/harfbuzz/src/hb-subset-plan.cc vendored

@ -248,7 +248,6 @@ static void _colr_closure (hb_face_t *face,
unsigned glyphs_num;
{
glyphs_num = glyphs_colred->get_population ();

// Collect all glyphs referenced by COLRv0
hb_set_t glyphset_colrv0;
for (hb_codepoint_t gid : glyphs_colred->iter ())

@ -397,6 +396,7 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
_colr_closure (plan->source, plan->colrv1_layers, plan->colr_palettes, &cur_glyphset);
_remove_invalid_gids (&cur_glyphset, plan->source->get_num_glyphs ());

hb_set_set (plan->_glyphset_colred, &cur_glyphset);
// Populate a full set of glyphs to retain by adding all referenced
// composite glyphs.
for (hb_codepoint_t gid : cur_glyphset.iter ())

@ -511,6 +511,7 @@ hb_subset_plan_create (hb_face_t *face,
plan->_glyphset = hb_set_create ();
plan->_glyphset_gsub = hb_set_create ();
plan->_glyphset_mathed = hb_set_create ();
plan->_glyphset_colred = hb_set_create ();
plan->codepoint_to_glyph = hb_map_create ();
plan->glyph_map = hb_map_create ();
plan->reverse_glyph_map = hb_map_create ();

@ -579,6 +580,7 @@ hb_subset_plan_destroy (hb_subset_plan_t *plan)
hb_set_destroy (plan->_glyphset);
hb_set_destroy (plan->_glyphset_gsub);
hb_set_destroy (plan->_glyphset_mathed);
hb_set_destroy (plan->_glyphset_colred);
hb_map_destroy (plan->gsub_lookups);
hb_map_destroy (plan->gpos_lookups);
hb_map_destroy (plan->gsub_features);

1 thirdparty/harfbuzz/src/hb-subset-plan.hh vendored

@ -78,6 +78,7 @@ struct hb_subset_plan_t
hb_set_t *_glyphset;
hb_set_t *_glyphset_gsub;
hb_set_t *_glyphset_mathed;
hb_set_t *_glyphset_colred;

//active lookups we'd like to retain
hb_map_t *gsub_lookups;

20 thirdparty/harfbuzz/src/hb-subset.cc vendored

@ -104,20 +104,16 @@ _repack (hb_tag_t tag, const hb_serialize_context_t& c)
if (!c.offset_overflow ())
return c.copy_blob ();

hb_vector_t<char> buf;
int buf_size = c.end - c.start;
if (unlikely (!buf.alloc (buf_size)))
hb_blob_t* result = hb_resolve_overflows (c.object_graph (), tag);

if (unlikely (!result))
{
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c offset overflow resolution failed.",
HB_UNTAG (tag));
return nullptr;
}

hb_serialize_context_t repacked ((void *) buf, buf_size);
hb_resolve_overflows (c.object_graph (), tag, &repacked);

if (unlikely (repacked.in_error ()))
// TODO(garretrieger): refactor so we can share the resize/retry logic with the subset
// portion.
return nullptr;

return repacked.copy_blob ();
return result;
}

template<typename TableType>

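_repack () above is presumably reached through the public subsetting entry points: when a subset table's offsets overflow, the repacker from hb-repacker.hh is invoked on its object graph. A hedged end-to-end sketch using only public API; the file path and codepoint range are placeholders:

/* Sketch: driving the repacker indirectly through the public subsetting API. */
#include <hb-subset.h>

static hb_blob_t *
subset_basic_latin (const char *path)
{
  hb_blob_t *blob = hb_blob_create_from_file (path);
  hb_face_t *face = hb_face_create (blob, 0);
  hb_blob_destroy (blob);

  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  hb_set_add_range (hb_subset_input_unicode_set (input), 0x0020u, 0x007Eu);

  hb_face_t *subset = hb_subset_or_fail (face, input);
  hb_blob_t *out = NULL;
  if (subset)
  {
    out = hb_face_reference_blob (subset);
    hb_face_destroy (subset);
  }

  hb_subset_input_destroy (input);
  hb_face_destroy (face);
  return out; /* NULL if subsetting failed */
}
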
2 thirdparty/harfbuzz/src/hb-uniscribe.cc vendored

@ -878,7 +878,7 @@ retry:
if (backward)
hb_buffer_reverse (buffer);

buffer->unsafe_to_break_all ();
buffer->clear_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK);

/* Wow, done! */
return true;

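However a backend records it internally, the UNSAFE_TO_BREAK flag is read from shaped output the same way through the public API. A brief sketch, with font creation omitted for brevity:

/* Sketch: counting glyphs flagged as unsafe to break after shaping. */
#include <hb.h>

static unsigned
count_unsafe_to_break (hb_font_t *font, const char *text)
{
  hb_buffer_t *buf = hb_buffer_create ();
  hb_buffer_add_utf8 (buf, text, -1, 0, -1);
  hb_buffer_guess_segment_properties (buf);
  hb_shape (font, buf, NULL, 0);

  unsigned len = hb_buffer_get_length (buf);
  hb_glyph_info_t *info = hb_buffer_get_glyph_infos (buf, NULL);

  unsigned n = 0;
  for (unsigned i = 0; i < len; i++)
    if (hb_glyph_info_get_glyph_flags (&info[i]) & HB_GLYPH_FLAG_UNSAFE_TO_BREAK)
      n++;

  hb_buffer_destroy (buf);
  return n;
}
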
6 thirdparty/harfbuzz/src/hb-version.h vendored

@ -47,20 +47,20 @@ HB_BEGIN_DECLS
*
* The minor component of the library version available at compile-time.
*/
#define HB_VERSION_MINOR 1
#define HB_VERSION_MINOR 2
/**
* HB_VERSION_MICRO:
*
* The micro component of the library version available at compile-time.
*/
#define HB_VERSION_MICRO 2
#define HB_VERSION_MICRO 0

/**
* HB_VERSION_STRING:
*
* A string literal containing the library version available at compile-time.
*/
#define HB_VERSION_STRING "3.1.2"
#define HB_VERSION_STRING "3.2.0"

/**
* HB_VERSION_ATLEAST:
*

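With the version macros bumped to 3.2.0, downstream code can gate on the new release both at compile time and at run time. A minimal sketch:

/* Sketch: compile-time and run-time version checks against 3.2.0. */
#include <hb.h>
#include <stdio.h>

int main (void)
{
#if HB_VERSION_ATLEAST (3, 2, 0)
  printf ("compiled against HarfBuzz %s\n", HB_VERSION_STRING);
#endif
  if (!hb_version_atleast (3, 2, 0))
    printf ("runtime HarfBuzz is older than 3.2.0: %s\n", hb_version_string ());
  return 0;
}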