Merge pull request #61312 from bruvzg/hb_430

HarfBuzz: Update to version 4.3.0

Commit 9b7db785eb: 45 changed files with 3052 additions and 2659 deletions

thirdparty/README.md (2 changes, vendored)
@@ -208,7 +208,7 @@ Files extracted from upstream source:
 ## harfbuzz
 
 - Upstream: https://github.com/harfbuzz/harfbuzz
-- Version: 4.2.1 (f7aee78e90bc53b3a95eb56d7550c9effe569ea2, 2022)
+- Version: 4.3.0 (aee123fc83388b8f5acfb301d87bd92eccc5b843, 2022)
 - License: MIT
 
 Files extracted from upstream source:
thirdparty/harfbuzz/src/hb-algs.hh (28 changes, vendored)
@@ -150,10 +150,26 @@ struct BEInt<Type, 4>
                                 uint8_t ((V >> 16) & 0xFF),
                                 uint8_t ((V >>  8) & 0xFF),
                                 uint8_t ((V      ) & 0xFF)} {}
-  constexpr operator Type () const { return (v[0] << 24)
-                                          + (v[1] << 16)
-                                          + (v[2] <<  8)
-                                          + (v[3]      ); }
+
+  struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
+  constexpr operator Type () const {
+#if ((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__)) && \
+    defined(__BYTE_ORDER) && \
+    (__BYTE_ORDER == __LITTLE_ENDIAN || __BYTE_ORDER == __BIG_ENDIAN)
+    /* Spoon-feed the compiler a big-endian integer with alignment 1.
+     * https://github.com/harfbuzz/harfbuzz/pull/1398 */
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+    return __builtin_bswap32 (((packed_uint32_t *) this)->v);
+#else /* __BYTE_ORDER == __BIG_ENDIAN */
+    return ((packed_uint32_t *) this)->v;
+#endif
+#else
+    return (v[0] << 24)
+         + (v[1] << 16)
+         + (v[2] <<  8)
+         + (v[3]      );
+#endif
+  }
   private: uint8_t v[4];
 };
@@ -213,11 +229,11 @@ HB_FUNCOBJ (hb_bool);
 
 template <typename T>
 static inline
-T hb_coerce (const T v) { return v; }
+constexpr T hb_coerce (const T v) { return v; }
 template <typename T, typename V,
           hb_enable_if (!hb_is_same (hb_decay<T>, hb_decay<V>) && std::is_pointer<V>::value)>
 static inline
-T hb_coerce (const V v) { return *v; }
+constexpr T hb_coerce (const V v) { return *v; }
 
 struct
 {
|
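The fast path in the BEInt<Type, 4> hunk above reads an unaligned big-endian word through a packed struct and byte-swaps it on little-endian hosts. Below is a minimal standalone sketch of that technique with a hypothetical read_be32 helper, assuming a GCC/Clang toolchain; it is an illustration, not part of the diff.

```cpp
#include <cstdint>

// Read a 32-bit big-endian value from a possibly unaligned pointer,
// mirroring the packed-struct trick used by BEInt<Type, 4>.
static inline uint32_t read_be32 (const void *p)
{
#if defined(__GNUC__) || defined(__clang__)
  struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
  uint32_t v = ((const packed_uint32_t *) p)->v;   // alignment-1 load
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
  return __builtin_bswap32 (v);                    // big-endian -> host order
#else
  return v;                                        // already big-endian
#endif
#else
  const uint8_t *b = (const uint8_t *) p;          // portable byte-by-byte fallback
  return (uint32_t (b[0]) << 24) | (uint32_t (b[1]) << 16)
       | (uint32_t (b[2]) <<  8) |  uint32_t (b[3]);
#endif
}
```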
thirdparty/harfbuzz/src/hb-array.hh (26 changes, vendored)
@ -346,7 +346,7 @@ struct hb_sorted_array_t :
|
|||
unsigned int i;
|
||||
return bfind (x, &i) ? &this->arrayZ[i] : not_found;
|
||||
}
|
||||
template <typename T>
|
||||
template <typename T, typename ...Ts>
|
||||
const Type *bsearch (const T &x, const Type *not_found = nullptr) const
|
||||
{
|
||||
unsigned int i;
|
||||
|
@ -384,15 +384,16 @@ struct hb_sorted_array_t :
|
|||
}
|
||||
return false;
|
||||
}
|
||||
template <typename T>
|
||||
bool bsearch_impl (const T &x, unsigned *pos) const
|
||||
template <typename T, typename ...Ts>
|
||||
bool bsearch_impl (const T &x, unsigned *pos, Ts... ds) const
|
||||
{
|
||||
return hb_bsearch_impl (pos,
|
||||
x,
|
||||
this->arrayZ,
|
||||
this->length,
|
||||
sizeof (Type),
|
||||
_hb_cmp_method<T, Type>);
|
||||
_hb_cmp_method<T, Type, Ts...>,
|
||||
ds...);
|
||||
}
|
||||
};
|
||||
template <typename T> inline hb_sorted_array_t<T>
|
||||
|
@ -403,7 +404,7 @@ hb_sorted_array (T (&array_)[length_])
|
|||
{ return hb_sorted_array_t<T> (array_); }
|
||||
|
||||
template <typename T>
|
||||
bool hb_array_t<T>::operator == (const hb_array_t<T> &o) const
|
||||
inline bool hb_array_t<T>::operator == (const hb_array_t<T> &o) const
|
||||
{
|
||||
if (o.length != this->length) return false;
|
||||
for (unsigned int i = 0; i < this->length; i++) {
|
||||
|
@ -411,8 +412,18 @@ bool hb_array_t<T>::operator == (const hb_array_t<T> &o) const
|
|||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* TODO Specialize operator== for hb_bytes_t and hb_ubytes_t. */
|
||||
template <>
|
||||
inline bool hb_array_t<const char>::operator == (const hb_array_t<const char> &o) const
|
||||
{
|
||||
if (o.length != this->length) return false;
|
||||
return 0 == hb_memcmp (arrayZ, o.arrayZ, length);
|
||||
}
|
||||
template <>
|
||||
inline bool hb_array_t<const unsigned char>::operator == (const hb_array_t<const unsigned char> &o) const
|
||||
{
|
||||
if (o.length != this->length) return false;
|
||||
return 0 == hb_memcmp (arrayZ, o.arrayZ, length);
|
||||
}
|
||||
|
||||
template <>
|
||||
inline uint32_t hb_array_t<const char>::hash () const {
|
||||
|
@ -421,7 +432,6 @@ inline uint32_t hb_array_t<const char>::hash () const {
|
|||
current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
|
||||
return current;
|
||||
}
|
||||
|
||||
template <>
|
||||
inline uint32_t hb_array_t<const unsigned char>::hash () const {
|
||||
uint32_t current = 0;
|
||||
|
|
thirdparty/harfbuzz/src/hb-bimap.hh (6 changes, vendored)
@@ -39,6 +39,12 @@ struct hb_bimap_t
     back_map.reset ();
   }
 
+  void resize (unsigned pop)
+  {
+    forw_map.resize (pop);
+    back_map.resize (pop);
+  }
+
   bool in_error () const { return forw_map.in_error () || back_map.in_error (); }
 
   void set (hb_codepoint_t lhs, hb_codepoint_t rhs)
thirdparty/harfbuzz/src/hb-bit-page.hh (9 changes, vendored)
@@ -40,11 +40,18 @@ struct hb_bit_page_t
 
   bool is_empty () const
   {
-    for (unsigned int i = 0; i < len (); i++)
+    for (unsigned i = 0; i < len (); i++)
       if (v[i])
        return false;
     return true;
   }
+  uint32_t hash () const
+  {
+    uint32_t h = 0;
+    for (unsigned i = 0; i < len (); i++)
+      h = h * 31 + hb_hash (v[i]);
+    return h;
+  }
 
   void add (hb_codepoint_t g) { elt (g) |= mask (g); }
   void del (hb_codepoint_t g) { elt (g) &= ~mask (g); }
thirdparty/harfbuzz/src/hb-bit-set-invertible.hh (vendored)
@ -38,10 +38,10 @@ struct hb_bit_set_invertible_t
|
|||
bool inverted = false;
|
||||
|
||||
hb_bit_set_invertible_t () = default;
|
||||
hb_bit_set_invertible_t (hb_bit_set_invertible_t& o) = default;
|
||||
hb_bit_set_invertible_t (hb_bit_set_invertible_t&& o) = default;
|
||||
hb_bit_set_invertible_t (const hb_bit_set_invertible_t& o) = default;
|
||||
hb_bit_set_invertible_t (hb_bit_set_invertible_t&& other) : hb_bit_set_invertible_t () { hb_swap (*this, other); }
|
||||
hb_bit_set_invertible_t& operator= (const hb_bit_set_invertible_t& o) = default;
|
||||
hb_bit_set_invertible_t& operator= (hb_bit_set_invertible_t&& o) = default;
|
||||
hb_bit_set_invertible_t& operator= (hb_bit_set_invertible_t&& other) { hb_swap (*this, other); return *this; }
|
||||
friend void swap (hb_bit_set_invertible_t &a, hb_bit_set_invertible_t &b)
|
||||
{
|
||||
if (likely (!a.s.successful || !b.s.successful))
|
||||
|
@ -56,6 +56,7 @@ struct hb_bit_set_invertible_t
|
|||
bool in_error () const { return s.in_error (); }
|
||||
explicit operator bool () const { return !is_empty (); }
|
||||
|
||||
void alloc (unsigned sz) { s.alloc (sz); }
|
||||
void reset ()
|
||||
{
|
||||
s.reset ();
|
||||
|
@ -79,6 +80,8 @@ struct hb_bit_set_invertible_t
|
|||
next (&v);
|
||||
return v == INVALID;
|
||||
}
|
||||
uint32_t hash () const { return s.hash () ^ inverted; }
|
||||
|
||||
hb_codepoint_t get_min () const
|
||||
{
|
||||
hb_codepoint_t v = INVALID;
|
||||
|
|
thirdparty/harfbuzz/src/hb-bit-set.hh (64 changes, vendored)
@ -97,6 +97,13 @@ struct hb_bit_set_t
|
|||
return true;
|
||||
}
|
||||
|
||||
void alloc (unsigned sz)
|
||||
{
|
||||
sz >>= (page_t::PAGE_BITS_LOG_2 - 1);
|
||||
pages.alloc (sz);
|
||||
page_map.alloc (sz);
|
||||
}
|
||||
|
||||
void reset ()
|
||||
{
|
||||
successful = true;
|
||||
|
@ -119,6 +126,14 @@ struct hb_bit_set_t
|
|||
}
|
||||
explicit operator bool () const { return !is_empty (); }
|
||||
|
||||
uint32_t hash () const
|
||||
{
|
||||
uint32_t h = 0;
|
||||
for (auto &map : page_map)
|
||||
h = h * 31 + hb_hash (map.major) + hb_hash (pages[map.index]);
|
||||
return h;
|
||||
}
|
||||
|
||||
private:
|
||||
void dirty () { population = UINT_MAX; }
|
||||
public:
|
||||
|
@ -341,15 +356,14 @@ struct hb_bit_set_t
|
|||
return;
|
||||
population = other.population;
|
||||
|
||||
/* TODO switch to vector operator =. */
|
||||
hb_memcpy ((void *) pages, (const void *) other.pages, count * pages.item_size);
|
||||
hb_memcpy ((void *) page_map, (const void *) other.page_map, count * page_map.item_size);
|
||||
page_map = other.page_map;
|
||||
pages = other.pages;
|
||||
}
|
||||
|
||||
bool is_equal (const hb_bit_set_t &other) const
|
||||
{
|
||||
if (has_population () && other.has_population () &&
|
||||
get_population () != other.get_population ())
|
||||
population != other.population)
|
||||
return false;
|
||||
|
||||
unsigned int na = pages.length;
|
||||
|
@ -377,7 +391,7 @@ struct hb_bit_set_t
|
|||
bool is_subset (const hb_bit_set_t &larger_set) const
|
||||
{
|
||||
if (has_population () && larger_set.has_population () &&
|
||||
get_population () != larger_set.get_population ())
|
||||
population != larger_set.population)
|
||||
return false;
|
||||
|
||||
uint32_t spi = 0;
|
||||
|
@ -874,7 +888,19 @@ struct hb_bit_set_t
|
|||
|
||||
page_t *page_for (hb_codepoint_t g, bool insert = false)
|
||||
{
|
||||
page_map_t map = {get_major (g), pages.length};
|
||||
unsigned major = get_major (g);
|
||||
|
||||
/* The extra page_map length is necessary; can't just rely on vector here,
|
||||
* since the next check would be tricked because a null page also has
|
||||
* major==0, which we can't distinguish from an actualy major==0 page... */
|
||||
if (likely (last_page_lookup < page_map.length))
|
||||
{
|
||||
auto &cached_page = page_map.arrayZ[last_page_lookup];
|
||||
if (cached_page.major == major)
|
||||
return &pages[cached_page.index];
|
||||
}
|
||||
|
||||
page_map_t map = {major, pages.length};
|
||||
unsigned int i;
|
||||
if (!page_map.bfind (map, &i, HB_NOT_FOUND_STORE_CLOSEST))
|
||||
{
|
||||
|
@ -890,15 +916,31 @@ struct hb_bit_set_t
|
|||
(page_map.length - 1 - i) * page_map.item_size);
|
||||
page_map[i] = map;
|
||||
}
|
||||
|
||||
last_page_lookup = i;
|
||||
return &pages[page_map[i].index];
|
||||
}
|
||||
const page_t *page_for (hb_codepoint_t g) const
|
||||
{
|
||||
page_map_t key = {get_major (g)};
|
||||
const page_map_t *found = page_map.bsearch (key);
|
||||
if (found)
|
||||
return &pages[found->index];
|
||||
return nullptr;
|
||||
unsigned major = get_major (g);
|
||||
|
||||
/* The extra page_map length is necessary; can't just rely on vector here,
|
||||
* since the next check would be tricked because a null page also has
|
||||
* major==0, which we can't distinguish from an actualy major==0 page... */
|
||||
if (likely (last_page_lookup < page_map.length))
|
||||
{
|
||||
auto &cached_page = page_map.arrayZ[last_page_lookup];
|
||||
if (cached_page.major == major)
|
||||
return &pages[cached_page.index];
|
||||
}
|
||||
|
||||
page_map_t key = {major};
|
||||
unsigned int i;
|
||||
if (!page_map.bfind (key, &i))
|
||||
return nullptr;
|
||||
|
||||
last_page_lookup = i;
|
||||
return &pages[page_map[i].index];
|
||||
}
|
||||
page_t &page_at (unsigned int i) { return pages[page_map[i].index]; }
|
||||
const page_t &page_at (unsigned int i) const { return pages[page_map[i].index]; }
|
||||
|
|
thirdparty/harfbuzz/src/hb-cff-interp-common.hh (118 changes, vendored)
@ -248,6 +248,9 @@ struct number_t
|
|||
/* byte string */
|
||||
struct UnsizedByteStr : UnsizedArrayOf <HBUINT8>
|
||||
{
|
||||
hb_ubytes_t as_ubytes (unsigned l) const
|
||||
{ return hb_ubytes_t ((const unsigned char *) this, l); }
|
||||
|
||||
// encode 2-byte int (Dict/CharString) or 4-byte int (Dict)
|
||||
template <typename T, typename V>
|
||||
static bool serialize_int (hb_serialize_context_t *c, op_code_t intOp, V value)
|
||||
|
@ -274,33 +277,10 @@ struct UnsizedByteStr : UnsizedArrayOf <HBUINT8>
|
|||
/* Defining null_size allows a Null object may be created. Should be safe because:
|
||||
* A descendent struct Dict uses a Null pointer to indicate a missing table,
|
||||
* checked before access.
|
||||
* byte_str_t, a wrapper struct pairing a byte pointer along with its length, always
|
||||
* checks the length before access. A Null pointer is used as the initial pointer
|
||||
* along with zero length by the default ctor.
|
||||
*/
|
||||
DEFINE_SIZE_MIN(0);
|
||||
};
|
||||
|
||||
/* Holder of a section of byte string within a CFFIndex entry */
|
||||
struct byte_str_t : hb_ubytes_t
|
||||
{
|
||||
byte_str_t ()
|
||||
: hb_ubytes_t () {}
|
||||
byte_str_t (const UnsizedByteStr& s, unsigned int l)
|
||||
: hb_ubytes_t ((const unsigned char*)&s, l) {}
|
||||
byte_str_t (const unsigned char *s, unsigned int l)
|
||||
: hb_ubytes_t (s, l) {}
|
||||
byte_str_t (const hb_ubytes_t &ub) /* conversion from hb_ubytes_t */
|
||||
: hb_ubytes_t (ub) {}
|
||||
|
||||
/* sub-string */
|
||||
byte_str_t sub_str (unsigned int offset, unsigned int len_) const
|
||||
{ return byte_str_t (hb_ubytes_t::sub_array (offset, len_)); }
|
||||
|
||||
bool check_limit (unsigned int offset, unsigned int count) const
|
||||
{ return (offset + count <= length); }
|
||||
};
|
||||
|
||||
/* A byte string associated with the current offset and an error condition */
|
||||
struct byte_str_ref_t
|
||||
{
|
||||
|
@ -308,17 +288,17 @@ struct byte_str_ref_t
|
|||
|
||||
void init ()
|
||||
{
|
||||
str = byte_str_t ();
|
||||
str = hb_ubytes_t ();
|
||||
offset = 0;
|
||||
error = false;
|
||||
}
|
||||
|
||||
void fini () {}
|
||||
|
||||
byte_str_ref_t (const byte_str_t &str_, unsigned int offset_ = 0)
|
||||
byte_str_ref_t (const hb_ubytes_t &str_, unsigned int offset_ = 0)
|
||||
: str (str_), offset (offset_), error (false) {}
|
||||
|
||||
void reset (const byte_str_t &str_, unsigned int offset_ = 0)
|
||||
void reset (const hb_ubytes_t &str_, unsigned int offset_ = 0)
|
||||
{
|
||||
str = str_;
|
||||
offset = offset_;
|
||||
|
@ -334,14 +314,14 @@ struct byte_str_ref_t
|
|||
return str[offset + i];
|
||||
}
|
||||
|
||||
/* Conversion to byte_str_t */
|
||||
operator byte_str_t () const { return str.sub_str (offset, str.length - offset); }
|
||||
/* Conversion to hb_ubytes_t */
|
||||
operator hb_ubytes_t () const { return str.sub_array (offset, str.length - offset); }
|
||||
|
||||
byte_str_t sub_str (unsigned int offset_, unsigned int len_) const
|
||||
{ return str.sub_str (offset_, len_); }
|
||||
hb_ubytes_t sub_array (unsigned int offset_, unsigned int len_) const
|
||||
{ return str.sub_array (offset_, len_); }
|
||||
|
||||
bool avail (unsigned int count=1) const
|
||||
{ return (!in_error () && str.check_limit (offset, count)); }
|
||||
{ return (!in_error () && offset + count <= str.length); }
|
||||
void inc (unsigned int count=1)
|
||||
{
|
||||
if (likely (!in_error () && (offset <= str.length) && (offset + count <= str.length)))
|
||||
|
@ -358,44 +338,39 @@ struct byte_str_ref_t
|
|||
void set_error () { error = true; }
|
||||
bool in_error () const { return error; }
|
||||
|
||||
byte_str_t str;
|
||||
hb_ubytes_t str;
|
||||
unsigned int offset; /* beginning of the sub-string within str */
|
||||
|
||||
protected:
|
||||
bool error;
|
||||
};
|
||||
|
||||
typedef hb_vector_t<byte_str_t> byte_str_array_t;
|
||||
using byte_str_array_t = hb_vector_t<hb_ubytes_t>;
|
||||
|
||||
/* stack */
|
||||
template <typename ELEM, int LIMIT>
|
||||
struct cff_stack_t
|
||||
{
|
||||
void init ()
|
||||
{
|
||||
error = false;
|
||||
count = 0;
|
||||
elements.init ();
|
||||
elements.resize (kSizeLimit);
|
||||
}
|
||||
void fini () { elements.fini (); }
|
||||
|
||||
ELEM& operator [] (unsigned int i)
|
||||
{
|
||||
if (unlikely (i >= count)) set_error ();
|
||||
if (unlikely (i >= count))
|
||||
{
|
||||
set_error ();
|
||||
return Crap (ELEM);
|
||||
}
|
||||
return elements[i];
|
||||
}
|
||||
|
||||
void push (const ELEM &v)
|
||||
{
|
||||
if (likely (count < elements.length))
|
||||
if (likely (count < LIMIT))
|
||||
elements[count++] = v;
|
||||
else
|
||||
set_error ();
|
||||
}
|
||||
ELEM &push ()
|
||||
{
|
||||
if (likely (count < elements.length))
|
||||
if (likely (count < LIMIT))
|
||||
return elements[count++];
|
||||
else
|
||||
{
|
||||
|
@ -424,7 +399,7 @@ struct cff_stack_t
|
|||
|
||||
const ELEM& peek ()
|
||||
{
|
||||
if (unlikely (count < 0))
|
||||
if (unlikely (count == 0))
|
||||
{
|
||||
set_error ();
|
||||
return Null (ELEM);
|
||||
|
@ -434,7 +409,7 @@ struct cff_stack_t
|
|||
|
||||
void unpop ()
|
||||
{
|
||||
if (likely (count < elements.length))
|
||||
if (likely (count < LIMIT))
|
||||
count++;
|
||||
else
|
||||
set_error ();
|
||||
|
@ -442,18 +417,19 @@ struct cff_stack_t
|
|||
|
||||
void clear () { count = 0; }
|
||||
|
||||
bool in_error () const { return (error || elements.in_error ()); }
|
||||
bool in_error () const { return (error); }
|
||||
void set_error () { error = true; }
|
||||
|
||||
unsigned int get_count () const { return count; }
|
||||
bool is_empty () const { return !count; }
|
||||
|
||||
static constexpr unsigned kSizeLimit = LIMIT;
|
||||
hb_array_t<const ELEM> sub_array (unsigned start, unsigned length) const
|
||||
{ return hb_array_t<const ELEM> (elements).sub_array (start, length); }
|
||||
|
||||
protected:
|
||||
bool error;
|
||||
unsigned int count;
|
||||
hb_vector_t<ELEM> elements;
|
||||
private:
|
||||
bool error = false;
|
||||
unsigned int count = 0;
|
||||
ELEM elements[LIMIT];
|
||||
};
|
||||
|
||||
/* argument stack */
|
||||
|
@ -508,9 +484,6 @@ struct arg_stack_t : cff_stack_t<ARG, 513>
|
|||
return true;
|
||||
}
|
||||
|
||||
hb_array_t<const ARG> get_subarray (unsigned int start) const
|
||||
{ return S::elements.sub_array (start); }
|
||||
|
||||
private:
|
||||
typedef cff_stack_t<ARG, 513> S;
|
||||
};
|
||||
|
@ -518,8 +491,8 @@ struct arg_stack_t : cff_stack_t<ARG, 513>
|
|||
/* an operator prefixed by its operands in a byte string */
|
||||
struct op_str_t
|
||||
{
|
||||
hb_ubytes_t str;
|
||||
op_code_t op;
|
||||
byte_str_t str;
|
||||
};
|
||||
|
||||
/* base of OP_SERIALIZER */
|
||||
|
@ -547,11 +520,16 @@ struct parsed_values_t
|
|||
}
|
||||
void fini () { values.fini (); }
|
||||
|
||||
void alloc (unsigned n)
|
||||
{
|
||||
values.alloc (n);
|
||||
}
|
||||
|
||||
void add_op (op_code_t op, const byte_str_ref_t& str_ref = byte_str_ref_t ())
|
||||
{
|
||||
VAL *val = values.push ();
|
||||
val->op = op;
|
||||
val->str = str_ref.str.sub_str (opStart, str_ref.offset - opStart);
|
||||
val->str = str_ref.str.sub_array (opStart, str_ref.offset - opStart);
|
||||
opStart = str_ref.offset;
|
||||
}
|
||||
|
||||
|
@ -559,14 +537,14 @@ struct parsed_values_t
|
|||
{
|
||||
VAL *val = values.push (v);
|
||||
val->op = op;
|
||||
val->str = str_ref.sub_str ( opStart, str_ref.offset - opStart);
|
||||
val->str = str_ref.sub_array ( opStart, str_ref.offset - opStart);
|
||||
opStart = str_ref.offset;
|
||||
}
|
||||
|
||||
bool has_op (op_code_t op) const
|
||||
{
|
||||
for (unsigned int i = 0; i < get_count (); i++)
|
||||
if (get_value (i).op == op) return true;
|
||||
for (const auto& v : values)
|
||||
if (v.op == op) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -581,14 +559,11 @@ struct parsed_values_t
|
|||
template <typename ARG=number_t>
|
||||
struct interp_env_t
|
||||
{
|
||||
void init (const byte_str_t &str_)
|
||||
interp_env_t () {}
|
||||
interp_env_t (const hb_ubytes_t &str_)
|
||||
{
|
||||
str_ref.reset (str_);
|
||||
argStack.init ();
|
||||
error = false;
|
||||
}
|
||||
void fini () { argStack.fini (); }
|
||||
|
||||
bool in_error () const
|
||||
{ return error || str_ref.in_error () || argStack.in_error (); }
|
||||
|
||||
|
@ -622,10 +597,10 @@ struct interp_env_t
|
|||
arg_stack_t<ARG>
|
||||
argStack;
|
||||
protected:
|
||||
bool error;
|
||||
bool error = false;
|
||||
};
|
||||
|
||||
typedef interp_env_t<> num_interp_env_t;
|
||||
using num_interp_env_t = interp_env_t<>;
|
||||
|
||||
template <typename ARG=number_t>
|
||||
struct opset_t
|
||||
|
@ -668,11 +643,8 @@ struct opset_t
|
|||
template <typename ENV>
|
||||
struct interpreter_t
|
||||
{
|
||||
~interpreter_t() { fini (); }
|
||||
|
||||
void fini () { env.fini (); }
|
||||
|
||||
ENV env;
|
||||
interpreter_t (ENV& env_) : env (env_) {}
|
||||
ENV& env;
|
||||
};
|
||||
|
||||
} /* namespace CFF */
|
||||
|
thirdparty/harfbuzz/src/hb-cff-interp-cs-common.hh (vendored)
@ -79,10 +79,10 @@ struct biased_subrs_t
|
|||
unsigned int get_count () const { return subrs ? subrs->count : 0; }
|
||||
unsigned int get_bias () const { return bias; }
|
||||
|
||||
byte_str_t operator [] (unsigned int index) const
|
||||
hb_ubytes_t operator [] (unsigned int index) const
|
||||
{
|
||||
if (unlikely (!subrs || index >= subrs->count))
|
||||
return Null (byte_str_t);
|
||||
return hb_ubytes_t ();
|
||||
else
|
||||
return (*subrs)[index];
|
||||
}
|
||||
|
@ -112,10 +112,9 @@ struct point_t
|
|||
template <typename ARG, typename SUBRS>
|
||||
struct cs_interp_env_t : interp_env_t<ARG>
|
||||
{
|
||||
void init (const byte_str_t &str, const SUBRS *globalSubrs_, const SUBRS *localSubrs_)
|
||||
cs_interp_env_t (const hb_ubytes_t &str, const SUBRS *globalSubrs_, const SUBRS *localSubrs_) :
|
||||
interp_env_t<ARG> (str)
|
||||
{
|
||||
interp_env_t<ARG>::init (str);
|
||||
|
||||
context.init (str, CSType_CharString);
|
||||
seen_moveto = true;
|
||||
seen_hintmask = false;
|
||||
|
@ -123,15 +122,11 @@ struct cs_interp_env_t : interp_env_t<ARG>
|
|||
vstem_count = 0;
|
||||
hintmask_size = 0;
|
||||
pt.set_int (0, 0);
|
||||
callStack.init ();
|
||||
globalSubrs.init (globalSubrs_);
|
||||
localSubrs.init (localSubrs_);
|
||||
}
|
||||
void fini ()
|
||||
~cs_interp_env_t ()
|
||||
{
|
||||
interp_env_t<ARG>::fini ();
|
||||
|
||||
callStack.fini ();
|
||||
globalSubrs.fini ();
|
||||
localSubrs.fini ();
|
||||
}
|
||||
|
@ -880,6 +875,8 @@ struct path_procs_t
|
|||
template <typename ENV, typename OPSET, typename PARAM>
|
||||
struct cs_interpreter_t : interpreter_t<ENV>
|
||||
{
|
||||
cs_interpreter_t (ENV& env_) : interpreter_t<ENV> (env_) {}
|
||||
|
||||
bool interpret (PARAM& param)
|
||||
{
|
||||
SUPER::env.set_endchar (false);
|
||||
|
thirdparty/harfbuzz/src/hb-cff-interp-dict-common.hh (vendored)
@ -179,6 +179,8 @@ struct top_dict_opset_t : dict_opset_t
|
|||
template <typename OPSET, typename PARAM, typename ENV=num_interp_env_t>
|
||||
struct dict_interpreter_t : interpreter_t<ENV>
|
||||
{
|
||||
dict_interpreter_t (ENV& env_) : interpreter_t<ENV> (env_) {}
|
||||
|
||||
bool interpret (PARAM& param)
|
||||
{
|
||||
param.init ();
|
||||
|
|
thirdparty/harfbuzz/src/hb-cff1-interp-cs.hh (8 changes, vendored)
@ -38,17 +38,15 @@ typedef biased_subrs_t<CFF1Subrs> cff1_biased_subrs_t;
|
|||
struct cff1_cs_interp_env_t : cs_interp_env_t<number_t, CFF1Subrs>
|
||||
{
|
||||
template <typename ACC>
|
||||
void init (const byte_str_t &str, ACC &acc, unsigned int fd)
|
||||
cff1_cs_interp_env_t (const hb_ubytes_t &str, ACC &acc, unsigned int fd)
|
||||
: SUPER (str, acc.globalSubrs, acc.privateDicts[fd].localSubrs)
|
||||
{
|
||||
SUPER::init (str, acc.globalSubrs, acc.privateDicts[fd].localSubrs);
|
||||
processed_width = false;
|
||||
has_width = false;
|
||||
arg_start = 0;
|
||||
in_seac = false;
|
||||
}
|
||||
|
||||
void fini () { SUPER::fini (); }
|
||||
|
||||
void set_width (bool has_width_)
|
||||
{
|
||||
if (likely (!processed_width && (SUPER::argStack.get_count () > 0)))
|
||||
|
@ -154,7 +152,7 @@ struct cff1_cs_opset_t : cs_opset_t<number_t, OPSET, cff1_cs_interp_env_t, PARAM
|
|||
};
|
||||
|
||||
template <typename OPSET, typename PARAM>
|
||||
struct cff1_cs_interpreter_t : cs_interpreter_t<cff1_cs_interp_env_t, OPSET, PARAM> {};
|
||||
using cff1_cs_interpreter_t = cs_interpreter_t<cff1_cs_interp_env_t, OPSET, PARAM>;
|
||||
|
||||
} /* namespace CFF */
|
||||
|
||||
|
|
thirdparty/harfbuzz/src/hb-cff2-interp-cs.hh (89 changes, vendored)
@ -64,14 +64,14 @@ struct blend_arg_t : number_t
|
|||
typedef interp_env_t<blend_arg_t> BlendInterpEnv;
|
||||
typedef biased_subrs_t<CFF2Subrs> cff2_biased_subrs_t;
|
||||
|
||||
struct cff2_cs_interp_env_t : cs_interp_env_t<blend_arg_t, CFF2Subrs>
|
||||
template <typename ELEM>
|
||||
struct cff2_cs_interp_env_t : cs_interp_env_t<ELEM, CFF2Subrs>
|
||||
{
|
||||
template <typename ACC>
|
||||
void init (const byte_str_t &str, ACC &acc, unsigned int fd,
|
||||
const int *coords_=nullptr, unsigned int num_coords_=0)
|
||||
cff2_cs_interp_env_t (const hb_ubytes_t &str, ACC &acc, unsigned int fd,
|
||||
const int *coords_=nullptr, unsigned int num_coords_=0)
|
||||
: SUPER (str, acc.globalSubrs, acc.privateDicts[fd].localSubrs)
|
||||
{
|
||||
SUPER::init (str, acc.globalSubrs, acc.privateDicts[fd].localSubrs);
|
||||
|
||||
coords = coords_;
|
||||
num_coords = num_coords_;
|
||||
varStore = acc.varStore;
|
||||
|
@ -100,18 +100,14 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<blend_arg_t, CFF2Subrs>
|
|||
return OpCode_return;
|
||||
}
|
||||
|
||||
const blend_arg_t& eval_arg (unsigned int i)
|
||||
const ELEM& eval_arg (unsigned int i)
|
||||
{
|
||||
blend_arg_t &arg = argStack[i];
|
||||
blend_arg (arg);
|
||||
return arg;
|
||||
return SUPER::argStack[i];
|
||||
}
|
||||
|
||||
const blend_arg_t& pop_arg ()
|
||||
const ELEM& pop_arg ()
|
||||
{
|
||||
blend_arg_t &arg = argStack.pop ();
|
||||
blend_arg (arg);
|
||||
return arg;
|
||||
return SUPER::argStack.pop ();
|
||||
}
|
||||
|
||||
void process_blend ()
|
||||
|
@ -122,7 +118,7 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<blend_arg_t, CFF2Subrs>
|
|||
if (do_blend)
|
||||
{
|
||||
if (unlikely (!scalars.resize (region_count)))
|
||||
set_error ();
|
||||
SUPER::set_error ();
|
||||
else
|
||||
varStore->varStore.get_region_scalars (get_ivs (), coords, num_coords,
|
||||
&scalars[0], region_count);
|
||||
|
@ -133,10 +129,10 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<blend_arg_t, CFF2Subrs>
|
|||
|
||||
void process_vsindex ()
|
||||
{
|
||||
unsigned int index = argStack.pop_uint ();
|
||||
unsigned int index = SUPER::argStack.pop_uint ();
|
||||
if (unlikely (seen_vsindex () || seen_blend))
|
||||
{
|
||||
set_error ();
|
||||
SUPER::set_error ();
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -151,22 +147,18 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<blend_arg_t, CFF2Subrs>
|
|||
void set_ivs (unsigned int ivs_) { ivs = ivs_; }
|
||||
bool seen_vsindex () const { return seen_vsindex_; }
|
||||
|
||||
protected:
|
||||
void blend_arg (blend_arg_t &arg)
|
||||
double blend_deltas (hb_array_t<const ELEM> deltas) const
|
||||
{
|
||||
if (do_blend && arg.blending ())
|
||||
double v = 0;
|
||||
if (do_blend)
|
||||
{
|
||||
if (likely (scalars.length == arg.deltas.length))
|
||||
if (likely (scalars.length == deltas.length))
|
||||
{
|
||||
double v = arg.to_real ();
|
||||
for (unsigned int i = 0; i < scalars.length; i++)
|
||||
{
|
||||
v += (double)scalars[i] * arg.deltas[i].to_real ();
|
||||
}
|
||||
arg.set_real (v);
|
||||
arg.deltas.resize (0);
|
||||
v += (double) scalars[i] * deltas[i].to_real ();
|
||||
}
|
||||
}
|
||||
return v;
|
||||
}
|
||||
|
||||
protected:
|
||||
|
@ -180,22 +172,24 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<blend_arg_t, CFF2Subrs>
|
|||
bool seen_vsindex_;
|
||||
bool seen_blend;
|
||||
|
||||
typedef cs_interp_env_t<blend_arg_t, CFF2Subrs> SUPER;
|
||||
typedef cs_interp_env_t<ELEM, CFF2Subrs> SUPER;
|
||||
};
|
||||
template <typename OPSET, typename PARAM, typename PATH=path_procs_null_t<cff2_cs_interp_env_t, PARAM>>
|
||||
struct cff2_cs_opset_t : cs_opset_t<blend_arg_t, OPSET, cff2_cs_interp_env_t, PARAM, PATH>
|
||||
template <typename OPSET, typename PARAM, typename ELEM, typename PATH=path_procs_null_t<cff2_cs_interp_env_t<ELEM>, PARAM>>
|
||||
struct cff2_cs_opset_t : cs_opset_t<ELEM, OPSET, cff2_cs_interp_env_t<ELEM>, PARAM, PATH>
|
||||
{
|
||||
static void process_op (op_code_t op, cff2_cs_interp_env_t &env, PARAM& param)
|
||||
static void process_op (op_code_t op, cff2_cs_interp_env_t<ELEM> &env, PARAM& param)
|
||||
{
|
||||
switch (op) {
|
||||
case OpCode_callsubr:
|
||||
case OpCode_callgsubr:
|
||||
/* a subroutine number shouldn't be a blended value */
|
||||
#if 0
|
||||
if (unlikely (env.argStack.peek ().blending ()))
|
||||
{
|
||||
env.set_error ();
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
SUPER::process_op (op, env, param);
|
||||
break;
|
||||
|
||||
|
@ -204,11 +198,13 @@ struct cff2_cs_opset_t : cs_opset_t<blend_arg_t, OPSET, cff2_cs_interp_env_t, PA
|
|||
break;
|
||||
|
||||
case OpCode_vsindexcs:
|
||||
#if 0
|
||||
if (unlikely (env.argStack.peek ().blending ()))
|
||||
{
|
||||
env.set_error ();
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
OPSET::process_vsindex (env, param);
|
||||
break;
|
||||
|
||||
|
@ -217,7 +213,26 @@ struct cff2_cs_opset_t : cs_opset_t<blend_arg_t, OPSET, cff2_cs_interp_env_t, PA
|
|||
}
|
||||
}
|
||||
|
||||
static void process_blend (cff2_cs_interp_env_t &env, PARAM& param)
|
||||
template <typename T = ELEM,
|
||||
hb_enable_if (hb_is_same (T, blend_arg_t))>
|
||||
static void process_arg_blend (cff2_cs_interp_env_t<ELEM> &env,
|
||||
ELEM &arg,
|
||||
const hb_array_t<const ELEM> blends,
|
||||
unsigned n, unsigned i)
|
||||
{
|
||||
arg.set_blends (n, i, blends.length, blends);
|
||||
}
|
||||
template <typename T = ELEM,
|
||||
hb_enable_if (!hb_is_same (T, blend_arg_t))>
|
||||
static void process_arg_blend (cff2_cs_interp_env_t<ELEM> &env,
|
||||
ELEM &arg,
|
||||
const hb_array_t<const ELEM> blends,
|
||||
unsigned n, unsigned i)
|
||||
{
|
||||
arg.set_real (arg.to_real () + env.blend_deltas (blends));
|
||||
}
|
||||
|
||||
static void process_blend (cff2_cs_interp_env_t<ELEM> &env, PARAM& param)
|
||||
{
|
||||
unsigned int n, k;
|
||||
|
||||
|
@ -234,26 +249,26 @@ struct cff2_cs_opset_t : cs_opset_t<blend_arg_t, OPSET, cff2_cs_interp_env_t, PA
|
|||
}
|
||||
for (unsigned int i = 0; i < n; i++)
|
||||
{
|
||||
const hb_array_t<const blend_arg_t> blends = env.argStack.get_subarray (start + n + (i * k));
|
||||
env.argStack[start + i].set_blends (n, i, k, blends);
|
||||
const hb_array_t<const ELEM> blends = env.argStack.sub_array (start + n + (i * k), k);
|
||||
process_arg_blend (env, env.argStack[start + i], blends, n, i);
|
||||
}
|
||||
|
||||
/* pop off blend values leaving default values now adorned with blend values */
|
||||
env.argStack.pop (k * n);
|
||||
}
|
||||
|
||||
static void process_vsindex (cff2_cs_interp_env_t &env, PARAM& param)
|
||||
static void process_vsindex (cff2_cs_interp_env_t<ELEM> &env, PARAM& param)
|
||||
{
|
||||
env.process_vsindex ();
|
||||
env.clear_args ();
|
||||
}
|
||||
|
||||
private:
|
||||
typedef cs_opset_t<blend_arg_t, OPSET, cff2_cs_interp_env_t, PARAM, PATH> SUPER;
|
||||
typedef cs_opset_t<ELEM, OPSET, cff2_cs_interp_env_t<ELEM>, PARAM, PATH> SUPER;
|
||||
};
|
||||
|
||||
template <typename OPSET, typename PARAM>
|
||||
struct cff2_cs_interpreter_t : cs_interpreter_t<cff2_cs_interp_env_t, OPSET, PARAM> {};
|
||||
template <typename OPSET, typename PARAM, typename ELEM>
|
||||
using cff2_cs_interpreter_t = cs_interpreter_t<cff2_cs_interp_env_t<ELEM>, OPSET, PARAM>;
|
||||
|
||||
} /* namespace CFF */
|
||||
|
||||
|
|
thirdparty/harfbuzz/src/hb-font.cc (4 changes, vendored)
@ -2596,12 +2596,14 @@ hb_font_funcs_set_glyph_func (hb_font_funcs_t *ffuncs,
|
|||
return;
|
||||
}
|
||||
|
||||
/* Since we pass it to two destroying functions. */
|
||||
trampoline_reference (&trampoline->closure);
|
||||
|
||||
hb_font_funcs_set_nominal_glyph_func (ffuncs,
|
||||
hb_font_get_nominal_glyph_trampoline,
|
||||
trampoline,
|
||||
trampoline_destroy);
|
||||
|
||||
trampoline_reference (&trampoline->closure);
|
||||
hb_font_funcs_set_variation_glyph_func (ffuncs,
|
||||
hb_font_get_variation_glyph_trampoline,
|
||||
trampoline,
|
||||
|
|
thirdparty/harfbuzz/src/hb-ft.cc (4 changes, vendored)
@ -80,12 +80,12 @@
|
|||
|
||||
struct hb_ft_font_t
|
||||
{
|
||||
mutable hb_mutex_t lock;
|
||||
FT_Face ft_face;
|
||||
int load_flags;
|
||||
bool symbol; /* Whether selected cmap is symbol cmap. */
|
||||
bool unref; /* Whether to destroy ft_face when done. */
|
||||
|
||||
mutable hb_mutex_t lock;
|
||||
FT_Face ft_face;
|
||||
mutable int cached_x_scale;
|
||||
mutable hb_advance_cache_t advance_cache;
|
||||
};
|
||||
|
|
thirdparty/harfbuzz/src/hb-map.cc (20 changes, vendored)
@@ -289,3 +289,23 @@ hb_map_get_population (const hb_map_t *map)
 {
   return map->get_population ();
 }
+
+/**
+ * hb_map_is_equal:
+ * @map: A map
+ * @other: Another map
+ *
+ * Tests whether @map and @other are equal (contain the same
+ * elements).
+ *
+ * Return value: %true if the two maps are equal, %false otherwise.
+ *
+ * Since: 4.3.0
+ **/
+hb_bool_t
+hb_map_is_equal (const hb_map_t *map,
+                 const hb_map_t *other)
+{
+  return map->is_equal (*other);
+}
|
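A minimal usage sketch of the new hb_map_is_equal API from client code, assuming HarfBuzz 4.3.0 headers are installed; hypothetical example, not part of the diff.

```cpp
#include <hb.h>

int main ()
{
  hb_map_t *a = hb_map_create ();
  hb_map_t *b = hb_map_create ();

  hb_map_set (a, 1, 10);
  hb_map_set (b, 1, 10);

  /* New in 4.3.0: true only if both maps hold exactly the same key/value pairs. */
  hb_bool_t equal = hb_map_is_equal (a, b);      /* true  */

  hb_map_set (b, 2, 20);
  equal = hb_map_is_equal (a, b);                /* false */

  hb_map_destroy (a);
  hb_map_destroy (b);
  return equal ? 1 : 0;
}
```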
thirdparty/harfbuzz/src/hb-map.h (4 changes, vendored)
@@ -91,6 +91,10 @@ hb_map_is_empty (const hb_map_t *map);
 HB_EXTERN unsigned int
 hb_map_get_population (const hb_map_t *map);
 
+HB_EXTERN hb_bool_t
+hb_map_is_equal (const hb_map_t *map,
+                 const hb_map_t *other);
+
 HB_EXTERN void
 hb_map_set (hb_map_t *map,
             hb_codepoint_t key,
thirdparty/harfbuzz/src/hb-map.hh (41 changes, vendored)
@ -42,11 +42,12 @@ template <typename K, typename V,
|
|||
struct hb_hashmap_t
|
||||
{
|
||||
hb_hashmap_t () { init (); }
|
||||
hb_hashmap_t (std::nullptr_t) : hb_hashmap_t () {}
|
||||
~hb_hashmap_t () { fini (); }
|
||||
|
||||
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { hb_copy (o, *this); }
|
||||
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { resize (population); hb_copy (o, *this); }
|
||||
hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
|
||||
hb_hashmap_t& operator= (const hb_hashmap_t& o) { hb_copy (o, *this); return *this; }
|
||||
hb_hashmap_t& operator= (const hb_hashmap_t& o) { resize (population); hb_copy (o, *this); return *this; }
|
||||
hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }
|
||||
|
||||
hb_hashmap_t (std::initializer_list<hb_pair_t<K, V>> lst) : hb_hashmap_t ()
|
||||
|
@ -58,7 +59,10 @@ struct hb_hashmap_t
|
|||
hb_requires (hb_is_iterable (Iterable))>
|
||||
hb_hashmap_t (const Iterable &o) : hb_hashmap_t ()
|
||||
{
|
||||
hb_copy (o, *this);
|
||||
auto iter = hb_iter (o);
|
||||
if (iter.is_random_access_iterator)
|
||||
resize (hb_len (iter));
|
||||
hb_copy (iter, *this);
|
||||
}
|
||||
|
||||
struct item_t
|
||||
|
@ -154,11 +158,11 @@ struct hb_hashmap_t
|
|||
|
||||
bool in_error () const { return !successful; }
|
||||
|
||||
bool resize ()
|
||||
bool resize (unsigned new_population = 0)
|
||||
{
|
||||
if (unlikely (!successful)) return false;
|
||||
|
||||
unsigned int power = hb_bit_storage (population * 2 + 8);
|
||||
unsigned int power = hb_bit_storage (hb_max (population, new_population) * 2 + 8);
|
||||
unsigned int new_size = 1u << power;
|
||||
item_t *new_items = (item_t *) hb_malloc ((size_t) new_size * sizeof (item_t));
|
||||
if (unlikely (!new_items))
|
||||
|
@ -235,6 +239,27 @@ struct hb_hashmap_t
|
|||
bool is_empty () const { return population == 0; }
|
||||
explicit operator bool () const { return !is_empty (); }
|
||||
|
||||
uint32_t hash () const
|
||||
{
|
||||
uint32_t h = 0;
|
||||
for (auto pair : iter ())
|
||||
h ^= (hb_hash (pair.first) * 31) + hb_hash (pair.second);
|
||||
return h;
|
||||
}
|
||||
|
||||
bool is_equal (const hb_hashmap_t &other) const
|
||||
{
|
||||
if (population != other.population) return false;
|
||||
|
||||
for (auto pair : iter ())
|
||||
if (get (pair.first) != pair.second)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
bool operator == (const hb_hashmap_t &other) const { return is_equal (other); }
|
||||
bool operator != (const hb_hashmap_t &other) const { return !is_equal (other); }
|
||||
|
||||
unsigned int get_population () const { return population; }
|
||||
|
||||
/*
|
||||
|
@ -389,9 +414,11 @@ struct hb_map_t : hb_hashmap_t<hb_codepoint_t,
|
|||
HB_MAP_VALUE_INVALID,
|
||||
HB_MAP_VALUE_INVALID>;
|
||||
|
||||
hb_map_t () = default;
|
||||
~hb_map_t () = default;
|
||||
hb_map_t (hb_map_t&) = default;
|
||||
hb_map_t () : hashmap () {}
|
||||
hb_map_t (std::nullptr_t) : hb_map_t () {}
|
||||
hb_map_t (const hb_map_t &o) : hashmap ((hashmap &) o) {}
|
||||
hb_map_t (hb_map_t &&o) : hashmap (std::move ((hashmap &) o)) {}
|
||||
hb_map_t& operator= (const hb_map_t&) = default;
|
||||
hb_map_t& operator= (hb_map_t&&) = default;
|
||||
hb_map_t (std::initializer_list<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> lst) : hashmap (lst) {}
|
||||
|
|
thirdparty/harfbuzz/src/hb-meta.hh (13 changes, vendored)
@@ -188,6 +188,19 @@ template <> struct hb_int_max<signed long long> : hb_integral_constant<signed l
 template <> struct hb_int_max<unsigned long long> : hb_integral_constant<unsigned long long, ULLONG_MAX> {};
 #define hb_int_max(T) hb_int_max<T>::value
 
+#if __GNUG__ && __GNUC__ < 5
+#define hb_is_trivially_copyable(T) __has_trivial_copy(T)
+#define hb_is_trivially_copy_assignable(T) __has_trivial_assign(T)
+#define hb_is_trivially_constructible(T) __has_trivial_constructor(T)
+#define hb_is_trivially_copy_constructible(T) __has_trivial_copy_constructor(T)
+#define hb_is_trivially_destructible(T) __has_trivial_destructor(T)
+#else
+#define hb_is_trivially_copyable(T) std::is_trivially_copyable<T>::value
+#define hb_is_trivially_copy_assignable(T) std::is_trivially_copy_assignable<T>::value
+#define hb_is_trivially_constructible(T) std::is_trivially_constructible<T>::value
+#define hb_is_trivially_copy_constructible(T) std::is_trivially_copy_constructible<T>::value
+#define hb_is_trivially_destructible(T) std::is_trivially_destructible<T>::value
+#endif
 
 /* Class traits. */
 
thirdparty/harfbuzz/src/hb-open-type.hh (11 changes, vendored)
@ -33,6 +33,7 @@
|
|||
#include "hb-blob.hh"
|
||||
#include "hb-face.hh"
|
||||
#include "hb-machinery.hh"
|
||||
#include "hb-meta.hh"
|
||||
#include "hb-subset.hh"
|
||||
|
||||
|
||||
|
@ -518,7 +519,7 @@ struct UnsizedArrayOf
|
|||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
|
||||
if (!sizeof... (Ts) && std::is_trivially_copyable<Type>::value) return_trace (true);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||
return_trace (false);
|
||||
|
@ -707,7 +708,7 @@ struct ArrayOf
|
|||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
if (!sizeof... (Ts) && std::is_trivially_copyable<Type>::value) return_trace (true);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||
unsigned int count = len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||
|
@ -835,7 +836,7 @@ struct HeadlessArrayOf
|
|||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
if (!sizeof... (Ts) && std::is_trivially_copyable<Type>::value) return_trace (true);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||
unsigned int count = get_length ();
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||
|
@ -884,7 +885,7 @@ struct ArrayOfM1
|
|||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
if (!sizeof... (Ts) && std::is_trivially_copyable<Type>::value) return_trace (true);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||
unsigned int count = lenM1 + 1;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||
|
@ -1070,7 +1071,7 @@ struct VarSizedBinSearchArrayOf
|
|||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
if (!sizeof... (Ts) && std::is_trivially_copyable<Type>::value) return_trace (true);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||
unsigned int count = get_length ();
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!(*this)[i].sanitize (c, std::forward<Ts> (ds)...)))
|
||||
|
|
thirdparty/harfbuzz/src/hb-ot-cff-common.hh (184 changes, vendored)
@ -46,49 +46,21 @@ template<typename Type>
|
|||
static inline const Type& StructAtOffsetOrNull (const void *P, unsigned int offset)
|
||||
{ return offset ? StructAtOffset<Type> (P, offset) : Null (Type); }
|
||||
|
||||
inline unsigned int calcOffSize (unsigned int dataSize)
|
||||
{
|
||||
unsigned int size = 1;
|
||||
unsigned int offset = dataSize + 1;
|
||||
while (offset & ~0xFF)
|
||||
{
|
||||
size++;
|
||||
offset >>= 8;
|
||||
}
|
||||
/* format does not support size > 4; caller should handle it as an error */
|
||||
return size;
|
||||
}
|
||||
|
||||
struct code_pair_t
|
||||
{
|
||||
hb_codepoint_t code;
|
||||
hb_codepoint_t glyph;
|
||||
};
|
||||
|
||||
typedef hb_vector_t<unsigned char> str_buff_t;
|
||||
struct str_buff_vec_t : hb_vector_t<str_buff_t>
|
||||
{
|
||||
unsigned int total_size () const
|
||||
{
|
||||
unsigned int size = 0;
|
||||
for (unsigned int i = 0; i < length; i++)
|
||||
size += (*this)[i].length;
|
||||
return size;
|
||||
}
|
||||
|
||||
private:
|
||||
typedef hb_vector_t<str_buff_t> SUPER;
|
||||
};
|
||||
using str_buff_t = hb_vector_t<unsigned char>;
|
||||
using str_buff_vec_t = hb_vector_t<str_buff_t>;
|
||||
|
||||
/* CFF INDEX */
|
||||
template <typename COUNT>
|
||||
struct CFFIndex
|
||||
{
|
||||
static unsigned int calculate_offset_array_size (unsigned int offSize, unsigned int count)
|
||||
{ return offSize * (count + 1); }
|
||||
|
||||
unsigned int offset_array_size () const
|
||||
{ return calculate_offset_array_size (offSize, count); }
|
||||
{ return offSize * (count + 1); }
|
||||
|
||||
CFFIndex *copy (hb_serialize_context_t *c) const
|
||||
{
|
||||
|
@ -100,55 +72,46 @@ struct CFFIndex
|
|||
return_trace (out);
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c, const CFFIndex &src)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned int size = src.get_size ();
|
||||
CFFIndex *dest = c->allocate_size<CFFIndex> (size);
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
memcpy (dest, &src, size);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
unsigned int offSize_,
|
||||
const byte_str_array_t &byteArray)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
if (byteArray.length == 0)
|
||||
{
|
||||
COUNT *dest = c->allocate_min<COUNT> ();
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
*dest = 0;
|
||||
return_trace (true);
|
||||
}
|
||||
else
|
||||
|
||||
/* serialize CFFIndex header */
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
this->count = byteArray.length;
|
||||
this->offSize = offSize_;
|
||||
if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (byteArray.length + 1))))
|
||||
return_trace (false);
|
||||
|
||||
/* serialize indices */
|
||||
unsigned int offset = 1;
|
||||
unsigned int i = 0;
|
||||
for (; i < byteArray.length; i++)
|
||||
{
|
||||
/* serialize CFFIndex header */
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
this->count = byteArray.length;
|
||||
this->offSize = offSize_;
|
||||
if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (byteArray.length + 1))))
|
||||
return_trace (false);
|
||||
|
||||
/* serialize indices */
|
||||
unsigned int offset = 1;
|
||||
unsigned int i = 0;
|
||||
for (; i < byteArray.length; i++)
|
||||
{
|
||||
set_offset_at (i, offset);
|
||||
offset += byteArray[i].get_size ();
|
||||
}
|
||||
set_offset_at (i, offset);
|
||||
|
||||
/* serialize data */
|
||||
for (unsigned int i = 0; i < byteArray.length; i++)
|
||||
{
|
||||
const byte_str_t &bs = byteArray[i];
|
||||
unsigned char *dest = c->allocate_size<unsigned char> (bs.length);
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
memcpy (dest, &bs[0], bs.length);
|
||||
}
|
||||
offset += byteArray[i].get_size ();
|
||||
}
|
||||
set_offset_at (i, offset);
|
||||
|
||||
/* serialize data */
|
||||
for (unsigned int i = 0; i < byteArray.length; i++)
|
||||
{
|
||||
const hb_ubytes_t &bs = byteArray[i];
|
||||
unsigned char *dest = c->allocate_size<unsigned char> (bs.length);
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
memcpy (dest, &bs[0], bs.length);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
@ -160,7 +123,7 @@ struct CFFIndex
|
|||
byteArray.init ();
|
||||
byteArray.resize (buffArray.length);
|
||||
for (unsigned int i = 0; i < byteArray.length; i++)
|
||||
byteArray[i] = byte_str_t (buffArray[i].arrayZ, buffArray[i].length);
|
||||
byteArray[i] = hb_ubytes_t (buffArray[i].arrayZ, buffArray[i].length);
|
||||
bool result = this->serialize (c, offSize_, byteArray);
|
||||
byteArray.fini ();
|
||||
return result;
|
||||
|
@ -172,18 +135,9 @@ struct CFFIndex
|
|||
Iterator it)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (it.len () == 0)
|
||||
{
|
||||
COUNT *dest = c->allocate_min<COUNT> ();
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
*dest = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
serialize_header(c, + it | hb_map ([] (const byte_str_t &_) { return _.length; }));
|
||||
for (const auto &_ : +it)
|
||||
_.copy (c);
|
||||
}
|
||||
serialize_header(c, + it | hb_map ([] (const hb_ubytes_t &_) { return _.length; }));
|
||||
for (const auto &_ : +it)
|
||||
_.copy (c);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
@ -196,7 +150,7 @@ struct CFFIndex
|
|||
{
|
||||
auto it =
|
||||
+ hb_iter (buffArray)
|
||||
| hb_map ([] (const str_buff_t &_) { return byte_str_t (_.arrayZ, _.length); })
|
||||
| hb_map ([] (const str_buff_t &_) { return hb_ubytes_t (_.arrayZ, _.length); })
|
||||
;
|
||||
return serialize (c, it);
|
||||
}
|
||||
|
@ -209,13 +163,15 @@ struct CFFIndex
|
|||
TRACE_SERIALIZE (this);
|
||||
|
||||
unsigned total = + it | hb_reduce (hb_add, 0);
|
||||
unsigned off_size = calcOffSize (total);
|
||||
unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
|
||||
|
||||
/* serialize CFFIndex header */
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
this->count = it.len ();
|
||||
if (!this->count) return_trace (true);
|
||||
if (unlikely (!c->extend (this->offSize))) return_trace (false);
|
||||
this->offSize = off_size;
|
||||
if (unlikely (!c->allocate_size<HBUINT8> (off_size * (it.len () + 1))))
|
||||
if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1))))
|
||||
return_trace (false);
|
||||
|
||||
/* serialize indices */
|
||||
|
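The serialization path above now derives the offset byte width directly from the total data size instead of calling the removed calcOffSize helper. A small standalone check of that arithmetic, using a local stand-in for hb_bit_storage (a sketch, not part of the diff):

```cpp
#include <cstdio>

// Stand-in for hb_bit_storage: number of significant bits in v.
static unsigned bit_storage (unsigned v) { unsigned n = 0; while (v) { n++; v >>= 1; } return n; }

// The new formula: bytes needed to store any offset up to total + 1.
static unsigned off_size (unsigned total) { return (bit_storage (total + 1) + 7) / 8; }

int main ()
{
  // total = 300 -> largest offset 301 -> 9 significant bits -> 2-byte offsets,
  // which matches what the removed calcOffSize() loop computed.
  printf ("%u %u %u\n", off_size (0), off_size (300), off_size (70000));  // prints: 1 2 3
  return 0;
}
```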
@ -233,6 +189,7 @@ struct CFFIndex
|
|||
|
||||
void set_offset_at (unsigned int index, unsigned int offset)
|
||||
{
|
||||
assert (index <= count);
|
||||
HBUINT8 *p = offsets + offSize * index + offSize;
|
||||
unsigned int size = offSize;
|
||||
for (; size; size--)
|
||||
|
@ -243,11 +200,13 @@ struct CFFIndex
|
|||
}
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned int offset_at (unsigned int index) const
|
||||
{
|
||||
assert (index <= count);
|
||||
const HBUINT8 *p = offsets + offSize * index;
|
||||
|
||||
unsigned int size = offSize;
|
||||
const HBUINT8 *p = offsets + size * index;
|
||||
unsigned int offset = 0;
|
||||
for (; size; size--)
|
||||
offset = (offset << 8) + *p++;
|
||||
|
@ -256,72 +215,57 @@ struct CFFIndex
|
|||
|
||||
unsigned int length_at (unsigned int index) const
|
||||
{
|
||||
if (unlikely ((offset_at (index + 1) < offset_at (index)) ||
|
||||
(offset_at (index + 1) > offset_at (count))))
|
||||
unsigned offset0 = offset_at (index);
|
||||
unsigned offset1 = offset_at (index + 1);
|
||||
if (unlikely (offset1 < offset0 || offset1 > offset_at (count)))
|
||||
return 0;
|
||||
return offset_at (index + 1) - offset_at (index);
|
||||
return offset1 - offset0;
|
||||
}
|
||||
|
||||
const unsigned char *data_base () const
|
||||
{ return (const unsigned char *) this + min_size + offset_array_size (); }
|
||||
{ return (const unsigned char *) this + min_size + offSize.static_size + offset_array_size (); }
|
||||
public:
|
||||
|
||||
unsigned int data_size () const { return HBINT8::static_size; }
|
||||
|
||||
byte_str_t operator [] (unsigned int index) const
|
||||
hb_ubytes_t operator [] (unsigned int index) const
|
||||
{
|
||||
if (unlikely (index >= count)) return Null (byte_str_t);
|
||||
return byte_str_t (data_base () + offset_at (index) - 1, length_at (index));
|
||||
if (unlikely (index >= count)) return hb_ubytes_t ();
|
||||
unsigned length = length_at (index);
|
||||
if (unlikely (!length)) return hb_ubytes_t ();
|
||||
return hb_ubytes_t (data_base () + offset_at (index) - 1, length);
|
||||
}
|
||||
|
||||
unsigned int get_size () const
|
||||
{
|
||||
if (this == &Null (CFFIndex)) return 0;
|
||||
if (count > 0)
|
||||
return min_size + offset_array_size () + (offset_at (count) - 1);
|
||||
return count.static_size; /* empty CFFIndex contains count only */
|
||||
if (count)
|
||||
return min_size + offSize.static_size + offset_array_size () + (offset_at (count) - 1);
|
||||
return min_size; /* empty CFFIndex contains count only */
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (likely ((c->check_struct (this) && count == 0) || /* empty INDEX */
|
||||
(c->check_struct (this) && offSize >= 1 && offSize <= 4 &&
|
||||
c->check_array (offsets, offSize, count + 1) &&
|
||||
c->check_array ((const HBUINT8*) data_base (), 1, max_offset () - 1))));
|
||||
}
|
||||
|
||||
protected:
|
||||
unsigned int max_offset () const
|
||||
{
|
||||
unsigned int max = 0;
|
||||
for (unsigned int i = 0; i < count + 1u; i++)
|
||||
{
|
||||
unsigned int off = offset_at (i);
|
||||
if (off > max) max = off;
|
||||
}
|
||||
return max;
|
||||
return_trace (likely (c->check_struct (this) &&
|
||||
(count == 0 || /* empty INDEX */
|
||||
(count < count + 1u &&
|
||||
c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
|
||||
c->check_array (offsets, offSize, count + 1u) &&
|
||||
c->check_array ((const HBUINT8*) data_base (), 1, offset_at (count) - 1)))));
|
||||
}
|
||||
|
||||
public:
|
||||
COUNT count; /* Number of object data. Note there are (count+1) offsets */
|
||||
private:
|
||||
HBUINT8 offSize; /* The byte size of each offset in the offsets array. */
|
||||
HBUINT8 offsets[HB_VAR_ARRAY];
|
||||
/* The array of (count + 1) offsets into objects array (1-base). */
|
||||
/* HBUINT8 data[HB_VAR_ARRAY]; Object data */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (COUNT::static_size + HBUINT8::static_size, offsets);
|
||||
DEFINE_SIZE_MIN (COUNT::static_size);
|
||||
};
|
||||
|
||||
template <typename COUNT, typename TYPE>
|
||||
struct CFFIndexOf : CFFIndex<COUNT>
|
||||
{
|
||||
const byte_str_t operator [] (unsigned int index) const
|
||||
{
|
||||
if (likely (index < CFFIndex<COUNT>::count))
|
||||
return byte_str_t (CFFIndex<COUNT>::data_base () + CFFIndex<COUNT>::offset_at (index) - 1, CFFIndex<COUNT>::length_at (index));
|
||||
return Null (byte_str_t);
|
||||
}
|
||||
|
||||
template <typename DATA, typename PARAM1, typename PARAM2>
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
unsigned int offSize_,
|
||||
|
|
thirdparty/harfbuzz/src/hb-ot-cff1-table.cc (45 changes, vendored)
@ -311,10 +311,8 @@ struct bounds_t
|
|||
|
||||
struct cff1_extents_param_t
|
||||
{
|
||||
void init (const OT::cff1::accelerator_t *_cff)
|
||||
cff1_extents_param_t (const OT::cff1::accelerator_t *_cff) : cff (_cff)
|
||||
{
|
||||
path_open = false;
|
||||
cff = _cff;
|
||||
bounds.init ();
|
||||
}
|
||||
|
||||
|
@ -322,7 +320,7 @@ struct cff1_extents_param_t
|
|||
void end_path () { path_open = false; }
|
||||
bool is_path_open () const { return path_open; }
|
||||
|
||||
bool path_open;
|
||||
bool path_open = false;
|
||||
bounds_t bounds;
|
||||
|
||||
const OT::cff1::accelerator_t *cff;
|
||||
|
@ -395,12 +393,11 @@ bool _get_bounds (const OT::cff1::accelerator_t *cff, hb_codepoint_t glyph, boun
|
|||
if (unlikely (!cff->is_valid () || (glyph >= cff->num_glyphs))) return false;
|
||||
|
||||
unsigned int fd = cff->fdSelect->get_fd (glyph);
|
||||
cff1_cs_interpreter_t<cff1_cs_opset_extents_t, cff1_extents_param_t> interp;
|
||||
const byte_str_t str = (*cff->charStrings)[glyph];
|
||||
interp.env.init (str, *cff, fd);
|
||||
interp.env.set_in_seac (in_seac);
|
||||
cff1_extents_param_t param;
|
||||
param.init (cff);
|
||||
const hb_ubytes_t str = (*cff->charStrings)[glyph];
|
||||
cff1_cs_interp_env_t env (str, *cff, fd);
|
||||
env.set_in_seac (in_seac);
|
||||
cff1_cs_interpreter_t<cff1_cs_opset_extents_t, cff1_extents_param_t> interp (env);
|
||||
cff1_extents_param_t param (cff);
|
||||
if (unlikely (!interp.interpret (param))) return false;
|
||||
bounds = param.bounds;
|
||||
return true;
|
||||
|
@ -541,10 +538,10 @@ bool _get_path (const OT::cff1::accelerator_t *cff, hb_font_t *font, hb_codepoin
|
|||
if (unlikely (!cff->is_valid () || (glyph >= cff->num_glyphs))) return false;
|
||||
|
||||
unsigned int fd = cff->fdSelect->get_fd (glyph);
|
||||
cff1_cs_interpreter_t<cff1_cs_opset_path_t, cff1_path_param_t> interp;
|
||||
const byte_str_t str = (*cff->charStrings)[glyph];
|
||||
interp.env.init (str, *cff, fd);
|
||||
interp.env.set_in_seac (in_seac);
|
||||
const hb_ubytes_t str = (*cff->charStrings)[glyph];
|
||||
cff1_cs_interp_env_t env (str, *cff, fd);
|
||||
env.set_in_seac (in_seac);
|
||||
cff1_cs_interpreter_t<cff1_cs_opset_path_t, cff1_path_param_t> interp (env);
|
||||
cff1_path_param_t param (cff, font, draw_session, delta);
|
||||
if (unlikely (!interp.interpret (param))) return false;
|
||||
|
||||
|
@ -566,18 +563,13 @@ bool OT::cff1::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, h
|
|||
|
||||
struct get_seac_param_t
|
||||
{
|
||||
void init (const OT::cff1::accelerator_t *_cff)
|
||||
{
|
||||
cff = _cff;
|
||||
base = 0;
|
||||
accent = 0;
|
||||
}
|
||||
get_seac_param_t (const OT::cff1::accelerator_t *_cff) : cff (_cff) {}
|
||||
|
||||
bool has_seac () const { return base && accent; }
|
||||
|
||||
const OT::cff1::accelerator_t *cff;
|
||||
hb_codepoint_t base;
|
||||
hb_codepoint_t accent;
|
||||
hb_codepoint_t base = 0;
|
||||
hb_codepoint_t accent = 0;
|
||||
};
|
||||
|
||||
struct cff1_cs_opset_seac_t : cff1_cs_opset_t<cff1_cs_opset_seac_t, get_seac_param_t>
|
||||
|
@ -598,11 +590,10 @@ bool OT::cff1::accelerator_t::get_seac_components (hb_codepoint_t glyph, hb_code
|
|||
if (unlikely (!is_valid () || (glyph >= num_glyphs))) return false;
|
||||
|
||||
unsigned int fd = fdSelect->get_fd (glyph);
|
||||
cff1_cs_interpreter_t<cff1_cs_opset_seac_t, get_seac_param_t> interp;
|
||||
const byte_str_t str = (*charStrings)[glyph];
|
||||
interp.env.init (str, *this, fd);
|
||||
get_seac_param_t param;
|
||||
param.init (this);
|
||||
const hb_ubytes_t str = (*charStrings)[glyph];
|
||||
cff1_cs_interp_env_t env (str, *this, fd);
|
||||
cff1_cs_interpreter_t<cff1_cs_opset_seac_t, get_seac_param_t> interp (env);
|
||||
get_seac_param_t param (this);
|
||||
if (unlikely (!interp.interpret (param))) return false;
|
||||
|
||||
if (param.has_seac ())
|
||||
|
|
thirdparty/harfbuzz/src/hb-ot-cff1-table.hh (165 changes, vendored)
@ -318,14 +318,21 @@ struct Charset0 {
return_trace (c->check_struct (this) && sids[num_glyphs - 1].sanitize (c));
}

hb_codepoint_t get_sid (hb_codepoint_t glyph) const
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned num_glyphs) const
{
  if (unlikely (glyph >= num_glyphs)) return 0;
  if (glyph == 0)
    return 0;
  else
    return sids[glyph - 1];
}

void collect_glyph_to_sid_map (hb_map_t *mapping, unsigned int num_glyphs) const
{
  for (hb_codepoint_t gid = 1; gid < num_glyphs; gid++)
    mapping->set (gid, sids[gid - 1]);
}

hb_codepoint_t get_glyph (hb_codepoint_t sid, unsigned int num_glyphs) const
{
  if (sid == 0)
@ -381,20 +388,36 @@ struct Charset1_2 {
return_trace (true);
}

hb_codepoint_t get_sid (hb_codepoint_t glyph) const
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned num_glyphs) const
{
  if (unlikely (glyph >= num_glyphs)) return 0;
  if (glyph == 0) return 0;
  glyph--;
  for (unsigned int i = 0;; i++)
  {
    if (glyph <= ranges[i].nLeft)
      return (hb_codepoint_t)ranges[i].first + glyph;
      return (hb_codepoint_t) ranges[i].first + glyph;
    glyph -= (ranges[i].nLeft + 1);
  }

  return 0;
}

void collect_glyph_to_sid_map (hb_map_t *mapping, unsigned int num_glyphs) const
{
  hb_codepoint_t gid = 1;
  for (unsigned i = 0;; i++)
  {
    hb_codepoint_t sid = ranges[i].first;
    unsigned count = ranges[i].nLeft + 1;
    for (unsigned j = 0; j < count; j++)
      mapping->set (gid++, sid++);

    if (gid >= num_glyphs)
      break;
  }
}

hb_codepoint_t get_glyph (hb_codepoint_t sid, unsigned int num_glyphs) const
{
  if (sid == 0) return 0;
@ -521,16 +544,26 @@ struct Charset
|
|||
|
||||
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned int num_glyphs) const
|
||||
{
|
||||
if (unlikely (glyph >= num_glyphs)) return 0;
|
||||
switch (format)
|
||||
{
|
||||
case 0: return u.format0.get_sid (glyph);
|
||||
case 1: return u.format1.get_sid (glyph);
|
||||
case 2: return u.format2.get_sid (glyph);
|
||||
case 0: return u.format0.get_sid (glyph, num_glyphs);
|
||||
case 1: return u.format1.get_sid (glyph, num_glyphs);
|
||||
case 2: return u.format2.get_sid (glyph, num_glyphs);
|
||||
default:return 0;
|
||||
}
|
||||
}
|
||||
|
||||
void collect_glyph_to_sid_map (hb_map_t *mapping, unsigned int num_glyphs) const
|
||||
{
|
||||
switch (format)
|
||||
{
|
||||
case 0: u.format0.collect_glyph_to_sid_map (mapping, num_glyphs); return;
|
||||
case 1: u.format1.collect_glyph_to_sid_map (mapping, num_glyphs); return;
|
||||
case 2: u.format2.collect_glyph_to_sid_map (mapping, num_glyphs); return;
|
||||
default:return;
|
||||
}
|
||||
}
|
||||
|
||||
hb_codepoint_t get_glyph (hb_codepoint_t sid, unsigned int num_glyphs) const
|
||||
{
|
||||
switch (format)
|
||||
|
@ -602,6 +635,8 @@ struct cff1_top_dict_interp_env_t : num_interp_env_t
|
|||
{
|
||||
cff1_top_dict_interp_env_t ()
|
||||
: num_interp_env_t(), prev_offset(0), last_offset(0) {}
|
||||
cff1_top_dict_interp_env_t (const hb_ubytes_t &bytes)
|
||||
: num_interp_env_t(bytes), prev_offset(0), last_offset(0) {}
|
||||
|
||||
unsigned int prev_offset;
|
||||
unsigned int last_offset;
|
||||
|
@ -1024,11 +1059,10 @@ struct cff1
|
|||
{ fini (); return; }
|
||||
|
||||
{ /* parse top dict */
|
||||
const byte_str_t topDictStr = (*topDictIndex)[0];
|
||||
const hb_ubytes_t topDictStr = (*topDictIndex)[0];
|
||||
if (unlikely (!topDictStr.sanitize (&sc))) { fini (); return; }
|
||||
cff1_top_dict_interpreter_t top_interp;
|
||||
top_interp.env.init (topDictStr);
|
||||
topDict.init ();
|
||||
cff1_top_dict_interp_env_t env (topDictStr);
|
||||
cff1_top_dict_interpreter_t top_interp (env);
|
||||
if (unlikely (!top_interp.interpret (topDict))) { fini (); return; }
|
||||
}
|
||||
|
||||
|
@ -1098,20 +1132,20 @@ struct cff1
|
|||
{
|
||||
for (unsigned int i = 0; i < fdCount; i++)
|
||||
{
|
||||
byte_str_t fontDictStr = (*fdArray)[i];
|
||||
hb_ubytes_t fontDictStr = (*fdArray)[i];
|
||||
if (unlikely (!fontDictStr.sanitize (&sc))) { fini (); return; }
|
||||
cff1_font_dict_values_t *font;
|
||||
cff1_font_dict_interpreter_t font_interp;
|
||||
font_interp.env.init (fontDictStr);
|
||||
cff1_top_dict_interp_env_t env (fontDictStr);
|
||||
cff1_font_dict_interpreter_t font_interp (env);
|
||||
font = fontDicts.push ();
|
||||
if (unlikely (font == &Crap (cff1_font_dict_values_t))) { fini (); return; }
|
||||
font->init ();
|
||||
if (unlikely (!font_interp.interpret (*font))) { fini (); return; }
|
||||
PRIVDICTVAL *priv = &privateDicts[i];
|
||||
const byte_str_t privDictStr (StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset), font->privateDictInfo.size);
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) { fini (); return; }
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp;
|
||||
priv_interp.env.init (privDictStr);
|
||||
num_interp_env_t env2 (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env2);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) { fini (); return; }
|
||||
|
||||
|
@ -1126,10 +1160,10 @@ struct cff1
|
|||
cff1_top_dict_values_t *font = &topDict;
|
||||
PRIVDICTVAL *priv = &privateDicts[0];
|
||||
|
||||
const byte_str_t privDictStr (StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset), font->privateDictInfo.size);
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) { fini (); return; }
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp;
|
||||
priv_interp.env.init (privDictStr);
|
||||
num_interp_env_t env (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) { fini (); return; }
|
||||
|
||||
|
@ -1194,6 +1228,19 @@ struct cff1
|
|||
}
|
||||
}
|
||||
|
||||
hb_map_t *create_glyph_to_sid_map () const
|
||||
{
|
||||
if (charset != &Null (Charset))
|
||||
{
|
||||
hb_map_t *mapping = hb_map_create ();
|
||||
mapping->set (0, 0);
|
||||
charset->collect_glyph_to_sid_map (mapping, num_glyphs);
|
||||
return mapping;
|
||||
}
|
||||
else
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
hb_codepoint_t glyph_to_sid (hb_codepoint_t glyph) const
|
||||
{
|
||||
if (charset != &Null (Charset))
|
||||
|
@ -1274,30 +1321,20 @@ struct cff1
|
|||
{
|
||||
SUPER::init (face);
|
||||
|
||||
glyph_names.set_relaxed (nullptr);
|
||||
|
||||
if (!is_valid ()) return;
|
||||
if (is_CID ()) return;
|
||||
|
||||
/* fill glyph_names */
|
||||
for (hb_codepoint_t gid = 0; gid < num_glyphs; gid++)
|
||||
{
|
||||
hb_codepoint_t sid = glyph_to_sid (gid);
|
||||
gname_t gname;
|
||||
gname.sid = sid;
|
||||
if (sid < cff1_std_strings_length)
|
||||
gname.name = cff1_std_strings (sid);
|
||||
else
|
||||
{
|
||||
byte_str_t ustr = (*stringIndex)[sid - cff1_std_strings_length];
|
||||
gname.name = hb_bytes_t ((const char*)ustr.arrayZ, ustr.length);
|
||||
}
|
||||
if (unlikely (!gname.name.arrayZ)) { fini (); return; }
|
||||
glyph_names.push (gname);
|
||||
}
|
||||
glyph_names.qsort ();
|
||||
}
|
||||
~accelerator_t ()
|
||||
{
|
||||
glyph_names.fini ();
|
||||
hb_sorted_vector_t<gname_t> *names = glyph_names.get_relaxed ();
|
||||
if (names)
|
||||
{
|
||||
names->fini ();
|
||||
free (names);
|
||||
}
|
||||
|
||||
SUPER::fini ();
|
||||
}
|
||||
|
@ -1305,9 +1342,9 @@ struct cff1
|
|||
bool get_glyph_name (hb_codepoint_t glyph,
|
||||
char *buf, unsigned int buf_len) const
|
||||
{
|
||||
if (!buf) return true;
|
||||
if (unlikely (!is_valid ())) return false;
|
||||
if (is_CID()) return false;
|
||||
if (unlikely (!buf_len)) return true;
|
||||
hb_codepoint_t sid = glyph_to_sid (glyph);
|
||||
const char *str;
|
||||
size_t str_len;
|
||||
|
@ -1319,7 +1356,7 @@ struct cff1
|
|||
}
|
||||
else
|
||||
{
|
||||
byte_str_t ubyte_str = (*stringIndex)[sid - cff1_std_strings_length];
|
||||
hb_ubytes_t ubyte_str = (*stringIndex)[sid - cff1_std_strings_length];
|
||||
str = (const char *)ubyte_str.arrayZ;
|
||||
str_len = ubyte_str.length;
|
||||
}
|
||||
|
@ -1333,11 +1370,53 @@ struct cff1
|
|||
bool get_glyph_from_name (const char *name, int len,
|
||||
hb_codepoint_t *glyph) const
|
||||
{
|
||||
if (unlikely (!is_valid ())) return false;
|
||||
if (is_CID()) return false;
|
||||
if (len < 0) len = strlen (name);
|
||||
if (unlikely (!len)) return false;
|
||||
|
||||
retry:
|
||||
hb_sorted_vector_t<gname_t> *names = glyph_names.get ();
|
||||
if (unlikely (!names))
|
||||
{
|
||||
names = (hb_sorted_vector_t<gname_t> *) calloc (sizeof (hb_sorted_vector_t<gname_t>), 1);
|
||||
if (likely (names))
|
||||
{
|
||||
names->init ();
|
||||
/* TODO */
|
||||
|
||||
/* fill glyph names */
|
||||
for (hb_codepoint_t gid = 0; gid < num_glyphs; gid++)
|
||||
{
|
||||
hb_codepoint_t sid = glyph_to_sid (gid);
|
||||
gname_t gname;
|
||||
gname.sid = sid;
|
||||
if (sid < cff1_std_strings_length)
|
||||
gname.name = cff1_std_strings (sid);
|
||||
else
|
||||
{
|
||||
hb_ubytes_t ustr = (*stringIndex)[sid - cff1_std_strings_length];
|
||||
gname.name = hb_bytes_t ((const char*) ustr.arrayZ, ustr.length);
|
||||
}
|
||||
if (unlikely (!gname.name.arrayZ))
|
||||
gname.name = hb_bytes_t ("", 0); /* To avoid nullptr. */
|
||||
names->push (gname);
|
||||
}
|
||||
names->qsort ();
|
||||
}
|
||||
if (unlikely (!glyph_names.cmpexch (nullptr, names)))
|
||||
{
|
||||
if (names)
|
||||
{
|
||||
names->fini ();
|
||||
free (names);
|
||||
}
|
||||
goto retry;
|
||||
}
|
||||
}
|
||||
|
||||
gname_t key = { hb_bytes_t (name, len), 0 };
|
||||
const gname_t *gname = glyph_names.bsearch (key);
|
||||
const gname_t *gname = glyph_names->bsearch (key);
|
||||
if (!gname) return false;
|
||||
hb_codepoint_t gid = sid_to_glyph (gname->sid);
|
||||
if (!gid && gname->sid) return false;
|
||||
|
@ -1359,7 +1438,7 @@ struct cff1
|
|||
{
|
||||
const gname_t *a = (const gname_t *)a_;
|
||||
const gname_t *b = (const gname_t *)b_;
|
||||
int minlen = hb_min (a->name.length, b->name.length);
|
||||
unsigned minlen = hb_min (a->name.length, b->name.length);
|
||||
int ret = strncmp (a->name.arrayZ, b->name.arrayZ, minlen);
|
||||
if (ret) return ret;
|
||||
return a->name.length - b->name.length;
|
||||
|
@ -1368,7 +1447,7 @@ struct cff1
|
|||
int cmp (const gname_t &a) const { return cmp (&a, this); }
|
||||
};
|
||||
|
||||
hb_sorted_vector_t<gname_t> glyph_names;
|
||||
mutable hb_atomic_ptr_t<hb_sorted_vector_t<gname_t>> glyph_names;
|
||||
|
||||
typedef accelerator_templ_t<cff1_private_dict_opset_t, cff1_private_dict_values_t> SUPER;
|
||||
};
|
||||
|
38
thirdparty/harfbuzz/src/hb-ot-cff2-table.cc
vendored
@ -36,9 +36,8 @@ using namespace CFF;
|
|||
|
||||
struct cff2_extents_param_t
|
||||
{
|
||||
void init ()
|
||||
cff2_extents_param_t ()
|
||||
{
|
||||
path_open = false;
|
||||
min_x.set_int (INT_MAX);
|
||||
min_y.set_int (INT_MAX);
|
||||
max_x.set_int (INT_MIN);
|
||||
|
@ -57,22 +56,22 @@ struct cff2_extents_param_t
|
|||
if (pt.y > max_y) max_y = pt.y;
|
||||
}
|
||||
|
||||
bool path_open;
|
||||
bool path_open = false;
|
||||
number_t min_x;
|
||||
number_t min_y;
|
||||
number_t max_x;
|
||||
number_t max_y;
|
||||
};
|
||||
|
||||
struct cff2_path_procs_extents_t : path_procs_t<cff2_path_procs_extents_t, cff2_cs_interp_env_t, cff2_extents_param_t>
|
||||
struct cff2_path_procs_extents_t : path_procs_t<cff2_path_procs_extents_t, cff2_cs_interp_env_t<number_t>, cff2_extents_param_t>
|
||||
{
|
||||
static void moveto (cff2_cs_interp_env_t &env, cff2_extents_param_t& param, const point_t &pt)
|
||||
static void moveto (cff2_cs_interp_env_t<number_t> &env, cff2_extents_param_t& param, const point_t &pt)
|
||||
{
|
||||
param.end_path ();
|
||||
env.moveto (pt);
|
||||
}
|
||||
|
||||
static void line (cff2_cs_interp_env_t &env, cff2_extents_param_t& param, const point_t &pt1)
|
||||
static void line (cff2_cs_interp_env_t<number_t> &env, cff2_extents_param_t& param, const point_t &pt1)
|
||||
{
|
||||
if (!param.is_path_open ())
|
||||
{
|
||||
|
@ -83,7 +82,7 @@ struct cff2_path_procs_extents_t : path_procs_t<cff2_path_procs_extents_t, cff2_
|
|||
param.update_bounds (env.get_pt ());
|
||||
}
|
||||
|
||||
static void curve (cff2_cs_interp_env_t &env, cff2_extents_param_t& param, const point_t &pt1, const point_t &pt2, const point_t &pt3)
|
||||
static void curve (cff2_cs_interp_env_t<number_t> &env, cff2_extents_param_t& param, const point_t &pt1, const point_t &pt2, const point_t &pt3)
|
||||
{
|
||||
if (!param.is_path_open ())
|
||||
{
|
||||
|
@ -98,7 +97,7 @@ struct cff2_path_procs_extents_t : path_procs_t<cff2_path_procs_extents_t, cff2_
|
|||
}
|
||||
};
|
||||
|
||||
struct cff2_cs_opset_extents_t : cff2_cs_opset_t<cff2_cs_opset_extents_t, cff2_extents_param_t, cff2_path_procs_extents_t> {};
|
||||
struct cff2_cs_opset_extents_t : cff2_cs_opset_t<cff2_cs_opset_extents_t, cff2_extents_param_t, number_t, cff2_path_procs_extents_t> {};
|
||||
|
||||
bool OT::cff2::accelerator_t::get_extents (hb_font_t *font,
|
||||
hb_codepoint_t glyph,
|
||||
|
@ -112,11 +111,10 @@ bool OT::cff2::accelerator_t::get_extents (hb_font_t *font,
|
|||
if (unlikely (!is_valid () || (glyph >= num_glyphs))) return false;
|
||||
|
||||
unsigned int fd = fdSelect->get_fd (glyph);
|
||||
cff2_cs_interpreter_t<cff2_cs_opset_extents_t, cff2_extents_param_t> interp;
|
||||
const byte_str_t str = (*charStrings)[glyph];
|
||||
interp.env.init (str, *this, fd, font->coords, font->num_coords);
|
||||
const hb_ubytes_t str = (*charStrings)[glyph];
|
||||
cff2_cs_interp_env_t<number_t> env (str, *this, fd, font->coords, font->num_coords);
|
||||
cff2_cs_interpreter_t<cff2_cs_opset_extents_t, cff2_extents_param_t, number_t> interp (env);
|
||||
cff2_extents_param_t param;
|
||||
param.init ();
|
||||
if (unlikely (!interp.interpret (param))) return false;
|
||||
|
||||
if (param.min_x >= param.max_x)
|
||||
|
@ -169,28 +167,28 @@ struct cff2_path_param_t
|
|||
hb_font_t *font;
|
||||
};
|
||||
|
||||
struct cff2_path_procs_path_t : path_procs_t<cff2_path_procs_path_t, cff2_cs_interp_env_t, cff2_path_param_t>
|
||||
struct cff2_path_procs_path_t : path_procs_t<cff2_path_procs_path_t, cff2_cs_interp_env_t<number_t>, cff2_path_param_t>
|
||||
{
|
||||
static void moveto (cff2_cs_interp_env_t &env, cff2_path_param_t& param, const point_t &pt)
|
||||
static void moveto (cff2_cs_interp_env_t<number_t> &env, cff2_path_param_t& param, const point_t &pt)
|
||||
{
|
||||
param.move_to (pt);
|
||||
env.moveto (pt);
|
||||
}
|
||||
|
||||
static void line (cff2_cs_interp_env_t &env, cff2_path_param_t& param, const point_t &pt1)
|
||||
static void line (cff2_cs_interp_env_t<number_t> &env, cff2_path_param_t& param, const point_t &pt1)
|
||||
{
|
||||
param.line_to (pt1);
|
||||
env.moveto (pt1);
|
||||
}
|
||||
|
||||
static void curve (cff2_cs_interp_env_t &env, cff2_path_param_t& param, const point_t &pt1, const point_t &pt2, const point_t &pt3)
|
||||
static void curve (cff2_cs_interp_env_t<number_t> &env, cff2_path_param_t& param, const point_t &pt1, const point_t &pt2, const point_t &pt3)
|
||||
{
|
||||
param.cubic_to (pt1, pt2, pt3);
|
||||
env.moveto (pt3);
|
||||
}
|
||||
};
|
||||
|
||||
struct cff2_cs_opset_path_t : cff2_cs_opset_t<cff2_cs_opset_path_t, cff2_path_param_t, cff2_path_procs_path_t> {};
|
||||
struct cff2_cs_opset_path_t : cff2_cs_opset_t<cff2_cs_opset_path_t, cff2_path_param_t, number_t, cff2_path_procs_path_t> {};
|
||||
|
||||
bool OT::cff2::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, hb_draw_session_t &draw_session) const
|
||||
{
|
||||
|
@ -202,9 +200,9 @@ bool OT::cff2::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, h
|
|||
if (unlikely (!is_valid () || (glyph >= num_glyphs))) return false;
|
||||
|
||||
unsigned int fd = fdSelect->get_fd (glyph);
|
||||
cff2_cs_interpreter_t<cff2_cs_opset_path_t, cff2_path_param_t> interp;
|
||||
const byte_str_t str = (*charStrings)[glyph];
|
||||
interp.env.init (str, *this, fd, font->coords, font->num_coords);
|
||||
const hb_ubytes_t str = (*charStrings)[glyph];
|
||||
cff2_cs_interp_env_t<number_t> env (str, *this, fd, font->coords, font->num_coords);
|
||||
cff2_cs_interpreter_t<cff2_cs_opset_path_t, cff2_path_param_t, number_t> interp (env);
|
||||
cff2_path_param_t param (font, draw_session);
|
||||
if (unlikely (!interp.interpret (param))) return false;
|
||||
return true;
|
||||
|
30
thirdparty/harfbuzz/src/hb-ot-cff2-table.hh
vendored
@ -247,12 +247,8 @@ typedef cff2_private_dict_values_base_t<num_dict_val_t> cff2_private_dict_values
|
|||
|
||||
struct cff2_priv_dict_interp_env_t : num_interp_env_t
|
||||
{
|
||||
void init (const byte_str_t &str)
|
||||
{
|
||||
num_interp_env_t::init (str);
|
||||
ivs = 0;
|
||||
seen_vsindex = false;
|
||||
}
|
||||
cff2_priv_dict_interp_env_t (const hb_ubytes_t &str) :
|
||||
num_interp_env_t (str) {}
|
||||
|
||||
void process_vsindex ()
|
||||
{
|
||||
|
@ -267,8 +263,8 @@ struct cff2_priv_dict_interp_env_t : num_interp_env_t
|
|||
void set_ivs (unsigned int ivs_) { ivs = ivs_; }
|
||||
|
||||
protected:
|
||||
unsigned int ivs;
|
||||
bool seen_vsindex;
|
||||
unsigned int ivs = 0;
|
||||
bool seen_vsindex = false;
|
||||
};
|
||||
|
||||
struct cff2_private_dict_opset_t : dict_opset_t
|
||||
|
@ -415,10 +411,10 @@ struct cff2
|
|||
goto fail;
|
||||
|
||||
{ /* parse top dict */
|
||||
byte_str_t topDictStr (cff2 + cff2->topDict, cff2->topDictSize);
|
||||
hb_ubytes_t topDictStr = (cff2 + cff2->topDict).as_ubytes (cff2->topDictSize);
|
||||
if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
|
||||
cff2_top_dict_interpreter_t top_interp;
|
||||
top_interp.env.init (topDictStr);
|
||||
num_interp_env_t env (topDictStr);
|
||||
cff2_top_dict_interpreter_t top_interp (env);
|
||||
topDict.init ();
|
||||
if (unlikely (!top_interp.interpret (topDict))) goto fail;
|
||||
}
|
||||
|
@ -447,20 +443,20 @@ struct cff2
|
|||
/* parse font dicts and gather private dicts */
|
||||
for (unsigned int i = 0; i < fdCount; i++)
|
||||
{
|
||||
const byte_str_t fontDictStr = (*fdArray)[i];
|
||||
const hb_ubytes_t fontDictStr = (*fdArray)[i];
|
||||
if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
|
||||
cff2_font_dict_values_t *font;
|
||||
cff2_font_dict_interpreter_t font_interp;
|
||||
font_interp.env.init (fontDictStr);
|
||||
num_interp_env_t env (fontDictStr);
|
||||
cff2_font_dict_interpreter_t font_interp (env);
|
||||
font = fontDicts.push ();
|
||||
if (unlikely (font == &Crap (cff2_font_dict_values_t))) goto fail;
|
||||
font->init ();
|
||||
if (unlikely (!font_interp.interpret (*font))) goto fail;
|
||||
|
||||
const byte_str_t privDictStr (StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset), font->privateDictInfo.size);
|
||||
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL, cff2_priv_dict_interp_env_t> priv_interp;
|
||||
priv_interp.env.init(privDictStr);
|
||||
cff2_priv_dict_interp_env_t env2 (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL, cff2_priv_dict_interp_env_t> priv_interp (env2);
|
||||
privateDicts[i].init ();
|
||||
if (unlikely (!priv_interp.interpret (privateDicts[i]))) goto fail;
|
||||
|
||||
145
thirdparty/harfbuzz/src/hb-ot-cmap-table.hh
vendored
@ -44,7 +44,7 @@ struct CmapSubtableFormat0
|
|||
bool get_glyph (hb_codepoint_t codepoint, hb_codepoint_t *glyph) const
|
||||
{
|
||||
hb_codepoint_t gid = codepoint < 256 ? glyphIdArray[codepoint] : 0;
|
||||
if (!gid)
|
||||
if (unlikely (!gid))
|
||||
return false;
|
||||
*glyph = gid;
|
||||
return true;
|
||||
|
@ -109,22 +109,26 @@ struct CmapSubtableFormat4
|
|||
|
||||
while (it) {
|
||||
// Start a new range
|
||||
start_cp = (*it).first;
|
||||
prev_run_start_cp = (*it).first;
|
||||
run_start_cp = (*it).first;
|
||||
end_cp = (*it).first;
|
||||
last_gid = (*it).second;
|
||||
run_length = 1;
|
||||
prev_delta = 0;
|
||||
{
|
||||
const auto& pair = *it;
|
||||
start_cp = pair.first;
|
||||
prev_run_start_cp = start_cp;
|
||||
run_start_cp = start_cp;
|
||||
end_cp = start_cp;
|
||||
last_gid = pair.second;
|
||||
run_length = 1;
|
||||
prev_delta = 0;
|
||||
}
|
||||
|
||||
delta = (*it).second - (*it).first;
|
||||
delta = last_gid - start_cp;
|
||||
mode = FIRST_SUB_RANGE;
|
||||
it++;
|
||||
|
||||
while (it) {
|
||||
// Process range
|
||||
hb_codepoint_t next_cp = (*it).first;
|
||||
hb_codepoint_t next_gid = (*it).second;
|
||||
const auto& pair = *it;
|
||||
hb_codepoint_t next_cp = pair.first;
|
||||
hb_codepoint_t next_gid = pair.second;
|
||||
if (next_cp != end_cp + 1) {
|
||||
// Current range is over, stop processing.
|
||||
break;
|
||||
|
@ -282,23 +286,22 @@ struct CmapSubtableFormat4
|
|||
}
|
||||
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
HBUINT16* serialize_rangeoffset_glyid (hb_serialize_context_t *c,
|
||||
Iterator it,
|
||||
Iterator it,
|
||||
HBUINT16 *endCode,
|
||||
HBUINT16 *startCode,
|
||||
HBINT16 *idDelta,
|
||||
unsigned segcount)
|
||||
{
|
||||
hb_hashmap_t<hb_codepoint_t, hb_codepoint_t> cp_to_gid;
|
||||
+ it | hb_sink (cp_to_gid);
|
||||
hb_map_t cp_to_gid { it };
|
||||
|
||||
HBUINT16 *idRangeOffset = c->allocate_size<HBUINT16> (HBUINT16::static_size * segcount);
|
||||
if (unlikely (!c->check_success (idRangeOffset))) return nullptr;
|
||||
if (unlikely ((char *)idRangeOffset - (char *)idDelta != (int) segcount * (int) HBINT16::static_size)) return nullptr;
|
||||
|
||||
for (unsigned i : + hb_range (segcount)
|
||||
| hb_filter ([&] (const unsigned _) { return idDelta[_] == 0; }))
|
||||
| hb_filter ([&] (const unsigned _) { return idDelta[_] == 0; }))
|
||||
{
|
||||
idRangeOffset[i] = 2 * (c->start_embed<HBUINT16> () - idRangeOffset - i);
|
||||
for (hb_codepoint_t cp = startCode[i]; cp <= endCode[i]; cp++)
|
||||
|
@ -323,22 +326,31 @@ struct CmapSubtableFormat4
|
|||
{ return _.first <= 0xFFFF; })
|
||||
;
|
||||
|
||||
if (format4_iter.len () == 0) return;
|
||||
if (!format4_iter) return;
|
||||
|
||||
unsigned table_initpos = c->length ();
|
||||
if (unlikely (!c->extend_min (this))) return;
|
||||
this->format = 4;
|
||||
|
||||
hb_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> cp_to_gid {
|
||||
format4_iter
|
||||
};
|
||||
|
||||
//serialize endCode[], startCode[], idDelta[]
|
||||
HBUINT16* endCode = c->start_embed<HBUINT16> ();
|
||||
unsigned segcount = serialize_find_segcount (format4_iter);
|
||||
if (unlikely (!serialize_start_end_delta_arrays (c, format4_iter, segcount)))
|
||||
unsigned segcount = serialize_find_segcount (cp_to_gid.iter());
|
||||
if (unlikely (!serialize_start_end_delta_arrays (c, cp_to_gid.iter(), segcount)))
|
||||
return;
|
||||
|
||||
HBUINT16 *startCode = endCode + segcount + 1;
|
||||
HBINT16 *idDelta = ((HBINT16*)startCode) + segcount;
|
||||
|
||||
HBUINT16 *idRangeOffset = serialize_rangeoffset_glyid (c, format4_iter, endCode, startCode, idDelta, segcount);
|
||||
HBUINT16 *idRangeOffset = serialize_rangeoffset_glyid (c,
|
||||
cp_to_gid.iter (),
|
||||
endCode,
|
||||
startCode,
|
||||
idDelta,
|
||||
segcount);
|
||||
if (unlikely (!c->check_success (idRangeOffset))) return;
|
||||
|
||||
this->length = c->length () - table_initpos;
|
||||
|
@ -401,7 +413,7 @@ struct CmapSubtableFormat4
|
|||
2,
|
||||
_hb_cmp_method<hb_codepoint_t, CustomRange, unsigned>,
|
||||
this->segCount + 1);
|
||||
if (!found)
|
||||
if (unlikely (!found))
|
||||
return false;
|
||||
unsigned int i = found - endCount;
|
||||
|
||||
|
@ -421,7 +433,7 @@ struct CmapSubtableFormat4
|
|||
gid += this->idDelta[i];
|
||||
}
|
||||
gid &= 0xFFFFu;
|
||||
if (!gid)
|
||||
if (unlikely (!gid))
|
||||
return false;
|
||||
*glyph = gid;
|
||||
return true;
|
||||
|
@ -440,14 +452,14 @@ struct CmapSubtableFormat4
|
|||
hb_codepoint_t start = this->startCount[i];
|
||||
hb_codepoint_t end = this->endCount[i];
|
||||
unsigned int rangeOffset = this->idRangeOffset[i];
|
||||
out->add_range(start, end);
|
||||
if (rangeOffset == 0)
|
||||
{
|
||||
for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++)
|
||||
{
|
||||
hb_codepoint_t gid = (codepoint + this->idDelta[i]) & 0xFFFFu;
|
||||
if (unlikely (!gid))
|
||||
continue;
|
||||
out->add (codepoint);
|
||||
out->del(codepoint);
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@ -456,11 +468,13 @@ struct CmapSubtableFormat4
|
|||
{
|
||||
unsigned int index = rangeOffset / 2 + (codepoint - this->startCount[i]) + i - this->segCount;
|
||||
if (unlikely (index >= this->glyphIdArrayLength))
|
||||
{
|
||||
out->del_range (codepoint, end);
|
||||
break;
|
||||
}
|
||||
hb_codepoint_t gid = this->glyphIdArray[index];
|
||||
if (unlikely (!gid))
|
||||
continue;
|
||||
out->add (codepoint);
|
||||
out->del(codepoint);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -469,6 +483,8 @@ struct CmapSubtableFormat4
|
|||
void collect_mapping (hb_set_t *unicodes, /* OUT */
|
||||
hb_map_t *mapping /* OUT */) const
|
||||
{
|
||||
// TODO(grieger): optimize similar to collect_unicodes
|
||||
// (ie. use add_range())
|
||||
unsigned count = this->segCount;
|
||||
if (count && this->startCount[count - 1] == 0xFFFFu)
|
||||
count--; /* Skip sentinel segment. */
|
||||
|
@ -620,7 +636,7 @@ struct CmapSubtableTrimmed
|
|||
{
|
||||
/* Rely on our implicit array bound-checking. */
|
||||
hb_codepoint_t gid = glyphIdArray[codepoint - startCharCode];
|
||||
if (!gid)
|
||||
if (unlikely (!gid))
|
||||
return false;
|
||||
*glyph = gid;
|
||||
return true;
|
||||
|
@ -674,7 +690,7 @@ struct CmapSubtableTrimmed
|
|||
};
|
||||
|
||||
struct CmapSubtableFormat6 : CmapSubtableTrimmed<HBUINT16> {};
|
||||
struct CmapSubtableFormat10 : CmapSubtableTrimmed<HBUINT32 > {};
|
||||
struct CmapSubtableFormat10 : CmapSubtableTrimmed<HBUINT32> {};
|
||||
|
||||
template <typename T>
|
||||
struct CmapSubtableLongSegmented
|
||||
|
@ -684,7 +700,7 @@ struct CmapSubtableLongSegmented
|
|||
bool get_glyph (hb_codepoint_t codepoint, hb_codepoint_t *glyph) const
|
||||
{
|
||||
hb_codepoint_t gid = T::group_get_glyph (groups.bsearch (codepoint), codepoint);
|
||||
if (!gid)
|
||||
if (unlikely (!gid))
|
||||
return false;
|
||||
*glyph = gid;
|
||||
return true;
|
||||
|
@ -722,11 +738,19 @@ struct CmapSubtableLongSegmented
|
|||
hb_map_t *mapping, /* OUT */
|
||||
unsigned num_glyphs) const
|
||||
{
|
||||
hb_codepoint_t last_end = 0;
|
||||
for (unsigned i = 0; i < this->groups.len; i++)
|
||||
{
|
||||
hb_codepoint_t start = this->groups[i].startCharCode;
|
||||
hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups[i].endCharCode,
|
||||
(hb_codepoint_t) HB_UNICODE_MAX);
|
||||
if (unlikely (start > end || start < last_end)) {
|
||||
// Range is not in order and is invalid, skip it.
|
||||
continue;
|
||||
}
|
||||
last_end = end;
|
||||
|
||||
|
||||
hb_codepoint_t gid = this->groups[i].glyphID;
|
||||
if (!gid)
|
||||
{
|
||||
|
@ -778,16 +802,16 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
|
|||
void serialize (hb_serialize_context_t *c,
|
||||
Iterator it)
|
||||
{
|
||||
if (it.len () == 0) return;
|
||||
if (!it) return;
|
||||
unsigned table_initpos = c->length ();
|
||||
if (unlikely (!c->extend_min (this))) return;
|
||||
|
||||
hb_codepoint_t startCharCode = 0xFFFF, endCharCode = 0xFFFF;
|
||||
hb_codepoint_t startCharCode = (hb_codepoint_t) -1, endCharCode = (hb_codepoint_t) -1;
|
||||
hb_codepoint_t glyphID = 0;
|
||||
|
||||
for (const auto& _ : +it)
|
||||
{
|
||||
if (startCharCode == 0xFFFF)
|
||||
if (startCharCode == (hb_codepoint_t) -1)
|
||||
{
|
||||
startCharCode = _.first;
|
||||
endCharCode = _.first;
|
||||
|
@ -818,7 +842,7 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
|
|||
this->format = 12;
|
||||
this->reserved = 0;
|
||||
this->length = c->length () - table_initpos;
|
||||
this->groups.len = (this->length - min_size)/CmapSubtableLongGroup::static_size;
|
||||
this->groups.len = (this->length - min_size) / CmapSubtableLongGroup::static_size;
|
||||
}
|
||||
|
||||
static size_t get_sub_table_size (const hb_sorted_vector_t<CmapSubtableLongGroup> &groups_data)
|
||||
|
@ -1448,6 +1472,37 @@ struct EncodingRecord
|
|||
DEFINE_SIZE_STATIC (8);
|
||||
};
|
||||
|
||||
struct SubtableUnicodesCache {
|
||||
|
||||
private:
|
||||
const void* base;
|
||||
hb_hashmap_t<intptr_t, hb_set_t*> cached_unicodes;
|
||||
|
||||
public:
|
||||
SubtableUnicodesCache(const void* cmap_base)
|
||||
: base(cmap_base), cached_unicodes() {}
|
||||
~SubtableUnicodesCache()
|
||||
{
|
||||
for (hb_set_t* s : cached_unicodes.values()) {
|
||||
hb_set_destroy (s);
|
||||
}
|
||||
}
|
||||
|
||||
hb_set_t* set_for(const EncodingRecord* record)
|
||||
{
|
||||
if (!cached_unicodes.has ((intptr_t) record)) {
|
||||
hb_set_t* new_set = hb_set_create ();
|
||||
if (!cached_unicodes.set ((intptr_t) record, new_set)) {
|
||||
hb_set_destroy (new_set);
|
||||
return hb_set_get_empty ();
|
||||
}
|
||||
(base+record->subtable).collect_unicodes (cached_unicodes.get ((intptr_t) record));
|
||||
}
|
||||
return cached_unicodes.get ((intptr_t) record);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
struct cmap
|
||||
{
|
||||
static constexpr hb_tag_t tableTag = HB_OT_TAG_cmap;
|
||||
|
@ -1467,6 +1522,7 @@ struct cmap
|
|||
unsigned format4objidx = 0, format12objidx = 0, format14objidx = 0;
|
||||
auto snap = c->snapshot ();
|
||||
|
||||
SubtableUnicodesCache unicodes_cache (base);
|
||||
for (const EncodingRecord& _ : encodingrec_iter)
|
||||
{
|
||||
if (c->in_error ())
|
||||
|
@ -1475,12 +1531,11 @@ struct cmap
|
|||
unsigned format = (base+_.subtable).u.format;
|
||||
if (format != 4 && format != 12 && format != 14) continue;
|
||||
|
||||
hb_set_t unicodes_set;
|
||||
(base+_.subtable).collect_unicodes (&unicodes_set);
|
||||
hb_set_t* unicodes_set = unicodes_cache.set_for (&_);
|
||||
|
||||
if (!drop_format_4 && format == 4)
|
||||
{
|
||||
c->copy (_, + it | hb_filter (unicodes_set, hb_first), 4u, base, plan, &format4objidx);
|
||||
c->copy (_, + it | hb_filter (*unicodes_set, hb_first), 4u, base, plan, &format4objidx);
|
||||
if (c->in_error () && c->only_overflow ())
|
||||
{
|
||||
// cmap4 overflowed, reset and retry serialization without format 4 subtables.
|
||||
|
@ -1495,8 +1550,8 @@ struct cmap
|
|||
|
||||
else if (format == 12)
|
||||
{
|
||||
if (_can_drop (_, unicodes_set, base, + it | hb_map (hb_first), encodingrec_iter)) continue;
|
||||
c->copy (_, + it | hb_filter (unicodes_set, hb_first), 12u, base, plan, &format12objidx);
|
||||
if (_can_drop (_, *unicodes_set, base, unicodes_cache, + it | hb_map (hb_first), encodingrec_iter)) continue;
|
||||
c->copy (_, + it | hb_filter (*unicodes_set, hb_first), 12u, base, plan, &format12objidx);
|
||||
}
|
||||
else if (format == 14) c->copy (_, it, 14u, base, plan, &format14objidx);
|
||||
}
|
||||
|
@ -1514,6 +1569,7 @@ struct cmap
|
|||
bool _can_drop (const EncodingRecord& cmap12,
|
||||
const hb_set_t& cmap12_unicodes,
|
||||
const void* base,
|
||||
SubtableUnicodesCache& unicodes_cache,
|
||||
Iterator subset_unicodes,
|
||||
EncodingRecordIterator encoding_records)
|
||||
{
|
||||
|
@ -1544,11 +1600,10 @@ struct cmap
|
|||
|| (base+_.subtable).get_language() != target_language)
|
||||
continue;
|
||||
|
||||
hb_set_t sibling_unicodes;
|
||||
(base+_.subtable).collect_unicodes (&sibling_unicodes);
|
||||
hb_set_t* sibling_unicodes = unicodes_cache.set_for (&_);
|
||||
|
||||
auto cmap12 = + subset_unicodes | hb_filter (cmap12_unicodes);
|
||||
auto sibling = + subset_unicodes | hb_filter (sibling_unicodes);
|
||||
auto sibling = + subset_unicodes | hb_filter (*sibling_unicodes);
|
||||
for (; cmap12 && sibling; cmap12++, sibling++)
|
||||
{
|
||||
unsigned a = *cmap12;
|
||||
|
@ -1616,13 +1671,7 @@ struct cmap
|
|||
if (unlikely (has_format12 && (!unicode_ucs4 && !ms_ucs4))) return_trace (false);
|
||||
|
||||
auto it =
|
||||
+ hb_iter (c->plan->unicodes)
|
||||
| hb_map ([&] (hb_codepoint_t _)
|
||||
{
|
||||
hb_codepoint_t new_gid = HB_MAP_VALUE_INVALID;
|
||||
c->plan->new_gid_for_codepoint (_, &new_gid);
|
||||
return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (_, new_gid);
|
||||
})
|
||||
+ c->plan->unicode_to_new_gid_list.iter ()
|
||||
| hb_filter ([&] (const hb_pair_t<hb_codepoint_t, hb_codepoint_t> _)
|
||||
{ return (_.second != HB_MAP_VALUE_INVALID); })
|
||||
;
|
||||
|
|
|
@ -197,30 +197,38 @@ struct CPAL
|
|||
|
||||
public:
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
const hb_array_t<const BGRAColor> &color_records,
|
||||
const hb_array_t<const HBUINT16> &color_record_indices,
|
||||
const hb_map_t &color_record_index_map,
|
||||
const hb_set_t &retained_color_record_indices) const
|
||||
const hb_array_t<const BGRAColor> &color_records,
|
||||
const hb_vector_t<unsigned>& first_color_index_for_layer,
|
||||
const hb_map_t& first_color_to_layer_index,
|
||||
const hb_set_t &retained_color_indices) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
// TODO(grieger): limit total final size.
|
||||
|
||||
for (const auto idx : color_record_indices)
|
||||
{
|
||||
hb_codepoint_t layer_index = first_color_to_layer_index[idx];
|
||||
|
||||
HBUINT16 new_idx;
|
||||
if (idx == 0) new_idx = 0;
|
||||
else new_idx = color_record_index_map.get (idx);
|
||||
new_idx = layer_index * retained_color_indices.get_population ();
|
||||
if (!c->copy<HBUINT16> (new_idx)) return_trace (false);
|
||||
}
|
||||
|
||||
c->push ();
|
||||
for (const auto _ : retained_color_record_indices.iter ())
|
||||
for (unsigned first_color_index : first_color_index_for_layer)
|
||||
{
|
||||
if (!c->copy<BGRAColor> (color_records[_]))
|
||||
for (hb_codepoint_t color_index : retained_color_indices)
|
||||
{
|
||||
c->pop_discard ();
|
||||
return_trace (false);
|
||||
if (!c->copy<BGRAColor> (color_records[first_color_index + color_index]))
|
||||
{
|
||||
c->pop_discard ();
|
||||
return_trace (false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
c->add_link (colorRecordsZ, c->pop_pack ());
|
||||
return_trace (true);
|
||||
}
|
||||
|
@ -228,6 +236,8 @@ struct CPAL
|
|||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
if (!numPalettes) return_trace (false);
|
||||
|
||||
const hb_map_t *color_index_map = c->plan->colr_palettes;
|
||||
if (color_index_map->is_empty ()) return_trace (false);
|
||||
|
||||
|
@ -242,30 +252,34 @@ struct CPAL
|
|||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
|
||||
out->version = version;
|
||||
out->numColors = retained_color_indices.get_population ();
|
||||
out->numPalettes = numPalettes;
|
||||
|
||||
const hb_array_t<const HBUINT16> colorRecordIndices = colorRecordIndicesZ.as_array (numPalettes);
|
||||
hb_map_t color_record_index_map;
|
||||
hb_set_t retained_color_record_indices;
|
||||
hb_vector_t<unsigned> first_color_index_for_layer;
|
||||
hb_map_t first_color_to_layer_index;
|
||||
|
||||
unsigned record_count = 0;
|
||||
const hb_array_t<const HBUINT16> colorRecordIndices = colorRecordIndicesZ.as_array (numPalettes);
|
||||
for (const auto first_color_record_idx : colorRecordIndices)
|
||||
{
|
||||
for (unsigned retained_color_idx : retained_color_indices.iter ())
|
||||
{
|
||||
unsigned color_record_idx = first_color_record_idx + retained_color_idx;
|
||||
if (color_record_index_map.has (color_record_idx)) continue;
|
||||
color_record_index_map.set (color_record_idx, record_count);
|
||||
retained_color_record_indices.add (color_record_idx);
|
||||
record_count++;
|
||||
}
|
||||
if (first_color_to_layer_index.has (first_color_record_idx)) continue;
|
||||
|
||||
first_color_index_for_layer.push (first_color_record_idx);
|
||||
first_color_to_layer_index.set (first_color_record_idx,
|
||||
first_color_index_for_layer.length - 1);
|
||||
}
|
||||
|
||||
out->numColorRecords = record_count;
|
||||
out->numColorRecords = first_color_index_for_layer.length
|
||||
* retained_color_indices.get_population ();
|
||||
|
||||
const hb_array_t<const BGRAColor> color_records = (this+colorRecordsZ).as_array (numColorRecords);
|
||||
if (!out->serialize (c->serializer, color_records, colorRecordIndices, color_record_index_map, retained_color_record_indices))
|
||||
if (!out->serialize (c->serializer,
|
||||
colorRecordIndices,
|
||||
color_records,
|
||||
first_color_index_for_layer,
|
||||
first_color_to_layer_index,
|
||||
retained_color_indices))
|
||||
return_trace (false);
|
||||
|
||||
if (version == 1)
|
||||
2
thirdparty/harfbuzz/src/hb-ot-glyf-table.hh
vendored
@ -953,6 +953,8 @@ struct glyf
|
|||
glyf_table.destroy ();
|
||||
}
|
||||
|
||||
bool has_data () const { return num_glyphs; }
|
||||
|
||||
protected:
|
||||
template<typename T>
|
||||
bool get_points (hb_font_t *font, hb_codepoint_t gid, T consumer) const
|
||||
266
thirdparty/harfbuzz/src/hb-ot-layout-common.hh
vendored
@ -91,12 +91,12 @@ template<typename Iterator>
|
|||
static inline void ClassDef_serialize (hb_serialize_context_t *c,
|
||||
Iterator it);
|
||||
|
||||
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
||||
const hb_map_t &gid_klass_map,
|
||||
hb_sorted_vector_t<HBGlyphID16> &glyphs,
|
||||
const hb_set_t &klasses,
|
||||
bool use_class_zero,
|
||||
hb_map_t *klass_map /*INOUT*/);
|
||||
static void ClassDef_remap_and_serialize (
|
||||
hb_serialize_context_t *c,
|
||||
const hb_set_t &klasses,
|
||||
bool use_class_zero,
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> &glyph_and_klass, /* IN/OUT */
|
||||
hb_map_t *klass_map /*IN/OUT*/);
|
||||
|
||||
|
||||
struct hb_prune_langsys_context_t
|
||||
|
@ -1470,7 +1470,8 @@ struct CoverageFormat1
|
|||
void next () { i++; }
|
||||
hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
|
||||
bool operator != (const iter_t& o) const
|
||||
{ return i != o.i || c != o.c; }
|
||||
{ return i != o.i; }
|
||||
iter_t __end__ () const { iter_t it; it.init (*c); it.i = c->glyphArray.len; return it; }
|
||||
|
||||
private:
|
||||
const struct CoverageFormat1 *c;
|
||||
|
@ -1506,12 +1507,6 @@ struct CoverageFormat2
|
|||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
|
||||
if (unlikely (!glyphs))
|
||||
{
|
||||
rangeRecord.len = 0;
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
/* TODO(iter) Write more efficiently? */
|
||||
|
||||
unsigned num_ranges = 0;
|
||||
|
@ -1524,6 +1519,7 @@ struct CoverageFormat2
|
|||
}
|
||||
|
||||
if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
|
||||
if (!num_ranges) return_trace (true);
|
||||
|
||||
unsigned count = 0;
|
||||
unsigned range = (unsigned) -1;
|
||||
|
@ -1552,25 +1548,26 @@ struct CoverageFormat2
|
|||
|
||||
bool intersects (const hb_set_t *glyphs) const
|
||||
{
|
||||
/* TODO Speed up, using hb_set_next() and bsearch()? */
|
||||
/* TODO(iter) Rewrite as dagger. */
|
||||
for (const auto& range : rangeRecord.as_array ())
|
||||
if (range.intersects (glyphs))
|
||||
return true;
|
||||
return false;
|
||||
return hb_any (+ hb_iter (rangeRecord.as_array ())
|
||||
| hb_map ([glyphs] (const RangeRecord &range) { return range.intersects (glyphs); }));
|
||||
}
|
||||
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
|
||||
{
|
||||
/* TODO(iter) Rewrite as dagger. */
|
||||
for (const auto& range : rangeRecord.as_array ())
|
||||
auto cmp = [] (const void *pk, const void *pr) -> int
|
||||
{
|
||||
if (range.value <= index &&
|
||||
index < (unsigned int) range.value + (range.last - range.first) &&
|
||||
range.intersects (glyphs))
|
||||
return true;
|
||||
else if (index < range.value)
|
||||
return false;
|
||||
}
|
||||
unsigned index = * (const unsigned *) pk;
|
||||
const RangeRecord &range = * (const RangeRecord *) pr;
|
||||
if (index < range.value) return -1;
|
||||
if (index > (unsigned int) range.value + (range.last - range.first)) return +1;
|
||||
return 0;
|
||||
};
|
||||
|
||||
auto arr = rangeRecord.as_array ();
|
||||
unsigned idx;
|
||||
if (hb_bsearch_impl (&idx, index,
|
||||
arr.arrayZ, arr.length, sizeof (arr[0]),
|
||||
(int (*)(const void *_key, const void *_item)) cmp))
|
||||
return arr.arrayZ[idx].intersects (glyphs);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -1579,8 +1576,10 @@ struct CoverageFormat2
|
|||
for (const auto& range : rangeRecord.as_array ())
|
||||
{
|
||||
if (!range.intersects (glyphs)) continue;
|
||||
for (hb_codepoint_t g = range.first; g <= range.last; g++)
|
||||
if (glyphs->has (g)) intersect_glyphs->add (g);
|
||||
unsigned last = range.last;
|
||||
for (hb_codepoint_t g = range.first - 1;
|
||||
glyphs->next (&g) && g <= last;)
|
||||
intersect_glyphs->add (g);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1632,6 +1631,8 @@ struct CoverageFormat2
|
|||
return;
|
||||
}
|
||||
}
|
||||
else
|
||||
j = 0;
|
||||
return;
|
||||
}
|
||||
coverage++;
|
||||
|
@ -1639,7 +1640,15 @@ struct CoverageFormat2
|
|||
}
|
||||
hb_codepoint_t get_glyph () const { return j; }
|
||||
bool operator != (const iter_t& o) const
|
||||
{ return i != o.i || j != o.j || c != o.c; }
|
||||
{ return i != o.i || j != o.j; }
|
||||
iter_t __end__ () const
|
||||
{
|
||||
iter_t it;
|
||||
it.init (*c);
|
||||
it.i = c->rangeRecord.len;
|
||||
it.j = 0;
|
||||
return it;
|
||||
}
|
||||
|
||||
private:
|
||||
const struct CoverageFormat2 *c;
|
||||
|
@ -1708,18 +1717,17 @@ struct Coverage
|
|||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
|
||||
auto it =
|
||||
+ iter ()
|
||||
| hb_filter (glyphset)
|
||||
| hb_map_retains_sorting (glyph_map)
|
||||
| hb_filter (c->plan->glyph_map_gsub)
|
||||
| hb_map_retains_sorting (c->plan->glyph_map_gsub)
|
||||
;
|
||||
|
||||
bool ret = bool (it);
|
||||
Coverage_serialize (c->serializer, it);
|
||||
return_trace (ret);
|
||||
// Cache the iterator result as it will be iterated multiple times
|
||||
// by the serialize code below.
|
||||
hb_sorted_vector_t<hb_codepoint_t> glyphs (it);
|
||||
Coverage_serialize (c->serializer, glyphs.iter ());
|
||||
return_trace (bool (glyphs));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -1822,7 +1830,7 @@ struct Coverage
|
|||
}
|
||||
bool operator != (const iter_t& o) const
|
||||
{
|
||||
if (format != o.format) return true;
|
||||
if (unlikely (format != o.format)) return true;
|
||||
switch (format)
|
||||
{
|
||||
case 1: return u.format1 != o.u.format1;
|
||||
|
@ -1830,6 +1838,18 @@ struct Coverage
|
|||
default:return false;
|
||||
}
|
||||
}
|
||||
iter_t __end__ () const
|
||||
{
|
||||
iter_t it = {};
|
||||
it.format = format;
|
||||
switch (format)
|
||||
{
|
||||
case 1: it.u.format1 = u.format1.__end__ (); break;
|
||||
case 2: it.u.format2 = u.format2.__end__ (); break;
|
||||
default: break;
|
||||
}
|
||||
return it;
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned int format;
|
||||
|
@ -1857,16 +1877,14 @@ Coverage_serialize (hb_serialize_context_t *c,
|
|||
{ c->start_embed<Coverage> ()->serialize (c, it); }
|
||||
|
||||
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
||||
const hb_map_t &gid_klass_map,
|
||||
hb_sorted_vector_t<HBGlyphID16> &glyphs,
|
||||
const hb_set_t &klasses,
|
||||
bool use_class_zero,
|
||||
hb_map_t *klass_map /*INOUT*/)
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> &glyph_and_klass, /* IN/OUT */
|
||||
hb_map_t *klass_map /*IN/OUT*/)
|
||||
{
|
||||
if (!klass_map)
|
||||
{
|
||||
ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter ()
|
||||
| hb_map (gid_klass_map)));
|
||||
ClassDef_serialize (c, glyph_and_klass.iter ());
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1883,17 +1901,15 @@ static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
|||
idx++;
|
||||
}
|
||||
|
||||
auto it =
|
||||
+ glyphs.iter ()
|
||||
| hb_map_retains_sorting ([&] (const HBGlyphID16& gid) -> hb_pair_t<hb_codepoint_t, unsigned>
|
||||
{
|
||||
unsigned new_klass = klass_map->get (gid_klass_map[gid]);
|
||||
return hb_pair ((hb_codepoint_t)gid, new_klass);
|
||||
})
|
||||
;
|
||||
|
||||
c->propagate_error (glyphs, klasses);
|
||||
ClassDef_serialize (c, it);
|
||||
for (unsigned i = 0; i < glyph_and_klass.length; i++)
|
||||
{
|
||||
hb_codepoint_t klass = glyph_and_klass[i].second;
|
||||
glyph_and_klass[i].second = klass_map->get (klass);
|
||||
}
|
||||
|
||||
c->propagate_error (glyph_and_klass, klasses);
|
||||
ClassDef_serialize (c, glyph_and_klass.iter ());
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -1949,36 +1965,37 @@ struct ClassDefFormat1
|
|||
const Coverage* glyph_filter = nullptr) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map_gsub;
|
||||
|
||||
hb_sorted_vector_t<HBGlyphID16> glyphs;
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> glyph_and_klass;
|
||||
hb_set_t orig_klasses;
|
||||
hb_map_t gid_org_klass_map;
|
||||
|
||||
hb_codepoint_t start = startGlyph;
|
||||
hb_codepoint_t end = start + classValue.len;
|
||||
|
||||
for (const hb_codepoint_t gid : + hb_range (start, end)
|
||||
| hb_filter (glyphset))
|
||||
for (const hb_codepoint_t gid : + hb_range (start, end))
|
||||
{
|
||||
hb_codepoint_t new_gid = glyph_map[gid];
|
||||
if (new_gid == HB_MAP_VALUE_INVALID) continue;
|
||||
if (glyph_filter && !glyph_filter->has(gid)) continue;
|
||||
|
||||
unsigned klass = classValue[gid - start];
|
||||
if (!klass) continue;
|
||||
|
||||
glyphs.push (glyph_map[gid]);
|
||||
gid_org_klass_map.set (glyph_map[gid], klass);
|
||||
glyph_and_klass.push (hb_pair (new_gid, klass));
|
||||
orig_klasses.add (klass);
|
||||
}
|
||||
|
||||
unsigned glyph_count = glyph_filter
|
||||
? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
|
||||
: glyphset.get_population ();
|
||||
use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
|
||||
ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
|
||||
glyphs, orig_klasses, use_class_zero, klass_map);
|
||||
return_trace (keep_empty_table || (bool) glyphs);
|
||||
? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter))
|
||||
: glyph_map.get_population ();
|
||||
use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
|
||||
ClassDef_remap_and_serialize (c->serializer,
|
||||
orig_klasses,
|
||||
use_class_zero,
|
||||
glyph_and_klass,
|
||||
klass_map);
|
||||
return_trace (keep_empty_table || (bool) glyph_and_klass);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -2044,10 +2061,9 @@ struct ClassDefFormat1
|
|||
}
|
||||
/* TODO Speed up, using set overlap first? */
|
||||
/* TODO(iter) Rewrite as dagger. */
|
||||
HBUINT16 k {klass};
|
||||
const HBUINT16 *arr = classValue.arrayZ;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (arr[i] == k && glyphs->has (startGlyph + i))
|
||||
if (arr[i] == klass && glyphs->has (startGlyph + i))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
@ -2057,17 +2073,32 @@ struct ClassDefFormat1
|
|||
unsigned count = classValue.len;
|
||||
if (klass == 0)
|
||||
{
|
||||
hb_codepoint_t endGlyph = startGlyph + count -1;
|
||||
for (hb_codepoint_t g : glyphs->iter ())
|
||||
if (g < startGlyph || g > endGlyph)
|
||||
intersect_glyphs->add (g);
|
||||
unsigned start_glyph = startGlyph;
|
||||
for (unsigned g = HB_SET_VALUE_INVALID;
|
||||
hb_set_next (glyphs, &g) && g < start_glyph;)
|
||||
intersect_glyphs->add (g);
|
||||
|
||||
for (unsigned g = startGlyph + count - 1;
|
||||
hb_set_next (glyphs, &g);)
|
||||
intersect_glyphs->add (g);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
if (classValue[i] == klass && glyphs->has (startGlyph + i))
|
||||
intersect_glyphs->add (startGlyph + i);
|
||||
intersect_glyphs->add (startGlyph + i);
|
||||
|
||||
#if 0
|
||||
/* The following implementation is faster asymptotically, but slower
|
||||
* in practice. */
|
||||
unsigned start_glyph = startGlyph;
|
||||
unsigned end_glyph = start_glyph + count;
|
||||
for (unsigned g = startGlyph - 1;
|
||||
hb_set_next (glyphs, &g) && g < end_glyph;)
|
||||
if (classValue.arrayZ[g - start_glyph] == klass)
|
||||
intersect_glyphs->add (g);
|
||||
#endif
|
||||
}
|
||||
|
||||
void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
|
||||
|
@ -2167,12 +2198,10 @@ struct ClassDefFormat2
|
|||
const Coverage* glyph_filter = nullptr) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map_gsub;
|
||||
|
||||
hb_sorted_vector_t<HBGlyphID16> glyphs;
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> glyph_and_klass;
|
||||
hb_set_t orig_klasses;
|
||||
hb_map_t gid_org_klass_map;
|
||||
|
||||
unsigned count = rangeRecord.len;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
|
@ -2183,21 +2212,26 @@ struct ClassDefFormat2
|
|||
hb_codepoint_t end = rangeRecord[i].last + 1;
|
||||
for (hb_codepoint_t g = start; g < end; g++)
|
||||
{
|
||||
if (!glyphset.has (g)) continue;
|
||||
hb_codepoint_t new_gid = glyph_map[g];
|
||||
if (new_gid == HB_MAP_VALUE_INVALID) continue;
|
||||
if (glyph_filter && !glyph_filter->has (g)) continue;
|
||||
glyphs.push (glyph_map[g]);
|
||||
gid_org_klass_map.set (glyph_map[g], klass);
|
||||
|
||||
glyph_and_klass.push (hb_pair (new_gid, klass));
|
||||
orig_klasses.add (klass);
|
||||
}
|
||||
}
|
||||
|
||||
const hb_set_t& glyphset = *c->plan->glyphset_gsub ();
|
||||
unsigned glyph_count = glyph_filter
|
||||
? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
|
||||
: glyphset.get_population ();
|
||||
use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
|
||||
ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
|
||||
glyphs, orig_klasses, use_class_zero, klass_map);
|
||||
return_trace (keep_empty_table || (bool) glyphs);
|
||||
: glyph_map.get_population ();
|
||||
use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
|
||||
ClassDef_remap_and_serialize (c->serializer,
|
||||
orig_klasses,
|
||||
use_class_zero,
|
||||
glyph_and_klass,
|
||||
klass_map);
|
||||
return_trace (keep_empty_table || (bool) glyph_and_klass);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -2263,10 +2297,9 @@ struct ClassDefFormat2
|
|||
}
|
||||
/* TODO Speed up, using set overlap first? */
|
||||
/* TODO(iter) Rewrite as dagger. */
|
||||
HBUINT16 k {klass};
|
||||
const RangeRecord *arr = rangeRecord.arrayZ;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (arr[i].value == k && arr[i].intersects (glyphs))
|
||||
if (arr[i].value == klass && arr[i].intersects (glyphs))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
@ -2279,43 +2312,44 @@ struct ClassDefFormat2
|
|||
hb_codepoint_t g = HB_SET_VALUE_INVALID;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
if (!hb_set_next (glyphs, &g))
|
||||
break;
|
||||
while (g != HB_SET_VALUE_INVALID && g < rangeRecord[i].first)
|
||||
{
|
||||
intersect_glyphs->add (g);
|
||||
hb_set_next (glyphs, &g);
|
||||
if (!hb_set_next (glyphs, &g))
|
||||
goto done;
|
||||
while (g < rangeRecord[i].first)
|
||||
{
|
||||
intersect_glyphs->add (g);
|
||||
if (!hb_set_next (glyphs, &g))
|
||||
goto done;
|
||||
}
|
||||
g = rangeRecord[i].last;
|
||||
}
|
||||
while (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
|
||||
intersect_glyphs->add (g);
|
||||
while (hb_set_next (glyphs, &g))
|
||||
intersect_glyphs->add (g);
|
||||
done:
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
hb_codepoint_t g = HB_SET_VALUE_INVALID;
|
||||
#if 0
|
||||
/* The following implementation is faster asymptotically, but slower
|
||||
* in practice. */
|
||||
if ((count >> 3) > glyphs->get_population ())
|
||||
{
|
||||
for (hb_codepoint_t g = HB_SET_VALUE_INVALID;
|
||||
hb_set_next (glyphs, &g);)
|
||||
if (rangeRecord.as_array ().bfind (g))
|
||||
intersect_glyphs->add (g);
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
if (rangeRecord[i].value != klass) continue;
|
||||
|
||||
if (g != HB_SET_VALUE_INVALID)
|
||||
{
|
||||
if (g >= rangeRecord[i].first &&
|
||||
g <= rangeRecord[i].last)
|
||||
intersect_glyphs->add (g);
|
||||
if (g > rangeRecord[i].last)
|
||||
continue;
|
||||
}
|
||||
|
||||
g = rangeRecord[i].first - 1;
|
||||
while (hb_set_next (glyphs, &g))
|
||||
{
|
||||
if (g >= rangeRecord[i].first && g <= rangeRecord[i].last)
|
||||
intersect_glyphs->add (g);
|
||||
else if (g > rangeRecord[i].last)
|
||||
break;
|
||||
}
|
||||
unsigned end = rangeRecord[i].last + 1;
|
||||
for (hb_codepoint_t g = rangeRecord[i].first - 1;
|
||||
hb_set_next (glyphs, &g) && g < end;)
|
||||
intersect_glyphs->add (g);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1328,7 +1328,7 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
bool has_pos_glyphs = false;
hb_set_t pos_glyphs;

if (hb_set_is_empty (covered_seq_indicies) || !hb_set_has (covered_seq_indicies, seqIndex))
if (!hb_set_has (covered_seq_indicies, seqIndex))
{
  has_pos_glyphs = true;
  if (seqIndex == 0)

@ -1361,7 +1361,7 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,

covered_seq_indicies->add (seqIndex);
if (has_pos_glyphs) {
  c->push_cur_active_glyphs () = pos_glyphs;
  c->push_cur_active_glyphs () = std::move (pos_glyphs);
} else {
  c->push_cur_active_glyphs ().set (*c->glyphs);
}
@ -606,7 +606,7 @@ static const uint8_t use_table[] = {
/* 10A00 */ B, VBlw, VBlw, VBlw, WJ, VAbv, VBlw, WJ, WJ, WJ, WJ, WJ, VPst, VMBlw, VMBlw, VMAbv,
/* 10A10 */ B, B, B, B, WJ, B, B, B, WJ, B, B, B, B, B, B, B,
/* 10A20 */ B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,
/* 10A30 */ B, B, B, B, B, B, WJ, WJ, CMAbv, CMBlw, CMBlw, WJ, WJ, WJ, WJ, IS,
/* 10A30 */ B, B, B, B, B, B, WJ, WJ, CMBlw, CMBlw, CMBlw, WJ, WJ, WJ, WJ, IS,
/* 10A40 */ B, B, B, B, B, B, B, B, B, WJ, WJ, WJ, WJ, WJ, WJ, WJ,

#define use_offset_0x10ac0u 4304
3559
thirdparty/harfbuzz/src/hb-ot-tag-table.hh
vendored
File diff suppressed because it is too large
107
thirdparty/harfbuzz/src/hb-ot-tag.cc
vendored
@ -189,48 +189,46 @@ hb_ot_tag_to_script (hb_tag_t tag)
|
|||
|
||||
/* hb_language_t */
|
||||
|
||||
static bool
|
||||
static inline bool
|
||||
subtag_matches (const char *lang_str,
|
||||
const char *limit,
|
||||
const char *subtag)
|
||||
const char *subtag,
|
||||
unsigned subtag_len)
|
||||
{
|
||||
if (likely ((unsigned) (limit - lang_str) < subtag_len))
|
||||
return false;
|
||||
|
||||
do {
|
||||
const char *s = strstr (lang_str, subtag);
|
||||
if (!s || s >= limit)
|
||||
return false;
|
||||
if (!ISALNUM (s[strlen (subtag)]))
|
||||
if (!ISALNUM (s[subtag_len]))
|
||||
return true;
|
||||
lang_str = s + strlen (subtag);
|
||||
lang_str = s + subtag_len;
|
||||
} while (true);
|
||||
}
|
||||
|
||||
static hb_bool_t
|
||||
lang_matches (const char *lang_str, const char *spec)
|
||||
static bool
|
||||
lang_matches (const char *lang_str,
|
||||
const char *limit,
|
||||
const char *spec,
|
||||
unsigned spec_len)
|
||||
{
|
||||
unsigned int len = strlen (spec);
|
||||
if (likely ((unsigned) (limit - lang_str) < spec_len))
|
||||
return false;
|
||||
|
||||
return strncmp (lang_str, spec, len) == 0 &&
|
||||
(lang_str[len] == '\0' || lang_str[len] == '-');
|
||||
return strncmp (lang_str, spec, spec_len) == 0 &&
|
||||
(lang_str[spec_len] == '\0' || lang_str[spec_len] == '-');
|
||||
}
|
||||
|
||||
struct LangTag
|
||||
{
|
||||
char language[4];
|
||||
hb_tag_t language;
|
||||
hb_tag_t tag;
|
||||
|
||||
int cmp (const char *a) const
|
||||
int cmp (hb_tag_t a) const
|
||||
{
|
||||
const char *b = this->language;
|
||||
unsigned int da, db;
|
||||
const char *p;
|
||||
|
||||
p = strchr (a, '-');
|
||||
da = p ? (unsigned int) (p - a) : strlen (a);
|
||||
|
||||
p = strchr (b, '-');
|
||||
db = p ? (unsigned int) (p - b) : strlen (b);
|
||||
|
||||
return strncmp (a, b, hb_max (da, db));
|
||||
return a < this->language ? -1 : a > this->language ? +1 : 0;
|
||||
}
|
||||
int cmp (const LangTag *that) const
|
||||
{ return cmp (that->language); }
|
||||
|
@ -266,7 +264,6 @@ hb_ot_tags_from_language (const char *lang_str,
|
|||
hb_tag_t *tags)
|
||||
{
|
||||
const char *s;
|
||||
unsigned int tag_idx;
|
||||
|
||||
/* Check for matches of multiple subtags. */
|
||||
if (hb_ot_tags_from_complex_language (lang_str, limit, count, tags))
|
||||
|
@ -283,17 +280,39 @@ hb_ot_tags_from_language (const char *lang_str,
|
|||
ISALPHA (s[1]))
|
||||
lang_str = s + 1;
|
||||
}
|
||||
if (hb_sorted_array (ot_languages).bfind (lang_str, &tag_idx))
|
||||
const LangTag *ot_languages = nullptr;
|
||||
unsigned ot_languages_len = 0;
|
||||
const char *dash = strchr (lang_str, '-');
|
||||
unsigned first_len = dash ? dash - lang_str : limit - lang_str;
|
||||
if (first_len == 2)
|
||||
{
|
||||
ot_languages = ot_languages2;
|
||||
ot_languages_len = ARRAY_LENGTH (ot_languages2);
|
||||
}
|
||||
else if (first_len == 3)
|
||||
{
|
||||
ot_languages = ot_languages3;
|
||||
ot_languages_len = ARRAY_LENGTH (ot_languages3);
|
||||
}
|
||||
|
||||
hb_tag_t lang_tag = hb_tag_from_string (lang_str, first_len);
|
||||
|
||||
static unsigned last_tag_idx; /* Poor man's cache. */
|
||||
unsigned tag_idx = last_tag_idx;
|
||||
|
||||
if (likely (tag_idx < ot_languages_len && ot_languages[tag_idx].language == lang_tag) ||
|
||||
hb_sorted_array (ot_languages, ot_languages_len).bfind (lang_tag, &tag_idx))
|
||||
{
|
||||
last_tag_idx = tag_idx;
|
||||
unsigned int i;
|
||||
while (tag_idx != 0 &&
|
||||
0 == strcmp (ot_languages[tag_idx].language, ot_languages[tag_idx - 1].language))
|
||||
ot_languages[tag_idx].language == ot_languages[tag_idx - 1].language)
|
||||
tag_idx--;
|
||||
for (i = 0;
|
||||
i < *count &&
|
||||
tag_idx + i < ARRAY_LENGTH (ot_languages) &&
|
||||
tag_idx + i < ot_languages_len &&
|
||||
ot_languages[tag_idx + i].tag != HB_TAG_NONE &&
|
||||
0 == strcmp (ot_languages[tag_idx + i].language, ot_languages[tag_idx].language);
|
||||
ot_languages[tag_idx + i].language == ot_languages[tag_idx].language;
|
||||
i++)
|
||||
tags[i] = ot_languages[tag_idx + i].tag;
|
||||
*count = i;
|
||||
|
@ -459,9 +478,19 @@ hb_ot_tag_to_language (hb_tag_t tag)
|
|||
return disambiguated_tag;
|
||||
}
|
||||
|
||||
for (i = 0; i < ARRAY_LENGTH (ot_languages); i++)
|
||||
if (ot_languages[i].tag == tag)
|
||||
return hb_language_from_string (ot_languages[i].language, -1);
|
||||
char buf[4];
|
||||
for (i = 0; i < ARRAY_LENGTH (ot_languages2); i++)
|
||||
if (ot_languages2[i].tag == tag)
|
||||
{
|
||||
hb_tag_to_string (ot_languages2[i].language, buf);
|
||||
return hb_language_from_string (buf, 2);
|
||||
}
|
||||
for (i = 0; i < ARRAY_LENGTH (ot_languages3); i++)
|
||||
if (ot_languages3[i].tag == tag)
|
||||
{
|
||||
hb_tag_to_string (ot_languages3[i].language, buf);
|
||||
return hb_language_from_string (buf, 3);
|
||||
}
|
||||
|
||||
/* Return a custom language in the form of "x-hbot-AABBCCDD".
|
||||
* If it's three letters long, also guess it's ISO 639-3 and lower-case and
|
||||
|
@ -557,13 +586,23 @@ hb_ot_tags_to_script_and_language (hb_tag_t script_tag,
|
|||
static inline void
|
||||
test_langs_sorted ()
|
||||
{
|
||||
for (unsigned int i = 1; i < ARRAY_LENGTH (ot_languages); i++)
|
||||
for (unsigned int i = 1; i < ARRAY_LENGTH (ot_languages2); i++)
|
||||
{
|
||||
int c = ot_languages[i].cmp (&ot_languages[i - 1]);
|
||||
int c = ot_languages2[i].cmp (&ot_languages2[i - 1]);
|
||||
if (c > 0)
|
||||
{
|
||||
fprintf (stderr, "ot_languages not sorted at index %d: %s %d %s\n",
|
||||
i, ot_languages[i-1].language, c, ot_languages[i].language);
|
||||
fprintf (stderr, "ot_languages2 not sorted at index %d: %08x %d %08x\n",
|
||||
i, ot_languages2[i-1].language, c, ot_languages2[i].language);
|
||||
abort();
|
||||
}
|
||||
}
|
||||
for (unsigned int i = 1; i < ARRAY_LENGTH (ot_languages3); i++)
|
||||
{
|
||||
int c = ot_languages3[i].cmp (&ot_languages3[i - 1]);
|
||||
if (c > 0)
|
||||
{
|
||||
fprintf (stderr, "ot_languages3 not sorted at index %d: %08x %d %08x\n",
|
||||
i, ot_languages3[i-1].language, c, ot_languages3[i].language);
|
||||
abort();
|
||||
}
|
||||
}
|
||||
|
|
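The hb-ot-tag.cc change above replaces the single string-keyed ot_languages table with two tag-keyed tables (ot_languages2 and ot_languages3, keyed by 2- and 3-letter primary subtags), so each lookup becomes one integer comparison plus a one-entry "poor man's cache". A minimal stand-alone sketch of the idea, in plain C++ with made-up table contents and helper names (not HarfBuzz's actual API):

// Pack the primary subtag into a 32-bit tag so a sorted table can be searched
// with a single integer comparison instead of strncmp.
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <cstring>

static uint32_t make_tag (const char *s, unsigned len)
{
  char c[4] = {' ', ' ', ' ', ' '};            // space-padded, like HB_TAG
  std::memcpy (c, s, len < 4 ? len : 4);
  return (uint32_t (uint8_t (c[0])) << 24) |
         (uint32_t (uint8_t (c[1])) << 16) |
         (uint32_t (uint8_t (c[2])) <<  8) |
          uint32_t (uint8_t (c[3]));
}

struct lang_entry { uint32_t language; uint32_t tag; };

// Sorted by 'language'; contents are purely illustrative.
static const lang_entry langs2[] = {
  { make_tag ("de", 2), make_tag ("DEU ", 4) },
  { make_tag ("en", 2), make_tag ("ENG ", 4) },
  { make_tag ("fr", 2), make_tag ("FRA ", 4) },
};

static bool find_tag (uint32_t language, uint32_t *tag)
{
  const lang_entry *end = langs2 + sizeof (langs2) / sizeof (langs2[0]);
  const lang_entry *it = std::lower_bound (langs2, end, language,
    [] (const lang_entry &e, uint32_t l) { return e.language < l; });
  if (it == end || it->language != language) return false;
  *tag = it->tag;
  return true;
}

int main ()
{
  uint32_t tag;
  if (find_tag (make_tag ("en", 2), &tag))
    std::printf ("found tag %08x\n", tag);
}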
16  thirdparty/harfbuzz/src/hb-ot-var-gvar-table.hh (vendored)

@@ -390,13 +390,10 @@ struct gvar
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && (version.major == 1) &&
(glyphCount == c->get_num_glyphs ()) &&
sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
(is_long_offset () ?
c->check_array (get_long_offset_array (), glyphCount+1) :
c->check_array (get_short_offset_array (), glyphCount+1)) &&
c->check_array (((const HBUINT8*)&(this+dataZ)) + get_offset (0),
get_offset (glyphCount) - get_offset (0)));
c->check_array (get_short_offset_array (), glyphCount+1)));
}

/* GlyphVariationData not sanitized here; must be checked while accessing each glyph variation data */

@@ -482,7 +479,9 @@ struct gvar
const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob, hb_codepoint_t glyph) const
{
unsigned start_offset = get_offset (glyph);
unsigned length = get_offset (glyph+1) - start_offset;
unsigned end_offset = get_offset (glyph+1);
if (unlikely (end_offset < start_offset)) return hb_bytes_t ();
unsigned length = end_offset - start_offset;
hb_bytes_t var_data = blob->as_bytes ().sub_array (((unsigned) dataZ) + start_offset, length);
return likely (var_data.length >= GlyphVariationData::min_size) ? var_data : hb_bytes_t ();
}

@@ -490,7 +489,10 @@ struct gvar
bool is_long_offset () const { return flags & 1; }

unsigned get_offset (unsigned i) const
{ return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2; }
{
if (unlikely (i > glyphCount)) return 0;
return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2;
}

const HBUINT32 * get_long_offset_array () const { return (const HBUINT32 *) &offsetZ; }
const HBUINT16 *get_short_offset_array () const { return (const HBUINT16 *) &offsetZ; }

@@ -696,7 +698,7 @@ no_more_gaps:
offsetZ; /* Offsets from the start of the GlyphVariationData array
* to each GlyphVariationData table. */
public:
DEFINE_SIZE_MIN (20);
DEFINE_SIZE_ARRAY (20, offsetZ);
};

struct gvar_accelerator_t : gvar::accelerator_t {
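The gvar hunks above add two cheap guards: get_offset() rejects an out-of-range index, and get_glyph_var_data_bytes() refuses a glyph whose end offset precedes its start offset, so the length subtraction cannot wrap around. A minimal sketch of the same guarded-offset pattern, in plain C++ with hypothetical data:

#include <cstdint>
#include <cstdio>
#include <vector>

struct offsets_view
{
  std::vector<uint32_t> offsets;              // glyphCount + 1 entries

  uint32_t get_offset (unsigned i) const
  {
    if (i >= offsets.size ()) return 0;       // out-of-range index -> harmless 0
    return offsets[i];
  }

  uint32_t data_length (unsigned glyph) const
  {
    uint32_t start = get_offset (glyph);
    uint32_t end   = get_offset (glyph + 1);
    if (end < start) return 0;                // corrupt table: refuse, don't wrap
    return end - start;
  }
};

int main ()
{
  offsets_view v { { 0, 10, 8, 20 } };        // second range is intentionally bad
  std::printf ("%u %u\n", v.data_length (0), v.data_length (1)); // prints "10 0"
}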
32  thirdparty/harfbuzz/src/hb-priority-queue.hh (vendored)

@@ -38,18 +38,11 @@
*/
struct hb_priority_queue_t
{
HB_DELETE_COPY_ASSIGN (hb_priority_queue_t);
hb_priority_queue_t () { init (); }
~hb_priority_queue_t () { fini (); }

private:
typedef hb_pair_t<int64_t, unsigned> item_t;
hb_vector_t<item_t> heap;

public:
void init () { heap.init (); }

void fini () { heap.fini (); }

void reset () { heap.resize (0); }

@@ -58,14 +51,17 @@ struct hb_priority_queue_t
void insert (int64_t priority, unsigned value)
{
heap.push (item_t (priority, value));
if (unlikely (heap.in_error ())) return;
bubble_up (heap.length - 1);
}

item_t pop_minimum ()
{
item_t result = heap[0];
assert (!is_empty ());

heap[0] = heap[heap.length - 1];
item_t result = heap.arrayZ[0];

heap.arrayZ[0] = heap.arrayZ[heap.length - 1];
heap.shrink (heap.length - 1);
bubble_down (0);

@@ -104,6 +100,8 @@ struct hb_priority_queue_t

void bubble_down (unsigned index)
{
assert (index <= heap.length);

unsigned left = left_child (index);
unsigned right = right_child (index);

@@ -113,11 +111,11 @@ struct hb_priority_queue_t
return;

bool has_right = right < heap.length;
if (heap[index].first <= heap[left].first
&& (!has_right || heap[index].first <= heap[right].first))
if (heap.arrayZ[index].first <= heap.arrayZ[left].first
&& (!has_right || heap[index].first <= heap.arrayZ[right].first))
return;

if (!has_right || heap[left].first < heap[right].first)
if (!has_right || heap.arrayZ[left].first < heap.arrayZ[right].first)
{
swap (index, left);
bubble_down (left);

@@ -130,10 +128,12 @@ struct hb_priority_queue_t

void bubble_up (unsigned index)
{
assert (index <= heap.length);

if (index == 0) return;

unsigned parent_index = parent (index);
if (heap[parent_index].first <= heap[index].first)
if (heap.arrayZ[parent_index].first <= heap.arrayZ[index].first)
return;

swap (index, parent_index);

@@ -142,9 +142,9 @@ struct hb_priority_queue_t

void swap (unsigned a, unsigned b)
{
item_t temp = heap[a];
heap[a] = heap[b];
heap[b] = temp;
assert (a <= heap.length);
assert (b <= heap.length);
hb_swap (heap.arrayZ[a], heap.arrayZ[b]);
}
};
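bubble_up()/bubble_down() above assume the usual array-backed binary-heap layout; the parent/left_child/right_child helpers sit outside these hunks. As a reminder, a sketch of the index arithmetic they rely on, assuming a 0-based array as hb_vector_t uses:

#include <cassert>

static unsigned parent_of (unsigned i)      { assert (i > 0); return (i - 1) / 2; }
static unsigned left_child_of (unsigned i)  { return 2 * i + 1; }
static unsigned right_child_of (unsigned i) { return 2 * i + 2; }

int main ()
{
  // For index 3: parent is 1, children are 7 and 8.
  assert (parent_of (3) == 1);
  assert (left_child_of (3) == 7 && right_child_of (3) == 8);
  return 0;
}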
82  thirdparty/harfbuzz/src/hb-repacker.hh (vendored)

@@ -49,6 +49,17 @@ struct graph_t
unsigned end = 0;
unsigned priority = 0;

friend void swap (vertex_t& a, vertex_t& b)
{
hb_swap (a.obj, b.obj);
hb_swap (a.distance, b.distance);
hb_swap (a.space, b.space);
hb_swap (a.parents, b.parents);
hb_swap (a.start, b.start);
hb_swap (a.end, b.end);
hb_swap (a.priority, b.priority);
}

bool is_shared () const
{
return parents.length > 1;

@@ -148,6 +159,8 @@ struct graph_t
{
num_roots_for_space_.push (1);
bool removed_nil = false;
vertices_.alloc (objects.length);
vertices_scratch_.alloc (objects.length);
for (unsigned i = 0; i < objects.length; i++)
{
// TODO(grieger): check all links point to valid objects.

@@ -246,59 +259,6 @@ struct graph_t
return c.copy_blob ();
}

/*
* Generates a new topological sorting of graph using Kahn's
* algorithm: https://en.wikipedia.org/wiki/Topological_sorting#Algorithms
*/
void sort_kahn ()
{
positions_invalid = true;

if (vertices_.length <= 1) {
// Graph of 1 or less doesn't need sorting.
return;
}

hb_vector_t<unsigned> queue;
hb_vector_t<vertex_t> sorted_graph;
if (unlikely (!check_success (sorted_graph.resize (vertices_.length)))) return;
hb_vector_t<unsigned> id_map;
if (unlikely (!check_success (id_map.resize (vertices_.length)))) return;

hb_vector_t<unsigned> removed_edges;
if (unlikely (!check_success (removed_edges.resize (vertices_.length)))) return;
update_parents ();

queue.push (root_idx ());
int new_id = vertices_.length - 1;

while (!queue.in_error () && queue.length)
{
unsigned next_id = queue[0];
queue.remove (0);

vertex_t& next = vertices_[next_id];
sorted_graph[new_id] = next;
id_map[next_id] = new_id--;

for (const auto& link : next.obj.all_links ()) {
removed_edges[link.objidx]++;
if (!(vertices_[link.objidx].incoming_edges () - removed_edges[link.objidx]))
queue.push (link.objidx);
}
}

check_success (!queue.in_error ());
check_success (!sorted_graph.in_error ());
if (!check_success (new_id == -1))
print_orphaned_nodes ();

remap_all_obj_indices (id_map, &sorted_graph);

hb_swap (vertices_, sorted_graph);
sorted_graph.fini ();
}

/*
* Generates a new topological sorting of graph ordered by the shortest
* distance to each node.

@@ -315,7 +275,7 @@ struct graph_t
update_distances ();

hb_priority_queue_t queue;
hb_vector_t<vertex_t> sorted_graph;
hb_vector_t<vertex_t> &sorted_graph = vertices_scratch_;
if (unlikely (!check_success (sorted_graph.resize (vertices_.length)))) return;
hb_vector_t<unsigned> id_map;
if (unlikely (!check_success (id_map.resize (vertices_.length)))) return;

@@ -331,8 +291,9 @@ struct graph_t
{
unsigned next_id = queue.pop_minimum().second;

vertex_t& next = vertices_[next_id];
sorted_graph[new_id] = next;
hb_swap (sorted_graph[new_id], vertices_[next_id]);
const vertex_t& next = sorted_graph[new_id];

id_map[next_id] = new_id--;

for (const auto& link : next.obj.all_links ()) {

@@ -356,7 +317,6 @@ struct graph_t
remap_all_obj_indices (id_map, &sorted_graph);

hb_swap (vertices_, sorted_graph);
sorted_graph.fini ();
}

/*

@@ -568,12 +528,10 @@ struct graph_t
// The last object is the root of the graph, so swap back the root to the end.
// The root's obj idx does change, however since it's root nothing else refers to it.
// all other obj idx's will be unaffected.
vertex_t root = vertices_[vertices_.length - 2];
vertices_[clone_idx] = *clone;
vertices_[vertices_.length - 1] = root;
hb_swap (vertices_[vertices_.length - 2], *clone);

// Since the root moved, update the parents arrays of all children on the root.
for (const auto& l : root.obj.all_links ())
for (const auto& l : root ().obj.all_links ())
vertices_[l.objidx].remap_parent (root_idx () - 1, root_idx ());

return clone_idx;

@@ -1090,6 +1048,7 @@ struct graph_t
public:
// TODO(garretrieger): make private, will need to move most of offset overflow code into graph.
hb_vector_t<vertex_t> vertices_;
hb_vector_t<vertex_t> vertices_scratch_;
private:
bool parents_invalid;
bool distance_invalid;

@@ -1217,7 +1176,6 @@ hb_resolve_overflows (const T& packed,
// Kahn sort is ~twice as fast as shortest distance sort and works for many fonts
// so try it first to save time.
graph_t sorted_graph (packed);
sorted_graph.sort_kahn ();
if (!sorted_graph.will_overflow ())
{
return sorted_graph.serialize ();
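The large removal above drops sort_kahn(), which, per its own comment, implemented Kahn's topological sort; the remaining shortest-distance sort now reuses a member scratch vector and moves vertices with hb_swap instead of copying them. For reference, a generic sketch of Kahn's algorithm in plain C++ (standard containers, not HarfBuzz's graph_t):

#include <cstdio>
#include <queue>
#include <vector>

// adj[u] lists the nodes u points to; returns nodes in topological order.
static std::vector<unsigned> kahn_sort (const std::vector<std::vector<unsigned>> &adj)
{
  std::vector<unsigned> in_degree (adj.size (), 0), order;
  for (const auto &links : adj)
    for (unsigned v : links) in_degree[v]++;

  std::queue<unsigned> ready;                 // nodes with no remaining parents
  for (unsigned v = 0; v < adj.size (); v++)
    if (!in_degree[v]) ready.push (v);

  while (!ready.empty ())
  {
    unsigned u = ready.front (); ready.pop ();
    order.push_back (u);
    for (unsigned v : adj[u])
      if (!--in_degree[v]) ready.push (v);    // last incoming edge removed
  }
  return order;                               // shorter than adj.size() iff there is a cycle
}

int main ()
{
  // 0 -> 1 -> 2 and 0 -> 2
  std::vector<std::vector<unsigned>> adj = { {1, 2}, {2}, {} };
  for (unsigned v : kahn_sort (adj)) std::printf ("%u ", v);   // 0 1 2
  std::printf ("\n");
}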
17  thirdparty/harfbuzz/src/hb-serialize.hh (vendored)

@@ -74,7 +74,7 @@ struct hb_serialize_context_t
}

object_t () = default;

#ifdef HB_EXPERIMENTAL_API
object_t (const hb_object_t &o)
{

@@ -91,6 +91,15 @@ struct hb_serialize_context_t
}
#endif

friend void swap (object_t& a, object_t& b)
{
hb_swap (a.head, b.head);
hb_swap (a.tail, b.tail);
hb_swap (a.next, b.next);
hb_swap (a.real_links, b.real_links);
hb_swap (a.virtual_links, b.virtual_links);
}

bool operator == (const object_t &o) const
{
// Virtual links aren't considered for equality since they don't affect the functionality

@@ -111,10 +120,10 @@ struct hb_serialize_context_t
struct link_t
{
unsigned width: 3;
bool is_signed: 1;
unsigned is_signed: 1;
unsigned whence: 2;
unsigned position: 28;
unsigned bias;
unsigned bias : 26;
unsigned position;
objidx_t objidx;

link_t () = default;
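The link_t change above is a bit-field repack: is_signed becomes an unsigned bit-field and bias shrinks to 26 bits, so width (3) + is_signed (1) + whence (2) + bias (26) fill exactly one 32-bit unit, while position is promoted to a plain unsigned and loses its previous 28-bit cap. A small sketch of that arithmetic (field layout is implementation-defined; the assertion below is only expected to hold on the common ABIs, and the struct name is made up):

struct link_demo
{
  unsigned width     : 3;
  unsigned is_signed : 1;
  unsigned whence    : 2;
  unsigned bias      : 26;   // 3 + 1 + 2 + 26 == 32 bits in one allocation unit
  unsigned position;         // full range, previously a 28-bit field
  unsigned objidx;
};

static_assert (sizeof (link_demo) == 3 * sizeof (unsigned),
               "bit-fields share one 32-bit word on common ABIs");

int main () { return 0; }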
26  thirdparty/harfbuzz/src/hb-set.hh (vendored)

@@ -43,8 +43,8 @@ struct hb_sparseset_t

hb_sparseset_t (const hb_sparseset_t& other) : hb_sparseset_t () { set (other); }
hb_sparseset_t (hb_sparseset_t&& other) : hb_sparseset_t () { s = std::move (other.s); }
hb_sparseset_t& operator= (const hb_sparseset_t& other) { set (other); return *this; }
hb_sparseset_t& operator= (hb_sparseset_t&& other) { hb_swap (*this, other); return *this; }
hb_sparseset_t& operator = (const hb_sparseset_t& other) { set (other); return *this; }
hb_sparseset_t& operator = (hb_sparseset_t&& other) { s = std::move (other.s); return *this; }
friend void swap (hb_sparseset_t& a, hb_sparseset_t& b) { hb_swap (a.s, b.s); }

hb_sparseset_t (std::initializer_list<hb_codepoint_t> lst) : hb_sparseset_t ()

@@ -53,7 +53,7 @@ struct hb_sparseset_t
add (item);
}
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
hb_requires (hb_is_iterable (Iterable))>
hb_sparseset_t (const Iterable &o) : hb_sparseset_t ()
{
hb_copy (o, *this);

@@ -77,10 +77,12 @@ struct hb_sparseset_t
void err () { s.err (); }
bool in_error () const { return s.in_error (); }

void alloc (unsigned sz) { s.alloc (sz); }
void reset () { s.reset (); }
void clear () { s.clear (); }
void invert () { s.invert (); }
bool is_empty () const { return s.is_empty (); }
uint32_t hash () const { return s.hash (); }

void add (hb_codepoint_t g) { s.add (g); }
bool add_range (hb_codepoint_t a, hb_codepoint_t b) { return s.add_range (a, b); }

@@ -125,6 +127,8 @@ struct hb_sparseset_t
void set (const hb_sparseset_t &other) { s.set (other.s); }

bool is_equal (const hb_sparseset_t &other) const { return s.is_equal (other.s); }
bool operator == (const hb_set_t &other) const { return is_equal (other); }
bool operator != (const hb_set_t &other) const { return !is_equal (other); }

bool is_subset (const hb_sparseset_t &larger_set) const { return s.is_subset (larger_set.s); }

@@ -158,15 +162,19 @@ struct hb_sparseset_t

struct hb_set_t : hb_sparseset_t<hb_bit_set_invertible_t>
{
hb_set_t () = default;
using sparseset = hb_sparseset_t<hb_bit_set_invertible_t>;

~hb_set_t () = default;
hb_set_t (hb_set_t&) = default;
hb_set_t& operator= (const hb_set_t&) = default;
hb_set_t& operator= (hb_set_t&&) = default;
hb_set_t (std::initializer_list<hb_codepoint_t> lst) : hb_sparseset_t<hb_bit_set_invertible_t> (lst) {}
hb_set_t () : sparseset () {};
hb_set_t (std::nullptr_t) : hb_set_t () {};
hb_set_t (const hb_set_t &o) : sparseset ((sparseset &) o) {};
hb_set_t (hb_set_t&& o) : sparseset (std::move ((sparseset &) o)) {}
hb_set_t& operator = (const hb_set_t&) = default;
hb_set_t& operator = (hb_set_t&&) = default;
hb_set_t (std::initializer_list<hb_codepoint_t> lst) : sparseset (lst) {}
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
hb_set_t (const Iterable &o) : hb_sparseset_t<hb_bit_set_invertible_t> (o) {}
hb_set_t (const Iterable &o) : sparseset (o) {}
};

static_assert (hb_set_t::INVALID == HB_SET_VALUE_INVALID, "");
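The hb_sparseset_t hunk above switches the move-assignment from a hb_swap-based implementation to a direct member move, while keeping a friend swap() that generic code can find via argument-dependent lookup; hb_set_t in turn spells out its special members instead of defaulting them. A small stand-alone illustration of those two idioms, in plain C++ with a hypothetical wrapper type:

#include <utility>
#include <vector>

struct tiny_set
{
  std::vector<unsigned> s;   // stand-in for the underlying bit set

  tiny_set () = default;
  tiny_set (const tiny_set &other) = default;
  tiny_set (tiny_set &&other) : tiny_set () { s = std::move (other.s); }

  // Member-wise move assignment, as the new code does:
  tiny_set &operator= (tiny_set &&other) { s = std::move (other.s); return *this; }

  // ADL-visible swap, so hb_swap-style generic helpers can find it:
  friend void swap (tiny_set &a, tiny_set &b) { std::swap (a.s, b.s); }
};

int main ()
{
  tiny_set a, b;
  a.s = {1, 2, 3};
  b = std::move (a);          // member move: no temporary set is constructed
  swap (a, b);                // found via ADL
  return a.s.size () == 3 ? 0 : 1;
}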
100
thirdparty/harfbuzz/src/hb-subset-cff-common.hh
vendored
100
thirdparty/harfbuzz/src/hb-subset-cff-common.hh
vendored
|
@ -40,7 +40,7 @@ struct str_encoder_t
|
|||
str_encoder_t (str_buff_t &buff_)
|
||||
: buff (buff_), error (false) {}
|
||||
|
||||
void reset () { buff.resize (0); }
|
||||
void reset () { buff.reset (); }
|
||||
|
||||
void encode_byte (unsigned char b)
|
||||
{
|
||||
|
@ -107,20 +107,18 @@ struct str_encoder_t
|
|||
encode_byte (op);
|
||||
}
|
||||
|
||||
void copy_str (const byte_str_t &str)
|
||||
void copy_str (const hb_ubytes_t &str)
|
||||
{
|
||||
unsigned int offset = buff.length;
|
||||
if (unlikely (!buff.resize (offset + str.length)))
|
||||
/* Manually resize buffer since faster. */
|
||||
if ((signed) (buff.length + str.length) <= buff.allocated)
|
||||
buff.length += str.length;
|
||||
else if (unlikely (!buff.resize (offset + str.length)))
|
||||
{
|
||||
set_error ();
|
||||
return;
|
||||
}
|
||||
if (unlikely (buff.length < offset + str.length))
|
||||
{
|
||||
set_error ();
|
||||
return;
|
||||
}
|
||||
memcpy (&buff[offset], &str[0], str.length);
|
||||
memcpy (buff.arrayZ + offset, &str[0], str.length);
|
||||
}
|
||||
|
||||
bool is_error () const { return error; }
|
||||
|
@ -253,12 +251,12 @@ struct subr_flattener_t
|
|||
if (endchar_op != OpCode_Invalid) flat_charstrings[i].push (endchar_op);
|
||||
continue;
|
||||
}
|
||||
const byte_str_t str = (*acc.charStrings)[glyph];
|
||||
const hb_ubytes_t str = (*acc.charStrings)[glyph];
|
||||
unsigned int fd = acc.fdSelect->get_fd (glyph);
|
||||
if (unlikely (fd >= acc.fdCount))
|
||||
return false;
|
||||
cs_interpreter_t<ENV, OPSET, flatten_param_t> interp;
|
||||
interp.env.init (str, acc, fd);
|
||||
ENV env (str, acc, fd);
|
||||
cs_interpreter_t<ENV, OPSET, flatten_param_t> interp (env);
|
||||
flatten_param_t param = {
|
||||
flat_charstrings[i],
|
||||
(bool) (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
|
||||
|
@ -317,9 +315,9 @@ struct parsed_cs_op_t : op_str_t
|
|||
unsigned int subr_num;
|
||||
|
||||
protected:
|
||||
bool drop_flag : 1;
|
||||
bool keep_flag : 1;
|
||||
bool skip_flag : 1;
|
||||
bool drop_flag;
|
||||
bool keep_flag;
|
||||
bool skip_flag;
|
||||
};
|
||||
|
||||
struct parsed_cs_str_t : parsed_values_t<parsed_cs_op_t>
|
||||
|
@ -398,19 +396,19 @@ struct parsed_cs_str_vec_t : hb_vector_t<parsed_cs_str_t>
|
|||
|
||||
struct subr_subset_param_t
|
||||
{
|
||||
void init (parsed_cs_str_t *parsed_charstring_,
|
||||
parsed_cs_str_vec_t *parsed_global_subrs_, parsed_cs_str_vec_t *parsed_local_subrs_,
|
||||
hb_set_t *global_closure_, hb_set_t *local_closure_,
|
||||
bool drop_hints_)
|
||||
{
|
||||
parsed_charstring = parsed_charstring_;
|
||||
current_parsed_str = parsed_charstring;
|
||||
parsed_global_subrs = parsed_global_subrs_;
|
||||
parsed_local_subrs = parsed_local_subrs_;
|
||||
global_closure = global_closure_;
|
||||
local_closure = local_closure_;
|
||||
drop_hints = drop_hints_;
|
||||
}
|
||||
subr_subset_param_t (parsed_cs_str_t *parsed_charstring_,
|
||||
parsed_cs_str_vec_t *parsed_global_subrs_,
|
||||
parsed_cs_str_vec_t *parsed_local_subrs_,
|
||||
hb_set_t *global_closure_,
|
||||
hb_set_t *local_closure_,
|
||||
bool drop_hints_) :
|
||||
current_parsed_str (parsed_charstring_),
|
||||
parsed_charstring (parsed_charstring_),
|
||||
parsed_global_subrs (parsed_global_subrs_),
|
||||
parsed_local_subrs (parsed_local_subrs_),
|
||||
global_closure (global_closure_),
|
||||
local_closure (local_closure_),
|
||||
drop_hints (drop_hints_) {}
|
||||
|
||||
parsed_cs_str_t *get_parsed_str_for_context (call_context_t &context)
|
||||
{
|
||||
|
@ -468,6 +466,7 @@ struct subr_remap_t : hb_inc_bimap_t
|
|||
* no optimization based on usage counts. fonttools doesn't appear doing that either.
|
||||
*/
|
||||
|
||||
resize (closure->get_population ());
|
||||
hb_codepoint_t old_num = HB_SET_VALUE_INVALID;
|
||||
while (hb_set_next (closure, &old_num))
|
||||
add (old_num);
|
||||
|
@ -561,19 +560,21 @@ struct subr_subsetter_t
|
|||
hb_codepoint_t glyph;
|
||||
if (!plan->old_gid_for_new_gid (i, &glyph))
|
||||
continue;
|
||||
const byte_str_t str = (*acc.charStrings)[glyph];
|
||||
const hb_ubytes_t str = (*acc.charStrings)[glyph];
|
||||
unsigned int fd = acc.fdSelect->get_fd (glyph);
|
||||
if (unlikely (fd >= acc.fdCount))
|
||||
return false;
|
||||
|
||||
cs_interpreter_t<ENV, OPSET, subr_subset_param_t> interp;
|
||||
interp.env.init (str, acc, fd);
|
||||
ENV env (str, acc, fd);
|
||||
cs_interpreter_t<ENV, OPSET, subr_subset_param_t> interp (env);
|
||||
|
||||
subr_subset_param_t param;
|
||||
param.init (&parsed_charstrings[i],
|
||||
&parsed_global_subrs, &parsed_local_subrs[fd],
|
||||
&closures.global_closure, &closures.local_closures[fd],
|
||||
plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
|
||||
parsed_charstrings[i].alloc (str.length);
|
||||
subr_subset_param_t param (&parsed_charstrings[i],
|
||||
&parsed_global_subrs,
|
||||
&parsed_local_subrs[fd],
|
||||
&closures.global_closure,
|
||||
&closures.local_closures[fd],
|
||||
plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
|
||||
|
||||
if (unlikely (!interp.interpret (param)))
|
||||
return false;
|
||||
|
@ -593,11 +594,12 @@ struct subr_subsetter_t
|
|||
unsigned int fd = acc.fdSelect->get_fd (glyph);
|
||||
if (unlikely (fd >= acc.fdCount))
|
||||
return false;
|
||||
subr_subset_param_t param;
|
||||
param.init (&parsed_charstrings[i],
|
||||
&parsed_global_subrs, &parsed_local_subrs[fd],
|
||||
&closures.global_closure, &closures.local_closures[fd],
|
||||
plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
|
||||
subr_subset_param_t param (&parsed_charstrings[i],
|
||||
&parsed_global_subrs,
|
||||
&parsed_local_subrs[fd],
|
||||
&closures.global_closure,
|
||||
&closures.local_closures[fd],
|
||||
plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
|
||||
|
||||
drop_hints_param_t drop;
|
||||
if (drop_hints_in_str (parsed_charstrings[i], param, drop))
|
||||
|
@ -618,11 +620,12 @@ struct subr_subsetter_t
|
|||
unsigned int fd = acc.fdSelect->get_fd (glyph);
|
||||
if (unlikely (fd >= acc.fdCount))
|
||||
return false;
|
||||
subr_subset_param_t param;
|
||||
param.init (&parsed_charstrings[i],
|
||||
&parsed_global_subrs, &parsed_local_subrs[fd],
|
||||
&closures.global_closure, &closures.local_closures[fd],
|
||||
plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
|
||||
subr_subset_param_t param (&parsed_charstrings[i],
|
||||
&parsed_global_subrs,
|
||||
&parsed_local_subrs[fd],
|
||||
&closures.global_closure,
|
||||
&closures.local_closures[fd],
|
||||
plan->flags & HB_SUBSET_FLAGS_NO_HINTING);
|
||||
collect_subr_refs_in_str (parsed_charstrings[i], param);
|
||||
}
|
||||
}
|
||||
|
@ -849,9 +852,10 @@ struct subr_subsetter_t
|
|||
|
||||
bool encode_str (const parsed_cs_str_t &str, const unsigned int fd, str_buff_t &buff) const
|
||||
{
|
||||
buff.init ();
|
||||
unsigned count = str.get_count ();
|
||||
str_encoder_t encoder (buff);
|
||||
encoder.reset ();
|
||||
buff.alloc (count * 3);
|
||||
/* if a prefix (CFF1 width or CFF2 vsindex) has been removed along with hints,
|
||||
* re-insert it at the beginning of charstreing */
|
||||
if (str.has_prefix () && str.is_hint_dropped ())
|
||||
|
@ -860,7 +864,7 @@ struct subr_subsetter_t
|
|||
if (str.prefix_op () != OpCode_Invalid)
|
||||
encoder.encode_op (str.prefix_op ());
|
||||
}
|
||||
for (unsigned int i = 0; i < str.get_count(); i++)
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
const parsed_cs_op_t &opstr = str.values[i];
|
||||
if (!opstr.for_drop () && !opstr.for_skip ())
|
||||
|
|
22
thirdparty/harfbuzz/src/hb-subset-cff1.cc
vendored
22
thirdparty/harfbuzz/src/hb-subset-cff1.cc
vendored
|
@ -169,7 +169,7 @@ struct cff1_top_dict_op_serializer_t : cff_top_dict_op_serializer_t<cff1_top_dic
|
|||
supp_op.op = op;
|
||||
if ( unlikely (!(opstr.str.length >= opstr.last_arg_offset + 3)))
|
||||
return_trace (false);
|
||||
supp_op.str = byte_str_t (&opstr.str + opstr.last_arg_offset, opstr.str.length - opstr.last_arg_offset);
|
||||
supp_op.str = hb_ubytes_t (&opstr.str + opstr.last_arg_offset, opstr.str.length - opstr.last_arg_offset);
|
||||
return_trace (UnsizedByteStr::serialize_int2 (c, mod.nameSIDs[name_dict_values_t::registry]) &&
|
||||
UnsizedByteStr::serialize_int2 (c, mod.nameSIDs[name_dict_values_t::ordering]) &&
|
||||
copy_opstr (c, supp_op));
|
||||
|
@ -270,13 +270,13 @@ struct range_list_t : hb_vector_t<code_pair_t>
|
|||
/* replace the first glyph ID in the "glyph" field each range with a nLeft value */
|
||||
bool complete (unsigned int last_glyph)
|
||||
{
|
||||
bool two_byte = false;
|
||||
for (unsigned int i = (*this).length; i > 0; i--)
|
||||
bool two_byte = false;
|
||||
unsigned count = this->length;
|
||||
for (unsigned int i = count; i; i--)
|
||||
{
|
||||
code_pair_t &pair = (*this)[i - 1];
|
||||
unsigned int nLeft = last_glyph - pair.glyph - 1;
|
||||
if (nLeft >= 0x100)
|
||||
two_byte = true;
|
||||
code_pair_t &pair = arrayZ[i - 1];
|
||||
unsigned int nLeft = last_glyph - pair.glyph - 1;
|
||||
two_byte |= nLeft >= 0x100;
|
||||
last_glyph = pair.glyph;
|
||||
pair.glyph = nLeft;
|
||||
}
|
||||
|
@ -442,6 +442,9 @@ struct cff_subset_plan {
|
|||
return;
|
||||
}
|
||||
|
||||
bool use_glyph_to_sid_map = plan->num_output_glyphs () > plan->source->get_num_glyphs () / 8.;
|
||||
hb_map_t *glyph_to_sid_map = use_glyph_to_sid_map ? acc.create_glyph_to_sid_map () : nullptr;
|
||||
|
||||
unsigned int glyph;
|
||||
for (glyph = 1; glyph < plan->num_output_glyphs (); glyph++)
|
||||
{
|
||||
|
@ -451,7 +454,7 @@ struct cff_subset_plan {
|
|||
/* Retain the SID for the old missing glyph ID */
|
||||
old_glyph = glyph;
|
||||
}
|
||||
sid = acc.glyph_to_sid (old_glyph);
|
||||
sid = glyph_to_sid_map ? glyph_to_sid_map->get (old_glyph) : acc.glyph_to_sid (old_glyph);
|
||||
|
||||
if (!acc.is_CID ())
|
||||
sid = sidmap.add (sid);
|
||||
|
@ -464,6 +467,9 @@ struct cff_subset_plan {
|
|||
last_sid = sid;
|
||||
}
|
||||
|
||||
if (glyph_to_sid_map)
|
||||
hb_map_destroy (glyph_to_sid_map);
|
||||
|
||||
bool two_byte = subset_charset_ranges.complete (glyph);
|
||||
|
||||
size0 = Charset0::min_size + HBUINT16::static_size * (plan->num_output_glyphs () - 1);
|
||||
|
|
28
thirdparty/harfbuzz/src/hb-subset-cff2.cc
vendored
28
thirdparty/harfbuzz/src/hb-subset-cff2.cc
vendored
|
@ -67,9 +67,9 @@ struct cff2_top_dict_op_serializer_t : cff_top_dict_op_serializer_t<>
|
|||
}
|
||||
};
|
||||
|
||||
struct cff2_cs_opset_flatten_t : cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatten_param_t>
|
||||
struct cff2_cs_opset_flatten_t : cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatten_param_t, blend_arg_t>
|
||||
{
|
||||
static void flush_args_and_op (op_code_t op, cff2_cs_interp_env_t &env, flatten_param_t& param)
|
||||
static void flush_args_and_op (op_code_t op, cff2_cs_interp_env_t<blend_arg_t> &env, flatten_param_t& param)
|
||||
{
|
||||
switch (op)
|
||||
{
|
||||
|
@ -97,7 +97,7 @@ struct cff2_cs_opset_flatten_t : cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatte
|
|||
}
|
||||
}
|
||||
|
||||
static void flush_args (cff2_cs_interp_env_t &env, flatten_param_t& param)
|
||||
static void flush_args (cff2_cs_interp_env_t<blend_arg_t> &env, flatten_param_t& param)
|
||||
{
|
||||
for (unsigned int i = 0; i < env.argStack.get_count ();)
|
||||
{
|
||||
|
@ -122,7 +122,7 @@ struct cff2_cs_opset_flatten_t : cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatte
|
|||
SUPER::flush_args (env, param);
|
||||
}
|
||||
|
||||
static void flatten_blends (const blend_arg_t &arg, unsigned int i, cff2_cs_interp_env_t &env, flatten_param_t& param)
|
||||
static void flatten_blends (const blend_arg_t &arg, unsigned int i, cff2_cs_interp_env_t<blend_arg_t> &env, flatten_param_t& param)
|
||||
{
|
||||
/* flatten the default values */
|
||||
str_encoder_t encoder (param.flatStr);
|
||||
|
@ -149,7 +149,7 @@ struct cff2_cs_opset_flatten_t : cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatte
|
|||
encoder.encode_op (OpCode_blendcs);
|
||||
}
|
||||
|
||||
static void flush_op (op_code_t op, cff2_cs_interp_env_t &env, flatten_param_t& param)
|
||||
static void flush_op (op_code_t op, cff2_cs_interp_env_t<blend_arg_t> &env, flatten_param_t& param)
|
||||
{
|
||||
switch (op)
|
||||
{
|
||||
|
@ -163,13 +163,13 @@ struct cff2_cs_opset_flatten_t : cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatte
|
|||
}
|
||||
|
||||
private:
|
||||
typedef cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatten_param_t> SUPER;
|
||||
typedef cs_opset_t<blend_arg_t, cff2_cs_opset_flatten_t, cff2_cs_opset_flatten_t, cff2_cs_interp_env_t, flatten_param_t> CSOPSET;
|
||||
typedef cff2_cs_opset_t<cff2_cs_opset_flatten_t, flatten_param_t, blend_arg_t> SUPER;
|
||||
typedef cs_opset_t<blend_arg_t, cff2_cs_opset_flatten_t, cff2_cs_opset_flatten_t, cff2_cs_interp_env_t<blend_arg_t>, flatten_param_t> CSOPSET;
|
||||
};
|
||||
|
||||
struct cff2_cs_opset_subr_subset_t : cff2_cs_opset_t<cff2_cs_opset_subr_subset_t, subr_subset_param_t>
|
||||
struct cff2_cs_opset_subr_subset_t : cff2_cs_opset_t<cff2_cs_opset_subr_subset_t, subr_subset_param_t, blend_arg_t>
|
||||
{
|
||||
static void process_op (op_code_t op, cff2_cs_interp_env_t &env, subr_subset_param_t& param)
|
||||
static void process_op (op_code_t op, cff2_cs_interp_env_t<blend_arg_t> &env, subr_subset_param_t& param)
|
||||
{
|
||||
switch (op) {
|
||||
|
||||
|
@ -201,7 +201,7 @@ struct cff2_cs_opset_subr_subset_t : cff2_cs_opset_t<cff2_cs_opset_subr_subset_t
|
|||
|
||||
protected:
|
||||
static void process_call_subr (op_code_t op, cs_type_t type,
|
||||
cff2_cs_interp_env_t &env, subr_subset_param_t& param,
|
||||
cff2_cs_interp_env_t<blend_arg_t> &env, subr_subset_param_t& param,
|
||||
cff2_biased_subrs_t& subrs, hb_set_t *closure)
|
||||
{
|
||||
byte_str_ref_t str_ref = env.str_ref;
|
||||
|
@ -212,15 +212,15 @@ struct cff2_cs_opset_subr_subset_t : cff2_cs_opset_t<cff2_cs_opset_subr_subset_t
|
|||
}
|
||||
|
||||
private:
|
||||
typedef cff2_cs_opset_t<cff2_cs_opset_subr_subset_t, subr_subset_param_t> SUPER;
|
||||
typedef cff2_cs_opset_t<cff2_cs_opset_subr_subset_t, subr_subset_param_t, blend_arg_t> SUPER;
|
||||
};
|
||||
|
||||
struct cff2_subr_subsetter_t : subr_subsetter_t<cff2_subr_subsetter_t, CFF2Subrs, const OT::cff2::accelerator_subset_t, cff2_cs_interp_env_t, cff2_cs_opset_subr_subset_t>
|
||||
struct cff2_subr_subsetter_t : subr_subsetter_t<cff2_subr_subsetter_t, CFF2Subrs, const OT::cff2::accelerator_subset_t, cff2_cs_interp_env_t<blend_arg_t>, cff2_cs_opset_subr_subset_t>
|
||||
{
|
||||
cff2_subr_subsetter_t (const OT::cff2::accelerator_subset_t &acc_, const hb_subset_plan_t *plan_)
|
||||
: subr_subsetter_t (acc_, plan_) {}
|
||||
|
||||
static void complete_parsed_str (cff2_cs_interp_env_t &env, subr_subset_param_t& param, parsed_cs_str_t &charstring)
|
||||
static void complete_parsed_str (cff2_cs_interp_env_t<blend_arg_t> &env, subr_subset_param_t& param, parsed_cs_str_t &charstring)
|
||||
{
|
||||
/* vsindex is inserted at the beginning of the charstring as necessary */
|
||||
if (env.seen_vsindex ())
|
||||
|
@ -245,7 +245,7 @@ struct cff2_subset_plan {
|
|||
if (desubroutinize)
|
||||
{
|
||||
/* Flatten global & local subrs */
|
||||
subr_flattener_t<const OT::cff2::accelerator_subset_t, cff2_cs_interp_env_t, cff2_cs_opset_flatten_t>
|
||||
subr_flattener_t<const OT::cff2::accelerator_subset_t, cff2_cs_interp_env_t<blend_arg_t>, cff2_cs_opset_flatten_t>
|
||||
flattener(acc, plan);
|
||||
if (!flattener.flatten (subset_charstrings))
|
||||
return false;
|
||||
|
|
114
thirdparty/harfbuzz/src/hb-subset-plan.cc
vendored
114
thirdparty/harfbuzz/src/hb-subset-plan.cc
vendored
|
@ -279,12 +279,7 @@ static inline void
|
|||
_remove_invalid_gids (hb_set_t *glyphs,
|
||||
unsigned int num_glyphs)
|
||||
{
|
||||
hb_codepoint_t gid = HB_SET_VALUE_INVALID;
|
||||
while (glyphs->next (&gid))
|
||||
{
|
||||
if (gid >= num_glyphs)
|
||||
glyphs->del (gid);
|
||||
}
|
||||
glyphs->del_range (num_glyphs, HB_SET_VALUE_INVALID);
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -294,12 +289,13 @@ _populate_unicodes_to_retain (const hb_set_t *unicodes,
|
|||
{
|
||||
OT::cmap::accelerator_t cmap (plan->source);
|
||||
|
||||
constexpr static const int size_threshold = 4096;
|
||||
|
||||
unsigned size_threshold = plan->source->get_num_glyphs ();
|
||||
if (glyphs->is_empty () && unicodes->get_population () < size_threshold)
|
||||
{
|
||||
/* This is the fast path if it's anticipated that size of unicodes
|
||||
* is << than the number of codepoints in the font. */
|
||||
// This is approach to collection is faster, but can only be used if glyphs
|
||||
// are not being explicitly added to the subset and the input unicodes set is
|
||||
// not excessively large (eg. an inverted set).
|
||||
plan->unicode_to_new_gid_list.alloc (unicodes->get_population ());
|
||||
for (hb_codepoint_t cp : *unicodes)
|
||||
{
|
||||
hb_codepoint_t gid;
|
||||
|
@ -310,27 +306,32 @@ _populate_unicodes_to_retain (const hb_set_t *unicodes,
|
|||
}
|
||||
|
||||
plan->codepoint_to_glyph->set (cp, gid);
|
||||
plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// This approach is slower, but can handle adding in glyphs to the subset and will match
|
||||
// them with cmap entries.
|
||||
hb_map_t unicode_glyphid_map;
|
||||
cmap.collect_mapping (hb_set_get_empty (), &unicode_glyphid_map);
|
||||
hb_set_t cmap_unicodes;
|
||||
cmap.collect_mapping (&cmap_unicodes, &unicode_glyphid_map);
|
||||
plan->unicode_to_new_gid_list.alloc (hb_min(unicodes->get_population ()
|
||||
+ glyphs->get_population (),
|
||||
cmap_unicodes.get_population ()));
|
||||
|
||||
for (hb_pair_t<hb_codepoint_t, hb_codepoint_t> cp_gid :
|
||||
+ unicode_glyphid_map.iter ())
|
||||
for (hb_codepoint_t cp : cmap_unicodes)
|
||||
{
|
||||
if (!unicodes->has (cp_gid.first) && !glyphs->has (cp_gid.second))
|
||||
continue;
|
||||
hb_codepoint_t gid = unicode_glyphid_map[cp];
|
||||
if (!unicodes->has (cp) && !glyphs->has (gid))
|
||||
continue;
|
||||
|
||||
plan->codepoint_to_glyph->set (cp_gid.first, cp_gid.second);
|
||||
plan->codepoint_to_glyph->set (cp, gid);
|
||||
plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
|
||||
}
|
||||
|
||||
/* Add gids which where requested, but not mapped in cmap */
|
||||
// TODO(garretrieger):
|
||||
// Once https://github.com/harfbuzz/harfbuzz/issues/3169
|
||||
// is implemented, this can be done with union and del_range
|
||||
for (hb_codepoint_t gid : glyphs->iter ())
|
||||
for (hb_codepoint_t gid : *glyphs)
|
||||
{
|
||||
if (gid >= plan->source->get_num_glyphs ())
|
||||
break;
|
||||
|
@ -338,8 +339,12 @@ _populate_unicodes_to_retain (const hb_set_t *unicodes,
|
|||
}
|
||||
}
|
||||
|
||||
+ plan->codepoint_to_glyph->keys () | hb_sink (plan->unicodes);
|
||||
+ plan->codepoint_to_glyph->values () | hb_sink (plan->_glyphset_gsub);
|
||||
auto &arr = plan->unicode_to_new_gid_list;
|
||||
if (arr.length)
|
||||
{
|
||||
plan->unicodes->add_sorted_array (&arr.arrayZ->first, arr.length, sizeof (*arr.arrayZ));
|
||||
plan->_glyphset_gsub->add_array (&arr.arrayZ->second, arr.length, sizeof (*arr.arrayZ));
|
||||
}
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -388,16 +393,19 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
|
|||
_remove_invalid_gids (&cur_glyphset, plan->source->get_num_glyphs ());
|
||||
|
||||
hb_set_set (plan->_glyphset_colred, &cur_glyphset);
|
||||
// Populate a full set of glyphs to retain by adding all referenced
|
||||
// composite glyphs.
|
||||
for (hb_codepoint_t gid : cur_glyphset.iter ())
|
||||
{
|
||||
glyf.add_gid_and_children (gid, plan->_glyphset);
|
||||
|
||||
/* Populate a full set of glyphs to retain by adding all referenced
|
||||
* composite glyphs. */
|
||||
if (glyf.has_data ())
|
||||
for (hb_codepoint_t gid : cur_glyphset)
|
||||
glyf.add_gid_and_children (gid, plan->_glyphset);
|
||||
else
|
||||
plan->_glyphset->union_ (cur_glyphset);
|
||||
#ifndef HB_NO_SUBSET_CFF
|
||||
if (cff.is_valid ())
|
||||
if (cff.is_valid ())
|
||||
for (hb_codepoint_t gid : cur_glyphset)
|
||||
_add_cff_seac_components (cff, gid, plan->_glyphset);
|
||||
#endif
|
||||
}
|
||||
|
||||
_remove_invalid_gids (plan->_glyphset, plan->source->get_num_glyphs ());
|
||||
|
||||
|
@ -412,6 +420,20 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
|
|||
#endif
|
||||
}
|
||||
|
||||
static void
|
||||
_create_glyph_map_gsub (const hb_set_t* glyph_set_gsub,
|
||||
const hb_map_t* glyph_map,
|
||||
hb_map_t* out)
|
||||
{
|
||||
+ hb_iter (glyph_set_gsub)
|
||||
| hb_map ([&] (hb_codepoint_t gid) {
|
||||
return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid,
|
||||
glyph_map->get (gid));
|
||||
})
|
||||
| hb_sink (out)
|
||||
;
|
||||
}
|
||||
|
||||
static void
|
||||
_create_old_gid_to_new_gid_map (const hb_face_t *face,
|
||||
bool retain_gids,
|
||||
|
@ -420,13 +442,19 @@ _create_old_gid_to_new_gid_map (const hb_face_t *face,
|
|||
hb_map_t *reverse_glyph_map, /* OUT */
|
||||
unsigned int *num_glyphs /* OUT */)
|
||||
{
|
||||
unsigned pop = all_gids_to_retain->get_population ();
|
||||
reverse_glyph_map->resize (pop);
|
||||
glyph_map->resize (pop);
|
||||
|
||||
if (!retain_gids)
|
||||
{
|
||||
+ hb_enumerate (hb_iter (all_gids_to_retain), (hb_codepoint_t) 0)
|
||||
| hb_sink (reverse_glyph_map)
|
||||
;
|
||||
*num_glyphs = reverse_glyph_map->get_population ();
|
||||
} else {
|
||||
}
|
||||
else
|
||||
{
|
||||
+ hb_iter (all_gids_to_retain)
|
||||
| hb_map ([] (hb_codepoint_t _) {
|
||||
return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (_, _);
|
||||
|
@ -434,10 +462,9 @@ _create_old_gid_to_new_gid_map (const hb_face_t *face,
|
|||
| hb_sink (reverse_glyph_map)
|
||||
;
|
||||
|
||||
unsigned max_glyph =
|
||||
+ hb_iter (all_gids_to_retain)
|
||||
| hb_reduce (hb_max, 0u)
|
||||
;
|
||||
hb_codepoint_t max_glyph = HB_SET_VALUE_INVALID;
|
||||
hb_set_previous (all_gids_to_retain, &max_glyph);
|
||||
|
||||
*num_glyphs = max_glyph + 1;
|
||||
}
|
||||
|
||||
|
@ -485,6 +512,9 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
|
|||
plan->successful = true;
|
||||
plan->flags = input->flags;
|
||||
plan->unicodes = hb_set_create ();
|
||||
|
||||
plan->unicode_to_new_gid_list.init ();
|
||||
|
||||
plan->name_ids = hb_set_copy (input->sets.name_ids);
|
||||
_nameid_closure (face, plan->name_ids);
|
||||
plan->name_languages = hb_set_copy (input->sets.name_languages);
|
||||
|
@ -502,6 +532,7 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
|
|||
plan->codepoint_to_glyph = hb_map_create ();
|
||||
plan->glyph_map = hb_map_create ();
|
||||
plan->reverse_glyph_map = hb_map_create ();
|
||||
plan->glyph_map_gsub = hb_map_create ();
|
||||
plan->gsub_lookups = hb_map_create ();
|
||||
plan->gpos_lookups = hb_map_create ();
|
||||
|
||||
|
@ -536,6 +567,19 @@ hb_subset_plan_create_or_fail (hb_face_t *face,
|
|||
plan->reverse_glyph_map,
|
||||
&plan->_num_output_glyphs);
|
||||
|
||||
_create_glyph_map_gsub (
|
||||
plan->_glyphset_gsub,
|
||||
plan->glyph_map,
|
||||
plan->glyph_map_gsub);
|
||||
|
||||
// Now that we have old to new gid map update the unicode to new gid list.
|
||||
for (unsigned i = 0; i < plan->unicode_to_new_gid_list.length; i++)
|
||||
{
|
||||
// Use raw array access for performance.
|
||||
plan->unicode_to_new_gid_list.arrayZ[i].second =
|
||||
plan->glyph_map->get(plan->unicode_to_new_gid_list.arrayZ[i].second);
|
||||
}
|
||||
|
||||
if (unlikely (plan->in_error ())) {
|
||||
hb_subset_plan_destroy (plan);
|
||||
return nullptr;
|
||||
|
@ -558,6 +602,7 @@ hb_subset_plan_destroy (hb_subset_plan_t *plan)
|
|||
if (!hb_object_destroy (plan)) return;
|
||||
|
||||
hb_set_destroy (plan->unicodes);
|
||||
plan->unicode_to_new_gid_list.fini ();
|
||||
hb_set_destroy (plan->name_ids);
|
||||
hb_set_destroy (plan->name_languages);
|
||||
hb_set_destroy (plan->layout_features);
|
||||
|
@ -569,6 +614,7 @@ hb_subset_plan_destroy (hb_subset_plan_t *plan)
|
|||
hb_map_destroy (plan->codepoint_to_glyph);
|
||||
hb_map_destroy (plan->glyph_map);
|
||||
hb_map_destroy (plan->reverse_glyph_map);
|
||||
hb_map_destroy (plan->glyph_map_gsub);
|
||||
hb_set_destroy (plan->_glyphset);
|
||||
hb_set_destroy (plan->_glyphset_gsub);
|
||||
hb_set_destroy (plan->_glyphset_mathed);
|
||||
|
|
2  thirdparty/harfbuzz/src/hb-subset-plan.hh (vendored)

@@ -44,6 +44,7 @@ struct hb_subset_plan_t

// For each cp that we'd like to retain maps to the corresponding gid.
hb_set_t *unicodes;
hb_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> unicode_to_new_gid_list;

// name_ids we would like to retain
hb_set_t *name_ids;

@@ -69,6 +70,7 @@ struct hb_subset_plan_t
// Old -> New glyph id mapping
hb_map_t *glyph_map;
hb_map_t *reverse_glyph_map;
hb_map_t *glyph_map_gsub;

// Plan is only good for a specific source/dest so keep them with it
hb_face_t *source;
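unicode_to_new_gid_list above stores (codepoint, glyph id) pairs; hb-subset-plan.cc fills it in codepoint order (it is later fed to add_sorted_array), which also makes a binary-search lookup over it possible. A minimal sketch of such a lookup over a sorted pair vector, in plain C++ with made-up data:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

using cp_gid = std::pair<uint32_t, uint32_t>;

static bool lookup_gid (const std::vector<cp_gid> &list, uint32_t cp, uint32_t *gid)
{
  auto it = std::lower_bound (list.begin (), list.end (), cp,
    [] (const cp_gid &e, uint32_t c) { return e.first < c; });
  if (it == list.end () || it->first != cp) return false;
  *gid = it->second;
  return true;
}

int main ()
{
  std::vector<cp_gid> list = { {0x41, 1}, {0x42, 2}, {0x416, 7} }; // sorted by codepoint
  uint32_t gid;
  if (lookup_gid (list, 0x42, &gid)) std::printf ("gid %u\n", gid); // gid 2
}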
87
thirdparty/harfbuzz/src/hb-subset.cc
vendored
87
thirdparty/harfbuzz/src/hb-subset.cc
vendored
|
@ -79,12 +79,14 @@ using OT::Layout::GSUB::GSUB;
|
|||
*/
|
||||
|
||||
static unsigned
|
||||
_plan_estimate_subset_table_size (hb_subset_plan_t *plan, unsigned table_len)
|
||||
_plan_estimate_subset_table_size (hb_subset_plan_t *plan,
|
||||
unsigned table_len,
|
||||
bool same_size)
|
||||
{
|
||||
unsigned src_glyphs = plan->source->get_num_glyphs ();
|
||||
unsigned dst_glyphs = plan->glyphset ()->get_population ();
|
||||
|
||||
if (unlikely (!src_glyphs))
|
||||
if (unlikely (!src_glyphs) || same_size)
|
||||
return 512 + table_len;
|
||||
|
||||
return 512 + (unsigned) (table_len * sqrt ((double) dst_glyphs / src_glyphs));
|
||||
|
@ -123,7 +125,6 @@ static
|
|||
bool
|
||||
_try_subset (const TableType *table,
|
||||
hb_vector_t<char>* buf,
|
||||
unsigned buf_size,
|
||||
hb_subset_context_t* c /* OUT */)
|
||||
{
|
||||
c->serializer->start_serialize<TableType> ();
|
||||
|
@ -136,7 +137,8 @@ _try_subset (const TableType *table,
|
|||
return needed;
|
||||
}
|
||||
|
||||
buf_size += (buf_size >> 1) + 32;
|
||||
unsigned buf_size = buf->allocated;
|
||||
buf_size = buf_size * 2 + 16;
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c ran out of room; reallocating to %u bytes.",
|
||||
HB_UNTAG (c->table_tag), buf_size);
|
||||
|
||||
|
@ -147,13 +149,13 @@ _try_subset (const TableType *table,
|
|||
return needed;
|
||||
}
|
||||
|
||||
c->serializer->reset (buf->arrayZ, buf_size);
|
||||
return _try_subset (table, buf, buf_size, c);
|
||||
c->serializer->reset (buf->arrayZ, buf->allocated);
|
||||
return _try_subset (table, buf, c);
|
||||
}
|
||||
|
||||
template<typename TableType>
|
||||
static bool
|
||||
_subset (hb_subset_plan_t *plan)
|
||||
_subset (hb_subset_plan_t *plan, hb_vector_t<char> &buf)
|
||||
{
|
||||
hb_blob_t *source_blob = hb_sanitize_context_t ().reference_table<TableType> (plan->source);
|
||||
const TableType *table = source_blob->as<TableType> ();
|
||||
|
@ -167,10 +169,13 @@ _subset (hb_subset_plan_t *plan)
|
|||
return false;
|
||||
}
|
||||
|
||||
hb_vector_t<char> buf;
|
||||
/* TODO Not all tables are glyph-related. 'name' table size for example should not be
|
||||
* affected by number of glyphs. Accommodate that. */
|
||||
unsigned buf_size = _plan_estimate_subset_table_size (plan, source_blob->length);
|
||||
/* Tables that we want to allocate same space as the source table. For GSUB/GPOS it's
|
||||
* because those are expensive to subset, so giving them more room is fine. */
|
||||
bool same_size_table = TableType::tableTag == HB_OT_TAG_GSUB ||
|
||||
TableType::tableTag == HB_OT_TAG_GPOS ||
|
||||
TableType::tableTag == HB_OT_TAG_name;
|
||||
|
||||
unsigned buf_size = _plan_estimate_subset_table_size (plan, source_blob->length, same_size_table);
|
||||
DEBUG_MSG (SUBSET, nullptr,
|
||||
"OT::%c%c%c%c initial estimated table size: %u bytes.", HB_UNTAG (tag), buf_size);
|
||||
if (unlikely (!buf.alloc (buf_size)))
|
||||
|
@ -181,10 +186,10 @@ _subset (hb_subset_plan_t *plan)
|
|||
}
|
||||
|
||||
bool needed = false;
|
||||
hb_serialize_context_t serializer (buf.arrayZ, buf_size);
|
||||
hb_serialize_context_t serializer (buf.arrayZ, buf.allocated);
|
||||
{
|
||||
hb_subset_context_t c (source_blob, plan, &serializer, tag);
|
||||
needed = _try_subset (table, &buf, buf_size, &c);
|
||||
needed = _try_subset (table, &buf, &c);
|
||||
}
|
||||
hb_blob_destroy (source_blob);
|
||||
|
||||
|
@ -274,7 +279,9 @@ _passthrough (hb_subset_plan_t *plan, hb_tag_t tag)
|
|||
}
|
||||
|
||||
static bool
|
||||
_subset_table (hb_subset_plan_t *plan, hb_tag_t tag)
|
||||
_subset_table (hb_subset_plan_t *plan,
|
||||
hb_vector_t<char> &buf,
|
||||
hb_tag_t tag)
|
||||
{
|
||||
if (plan->no_subset_tables->has (tag)) {
|
||||
return _passthrough (plan, tag);
|
||||
|
@ -283,42 +290,42 @@ _subset_table (hb_subset_plan_t *plan, hb_tag_t tag)
|
|||
DEBUG_MSG (SUBSET, nullptr, "subset %c%c%c%c", HB_UNTAG (tag));
|
||||
switch (tag)
|
||||
{
|
||||
case HB_OT_TAG_glyf: return _subset<const OT::glyf> (plan);
|
||||
case HB_OT_TAG_hdmx: return _subset<const OT::hdmx> (plan);
|
||||
case HB_OT_TAG_name: return _subset<const OT::name> (plan);
|
||||
case HB_OT_TAG_glyf: return _subset<const OT::glyf> (plan, buf);
|
||||
case HB_OT_TAG_hdmx: return _subset<const OT::hdmx> (plan, buf);
|
||||
case HB_OT_TAG_name: return _subset<const OT::name> (plan, buf);
|
||||
case HB_OT_TAG_head:
|
||||
if (_is_table_present (plan->source, HB_OT_TAG_glyf) && !_should_drop_table (plan, HB_OT_TAG_glyf))
|
||||
return true; /* skip head, handled by glyf */
|
||||
return _subset<const OT::head> (plan);
|
||||
return _subset<const OT::head> (plan, buf);
|
||||
case HB_OT_TAG_hhea: return true; /* skip hhea, handled by hmtx */
|
||||
case HB_OT_TAG_hmtx: return _subset<const OT::hmtx> (plan);
|
||||
case HB_OT_TAG_hmtx: return _subset<const OT::hmtx> (plan, buf);
|
||||
case HB_OT_TAG_vhea: return true; /* skip vhea, handled by vmtx */
|
||||
case HB_OT_TAG_vmtx: return _subset<const OT::vmtx> (plan);
|
||||
case HB_OT_TAG_maxp: return _subset<const OT::maxp> (plan);
|
||||
case HB_OT_TAG_sbix: return _subset<const OT::sbix> (plan);
|
||||
case HB_OT_TAG_vmtx: return _subset<const OT::vmtx> (plan, buf);
|
||||
case HB_OT_TAG_maxp: return _subset<const OT::maxp> (plan, buf);
|
||||
case HB_OT_TAG_sbix: return _subset<const OT::sbix> (plan, buf);
|
||||
case HB_OT_TAG_loca: return true; /* skip loca, handled by glyf */
|
||||
case HB_OT_TAG_cmap: return _subset<const OT::cmap> (plan);
|
||||
case HB_OT_TAG_OS2 : return _subset<const OT::OS2 > (plan);
|
||||
case HB_OT_TAG_post: return _subset<const OT::post> (plan);
|
||||
case HB_OT_TAG_COLR: return _subset<const OT::COLR> (plan);
|
||||
case HB_OT_TAG_CPAL: return _subset<const OT::CPAL> (plan);
|
||||
case HB_OT_TAG_CBLC: return _subset<const OT::CBLC> (plan);
|
||||
case HB_OT_TAG_cmap: return _subset<const OT::cmap> (plan, buf);
|
||||
case HB_OT_TAG_OS2 : return _subset<const OT::OS2 > (plan, buf);
|
||||
case HB_OT_TAG_post: return _subset<const OT::post> (plan, buf);
|
||||
case HB_OT_TAG_COLR: return _subset<const OT::COLR> (plan, buf);
|
||||
case HB_OT_TAG_CPAL: return _subset<const OT::CPAL> (plan, buf);
|
||||
case HB_OT_TAG_CBLC: return _subset<const OT::CBLC> (plan, buf);
|
||||
case HB_OT_TAG_CBDT: return true; /* skip CBDT, handled by CBLC */
|
||||
case HB_OT_TAG_MATH: return _subset<const OT::MATH> (plan);
|
||||
case HB_OT_TAG_MATH: return _subset<const OT::MATH> (plan, buf);
|
||||
|
||||
#ifndef HB_NO_SUBSET_CFF
|
||||
case HB_OT_TAG_cff1: return _subset<const OT::cff1> (plan);
|
||||
case HB_OT_TAG_cff2: return _subset<const OT::cff2> (plan);
|
||||
case HB_OT_TAG_VORG: return _subset<const OT::VORG> (plan);
|
||||
case HB_OT_TAG_cff1: return _subset<const OT::cff1> (plan, buf);
|
||||
case HB_OT_TAG_cff2: return _subset<const OT::cff2> (plan, buf);
|
||||
case HB_OT_TAG_VORG: return _subset<const OT::VORG> (plan, buf);
|
||||
#endif
|
||||
|
||||
#ifndef HB_NO_SUBSET_LAYOUT
|
||||
case HB_OT_TAG_GDEF: return _subset<const OT::GDEF> (plan);
|
||||
case HB_OT_TAG_GSUB: return _subset<const GSUB> (plan);
|
||||
case HB_OT_TAG_GPOS: return _subset<const OT::GPOS> (plan);
|
||||
case HB_OT_TAG_gvar: return _subset<const OT::gvar> (plan);
|
||||
case HB_OT_TAG_HVAR: return _subset<const OT::HVAR> (plan);
|
||||
case HB_OT_TAG_VVAR: return _subset<const OT::VVAR> (plan);
|
||||
case HB_OT_TAG_GDEF: return _subset<const OT::GDEF> (plan, buf);
|
||||
case HB_OT_TAG_GSUB: return _subset<const GSUB> (plan, buf);
|
||||
case HB_OT_TAG_GPOS: return _subset<const OT::GPOS> (plan, buf);
|
||||
case HB_OT_TAG_gvar: return _subset<const OT::gvar> (plan, buf);
|
||||
case HB_OT_TAG_HVAR: return _subset<const OT::HVAR> (plan, buf);
|
||||
case HB_OT_TAG_VVAR: return _subset<const OT::VVAR> (plan, buf);
|
||||
#endif
|
||||
|
||||
default:
|
||||
|
@ -379,6 +386,8 @@ hb_subset_plan_execute_or_fail (hb_subset_plan_t *plan)
|
|||
bool success = true;
|
||||
hb_tag_t table_tags[32];
|
||||
unsigned offset = 0, num_tables = ARRAY_LENGTH (table_tags);
|
||||
hb_vector_t<char> buf;
|
||||
buf.alloc (4096 - 16);
|
||||
while ((hb_face_get_table_tags (plan->source, offset, &num_tables, table_tags), num_tables))
|
||||
{
|
||||
for (unsigned i = 0; i < num_tables; ++i)
|
||||
|
@ -386,7 +395,7 @@ hb_subset_plan_execute_or_fail (hb_subset_plan_t *plan)
|
|||
hb_tag_t tag = table_tags[i];
|
||||
if (_should_drop_table (plan, tag) && !tags_set.has (tag)) continue;
|
||||
tags_set.add (tag);
|
||||
success = _subset_table (plan, tag);
|
||||
success = _subset_table (plan, buf, tag);
|
||||
if (unlikely (!success)) goto end;
|
||||
}
|
||||
offset += num_tables;
|
||||
|
|
97
thirdparty/harfbuzz/src/hb-vector.hh
vendored
97
thirdparty/harfbuzz/src/hb-vector.hh
vendored
|
@@ -29,6 +29,7 @@

#include "hb.hh"
#include "hb-array.hh"
#include "hb-meta.hh"
#include "hb-null.hh"


@@ -42,6 +43,7 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  using c_array_t = typename std::conditional<sorted, hb_sorted_array_t<const Type>, hb_array_t<const Type>>::type;

  hb_vector_t () = default;
  hb_vector_t (std::nullptr_t) : hb_vector_t () {}
  hb_vector_t (std::initializer_list<Type> lst) : hb_vector_t ()
  {
    alloc (lst.size ());
@@ -59,7 +61,8 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  hb_vector_t (const hb_vector_t &o) : hb_vector_t ()
  {
    alloc (o.length);
    hb_copy (o, *this);
    if (unlikely (in_error ())) return;
    copy_vector (o);
  }
  hb_vector_t (hb_vector_t &&o)
  {
@@ -70,9 +73,8 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  }
  ~hb_vector_t () { fini (); }

  private:
  int allocated = 0; /* == -1 means allocation failed. */
  public:
  int allocated = 0; /* == -1 means allocation failed. */
  unsigned int length = 0;
  public:
  Type *arrayZ = nullptr;
@@ -108,7 +110,10 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  {
    reset ();
    alloc (o.length);
    hb_copy (o, *this);
    if (unlikely (in_error ())) return *this;

    copy_vector (o);

    return *this;
  }
  hb_vector_t& operator = (hb_vector_t &&o)
@@ -184,12 +189,14 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  {
    if (unlikely (!resize (length + 1)))
      return &Crap (Type);
    return &arrayZ[length - 1];
    return std::addressof (arrayZ[length - 1]);
  }
  template <typename T>
  template <typename T,
            typename T2 = Type,
            hb_enable_if (!std::is_copy_constructible<T2>::value &&
                          std::is_copy_assignable<T>::value)>
  Type *push (T&& v)
  {
    /* TODO Emplace? */
    Type *p = push ();
    if (p == &Crap (Type))
    // If push failed to allocate then don't copy v, since this may cause
@@ -199,18 +206,34 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
    *p = std::forward<T> (v);
    return p;
  }
  template <typename T,
            typename T2 = Type,
            hb_enable_if (std::is_copy_constructible<T2>::value)>
  Type *push (T&& v)
  {
    if (unlikely (!alloc (length + 1)))
      // If push failed to allocate then don't copy v, since this may cause
      // the created copy to leak memory since we won't have stored a
      // reference to it.
      return &Crap (Type);

    /* Emplace. */
    length++;
    Type *p = std::addressof (arrayZ[length - 1]);
    return new (p) Type (std::forward<T> (v));
  }

  bool in_error () const { return allocated < 0; }

  template <typename T = Type,
            hb_enable_if (std::is_trivially_copy_assignable<T>::value)>
            hb_enable_if (hb_is_trivially_copy_assignable(T))>
  Type *
  realloc_vector (unsigned new_allocated)
  {
    return (Type *) hb_realloc (arrayZ, new_allocated * sizeof (Type));
  }
  template <typename T = Type,
            hb_enable_if (!std::is_trivially_copy_assignable<T>::value)>
            hb_enable_if (!hb_is_trivially_copy_assignable(T))>
  Type *
  realloc_vector (unsigned new_allocated)
  {
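The second push () overload added above is new in this update: when the element type is copy-constructible, the value is placement-new constructed straight into the freshly grown slot, while types that are only copy-assignable keep the older default-construct-then-assign path. A rough standalone sketch of that dispatch, written with std type traits instead of HarfBuzz's hb_enable_if machinery; `tiny_vector`, its fixed capacity, and the nullptr failure result are hypothetical simplifications, not HarfBuzz code.

#include <memory>
#include <new>
#include <type_traits>
#include <utility>

template <typename Type, unsigned Capacity = 16>
struct tiny_vector
{
  alignas (Type) unsigned char storage[Capacity * sizeof (Type)];
  unsigned length = 0;

  Type *slot (unsigned i) { return reinterpret_cast<Type *> (storage) + i; }
  ~tiny_vector () { for (unsigned i = 0; i < length; i++) slot (i)->~Type (); }

  /* Copy-constructible element type: construct directly in place (the new fast path). */
  template <typename T,
            typename T2 = Type,
            std::enable_if_t<std::is_copy_constructible<T2>::value, int> = 0>
  Type *push (T&& v)
  {
    if (length == Capacity) return nullptr;   /* stands in for allocation failure */
    return new (slot (length++)) Type (std::forward<T> (v));
  }

  /* Element type that is only copy-assignable: default-construct, then assign (the old path). */
  template <typename T,
            typename T2 = Type,
            std::enable_if_t<!std::is_copy_constructible<T2>::value &&
                             std::is_copy_assignable<T>::value, int> = 0>
  Type *push (T&& v)
  {
    if (length == Capacity) return nullptr;
    Type *p = new (slot (length++)) Type ();
    *p = std::forward<T> (v);
    return p;
  }
};

With this split, `tiny_vector<std::string> v; v.push (std::string ("abc"));` constructs the string in place rather than assigning over a default-constructed one, which is what the emplace-style overload above buys hb_vector_t.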
@@ -230,8 +253,7 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  }

  template <typename T = Type,
            hb_enable_if (std::is_trivially_constructible<T>::value ||
                          !std::is_default_constructible<T>::value)>
            hb_enable_if (hb_is_trivially_constructible(T))>
  void
  grow_vector (unsigned size)
  {
@@ -239,8 +261,7 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
    length = size;
  }
  template <typename T = Type,
            hb_enable_if (!std::is_trivially_constructible<T>::value &&
                          std::is_default_constructible<T>::value)>
            hb_enable_if (!hb_is_trivially_constructible(T))>
  void
  grow_vector (unsigned size)
  {
@@ -252,14 +273,52 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  }

  template <typename T = Type,
            hb_enable_if (std::is_trivially_destructible<T>::value)>
            hb_enable_if (hb_is_trivially_copyable (T))>
  void
  copy_vector (const hb_vector_t &other)
  {
    length = other.length;
    hb_memcpy ((void *) arrayZ, (const void *) other.arrayZ, length * item_size);
  }
  template <typename T = Type,
            hb_enable_if (!hb_is_trivially_copyable (T) &&
                          std::is_copy_constructible<T>::value)>
  void
  copy_vector (const hb_vector_t &other)
  {
    length = 0;
    while (length < other.length)
    {
      length++;
      new (std::addressof (arrayZ[length - 1])) Type (other.arrayZ[length - 1]);
    }
  }
  template <typename T = Type,
            hb_enable_if (!hb_is_trivially_copyable (T) &&
                          !std::is_copy_constructible<T>::value &&
                          std::is_default_constructible<T>::value &&
                          std::is_copy_assignable<T>::value)>
  void
  copy_vector (const hb_vector_t &other)
  {
    length = 0;
    while (length < other.length)
    {
      length++;
      new (std::addressof (arrayZ[length - 1])) Type ();
      arrayZ[length - 1] = other.arrayZ[length - 1];
    }
  }

  template <typename T = Type,
            hb_enable_if (hb_is_trivially_destructible(T))>
  void
  shrink_vector (unsigned size)
  {
    length = size;
  }
  template <typename T = Type,
            hb_enable_if (!std::is_trivially_destructible<T>::value)>
            hb_enable_if (!hb_is_trivially_destructible(T))>
  void
  shrink_vector (unsigned size)
  {
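The three copy_vector () overloads added above choose a strategy from the element type: a plain memcpy for trivially copyable types, in-place copy construction when a copy constructor exists, and default-construction followed by copy assignment as the fallback. A compact sketch of the same selection using if constexpr (C++17); `copy_construct_range` and its parameters are hypothetical names, not HarfBuzz API.

#include <cstring>
#include <memory>
#include <new>
#include <type_traits>

/* Copy n elements from src into the uninitialized storage at dst, picking the
 * cheapest strategy the element type allows. */
template <typename Type>
void copy_construct_range (Type *dst, const Type *src, unsigned n)
{
  if (!n) return;
  if constexpr (std::is_trivially_copyable<Type>::value)
  {
    /* Fast path: bitwise copy, as the trivially-copyable overload above does. */
    std::memcpy ((void *) dst, (const void *) src, n * sizeof (Type));
  }
  else if constexpr (std::is_copy_constructible<Type>::value)
  {
    /* Construct each destination element directly from its source element. */
    for (unsigned i = 0; i < n; i++)
      new (std::addressof (dst[i])) Type (src[i]);
  }
  else
  {
    /* Last resort: default-construct, then copy-assign. */
    static_assert (std::is_default_constructible<Type>::value &&
                   std::is_copy_assignable<Type>::value,
                   "element type must be default-constructible and copy-assignable");
    for (unsigned i = 0; i < n; i++)
    {
      new (std::addressof (dst[i])) Type ();
      dst[i] = src[i];
    }
  }
}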
@@ -271,7 +330,7 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  }

  template <typename T = Type,
            hb_enable_if (std::is_trivially_copy_assignable<T>::value)>
            hb_enable_if (hb_is_trivially_copy_assignable(T))>
  void
  shift_down_vector (unsigned i)
  {
@@ -280,7 +339,7 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
                (length - i) * sizeof (Type));
  }
  template <typename T = Type,
            hb_enable_if (!std::is_trivially_copy_assignable<T>::value)>
            hb_enable_if (!hb_is_trivially_copy_assignable(T))>
  void
  shift_down_vector (unsigned i)
  {
@@ -341,7 +400,7 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  Type pop ()
  {
    if (!length) return Null (Type);
    Type v = std::move (arrayZ[length - 1]);
    Type v = arrayZ[length - 1];
    arrayZ[length - 1].~Type ();
    length--;
    return v;
@@ -351,8 +410,8 @@ struct hb_vector_t : std::conditional<sorted, hb_vector_t<Type, false>, hb_empty
  {
    if (unlikely (i >= length))
      return;
    arrayZ[i].~Type ();
    shift_down_vector (i + 1);
    arrayZ[length - 1].~Type ();
    length--;
  }
6
thirdparty/harfbuzz/src/hb-version.h
vendored
@@ -47,20 +47,20 @@ HB_BEGIN_DECLS
 *
 * The minor component of the library version available at compile-time.
 */
#define HB_VERSION_MINOR 2
#define HB_VERSION_MINOR 3
/**
 * HB_VERSION_MICRO:
 *
 * The micro component of the library version available at compile-time.
 */
#define HB_VERSION_MICRO 1
#define HB_VERSION_MICRO 0

/**
 * HB_VERSION_STRING:
 *
 * A string literal containing the library version available at compile-time.
 */
#define HB_VERSION_STRING "4.2.1"
#define HB_VERSION_STRING "4.3.0"

/**
 * HB_VERSION_ATLEAST:
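These version macros are what downstream code keys off, so the bump to 4.3.0 is visible both at compile time and at run time. A small illustrative program (not part of the diff) that gates on the new version with the public HB_VERSION_ATLEAST macro and the hb_version_atleast () / hb_version_string () functions.

#include <hb.h>
#include <stdio.h>

int main (void)
{
#if HB_VERSION_ATLEAST (4, 3, 0)
  printf ("built against HarfBuzz >= 4.3.0 (headers: %s)\n", HB_VERSION_STRING);
#else
  printf ("built against an older HarfBuzz (headers: %s)\n", HB_VERSION_STRING);
#endif

  /* The linked library may be newer or older than the headers. */
  if (hb_version_atleast (4, 3, 0))
    printf ("running against HarfBuzz >= 4.3.0 (library: %s)\n", hb_version_string ());
  return 0;
}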