HarfBuzz: Update to version 4.4.1

bruvzg 2022-07-04 09:11:15 +03:00
parent 41fdddb1f8
commit 6c27d9d624
No known key found for this signature in database
GPG key ID: 7960FCF39844EC38
161 changed files with 11955 additions and 10225 deletions

View file

@@ -75,18 +75,18 @@ if env["builtin_harfbuzz"]:
 "src/hb-ot-meta.cc",
 "src/hb-ot-metrics.cc",
 "src/hb-ot-name.cc",
-"src/hb-ot-shape-complex-arabic.cc",
-"src/hb-ot-shape-complex-default.cc",
-"src/hb-ot-shape-complex-hangul.cc",
-"src/hb-ot-shape-complex-hebrew.cc",
-"src/hb-ot-shape-complex-indic-table.cc",
-"src/hb-ot-shape-complex-indic.cc",
-"src/hb-ot-shape-complex-khmer.cc",
-"src/hb-ot-shape-complex-myanmar.cc",
-"src/hb-ot-shape-complex-syllabic.cc",
-"src/hb-ot-shape-complex-thai.cc",
-"src/hb-ot-shape-complex-use.cc",
-"src/hb-ot-shape-complex-vowel-constraints.cc",
+"src/hb-ot-shaper-arabic.cc",
+"src/hb-ot-shaper-default.cc",
+"src/hb-ot-shaper-hangul.cc",
+"src/hb-ot-shaper-hebrew.cc",
+"src/hb-ot-shaper-indic-table.cc",
+"src/hb-ot-shaper-indic.cc",
+"src/hb-ot-shaper-khmer.cc",
+"src/hb-ot-shaper-myanmar.cc",
+"src/hb-ot-shaper-syllabic.cc",
+"src/hb-ot-shaper-thai.cc",
+"src/hb-ot-shaper-use.cc",
+"src/hb-ot-shaper-vowel-constraints.cc",
 "src/hb-ot-shape-fallback.cc",
 "src/hb-ot-shape-normalize.cc",
 "src/hb-ot-shape.cc",

View file

@@ -220,18 +220,18 @@ thirdparty_harfbuzz_sources = [
 "src/hb-ot-meta.cc",
 "src/hb-ot-metrics.cc",
 "src/hb-ot-name.cc",
-"src/hb-ot-shape-complex-arabic.cc",
-"src/hb-ot-shape-complex-default.cc",
-"src/hb-ot-shape-complex-hangul.cc",
-"src/hb-ot-shape-complex-hebrew.cc",
-"src/hb-ot-shape-complex-indic-table.cc",
-"src/hb-ot-shape-complex-indic.cc",
-"src/hb-ot-shape-complex-khmer.cc",
-"src/hb-ot-shape-complex-myanmar.cc",
-"src/hb-ot-shape-complex-syllabic.cc",
-"src/hb-ot-shape-complex-thai.cc",
-"src/hb-ot-shape-complex-use.cc",
-"src/hb-ot-shape-complex-vowel-constraints.cc",
+"src/hb-ot-shaper-arabic.cc",
+"src/hb-ot-shaper-default.cc",
+"src/hb-ot-shaper-hangul.cc",
+"src/hb-ot-shaper-hebrew.cc",
+"src/hb-ot-shaper-indic-table.cc",
+"src/hb-ot-shaper-indic.cc",
+"src/hb-ot-shaper-khmer.cc",
+"src/hb-ot-shaper-myanmar.cc",
+"src/hb-ot-shaper-syllabic.cc",
+"src/hb-ot-shaper-thai.cc",
+"src/hb-ot-shaper-use.cc",
+"src/hb-ot-shaper-vowel-constraints.cc",
 "src/hb-ot-shape-fallback.cc",
 "src/hb-ot-shape-normalize.cc",
 "src/hb-ot-shape.cc",

View file

@@ -213,7 +213,7 @@ Files extracted from upstream source:
 ## harfbuzz
 - Upstream: https://github.com/harfbuzz/harfbuzz
-- Version: 4.3.0 (aee123fc83388b8f5acfb301d87bd92eccc5b843, 2022)
+- Version: 4.4.1 (096aaa62a6e0d07c02a4894fc036efc927e5aaf9, 2022)
 - License: MIT
 Files extracted from upstream source:

View file

@@ -0,0 +1,165 @@
#ifndef OT_LAYOUT_GPOS_HH
#define OT_LAYOUT_GPOS_HH
#include "../../hb-ot-layout-common.hh"
#include "../../hb-ot-layout-gsubgpos.hh"
#include "GPOS/Common.hh"
#include "GPOS/PosLookup.hh"
namespace OT {
namespace Layout {
static void
propagate_attachment_offsets (hb_glyph_position_t *pos,
unsigned int len,
unsigned int i,
hb_direction_t direction,
unsigned nesting_level = HB_MAX_NESTING_LEVEL);
/*
* GPOS -- Glyph Positioning
* https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
*/
struct GPOS : GSUBGPOS
{
static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
using Lookup = GPOS_impl::PosLookup;
const GPOS_impl::PosLookup& get_lookup (unsigned int i) const
{ return static_cast<const GPOS_impl::PosLookup &> (GSUBGPOS::get_lookup (i)); }
static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
bool subset (hb_subset_context_t *c) const
{
hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_langsys, c->plan->gpos_features);
return GSUBGPOS::subset<GPOS_impl::PosLookup> (&l);
}
bool sanitize (hb_sanitize_context_t *c) const
{ return GSUBGPOS::sanitize<GPOS_impl::PosLookup> (c); }
HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
hb_face_t *face) const;
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
for (unsigned i = 0; i < GSUBGPOS::get_lookup_count (); i++)
{
if (!c->gpos_lookups->has (i)) continue;
const GPOS_impl::PosLookup &l = get_lookup (i);
l.dispatch (c);
}
}
void closure_lookups (hb_face_t *face,
const hb_set_t *glyphs,
hb_set_t *lookup_indexes /* IN/OUT */) const
{ GSUBGPOS::closure_lookups<GPOS_impl::PosLookup> (face, glyphs, lookup_indexes); }
typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
};
static void
propagate_attachment_offsets (hb_glyph_position_t *pos,
unsigned int len,
unsigned int i,
hb_direction_t direction,
unsigned nesting_level)
{
/* Adjusts offsets of attached glyphs (both cursive and mark) to accumulate
 * the offset of the glyph they are attached to. */
int chain = pos[i].attach_chain(), type = pos[i].attach_type();
if (likely (!chain))
return;
pos[i].attach_chain() = 0;
unsigned int j = (int) i + chain;
if (unlikely (j >= len))
return;
if (unlikely (!nesting_level))
return;
propagate_attachment_offsets (pos, len, j, direction, nesting_level - 1);
assert (!!(type & GPOS_impl::ATTACH_TYPE_MARK) ^ !!(type & GPOS_impl::ATTACH_TYPE_CURSIVE));
if (type & GPOS_impl::ATTACH_TYPE_CURSIVE)
{
if (HB_DIRECTION_IS_HORIZONTAL (direction))
pos[i].y_offset += pos[j].y_offset;
else
pos[i].x_offset += pos[j].x_offset;
}
else /*if (type & GPOS_impl::ATTACH_TYPE_MARK)*/
{
pos[i].x_offset += pos[j].x_offset;
pos[i].y_offset += pos[j].y_offset;
assert (j < i);
if (HB_DIRECTION_IS_FORWARD (direction))
for (unsigned int k = j; k < i; k++) {
pos[i].x_offset -= pos[k].x_advance;
pos[i].y_offset -= pos[k].y_advance;
}
else
for (unsigned int k = j + 1; k < i + 1; k++) {
pos[i].x_offset += pos[k].x_advance;
pos[i].y_offset += pos[k].y_advance;
}
}
}
void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
unsigned int count = buffer->len;
for (unsigned int i = 0; i < count; i++)
buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
}
void
GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
{
//_hb_buffer_assert_gsubgpos_vars (buffer);
}
void
GPOS::position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer)
{
_hb_buffer_assert_gsubgpos_vars (buffer);
unsigned int len;
hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
hb_direction_t direction = buffer->props.direction;
/* Handle attachments */
if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
for (unsigned i = 0; i < len; i++)
propagate_attachment_offsets (pos, len, i, direction);
if (unlikely (font->slant))
{
for (unsigned i = 0; i < len; i++)
if (unlikely (pos[i].y_offset))
pos[i].x_offset += _hb_roundf (font->slant_xy * pos[i].y_offset);
}
}
}
struct GPOS_accelerator_t : Layout::GPOS::accelerator_t {
GPOS_accelerator_t (hb_face_t *face) : Layout::GPOS::accelerator_t (face) {}
};
}
#endif /* OT_LAYOUT_GPOS_HH */
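
The two functions above encode the whole attachment pass: offsets follow attach_chain() links, parents are resolved before children, and marks additionally walk back over the advances between themselves and their base. A minimal standalone sketch of that accumulation (horizontal, forward direction only; not HarfBuzz source and not part of this commit — the type and function names below are invented for the illustration):

#include <cstdio>
#include <vector>

struct glyph_pos_t
{
  int x_offset, y_offset;   // placement relative to the pen position
  int x_advance;            // horizontal advance
  int attach_chain;         // offset to the glyph this one attaches to; 0 = none
  bool is_mark;             // simplified stand-in for attach_type()
};

/* Horizontal, forward-direction version of the accumulation in
 * propagate_attachment_offsets(). */
static void propagate (std::vector<glyph_pos_t> &pos, unsigned i)
{
  int chain = pos[i].attach_chain;
  if (!chain) return;
  pos[i].attach_chain = 0;                  // visit each link only once
  unsigned j = (int) i + chain;             // the glyph we attach to
  propagate (pos, j);                       // parent must be resolved first
  if (pos[i].is_mark)
  {
    pos[i].x_offset += pos[j].x_offset;
    pos[i].y_offset += pos[j].y_offset;
    for (unsigned k = j; k < i; k++)        // marks carry no advance of their own,
      pos[i].x_offset -= pos[k].x_advance;  // so step back over the base's advance
  }
  else                                      // cursive: only the cross axis is inherited
    pos[i].y_offset += pos[j].y_offset;
}

int main ()
{
  std::vector<glyph_pos_t> pos = {
    {0,  50, 600,  0, false},               // base glyph, raised by 50 units
    {10, 120,  0, -1, true},                // mark anchored 10,120 from the base origin
  };
  for (unsigned i = 0; i < pos.size (); i++)
    propagate (pos, i);
  printf ("mark offset: %d,%d\n", pos[1].x_offset, pos[1].y_offset); // -590,170
  return 0;
}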

View file

@@ -0,0 +1,84 @@
#ifndef OT_LAYOUT_GPOS_ANCHOR_HH
#define OT_LAYOUT_GPOS_ANCHOR_HH
#include "AnchorFormat1.hh"
#include "AnchorFormat2.hh"
#include "AnchorFormat3.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct Anchor
{
protected:
union {
HBUINT16 format; /* Format identifier */
AnchorFormat1 format1;
AnchorFormat2 format2;
AnchorFormat3 format3;
} u;
public:
DEFINE_SIZE_UNION (2, format);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
case 3: return_trace (u.format3.sanitize (c));
default:return_trace (true);
}
}
void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
float *x, float *y) const
{
*x = *y = 0;
switch (u.format) {
case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
default: return;
}
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
switch (u.format) {
case 1: return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
case 2:
if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
{
// AnchorFormat 2 just contains extra hinting information, so
// if hints are being dropped, convert to format 1.
return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
}
return_trace (bool (reinterpret_cast<Anchor *> (u.format2.copy (c->serializer))));
case 3: return_trace (bool (reinterpret_cast<Anchor *> (u.format3.copy (c->serializer,
c->plan->layout_variation_idx_map))));
default:return_trace (false);
}
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
switch (u.format) {
case 1: case 2:
return;
case 3:
u.format3.collect_variation_indices (c);
return;
default: return;
}
}
};
}
}
}
#endif // OT_LAYOUT_GPOS_ANCHOR_HH

View file

@@ -0,0 +1,46 @@
#ifndef OT_LAYOUT_GPOS_ANCHORFORMAT1_HH
#define OT_LAYOUT_GPOS_ANCHORFORMAT1_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct AnchorFormat1
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
FWORD xCoordinate; /* Horizontal value--in design units */
FWORD yCoordinate; /* Vertical value--in design units */
public:
DEFINE_SIZE_STATIC (6);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
float *x, float *y) const
{
hb_font_t *font = c->font;
*x = font->em_fscale_x (xCoordinate);
*y = font->em_fscale_y (yCoordinate);
}
AnchorFormat1* copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
AnchorFormat1* out = c->embed<AnchorFormat1> (this);
if (!out) return_trace (out);
out->format = 1;
return_trace (out);
}
};
}
}
}
#endif // OT_LAYOUT_GPOS_ANCHORFORMAT1_HH

View file

@@ -0,0 +1,58 @@
#ifndef OT_LAYOUT_GPOS_ANCHORFORMAT2_HH
#define OT_LAYOUT_GPOS_ANCHORFORMAT2_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct AnchorFormat2
{
protected:
HBUINT16 format; /* Format identifier--format = 2 */
FWORD xCoordinate; /* Horizontal value--in design units */
FWORD yCoordinate; /* Vertical value--in design units */
HBUINT16 anchorPoint; /* Index to glyph contour point */
public:
DEFINE_SIZE_STATIC (8);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
float *x, float *y) const
{
hb_font_t *font = c->font;
#ifdef HB_NO_HINTING
*x = font->em_fscale_x (xCoordinate);
*y = font->em_fscale_y (yCoordinate);
return;
#endif
unsigned int x_ppem = font->x_ppem;
unsigned int y_ppem = font->y_ppem;
hb_position_t cx = 0, cy = 0;
bool ret;
ret = (x_ppem || y_ppem) &&
font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
*x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
*y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
}
AnchorFormat2* copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
return_trace (c->embed<AnchorFormat2> (this));
}
};
}
}
}
#endif // OT_LAYOUT_GPOS_ANCHORFORMAT2_HH

View file

@@ -0,0 +1,70 @@
#ifndef OT_LAYOUT_GPOS_ANCHORFORMAT3_HH
#define OT_LAYOUT_GPOS_ANCHORFORMAT3_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct AnchorFormat3
{
protected:
HBUINT16 format; /* Format identifier--format = 3 */
FWORD xCoordinate; /* Horizontal value--in design units */
FWORD yCoordinate; /* Vertical value--in design units */
Offset16To<Device>
xDeviceTable; /* Offset to Device table for X
* coordinate-- from beginning of
* Anchor table (may be NULL) */
Offset16To<Device>
yDeviceTable; /* Offset to Device table for Y
* coordinate-- from beginning of
* Anchor table (may be NULL) */
public:
DEFINE_SIZE_STATIC (10);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
}
void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
float *x, float *y) const
{
hb_font_t *font = c->font;
*x = font->em_fscale_x (xCoordinate);
*y = font->em_fscale_y (yCoordinate);
if (font->x_ppem || font->num_coords)
*x += (this+xDeviceTable).get_x_delta (font, c->var_store, c->var_store_cache);
if (font->y_ppem || font->num_coords)
*y += (this+yDeviceTable).get_y_delta (font, c->var_store, c->var_store_cache);
}
AnchorFormat3* copy (hb_serialize_context_t *c,
const hb_map_t *layout_variation_idx_map) const
{
TRACE_SERIALIZE (this);
if (!layout_variation_idx_map) return_trace (nullptr);
auto *out = c->embed<AnchorFormat3> (this);
if (unlikely (!out)) return_trace (nullptr);
out->xDeviceTable.serialize_copy (c, xDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
out->yDeviceTable.serialize_copy (c, yDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
return_trace (out);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
(this+xDeviceTable).collect_variation_indices (c->layout_variation_indices);
(this+yDeviceTable).collect_variation_indices (c->layout_variation_indices);
}
};
}
}
}
#endif // OT_LAYOUT_GPOS_ANCHORFORMAT3_HH

View file

@@ -0,0 +1,77 @@
#ifndef OT_LAYOUT_GPOS_ANCHORMATRIX_HH
#define OT_LAYOUT_GPOS_ANCHORMATRIX_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct AnchorMatrix
{
HBUINT16 rows; /* Number of rows */
UnsizedArrayOf<Offset16To<Anchor>>
matrixZ; /* Matrix of offsets to Anchor tables--
* from beginning of AnchorMatrix table */
public:
DEFINE_SIZE_ARRAY (2, matrixZ);
bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
unsigned int count = rows * cols;
if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
for (unsigned int i = 0; i < count; i++)
if (!matrixZ[i].sanitize (c, this)) return_trace (false);
return_trace (true);
}
const Anchor& get_anchor (unsigned int row, unsigned int col,
unsigned int cols, bool *found) const
{
*found = false;
if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
*found = !matrixZ[row * cols + col].is_null ();
return this+matrixZ[row * cols + col];
}
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
Iterator index_iter) const
{
for (unsigned i : index_iter)
(this+matrixZ[i]).collect_variation_indices (c);
}
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool subset (hb_subset_context_t *c,
unsigned num_rows,
Iterator index_iter) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (!index_iter) return_trace (false);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->rows = num_rows;
for (const unsigned i : index_iter)
{
auto *offset = c->serializer->embed (matrixZ[i]);
if (!offset) return_trace (false);
offset->serialize_subset (c, matrixZ[i], this);
}
return_trace (true);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_ANCHORMATRIX_HH */

View file

@@ -0,0 +1,14 @@
#ifndef OT_LAYOUT_GPOS_CHAINCONTEXTPOS_HH
#define OT_LAYOUT_GPOS_CHAINCONTEXTPOS_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct ChainContextPos : ChainContext {};
}
}
}
#endif /* OT_LAYOUT_GPOS_CHAINCONTEXTPOS_HH */

View file

@@ -0,0 +1,32 @@
#ifndef OT_LAYOUT_GPOS_COMMON_HH
#define OT_LAYOUT_GPOS_COMMON_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
enum attach_type_t {
ATTACH_TYPE_NONE = 0X00,
/* Each attachment should be either a mark or a cursive; can't be both. */
ATTACH_TYPE_MARK = 0X01,
ATTACH_TYPE_CURSIVE = 0X02,
};
/* buffer **position** var allocations */
#define attach_chain() var.i16[0] /* glyph to which this attaches, relative to the current glyph; negative for going back, positive for forward. */
#define attach_type() var.u8[2] /* attachment type */
/* Note! if attach_chain() is zero, the value of attach_type() is irrelevant. */
template<typename Iterator, typename SrcLookup>
static void SinglePos_serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
const hb_map_t *layout_variation_idx_map);
}
}
}
#endif // OT_LAYOUT_GPOS_COMMON_HH

View file

@@ -0,0 +1,14 @@
#ifndef OT_LAYOUT_GPOS_CONTEXTPOS_HH
#define OT_LAYOUT_GPOS_CONTEXTPOS_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct ContextPos : Context {};
}
}
}
#endif /* OT_LAYOUT_GPOS_CONTEXTPOS_HH */

View file

@@ -0,0 +1,35 @@
#ifndef OT_LAYOUT_GPOS_CURSIVEPOS_HH
#define OT_LAYOUT_GPOS_CURSIVEPOS_HH
#include "CursivePosFormat1.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct CursivePos
{
protected:
union {
HBUINT16 format; /* Format identifier */
CursivePosFormat1 format1;
} u;
public:
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_CURSIVEPOS_HH */

View file

@@ -0,0 +1,281 @@
#ifndef OT_LAYOUT_GPOS_CURSIVEPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_CURSIVEPOSFORMAT1_HH
#include "Anchor.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct EntryExitRecord
{
friend struct CursivePosFormat1;
bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const void *src_base) const
{
(src_base+entryAnchor).collect_variation_indices (c);
(src_base+exitAnchor).collect_variation_indices (c);
}
EntryExitRecord* subset (hb_subset_context_t *c,
const void *src_base) const
{
TRACE_SERIALIZE (this);
auto *out = c->serializer->embed (this);
if (unlikely (!out)) return_trace (nullptr);
out->entryAnchor.serialize_subset (c, entryAnchor, src_base);
out->exitAnchor.serialize_subset (c, exitAnchor, src_base);
return_trace (out);
}
protected:
Offset16To<Anchor>
entryAnchor; /* Offset to EntryAnchor table--from
* beginning of CursivePos
* subtable--may be NULL */
Offset16To<Anchor>
exitAnchor; /* Offset to ExitAnchor table--from
* beginning of CursivePos
* subtable--may be NULL */
public:
DEFINE_SIZE_STATIC (4);
};
static void
reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent) {
int chain = pos[i].attach_chain(), type = pos[i].attach_type();
if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
return;
pos[i].attach_chain() = 0;
unsigned int j = (int) i + chain;
/* Stop if we see new parent in the chain. */
if (j == new_parent)
return;
reverse_cursive_minor_offset (pos, j, direction, new_parent);
if (HB_DIRECTION_IS_HORIZONTAL (direction))
pos[j].y_offset = -pos[i].y_offset;
else
pos[j].x_offset = -pos[i].x_offset;
pos[j].attach_chain() = -chain;
pos[j].attach_type() = type;
}
struct CursivePosFormat1
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
Offset16To<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of subtable */
Array16Of<EntryExitRecord>
entryExitRecord; /* Array of EntryExit records--in
* Coverage Index order */
public:
DEFINE_SIZE_ARRAY (6, entryExitRecord);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
}
bool intersects (const hb_set_t *glyphs) const
{ return (this+coverage).intersects (glyphs); }
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
+ hb_zip (this+coverage, entryExitRecord)
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const EntryExitRecord& record) { record.collect_variation_indices (c, this); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{ if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
const Coverage &get_coverage () const { return this+coverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
if (!this_record.entryAnchor) return_trace (false);
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
unsigned unsafe_from;
if (!skippy_iter.prev (&unsafe_from))
{
buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
return_trace (false);
}
const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
if (!prev_record.exitAnchor)
{
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
}
unsigned int i = skippy_iter.idx;
unsigned int j = buffer->idx;
buffer->unsafe_to_break (i, j);
float entry_x, entry_y, exit_x, exit_y;
(this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
(this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
hb_glyph_position_t *pos = buffer->pos;
hb_position_t d;
/* Main-direction adjustment */
switch (c->direction) {
case HB_DIRECTION_LTR:
pos[i].x_advance = roundf (exit_x) + pos[i].x_offset;
d = roundf (entry_x) + pos[j].x_offset;
pos[j].x_advance -= d;
pos[j].x_offset -= d;
break;
case HB_DIRECTION_RTL:
d = roundf (exit_x) + pos[i].x_offset;
pos[i].x_advance -= d;
pos[i].x_offset -= d;
pos[j].x_advance = roundf (entry_x) + pos[j].x_offset;
break;
case HB_DIRECTION_TTB:
pos[i].y_advance = roundf (exit_y) + pos[i].y_offset;
d = roundf (entry_y) + pos[j].y_offset;
pos[j].y_advance -= d;
pos[j].y_offset -= d;
break;
case HB_DIRECTION_BTT:
d = roundf (exit_y) + pos[i].y_offset;
pos[i].y_advance -= d;
pos[i].y_offset -= d;
pos[j].y_advance = roundf (entry_y);
break;
case HB_DIRECTION_INVALID:
default:
break;
}
/* Cross-direction adjustment */
/* We attach child to parent (think graph theory and rooted trees), where
 * the root stays on the baseline and each node aligns itself against its
 * parent.
*
* Optimize things for the case of RightToLeft, as that's most common in
* Arabic. */
unsigned int child = i;
unsigned int parent = j;
hb_position_t x_offset = entry_x - exit_x;
hb_position_t y_offset = entry_y - exit_y;
if (!(c->lookup_props & LookupFlag::RightToLeft))
{
unsigned int k = child;
child = parent;
parent = k;
x_offset = -x_offset;
y_offset = -y_offset;
}
/* If child was already connected to someone else, walk through its old
* chain and reverse the link direction, such that the whole tree of its
* previous connection now attaches to new parent. Watch out for case
* where new parent is on the path from old chain...
*/
reverse_cursive_minor_offset (pos, child, c->direction, parent);
pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
pos[child].attach_chain() = (int) parent - (int) child;
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
pos[child].y_offset = y_offset;
else
pos[child].x_offset = x_offset;
/* If parent was attached to child, separate them.
* https://github.com/harfbuzz/harfbuzz/issues/2469
*/
if (unlikely (pos[parent].attach_chain() == -pos[child].attach_chain()))
pos[parent].attach_chain() = 0;
buffer->idx++;
return_trace (true);
}
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_subset_context_t *c,
Iterator it,
const void *src_base)
{
if (unlikely (!c->serializer->extend_min ((*this)))) return;
this->format = 1;
this->entryExitRecord.len = it.len ();
for (const EntryExitRecord& entry_record : + it
| hb_map (hb_second))
entry_record.subset (c, src_base);
auto glyphs =
+ it
| hb_map_retains_sorting (hb_first)
;
coverage.serialize_serialize (c->serializer, glyphs);
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out)) return_trace (false);
auto it =
+ hb_zip (this+coverage, entryExitRecord)
| hb_filter (glyphset, hb_first)
| hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
{ return hb_pair (glyph_map[p.first], p.second);})
;
bool ret = bool (it);
out->serialize (c, it, this);
return_trace (ret);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_CURSIVEPOSFORMAT1_HH */
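
The main-direction adjustment in apply() above is plain arithmetic on anchors and advances: for LTR, the previous glyph's advance is clipped at its exit anchor, and the next glyph is pulled back so its entry anchor lands on that point. A standalone sketch of that branch (not HarfBuzz source, not part of this commit; all names and numbers are invented):

#include <cmath>
#include <cstdio>

int main ()
{
  // glyph i exits at exit_x; the following glyph j enters at entry_x (font units)
  float exit_x = 520.f, entry_x = 40.f;
  int i_x_advance = 600, i_x_offset = 0;
  int j_x_advance = 580, j_x_offset = 0;

  // HB_DIRECTION_LTR branch of CursivePosFormat1::apply():
  i_x_advance = (int) std::roundf (exit_x) + i_x_offset;  // 520: advance ends at the exit anchor
  int d = (int) std::roundf (entry_x) + j_x_offset;       // 40:  distance to j's entry anchor
  j_x_advance -= d;                                        // 540
  j_x_offset  -= d;                                        // -40: j shifts left onto the join point

  printf ("i.advance=%d  j.advance=%d  j.offset=%d\n",
          i_x_advance, j_x_advance, j_x_offset);
  return 0;
}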

View file

@@ -0,0 +1,17 @@
#ifndef OT_LAYOUT_GPOS_EXTENSIONPOS_HH
#define OT_LAYOUT_GPOS_EXTENSIONPOS_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct ExtensionPos : Extension<ExtensionPos>
{
typedef struct PosLookupSubTable SubTable;
};
}
}
}
#endif /* OT_LAYOUT_GPOS_EXTENSIONPOS_HH */

View file

@@ -0,0 +1,113 @@
#ifndef OT_LAYOUT_GPOS_MARKARRAY_HH
#define OT_LAYOUT_GPOS_MARKARRAY_HH
#include "AnchorMatrix.hh"
#include "MarkRecord.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct MarkArray : Array16Of<MarkRecord> /* Array of MarkRecords--in Coverage order */
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (Array16Of<MarkRecord>::sanitize (c, this));
}
bool apply (hb_ot_apply_context_t *c,
unsigned int mark_index, unsigned int glyph_index,
const AnchorMatrix &anchors, unsigned int class_count,
unsigned int glyph_pos) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
const MarkRecord &record = Array16Of<MarkRecord>::operator[](mark_index);
unsigned int mark_class = record.klass;
const Anchor& mark_anchor = this + record.markAnchor;
bool found;
const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
/* If this subtable doesn't have an anchor for this base and this class,
* return false such that the subsequent subtables have a chance at it. */
if (unlikely (!found)) return_trace (false);
float mark_x, mark_y, base_x, base_y;
buffer->unsafe_to_break (glyph_pos, buffer->idx + 1);
mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
hb_glyph_position_t &o = buffer->cur_pos();
o.x_offset = roundf (base_x - mark_x);
o.y_offset = roundf (base_y - mark_y);
o.attach_type() = ATTACH_TYPE_MARK;
o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
buffer->idx++;
return_trace (true);
}
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool subset (hb_subset_context_t *c,
Iterator coverage,
const hb_map_t *klass_mapping) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
auto* out = c->serializer->start_embed (this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
auto mark_iter =
+ hb_zip (coverage, this->iter ())
| hb_filter (glyphset, hb_first)
| hb_map (hb_second)
;
unsigned new_length = 0;
for (const auto& mark_record : mark_iter) {
if (unlikely (!mark_record.subset (c, this, klass_mapping)))
return_trace (false);
new_length++;
}
if (unlikely (!c->serializer->check_assign (out->len, new_length,
HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)))
return_trace (false);
return_trace (true);
}
};
static void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage,
const MarkArray &mark_array,
const hb_set_t &glyphset,
hb_map_t* klass_mapping /* INOUT */)
{
hb_set_t orig_classes;
+ hb_zip (mark_coverage, mark_array)
| hb_filter (glyphset, hb_first)
| hb_map (hb_second)
| hb_map (&MarkRecord::get_class)
| hb_sink (orig_classes)
;
unsigned idx = 0;
for (auto klass : orig_classes.iter ())
{
if (klass_mapping->has (klass)) continue;
klass_mapping->set (klass, idx);
idx++;
}
}
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKARRAY_HH */
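
Markclass_closure_and_remap_indexes() above simply packs the mark classes that survive subsetting into a dense 0..n-1 range, in ascending order. A standalone sketch of that renumbering (not HarfBuzz source, not part of this commit; std::set and std::map stand in for hb_set_t and hb_map_t):

#include <cstdio>
#include <map>
#include <set>

int main ()
{
  // mark classes still referenced by the marks that survive subsetting
  std::set<unsigned> orig_classes = {0, 3, 7};

  // pack them into a dense 0..n-1 range, visiting classes in ascending order
  std::map<unsigned, unsigned> klass_mapping;
  unsigned idx = 0;
  for (unsigned klass : orig_classes)
    if (!klass_mapping.count (klass))
      klass_mapping[klass] = idx++;

  for (const auto &kv : klass_mapping)
    printf ("%u -> %u\n", kv.first, kv.second);   // 0 -> 0, 3 -> 1, 7 -> 2
  return 0;
}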

View file

@@ -0,0 +1,35 @@
#ifndef OT_LAYOUT_GPOS_MARKBASEPOS_HH
#define OT_LAYOUT_GPOS_MARKBASEPOS_HH
#include "MarkBasePosFormat1.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct MarkBasePos
{
protected:
union {
HBUINT16 format; /* Format identifier */
MarkBasePosFormat1 format1;
} u;
public:
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKBASEPOS_HH */

View file

@@ -0,0 +1,217 @@
#ifndef OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH
#include "MarkArray.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
typedef AnchorMatrix BaseArray; /* base-major--
* in order of BaseCoverage Index--,
* mark-minor--
* ordered by class--zero-based. */
struct MarkBasePosFormat1
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
Offset16To<Coverage>
markCoverage; /* Offset to MarkCoverage table--from
* beginning of MarkBasePos subtable */
Offset16To<Coverage>
baseCoverage; /* Offset to BaseCoverage table--from
* beginning of MarkBasePos subtable */
HBUINT16 classCount; /* Number of classes defined for marks */
Offset16To<MarkArray>
markArray; /* Offset to MarkArray table--from
* beginning of MarkBasePos subtable */
Offset16To<BaseArray>
baseArray; /* Offset to BaseArray table--from
* beginning of MarkBasePos subtable */
public:
DEFINE_SIZE_STATIC (12);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
markCoverage.sanitize (c, this) &&
baseCoverage.sanitize (c, this) &&
markArray.sanitize (c, this) &&
baseArray.sanitize (c, this, (unsigned int) classCount));
}
bool intersects (const hb_set_t *glyphs) const
{
return (this+markCoverage).intersects (glyphs) &&
(this+baseCoverage).intersects (glyphs);
}
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
+ hb_zip (this+markCoverage, this+markArray)
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
;
hb_map_t klass_mapping;
Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
unsigned basecount = (this+baseArray).rows;
auto base_iter =
+ hb_zip (this+baseCoverage, hb_range (basecount))
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
;
hb_sorted_vector_t<unsigned> base_indexes;
for (const unsigned row : base_iter)
{
+ hb_range ((unsigned) classCount)
| hb_filter (klass_mapping)
| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
| hb_sink (base_indexes)
;
}
(this+baseArray).collect_variation_indices (c, base_indexes.iter ());
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+markCoverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
if (likely (mark_index == NOT_COVERED)) return_trace (false);
/* Now we search backwards for a non-mark glyph */
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
do {
unsigned unsafe_from;
if (!skippy_iter.prev (&unsafe_from))
{
buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
return_trace (false);
}
/* We only want to attach to the first of a MultipleSubst sequence.
* https://github.com/harfbuzz/harfbuzz/issues/740
* Reject others...
* ...but stop if we find a mark in the MultipleSubst sequence:
* https://github.com/harfbuzz/harfbuzz/issues/1020 */
if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
(skippy_iter.idx == 0 ||
_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
_hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
_hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
_hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
_hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
))
break;
skippy_iter.reject ();
} while (true);
/* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
//if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
if (base_index == NOT_COVERED)
{
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
}
return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
hb_map_t klass_mapping;
Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
if (!klass_mapping.get_population ()) return_trace (false);
out->classCount = klass_mapping.get_population ();
auto mark_iter =
+ hb_zip (this+markCoverage, this+markArray)
| hb_filter (glyphset, hb_first)
;
hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ mark_iter
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;
if (!out->markCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
return_trace (false);
out->markArray.serialize_subset (c, markArray, this,
(this+markCoverage).iter (),
&klass_mapping);
unsigned basecount = (this+baseArray).rows;
auto base_iter =
+ hb_zip (this+baseCoverage, hb_range (basecount))
| hb_filter (glyphset, hb_first)
;
new_coverage.reset ();
+ base_iter
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;
if (!out->baseCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
return_trace (false);
hb_sorted_vector_t<unsigned> base_indexes;
for (const unsigned row : + base_iter
| hb_map (hb_second))
{
+ hb_range ((unsigned) classCount)
| hb_filter (klass_mapping)
| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
| hb_sink (base_indexes)
;
}
out->baseArray.serialize_subset (c, baseArray, this,
base_iter.len (),
base_indexes.iter ());
return_trace (true);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH */

View file

@@ -0,0 +1,35 @@
#ifndef OT_LAYOUT_GPOS_MARKLIGPOS_HH
#define OT_LAYOUT_GPOS_MARKLIGPOS_HH
#include "MarkLigPosFormat1.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct MarkLigPos
{
protected:
union {
HBUINT16 format; /* Format identifier */
MarkLigPosFormat1 format1;
} u;
public:
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKLIGPOS_HH */

View file

@@ -0,0 +1,244 @@
#ifndef OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
typedef AnchorMatrix LigatureAttach; /* component-major--
* in order of writing direction--,
* mark-minor--
* ordered by class--zero-based. */
/* Array of LigatureAttach tables ordered by LigatureCoverage Index */
struct LigatureArray : List16OfOffset16To<LigatureAttach>
{
template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool subset (hb_subset_context_t *c,
Iterator coverage,
unsigned class_count,
const hb_map_t *klass_mapping) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
auto *out = c->serializer->start_embed (this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
for (const auto _ : + hb_zip (coverage, *this)
| hb_filter (glyphset, hb_first))
{
auto *matrix = out->serialize_append (c->serializer);
if (unlikely (!matrix)) return_trace (false);
const LigatureAttach& src = (this + _.second);
auto indexes =
+ hb_range (src.rows * class_count)
| hb_filter ([=] (unsigned index) { return klass_mapping->has (index % class_count); })
;
matrix->serialize_subset (c,
_.second,
this,
src.rows,
indexes);
}
return_trace (this->len);
}
};
struct MarkLigPosFormat1
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
Offset16To<Coverage>
markCoverage; /* Offset to Mark Coverage table--from
* beginning of MarkLigPos subtable */
Offset16To<Coverage>
ligatureCoverage; /* Offset to Ligature Coverage
* table--from beginning of MarkLigPos
* subtable */
HBUINT16 classCount; /* Number of defined mark classes */
Offset16To<MarkArray>
markArray; /* Offset to MarkArray table--from
* beginning of MarkLigPos subtable */
Offset16To<LigatureArray>
ligatureArray; /* Offset to LigatureArray table--from
* beginning of MarkLigPos subtable */
public:
DEFINE_SIZE_STATIC (12);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
markCoverage.sanitize (c, this) &&
ligatureCoverage.sanitize (c, this) &&
markArray.sanitize (c, this) &&
ligatureArray.sanitize (c, this, (unsigned int) classCount));
}
bool intersects (const hb_set_t *glyphs) const
{
return (this+markCoverage).intersects (glyphs) &&
(this+ligatureCoverage).intersects (glyphs);
}
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
+ hb_zip (this+markCoverage, this+markArray)
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
;
hb_map_t klass_mapping;
Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
unsigned ligcount = (this+ligatureArray).len;
auto lig_iter =
+ hb_zip (this+ligatureCoverage, hb_range (ligcount))
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
;
const LigatureArray& lig_array = this+ligatureArray;
for (const unsigned i : lig_iter)
{
hb_sorted_vector_t<unsigned> lig_indexes;
unsigned row_count = lig_array[i].rows;
for (unsigned row : + hb_range (row_count))
{
+ hb_range ((unsigned) classCount)
| hb_filter (klass_mapping)
| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
| hb_sink (lig_indexes)
;
}
lig_array[i].collect_variation_indices (c, lig_indexes.iter ());
}
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+markCoverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
if (likely (mark_index == NOT_COVERED)) return_trace (false);
/* Now we search backwards for a non-mark glyph */
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
unsigned unsafe_from;
if (!skippy_iter.prev (&unsafe_from))
{
buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
return_trace (false);
}
/* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
//if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { return_trace (false); }
unsigned int j = skippy_iter.idx;
unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
if (lig_index == NOT_COVERED)
{
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
}
const LigatureArray& lig_array = this+ligatureArray;
const LigatureAttach& lig_attach = lig_array[lig_index];
/* Find component to attach to */
unsigned int comp_count = lig_attach.rows;
if (unlikely (!comp_count))
{
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
}
/* We must now check whether the ligature ID of the current mark glyph
* is identical to the ligature ID of the found ligature. If yes, we
* can directly use the component index. If not, we attach the mark
* glyph to the last component of the ligature. */
unsigned int comp_index;
unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
if (lig_id && lig_id == mark_id && mark_comp > 0)
comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
else
comp_index = comp_count - 1;
return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
hb_map_t klass_mapping;
Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
if (!klass_mapping.get_population ()) return_trace (false);
out->classCount = klass_mapping.get_population ();
auto mark_iter =
+ hb_zip (this+markCoverage, this+markArray)
| hb_filter (glyphset, hb_first)
;
auto new_mark_coverage =
+ mark_iter
| hb_map_retains_sorting (hb_first)
| hb_map_retains_sorting (glyph_map)
;
if (!out->markCoverage.serialize_serialize (c->serializer, new_mark_coverage))
return_trace (false);
out->markArray.serialize_subset (c, markArray, this,
(this+markCoverage).iter (),
&klass_mapping);
auto new_ligature_coverage =
+ hb_iter (this + ligatureCoverage)
| hb_filter (glyphset)
| hb_map_retains_sorting (glyph_map)
;
if (!out->ligatureCoverage.serialize_serialize (c->serializer, new_ligature_coverage))
return_trace (false);
out->ligatureArray.serialize_subset (c, ligatureArray, this,
hb_iter (this+ligatureCoverage), classCount, &klass_mapping);
return_trace (true);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH */
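
The component selection in apply() above boils down to one rule: if the mark was formed together with this ligature (matching lig ids and a non-zero component), attach to that component; otherwise attach to the last one. A standalone sketch of that decision (not HarfBuzz source, not part of this commit; the helper name and numbers are invented):

#include <algorithm>
#include <cstdio>

static unsigned pick_component (unsigned comp_count,   // components in the ligature
                                unsigned lig_id,       // ligature's lig id
                                unsigned mark_id,      // mark's lig id
                                unsigned mark_comp)    // mark's component (1-based)
{
  if (lig_id && lig_id == mark_id && mark_comp > 0)
    return std::min (comp_count, mark_comp) - 1;       // mark belongs to this ligature
  return comp_count - 1;                               // otherwise attach to the last component
}

int main ()
{
  printf ("%u\n", pick_component (3, 7, 7, 2)); // 1: second component of the same ligature
  printf ("%u\n", pick_component (3, 7, 0, 0)); // 2: unrelated mark -> last component
  return 0;
}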

View file

@@ -0,0 +1,36 @@
#ifndef OT_LAYOUT_GPOS_MARKMARKPOS_HH
#define OT_LAYOUT_GPOS_MARKMARKPOS_HH
#include "MarkMarkPosFormat1.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct MarkMarkPos
{
protected:
union {
HBUINT16 format; /* Format identifier */
MarkMarkPosFormat1 format1;
} u;
public:
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKMARKPOS_HH */

View file

@@ -0,0 +1,227 @@
#ifndef OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH
#include "MarkMarkPosFormat1.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
typedef AnchorMatrix Mark2Array; /* mark2-major--
* in order of Mark2Coverage Index--,
* mark1-minor--
* ordered by class--zero-based. */
struct MarkMarkPosFormat1
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
Offset16To<Coverage>
mark1Coverage; /* Offset to Combining Mark1 Coverage
* table--from beginning of MarkMarkPos
* subtable */
Offset16To<Coverage>
mark2Coverage; /* Offset to Combining Mark2 Coverage
* table--from beginning of MarkMarkPos
* subtable */
HBUINT16 classCount; /* Number of defined mark classes */
Offset16To<MarkArray>
mark1Array; /* Offset to Mark1Array table--from
* beginning of MarkMarkPos subtable */
Offset16To<Mark2Array>
mark2Array; /* Offset to Mark2Array table--from
* beginning of MarkMarkPos subtable */
public:
DEFINE_SIZE_STATIC (12);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
mark1Coverage.sanitize (c, this) &&
mark2Coverage.sanitize (c, this) &&
mark1Array.sanitize (c, this) &&
mark2Array.sanitize (c, this, (unsigned int) classCount));
}
bool intersects (const hb_set_t *glyphs) const
{
return (this+mark1Coverage).intersects (glyphs) &&
(this+mark2Coverage).intersects (glyphs);
}
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
+ hb_zip (this+mark1Coverage, this+mark1Array)
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); })
;
hb_map_t klass_mapping;
Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping);
unsigned mark2_count = (this+mark2Array).rows;
auto mark2_iter =
+ hb_zip (this+mark2Coverage, hb_range (mark2_count))
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
;
hb_sorted_vector_t<unsigned> mark2_indexes;
for (const unsigned row : mark2_iter)
{
+ hb_range ((unsigned) classCount)
| hb_filter (klass_mapping)
| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
| hb_sink (mark2_indexes)
;
}
(this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ());
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+mark1Coverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
if (likely (mark1_index == NOT_COVERED)) return_trace (false);
/* Now we search backwards for a suitable mark glyph, stopping at the first non-mark glyph */
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
unsigned unsafe_from;
if (!skippy_iter.prev (&unsafe_from))
{
buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
return_trace (false);
}
if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]))
{
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
}
unsigned int j = skippy_iter.idx;
unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);
if (likely (id1 == id2))
{
if (id1 == 0) /* Marks belonging to the same base. */
goto good;
else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
goto good;
}
else
{
/* If ligature ids don't match, it may be that one of the marks
 * is itself a ligature, in which case match. */
if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
goto good;
}
/* Didn't match. */
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
good:
unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
if (mark2_index == NOT_COVERED)
{
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);
}
return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
hb_map_t klass_mapping;
Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);
if (!klass_mapping.get_population ()) return_trace (false);
out->classCount = klass_mapping.get_population ();
auto mark1_iter =
+ hb_zip (this+mark1Coverage, this+mark1Array)
| hb_filter (glyphset, hb_first)
;
hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ mark1_iter
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;
if (!out->mark1Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
return_trace (false);
out->mark1Array.serialize_subset (c, mark1Array, this,
(this+mark1Coverage).iter (),
&klass_mapping);
unsigned mark2count = (this+mark2Array).rows;
auto mark2_iter =
+ hb_zip (this+mark2Coverage, hb_range (mark2count))
| hb_filter (glyphset, hb_first)
;
new_coverage.reset ();
+ mark2_iter
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;
if (!out->mark2Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
return_trace (false);
hb_sorted_vector_t<unsigned> mark2_indexes;
for (const unsigned row : + mark2_iter
| hb_map (hb_second))
{
+ hb_range ((unsigned) classCount)
| hb_filter (klass_mapping)
| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
| hb_sink (mark2_indexes)
;
}
out->mark2Array.serialize_subset (c, mark2Array, this, mark2_iter.len (), mark2_indexes.iter ());
return_trace (true);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH */
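
The mark-to-mark matching logic in apply() above reduces to a small predicate over ligature ids and components. A standalone sketch with a few cases (not HarfBuzz source, not part of this commit; the function name and numbers are invented):

#include <cstdio>

static bool marks_may_attach (unsigned id1, unsigned comp1,   // attaching mark
                              unsigned id2, unsigned comp2)   // mark being attached to
{
  if (id1 == id2)
    return id1 == 0            // both marks belong to the same (non-ligature) base
        || comp1 == comp2;     // or to the same ligature component
  // different ligature ids: allow only if one of the marks is itself a ligature
  return (id1 > 0 && !comp1) || (id2 > 0 && !comp2);
}

int main ()
{
  printf ("%d\n", marks_may_attach (0, 0, 0, 0)); // 1: two marks on the same base
  printf ("%d\n", marks_may_attach (3, 1, 3, 2)); // 0: different components of one ligature
  printf ("%d\n", marks_may_attach (3, 1, 3, 1)); // 1: same component
  printf ("%d\n", marks_may_attach (5, 0, 2, 1)); // 1: first mark is itself a ligature
  return 0;
}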

View file

@@ -0,0 +1,52 @@
#ifndef OT_LAYOUT_GPOS_MARKRECORD_HH
#define OT_LAYOUT_GPOS_MARKRECORD_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct MarkRecord
{
friend struct MarkArray;
protected:
HBUINT16 klass; /* Class defined for this mark */
Offset16To<Anchor>
markAnchor; /* Offset to Anchor table--from
* beginning of MarkArray table */
public:
DEFINE_SIZE_STATIC (4);
unsigned get_class () const { return (unsigned) klass; }
bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
}
MarkRecord *subset (hb_subset_context_t *c,
const void *src_base,
const hb_map_t *klass_mapping) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->embed (this);
if (unlikely (!out)) return_trace (nullptr);
out->klass = klass_mapping->get (klass);
out->markAnchor.serialize_subset (c, markAnchor, src_base);
return_trace (out);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const void *src_base) const
{
(src_base+markAnchor).collect_variation_indices (c);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_MARKRECORD_HH */

View file

@@ -0,0 +1,38 @@
#ifndef OT_LAYOUT_GPOS_PAIRPOS_HH
#define OT_LAYOUT_GPOS_PAIRPOS_HH
#include "PairPosFormat1.hh"
#include "PairPosFormat2.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct PairPos
{
protected:
union {
HBUINT16 format; /* Format identifier */
PairPosFormat1 format1;
PairPosFormat2 format2;
} u;
public:
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
};
}
}
}
#endif // OT_LAYOUT_GPOS_PAIRPOS_HH
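
For the common kerning case, a PairPos lookup amounts to finding a per-pair ValueRecord and applying it to the first glyph's advance. A standalone sketch of that idea (not HarfBuzz source, not part of this commit; a std::map stands in for the PairSet binary search over PairValueRecords, and all glyph ids and values are invented):

#include <cstdio>
#include <map>
#include <utility>

int main ()
{
  // (first glyph, second glyph) -> x_advance delta, in font units
  std::map<std::pair<unsigned, unsigned>, int> kern = {
    {{36 /* 'A' */, 58 /* 'V' */}, -120},
    {{55 /* 'T' */, 82 /* 'o' */},  -90},
  };
  unsigned text[]   = {36, 58, 82};
  int     advances[] = {650, 640, 520};

  for (unsigned i = 0; i + 1 < 3; i++)
  {
    auto it = kern.find ({text[i], text[i + 1]});
    if (it != kern.end ())
      advances[i] += it->second;   // ValueRecord1 applied to the first glyph of the pair
  }
  printf ("%d %d %d\n", advances[0], advances[1], advances[2]); // 530 640 520
  return 0;
}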

View file

@@ -0,0 +1,420 @@
#ifndef OT_LAYOUT_GPOS_PAIRPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_PAIRPOSFORMAT1_HH
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct PairValueRecord
{
friend struct PairSet;
int cmp (hb_codepoint_t k) const
{ return secondGlyph.cmp (k); }
struct context_t
{
const void *base;
const ValueFormat *valueFormats;
const ValueFormat *newFormats;
unsigned len1; /* valueFormats[0].get_len() */
const hb_map_t *glyph_map;
const hb_map_t *layout_variation_idx_map;
};
bool subset (hb_subset_context_t *c,
context_t *closure) const
{
TRACE_SERIALIZE (this);
auto *s = c->serializer;
auto *out = s->start_embed (*this);
if (unlikely (!s->extend_min (out))) return_trace (false);
out->secondGlyph = (*closure->glyph_map)[secondGlyph];
closure->valueFormats[0].copy_values (s,
closure->newFormats[0],
closure->base, &values[0],
closure->layout_variation_idx_map);
closure->valueFormats[1].copy_values (s,
closure->newFormats[1],
closure->base,
&values[closure->len1],
closure->layout_variation_idx_map);
return_trace (true);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const ValueFormat *valueFormats,
const void *base) const
{
unsigned record1_len = valueFormats[0].get_len ();
unsigned record2_len = valueFormats[1].get_len ();
const hb_array_t<const Value> values_array = values.as_array (record1_len + record2_len);
if (valueFormats[0].has_device ())
valueFormats[0].collect_variation_indices (c, base, values_array.sub_array (0, record1_len));
if (valueFormats[1].has_device ())
valueFormats[1].collect_variation_indices (c, base, values_array.sub_array (record1_len, record2_len));
}
bool intersects (const hb_set_t& glyphset) const
{
return glyphset.has(secondGlyph);
}
const Value* get_values_1 () const
{
return &values[0];
}
const Value* get_values_2 (ValueFormat format1) const
{
return &values[format1.get_len ()];
}
protected:
HBGlyphID16 secondGlyph; /* GlyphID of second glyph in the
* pair--first glyph is listed in the
* Coverage table */
ValueRecord values; /* Positioning data for the first glyph
* followed by that for the second glyph */
public:
DEFINE_SIZE_ARRAY (2, values);
};
struct PairSet
{
friend struct PairPosFormat1;
bool intersects (const hb_set_t *glyphs,
const ValueFormat *valueFormats) const
{
unsigned int len1 = valueFormats[0].get_len ();
unsigned int len2 = valueFormats[1].get_len ();
unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
const PairValueRecord *record = &firstPairValueRecord;
unsigned int count = len;
for (unsigned int i = 0; i < count; i++)
{
if (glyphs->has (record->secondGlyph))
return true;
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}
return false;
}
void collect_glyphs (hb_collect_glyphs_context_t *c,
const ValueFormat *valueFormats) const
{
unsigned int len1 = valueFormats[0].get_len ();
unsigned int len2 = valueFormats[1].get_len ();
unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
const PairValueRecord *record = &firstPairValueRecord;
c->input->add_array (&record->secondGlyph, len, record_size);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const ValueFormat *valueFormats) const
{
unsigned len1 = valueFormats[0].get_len ();
unsigned len2 = valueFormats[1].get_len ();
unsigned record_size = HBUINT16::static_size * (1 + len1 + len2);
const PairValueRecord *record = &firstPairValueRecord;
unsigned count = len;
for (unsigned i = 0; i < count; i++)
{
if (c->glyph_set->has (record->secondGlyph))
{ record->collect_variation_indices (c, valueFormats, this); }
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}
}
bool apply (hb_ot_apply_context_t *c,
const ValueFormat *valueFormats,
unsigned int pos) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int len1 = valueFormats[0].get_len ();
unsigned int len2 = valueFormats[1].get_len ();
unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
&firstPairValueRecord,
len,
record_size);
if (record)
{
bool applied_first = valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
bool applied_second = valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
if (applied_first || applied_second)
buffer->unsafe_to_break (buffer->idx, pos + 1);
if (len2)
pos++;
buffer->idx = pos;
return_trace (true);
}
buffer->unsafe_to_concat (buffer->idx, pos + 1);
return_trace (false);
}
bool subset (hb_subset_context_t *c,
const ValueFormat valueFormats[2],
const ValueFormat newFormats[2]) const
{
TRACE_SUBSET (this);
auto snap = c->serializer->snapshot ();
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->len = 0;
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
unsigned len1 = valueFormats[0].get_len ();
unsigned len2 = valueFormats[1].get_len ();
unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
PairValueRecord::context_t context =
{
this,
valueFormats,
newFormats,
len1,
&glyph_map,
c->plan->layout_variation_idx_map
};
const PairValueRecord *record = &firstPairValueRecord;
unsigned count = len, num = 0;
for (unsigned i = 0; i < count; i++)
{
if (glyphset.has (record->secondGlyph)
&& record->subset (c, &context)) num++;
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}
out->len = num;
if (!num) c->serializer->revert (snap);
return_trace (num);
}
struct sanitize_closure_t
{
const ValueFormat *valueFormats;
unsigned int len1; /* valueFormats[0].get_len() */
unsigned int stride; /* 1 + len1 + len2 */
};
bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
{
TRACE_SANITIZE (this);
if (!(c->check_struct (this)
&& c->check_range (&firstPairValueRecord,
len,
HBUINT16::static_size,
closure->stride))) return_trace (false);
unsigned int count = len;
const PairValueRecord *record = &firstPairValueRecord;
return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
}
protected:
HBUINT16 len; /* Number of PairValueRecords */
PairValueRecord firstPairValueRecord;
/* Array of PairValueRecords--ordered
* by GlyphID of the second glyph */
public:
DEFINE_SIZE_MIN (2);
};
struct PairPosFormat1
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
Offset16To<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of subtable */
ValueFormat valueFormat[2]; /* [0] Defines the types of data in
* ValueRecord1--for the first glyph
* in the pair--may be zero (0) */
/* [1] Defines the types of data in
* ValueRecord2--for the second glyph
* in the pair--may be zero (0) */
Array16OfOffset16To<PairSet>
pairSet; /* Array of PairSet tables
* ordered by Coverage Index */
public:
DEFINE_SIZE_ARRAY (10, pairSet);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
unsigned int len1 = valueFormat[0].get_len ();
unsigned int len2 = valueFormat[1].get_len ();
PairSet::sanitize_closure_t closure =
{
valueFormat,
len1,
1 + len1 + len2
};
return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
}
bool intersects (const hb_set_t *glyphs) const
{
return
+ hb_zip (this+coverage, pairSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map ([glyphs, this] (const Offset16To<PairSet> &_)
{ return (this+_).intersects (glyphs, valueFormat); })
| hb_any
;
}
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
if ((!valueFormat[0].has_device ()) && (!valueFormat[1].has_device ())) return;
auto it =
+ hb_zip (this+coverage, pairSet)
| hb_filter (c->glyph_set, hb_first)
| hb_map (hb_second)
;
if (!it) return;
+ it
| hb_map (hb_add (this))
| hb_apply ([&] (const PairSet& _) { _.collect_variation_indices (c, valueFormat); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
unsigned int count = pairSet.len;
for (unsigned int i = 0; i < count; i++)
(this+pairSet[i]).collect_glyphs (c, valueFormat);
}
const Coverage &get_coverage () const { return this+coverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
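/* Advance a skipping iterator to the next glyph this lookup may pair with
 * (glyphs the lookup is set to ignore are skipped); bail out if there is none. */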
unsigned unsafe_to;
if (!skippy_iter.next (&unsafe_to))
{
buffer->unsafe_to_concat (buffer->idx, unsafe_to);
return_trace (false);
}
return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->valueFormat[0] = valueFormat[0];
out->valueFormat[1] = valueFormat[1];
if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
{
hb_pair_t<unsigned, unsigned> newFormats = compute_effective_value_formats (glyphset);
out->valueFormat[0] = newFormats.first;
out->valueFormat[1] = newFormats.second;
}
hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ hb_zip (this+coverage, pairSet)
| hb_filter (glyphset, hb_first)
| hb_filter ([this, c, out] (const Offset16To<PairSet>& _)
{
auto snap = c->serializer->snapshot ();
auto *o = out->pairSet.serialize_append (c->serializer);
if (unlikely (!o)) return false;
bool ret = o->serialize_subset (c, _, this, valueFormat, out->valueFormat);
if (!ret)
{
out->pairSet.pop ();
c->serializer->revert (snap);
}
return ret;
},
hb_second)
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;
out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
return_trace (bool (new_coverage));
}
hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset) const
{
unsigned len1 = valueFormat[0].get_len ();
unsigned len2 = valueFormat[1].get_len ();
unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
unsigned format1 = 0;
unsigned format2 = 0;
for (const Offset16To<PairSet>& _ :
+ hb_zip (this+coverage, pairSet) | hb_filter (glyphset, hb_first) | hb_map (hb_second))
{
const PairSet& set = (this + _);
const PairValueRecord *record = &set.firstPairValueRecord;
for (unsigned i = 0; i < set.len; i++)
{
if (record->intersects (glyphset))
{
format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 ());
format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]));
}
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}
}
return hb_pair (format1, format2);
}
};
}
}
}
#endif // OT_LAYOUT_GPOS_PAIRPOSFORMAT1_HH

View file

@ -0,0 +1,314 @@
#ifndef OT_LAYOUT_GPOS_PAIRPOSFORMAT2_HH
#define OT_LAYOUT_GPOS_PAIRPOSFORMAT2_HH
#include "ValueFormat.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct PairPosFormat2
{
protected:
HBUINT16 format; /* Format identifier--format = 2 */
Offset16To<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of subtable */
ValueFormat valueFormat1; /* ValueRecord definition--for the
* first glyph of the pair--may be zero
* (0) */
ValueFormat valueFormat2; /* ValueRecord definition--for the
* second glyph of the pair--may be
* zero (0) */
Offset16To<ClassDef>
classDef1; /* Offset to ClassDef table--from
* beginning of PairPos subtable--for
* the first glyph of the pair */
Offset16To<ClassDef>
classDef2; /* Offset to ClassDef table--from
* beginning of PairPos subtable--for
* the second glyph of the pair */
HBUINT16 class1Count; /* Number of classes in ClassDef1
* table--includes Class0 */
HBUINT16 class2Count; /* Number of classes in ClassDef2
* table--includes Class0 */
ValueRecord values; /* Matrix of value pairs:
* class1-major, class2-minor,
* Each entry has value1 and value2 */
public:
DEFINE_SIZE_ARRAY (16, values);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!(c->check_struct (this)
&& coverage.sanitize (c, this)
&& classDef1.sanitize (c, this)
&& classDef2.sanitize (c, this))) return_trace (false);
unsigned int len1 = valueFormat1.get_len ();
unsigned int len2 = valueFormat2.get_len ();
unsigned int stride = len1 + len2;
unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
return_trace (c->check_range ((const void *) values,
count,
record_size) &&
valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
}
bool intersects (const hb_set_t *glyphs) const
{
return (this+coverage).intersects (glyphs) &&
(this+classDef2).intersects (glyphs);
}
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
if (!intersects (c->glyph_set)) return;
if ((!valueFormat1.has_device ()) && (!valueFormat2.has_device ())) return;
hb_set_t klass1_glyphs, klass2_glyphs;
if (!(this+classDef1).collect_coverage (&klass1_glyphs)) return;
if (!(this+classDef2).collect_coverage (&klass2_glyphs)) return;
hb_set_t class1_set, class2_set;
for (const unsigned cp : + c->glyph_set->iter () | hb_filter (this + coverage))
{
if (!klass1_glyphs.has (cp)) class1_set.add (0);
else
{
unsigned klass1 = (this+classDef1).get (cp);
class1_set.add (klass1);
}
}
class2_set.add (0);
for (const unsigned cp : + c->glyph_set->iter () | hb_filter (klass2_glyphs))
{
unsigned klass2 = (this+classDef2).get (cp);
class2_set.add (klass2);
}
if (class1_set.is_empty ()
|| class2_set.is_empty ()
|| (class2_set.get_population() == 1 && class2_set.has(0)))
return;
unsigned len1 = valueFormat1.get_len ();
unsigned len2 = valueFormat2.get_len ();
const hb_array_t<const Value> values_array = values.as_array ((unsigned)class1Count * (unsigned) class2Count * (len1 + len2));
for (const unsigned class1_idx : class1_set.iter ())
{
for (const unsigned class2_idx : class2_set.iter ())
{
unsigned start_offset = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
if (valueFormat1.has_device ())
valueFormat1.collect_variation_indices (c, this, values_array.sub_array (start_offset, len1));
if (valueFormat2.has_device ())
valueFormat2.collect_variation_indices (c, this, values_array.sub_array (start_offset+len1, len2));
}
}
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
}
const Coverage &get_coverage () const { return this+coverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
unsigned unsafe_to;
if (!skippy_iter.next (&unsafe_to))
{
buffer->unsafe_to_concat (buffer->idx, unsafe_to);
return_trace (false);
}
unsigned int len1 = valueFormat1.get_len ();
unsigned int len2 = valueFormat2.get_len ();
unsigned int record_len = len1 + len2;
unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
if (unlikely (klass1 >= class1Count || klass2 >= class2Count))
{
buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
return_trace (false);
}
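/* values is a class1-major matrix: row klass1, column klass2, each cell holding
 * len1 values for the first glyph followed by len2 values for the second. */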
const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
bool applied_first = false, applied_second = false;
/* Isolate simple kerning and apply it half to each side.
 * Results in better cursor positioning / underline drawing.
*
* Disabled, because causes issues... :-(
* https://github.com/harfbuzz/harfbuzz/issues/3408
* https://github.com/harfbuzz/harfbuzz/pull/3235#issuecomment-1029814978
*/
#ifndef HB_SPLIT_KERN
if (0)
#endif
{
if (!len2)
{
const hb_direction_t dir = buffer->props.direction;
const bool horizontal = HB_DIRECTION_IS_HORIZONTAL (dir);
const bool backward = HB_DIRECTION_IS_BACKWARD (dir);
unsigned mask = horizontal ? ValueFormat::xAdvance : ValueFormat::yAdvance;
if (backward)
mask |= mask >> 2; /* Add eg. xPlacement in RTL. */
/* Add Devices. */
mask |= mask << 4;
if (valueFormat1 & ~mask)
goto bail;
/* Is simple kern. Apply value on an empty position slot,
* then split it between sides. */
hb_glyph_position_t pos{};
if (valueFormat1.apply_value (c, this, v, pos))
{
hb_position_t *src = &pos.x_advance;
hb_position_t *dst1 = &buffer->cur_pos().x_advance;
hb_position_t *dst2 = &buffer->pos[skippy_iter.idx].x_advance;
unsigned i = horizontal ? 0 : 1;
hb_position_t kern = src[i];
hb_position_t kern1 = kern >> 1;
hb_position_t kern2 = kern - kern1;
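/* kern1 + kern2 == kern: the advance adjustment is split so that each glyph
 * of the pair moves by roughly half. */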
if (!backward)
{
dst1[i] += kern1;
dst2[i] += kern2;
dst2[i + 2] += kern2;
}
else
{
dst1[i] += kern1;
dst1[i + 2] += src[i + 2] - kern2;
dst2[i] += kern2;
}
applied_first = applied_second = kern != 0;
goto success;
}
goto boring;
}
}
bail:
applied_first = valueFormat1.apply_value (c, this, v, buffer->cur_pos());
applied_second = valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
success:
if (applied_first || applied_second)
buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
else
boring:
buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
buffer->idx = skippy_iter.idx;
if (len2)
buffer->idx++;
return_trace (true);
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
hb_map_t klass1_map;
out->classDef1.serialize_subset (c, classDef1, this, &klass1_map, true, true, &(this + coverage));
out->class1Count = klass1_map.get_population ();
hb_map_t klass2_map;
out->classDef2.serialize_subset (c, classDef2, this, &klass2_map, true, false);
out->class2Count = klass2_map.get_population ();
unsigned len1 = valueFormat1.get_len ();
unsigned len2 = valueFormat2.get_len ();
hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat1, valueFormat2);
if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
newFormats = compute_effective_value_formats (klass1_map, klass2_map);
out->valueFormat1 = newFormats.first;
out->valueFormat2 = newFormats.second;
for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
{
for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
{
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
valueFormat1.copy_values (c->serializer, newFormats.first, this, &values[idx], c->plan->layout_variation_idx_map);
valueFormat2.copy_values (c->serializer, newFormats.second, this, &values[idx + len1], c->plan->layout_variation_idx_map);
}
}
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto it =
+ hb_iter (this+coverage)
| hb_filter (glyphset)
| hb_map_retains_sorting (glyph_map)
;
out->coverage.serialize_serialize (c->serializer, it);
return_trace (out->class1Count && out->class2Count && bool (it));
}
hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_map_t& klass1_map,
const hb_map_t& klass2_map) const
{
unsigned len1 = valueFormat1.get_len ();
unsigned len2 = valueFormat2.get_len ();
unsigned format1 = 0;
unsigned format2 = 0;
for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
{
for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
{
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
}
}
return hb_pair (format1, format2);
}
};
}
}
}
#endif // OT_LAYOUT_GPOS_PAIRPOSFORMAT2_HH

View file

@ -0,0 +1,79 @@
#ifndef OT_LAYOUT_GPOS_POSLOOKUP_HH
#define OT_LAYOUT_GPOS_POSLOOKUP_HH
#include "PosLookupSubTable.hh"
#include "../../../hb-ot-layout-common.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct PosLookup : Lookup
{
using SubTable = PosLookupSubTable;
const SubTable& get_subtable (unsigned int i) const
{ return Lookup::get_subtable<SubTable> (i); }
bool is_reverse () const
{
return false;
}
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
return_trace (dispatch (c));
}
bool intersects (const hb_set_t *glyphs) const
{
hb_intersects_context_t c (glyphs);
return dispatch (&c);
}
hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
{ return dispatch (c); }
hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
{
if (c->is_lookup_visited (this_index))
return hb_closure_lookups_context_t::default_return_value ();
c->set_lookup_visited (this_index);
if (!intersects (c->glyphs))
{
c->set_lookup_inactive (this_index);
return hb_closure_lookups_context_t::default_return_value ();
}
hb_closure_lookups_context_t::return_t ret = dispatch (c);
return ret;
}
template <typename set_t>
void collect_coverage (set_t *glyphs) const
{
hb_collect_coverage_context_t<set_t> c (glyphs);
dispatch (&c);
}
template <typename context_t>
static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{ return Lookup::dispatch<SubTable> (c, std::forward<Ts> (ds)...); }
bool subset (hb_subset_context_t *c) const
{ return Lookup::subset<SubTable> (c); }
bool sanitize (hb_sanitize_context_t *c) const
{ return Lookup::sanitize<SubTable> (c); }
};
}
}
}
#endif /* OT_LAYOUT_GPOS_POSLOOKUP_HH */

View file

@ -0,0 +1,79 @@
#ifndef OT_LAYOUT_GPOS_POSLOOKUPSUBTABLE_HH
#define OT_LAYOUT_GPOS_POSLOOKUPSUBTABLE_HH
#include "SinglePos.hh"
#include "PairPos.hh"
#include "CursivePos.hh"
#include "MarkBasePos.hh"
#include "MarkLigPos.hh"
#include "MarkMarkPos.hh"
#include "ContextPos.hh"
#include "ChainContextPos.hh"
#include "ExtensionPos.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct PosLookupSubTable
{
friend struct ::OT::Lookup;
friend struct PosLookup;
enum Type {
Single = 1,
Pair = 2,
Cursive = 3,
MarkBase = 4,
MarkLig = 5,
MarkMark = 6,
Context = 7,
ChainContext = 8,
Extension = 9
};
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
{
TRACE_DISPATCH (this, lookup_type);
switch (lookup_type) {
case Single: return_trace (u.single.dispatch (c, std::forward<Ts> (ds)...));
case Pair: return_trace (u.pair.dispatch (c, std::forward<Ts> (ds)...));
case Cursive: return_trace (u.cursive.dispatch (c, std::forward<Ts> (ds)...));
case MarkBase: return_trace (u.markBase.dispatch (c, std::forward<Ts> (ds)...));
case MarkLig: return_trace (u.markLig.dispatch (c, std::forward<Ts> (ds)...));
case MarkMark: return_trace (u.markMark.dispatch (c, std::forward<Ts> (ds)...));
case Context: return_trace (u.context.dispatch (c, std::forward<Ts> (ds)...));
case ChainContext: return_trace (u.chainContext.dispatch (c, std::forward<Ts> (ds)...));
case Extension: return_trace (u.extension.dispatch (c, std::forward<Ts> (ds)...));
default: return_trace (c->default_return_value ());
}
}
bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
{
hb_intersects_context_t c (glyphs);
return dispatch (&c, lookup_type);
}
protected:
union {
SinglePos single;
PairPos pair;
CursivePos cursive;
MarkBasePos markBase;
MarkLigPos markLig;
MarkMarkPos markMark;
ContextPos context;
ChainContextPos chainContext;
ExtensionPos extension;
} u;
public:
DEFINE_SIZE_MIN (0);
};
}
}
}
#endif /* OT_LAYOUT_GPOS_POSLOOKUPSUBTABLE_HH */

View file

@ -0,0 +1,98 @@
#ifndef OT_LAYOUT_GPOS_SINGLEPOS_HH
#define OT_LAYOUT_GPOS_SINGLEPOS_HH
#include "SinglePosFormat1.hh"
#include "SinglePosFormat2.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct SinglePos
{
protected:
union {
HBUINT16 format; /* Format identifier */
SinglePosFormat1 format1;
SinglePosFormat2 format2;
} u;
public:
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
unsigned get_format (Iterator glyph_val_iter_pairs)
{
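/* Format 1 stores a single ValueRecord shared by all covered glyphs, so it is
 * only usable when every entry's values match the first one; otherwise use
 * format 2. */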
hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);
for (const auto iter : glyph_val_iter_pairs)
for (const auto _ : hb_zip (iter.second, first_val_iter))
if (_.first != _.second)
return 2;
return 1;
}
template<typename Iterator,
typename SrcLookup,
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_serialize_context_t *c,
const SrcLookup* src,
Iterator glyph_val_iter_pairs,
const hb_map_t *layout_variation_idx_map)
{
if (unlikely (!c->extend_min (u.format))) return;
unsigned format = 2;
ValueFormat new_format = src->get_value_format ();
if (glyph_val_iter_pairs)
{
format = get_format (glyph_val_iter_pairs);
new_format = src->get_value_format ().get_effective_format (+ glyph_val_iter_pairs | hb_map (hb_second));
}
u.format = format;
switch (u.format) {
case 1: u.format1.serialize (c,
src,
glyph_val_iter_pairs,
new_format,
layout_variation_idx_map);
return;
case 2: u.format2.serialize (c,
src,
glyph_val_iter_pairs,
new_format,
layout_variation_idx_map);
return;
default:return;
}
}
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
};
template<typename Iterator, typename SrcLookup>
static void
SinglePos_serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
const hb_map_t *layout_variation_idx_map)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_map); }
}
}
}
#endif /* OT_LAYOUT_GPOS_SINGLEPOS_HH */

View file

@ -0,0 +1,124 @@
#ifndef OT_LAYOUT_GPOS_SINGLEPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_SINGLEPOSFORMAT1_HH
#include "Common.hh"
#include "ValueFormat.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct SinglePosFormat1
{
protected:
HBUINT16 format; /* Format identifier--format = 1 */
Offset16To<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of subtable */
ValueFormat valueFormat; /* Defines the types of data in the
* ValueRecord */
ValueRecord values; /* Defines positioning
* value(s)--applied to all glyphs in
* the Coverage table */
public:
DEFINE_SIZE_ARRAY (6, values);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
coverage.sanitize (c, this) &&
valueFormat.sanitize_value (c, this, values));
}
bool intersects (const hb_set_t *glyphs) const
{ return (this+coverage).intersects (glyphs); }
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
if (!valueFormat.has_device ()) return;
auto it =
+ hb_iter (this+coverage)
| hb_filter (c->glyph_set)
;
if (!it) return;
valueFormat.collect_variation_indices (c, this, values.as_array (valueFormat.get_len ()));
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{ if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
const Coverage &get_coverage () const { return this+coverage; }
ValueFormat get_value_format () const { return valueFormat; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
valueFormat.apply_value (c, this, values, buffer->cur_pos());
buffer->idx++;
return_trace (true);
}
template<typename Iterator,
typename SrcLookup,
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
ValueFormat newFormat,
const hb_map_t *layout_variation_idx_map)
{
if (unlikely (!c->extend_min (this))) return;
if (unlikely (!c->check_assign (valueFormat,
newFormat,
HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
for (const hb_array_t<const Value>& _ : + it | hb_map (hb_second))
{
src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map);
// Only serialize the first entry in the iterator, the rest are assumed to
// be the same.
break;
}
auto glyphs =
+ it
| hb_map_retains_sorting (hb_first)
;
coverage.serialize_serialize (c, glyphs);
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto it =
+ hb_iter (this+coverage)
| hb_filter (glyphset)
| hb_map_retains_sorting (glyph_map)
| hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
;
bool ret = bool (it);
SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
return_trace (ret);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_SINGLEPOSFORMAT1_HH */

View file

@ -0,0 +1,140 @@
#ifndef OT_LAYOUT_GPOS_SINGLEPOSFORMAT2_HH
#define OT_LAYOUT_GPOS_SINGLEPOSFORMAT2_HH
#include "Common.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
struct SinglePosFormat2
{
protected:
HBUINT16 format; /* Format identifier--format = 2 */
Offset16To<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of subtable */
ValueFormat valueFormat; /* Defines the types of data in the
* ValueRecord */
HBUINT16 valueCount; /* Number of ValueRecords */
ValueRecord values; /* Array of ValueRecords--positioning
* values applied to glyphs */
public:
DEFINE_SIZE_ARRAY (8, values);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
coverage.sanitize (c, this) &&
valueFormat.sanitize_values (c, this, values, valueCount));
}
bool intersects (const hb_set_t *glyphs) const
{ return (this+coverage).intersects (glyphs); }
void closure_lookups (hb_closure_lookups_context_t *c) const {}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{
if (!valueFormat.has_device ()) return;
auto it =
+ hb_zip (this+coverage, hb_range ((unsigned) valueCount))
| hb_filter (c->glyph_set, hb_first)
;
if (!it) return;
unsigned sub_length = valueFormat.get_len ();
const hb_array_t<const Value> values_array = values.as_array (valueCount * sub_length);
for (unsigned i : + it
| hb_map (hb_second))
valueFormat.collect_variation_indices (c, this, values_array.sub_array (i * sub_length, sub_length));
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{ if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
const Coverage &get_coverage () const { return this+coverage; }
ValueFormat get_value_format () const { return valueFormat; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
if (likely (index >= valueCount)) return_trace (false);
valueFormat.apply_value (c, this,
&values[index * valueFormat.get_len ()],
buffer->cur_pos());
buffer->idx++;
return_trace (true);
}
template<typename Iterator,
typename SrcLookup,
hb_requires (hb_is_iterator (Iterator))>
void serialize (hb_serialize_context_t *c,
const SrcLookup *src,
Iterator it,
ValueFormat newFormat,
const hb_map_t *layout_variation_idx_map)
{
auto out = c->extend_min (this);
if (unlikely (!out)) return;
if (unlikely (!c->check_assign (valueFormat, newFormat, HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
if (unlikely (!c->check_assign (valueCount, it.len (), HB_SERIALIZE_ERROR_ARRAY_OVERFLOW))) return;
+ it
| hb_map (hb_second)
| hb_apply ([&] (hb_array_t<const Value> _)
{ src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map); })
;
auto glyphs =
+ it
| hb_map_retains_sorting (hb_first)
;
coverage.serialize_serialize (c, glyphs);
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
unsigned sub_length = valueFormat.get_len ();
auto values_array = values.as_array (valueCount * sub_length);
auto it =
+ hb_zip (this+coverage, hb_range ((unsigned) valueCount))
| hb_filter (glyphset, hb_first)
| hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
{
return hb_pair (glyph_map[_.first],
values_array.sub_array (_.second * sub_length,
sub_length));
})
;
bool ret = bool (it);
SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
return_trace (ret);
}
};
}
}
}
#endif /* OT_LAYOUT_GPOS_SINGLEPOSFORMAT2_HH */

View file

@ -0,0 +1,329 @@
#ifndef OT_LAYOUT_GPOS_VALUEFORMAT_HH
#define OT_LAYOUT_GPOS_VALUEFORMAT_HH
#include "../../../hb-ot-layout-gsubgpos.hh"
namespace OT {
namespace Layout {
namespace GPOS_impl {
typedef HBUINT16 Value;
typedef UnsizedArrayOf<Value> ValueRecord;
struct ValueFormat : HBUINT16
{
enum Flags {
xPlacement = 0x0001u, /* Includes horizontal adjustment for placement */
yPlacement = 0x0002u, /* Includes vertical adjustment for placement */
xAdvance = 0x0004u, /* Includes horizontal adjustment for advance */
yAdvance = 0x0008u, /* Includes vertical adjustment for advance */
xPlaDevice = 0x0010u, /* Includes horizontal Device table for placement */
yPlaDevice = 0x0020u, /* Includes vertical Device table for placement */
xAdvDevice = 0x0040u, /* Includes horizontal Device table for advance */
yAdvDevice = 0x0080u, /* Includes vertical Device table for advance */
ignored = 0x0F00u, /* Was used in TrueType Open for MM fonts */
reserved = 0xF000u, /* For future use */
devices = 0x00F0u /* Mask for having any Device table */
};
/* All fields are optional. Only those present advance the value pointer. */
#if 0
HBINT16 xPlacement; /* Horizontal adjustment for
* placement--in design units */
HBINT16 yPlacement; /* Vertical adjustment for
* placement--in design units */
HBINT16 xAdvance; /* Horizontal adjustment for
* advance--in design units (only used
* for horizontal writing) */
HBINT16 yAdvance; /* Vertical adjustment for advance--in
* design units (only used for vertical
* writing) */
Offset16To<Device> xPlaDevice; /* Offset to Device table for
* horizontal placement--measured from
* beginning of PosTable (may be NULL) */
Offset16To<Device> yPlaDevice; /* Offset to Device table for vertical
* placement--measured from beginning
* of PosTable (may be NULL) */
Offset16To<Device> xAdvDevice; /* Offset to Device table for
* horizontal advance--measured from
* beginning of PosTable (may be NULL) */
Offset16To<Device> yAdvDevice; /* Offset to Device table for vertical
* advance--measured from beginning of
* PosTable (may be NULL) */
#endif
IntType& operator = (uint16_t i) { v = i; return *this; }
unsigned int get_len () const { return hb_popcount ((unsigned int) *this); }
unsigned int get_size () const { return get_len () * Value::static_size; }
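/* The format word is a bit mask: each set bit contributes one 16-bit Value
 * slot, e.g. xPlacement|xAdvance (0x0005) gives a two-value record. */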
bool apply_value (hb_ot_apply_context_t *c,
const void *base,
const Value *values,
hb_glyph_position_t &glyph_pos) const
{
bool ret = false;
unsigned int format = *this;
if (!format) return ret;
hb_font_t *font = c->font;
bool horizontal =
#ifndef HB_NO_VERTICAL
HB_DIRECTION_IS_HORIZONTAL (c->direction)
#else
true
#endif
;
if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++, &ret));
if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++, &ret));
if (format & xAdvance) {
if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
values++;
}
/* y_advance values grow downward but font-space grows upward, hence negation */
if (format & yAdvance) {
if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
values++;
}
if (!has_device ()) return ret;
bool use_x_device = font->x_ppem || font->num_coords;
bool use_y_device = font->y_ppem || font->num_coords;
if (!use_x_device && !use_y_device) return ret;
const VariationStore &store = c->var_store;
auto *cache = c->var_store_cache;
/* pixel -> fractional pixel */
if (format & xPlaDevice) {
if (use_x_device) glyph_pos.x_offset += (base + get_device (values, &ret)).get_x_delta (font, store, cache);
values++;
}
if (format & yPlaDevice) {
if (use_y_device) glyph_pos.y_offset += (base + get_device (values, &ret)).get_y_delta (font, store, cache);
values++;
}
if (format & xAdvDevice) {
if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store, cache);
values++;
}
if (format & yAdvDevice) {
/* y_advance values grow downward but font-space grows upward, hence negation */
if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store, cache);
values++;
}
return ret;
}
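/* Compute which flags actually carry non-zero data, so the subsetter can drop
 * all-zero columns and shrink the serialized ValueRecords. */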
unsigned int get_effective_format (const Value *values) const
{
unsigned int format = *this;
for (unsigned flag = xPlacement; flag <= yAdvDevice; flag = flag << 1) {
if (format & flag) should_drop (*values++, (Flags) flag, &format);
}
return format;
}
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
unsigned int get_effective_format (Iterator it) const {
unsigned int new_format = 0;
for (const hb_array_t<const Value>& values : it)
new_format = new_format | get_effective_format (&values);
return new_format;
}
void copy_values (hb_serialize_context_t *c,
unsigned int new_format,
const void *base,
const Value *values,
const hb_map_t *layout_variation_idx_map) const
{
unsigned int format = *this;
if (!format) return;
if (format & xPlacement) copy_value (c, new_format, xPlacement, *values++);
if (format & yPlacement) copy_value (c, new_format, yPlacement, *values++);
if (format & xAdvance) copy_value (c, new_format, xAdvance, *values++);
if (format & yAdvance) copy_value (c, new_format, yAdvance, *values++);
if (format & xPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
if (format & yPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
if (format & xAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
if (format & yAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
}
void copy_value (hb_serialize_context_t *c,
unsigned int new_format,
Flags flag,
Value value) const
{
// Filter by new format.
if (!(new_format & flag)) return;
c->copy (value);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c,
const void *base,
const hb_array_t<const Value>& values) const
{
unsigned format = *this;
unsigned i = 0;
if (format & xPlacement) i++;
if (format & yPlacement) i++;
if (format & xAdvance) i++;
if (format & yAdvance) i++;
if (format & xPlaDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
i++;
}
if (format & ValueFormat::yPlaDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
i++;
}
if (format & ValueFormat::xAdvDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
i++;
}
if (format & ValueFormat::yAdvDevice)
{
(base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
i++;
}
}
private:
bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
{
unsigned int format = *this;
if (format & xPlacement) values++;
if (format & yPlacement) values++;
if (format & xAdvance) values++;
if (format & yAdvance) values++;
if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
return true;
}
static inline Offset16To<Device>& get_device (Value* value)
{
return *static_cast<Offset16To<Device> *> (value);
}
static inline const Offset16To<Device>& get_device (const Value* value, bool *worked=nullptr)
{
if (worked) *worked |= bool (*value);
return *static_cast<const Offset16To<Device> *> (value);
}
bool copy_device (hb_serialize_context_t *c, const void *base,
const Value *src_value, const hb_map_t *layout_variation_idx_map) const
{
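/* Copy the 16-bit offset slot. A zero offset means no Device table; otherwise
 * serialize the referenced Device table as its own object and link the copied
 * offset to it. */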
Value *dst_value = c->copy (*src_value);
if (!dst_value) return false;
if (*dst_value == 0) return true;
*dst_value = 0;
c->push ();
if ((base + get_device (src_value)).copy (c, layout_variation_idx_map))
{
c->add_link (*dst_value, c->pop_pack ());
return true;
}
else
{
c->pop_discard ();
return false;
}
}
static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
{
if (worked) *worked |= bool (*value);
return *reinterpret_cast<const HBINT16 *> (value);
}
public:
bool has_device () const
{
unsigned int format = *this;
return (format & devices) != 0;
}
bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
{
TRACE_SANITIZE (this);
return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
}
bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
{
TRACE_SANITIZE (this);
unsigned int len = get_len ();
if (!c->check_range (values, count, get_size ())) return_trace (false);
if (!has_device ()) return_trace (true);
for (unsigned int i = 0; i < count; i++) {
if (!sanitize_value_devices (c, base, values))
return_trace (false);
values += len;
}
return_trace (true);
}
/* Just sanitize referenced Device tables. Doesn't check the values themselves. */
bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
{
TRACE_SANITIZE (this);
if (!has_device ()) return_trace (true);
for (unsigned int i = 0; i < count; i++) {
if (!sanitize_value_devices (c, base, values))
return_trace (false);
values += stride;
}
return_trace (true);
}
private:
void should_drop (Value value, Flags flag, unsigned int* format) const
{
if (value) return;
*format = *format & ~flag;
}
};
}
}
}
#endif // #ifndef OT_LAYOUT_GPOS_VALUEFORMAT_HH

View file

@ -19,6 +19,8 @@ namespace GSUB {
struct GSUB : GSUBGPOS
{
using Lookup = SubstLookup;
static constexpr hb_tag_t tableTag = HB_OT_TAG_GSUB;
const SubstLookup& get_lookup (unsigned int i) const

View file

@ -10,7 +10,7 @@ namespace GSUB {
struct SubstLookup : Lookup
{
typedef SubstLookupSubTable SubTable;
using SubTable = SubstLookupSubTable;
bool sanitize (hb_sanitize_context_t *c) const
{ return Lookup::sanitize<SubTable> (c); }
@ -73,8 +73,6 @@ struct SubstLookup : Lookup
return hb_closure_lookups_context_t::default_return_value ();
}
c->set_recurse_func (dispatch_closure_lookups_recurse_func);
hb_closure_lookups_context_t::return_t ret = dispatch (c);
return ret;
}
@ -100,8 +98,6 @@ struct SubstLookup : Lookup
return dispatch (c);
}
static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
bool serialize_single (hb_serialize_context_t *c,
uint32_t lookup_props,
hb_sorted_array_t<const HBGlyphID16> glyphs,
@ -206,8 +202,6 @@ struct SubstLookup : Lookup
return ret;
}
HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned lookup_index);
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{ return Lookup::dispatch<SubTable> (c, std::forward<Ts> (ds)...); }

View file

@ -0,0 +1,258 @@
#ifndef OT_GLYF_COMPOSITEGLYPH_HH
#define OT_GLYF_COMPOSITEGLYPH_HH
#include "../../hb-open-type.hh"
namespace OT {
namespace glyf_impl {
struct CompositeGlyphRecord
{
protected:
enum composite_glyph_flag_t
{
ARG_1_AND_2_ARE_WORDS = 0x0001,
ARGS_ARE_XY_VALUES = 0x0002,
ROUND_XY_TO_GRID = 0x0004,
WE_HAVE_A_SCALE = 0x0008,
MORE_COMPONENTS = 0x0020,
WE_HAVE_AN_X_AND_Y_SCALE = 0x0040,
WE_HAVE_A_TWO_BY_TWO = 0x0080,
WE_HAVE_INSTRUCTIONS = 0x0100,
USE_MY_METRICS = 0x0200,
OVERLAP_COMPOUND = 0x0400,
SCALED_COMPONENT_OFFSET = 0x0800,
UNSCALED_COMPONENT_OFFSET = 0x1000
};
public:
unsigned int get_size () const
{
unsigned int size = min_size;
/* arg1 and 2 are int16 */
if (flags & ARG_1_AND_2_ARE_WORDS) size += 4;
/* arg1 and 2 are int8 */
else size += 2;
/* One x 16 bit (scale) */
if (flags & WE_HAVE_A_SCALE) size += 2;
/* Two x 16 bit (xscale, yscale) */
else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) size += 4;
/* Four x 16 bit (xscale, scale01, scale10, yscale) */
else if (flags & WE_HAVE_A_TWO_BY_TWO) size += 8;
return size;
}
void drop_instructions_flag () { flags = (uint16_t) flags & ~WE_HAVE_INSTRUCTIONS; }
void set_overlaps_flag ()
{
flags = (uint16_t) flags | OVERLAP_COMPOUND;
}
bool has_instructions () const { return flags & WE_HAVE_INSTRUCTIONS; }
bool has_more () const { return flags & MORE_COMPONENTS; }
bool is_use_my_metrics () const { return flags & USE_MY_METRICS; }
bool is_anchored () const { return !(flags & ARGS_ARE_XY_VALUES); }
void get_anchor_points (unsigned int &point1, unsigned int &point2) const
{
const HBUINT8 *p = &StructAfter<const HBUINT8> (glyphIndex);
if (flags & ARG_1_AND_2_ARE_WORDS)
{
point1 = ((const HBUINT16 *) p)[0];
point2 = ((const HBUINT16 *) p)[1];
}
else
{
point1 = p[0];
point2 = p[1];
}
}
void transform_points (contour_point_vector_t &points) const
{
float matrix[4];
contour_point_t trans;
if (get_transformation (matrix, trans))
{
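/* With SCALED_COMPONENT_OFFSET the component offset is transformed by the 2x2
 * matrix as well; otherwise the offset is applied untransformed. */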
if (scaled_offsets ())
{
points.translate (trans);
points.transform (matrix);
}
else
{
points.transform (matrix);
points.translate (trans);
}
}
}
protected:
bool scaled_offsets () const
{ return (flags & (SCALED_COMPONENT_OFFSET | UNSCALED_COMPONENT_OFFSET)) == SCALED_COMPONENT_OFFSET; }
bool get_transformation (float (&matrix)[4], contour_point_t &trans) const
{
matrix[0] = matrix[3] = 1.f;
matrix[1] = matrix[2] = 0.f;
int tx, ty;
const HBINT8 *p = &StructAfter<const HBINT8> (glyphIndex);
if (flags & ARG_1_AND_2_ARE_WORDS)
{
tx = *(const HBINT16 *) p;
p += HBINT16::static_size;
ty = *(const HBINT16 *) p;
p += HBINT16::static_size;
}
else
{
tx = *p++;
ty = *p++;
}
if (is_anchored ()) tx = ty = 0;
trans.init ((float) tx, (float) ty);
{
const F2DOT14 *points = (const F2DOT14 *) p;
if (flags & WE_HAVE_A_SCALE)
{
matrix[0] = matrix[3] = points[0].to_float ();
return true;
}
else if (flags & WE_HAVE_AN_X_AND_Y_SCALE)
{
matrix[0] = points[0].to_float ();
matrix[3] = points[1].to_float ();
return true;
}
else if (flags & WE_HAVE_A_TWO_BY_TWO)
{
matrix[0] = points[0].to_float ();
matrix[1] = points[1].to_float ();
matrix[2] = points[2].to_float ();
matrix[3] = points[3].to_float ();
return true;
}
}
return tx || ty;
}
public:
HBUINT16 flags;
HBGlyphID16 glyphIndex;
public:
DEFINE_SIZE_MIN (4);
};
struct composite_iter_t : hb_iter_with_fallback_t<composite_iter_t, const CompositeGlyphRecord &>
{
typedef const CompositeGlyphRecord *__item_t__;
composite_iter_t (hb_bytes_t glyph_, __item_t__ current_) :
glyph (glyph_), current (nullptr), current_size (0)
{
set_current (current_);
}
composite_iter_t () : glyph (hb_bytes_t ()), current (nullptr), current_size (0) {}
item_t __item__ () const { return *current; }
bool __more__ () const { return current; }
void __next__ ()
{
if (!current->has_more ()) { current = nullptr; return; }
set_current (&StructAtOffset<CompositeGlyphRecord> (current, current_size));
}
composite_iter_t __end__ () const { return composite_iter_t (); }
bool operator != (const composite_iter_t& o) const
{ return current != o.current; }
void set_current (__item_t__ current_)
{
if (!glyph.check_range (current_, CompositeGlyphRecord::min_size))
{
current = nullptr;
current_size = 0;
return;
}
unsigned size = current_->get_size ();
if (!glyph.check_range (current_, size))
{
current = nullptr;
current_size = 0;
return;
}
current = current_;
current_size = size;
}
private:
hb_bytes_t glyph;
__item_t__ current;
unsigned current_size;
};
struct CompositeGlyph
{
const GlyphHeader &header;
hb_bytes_t bytes;
CompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
header (header_), bytes (bytes_) {}
composite_iter_t iter () const
{ return composite_iter_t (bytes, &StructAfter<CompositeGlyphRecord, GlyphHeader> (header)); }
unsigned int instructions_length (hb_bytes_t bytes) const
{
unsigned int start = bytes.length;
unsigned int end = bytes.length;
const CompositeGlyphRecord *last = nullptr;
for (auto &item : iter ())
last = &item;
if (unlikely (!last)) return 0;
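/* Instructions, when present, start right after the last component record and
 * run to the end of the glyph data. */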
if (last->has_instructions ())
start = (char *) last - &bytes + last->get_size ();
if (unlikely (start > end)) return 0;
return end - start;
}
/* Trimming for composites is not implemented.
 * If hints are removed, the trimming falls out of that. */
const hb_bytes_t trim_padding () const { return bytes; }
void drop_hints ()
{
for (const auto &_ : iter ())
const_cast<CompositeGlyphRecord &> (_).drop_instructions_flag ();
}
/* Chop instructions off the end */
void drop_hints_bytes (hb_bytes_t &dest_start) const
{ dest_start = bytes.sub_array (0, bytes.length - instructions_length (bytes)); }
void set_overlaps_flag ()
{
CompositeGlyphRecord& glyph_chain = const_cast<CompositeGlyphRecord &> (
StructAfter<CompositeGlyphRecord, GlyphHeader> (header));
if (!bytes.check_range(&glyph_chain, CompositeGlyphRecord::min_size))
return;
glyph_chain.set_overlaps_flag ();
}
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_COMPOSITEGLYPH_HH */

thirdparty/harfbuzz/src/OT/glyf/Glyph.hh
View file

@ -0,0 +1,233 @@
#ifndef OT_GLYF_GLYPH_HH
#define OT_GLYF_GLYPH_HH
#include "../../hb-open-type.hh"
#include "GlyphHeader.hh"
#include "SimpleGlyph.hh"
#include "CompositeGlyph.hh"
namespace OT {
struct glyf_accelerator_t;
namespace glyf_impl {
enum phantom_point_index_t
{
PHANTOM_LEFT = 0,
PHANTOM_RIGHT = 1,
PHANTOM_TOP = 2,
PHANTOM_BOTTOM = 3,
PHANTOM_COUNT = 4
};
struct Glyph
{
enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE };
public:
composite_iter_t get_composite_iterator () const
{
if (type != COMPOSITE) return composite_iter_t ();
return CompositeGlyph (*header, bytes).iter ();
}
const hb_bytes_t trim_padding () const
{
switch (type) {
case COMPOSITE: return CompositeGlyph (*header, bytes).trim_padding ();
case SIMPLE: return SimpleGlyph (*header, bytes).trim_padding ();
default: return bytes;
}
}
void drop_hints ()
{
switch (type) {
case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints (); return;
case SIMPLE: SimpleGlyph (*header, bytes).drop_hints (); return;
default: return;
}
}
void set_overlaps_flag ()
{
switch (type) {
case COMPOSITE: CompositeGlyph (*header, bytes).set_overlaps_flag (); return;
case SIMPLE: SimpleGlyph (*header, bytes).set_overlaps_flag (); return;
default: return;
}
}
void drop_hints_bytes (hb_bytes_t &dest_start, hb_bytes_t &dest_end) const
{
switch (type) {
case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints_bytes (dest_start); return;
case SIMPLE: SimpleGlyph (*header, bytes).drop_hints_bytes (dest_start, dest_end); return;
default: return;
}
}
/* Note: Recursively calls itself.
* all_points includes phantom points
*/
template <typename accelerator_t>
bool get_points (hb_font_t *font, const accelerator_t &glyf_accelerator,
contour_point_vector_t &all_points /* OUT */,
bool phantom_only = false,
unsigned int depth = 0) const
{
if (unlikely (depth > HB_MAX_NESTING_LEVEL)) return false;
contour_point_vector_t stack_points;
bool inplace = type == SIMPLE && all_points.length == 0;
/* Load into all_points if it's empty, as an optimization. */
contour_point_vector_t &points = inplace ? all_points : stack_points;
switch (type) {
case COMPOSITE:
{
/* pseudo component points for each component in composite glyph */
unsigned num_points = hb_len (CompositeGlyph (*header, bytes).iter ());
if (unlikely (!points.resize (num_points))) return false;
break;
}
case SIMPLE:
if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (points, phantom_only)))
return false;
break;
}
/* Init phantom points */
if (unlikely (!points.resize (points.length + PHANTOM_COUNT))) return false;
hb_array_t<contour_point_t> phantoms = points.sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
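/* The four phantom points carry the glyph metrics: left/right give the
 * horizontal origin and advance, top/bottom the vertical ones; gvar deltas
 * applied below may adjust them. */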
{
int h_delta = (int) header->xMin -
glyf_accelerator.hmtx->get_side_bearing (gid);
int v_orig = (int) header->yMax +
#ifndef HB_NO_VERTICAL
glyf_accelerator.vmtx->get_side_bearing (gid)
#else
0
#endif
;
unsigned h_adv = glyf_accelerator.hmtx->get_advance (gid);
unsigned v_adv =
#ifndef HB_NO_VERTICAL
glyf_accelerator.vmtx->get_advance (gid)
#else
- font->face->get_upem ()
#endif
;
phantoms[PHANTOM_LEFT].x = h_delta;
phantoms[PHANTOM_RIGHT].x = h_adv + h_delta;
phantoms[PHANTOM_TOP].y = v_orig;
phantoms[PHANTOM_BOTTOM].y = v_orig - (int) v_adv;
}
#ifndef HB_NO_VAR
glyf_accelerator.gvar->apply_deltas_to_points (gid, font, points.as_array ());
#endif
switch (type) {
case SIMPLE:
if (!inplace)
all_points.extend (points.as_array ());
break;
case COMPOSITE:
{
contour_point_vector_t comp_points;
unsigned int comp_index = 0;
for (auto &item : get_composite_iterator ())
{
comp_points.reset ();
if (unlikely (!glyf_accelerator.glyph_for_gid (item.glyphIndex)
.get_points (font, glyf_accelerator, comp_points,
phantom_only, depth + 1)))
return false;
/* Copy phantom points from component if USE_MY_METRICS flag set */
if (item.is_use_my_metrics ())
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
/* Apply component transformation & translation */
item.transform_points (comp_points);
/* Apply translation from gvar */
comp_points.translate (points[comp_index]);
if (item.is_anchored ())
{
unsigned int p1, p2;
item.get_anchor_points (p1, p2);
if (likely (p1 < all_points.length && p2 < comp_points.length))
{
contour_point_t delta;
delta.init (all_points[p1].x - comp_points[p2].x,
all_points[p1].y - comp_points[p2].y);
comp_points.translate (delta);
}
}
all_points.extend (comp_points.sub_array (0, comp_points.length - PHANTOM_COUNT));
comp_index++;
}
all_points.extend (phantoms);
} break;
default:
all_points.extend (phantoms);
}
if (depth == 0) /* Apply at top level */
{
/* Undocumented rasterizer behavior:
* Shift points horizontally by the updated left side bearing
*/
contour_point_t delta;
delta.init (-phantoms[PHANTOM_LEFT].x, 0.f);
if (delta.x) all_points.translate (delta);
}
return !all_points.in_error ();
}
bool get_extents (hb_font_t *font, const glyf_accelerator_t &glyf_accelerator,
hb_glyph_extents_t *extents) const
{
if (type == EMPTY) return true; /* Empty glyph; zero extents. */
return header->get_extents (font, glyf_accelerator, gid, extents);
}
hb_bytes_t get_bytes () const { return bytes; }
Glyph (hb_bytes_t bytes_ = hb_bytes_t (),
hb_codepoint_t gid_ = (hb_codepoint_t) -1) : bytes (bytes_),
header (bytes.as<GlyphHeader> ()),
gid (gid_)
{
int num_contours = header->numberOfContours;
if (unlikely (num_contours == 0)) type = EMPTY;
else if (num_contours > 0) type = SIMPLE;
else type = COMPOSITE; /* negative numbers */
}
protected:
hb_bytes_t bytes;
const GlyphHeader *header;
hb_codepoint_t gid;
unsigned type;
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_GLYPH_HH */

View file

@ -0,0 +1,48 @@
#ifndef OT_GLYF_GLYPHHEADER_HH
#define OT_GLYF_GLYPHHEADER_HH
#include "../../hb-open-type.hh"
namespace OT {
namespace glyf_impl {
struct GlyphHeader
{
bool has_data () const { return numberOfContours; }
template <typename accelerator_t>
bool get_extents (hb_font_t *font, const accelerator_t &glyf_accelerator,
hb_codepoint_t gid, hb_glyph_extents_t *extents) const
{
/* Undocumented rasterizer behavior: shift glyph to the left by (lsb - xMin), i.e., xMin = lsb */
/* extents->x_bearing = hb_min (glyph_header.xMin, glyph_header.xMax); */
extents->x_bearing = font->em_scale_x (glyf_accelerator.hmtx->get_side_bearing (gid));
extents->y_bearing = font->em_scale_y (hb_max (yMin, yMax));
extents->width = font->em_scale_x (hb_max (xMin, xMax) - hb_min (xMin, xMax));
extents->height = font->em_scale_y (hb_min (yMin, yMax) - hb_max (yMin, yMax));
return true;
}
HBINT16 numberOfContours;
/* If the number of contours is
* greater than or equal to zero,
* this is a simple glyph; if negative,
* this is a composite glyph. */
FWORD xMin; /* Minimum x for coordinate data. */
FWORD yMin; /* Minimum y for coordinate data. */
FWORD xMax; /* Maximum x for coordinate data. */
FWORD yMax; /* Maximum y for coordinate data. */
public:
DEFINE_SIZE_STATIC (10);
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_GLYPHHEADER_HH */

View file

@ -0,0 +1,216 @@
#ifndef OT_GLYF_SIMPLEGLYPH_HH
#define OT_GLYF_SIMPLEGLYPH_HH
#include "../../hb-open-type.hh"
namespace OT {
namespace glyf_impl {
struct SimpleGlyph
{
enum simple_glyph_flag_t
{
FLAG_ON_CURVE = 0x01,
FLAG_X_SHORT = 0x02,
FLAG_Y_SHORT = 0x04,
FLAG_REPEAT = 0x08,
FLAG_X_SAME = 0x10,
FLAG_Y_SAME = 0x20,
FLAG_OVERLAP_SIMPLE = 0x40,
FLAG_RESERVED2 = 0x80
};
const GlyphHeader &header;
hb_bytes_t bytes;
SimpleGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
header (header_), bytes (bytes_) {}
unsigned int instruction_len_offset () const
{ return GlyphHeader::static_size + 2 * header.numberOfContours; }
unsigned int length (unsigned int instruction_len) const
{ return instruction_len_offset () + 2 + instruction_len; }
unsigned int instructions_length () const
{
unsigned int instruction_length_offset = instruction_len_offset ();
if (unlikely (instruction_length_offset + 2 > bytes.length)) return 0;
const HBUINT16 &instructionLength = StructAtOffset<HBUINT16> (&bytes, instruction_length_offset);
/* Out of bounds of the current glyph */
if (unlikely (length (instructionLength) > bytes.length)) return 0;
return instructionLength;
}
const hb_bytes_t trim_padding () const
{
/* based on FontTools _g_l_y_f.py::trim */
const uint8_t *glyph = (uint8_t*) bytes.arrayZ;
const uint8_t *glyph_end = glyph + bytes.length;
/* simple glyph w/contours, possibly trimmable */
glyph += instruction_len_offset ();
if (unlikely (glyph + 2 >= glyph_end)) return hb_bytes_t ();
unsigned int num_coordinates = StructAtOffset<HBUINT16> (glyph - 2, 0) + 1;
unsigned int num_instructions = StructAtOffset<HBUINT16> (glyph, 0);
glyph += 2 + num_instructions;
unsigned int coord_bytes = 0;
unsigned int coords_with_flags = 0;
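/* Walk the flags array and count how many x/y coordinate bytes the flags imply;
 * anything past the last coordinate byte is padding that can be trimmed. */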
while (glyph < glyph_end)
{
uint8_t flag = *glyph;
glyph++;
unsigned int repeat = 1;
if (flag & FLAG_REPEAT)
{
if (unlikely (glyph >= glyph_end)) return hb_bytes_t ();
repeat = *glyph + 1;
glyph++;
}
unsigned int xBytes, yBytes;
xBytes = yBytes = 0;
if (flag & FLAG_X_SHORT) xBytes = 1;
else if ((flag & FLAG_X_SAME) == 0) xBytes = 2;
if (flag & FLAG_Y_SHORT) yBytes = 1;
else if ((flag & FLAG_Y_SAME) == 0) yBytes = 2;
coord_bytes += (xBytes + yBytes) * repeat;
coords_with_flags += repeat;
if (coords_with_flags >= num_coordinates) break;
}
if (unlikely (coords_with_flags != num_coordinates)) return hb_bytes_t ();
return bytes.sub_array (0, bytes.length + coord_bytes - (glyph_end - glyph));
}
/* zero instruction length */
void drop_hints ()
{
GlyphHeader &glyph_header = const_cast<GlyphHeader &> (header);
(HBUINT16 &) StructAtOffset<HBUINT16> (&glyph_header, instruction_len_offset ()) = 0;
}
void drop_hints_bytes (hb_bytes_t &dest_start, hb_bytes_t &dest_end) const
{
unsigned int instructions_len = instructions_length ();
unsigned int glyph_length = length (instructions_len);
dest_start = bytes.sub_array (0, glyph_length - instructions_len);
dest_end = bytes.sub_array (glyph_length, bytes.length - glyph_length);
}
void set_overlaps_flag ()
{
if (unlikely (!header.numberOfContours)) return;
unsigned flags_offset = length (instructions_length ());
if (unlikely (flags_offset + 1 > bytes.length)) return;
HBUINT8 &first_flag = (HBUINT8 &) StructAtOffset<HBUINT16> (&bytes, flags_offset);
first_flag = (uint8_t) first_flag | FLAG_OVERLAP_SIMPLE;
}
static bool read_flags (const HBUINT8 *&p /* IN/OUT */,
contour_point_vector_t &points_ /* IN/OUT */,
const HBUINT8 *end)
{
unsigned count = points_.length;
for (unsigned int i = 0; i < count;)
{
if (unlikely (p + 1 > end)) return false;
uint8_t flag = *p++;
points_.arrayZ[i++].flag = flag;
if (flag & FLAG_REPEAT)
{
if (unlikely (p + 1 > end)) return false;
unsigned int repeat_count = *p++;
unsigned stop = hb_min (i + repeat_count, count);
for (; i < stop;)
points_.arrayZ[i++].flag = flag;
}
}
return true;
}
static bool read_points (const HBUINT8 *&p /* IN/OUT */,
contour_point_vector_t &points_ /* IN/OUT */,
const HBUINT8 *end,
float contour_point_t::*m,
const simple_glyph_flag_t short_flag,
const simple_glyph_flag_t same_flag)
{
int v = 0;
unsigned count = points_.length;
for (unsigned i = 0; i < count; i++)
{
unsigned flag = points_[i].flag;
if (flag & short_flag)
{
if (unlikely (p + 1 > end)) return false;
if (flag & same_flag)
v += *p++;
else
v -= *p++;
}
else
{
if (!(flag & same_flag))
{
if (unlikely (p + HBINT16::static_size > end)) return false;
v += *(const HBINT16 *) p;
p += HBINT16::static_size;
}
}
points_.arrayZ[i].*m = v;
}
return true;
}
bool get_contour_points (contour_point_vector_t &points_ /* OUT */,
bool phantom_only = false) const
{
const HBUINT16 *endPtsOfContours = &StructAfter<HBUINT16> (header);
int num_contours = header.numberOfContours;
assert (num_contours);
/* One extra item at the end, for the instruction-count below. */
if (unlikely (!bytes.check_range (&endPtsOfContours[num_contours]))) return false;
unsigned int num_points = endPtsOfContours[num_contours - 1] + 1;
points_.alloc (num_points + 4); // Allocate for phantom points, to avoid a possible copy
if (!points_.resize (num_points)) return false;
if (phantom_only) return true;
for (int i = 0; i < num_contours; i++)
points_[endPtsOfContours[i]].is_end_point = true;
/* Skip instructions */
const HBUINT8 *p = &StructAtOffset<HBUINT8> (&endPtsOfContours[num_contours + 1],
endPtsOfContours[num_contours]);
if (unlikely ((const char *) p < bytes.arrayZ)) return false; /* Unlikely overflow */
const HBUINT8 *end = (const HBUINT8 *) (bytes.arrayZ + bytes.length);
if (unlikely (p >= end)) return false;
/* Read x & y coordinates */
return read_flags (p, points_, end)
&& read_points (p, points_, end, &contour_point_t::x,
FLAG_X_SHORT, FLAG_X_SAME)
&& read_points (p, points_, end, &contour_point_t::y,
FLAG_Y_SHORT, FLAG_Y_SAME);
}
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_SIMPLEGLYPH_HH */
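
The X_SHORT/X_SAME delta scheme that read_points() decodes above is easy to get backwards, so here is a minimal standalone sketch of the same rule for the x axis — not HarfBuzz code, and with made-up input data — showing how one coordinate array unpacks:

#include <cstdint>
#include <cstdio>
#include <vector>

enum { FLAG_X_SHORT = 0x02, FLAG_X_SAME = 0x10 };

// Decode one x-coordinate array given its per-point flags, mirroring the
// delta rule used by SimpleGlyph::read_points() for the x axis.
static std::vector<int> decode_x (const std::vector<uint8_t> &flags, const uint8_t *p)
{
  std::vector<int> xs;
  int v = 0;
  for (uint8_t flag : flags)
  {
    if (flag & FLAG_X_SHORT)
    {
      int d = *p++;                          // 1-byte magnitude
      v += (flag & FLAG_X_SAME) ? d : -d;    // with X_SHORT set, X_SAME carries the sign
    }
    else if (!(flag & FLAG_X_SAME))
    {
      v += (int16_t) ((p[0] << 8) | p[1]);   // big-endian 2-byte signed delta
      p += 2;
    }
    // else: X_SAME without X_SHORT means "same x as the previous point"
    xs.push_back (v);
  }
  return xs;
}

int main ()
{
  std::vector<uint8_t> flags = { FLAG_X_SHORT | FLAG_X_SAME, FLAG_X_SHORT, FLAG_X_SAME };
  uint8_t data[] = { 100, 30 };              // +100, -30, then "same as previous"
  for (int x : decode_x (flags, data))
    printf ("%d\n", x);                      // prints 100, 70, 70
  return 0;
}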

View file

@ -0,0 +1,72 @@
#ifndef OT_GLYF_SUBSETGLYPH_HH
#define OT_GLYF_SUBSETGLYPH_HH
#include "../../hb-open-type.hh"
namespace OT {
namespace glyf_impl {
struct SubsetGlyph
{
hb_codepoint_t new_gid;
hb_codepoint_t old_gid;
Glyph source_glyph;
hb_bytes_t dest_start; /* region of source_glyph to copy first */
hb_bytes_t dest_end; /* region of source_glyph to copy second */
bool serialize (hb_serialize_context_t *c,
bool use_short_loca,
const hb_subset_plan_t *plan) const
{
TRACE_SERIALIZE (this);
hb_bytes_t dest_glyph = dest_start.copy (c);
dest_glyph = hb_bytes_t (&dest_glyph, dest_glyph.length + dest_end.copy (c).length);
unsigned int pad_length = use_short_loca ? padding () : 0;
DEBUG_MSG (SUBSET, nullptr, "serialize %d byte glyph, width %d pad %d", dest_glyph.length, dest_glyph.length + pad_length, pad_length);
HBUINT8 pad;
pad = 0;
while (pad_length > 0)
{
c->embed (pad);
pad_length--;
}
if (unlikely (!dest_glyph.length)) return_trace (true);
/* update components gids */
for (auto &_ : Glyph (dest_glyph).get_composite_iterator ())
{
hb_codepoint_t new_gid;
if (plan->new_gid_for_old_gid (_.glyphIndex, &new_gid))
const_cast<CompositeGlyphRecord &> (_).glyphIndex = new_gid;
}
if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
Glyph (dest_glyph).drop_hints ();
if (plan->flags & HB_SUBSET_FLAGS_SET_OVERLAPS_FLAG)
Glyph (dest_glyph).set_overlaps_flag ();
return_trace (true);
}
void drop_hints_bytes ()
{ source_glyph.drop_hints_bytes (dest_start, dest_end); }
unsigned int length () const { return dest_start.length + dest_end.length; }
/* pad to 2 to ensure 2-byte loca will be ok */
unsigned int padding () const { return length () % 2; }
unsigned int padded_size () const { return length () + padding (); }
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_SUBSETGLYPH_HH */

View file

@ -0,0 +1,90 @@
#ifndef OT_GLYF_GLYF_HELPERS_HH
#define OT_GLYF_GLYF_HELPERS_HH
#include "../../hb-open-type.hh"
#include "../../hb-subset-plan.hh"
#include "loca.hh"
namespace OT {
namespace glyf_impl {
template<typename IteratorIn, typename IteratorOut,
hb_requires (hb_is_source_of (IteratorIn, unsigned int)),
hb_requires (hb_is_sink_of (IteratorOut, unsigned))>
static void
_write_loca (IteratorIn it, bool short_offsets, IteratorOut dest)
{
unsigned right_shift = short_offsets ? 1 : 0;
unsigned int offset = 0;
dest << 0;
+ it
| hb_map ([=, &offset] (unsigned int padded_size)
{
offset += padded_size;
DEBUG_MSG (SUBSET, nullptr, "loca entry offset %d", offset);
return offset >> right_shift;
})
| hb_sink (dest)
;
}
static bool
_add_head_and_set_loca_version (hb_subset_plan_t *plan, bool use_short_loca)
{
hb_blob_t *head_blob = hb_sanitize_context_t ().reference_table<head> (plan->source);
hb_blob_t *head_prime_blob = hb_blob_copy_writable_or_fail (head_blob);
hb_blob_destroy (head_blob);
if (unlikely (!head_prime_blob))
return false;
head *head_prime = (head *) hb_blob_get_data_writable (head_prime_blob, nullptr);
head_prime->indexToLocFormat = use_short_loca ? 0 : 1;
bool success = plan->add_table (HB_OT_TAG_head, head_prime_blob);
hb_blob_destroy (head_prime_blob);
return success;
}
template<typename Iterator,
hb_requires (hb_is_source_of (Iterator, unsigned int))>
static bool
_add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets, bool use_short_loca)
{
unsigned num_offsets = padded_offsets.len () + 1;
unsigned entry_size = use_short_loca ? 2 : 4;
char *loca_prime_data = (char *) hb_calloc (entry_size, num_offsets);
if (unlikely (!loca_prime_data)) return false;
DEBUG_MSG (SUBSET, nullptr, "loca entry_size %d num_offsets %d size %d",
entry_size, num_offsets, entry_size * num_offsets);
if (use_short_loca)
_write_loca (padded_offsets, true, hb_array ((HBUINT16 *) loca_prime_data, num_offsets));
else
_write_loca (padded_offsets, false, hb_array ((HBUINT32 *) loca_prime_data, num_offsets));
hb_blob_t *loca_blob = hb_blob_create (loca_prime_data,
entry_size * num_offsets,
HB_MEMORY_MODE_WRITABLE,
loca_prime_data,
hb_free);
bool result = plan->add_table (HB_OT_TAG_loca, loca_blob)
&& _add_head_and_set_loca_version (plan, use_short_loca);
hb_blob_destroy (loca_blob);
return result;
}
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_GLYF_HELPERS_HH */
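
To make the right shift in _write_loca() above concrete, the following standalone sketch (illustrative values only, not the HarfBuzz API) shows how the short/long loca choice plays out; a real short loca would store the halved offsets as HBUINT16, so the largest representable byte offset is 2 * 0xFFFF:

#include <cstdint>
#include <cstdio>
#include <vector>

int main ()
{
  // Hypothetical per-glyph byte lengths, already padded to even sizes.
  std::vector<unsigned> padded_sizes = { 104, 0, 36, 58 };

  unsigned max_offset = 0;
  for (unsigned s : padded_sizes) max_offset += s;
  bool use_short = max_offset < 0x1FFFF;               // same threshold as glyf::subset()

  unsigned offset = 0;
  std::vector<uint32_t> loca = { 0 };
  for (unsigned s : padded_sizes)
  {
    offset += s;
    loca.push_back (use_short ? offset >> 1 : offset); // short entries store offset / 2
  }

  printf ("short loca: %d, entries:", (int) use_short);
  for (uint32_t e : loca) printf (" %u", e);
  printf ("\n");
  return 0;
}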

thirdparty/harfbuzz/src/OT/glyf/glyf.hh vendored Normal file
View file

@ -0,0 +1,388 @@
#ifndef OT_GLYF_GLYF_HH
#define OT_GLYF_GLYF_HH
#include "../../hb-open-type.hh"
#include "../../hb-ot-head-table.hh"
#include "../../hb-ot-hmtx-table.hh"
#include "../../hb-ot-var-gvar-table.hh"
#include "../../hb-draw.hh"
#include "glyf-helpers.hh"
#include "Glyph.hh"
#include "SubsetGlyph.hh"
#include "loca.hh"
#include "path-builder.hh"
namespace OT {
/*
* glyf -- TrueType Glyph Data
* https://docs.microsoft.com/en-us/typography/opentype/spec/glyf
*/
#define HB_OT_TAG_glyf HB_TAG('g','l','y','f')
struct glyf
{
friend struct glyf_accelerator_t;
static constexpr hb_tag_t tableTag = HB_OT_TAG_glyf;
bool sanitize (hb_sanitize_context_t *c HB_UNUSED) const
{
TRACE_SANITIZE (this);
/* Runtime checks as eagerly sanitizing each glyph is costly */
return_trace (true);
}
/* requires a source of SubsetGlyph; spelling out that constraint makes the compiler complain the identifier isn't declared */
template <typename Iterator>
bool serialize (hb_serialize_context_t *c,
Iterator it,
bool use_short_loca,
const hb_subset_plan_t *plan)
{
TRACE_SERIALIZE (this);
unsigned init_len = c->length ();
for (const auto &_ : it) _.serialize (c, use_short_loca, plan);
/* As a special case when all glyphs in the font are empty, add a zero byte
* to the table, so that OTS doesn't reject it, and to make the table work
* on Windows as well.
* See https://github.com/khaledhosny/ots/issues/52 */
if (init_len == c->length ())
{
HBUINT8 empty_byte;
empty_byte = 0;
c->copy (empty_byte);
}
return_trace (true);
}
/* Byte region(s) per glyph to output
unpadded, hints removed if so requested
If we fail to process a glyph we produce an empty (0-length) glyph */
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
glyf *glyf_prime = c->serializer->start_embed <glyf> ();
if (unlikely (!c->serializer->check_success (glyf_prime))) return_trace (false);
hb_vector_t<glyf_impl::SubsetGlyph> glyphs;
_populate_subset_glyphs (c->plan, &glyphs);
auto padded_offsets =
+ hb_iter (glyphs)
| hb_map (&glyf_impl::SubsetGlyph::padded_size)
;
unsigned max_offset = + padded_offsets | hb_reduce (hb_add, 0);
bool use_short_loca = max_offset < 0x1FFFF;
glyf_prime->serialize (c->serializer, hb_iter (glyphs), use_short_loca, c->plan);
if (!use_short_loca) {
padded_offsets =
+ hb_iter (glyphs)
| hb_map (&glyf_impl::SubsetGlyph::length)
;
}
if (unlikely (c->serializer->in_error ())) return_trace (false);
return_trace (c->serializer->check_success (glyf_impl::_add_loca_and_head (c->plan,
padded_offsets,
use_short_loca)));
}
void
_populate_subset_glyphs (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const;
protected:
UnsizedArrayOf<HBUINT8>
dataZ; /* Glyphs data. */
public:
DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
* check the size externally, allow Null() object of it by
* defining it _MIN instead. */
};
struct glyf_accelerator_t
{
glyf_accelerator_t (hb_face_t *face)
{
short_offset = false;
num_glyphs = 0;
loca_table = nullptr;
glyf_table = nullptr;
#ifndef HB_NO_VAR
gvar = nullptr;
#endif
hmtx = nullptr;
#ifndef HB_NO_VERTICAL
vmtx = nullptr;
#endif
const OT::head &head = *face->table.head;
if (head.indexToLocFormat > 1 || head.glyphDataFormat > 0)
/* Unknown format. Leave num_glyphs=0, that takes care of disabling us. */
return;
short_offset = 0 == head.indexToLocFormat;
loca_table = face->table.loca.get_blob (); // Needs no destruct!
glyf_table = hb_sanitize_context_t ().reference_table<glyf> (face);
#ifndef HB_NO_VAR
gvar = face->table.gvar;
#endif
hmtx = face->table.hmtx;
#ifndef HB_NO_VERTICAL
vmtx = face->table.vmtx;
#endif
num_glyphs = hb_max (1u, loca_table.get_length () / (short_offset ? 2 : 4)) - 1;
num_glyphs = hb_min (num_glyphs, face->get_num_glyphs ());
}
~glyf_accelerator_t ()
{
glyf_table.destroy ();
}
bool has_data () const { return num_glyphs; }
protected:
template<typename T>
bool get_points (hb_font_t *font, hb_codepoint_t gid, T consumer) const
{
if (gid >= num_glyphs) return false;
/* Making this allocation-free is not that easy
https://github.com/harfbuzz/harfbuzz/issues/2095
mostly because of gvar handling in VF fonts,
perhaps a separate path for non-VF fonts can be considered */
contour_point_vector_t all_points;
bool phantom_only = !consumer.is_consuming_contour_points ();
if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, phantom_only)))
return false;
if (consumer.is_consuming_contour_points ())
{
unsigned count = all_points.length;
assert (count >= glyf_impl::PHANTOM_COUNT);
count -= glyf_impl::PHANTOM_COUNT;
for (unsigned point_index = 0; point_index < count; point_index++)
consumer.consume_point (all_points[point_index]);
consumer.points_end ();
}
/* Where to write phantoms, nullptr if not requested */
contour_point_t *phantoms = consumer.get_phantoms_sink ();
if (phantoms)
for (unsigned i = 0; i < glyf_impl::PHANTOM_COUNT; ++i)
phantoms[i] = all_points[all_points.length - glyf_impl::PHANTOM_COUNT + i];
return true;
}
#ifndef HB_NO_VAR
struct points_aggregator_t
{
hb_font_t *font;
hb_glyph_extents_t *extents;
contour_point_t *phantoms;
struct contour_bounds_t
{
contour_bounds_t () { min_x = min_y = FLT_MAX; max_x = max_y = -FLT_MAX; }
void add (const contour_point_t &p)
{
min_x = hb_min (min_x, p.x);
min_y = hb_min (min_y, p.y);
max_x = hb_max (max_x, p.x);
max_y = hb_max (max_y, p.y);
}
bool empty () const { return (min_x >= max_x) || (min_y >= max_y); }
void get_extents (hb_font_t *font, hb_glyph_extents_t *extents)
{
if (unlikely (empty ()))
{
extents->width = 0;
extents->x_bearing = 0;
extents->height = 0;
extents->y_bearing = 0;
return;
}
extents->x_bearing = font->em_scalef_x (min_x);
extents->width = font->em_scalef_x (max_x) - extents->x_bearing;
extents->y_bearing = font->em_scalef_y (max_y);
extents->height = font->em_scalef_y (min_y) - extents->y_bearing;
}
protected:
float min_x, min_y, max_x, max_y;
} bounds;
points_aggregator_t (hb_font_t *font_, hb_glyph_extents_t *extents_, contour_point_t *phantoms_)
{
font = font_;
extents = extents_;
phantoms = phantoms_;
if (extents) bounds = contour_bounds_t ();
}
void consume_point (const contour_point_t &point) { bounds.add (point); }
void points_end () { bounds.get_extents (font, extents); }
bool is_consuming_contour_points () { return extents; }
contour_point_t *get_phantoms_sink () { return phantoms; }
};
public:
unsigned
get_advance_var (hb_font_t *font, hb_codepoint_t gid, bool is_vertical) const
{
if (unlikely (gid >= num_glyphs)) return 0;
bool success = false;
contour_point_t phantoms[glyf_impl::PHANTOM_COUNT];
if (likely (font->num_coords == gvar->get_axis_count ()))
success = get_points (font, gid, points_aggregator_t (font, nullptr, phantoms));
if (unlikely (!success))
return
#ifndef HB_NO_VERTICAL
is_vertical ? vmtx->get_advance (gid) :
#endif
hmtx->get_advance (gid);
float result = is_vertical
? phantoms[glyf_impl::PHANTOM_TOP].y - phantoms[glyf_impl::PHANTOM_BOTTOM].y
: phantoms[glyf_impl::PHANTOM_RIGHT].x - phantoms[glyf_impl::PHANTOM_LEFT].x;
return hb_clamp (roundf (result), 0.f, (float) UINT_MAX / 2);
}
int get_side_bearing_var (hb_font_t *font, hb_codepoint_t gid, bool is_vertical) const
{
if (unlikely (gid >= num_glyphs)) return 0;
hb_glyph_extents_t extents;
contour_point_t phantoms[glyf_impl::PHANTOM_COUNT];
if (unlikely (!get_points (font, gid, points_aggregator_t (font, &extents, phantoms))))
return
#ifndef HB_NO_VERTICAL
is_vertical ? vmtx->get_side_bearing (gid) :
#endif
hmtx->get_side_bearing (gid);
return is_vertical
? ceilf (phantoms[glyf_impl::PHANTOM_TOP].y) - extents.y_bearing
: floorf (phantoms[glyf_impl::PHANTOM_LEFT].x);
}
#endif
public:
bool get_extents (hb_font_t *font, hb_codepoint_t gid, hb_glyph_extents_t *extents) const
{
if (unlikely (gid >= num_glyphs)) return false;
#ifndef HB_NO_VAR
if (font->num_coords)
return get_points (font, gid, points_aggregator_t (font, extents, nullptr));
#endif
return glyph_for_gid (gid).get_extents (font, *this, extents);
}
const glyf_impl::Glyph
glyph_for_gid (hb_codepoint_t gid, bool needs_padding_removal = false) const
{
if (unlikely (gid >= num_glyphs)) return glyf_impl::Glyph ();
unsigned int start_offset, end_offset;
if (short_offset)
{
const HBUINT16 *offsets = (const HBUINT16 *) loca_table->dataZ.arrayZ;
start_offset = 2 * offsets[gid];
end_offset = 2 * offsets[gid + 1];
}
else
{
const HBUINT32 *offsets = (const HBUINT32 *) loca_table->dataZ.arrayZ;
start_offset = offsets[gid];
end_offset = offsets[gid + 1];
}
if (unlikely (start_offset > end_offset || end_offset > glyf_table.get_length ()))
return glyf_impl::Glyph ();
glyf_impl::Glyph glyph (hb_bytes_t ((const char *) this->glyf_table + start_offset,
end_offset - start_offset), gid);
return needs_padding_removal ? glyf_impl::Glyph (glyph.trim_padding (), gid) : glyph;
}
bool
get_path (hb_font_t *font, hb_codepoint_t gid, hb_draw_session_t &draw_session) const
{ return get_points (font, gid, glyf_impl::path_builder_t (font, draw_session)); }
#ifndef HB_NO_VAR
const gvar_accelerator_t *gvar;
#endif
const hmtx_accelerator_t *hmtx;
#ifndef HB_NO_VERTICAL
const vmtx_accelerator_t *vmtx;
#endif
private:
bool short_offset;
unsigned int num_glyphs;
hb_blob_ptr_t<loca> loca_table;
hb_blob_ptr_t<glyf> glyf_table;
};
inline void
glyf::_populate_subset_glyphs (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph> *glyphs /* OUT */) const
{
OT::glyf_accelerator_t glyf (plan->source);
+ hb_range (plan->num_output_glyphs ())
| hb_map ([&] (hb_codepoint_t new_gid)
{
glyf_impl::SubsetGlyph subset_glyph = {0};
subset_glyph.new_gid = new_gid;
/* should never fail: all old gids should be mapped */
if (!plan->old_gid_for_new_gid (new_gid, &subset_glyph.old_gid))
return subset_glyph;
if (new_gid == 0 &&
!(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
subset_glyph.source_glyph = glyf_impl::Glyph ();
else
subset_glyph.source_glyph = glyf.glyph_for_gid (subset_glyph.old_gid, true);
if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
subset_glyph.drop_hints_bytes ();
else
subset_glyph.dest_start = subset_glyph.source_glyph.get_bytes ();
return subset_glyph;
})
| hb_sink (glyphs)
;
}
} /* namespace OT */
#endif /* OT_GLYF_GLYF_HH */
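
A standalone sketch of the read side that glyph_for_gid() above performs — turning two consecutive short-format loca entries into a byte range within glyf; the loca values are hypothetical and this is not the accelerator itself:

#include <cstdint>
#include <cstdio>

int main ()
{
  // Hypothetical short-format loca for a 3-glyph font; entries are offset / 2.
  uint16_t short_loca[] = { 0, 26, 26, 35 };
  unsigned glyf_length = 70;

  for (unsigned gid = 0; gid < 3; gid++)
  {
    unsigned start = 2u * short_loca[gid];
    unsigned end   = 2u * short_loca[gid + 1];
    if (start > end || end > glyf_length)
    {
      printf ("gid %u: invalid range\n", gid);
      continue;
    }
    printf ("gid %u: %u bytes at offset %u%s\n",
            gid, end - start, start, start == end ? " (empty glyph)" : "");
  }
  return 0;
}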

thirdparty/harfbuzz/src/OT/glyf/loca.hh vendored Normal file
View file

@ -0,0 +1,43 @@
#ifndef OT_GLYF_LOCA_HH
#define OT_GLYF_LOCA_HH
#include "../../hb-open-type.hh"
namespace OT {
/*
* loca -- Index to Location
* https://docs.microsoft.com/en-us/typography/opentype/spec/loca
*/
#define HB_OT_TAG_loca HB_TAG('l','o','c','a')
struct loca
{
friend struct glyf;
friend struct glyf_accelerator_t;
static constexpr hb_tag_t tableTag = HB_OT_TAG_loca;
bool sanitize (hb_sanitize_context_t *c HB_UNUSED) const
{
TRACE_SANITIZE (this);
return_trace (true);
}
protected:
UnsizedArrayOf<HBUINT8>
dataZ; /* Location data. */
public:
DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
* check the size externally, allow Null() object of it by
* defining it _MIN instead. */
};
} /* namespace OT */
#endif /* OT_GLYF_LOCA_HH */

View file

@ -0,0 +1,135 @@
#ifndef OT_GLYF_PATH_BUILDER_HH
#define OT_GLYF_PATH_BUILDER_HH
#include "../../hb.hh"
namespace OT {
namespace glyf_impl {
struct path_builder_t
{
hb_font_t *font;
hb_draw_session_t *draw_session;
struct optional_point_t
{
optional_point_t () {}
optional_point_t (float x_, float y_) : has_data (true), x (x_), y (y_) {}
operator bool () const { return has_data; }
bool has_data = false;
float x = 0.;
float y = 0.;
optional_point_t lerp (optional_point_t p, float t)
{ return optional_point_t (x + t * (p.x - x), y + t * (p.y - y)); }
} first_oncurve, first_offcurve, last_offcurve;
path_builder_t (hb_font_t *font_, hb_draw_session_t &draw_session_)
{
font = font_;
draw_session = &draw_session_;
first_oncurve = first_offcurve = last_offcurve = optional_point_t ();
}
/* based on https://github.com/RazrFalcon/ttf-parser/blob/4f32821/src/glyf.rs#L287
See also:
* https://developer.apple.com/fonts/TrueType-Reference-Manual/RM01/Chap1.html
* https://stackoverflow.com/a/20772557 */
void consume_point (const contour_point_t &point)
{
bool is_on_curve = point.flag & glyf_impl::SimpleGlyph::FLAG_ON_CURVE;
optional_point_t p (font->em_fscalef_x (point.x), font->em_fscalef_y (point.y));
if (!first_oncurve)
{
if (is_on_curve)
{
first_oncurve = p;
draw_session->move_to (p.x, p.y);
}
else
{
if (first_offcurve)
{
optional_point_t mid = first_offcurve.lerp (p, .5f);
first_oncurve = mid;
last_offcurve = p;
draw_session->move_to (mid.x, mid.y);
}
else
first_offcurve = p;
}
}
else
{
if (last_offcurve)
{
if (is_on_curve)
{
draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
p.x, p.y);
last_offcurve = optional_point_t ();
}
else
{
optional_point_t mid = last_offcurve.lerp (p, .5f);
draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
mid.x, mid.y);
last_offcurve = p;
}
}
else
{
if (is_on_curve)
draw_session->line_to (p.x, p.y);
else
last_offcurve = p;
}
}
if (point.is_end_point)
{
if (first_offcurve && last_offcurve)
{
optional_point_t mid = last_offcurve.lerp (first_offcurve, .5f);
draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
mid.x, mid.y);
last_offcurve = optional_point_t ();
/* now check the rest */
}
if (first_offcurve && first_oncurve)
draw_session->quadratic_to (first_offcurve.x, first_offcurve.y,
first_oncurve.x, first_oncurve.y);
else if (last_offcurve && first_oncurve)
draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
first_oncurve.x, first_oncurve.y);
else if (first_oncurve)
draw_session->line_to (first_oncurve.x, first_oncurve.y);
else if (first_offcurve)
{
float x = first_offcurve.x, y = first_offcurve.y;
draw_session->move_to (x, y);
draw_session->quadratic_to (x, y, x, y);
}
/* Getting ready for the next contour */
first_oncurve = first_offcurve = last_offcurve = optional_point_t ();
draw_session->close_path ();
}
}
void points_end () {}
bool is_consuming_contour_points () { return true; }
contour_point_t *get_phantoms_sink () { return nullptr; }
};
} /* namespace glyf_impl */
} /* namespace OT */
#endif /* OT_GLYF_PATH_BUILDER_HH */
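
The implied-on-curve-midpoint rule that consume_point() relies on above can be shown in isolation. The sketch below is not HarfBuzz code and uses a made-up contour; it only prints the quadratic segments whose control points are off-curve, skipping the plain line segments:

#include <cstdio>
#include <vector>

struct Pt { float x, y; bool on_curve; };

int main ()
{
  // Made-up closed contour: on, off, on, off.
  std::vector<Pt> contour = { {0,0,true}, {50,100,false}, {100,0,true}, {50,-100,false} };
  size_t n = contour.size ();

  for (size_t i = 0; i < n; i++)
  {
    const Pt &p = contour[i];
    if (p.on_curve) continue;                 // on-curve points are plain anchors
    const Pt &prev = contour[(i + n - 1) % n];
    const Pt &next = contour[(i + 1) % n];
    // Each anchor is either a real on-curve point or the implied midpoint
    // between two consecutive off-curve points.
    float x0 = prev.on_curve ? prev.x : (prev.x + p.x) / 2;
    float y0 = prev.on_curve ? prev.y : (prev.y + p.y) / 2;
    float x1 = next.on_curve ? next.x : (next.x + p.x) / 2;
    float y1 = next.on_curve ? next.y : (next.y + p.y) / 2;
    printf ("quadratic (%g,%g) -> ctrl (%g,%g) -> (%g,%g)\n", x0, y0, p.x, p.y, x1, y1);
  }
  return 0;
}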

thirdparty/harfbuzz/src/graph/graph.hh vendored Normal file
View file

@ -0,0 +1,860 @@
/*
* Copyright © 2022 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Garret Rieger
*/
#ifndef GRAPH_GRAPH_HH
#define GRAPH_GRAPH_HH
namespace graph {
/**
* Represents a serialized table in the form of a graph.
* Provides methods for modifying and reordering the graph.
*/
struct graph_t
{
struct vertex_t
{
hb_serialize_context_t::object_t obj;
int64_t distance = 0 ;
int64_t space = 0 ;
hb_vector_t<unsigned> parents;
unsigned start = 0;
unsigned end = 0;
unsigned priority = 0;
friend void swap (vertex_t& a, vertex_t& b)
{
hb_swap (a.obj, b.obj);
hb_swap (a.distance, b.distance);
hb_swap (a.space, b.space);
hb_swap (a.parents, b.parents);
hb_swap (a.start, b.start);
hb_swap (a.end, b.end);
hb_swap (a.priority, b.priority);
}
bool is_shared () const
{
return parents.length > 1;
}
unsigned incoming_edges () const
{
return parents.length;
}
void remove_parent (unsigned parent_index)
{
for (unsigned i = 0; i < parents.length; i++)
{
if (parents[i] != parent_index) continue;
parents.remove (i);
break;
}
}
void remap_parents (const hb_vector_t<unsigned>& id_map)
{
for (unsigned i = 0; i < parents.length; i++)
parents[i] = id_map[parents[i]];
}
void remap_parent (unsigned old_index, unsigned new_index)
{
for (unsigned i = 0; i < parents.length; i++)
{
if (parents[i] == old_index)
parents[i] = new_index;
}
}
bool is_leaf () const
{
return !obj.real_links.length && !obj.virtual_links.length;
}
bool raise_priority ()
{
if (has_max_priority ()) return false;
priority++;
return true;
}
bool has_max_priority () const {
return priority >= 3;
}
int64_t modified_distance (unsigned order) const
{
// TODO(garretrieger): once priority is high enough, should try
// setting distance = 0 which will force it to sort immediately after
// its parent where possible.
int64_t modified_distance =
hb_min (hb_max(distance + distance_modifier (), 0), 0x7FFFFFFFFFF);
if (has_max_priority ()) {
modified_distance = 0;
}
return (modified_distance << 18) | (0x003FFFF & order);
}
int64_t distance_modifier () const
{
if (!priority) return 0;
int64_t table_size = obj.tail - obj.head;
if (priority == 1)
return -table_size / 2;
return -table_size;
}
};
/*
* A topological sorting of an object graph. Ordered
* in reverse serialization order (first object in the
* serialization is at the end of the list). This matches
* the 'packed' object stack used internally in the
* serializer
*/
template<typename T>
graph_t (const T& objects)
: parents_invalid (true),
distance_invalid (true),
positions_invalid (true),
successful (true)
{
num_roots_for_space_.push (1);
bool removed_nil = false;
vertices_.alloc (objects.length);
vertices_scratch_.alloc (objects.length);
for (unsigned i = 0; i < objects.length; i++)
{
// TODO(grieger): check all links point to valid objects.
// If this graph came from a serialization buffer object 0 is the
// nil object. We don't need it for our purposes here so drop it.
if (i == 0 && !objects[i])
{
removed_nil = true;
continue;
}
vertex_t* v = vertices_.push ();
if (check_success (!vertices_.in_error ()))
v->obj = *objects[i];
if (!removed_nil) continue;
// Fix indices to account for removed nil object.
for (auto& l : v->obj.all_links_writer ()) {
l.objidx--;
}
}
}
~graph_t ()
{
vertices_.fini ();
}
bool in_error () const
{
return !successful ||
vertices_.in_error () ||
num_roots_for_space_.in_error ();
}
const vertex_t& root () const
{
return vertices_[root_idx ()];
}
unsigned root_idx () const
{
// Object graphs are in reverse order, the first object is at the end
// of the vector. Since the graph is topologically sorted it's safe to
// assume the first object has no incoming edges.
return vertices_.length - 1;
}
const hb_serialize_context_t::object_t& object(unsigned i) const
{
return vertices_[i].obj;
}
/*
* Generates a new topological sorting of graph ordered by the shortest
* distance to each node.
*/
void sort_shortest_distance ()
{
positions_invalid = true;
if (vertices_.length <= 1) {
// Graph of 1 or less doesn't need sorting.
return;
}
update_distances ();
hb_priority_queue_t queue;
hb_vector_t<vertex_t> &sorted_graph = vertices_scratch_;
if (unlikely (!check_success (sorted_graph.resize (vertices_.length)))) return;
hb_vector_t<unsigned> id_map;
if (unlikely (!check_success (id_map.resize (vertices_.length)))) return;
hb_vector_t<unsigned> removed_edges;
if (unlikely (!check_success (removed_edges.resize (vertices_.length)))) return;
update_parents ();
queue.insert (root ().modified_distance (0), root_idx ());
int new_id = root_idx ();
unsigned order = 1;
while (!queue.in_error () && !queue.is_empty ())
{
unsigned next_id = queue.pop_minimum().second;
hb_swap (sorted_graph[new_id], vertices_[next_id]);
const vertex_t& next = sorted_graph[new_id];
id_map[next_id] = new_id--;
for (const auto& link : next.obj.all_links ()) {
removed_edges[link.objidx]++;
if (!(vertices_[link.objidx].incoming_edges () - removed_edges[link.objidx]))
// Add the order that the links were encountered to the priority.
// This ensures that ties between priorities objects are broken in a consistent
// way. More specifically this is set up so that if a set of objects have the same
// distance they'll be added to the topological order in the order that they are
// referenced from the parent object.
queue.insert (vertices_[link.objidx].modified_distance (order++),
link.objidx);
}
}
check_success (!queue.in_error ());
check_success (!sorted_graph.in_error ());
if (!check_success (new_id == -1))
print_orphaned_nodes ();
remap_all_obj_indices (id_map, &sorted_graph);
hb_swap (vertices_, sorted_graph);
}
/*
* Assign unique space numbers to each connected subgraph of 32 bit offset(s).
*/
bool assign_32bit_spaces ()
{
unsigned root_index = root_idx ();
hb_set_t visited;
hb_set_t roots;
for (unsigned i = 0; i <= root_index; i++)
{
// Only real links can form 32 bit spaces
for (auto& l : vertices_[i].obj.real_links)
{
if (l.width == 4 && !l.is_signed)
{
roots.add (l.objidx);
find_subgraph (l.objidx, visited);
}
}
}
// Mark everything not in the subgraphs of 32 bit roots as visited.
// This prevents 32 bit subgraphs from being connected via nodes not in the 32 bit subgraphs.
visited.invert ();
if (!roots) return false;
while (roots)
{
unsigned next = HB_SET_VALUE_INVALID;
if (unlikely (!check_success (!roots.in_error ()))) break;
if (!roots.next (&next)) break;
hb_set_t connected_roots;
find_connected_nodes (next, roots, visited, connected_roots);
if (unlikely (!check_success (!connected_roots.in_error ()))) break;
isolate_subgraph (connected_roots);
if (unlikely (!check_success (!connected_roots.in_error ()))) break;
unsigned next_space = this->next_space ();
num_roots_for_space_.push (0);
for (unsigned root : connected_roots)
{
DEBUG_MSG (SUBSET_REPACK, nullptr, "Subgraph %u gets space %u", root, next_space);
vertices_[root].space = next_space;
num_roots_for_space_[next_space] = num_roots_for_space_[next_space] + 1;
distance_invalid = true;
positions_invalid = true;
}
// TODO(grieger): special case for GSUB/GPOS use extension promotions to move 16 bit space
// into the 32 bit space as needed, instead of using isolation.
}
return true;
}
/*
* Isolates the subgraph of nodes reachable from root. Any links to nodes in the subgraph
* that originate from outside of the subgraph will be removed by duplicating the linked to
* object.
*
* Indices stored in roots will be updated if any of the roots are duplicated to new indices.
*/
bool isolate_subgraph (hb_set_t& roots)
{
update_parents ();
hb_map_t subgraph;
// incoming edges to root_idx should be all 32 bit in length so we don't need to de-dup these
// set the subgraph incoming edge count to match all of root_idx's incoming edges
hb_set_t parents;
for (unsigned root_idx : roots)
{
subgraph.set (root_idx, wide_parents (root_idx, parents));
find_subgraph (root_idx, subgraph);
}
unsigned original_root_idx = root_idx ();
hb_map_t index_map;
bool made_changes = false;
for (auto entry : subgraph.iter ())
{
const auto& node = vertices_[entry.first];
unsigned subgraph_incoming_edges = entry.second;
if (subgraph_incoming_edges < node.incoming_edges ())
{
// Only de-dup objects with incoming links from outside the subgraph.
made_changes = true;
duplicate_subgraph (entry.first, index_map);
}
}
if (!made_changes)
return false;
if (original_root_idx != root_idx ()
&& parents.has (original_root_idx))
{
// If the root idx has changed since parents was determined, update root idx in parents
parents.add (root_idx ());
parents.del (original_root_idx);
}
auto new_subgraph =
+ subgraph.keys ()
| hb_map([&] (unsigned node_idx) {
const unsigned *v;
if (index_map.has (node_idx, &v)) return *v;
return node_idx;
})
;
remap_obj_indices (index_map, new_subgraph);
remap_obj_indices (index_map, parents.iter (), true);
// Update roots set with new indices as needed.
unsigned next = HB_SET_VALUE_INVALID;
while (roots.next (&next))
{
const unsigned *v;
if (index_map.has (next, &v))
{
roots.del (next);
roots.add (*v);
}
}
return true;
}
void find_subgraph (unsigned node_idx, hb_map_t& subgraph)
{
for (const auto& link : vertices_[node_idx].obj.all_links ())
{
const unsigned *v;
if (subgraph.has (link.objidx, &v))
{
subgraph.set (link.objidx, *v + 1);
continue;
}
subgraph.set (link.objidx, 1);
find_subgraph (link.objidx, subgraph);
}
}
void find_subgraph (unsigned node_idx, hb_set_t& subgraph)
{
if (subgraph.has (node_idx)) return;
subgraph.add (node_idx);
for (const auto& link : vertices_[node_idx].obj.all_links ())
find_subgraph (link.objidx, subgraph);
}
/*
* duplicates all nodes in the subgraph reachable from node_idx. Does not re-assign
* links. index_map is updated with mappings from old id to new id. If a duplication has already
* been performed for a given index, then it will be skipped.
*/
void duplicate_subgraph (unsigned node_idx, hb_map_t& index_map)
{
if (index_map.has (node_idx))
return;
index_map.set (node_idx, duplicate (node_idx));
for (const auto& l : object (node_idx).all_links ()) {
duplicate_subgraph (l.objidx, index_map);
}
}
/*
* Creates a copy of node_idx and returns its new index.
*/
unsigned duplicate (unsigned node_idx)
{
positions_invalid = true;
distance_invalid = true;
auto* clone = vertices_.push ();
auto& child = vertices_[node_idx];
if (vertices_.in_error ()) {
return -1;
}
clone->obj.head = child.obj.head;
clone->obj.tail = child.obj.tail;
clone->distance = child.distance;
clone->space = child.space;
clone->parents.reset ();
unsigned clone_idx = vertices_.length - 2;
for (const auto& l : child.obj.real_links)
{
clone->obj.real_links.push (l);
vertices_[l.objidx].parents.push (clone_idx);
}
for (const auto& l : child.obj.virtual_links)
{
clone->obj.virtual_links.push (l);
vertices_[l.objidx].parents.push (clone_idx);
}
check_success (!clone->obj.real_links.in_error ());
check_success (!clone->obj.virtual_links.in_error ());
// The last object is the root of the graph, so swap back the root to the end.
// The root's obj idx does change, however since it's the root nothing else refers to it.
// All other obj idxs will be unaffected.
hb_swap (vertices_[vertices_.length - 2], *clone);
// Since the root moved, update the parents arrays of all children on the root.
for (const auto& l : root ().obj.all_links ())
vertices_[l.objidx].remap_parent (root_idx () - 1, root_idx ());
return clone_idx;
}
/*
* Creates a copy of child and re-assigns the link from
* parent to the clone. The copy is a shallow copy, objects
* linked from child are not duplicated.
*/
bool duplicate (unsigned parent_idx, unsigned child_idx)
{
update_parents ();
unsigned links_to_child = 0;
for (const auto& l : vertices_[parent_idx].obj.all_links ())
{
if (l.objidx == child_idx) links_to_child++;
}
if (vertices_[child_idx].incoming_edges () <= links_to_child)
{
// Can't duplicate this node, doing so would orphan the original one as all remaining links
// to child are from parent.
DEBUG_MSG (SUBSET_REPACK, nullptr, " Not duplicating %d => %d",
parent_idx, child_idx);
return false;
}
DEBUG_MSG (SUBSET_REPACK, nullptr, " Duplicating %d => %d",
parent_idx, child_idx);
unsigned clone_idx = duplicate (child_idx);
if (clone_idx == (unsigned) -1) return false;
// duplicate shifts the root node idx, so if parent_idx was root update it.
if (parent_idx == clone_idx) parent_idx++;
auto& parent = vertices_[parent_idx];
for (auto& l : parent.obj.all_links_writer ())
{
if (l.objidx != child_idx)
continue;
reassign_link (l, parent_idx, clone_idx);
}
return true;
}
/*
* Raises the sorting priority of all children.
*/
bool raise_childrens_priority (unsigned parent_idx)
{
DEBUG_MSG (SUBSET_REPACK, nullptr, " Raising priority of all children of %d",
parent_idx);
// This operation doesn't change ordering until a sort is run, so no need
// to invalidate positions. It does not change graph structure so no need
// to update distances or edge counts.
auto& parent = vertices_[parent_idx].obj;
bool made_change = false;
for (auto& l : parent.all_links_writer ())
made_change |= vertices_[l.objidx].raise_priority ();
return made_change;
}
void print_orphaned_nodes ()
{
if (!DEBUG_ENABLED(SUBSET_REPACK)) return;
DEBUG_MSG (SUBSET_REPACK, nullptr, "Graph is not fully connected.");
parents_invalid = true;
update_parents();
for (unsigned i = 0; i < root_idx (); i++)
{
const auto& v = vertices_[i];
if (!v.parents)
DEBUG_MSG (SUBSET_REPACK, nullptr, "Node %u is orphaned.", i);
}
}
unsigned num_roots_for_space (unsigned space) const
{
return num_roots_for_space_[space];
}
unsigned next_space () const
{
return num_roots_for_space_.length;
}
void move_to_new_space (const hb_set_t& indices)
{
num_roots_for_space_.push (0);
unsigned new_space = num_roots_for_space_.length - 1;
for (unsigned index : indices) {
auto& node = vertices_[index];
num_roots_for_space_[node.space] = num_roots_for_space_[node.space] - 1;
num_roots_for_space_[new_space] = num_roots_for_space_[new_space] + 1;
node.space = new_space;
distance_invalid = true;
positions_invalid = true;
}
}
unsigned space_for (unsigned index, unsigned* root = nullptr) const
{
const auto& node = vertices_[index];
if (node.space)
{
if (root != nullptr)
*root = index;
return node.space;
}
if (!node.parents)
{
if (root)
*root = index;
return 0;
}
return space_for (node.parents[0], root);
}
void err_other_error () { this->successful = false; }
size_t total_size_in_bytes () const {
size_t total_size = 0;
for (unsigned i = 0; i < vertices_.length; i++) {
size_t size = vertices_[i].obj.tail - vertices_[i].obj.head;
total_size += size;
}
return total_size;
}
private:
/*
* Returns the number of incoming edges that are 32 bits wide.
*/
unsigned wide_parents (unsigned node_idx, hb_set_t& parents) const
{
unsigned count = 0;
hb_set_t visited;
for (unsigned p : vertices_[node_idx].parents)
{
if (visited.has (p)) continue;
visited.add (p);
// Only real links can be wide
for (const auto& l : vertices_[p].obj.real_links)
{
if (l.objidx == node_idx && l.width == 4 && !l.is_signed)
{
count++;
parents.add (p);
}
}
}
return count;
}
bool check_success (bool success)
{ return this->successful && (success || ((void) err_other_error (), false)); }
public:
/*
* Creates a map from objid to # of incoming edges.
*/
void update_parents ()
{
if (!parents_invalid) return;
for (unsigned i = 0; i < vertices_.length; i++)
vertices_[i].parents.reset ();
for (unsigned p = 0; p < vertices_.length; p++)
{
for (auto& l : vertices_[p].obj.all_links ())
{
vertices_[l.objidx].parents.push (p);
}
}
parents_invalid = false;
}
/*
* compute the serialized start and end positions for each vertex.
*/
void update_positions ()
{
if (!positions_invalid) return;
unsigned current_pos = 0;
for (int i = root_idx (); i >= 0; i--)
{
auto& v = vertices_[i];
v.start = current_pos;
current_pos += v.obj.tail - v.obj.head;
v.end = current_pos;
}
positions_invalid = false;
}
/*
* Finds the distance to each object in the graph
* from the initial node.
*/
void update_distances ()
{
if (!distance_invalid) return;
// Uses Dijkstra's algorithm to find all of the shortest distances.
// https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
//
// Implementation Note:
// Since our priority queue doesn't support fast priority decreases
// we instead just add new entries into the queue when a priority changes.
// Redundant ones are filtered out later on by the visited set.
// According to https://www3.cs.stonybrook.edu/~rezaul/papers/TR-07-54.pdf
// for practical performance this is faster than using a more advanced queue
// (such as a fibonacci queue) with a fast decrease priority.
for (unsigned i = 0; i < vertices_.length; i++)
{
if (i == vertices_.length - 1)
vertices_[i].distance = 0;
else
vertices_[i].distance = hb_int_max (int64_t);
}
hb_priority_queue_t queue;
queue.insert (0, vertices_.length - 1);
hb_vector_t<bool> visited;
visited.resize (vertices_.length);
while (!queue.in_error () && !queue.is_empty ())
{
unsigned next_idx = queue.pop_minimum ().second;
if (visited[next_idx]) continue;
const auto& next = vertices_[next_idx];
int64_t next_distance = vertices_[next_idx].distance;
visited[next_idx] = true;
for (const auto& link : next.obj.all_links ())
{
if (visited[link.objidx]) continue;
const auto& child = vertices_[link.objidx].obj;
unsigned link_width = link.width ? link.width : 4; // treat virtual offsets as 32 bits wide
int64_t child_weight = (child.tail - child.head) +
((int64_t) 1 << (link_width * 8)) * (vertices_[link.objidx].space + 1);
int64_t child_distance = next_distance + child_weight;
if (child_distance < vertices_[link.objidx].distance)
{
vertices_[link.objidx].distance = child_distance;
queue.insert (child_distance, link.objidx);
}
}
}
check_success (!queue.in_error ());
if (!check_success (queue.is_empty ()))
{
print_orphaned_nodes ();
return;
}
distance_invalid = false;
}
private:
/*
* Updates a link in the graph to point to a different object. Corrects the
* parents vector on the previous and new child nodes.
*/
void reassign_link (hb_serialize_context_t::object_t::link_t& link,
unsigned parent_idx,
unsigned new_idx)
{
unsigned old_idx = link.objidx;
link.objidx = new_idx;
vertices_[old_idx].remove_parent (parent_idx);
vertices_[new_idx].parents.push (parent_idx);
}
/*
* Updates all objidx's in all links using the provided mapping. Corrects incoming edge counts.
*/
template<typename Iterator, hb_requires (hb_is_iterator (Iterator))>
void remap_obj_indices (const hb_map_t& id_map,
Iterator subgraph,
bool only_wide = false)
{
if (!id_map) return;
for (unsigned i : subgraph)
{
for (auto& link : vertices_[i].obj.all_links_writer ())
{
const unsigned *v;
if (!id_map.has (link.objidx, &v)) continue;
if (only_wide && !(link.width == 4 && !link.is_signed)) continue;
reassign_link (link, i, *v);
}
}
}
/*
* Updates all objidx's in all links using the provided mapping.
*/
void remap_all_obj_indices (const hb_vector_t<unsigned>& id_map,
hb_vector_t<vertex_t>* sorted_graph) const
{
for (unsigned i = 0; i < sorted_graph->length; i++)
{
(*sorted_graph)[i].remap_parents (id_map);
for (auto& link : (*sorted_graph)[i].obj.all_links_writer ())
{
link.objidx = id_map[link.objidx];
}
}
}
/*
* Finds all nodes in targets that are reachable from start_idx, nodes in visited will be skipped.
* For this search the graph is treated as being undirected.
*
* Connected targets will be added to connected and removed from targets. All visited nodes
* will be added to visited.
*/
void find_connected_nodes (unsigned start_idx,
hb_set_t& targets,
hb_set_t& visited,
hb_set_t& connected)
{
if (unlikely (!check_success (!visited.in_error ()))) return;
if (visited.has (start_idx)) return;
visited.add (start_idx);
if (targets.has (start_idx))
{
targets.del (start_idx);
connected.add (start_idx);
}
const auto& v = vertices_[start_idx];
// Graph is treated as undirected so search children and parents of start_idx
for (const auto& l : v.obj.all_links ())
find_connected_nodes (l.objidx, targets, visited, connected);
for (unsigned p : v.parents)
find_connected_nodes (p, targets, visited, connected);
}
public:
// TODO(garretrieger): make private, will need to move most of offset overflow code into graph.
hb_vector_t<vertex_t> vertices_;
hb_vector_t<vertex_t> vertices_scratch_;
private:
bool parents_invalid;
bool distance_invalid;
bool positions_invalid;
bool successful;
hb_vector_t<unsigned> num_roots_for_space_;
};
}
#endif // GRAPH_GRAPH_HH
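
A standalone sketch (not the HarfBuzz type) of the sort key that vertex_t::modified_distance() packs for the priority queue above — distance in the high bits, link-encounter order in the low 18 bits — so that objects at equal distance are popped in the order their links were seen in the parent:

#include <cstdint>
#include <cstdio>

// Same packing as modified_distance(): clamp the distance, then combine it
// with the 18-bit encounter order as a tie-breaker.
static int64_t sort_key (int64_t distance, unsigned order)
{
  distance = distance < 0 ? 0 : distance;
  distance = distance > 0x7FFFFFFFFFF ? 0x7FFFFFFFFFF : distance;
  return (distance << 18) | (order & 0x3FFFF);
}

int main ()
{
  printf ("%lld\n", (long long) sort_key (1000, 1));   // smaller key: sorted first
  printf ("%lld\n", (long long) sort_key (1000, 2));   // same distance, later link
  printf ("%lld\n", (long long) sort_key (1001, 1));   // larger distance dominates
  return 0;
}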

View file

@ -0,0 +1,249 @@
/*
* Copyright © 2022 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Garret Rieger
*/
#ifndef GRAPH_SERIALIZE_HH
#define GRAPH_SERIALIZE_HH
namespace graph {
struct overflow_record_t
{
unsigned parent;
unsigned child;
};
inline
int64_t compute_offset (
const graph_t& graph,
unsigned parent_idx,
const hb_serialize_context_t::object_t::link_t& link)
{
const auto& parent = graph.vertices_[parent_idx];
const auto& child = graph.vertices_[link.objidx];
int64_t offset = 0;
switch ((hb_serialize_context_t::whence_t) link.whence) {
case hb_serialize_context_t::whence_t::Head:
offset = child.start - parent.start; break;
case hb_serialize_context_t::whence_t::Tail:
offset = child.start - parent.end; break;
case hb_serialize_context_t::whence_t::Absolute:
offset = child.start; break;
}
assert (offset >= link.bias);
offset -= link.bias;
return offset;
}
inline
bool is_valid_offset (int64_t offset,
const hb_serialize_context_t::object_t::link_t& link)
{
if (unlikely (!link.width))
// Virtual links can't overflow.
return link.is_signed || offset >= 0;
if (link.is_signed)
{
if (link.width == 4)
return offset >= -((int64_t) 1 << 31) && offset < ((int64_t) 1 << 31);
else
return offset >= -(1 << 15) && offset < (1 << 15);
}
else
{
if (link.width == 4)
return offset >= 0 && offset < ((int64_t) 1 << 32);
else if (link.width == 3)
return offset >= 0 && offset < ((int32_t) 1 << 24);
else
return offset >= 0 && offset < (1 << 16);
}
}
/*
* Will any offsets overflow on graph when it's serialized?
*/
inline bool
will_overflow (graph_t& graph,
hb_vector_t<overflow_record_t>* overflows = nullptr)
{
if (overflows) overflows->resize (0);
graph.update_positions ();
const auto& vertices = graph.vertices_;
for (int parent_idx = vertices.length - 1; parent_idx >= 0; parent_idx--)
{
// Don't need to check virtual links for overflow
for (const auto& link : vertices[parent_idx].obj.real_links)
{
int64_t offset = compute_offset (graph, parent_idx, link);
if (is_valid_offset (offset, link))
continue;
if (!overflows) return true;
overflow_record_t r;
r.parent = parent_idx;
r.child = link.objidx;
overflows->push (r);
}
}
if (!overflows) return false;
return overflows->length;
}
inline
void print_overflows (graph_t& graph,
const hb_vector_t<overflow_record_t>& overflows)
{
if (!DEBUG_ENABLED(SUBSET_REPACK)) return;
graph.update_parents ();
int limit = 10;
for (const auto& o : overflows)
{
if (!limit--) break;
const auto& parent = graph.vertices_[o.parent];
const auto& child = graph.vertices_[o.child];
DEBUG_MSG (SUBSET_REPACK, nullptr,
" overflow from "
"%4d (%4d in, %4d out, space %2d) => "
"%4d (%4d in, %4d out, space %2d)",
o.parent,
parent.incoming_edges (),
parent.obj.real_links.length + parent.obj.virtual_links.length,
graph.space_for (o.parent),
o.child,
child.incoming_edges (),
child.obj.real_links.length + child.obj.virtual_links.length,
graph.space_for (o.child));
}
if (overflows.length > 10) {
DEBUG_MSG (SUBSET_REPACK, nullptr, " ... plus %d more overflows.", overflows.length - 10);
}
}
template <typename O> inline void
serialize_link_of_type (const hb_serialize_context_t::object_t::link_t& link,
char* head,
hb_serialize_context_t* c)
{
OT::Offset<O>* offset = reinterpret_cast<OT::Offset<O>*> (head + link.position);
*offset = 0;
c->add_link (*offset,
// serializer has an extra nil object at the start of the
// object array. So all id's are +1 of what our id's are.
link.objidx + 1,
(hb_serialize_context_t::whence_t) link.whence,
link.bias);
}
inline
void serialize_link (const hb_serialize_context_t::object_t::link_t& link,
char* head,
hb_serialize_context_t* c)
{
switch (link.width)
{
case 0:
// Virtual links aren't serialized.
return;
case 4:
if (link.is_signed)
{
serialize_link_of_type<OT::HBINT32> (link, head, c);
} else {
serialize_link_of_type<OT::HBUINT32> (link, head, c);
}
return;
case 2:
if (link.is_signed)
{
serialize_link_of_type<OT::HBINT16> (link, head, c);
} else {
serialize_link_of_type<OT::HBUINT16> (link, head, c);
}
return;
case 3:
serialize_link_of_type<OT::HBUINT24> (link, head, c);
return;
default:
// Unexpected link width.
assert (0);
}
}
/*
* serialize graph into the provided serialization buffer.
*/
inline hb_blob_t* serialize (const graph_t& graph)
{
hb_vector_t<char> buffer;
size_t size = graph.total_size_in_bytes ();
if (!buffer.alloc (size)) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Unable to allocate output buffer.");
return nullptr;
}
hb_serialize_context_t c((void *) buffer, size);
c.start_serialize<void> ();
const auto& vertices = graph.vertices_;
for (unsigned i = 0; i < vertices.length; i++) {
c.push ();
size_t size = vertices[i].obj.tail - vertices[i].obj.head;
char* start = c.allocate_size <char> (size);
if (!start) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Buffer out of space.");
return nullptr;
}
memcpy (start, vertices[i].obj.head, size);
// Only real links need to be serialized.
for (const auto& link : vertices[i].obj.real_links)
serialize_link (link, start, &c);
// All duplications are already encoded in the graph, so don't
// enable sharing during packing.
c.pop_pack (false);
}
c.end_serialize ();
if (c.in_error ()) {
DEBUG_MSG (SUBSET_REPACK, nullptr, "Error during serialization. Err flag: %d",
c.errors);
return nullptr;
}
return c.copy_blob ();
}
} // namespace graph
#endif // GRAPH_SERIALIZE_HH
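
As a rough illustration of the per-width ranges that is_valid_offset() above enforces, the following standalone sketch (not HarfBuzz code) mirrors the same checks on a few example offsets:

#include <cstdint>
#include <cstdio>

// Mirrors the per-width, per-signedness ranges checked by is_valid_offset().
static bool fits (int64_t offset, unsigned width, bool is_signed)
{
  if (is_signed)
    return width == 4 ? (offset >= -(1LL << 31) && offset < (1LL << 31))
                      : (offset >= -(1 << 15)   && offset < (1 << 15));
  if (width == 4) return offset >= 0 && offset < (1LL << 32);
  if (width == 3) return offset >= 0 && offset < (1 << 24);
  return offset >= 0 && offset < (1 << 16);
}

int main ()
{
  printf ("%d\n", (int) fits (70000, 2, false));  // 0: overflows an Offset16
  printf ("%d\n", (int) fits (70000, 3, false));  // 1: fits an Offset24
  printf ("%d\n", (int) fits (-10,   2, true));   // 1: fits a signed 16-bit offset
  return 0;
}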

View file

@ -287,7 +287,7 @@ struct KerxSubTableFormat1
* in the 'kern' table example. */
if (v == -0x8000)
{
o.attach_type() = ATTACH_TYPE_NONE;
o.attach_type() = OT::Layout::GPOS_impl::ATTACH_TYPE_NONE;
o.attach_chain() = 0;
o.y_offset = 0;
}
@ -310,7 +310,7 @@ struct KerxSubTableFormat1
/* CoreText doesn't do crossStream kerning in vertical. We do. */
if (v == -0x8000)
{
o.attach_type() = ATTACH_TYPE_NONE;
o.attach_type() = OT::Layout::GPOS_impl::ATTACH_TYPE_NONE;
o.attach_chain() = 0;
o.x_offset = 0;
}
@ -567,7 +567,7 @@ struct KerxSubTableFormat4
}
break;
}
o.attach_type() = ATTACH_TYPE_MARK;
o.attach_type() = OT::Layout::GPOS_impl::ATTACH_TYPE_MARK;
o.attach_chain() = (int) mark - (int) buffer->idx;
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
}
@ -901,7 +901,7 @@ struct KerxTable
unsigned int count = c->buffer->len;
for (unsigned int i = 0; i < count; i++)
{
pos[i].attach_type() = ATTACH_TYPE_CURSIVE;
pos[i].attach_type() = OT::Layout::GPOS_impl::ATTACH_TYPE_CURSIVE;
pos[i].attach_chain() = HB_DIRECTION_IS_FORWARD (c->buffer->props.direction) ? -1 : +1;
/* We intentionally don't set HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT,
* since there needs to be a non-zero attachment for post-positioning to

View file

@ -123,7 +123,7 @@ struct RearrangementSubtable
bool reverse_l = 3 == (m >> 4);
bool reverse_r = 3 == (m & 0x0F);
if (end - start >= l + r)
if (end - start >= l + r && end-start <= HB_MAX_CONTEXT_LENGTH)
{
buffer->merge_clusters (start, hb_min (buffer->idx + 1, buffer->len));
buffer->merge_clusters (start, end);

View file

@ -59,7 +59,7 @@
static inline constexpr T operator | (T l, T r) { return T ((unsigned) l | (unsigned) r); } \
static inline constexpr T operator & (T l, T r) { return T ((unsigned) l & (unsigned) r); } \
static inline constexpr T operator ^ (T l, T r) { return T ((unsigned) l ^ (unsigned) r); } \
static inline constexpr T operator ~ (T r) { return T (~(unsigned int) r); } \
static inline constexpr unsigned operator ~ (T r) { return (~(unsigned) r); } \
static inline T& operator |= (T &l, T r) { l = l | r; return l; } \
static inline T& operator &= (T& l, T r) { l = l & r; return l; } \
static inline T& operator ^= (T& l, T r) { l = l ^ r; return l; } \
@ -227,31 +227,26 @@ struct
}
HB_FUNCOBJ (hb_bool);
template <typename T>
static inline
constexpr T hb_coerce (const T v) { return v; }
template <typename T, typename V,
hb_enable_if (!hb_is_same (hb_decay<T>, hb_decay<V>) && std::is_pointer<V>::value)>
static inline
constexpr T hb_coerce (const V v) { return *v; }
struct
{
private:
template <typename T> constexpr auto
impl (const T& v, hb_priority<2>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
template <typename T> constexpr uint32_t
impl (const hb::shared_ptr<T>& v, hb_priority<1>) const
{
return v.get () ? v.get ()->hash () : 0;
}
template <typename T> constexpr uint32_t
impl (const hb::unique_ptr<T>& v, hb_priority<1>) const
{
return v.get () ? v.get ()->hash () : 0;
}
template <typename T> constexpr auto
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, std::hash<hb_decay<decltype (hb_deref (v))>>{} (hb_deref (v)))
template <typename T,
hb_enable_if (std::is_integral<T>::value)> constexpr auto
impl (const T& v, hb_priority<0>) const HB_AUTO_RETURN
(
/* Knuth's multiplicative method: */
(uint32_t) v * 2654435761u
)
impl (const T& v, hb_priority<0>) const HB_RETURN (uint32_t, std::hash<hb_decay<decltype (hb_deref (v))>>{} (hb_deref (v)))
public:
@ -862,6 +857,11 @@ hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3)
{
return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3);
}
template <typename T> static inline bool
hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3, T lo4, T hi4)
{
return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3) || hb_in_range (u, lo4, hi4);
}
/*

View file

@ -56,7 +56,6 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
hb_array_t& operator= (const hb_array_t&) = default;
hb_array_t& operator= (hb_array_t&&) = default;
constexpr hb_array_t (std::nullptr_t) : hb_array_t () {}
constexpr hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_) {}
template <unsigned int length_>
constexpr hb_array_t (Type (&array_)[length_]) : hb_array_t (array_, length_) {}
@ -314,7 +313,6 @@ struct hb_sorted_array_t :
hb_sorted_array_t& operator= (const hb_sorted_array_t&) = default;
hb_sorted_array_t& operator= (hb_sorted_array_t&&) = default;
constexpr hb_sorted_array_t (std::nullptr_t) : hb_sorted_array_t () {}
constexpr hb_sorted_array_t (Type *array_, unsigned int length_) : hb_array_t<Type> (array_, length_) {}
template <unsigned int length_>
constexpr hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}

View file

@ -54,17 +54,18 @@ struct hb_bimap_t
if (unlikely (rhs == HB_MAP_VALUE_INVALID)) { del (lhs); return; }
forw_map.set (lhs, rhs);
if (in_error ()) return;
if (unlikely (in_error ())) return;
back_map.set (rhs, lhs);
if (in_error ()) forw_map.del (lhs);
if (unlikely (in_error ())) forw_map.del (lhs);
}
hb_codepoint_t get (hb_codepoint_t lhs) const { return forw_map.get (lhs); }
hb_codepoint_t backward (hb_codepoint_t rhs) const { return back_map.get (rhs); }
hb_codepoint_t operator [] (hb_codepoint_t lhs) const { return get (lhs); }
bool has (hb_codepoint_t lhs, hb_codepoint_t *vp = nullptr) const { return forw_map.has (lhs, vp); }
bool has (hb_codepoint_t lhs) const { return forw_map.has (lhs); }
void del (hb_codepoint_t lhs)
{
@ -78,7 +79,7 @@ struct hb_bimap_t
back_map.clear ();
}
bool is_empty () const { return get_population () == 0; }
bool is_empty () const { return forw_map.is_empty (); }
unsigned int get_population () const { return forw_map.get_population (); }

View file

@ -80,7 +80,7 @@ struct hb_bit_set_invertible_t
next (&v);
return v == INVALID;
}
uint32_t hash () const { return s.hash () ^ inverted; }
uint32_t hash () const { return s.hash () ^ (uint32_t) inverted; }
hb_codepoint_t get_min () const
{
@ -100,7 +100,7 @@ struct hb_bit_set_invertible_t
void add (hb_codepoint_t g) { unlikely (inverted) ? s.del (g) : s.add (g); }
bool add_range (hb_codepoint_t a, hb_codepoint_t b)
{ return unlikely (inverted) ? (s.del_range (a, b), true) : s.add_range (a, b); }
{ return unlikely (inverted) ? ((void) s.del_range (a, b), true) : s.add_range (a, b); }
template <typename T>
void add_array (const T *array, unsigned int count, unsigned int stride=sizeof(T))

View file

@ -56,7 +56,7 @@ struct hb_bit_set_t
{
successful = true;
population = 0;
last_page_lookup = 0;
last_page_lookup.set_relaxed (0);
page_map.init ();
pages.init ();
}
@ -78,7 +78,7 @@ struct hb_bit_set_t
bool successful = true; /* Allocations successful */
mutable unsigned int population = 0;
mutable unsigned int last_page_lookup = 0;
mutable hb_atomic_int_t last_page_lookup = 0;
hb_sorted_vector_t<page_map_t> page_map;
hb_vector_t<page_t> pages;
@ -607,7 +607,7 @@ struct hb_bit_set_t
const auto* page_map_array = page_map.arrayZ;
unsigned int major = get_major (*codepoint);
unsigned int i = last_page_lookup;
unsigned int i = last_page_lookup.get_relaxed ();
if (unlikely (i >= page_map.length || page_map_array[i].major != major))
{
@ -625,7 +625,7 @@ struct hb_bit_set_t
if (pages_array[current.index].next (codepoint))
{
*codepoint += current.major * page_t::PAGE_BITS;
last_page_lookup = i;
last_page_lookup.set_relaxed (i);
return true;
}
i++;
@ -638,11 +638,11 @@ struct hb_bit_set_t
if (m != INVALID)
{
*codepoint = current.major * page_t::PAGE_BITS + m;
last_page_lookup = i;
last_page_lookup.set_relaxed (i);
return true;
}
}
last_page_lookup = 0;
last_page_lookup.set_relaxed (0);
*codepoint = INVALID;
return false;
}
@ -725,7 +725,7 @@ struct hb_bit_set_t
{
const auto* page_map_array = page_map.arrayZ;
unsigned int major = get_major (codepoint);
unsigned int i = last_page_lookup;
unsigned int i = last_page_lookup.get_relaxed ();
if (unlikely (i >= page_map.length || page_map_array[i].major != major))
{
page_map.bfind (major, &i, HB_NOT_FOUND_STORE_CLOSEST);
@ -766,7 +766,7 @@ struct hb_bit_set_t
{
const auto* page_map_array = page_map.arrayZ;
unsigned int major = get_major (codepoint);
unsigned int i = last_page_lookup;
unsigned int i = last_page_lookup.get_relaxed ();
if (unlikely (i >= page_map.length || page_map_array[i].major != major))
{
page_map.bfind(major, &i, HB_NOT_FOUND_STORE_CLOSEST);
@ -893,15 +893,15 @@ struct hb_bit_set_t
/* The extra page_map length is necessary; can't just rely on vector here,
* since the next check would be tricked because a null page also has
* major==0, which we can't distinguish from an actual major==0 page... */
if (likely (last_page_lookup < page_map.length))
unsigned i = last_page_lookup.get_relaxed ();
if (likely (i < page_map.length))
{
auto &cached_page = page_map.arrayZ[last_page_lookup];
auto &cached_page = page_map.arrayZ[i];
if (cached_page.major == major)
return &pages[cached_page.index];
}
page_map_t map = {major, pages.length};
unsigned int i;
if (!page_map.bfind (map, &i, HB_NOT_FOUND_STORE_CLOSEST))
{
if (!insert)
@ -917,7 +917,7 @@ struct hb_bit_set_t
page_map[i] = map;
}
last_page_lookup = i;
last_page_lookup.set_relaxed (i);
return &pages[page_map[i].index];
}
const page_t *page_for (hb_codepoint_t g) const
@ -927,19 +927,19 @@ struct hb_bit_set_t
/* The extra page_map length is necessary; can't just rely on vector here,
* since the next check would be tricked because a null page also has
* major==0, which we can't distinguish from an actual major==0 page... */
if (likely (last_page_lookup < page_map.length))
unsigned i = last_page_lookup.get_relaxed ();
if (likely (i < page_map.length))
{
auto &cached_page = page_map.arrayZ[last_page_lookup];
auto &cached_page = page_map.arrayZ[i];
if (cached_page.major == major)
return &pages[cached_page.index];
}
page_map_t key = {major};
unsigned int i;
if (!page_map.bfind (key, &i))
return nullptr;
last_page_lookup = i;
last_page_lookup.set_relaxed (i);
return &pages[page_map[i].index];
}
page_t &page_at (unsigned int i) { return pages[page_map[i].index]; }

View file

@ -369,7 +369,7 @@ hb_blob_get_length (hb_blob_t *blob)
*
* Fetches the data from a blob.
*
* Returns: (transfer none) (array length=length): the byte data of @blob.
* Returns: (nullable) (transfer none) (array length=length): the byte data of @blob.
*
* Since: 0.9.2
**/
@ -572,7 +572,7 @@ _open_resource_fork (const char *file_name, hb_mapped_file_t *file)
strncpy (rsrc_name, file_name, name_len);
strncpy (rsrc_name + name_len, _PATH_RSRCFORKSPEC,
sizeof (_PATH_RSRCFORKSPEC) - 1);
sizeof (_PATH_RSRCFORKSPEC));
int fd = open (rsrc_name, O_RDONLY | O_BINARY, 0);
hb_free (rsrc_name);

View file

@ -31,7 +31,7 @@
#include "hb-buffer.hh"
static const char *serialize_formats[] = {
static const char *_hb_buffer_serialize_formats[] = {
"text",
"json",
nullptr
@ -50,7 +50,7 @@ static const char *serialize_formats[] = {
const char **
hb_buffer_serialize_list_formats ()
{
return serialize_formats;
return _hb_buffer_serialize_formats;
}
/**
@ -91,8 +91,8 @@ hb_buffer_serialize_format_to_string (hb_buffer_serialize_format_t format)
{
switch ((unsigned) format)
{
case HB_BUFFER_SERIALIZE_FORMAT_TEXT: return serialize_formats[0];
case HB_BUFFER_SERIALIZE_FORMAT_JSON: return serialize_formats[1];
case HB_BUFFER_SERIALIZE_FORMAT_TEXT: return _hb_buffer_serialize_formats[0];
case HB_BUFFER_SERIALIZE_FORMAT_JSON: return _hb_buffer_serialize_formats[1];
default:
case HB_BUFFER_SERIALIZE_FORMAT_INVALID: return nullptr;
}
@ -400,9 +400,9 @@ _hb_buffer_serialize_unicode_text (hb_buffer_t *buffer,
* @buf: (out) (array length=buf_size) (element-type uint8_t): output string to
* write serialized buffer into.
* @buf_size: the size of @buf.
* @buf_consumed: (out) (optional): if not %NULL, will be set to the number of byes written into @buf.
* @buf_consumed: (out) (optional): if not %NULL, will be set to the number of bytes written into @buf.
* @font: (nullable): the #hb_font_t used to shape this buffer, needed to
* read glyph names and extents. If %NULL, and empty font will be used.
* read glyph names and extents. If %NULL, an empty font will be used.
* @format: the #hb_buffer_serialize_format_t to use for formatting the output.
* @flags: the #hb_buffer_serialize_flags_t that control what glyph properties
* to serialize.
@ -514,7 +514,7 @@ hb_buffer_serialize_glyphs (hb_buffer_t *buffer,
* @buf: (out) (array length=buf_size) (element-type uint8_t): output string to
* write serialized buffer into.
* @buf_size: the size of @buf.
* @buf_consumed: (out) (optional): if not %NULL, will be set to the number of byes written into @buf.
* @buf_consumed: (out) (optional): if not %NULL, will be set to the number of bytes written into @buf.
* @format: the #hb_buffer_serialize_format_t to use for formatting the output.
* @flags: the #hb_buffer_serialize_flags_t that control what glyph properties
* to serialize.
@ -637,9 +637,9 @@ _hb_buffer_serialize_invalid (hb_buffer_t *buffer,
* @buf: (out) (array length=buf_size) (element-type uint8_t): output string to
* write serialized buffer into.
* @buf_size: the size of @buf.
* @buf_consumed: (out) (optional): if not %NULL, will be set to the number of byes written into @buf.
* @buf_consumed: (out) (optional): if not %NULL, will be set to the number of bytes written into @buf.
* @font: (nullable): the #hb_font_t used to shape this buffer, needed to
* read glyph names and extents. If %NULL, and empty font will be used.
* read glyph names and extents. If %NULL, an empty font will be used.
* @format: the #hb_buffer_serialize_format_t to use for formatting the output.
* @flags: the #hb_buffer_serialize_flags_t that control what glyph properties
* to serialize.

View file

@ -102,9 +102,9 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
/* Check that breaking up shaping at safe-to-break is indeed safe. */
hb_buffer_t *fragment = hb_buffer_create_similar (buffer);
hb_buffer_set_flags (fragment, hb_buffer_get_flags (fragment) & ~HB_BUFFER_FLAG_VERIFY);
hb_buffer_set_flags (fragment, (hb_buffer_flags_t (hb_buffer_get_flags (fragment) & ~HB_BUFFER_FLAG_VERIFY)));
hb_buffer_t *reconstruction = hb_buffer_create_similar (buffer);
hb_buffer_set_flags (reconstruction, hb_buffer_get_flags (reconstruction) & ~HB_BUFFER_FLAG_VERIFY);
hb_buffer_set_flags (reconstruction, (hb_buffer_flags_t (hb_buffer_get_flags (reconstruction) & ~HB_BUFFER_FLAG_VERIFY)));
unsigned int num_glyphs;
hb_glyph_info_t *info = hb_buffer_get_glyph_infos (buffer, &num_glyphs);
@ -169,6 +169,12 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
hb_buffer_destroy (fragment);
return false;
}
else if (!fragment->successful || fragment->shaping_failed)
{
hb_buffer_destroy (reconstruction);
hb_buffer_destroy (fragment);
return true;
}
hb_buffer_append (reconstruction, fragment, 0, -1);
start = end;
@ -238,10 +244,10 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
hb_buffer_t *fragments[2] {hb_buffer_create_similar (buffer),
hb_buffer_create_similar (buffer)};
hb_buffer_set_flags (fragments[0], hb_buffer_get_flags (fragments[0]) & ~HB_BUFFER_FLAG_VERIFY);
hb_buffer_set_flags (fragments[1], hb_buffer_get_flags (fragments[1]) & ~HB_BUFFER_FLAG_VERIFY);
hb_buffer_set_flags (fragments[0], (hb_buffer_flags_t (hb_buffer_get_flags (fragments[0]) & ~HB_BUFFER_FLAG_VERIFY)));
hb_buffer_set_flags (fragments[1], (hb_buffer_flags_t (hb_buffer_get_flags (fragments[1]) & ~HB_BUFFER_FLAG_VERIFY)));
hb_buffer_t *reconstruction = hb_buffer_create_similar (buffer);
hb_buffer_set_flags (reconstruction, hb_buffer_get_flags (reconstruction) & ~HB_BUFFER_FLAG_VERIFY);
hb_buffer_set_flags (reconstruction, (hb_buffer_flags_t (hb_buffer_get_flags (reconstruction) & ~HB_BUFFER_FLAG_VERIFY)));
hb_segment_properties_t props;
hb_buffer_get_segment_properties (buffer, &props);
hb_buffer_set_segment_properties (fragments[0], &props);
@ -317,12 +323,22 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
ret = false;
goto out;
}
else if (!fragments[0]->successful || fragments[0]->shaping_failed)
{
ret = true;
goto out;
}
if (!hb_shape_full (font, fragments[1], features, num_features, shapers))
{
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "shaping failed while shaping fragment.");
ret = false;
goto out;
}
else if (!fragments[1]->successful || fragments[1]->shaping_failed)
{
ret = true;
goto out;
}
if (!forward)
{
@ -402,6 +418,7 @@ hb_buffer_t::verify (hb_buffer_t *text_buffer,
ret = false;
if (!ret)
{
#ifndef HB_NO_BUFFER_SERIALIZE
unsigned len = text_buffer->len;
hb_vector_t<char> bytes;
if (likely (bytes.resize (len * 10 + 16)))
@ -414,6 +431,7 @@ hb_buffer_t::verify (hb_buffer_t *text_buffer,
HB_BUFFER_SERIALIZE_FLAG_NO_CLUSTERS);
buffer_verify_error (this, font, BUFFER_VERIFY_ERROR "text was: %s.", bytes.arrayZ);
}
#endif
}
return ret;
}

View file

@ -81,8 +81,8 @@ hb_segment_properties_equal (const hb_segment_properties_t *a,
unsigned int
hb_segment_properties_hash (const hb_segment_properties_t *p)
{
return (unsigned int) p->direction ^
(unsigned int) p->script ^
return ((unsigned int) p->direction * 31 +
(unsigned int) p->script) * 31 +
(intptr_t) (p->language);
}
@ -289,6 +289,7 @@ hb_buffer_t::clear ()
props = default_props;
successful = true;
shaping_failed = false;
have_output = false;
have_positions = false;
@ -310,6 +311,7 @@ hb_buffer_t::enter ()
{
deallocate_var_all ();
serial = 0;
shaping_failed = false;
scratch_flags = HB_BUFFER_SCRATCH_FLAG_DEFAULT;
if (likely (!hb_unsigned_mul_overflows (len, HB_BUFFER_MAX_LEN_FACTOR)))
{
@ -329,6 +331,7 @@ hb_buffer_t::leave ()
max_ops = HB_BUFFER_MAX_OPS_DEFAULT;
deallocate_var_all ();
serial = 0;
// Intentionally not resetting shaping_failed, such that it can be inspected.
}
@ -542,7 +545,8 @@ hb_buffer_t::delete_glyph ()
/* The logic here is duplicated in hb_ot_hide_default_ignorables(). */
unsigned int cluster = info[idx].cluster;
if (idx + 1 < len && cluster == info[idx + 1].cluster)
if ((idx + 1 < len && cluster == info[idx + 1].cluster) ||
(out_len && cluster == out_info[out_len - 1].cluster))
{
/* Cluster survives; do nothing. */
goto done;
@ -623,6 +627,7 @@ DEFINE_NULL_INSTANCE (hb_buffer_t) =
HB_SEGMENT_PROPERTIES_DEFAULT,
false, /* successful */
true, /* shaping_failed */
false, /* have_output */
true /* have_positions */
@ -631,7 +636,7 @@ DEFINE_NULL_INSTANCE (hb_buffer_t) =
/**
* hb_buffer_create: (Xconstructor)
* hb_buffer_create:
*
* Creates a new #hb_buffer_t with all properties to defaults.
*
@ -834,7 +839,7 @@ hb_buffer_set_content_type (hb_buffer_t *buffer,
* Since: 0.9.5
**/
hb_buffer_content_type_t
hb_buffer_get_content_type (hb_buffer_t *buffer)
hb_buffer_get_content_type (const hb_buffer_t *buffer)
{
return buffer->content_type;
}
@ -876,7 +881,7 @@ hb_buffer_set_unicode_funcs (hb_buffer_t *buffer,
* Since: 0.9.2
**/
hb_unicode_funcs_t *
hb_buffer_get_unicode_funcs (hb_buffer_t *buffer)
hb_buffer_get_unicode_funcs (const hb_buffer_t *buffer)
{
return buffer->unicode;
}
@ -919,7 +924,7 @@ hb_buffer_set_direction (hb_buffer_t *buffer,
* Since: 0.9.2
**/
hb_direction_t
hb_buffer_get_direction (hb_buffer_t *buffer)
hb_buffer_get_direction (const hb_buffer_t *buffer)
{
return buffer->props.direction;
}
@ -963,7 +968,7 @@ hb_buffer_set_script (hb_buffer_t *buffer,
* Since: 0.9.2
**/
hb_script_t
hb_buffer_get_script (hb_buffer_t *buffer)
hb_buffer_get_script (const hb_buffer_t *buffer)
{
return buffer->props.script;
}
@ -1007,7 +1012,7 @@ hb_buffer_set_language (hb_buffer_t *buffer,
* Since: 0.9.2
**/
hb_language_t
hb_buffer_get_language (hb_buffer_t *buffer)
hb_buffer_get_language (const hb_buffer_t *buffer)
{
return buffer->props.language;
}
@ -1043,7 +1048,7 @@ hb_buffer_set_segment_properties (hb_buffer_t *buffer,
* Since: 0.9.7
**/
void
hb_buffer_get_segment_properties (hb_buffer_t *buffer,
hb_buffer_get_segment_properties (const hb_buffer_t *buffer,
hb_segment_properties_t *props)
{
*props = buffer->props;
@ -1081,7 +1086,7 @@ hb_buffer_set_flags (hb_buffer_t *buffer,
* Since: 0.9.7
**/
hb_buffer_flags_t
hb_buffer_get_flags (hb_buffer_t *buffer)
hb_buffer_get_flags (const hb_buffer_t *buffer)
{
return buffer->flags;
}
@ -1120,7 +1125,7 @@ hb_buffer_set_cluster_level (hb_buffer_t *buffer,
* Since: 0.9.42
**/
hb_buffer_cluster_level_t
hb_buffer_get_cluster_level (hb_buffer_t *buffer)
hb_buffer_get_cluster_level (const hb_buffer_t *buffer)
{
return buffer->cluster_level;
}
@ -1161,7 +1166,7 @@ hb_buffer_set_replacement_codepoint (hb_buffer_t *buffer,
* Since: 0.9.31
**/
hb_codepoint_t
hb_buffer_get_replacement_codepoint (hb_buffer_t *buffer)
hb_buffer_get_replacement_codepoint (const hb_buffer_t *buffer)
{
return buffer->replacement;
}
@ -1201,7 +1206,7 @@ hb_buffer_set_invisible_glyph (hb_buffer_t *buffer,
* Since: 2.0.0
**/
hb_codepoint_t
hb_buffer_get_invisible_glyph (hb_buffer_t *buffer)
hb_buffer_get_invisible_glyph (const hb_buffer_t *buffer)
{
return buffer->invisible;
}
@ -1241,7 +1246,7 @@ hb_buffer_set_not_found_glyph (hb_buffer_t *buffer,
* Since: 3.1.0
**/
hb_codepoint_t
hb_buffer_get_not_found_glyph (hb_buffer_t *buffer)
hb_buffer_get_not_found_glyph (const hb_buffer_t *buffer)
{
return buffer->not_found;
}
@ -1381,7 +1386,7 @@ hb_buffer_set_length (hb_buffer_t *buffer,
* Since: 0.9.2
**/
unsigned int
hb_buffer_get_length (hb_buffer_t *buffer)
hb_buffer_get_length (const hb_buffer_t *buffer)
{
return buffer->len;
}

View file

@ -289,7 +289,7 @@ hb_buffer_set_content_type (hb_buffer_t *buffer,
hb_buffer_content_type_t content_type);
HB_EXTERN hb_buffer_content_type_t
hb_buffer_get_content_type (hb_buffer_t *buffer);
hb_buffer_get_content_type (const hb_buffer_t *buffer);
HB_EXTERN void
@ -297,21 +297,21 @@ hb_buffer_set_unicode_funcs (hb_buffer_t *buffer,
hb_unicode_funcs_t *unicode_funcs);
HB_EXTERN hb_unicode_funcs_t *
hb_buffer_get_unicode_funcs (hb_buffer_t *buffer);
hb_buffer_get_unicode_funcs (const hb_buffer_t *buffer);
HB_EXTERN void
hb_buffer_set_direction (hb_buffer_t *buffer,
hb_direction_t direction);
HB_EXTERN hb_direction_t
hb_buffer_get_direction (hb_buffer_t *buffer);
hb_buffer_get_direction (const hb_buffer_t *buffer);
HB_EXTERN void
hb_buffer_set_script (hb_buffer_t *buffer,
hb_script_t script);
HB_EXTERN hb_script_t
hb_buffer_get_script (hb_buffer_t *buffer);
hb_buffer_get_script (const hb_buffer_t *buffer);
HB_EXTERN void
hb_buffer_set_language (hb_buffer_t *buffer,
@ -319,14 +319,14 @@ hb_buffer_set_language (hb_buffer_t *buffer,
HB_EXTERN hb_language_t
hb_buffer_get_language (hb_buffer_t *buffer);
hb_buffer_get_language (const hb_buffer_t *buffer);
HB_EXTERN void
hb_buffer_set_segment_properties (hb_buffer_t *buffer,
const hb_segment_properties_t *props);
HB_EXTERN void
hb_buffer_get_segment_properties (hb_buffer_t *buffer,
hb_buffer_get_segment_properties (const hb_buffer_t *buffer,
hb_segment_properties_t *props);
HB_EXTERN void
@ -373,6 +373,7 @@ hb_buffer_guess_segment_properties (hb_buffer_t *buffer);
* flag indicating that the @HB_GLYPH_FLAG_UNSAFE_TO_CONCAT
* glyph-flag should be produced by the shaper. By default
* it will not be produced since it incurs a cost. Since: 4.0.0
* @HB_BUFFER_FLAG_DEFINED: All currently defined flags: Since: 4.4.0
*
* Flags for #hb_buffer_t.
*
@ -386,7 +387,9 @@ typedef enum { /*< flags >*/
HB_BUFFER_FLAG_REMOVE_DEFAULT_IGNORABLES = 0x00000008u,
HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE = 0x00000010u,
HB_BUFFER_FLAG_VERIFY = 0x00000020u,
HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT = 0x00000040u
HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT = 0x00000040u,
HB_BUFFER_FLAG_DEFINED = 0x0000007Fu
} hb_buffer_flags_t;
HB_EXTERN void
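The new HB_BUFFER_FLAG_DEFINED value simply ORs together every flag defined so far (0x7Fu). A minimal sketch of one way a caller could use it to keep only known flag bits; the helper name is invented for illustration:

/* Hypothetical helper: drop any bits HarfBuzz 4.4 does not define. */
static hb_buffer_flags_t
sanitize_buffer_flags (unsigned int raw_bits)
{
  return (hb_buffer_flags_t) (raw_bits & HB_BUFFER_FLAG_DEFINED);
}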
@ -394,7 +397,7 @@ hb_buffer_set_flags (hb_buffer_t *buffer,
hb_buffer_flags_t flags);
HB_EXTERN hb_buffer_flags_t
hb_buffer_get_flags (hb_buffer_t *buffer);
hb_buffer_get_flags (const hb_buffer_t *buffer);
/**
* hb_buffer_cluster_level_t:
@ -436,7 +439,7 @@ hb_buffer_set_cluster_level (hb_buffer_t *buffer,
hb_buffer_cluster_level_t cluster_level);
HB_EXTERN hb_buffer_cluster_level_t
hb_buffer_get_cluster_level (hb_buffer_t *buffer);
hb_buffer_get_cluster_level (const hb_buffer_t *buffer);
/**
* HB_BUFFER_REPLACEMENT_CODEPOINT_DEFAULT:
@ -453,21 +456,21 @@ hb_buffer_set_replacement_codepoint (hb_buffer_t *buffer,
hb_codepoint_t replacement);
HB_EXTERN hb_codepoint_t
hb_buffer_get_replacement_codepoint (hb_buffer_t *buffer);
hb_buffer_get_replacement_codepoint (const hb_buffer_t *buffer);
HB_EXTERN void
hb_buffer_set_invisible_glyph (hb_buffer_t *buffer,
hb_codepoint_t invisible);
HB_EXTERN hb_codepoint_t
hb_buffer_get_invisible_glyph (hb_buffer_t *buffer);
hb_buffer_get_invisible_glyph (const hb_buffer_t *buffer);
HB_EXTERN void
hb_buffer_set_not_found_glyph (hb_buffer_t *buffer,
hb_codepoint_t not_found);
HB_EXTERN hb_codepoint_t
hb_buffer_get_not_found_glyph (hb_buffer_t *buffer);
hb_buffer_get_not_found_glyph (const hb_buffer_t *buffer);
/*
@ -549,7 +552,7 @@ hb_buffer_set_length (hb_buffer_t *buffer,
unsigned int length);
HB_EXTERN unsigned int
hb_buffer_get_length (hb_buffer_t *buffer);
hb_buffer_get_length (const hb_buffer_t *buffer);
/* Getting glyphs out of the buffer */
@ -583,6 +586,7 @@ hb_buffer_normalize_glyphs (hb_buffer_t *buffer);
* @HB_BUFFER_SERIALIZE_FLAG_GLYPH_FLAGS: serialize glyph flags. Since: 1.5.0
* @HB_BUFFER_SERIALIZE_FLAG_NO_ADVANCES: do not serialize glyph advances,
* glyph offsets will reflect absolute glyph positions. Since: 1.8.0
* @HB_BUFFER_SERIALIZE_FLAG_DEFINED: All currently defined flags. Since: 4.4.0
*
* Flags that control what glyph information are serialized in hb_buffer_serialize_glyphs().
*
@ -595,7 +599,9 @@ typedef enum { /*< flags >*/
HB_BUFFER_SERIALIZE_FLAG_NO_GLYPH_NAMES = 0x00000004u,
HB_BUFFER_SERIALIZE_FLAG_GLYPH_EXTENTS = 0x00000008u,
HB_BUFFER_SERIALIZE_FLAG_GLYPH_FLAGS = 0x00000010u,
HB_BUFFER_SERIALIZE_FLAG_NO_ADVANCES = 0x00000020u
HB_BUFFER_SERIALIZE_FLAG_NO_ADVANCES = 0x00000020u,
HB_BUFFER_SERIALIZE_FLAG_DEFINED = 0x0000003Fu
} hb_buffer_serialize_flags_t;
/**

View file

@ -57,6 +57,7 @@
static_assert ((sizeof (hb_glyph_info_t) == 20), "");
static_assert ((sizeof (hb_glyph_info_t) == sizeof (hb_glyph_position_t)), "");
HB_MARK_AS_FLAG_T (hb_glyph_flags_t);
HB_MARK_AS_FLAG_T (hb_buffer_flags_t);
HB_MARK_AS_FLAG_T (hb_buffer_serialize_flags_t);
HB_MARK_AS_FLAG_T (hb_buffer_diff_flags_t);
@ -69,12 +70,13 @@ enum hb_buffer_scratch_flags_t {
HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT = 0x00000008u,
HB_BUFFER_SCRATCH_FLAG_HAS_CGJ = 0x00000010u,
HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS = 0x00000020u,
HB_BUFFER_SCRATCH_FLAG_HAS_BROKEN_SYLLABLE = 0x00000040u,
/* Reserved for complex shapers' internal use. */
HB_BUFFER_SCRATCH_FLAG_COMPLEX0 = 0x01000000u,
HB_BUFFER_SCRATCH_FLAG_COMPLEX1 = 0x02000000u,
HB_BUFFER_SCRATCH_FLAG_COMPLEX2 = 0x04000000u,
HB_BUFFER_SCRATCH_FLAG_COMPLEX3 = 0x08000000u,
/* Reserved for shapers' internal use. */
HB_BUFFER_SCRATCH_FLAG_SHAPER0 = 0x01000000u,
HB_BUFFER_SCRATCH_FLAG_SHAPER1 = 0x02000000u,
HB_BUFFER_SCRATCH_FLAG_SHAPER2 = 0x04000000u,
HB_BUFFER_SCRATCH_FLAG_SHAPER3 = 0x08000000u,
};
HB_MARK_AS_FLAG_T (hb_buffer_scratch_flags_t);
@ -106,6 +108,7 @@ struct hb_buffer_t
hb_segment_properties_t props; /* Script, language, direction */
bool successful; /* Allocations successful */
bool shaping_failed; /* Shaping failure */
bool have_output; /* Whether we have an output buffer going on */
bool have_positions; /* Whether we have positions */
@ -130,9 +133,7 @@ struct hb_buffer_t
* Managed by enter / leave
*/
#ifndef HB_NDEBUG
uint8_t allocated_var_bits;
#endif
uint8_t serial;
hb_buffer_scratch_flags_t scratch_flags; /* Have space-fallback, etc. */
unsigned int max_len; /* Maximum allowed len. */
@ -161,38 +162,40 @@ struct hb_buffer_t
void allocate_var (unsigned int start, unsigned int count)
{
#ifndef HB_NDEBUG
unsigned int end = start + count;
assert (end <= 8);
unsigned int bits = (1u<<end) - (1u<<start);
assert (0 == (allocated_var_bits & bits));
allocated_var_bits |= bits;
#endif
}
bool try_allocate_var (unsigned int start, unsigned int count)
{
unsigned int end = start + count;
assert (end <= 8);
unsigned int bits = (1u<<end) - (1u<<start);
if (allocated_var_bits & bits)
return false;
allocated_var_bits |= bits;
return true;
}
void deallocate_var (unsigned int start, unsigned int count)
{
#ifndef HB_NDEBUG
unsigned int end = start + count;
assert (end <= 8);
unsigned int bits = (1u<<end) - (1u<<start);
assert (bits == (allocated_var_bits & bits));
allocated_var_bits &= ~bits;
#endif
}
void assert_var (unsigned int start, unsigned int count)
{
#ifndef HB_NDEBUG
unsigned int end = start + count;
assert (end <= 8);
unsigned int bits = (1u<<end) - (1u<<start);
HB_UNUSED unsigned int bits = (1u<<end) - (1u<<start);
assert (bits == (allocated_var_bits & bits));
#endif
}
void deallocate_var_all ()
{
#ifndef HB_NDEBUG
allocated_var_bits = 0;
#endif
}
hb_glyph_info_t &cur (unsigned int i = 0) { return info[idx + i]; }
@ -549,7 +552,7 @@ struct hb_buffer_t
#ifdef HB_NO_BUFFER_MESSAGE
return true;
#else
if (!messaging ())
if (likely (!messaging ()))
return true;
message_depth++;
@ -619,9 +622,10 @@ DECLARE_NULL_INSTANCE (hb_buffer_t);
#define HB_BUFFER_XALLOCATE_VAR(b, func, var) \
b->func (offsetof (hb_glyph_info_t, var) - offsetof(hb_glyph_info_t, var1), \
sizeof (b->info[0].var))
#define HB_BUFFER_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, allocate_var, var ())
#define HB_BUFFER_DEALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, deallocate_var, var ())
#define HB_BUFFER_ASSERT_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, assert_var, var ())
#define HB_BUFFER_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, allocate_var, var ())
#define HB_BUFFER_TRY_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, try_allocate_var, var ())
#define HB_BUFFER_DEALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, deallocate_var, var ())
#define HB_BUFFER_ASSERT_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, assert_var, var ())
#endif /* HB_BUFFER_HH */

View file

@ -32,7 +32,7 @@
/* Implements a lockfree cache for int->int functions. */
template <unsigned int key_bits, unsigned int value_bits, unsigned int cache_bits>
template <unsigned int key_bits=16, unsigned int value_bits=8 + 32 - key_bits, unsigned int cache_bits=8>
struct hb_cache_t
{
static_assert ((key_bits >= cache_bits), "");

View file

@ -160,7 +160,7 @@ hb_tag_to_string (hb_tag_t tag, char *buf)
/* hb_direction_t */
const char direction_strings[][4] = {
static const char direction_strings[][4] = {
"ltr",
"rtl",
"ttb",

View file

@ -64,6 +64,7 @@
#define HB_NO_FACE_COLLECT_UNICODES
#define HB_NO_GETENV
#define HB_NO_HINTING
#define HB_NO_LANGUAGE_LONG
#define HB_NO_LANGUAGE_PRIVATE_SUBTAG
#define HB_NO_LAYOUT_FEATURE_PARAMS
#define HB_NO_LAYOUT_COLLECT_GLYPHS
@ -145,10 +146,10 @@
#endif
#ifdef HB_NO_OT_SHAPE_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_ARABIC_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_HEBREW_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_THAI_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_VOWEL_CONSTRAINTS
#define HB_NO_OT_SHAPER_ARABIC_FALLBACK
#define HB_NO_OT_SHAPER_HEBREW_FALLBACK
#define HB_NO_OT_SHAPER_THAI_FALLBACK
#define HB_NO_OT_SHAPER_VOWEL_CONSTRAINTS
#endif
#ifdef NDEBUG
@ -163,5 +164,9 @@
#endif
#endif
#ifdef HB_OPTIMIZE_SIZE
#define HB_NO_OT_LAYOUT_LOOKUP_CACHE
#endif
#endif /* HB_CONFIG_HH */

View file

@ -332,7 +332,7 @@ _hb_coretext_shaper_font_data_create (hb_font_t *font)
return nullptr;
}
if (font->coords)
if (font->num_coords)
{
CFMutableDictionaryRef variations =
CFDictionaryCreateMutable (kCFAllocatorDefault,
@ -379,37 +379,6 @@ _hb_coretext_shaper_font_data_destroy (hb_coretext_font_data_t *data)
CFRelease ((CTFontRef) data);
}
static const hb_coretext_font_data_t *
hb_coretext_font_data_sync (hb_font_t *font)
{
retry:
const hb_coretext_font_data_t *data = font->data.coretext;
if (unlikely (!data)) return nullptr;
if (fabs (CTFontGetSize ((CTFontRef) data) - (CGFloat) font->ptem) > (CGFloat) .5)
{
/* XXX-MT-bug
* Note that evaluating condition above can be dangerous if another thread
* got here first and destructed data. That's, as always, bad use pattern.
* If you modify the font (change font size), other threads must not be
* using it at the same time. However, since this check is delayed to
* when one actually tries to shape something, this is a XXX race condition
* (and the only one we have that I know of) right now. Ie. you modify the
* font size in one thread, then (supposedly safely) try to use it from two
* or more threads and BOOM! I'm not sure how to fix this. We want RCU.
*/
/* Drop and recreate. */
/* If someone dropped it in the mean time, throw it away and don't touch it.
* Otherwise, destruct it. */
if (likely (font->data.coretext.cmpexch (const_cast<hb_coretext_font_data_t *> (data), nullptr)))
_hb_coretext_shaper_font_data_destroy (const_cast<hb_coretext_font_data_t *> (data));
else
goto retry;
}
return font->data.coretext;
}
/**
* hb_coretext_font_create:
* @ct_font: The CTFontRef to work upon
@ -455,8 +424,8 @@ hb_coretext_font_create (CTFontRef ct_font)
CTFontRef
hb_coretext_font_get_ct_font (hb_font_t *font)
{
const hb_coretext_font_data_t *data = hb_coretext_font_data_sync (font);
return data ? (CTFontRef) data : nullptr;
CTFontRef ct_font = (CTFontRef) (const void *) font->data.coretext;
return ct_font ? (CTFontRef) ct_font : nullptr;
}
@ -516,7 +485,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
{
hb_face_t *face = font->face;
CGFontRef cg_font = (CGFontRef) (const void *) face->data.coretext;
CTFontRef ct_font = (CTFontRef) hb_coretext_font_data_sync (font);
CTFontRef ct_font = (CTFontRef) (const void *) font->data.coretext;
CGFloat ct_font_size = CTFontGetSize (ct_font);
CGFloat x_mult = (CGFloat) font->x_scale / ct_font_size;
@ -1106,7 +1075,8 @@ resize_and_retry:
advance = positions[j + 1].x - positions[j].x;
else /* last glyph */
advance = run_advance - (positions[j].x - positions[0].x);
info->mask = round (advance * x_mult);
/* int cast necessary to pass through negative values. */
info->mask = (int) round (advance * x_mult);
info->var1.i32 = x_offset;
info->var2.i32 = round (positions[j].y * y_mult);
info++;
@ -1122,7 +1092,8 @@ resize_and_retry:
advance = positions[j + 1].y - positions[j].y;
else /* last glyph */
advance = run_advance - (positions[j].y - positions[0].y);
info->mask = round (advance * y_mult);
/* int cast necessary to pass through negative values. */
info->mask = (int) round (advance * y_mult);
info->var1.i32 = round (positions[j].x * x_mult);
info->var2.i32 = y_offset;
info++;
@ -1151,7 +1122,7 @@ resize_and_retry:
pos->x_offset = info->var1.i32;
pos->y_offset = info->var2.i32;
info++, pos++;
info++; pos++;
}
else
for (unsigned int i = 0; i < count; i++)
@ -1160,7 +1131,7 @@ resize_and_retry:
pos->x_offset = info->var1.i32;
pos->y_offset = info->var2.i32;
info++, pos++;
info++; pos++;
}
/* Fix up clusters so that we never return out-of-order indices;
@ -1173,7 +1144,8 @@ resize_and_retry:
* This does *not* mean we'll form the same clusters as Uniscribe
* or the native OT backend, only that the cluster indices will be
* monotonic in the output buffer. */
if (count > 1 && (status_or & kCTRunStatusNonMonotonic))
if (count > 1 && (status_or & kCTRunStatusNonMonotonic) &&
buffer->cluster_level != HB_BUFFER_CLUSTER_LEVEL_CHARACTERS)
{
hb_glyph_info_t *info = buffer->info;
if (HB_DIRECTION_IS_FORWARD (buffer->props.direction))
@ -1197,6 +1169,10 @@ resize_and_retry:
}
}
/* TODO: Sometimes the above positioning code generates negative
* advance values. Fix them up. Example, with NotoNastaliqUrdu
* font and sequence ابهد. */
buffer->clear_glyph_flags ();
buffer->unsafe_to_break ();

thirdparty/harfbuzz/src/hb-cplusplus.hh (new file)
View file

@ -0,0 +1,192 @@
/*
* Copyright © 2022 Behdad Esfahbod
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*/
#ifndef HB_CPLUSPLUS_HH
#define HB_CPLUSPLUS_HH
#include "hb.h"
HB_BEGIN_DECLS
HB_END_DECLS
#ifdef __cplusplus
#include <functional>
#include <utility>
#if 0
#if !(__cplusplus >= 201103L)
#error "HarfBuzz C++ helpers require C++11"
#endif
#endif
namespace hb {
template <typename T>
struct vtable;
template <typename T>
struct shared_ptr
{
using element_type = T;
using v = vtable<T>;
explicit shared_ptr (T *p = nullptr) : p (p) {}
shared_ptr (const shared_ptr &o) : p (v::reference (o.p)) {}
shared_ptr (shared_ptr &&o) : p (o.p) { o.p = nullptr; }
shared_ptr& operator = (const shared_ptr &o) { if (p != o.p) { destroy (); p = o.p; reference (); } return *this; }
shared_ptr& operator = (shared_ptr &&o) { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
~shared_ptr () { v::destroy (p); p = nullptr; }
T* get() const { return p; }
void swap (shared_ptr &o) { std::swap (p, o.p); }
friend void swap (shared_ptr &a, shared_ptr &b) { std::swap (a.p, b.p); }
operator T * () const { return p; }
T& operator * () const { return *get (); }
T* operator -> () const { return get (); }
operator bool () { return p; }
bool operator == (const shared_ptr &o) { return p == o.p; }
bool operator != (const shared_ptr &o) { return p != o.p; }
static T* get_empty() { return v::get_empty (); }
T* reference() { return v::reference (p); }
void destroy() { v::destroy (p); }
void set_user_data (hb_user_data_key_t *key,
void *value,
hb_destroy_func_t destroy,
hb_bool_t replace) { v::set_user_data (p, key, value, destroy, replace); }
void * get_user_data (hb_user_data_key_t *key) { return v::get_user_data (p, key); }
private:
T *p;
};
template<typename T> struct is_shared_ptr : std::false_type {};
template<typename T> struct is_shared_ptr<shared_ptr<T>> : std::true_type {};
template <typename T>
struct unique_ptr
{
using element_type = T;
using v = vtable<T>;
explicit unique_ptr (T *p = nullptr) : p (p) {}
unique_ptr (const unique_ptr &o) = delete;
unique_ptr (unique_ptr &&o) : p (o.p) { o.p = nullptr; }
unique_ptr& operator = (const unique_ptr &o) = delete;
unique_ptr& operator = (unique_ptr &&o) { v::destroy (p); p = o.p; o.p = nullptr; return *this; }
~unique_ptr () { v::destroy (p); p = nullptr; }
T* get() const { return p; }
T* release () { T* v = p; p = nullptr; return v; }
void swap (unique_ptr &o) { std::swap (p, o.p); }
friend void swap (unique_ptr &a, unique_ptr &b) { std::swap (a.p, b.p); }
operator T * () const { return p; }
T& operator * () const { return *get (); }
T* operator -> () const { return get (); }
operator bool () { return p; }
private:
T *p;
};
template<typename T> struct is_unique_ptr : std::false_type {};
template<typename T> struct is_unique_ptr<unique_ptr<T>> : std::true_type {};
template <typename T,
T * (*_get_empty) (void),
T * (*_reference) (T *),
void (*_destroy) (T *),
hb_bool_t (*_set_user_data) (T *,
hb_user_data_key_t *,
void *,
hb_destroy_func_t,
hb_bool_t),
void * (*_get_user_data) (T *,
hb_user_data_key_t *)>
struct vtable_t
{
static constexpr auto get_empty = _get_empty;
static constexpr auto reference = _reference;
static constexpr auto destroy = _destroy;
static constexpr auto set_user_data = _set_user_data;
static constexpr auto get_user_data = _get_user_data;
};
#define HB_DEFINE_VTABLE(name) \
template<> \
struct vtable<hb_##name##_t> \
: vtable_t<hb_##name##_t, \
&hb_##name##_get_empty, \
&hb_##name##_reference, \
&hb_##name##_destroy, \
&hb_##name##_set_user_data, \
&hb_##name##_get_user_data> {}
HB_DEFINE_VTABLE (buffer);
HB_DEFINE_VTABLE (blob);
HB_DEFINE_VTABLE (face);
HB_DEFINE_VTABLE (font);
HB_DEFINE_VTABLE (font_funcs);
HB_DEFINE_VTABLE (map);
HB_DEFINE_VTABLE (set);
HB_DEFINE_VTABLE (shape_plan);
HB_DEFINE_VTABLE (unicode_funcs);
#undef HB_DEFINE_VTABLE
} // namespace hb
template<typename T>
struct std::hash<hb::shared_ptr<T>>
{
std::size_t operator()(const hb::shared_ptr<T>& v) const noexcept
{
std::size_t h = std::hash<decltype (v.get ())>{}(v.get ());
return h;
}
};
template<typename T>
struct std::hash<hb::unique_ptr<T>>
{
std::size_t operator()(const hb::unique_ptr<T>& v) const noexcept
{
std::size_t h = std::hash<decltype (v.get ())>{}(v.get ());
return h;
}
};
#endif /* __cplusplus */
#endif /* HB_CPLUSPLUS_HH */
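A minimal usage sketch of the new wrappers, assuming only the public buffer and shaping API on top of this header: hb::unique_ptr releases its object through the vtable's destroy (here hb_buffer_destroy) when it goes out of scope, and the implicit conversion operator lets it be passed directly to C entry points.

#include "hb.h"
#include "hb-cplusplus.hh"

static void
shape_with_raii (hb_font_t *font)
{
  hb::unique_ptr<hb_buffer_t> buffer {hb_buffer_create ()};
  hb_buffer_add_utf8 (buffer, "abc", -1, 0, -1);   /* implicit hb_buffer_t * conversion */
  hb_buffer_guess_segment_properties (buffer);
  hb_shape (font, buffer, nullptr, 0);
}                                                   /* hb_buffer_destroy runs here */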

View file

@ -241,17 +241,12 @@ struct hb_directwrite_font_data_t {};
hb_directwrite_font_data_t *
_hb_directwrite_shaper_font_data_create (hb_font_t *font)
{
hb_directwrite_font_data_t *data = new hb_directwrite_font_data_t;
if (unlikely (!data))
return nullptr;
return data;
return (hb_directwrite_font_data_t *) HB_SHAPER_DATA_SUCCEEDED;
}
void
_hb_directwrite_shaper_font_data_destroy (hb_directwrite_font_data_t *data)
{
delete data;
}

View file

@ -56,12 +56,14 @@ hb_draw_quadratic_to_nil (hb_draw_funcs_t *dfuncs, void *draw_data,
float to_x, float to_y,
void *user_data HB_UNUSED)
{
#define HB_ONE_THIRD 0.33333333f
dfuncs->emit_cubic_to (draw_data, *st,
(st->current_x + 2.f * control_x) / 3.f,
(st->current_y + 2.f * control_y) / 3.f,
(to_x + 2.f * control_x) / 3.f,
(to_y + 2.f * control_y) / 3.f,
(st->current_x + 2.f * control_x) * HB_ONE_THIRD,
(st->current_y + 2.f * control_y) * HB_ONE_THIRD,
(to_x + 2.f * control_x) * HB_ONE_THIRD,
(to_y + 2.f * control_y) * HB_ONE_THIRD,
to_x, to_y);
#undef HB_ONE_THIRD
}
static void
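The thirds in the fallback above are the standard quadratic-to-cubic degree elevation: a quadratic segment from P0 (st->current_x/y) to P2 (to_x/y) with control point C is exactly the cubic whose inner control points are

  C1 = (P0 + 2*C) / 3
  C2 = (P2 + 2*C) / 3

so multiplying by HB_ONE_THIRD only replaces the division by 3.f with a multiplication, at a tiny cost in precision (0.33333333f is not exactly one third).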
@ -89,25 +91,46 @@ hb_draw_funcs_set_##name##_func (hb_draw_funcs_t *dfuncs, \
if (hb_object_is_immutable (dfuncs)) \
return; \
\
if (dfuncs->destroy.name) \
dfuncs->destroy.name (dfuncs->user_data.name); \
\
if (func) { \
dfuncs->func.name = func; \
dfuncs->user_data.name = user_data; \
dfuncs->destroy.name = destroy; \
} else { \
dfuncs->func.name = hb_draw_##name##_nil; \
dfuncs->user_data.name = nullptr; \
dfuncs->destroy.name = nullptr; \
} \
if (dfuncs->destroy && dfuncs->destroy->name) \
dfuncs->destroy->name (!dfuncs->user_data ? nullptr : dfuncs->user_data->name); \
\
if (user_data && !dfuncs->user_data) \
{ \
dfuncs->user_data = (decltype (dfuncs->user_data)) hb_calloc (1, sizeof (*dfuncs->user_data)); \
if (unlikely (!dfuncs->user_data)) \
goto fail; \
} \
if (destroy && !dfuncs->destroy) \
{ \
dfuncs->destroy = (decltype (dfuncs->destroy)) hb_calloc (1, sizeof (*dfuncs->destroy)); \
if (unlikely (!dfuncs->destroy)) \
goto fail; \
} \
\
if (func) { \
dfuncs->func.name = func; \
if (dfuncs->user_data) \
dfuncs->user_data->name = user_data; \
if (dfuncs->destroy) \
dfuncs->destroy->name = destroy; \
} else { \
dfuncs->func.name = hb_draw_##name##_nil; \
if (dfuncs->user_data) \
dfuncs->user_data->name = nullptr; \
if (dfuncs->destroy) \
dfuncs->destroy->name = nullptr; \
} \
\
fail: \
if (destroy) \
destroy (user_data); \
}
HB_DRAW_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_DRAW_FUNC_IMPLEMENT
/**
* hb_draw_funcs_create: (Xconstructor)
* hb_draw_funcs_create:
*
* Creates a new draw callbacks object.
*
@ -177,11 +200,13 @@ hb_draw_funcs_destroy (hb_draw_funcs_t *dfuncs)
{
if (!hb_object_destroy (dfuncs)) return;
if (dfuncs->destroy)
{
#define HB_DRAW_FUNC_IMPLEMENT(name) \
if (dfuncs->destroy.name) dfuncs->destroy.name (dfuncs->user_data.name);
HB_DRAW_FUNCS_IMPLEMENT_CALLBACKS
if (dfuncs->destroy->name) dfuncs->destroy->name (!dfuncs->user_data ? nullptr : dfuncs->user_data->name);
HB_DRAW_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_DRAW_FUNC_IMPLEMENT
}
hb_free (dfuncs);
}
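A minimal sketch of registering one of these callbacks through the public API; the callback body is purely illustrative. Callbacks that are never set keep the nil fallbacks, and passing nullptr for user_data and destroy means the lazily allocated arrays above are never needed.

#include "hb.h"
#include <cstdio>

static void
my_move_to (hb_draw_funcs_t *, void *, hb_draw_state_t *,
            float to_x, float to_y, void *)
{
  printf ("M %g,%g\n", to_x, to_y);
}

static void
dump_glyph_outline (hb_font_t *font, hb_codepoint_t glyph)
{
  hb_draw_funcs_t *funcs = hb_draw_funcs_create ();
  hb_draw_funcs_set_move_to_func (funcs, my_move_to, nullptr, nullptr);
  hb_font_get_glyph_shape (font, glyph, funcs, nullptr);
  hb_draw_funcs_destroy (funcs);
}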

View file

@ -54,31 +54,31 @@ struct hb_draw_funcs_t
#define HB_DRAW_FUNC_IMPLEMENT(name) void *name;
HB_DRAW_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_DRAW_FUNC_IMPLEMENT
} user_data;
} *user_data;
struct {
#define HB_DRAW_FUNC_IMPLEMENT(name) hb_destroy_func_t name;
HB_DRAW_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_DRAW_FUNC_IMPLEMENT
} destroy;
} *destroy;
void emit_move_to (void *draw_data, hb_draw_state_t &st,
float to_x, float to_y)
{ func.move_to (this, draw_data, &st,
to_x, to_y,
user_data.move_to); }
!user_data ? nullptr : user_data->move_to); }
void emit_line_to (void *draw_data, hb_draw_state_t &st,
float to_x, float to_y)
{ func.line_to (this, draw_data, &st,
to_x, to_y,
user_data.line_to); }
!user_data ? nullptr : user_data->line_to); }
void emit_quadratic_to (void *draw_data, hb_draw_state_t &st,
float control_x, float control_y,
float to_x, float to_y)
{ func.quadratic_to (this, draw_data, &st,
control_x, control_y,
to_x, to_y,
user_data.quadratic_to); }
!user_data ? nullptr : user_data->quadratic_to); }
void emit_cubic_to (void *draw_data, hb_draw_state_t &st,
float control1_x, float control1_y,
float control2_x, float control2_y,
@ -87,10 +87,10 @@ struct hb_draw_funcs_t
control1_x, control1_y,
control2_x, control2_y,
to_x, to_y,
user_data.cubic_to); }
!user_data ? nullptr : user_data->cubic_to); }
void emit_close_path (void *draw_data, hb_draw_state_t &st)
{ func.close_path (this, draw_data, &st,
user_data.close_path); }
!user_data ? nullptr : user_data->close_path); }
void move_to (void *draw_data, hb_draw_state_t &st,

View file

@ -190,7 +190,7 @@ _hb_face_for_data_reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void
}
/**
* hb_face_create: (Xconstructor)
* hb_face_create:
* @blob: #hb_blob_t to work upon
* @index: The index of the face within @blob
*
@ -342,7 +342,7 @@ hb_face_set_user_data (hb_face_t *face,
* Since: 0.9.2
**/
void *
hb_face_get_user_data (const hb_face_t *face,
hb_face_get_user_data (hb_face_t *face,
hb_user_data_key_t *key)
{
return hb_object_get_user_data (face, key);

View file

@ -96,7 +96,7 @@ hb_face_set_user_data (hb_face_t *face,
hb_bool_t replace);
HB_EXTERN void *
hb_face_get_user_data (const hb_face_t *face,
hb_face_get_user_data (hb_face_t *face,
hb_user_data_key_t *key);
HB_EXTERN void

View file

@ -636,16 +636,8 @@ DEFINE_NULL_INSTANCE (hb_font_funcs_t) =
{
HB_OBJECT_HEADER_STATIC,
{
#define HB_FONT_FUNC_IMPLEMENT(name) nullptr,
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_FONT_FUNC_IMPLEMENT
},
{
#define HB_FONT_FUNC_IMPLEMENT(name) nullptr,
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_FONT_FUNC_IMPLEMENT
},
nullptr,
nullptr,
{
{
#define HB_FONT_FUNC_IMPLEMENT(name) hb_font_get_##name##_nil,
@ -658,16 +650,8 @@ DEFINE_NULL_INSTANCE (hb_font_funcs_t) =
static const hb_font_funcs_t _hb_font_funcs_default = {
HB_OBJECT_HEADER_STATIC,
{
#define HB_FONT_FUNC_IMPLEMENT(name) nullptr,
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_FONT_FUNC_IMPLEMENT
},
{
#define HB_FONT_FUNC_IMPLEMENT(name) nullptr,
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_FONT_FUNC_IMPLEMENT
},
nullptr,
nullptr,
{
{
#define HB_FONT_FUNC_IMPLEMENT(name) hb_font_get_##name##_default,
@ -679,7 +663,7 @@ static const hb_font_funcs_t _hb_font_funcs_default = {
/**
* hb_font_funcs_create: (Xconstructor)
* hb_font_funcs_create:
*
* Creates a new #hb_font_funcs_t structure of font functions.
*
@ -746,10 +730,16 @@ hb_font_funcs_destroy (hb_font_funcs_t *ffuncs)
{
if (!hb_object_destroy (ffuncs)) return;
#define HB_FONT_FUNC_IMPLEMENT(name) if (ffuncs->destroy.name) \
ffuncs->destroy.name (ffuncs->user_data.name);
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
if (ffuncs->destroy)
{
#define HB_FONT_FUNC_IMPLEMENT(name) if (ffuncs->destroy->name) \
ffuncs->destroy->name (!ffuncs->user_data ? nullptr : ffuncs->user_data->name);
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_FONT_FUNC_IMPLEMENT
}
hb_free (ffuncs->destroy);
hb_free (ffuncs->user_data);
hb_free (ffuncs);
}
@ -841,24 +831,50 @@ hb_font_funcs_set_##name##_func (hb_font_funcs_t *ffuncs, \
hb_destroy_func_t destroy) \
{ \
if (hb_object_is_immutable (ffuncs)) \
goto fail; \
\
if (!func) \
{ \
if (destroy) \
destroy (user_data); \
return; \
destroy = nullptr; \
user_data = nullptr; \
} \
\
if (ffuncs->destroy.name) \
ffuncs->destroy.name (ffuncs->user_data.name); \
if (ffuncs->destroy && ffuncs->destroy->name) \
ffuncs->destroy->name (!ffuncs->user_data ? nullptr : ffuncs->user_data->name); \
\
if (user_data && !ffuncs->user_data) \
{ \
ffuncs->user_data = (decltype (ffuncs->user_data)) hb_calloc (1, sizeof (*ffuncs->user_data)); \
if (unlikely (!ffuncs->user_data)) \
goto fail; \
} \
if (destroy && !ffuncs->destroy) \
{ \
ffuncs->destroy = (decltype (ffuncs->destroy)) hb_calloc (1, sizeof (*ffuncs->destroy)); \
if (unlikely (!ffuncs->destroy)) \
goto fail; \
} \
\
if (func) { \
ffuncs->get.f.name = func; \
ffuncs->user_data.name = user_data; \
ffuncs->destroy.name = destroy; \
if (ffuncs->user_data) \
ffuncs->user_data->name = user_data; \
if (ffuncs->destroy) \
ffuncs->destroy->name = destroy; \
} else { \
ffuncs->get.f.name = hb_font_get_##name##_default; \
ffuncs->user_data.name = nullptr; \
ffuncs->destroy.name = nullptr; \
if (ffuncs->user_data) \
ffuncs->user_data->name = nullptr; \
if (ffuncs->destroy) \
ffuncs->destroy->name = nullptr; \
} \
return; \
\
fail: \
if (destroy) \
destroy (user_data); \
}
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
@ -1623,6 +1639,9 @@ DEFINE_NULL_INSTANCE (hb_font_t) =
{
HB_OBJECT_HEADER_STATIC,
0, /* serial */
0, /* serial_coords */
nullptr, /* parent */
const_cast<hb_face_t *> (&_hb_Null_hb_face_t),
@ -1630,6 +1649,8 @@ DEFINE_NULL_INSTANCE (hb_font_t) =
1000, /* y_scale */
0., /* slant */
0., /* slant_xy; */
1.f, /* x_multf */
1.f, /* y_multf */
1<<16, /* x_mult */
1<<16, /* y_mult */
@ -1662,14 +1683,15 @@ _hb_font_create (hb_face_t *face)
font->face = hb_face_reference (face);
font->klass = hb_font_funcs_get_empty ();
font->data.init0 (font);
font->x_scale = font->y_scale = hb_face_get_upem (face);
font->x_scale = font->y_scale = face->get_upem ();
font->x_multf = font->y_multf = 1.f;
font->x_mult = font->y_mult = 1 << 16;
return font;
}
/**
* hb_font_create: (Xconstructor)
* hb_font_create:
* @face: a face.
*
* Constructs a new font object from the specified face.
@ -1715,6 +1737,8 @@ _hb_font_adopt_var_coords (hb_font_t *font,
font->coords = coords;
font->design_coords = design_coords;
font->num_coords = coords_length;
font->mults_changed (); // Easiest to call this to drop cached data
}
/**
@ -1744,7 +1768,6 @@ hb_font_create_sub_font (hb_font_t *parent)
font->x_scale = parent->x_scale;
font->y_scale = parent->y_scale;
font->slant = parent->slant;
font->mults_changed ();
font->x_ppem = parent->x_ppem;
font->y_ppem = parent->y_ppem;
font->ptem = parent->ptem;
@ -1767,6 +1790,8 @@ hb_font_create_sub_font (hb_font_t *parent)
}
}
font->mults_changed ();
return font;
}
@ -1852,6 +1877,9 @@ hb_font_set_user_data (hb_font_t *font,
hb_destroy_func_t destroy /* May be NULL. */,
hb_bool_t replace)
{
if (!hb_object_is_immutable (font))
font->serial++;
return hb_object_set_user_data (font, key, data, destroy, replace);
}
@ -1910,6 +1938,45 @@ hb_font_is_immutable (hb_font_t *font)
return hb_object_is_immutable (font);
}
/**
* hb_font_get_serial:
* @font: #hb_font_t to work upon
*
* Returns the internal serial number of the font. The serial
* number is increased every time a setting on the font is
* changed, using a setter function.
*
* Return value: serial number
*
* Since: 4.4.0.
**/
unsigned int
hb_font_get_serial (hb_font_t *font)
{
return font->serial;
}
/**
* hb_font_changed:
* @font: #hb_font_t to work upon
*
* Notifies the @font that underlying font data has changed.
* This has the effect of increasing the serial as returned
* by hb_font_get_serial(), which invalidates internal caches.
*
* Since: 4.4.0.
**/
void
hb_font_changed (hb_font_t *font)
{
if (hb_object_is_immutable (font))
return;
font->serial++;
font->mults_changed ();
}
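A minimal sketch of the invalidation pattern these two entry points enable; the cache struct and its contents are hypothetical. hb-ft, further down in this commit, applies the same serial check before re-syncing its FT_Face.

struct my_metrics_cache_t              /* hypothetical application-side cache */
{
  unsigned serial = (unsigned) -1;
  /* ... cached extents, advances, ... */
};

static void
sync_metrics_cache (hb_font_t *font, my_metrics_cache_t *cache)
{
  unsigned serial = hb_font_get_serial (font);
  if (serial == cache->serial)
    return;                            /* no setter ran, cached data still valid */
  /* recompute cached values here, then remember the serial */
  cache->serial = serial;
}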
/**
* hb_font_set_parent:
* @font: #hb_font_t to work upon
@ -1926,6 +1993,11 @@ hb_font_set_parent (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
if (parent == font->parent)
return;
font->serial++;
if (!parent)
parent = hb_font_get_empty ();
@ -1968,6 +2040,11 @@ hb_font_set_face (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
if (face == font->face)
return;
font->serial++;
if (unlikely (!face))
face = hb_face_get_empty ();
@ -2022,6 +2099,8 @@ hb_font_set_funcs (hb_font_t *font,
return;
}
font->serial++;
if (font->destroy)
font->destroy (font->user_data);
@ -2059,6 +2138,8 @@ hb_font_set_funcs_data (hb_font_t *font,
return;
}
font->serial++;
if (font->destroy)
font->destroy (font->user_data);
@ -2085,6 +2166,11 @@ hb_font_set_scale (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
if (font->x_scale == x_scale && font->y_scale == y_scale)
return;
font->serial++;
font->x_scale = x_scale;
font->y_scale = y_scale;
font->mults_changed ();
@ -2127,6 +2213,11 @@ hb_font_set_ppem (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
if (font->x_ppem == x_ppem && font->y_ppem == y_ppem)
return;
font->serial++;
font->x_ppem = x_ppem;
font->y_ppem = y_ppem;
}
@ -2169,6 +2260,11 @@ hb_font_set_ptem (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
if (font->ptem == ptem)
return;
font->serial++;
font->ptem = ptem;
}
@ -2216,6 +2312,11 @@ hb_font_set_synthetic_slant (hb_font_t *font, float slant)
if (hb_object_is_immutable (font))
return;
if (font->slant == slant)
return;
font->serial++;
font->slant = slant;
font->mults_changed ();
}
@ -2263,6 +2364,8 @@ hb_font_set_variations (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
font->serial_coords = ++font->serial;
if (!variations_length)
{
hb_font_set_var_coords_normalized (font, nullptr, 0);
@ -2322,6 +2425,8 @@ hb_font_set_var_coords_design (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
font->serial_coords = ++font->serial;
int *normalized = coords_length ? (int *) hb_calloc (coords_length, sizeof (int)) : nullptr;
float *design_coords = coords_length ? (float *) hb_calloc (coords_length, sizeof (float)) : nullptr;
@ -2355,6 +2460,8 @@ hb_font_set_var_named_instance (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
font->serial_coords = ++font->serial;
unsigned int coords_length = hb_ot_var_named_instance_get_design_coords (font->face, instance_index, nullptr, nullptr);
float *coords = coords_length ? (float *) hb_calloc (coords_length, sizeof (float)) : nullptr;
@ -2391,6 +2498,8 @@ hb_font_set_var_coords_normalized (hb_font_t *font,
if (hb_object_is_immutable (font))
return;
font->serial_coords = ++font->serial;
int *copy = coords_length ? (int *) hb_calloc (coords_length, sizeof (coords[0])) : nullptr;
int *unmapped = coords_length ? (int *) hb_calloc (coords_length, sizeof (coords[0])) : nullptr;
float *design_coords = coords_length ? (float *) hb_calloc (coords_length, sizeof (design_coords[0])) : nullptr;

View file

@ -1002,6 +1002,12 @@ hb_font_make_immutable (hb_font_t *font);
HB_EXTERN hb_bool_t
hb_font_is_immutable (hb_font_t *font);
HB_EXTERN unsigned int
hb_font_get_serial (hb_font_t *font);
HB_EXTERN void
hb_font_changed (hb_font_t *font);
HB_EXTERN void
hb_font_set_parent (hb_font_t *font,
hb_font_t *parent);

View file

@ -68,13 +68,13 @@ struct hb_font_funcs_t
#define HB_FONT_FUNC_IMPLEMENT(name) void *name;
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_FONT_FUNC_IMPLEMENT
} user_data;
} *user_data;
struct {
#define HB_FONT_FUNC_IMPLEMENT(name) hb_destroy_func_t name;
HB_FONT_FUNCS_IMPLEMENT_CALLBACKS
#undef HB_FONT_FUNC_IMPLEMENT
} destroy;
} *destroy;
/* Don't access these directly. Call font->get_*() instead. */
union get_t {
@ -104,6 +104,8 @@ DECLARE_NULL_INSTANCE (hb_font_funcs_t);
struct hb_font_t
{
hb_object_header_t header;
unsigned int serial;
unsigned int serial_coords;
hb_font_t *parent;
hb_face_t *face;
@ -112,6 +114,8 @@ struct hb_font_t
int32_t y_scale;
float slant;
float slant_xy;
float x_multf;
float y_multf;
int64_t x_mult;
int64_t y_mult;
@ -137,12 +141,12 @@ struct hb_font_t
{ return HB_DIRECTION_IS_VERTICAL(direction) ? y_mult : x_mult; }
hb_position_t em_scale_x (int16_t v) { return em_mult (v, x_mult); }
hb_position_t em_scale_y (int16_t v) { return em_mult (v, y_mult); }
hb_position_t em_scalef_x (float v) { return em_scalef (v, x_scale); }
hb_position_t em_scalef_y (float v) { return em_scalef (v, y_scale); }
float em_fscale_x (int16_t v) { return em_fscale (v, x_scale); }
float em_fscale_y (int16_t v) { return em_fscale (v, y_scale); }
float em_fscalef_x (float v) { return em_fscalef (v, x_scale); }
float em_fscalef_y (float v) { return em_fscalef (v, y_scale); }
hb_position_t em_scalef_x (float v) { return em_multf (v, x_multf); }
hb_position_t em_scalef_y (float v) { return em_multf (v, y_multf); }
float em_fscale_x (int16_t v) { return em_fmult (v, x_multf); }
float em_fscale_y (int16_t v) { return em_fmult (v, y_multf); }
float em_fscalef_x (float v) { return em_fmultf (v, x_multf); }
float em_fscalef_y (float v) { return em_fmultf (v, y_multf); }
hb_position_t em_scale_dir (int16_t v, hb_direction_t direction)
{ return em_mult (v, dir_mult (direction)); }
@ -205,14 +209,14 @@ struct hb_font_t
memset (extents, 0, sizeof (*extents));
return klass->get.f.font_h_extents (this, user_data,
extents,
klass->user_data.font_h_extents);
!klass->user_data ? nullptr : klass->user_data->font_h_extents);
}
hb_bool_t get_font_v_extents (hb_font_extents_t *extents)
{
memset (extents, 0, sizeof (*extents));
return klass->get.f.font_v_extents (this, user_data,
extents,
klass->user_data.font_v_extents);
!klass->user_data ? nullptr : klass->user_data->font_v_extents);
}
bool has_glyph (hb_codepoint_t unicode)
@ -228,7 +232,7 @@ struct hb_font_t
*glyph = not_found;
return klass->get.f.nominal_glyph (this, user_data,
unicode, glyph,
klass->user_data.nominal_glyph);
!klass->user_data ? nullptr : klass->user_data->nominal_glyph);
}
unsigned int get_nominal_glyphs (unsigned int count,
const hb_codepoint_t *first_unicode,
@ -240,7 +244,7 @@ struct hb_font_t
count,
first_unicode, unicode_stride,
first_glyph, glyph_stride,
klass->user_data.nominal_glyphs);
!klass->user_data ? nullptr : klass->user_data->nominal_glyphs);
}
hb_bool_t get_variation_glyph (hb_codepoint_t unicode, hb_codepoint_t variation_selector,
@ -250,21 +254,21 @@ struct hb_font_t
*glyph = not_found;
return klass->get.f.variation_glyph (this, user_data,
unicode, variation_selector, glyph,
klass->user_data.variation_glyph);
!klass->user_data ? nullptr : klass->user_data->variation_glyph);
}
hb_position_t get_glyph_h_advance (hb_codepoint_t glyph)
{
return klass->get.f.glyph_h_advance (this, user_data,
glyph,
klass->user_data.glyph_h_advance);
!klass->user_data ? nullptr : klass->user_data->glyph_h_advance);
}
hb_position_t get_glyph_v_advance (hb_codepoint_t glyph)
{
return klass->get.f.glyph_v_advance (this, user_data,
glyph,
klass->user_data.glyph_v_advance);
!klass->user_data ? nullptr : klass->user_data->glyph_v_advance);
}
void get_glyph_h_advances (unsigned int count,
@ -277,7 +281,7 @@ struct hb_font_t
count,
first_glyph, glyph_stride,
first_advance, advance_stride,
klass->user_data.glyph_h_advances);
!klass->user_data ? nullptr : klass->user_data->glyph_h_advances);
}
void get_glyph_v_advances (unsigned int count,
@ -290,7 +294,7 @@ struct hb_font_t
count,
first_glyph, glyph_stride,
first_advance, advance_stride,
klass->user_data.glyph_v_advances);
!klass->user_data ? nullptr : klass->user_data->glyph_v_advances);
}
hb_bool_t get_glyph_h_origin (hb_codepoint_t glyph,
@ -299,7 +303,7 @@ struct hb_font_t
*x = *y = 0;
return klass->get.f.glyph_h_origin (this, user_data,
glyph, x, y,
klass->user_data.glyph_h_origin);
!klass->user_data ? nullptr : klass->user_data->glyph_h_origin);
}
hb_bool_t get_glyph_v_origin (hb_codepoint_t glyph,
@ -308,7 +312,7 @@ struct hb_font_t
*x = *y = 0;
return klass->get.f.glyph_v_origin (this, user_data,
glyph, x, y,
klass->user_data.glyph_v_origin);
!klass->user_data ? nullptr : klass->user_data->glyph_v_origin);
}
hb_position_t get_glyph_h_kerning (hb_codepoint_t left_glyph,
@ -319,7 +323,7 @@ struct hb_font_t
#else
return klass->get.f.glyph_h_kerning (this, user_data,
left_glyph, right_glyph,
klass->user_data.glyph_h_kerning);
!klass->user_data ? nullptr : klass->user_data->glyph_h_kerning);
#endif
}
@ -331,7 +335,7 @@ struct hb_font_t
#else
return klass->get.f.glyph_v_kerning (this, user_data,
top_glyph, bottom_glyph,
klass->user_data.glyph_v_kerning);
!klass->user_data ? nullptr : klass->user_data->glyph_v_kerning);
#endif
}
@ -342,7 +346,7 @@ struct hb_font_t
return klass->get.f.glyph_extents (this, user_data,
glyph,
extents,
klass->user_data.glyph_extents);
!klass->user_data ? nullptr : klass->user_data->glyph_extents);
}
hb_bool_t get_glyph_contour_point (hb_codepoint_t glyph, unsigned int point_index,
@ -352,7 +356,7 @@ struct hb_font_t
return klass->get.f.glyph_contour_point (this, user_data,
glyph, point_index,
x, y,
klass->user_data.glyph_contour_point);
!klass->user_data ? nullptr : klass->user_data->glyph_contour_point);
}
hb_bool_t get_glyph_name (hb_codepoint_t glyph,
@ -362,7 +366,7 @@ struct hb_font_t
return klass->get.f.glyph_name (this, user_data,
glyph,
name, size,
klass->user_data.glyph_name);
!klass->user_data ? nullptr : klass->user_data->glyph_name);
}
hb_bool_t get_glyph_from_name (const char *name, int len, /* -1 means nul-terminated */
@ -373,7 +377,7 @@ struct hb_font_t
return klass->get.f.glyph_from_name (this, user_data,
name, len,
glyph,
klass->user_data.glyph_from_name);
!klass->user_data ? nullptr : klass->user_data->glyph_from_name);
}
void get_glyph_shape (hb_codepoint_t glyph,
@ -382,7 +386,7 @@ struct hb_font_t
klass->get.f.glyph_shape (this, user_data,
glyph,
draw_funcs, draw_data,
klass->user_data.glyph_shape);
!klass->user_data ? nullptr : klass->user_data->glyph_shape);
}
@ -444,7 +448,6 @@ struct hb_font_t
{
*x = get_glyph_h_advance (glyph) / 2;
/* TODO cache this somehow?! */
hb_font_extents_t extents;
get_h_extents_with_fallback (&extents);
*y = extents.ascender;
@ -628,20 +631,26 @@ struct hb_font_t
void mults_changed ()
{
signed upem = face->get_upem ();
x_mult = ((int64_t) x_scale << 16) / upem;
y_mult = ((int64_t) y_scale << 16) / upem;
float upem = face->get_upem ();
x_multf = x_scale / upem;
y_multf = y_scale / upem;
bool x_neg = x_scale < 0;
x_mult = (x_neg ? -((int64_t) -x_scale << 16) : ((int64_t) x_scale << 16)) / upem;
bool y_neg = y_scale < 0;
y_mult = (y_neg ? -((int64_t) -y_scale << 16) : ((int64_t) y_scale << 16)) / upem;
slant_xy = y_scale ? slant * x_scale / y_scale : 0.f;
data.fini ();
}
hb_position_t em_mult (int16_t v, int64_t mult)
{ return (hb_position_t) ((v * mult + 32768) >> 16); }
hb_position_t em_scalef (float v, int scale)
{ return (hb_position_t) roundf (em_fscalef (v, scale)); }
float em_fscalef (float v, int scale)
{ return v * scale / face->get_upem (); }
float em_fscale (int16_t v, int scale)
{ return (float) v * scale / face->get_upem (); }
hb_position_t em_multf (float v, float mult)
{ return (hb_position_t) roundf (em_fmultf (v, mult)); }
float em_fmultf (float v, float mult)
{ return v * mult; }
float em_fmult (int16_t v, float mult)
{ return (float) v * mult; }
};
DECLARE_NULL_INSTANCE (hb_font_t);
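A worked example of the 16.16 fixed-point path above, with assumed values upem = 2048 and x_scale = 1000; the sign of the scale is split out before the shift, presumably because left-shifting a negative value is undefined behaviour, and the +32768 gives round-to-nearest instead of truncation.

#include <cstdint>
#include <cstdio>

int main ()
{
  int64_t upem = 2048, x_scale = 1000;                   /* assumed values */
  int64_t x_mult = ((int64_t) x_scale << 16) / upem;     /* 32000 == 1000/2048 in 16.16 */
  int16_t v = 333;                                       /* a coordinate in font units */
  printf ("%d\n", (int) ((v * x_mult + 32768) >> 16));   /* 162.597... rounds to 163 */
  return 0;
}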

View file

@ -37,6 +37,8 @@
#include "hb-font.hh"
#include "hb-machinery.hh"
#include "hb-cache.hh"
#include "hb-ot-os2-table.hh"
#include "hb-ot-shaper-arabic-pua.hh"
#include FT_ADVANCES_H
#include FT_MULTIPLE_MASTERS_H
@ -86,7 +88,7 @@ struct hb_ft_font_t
mutable hb_mutex_t lock;
FT_Face ft_face;
mutable int cached_x_scale;
mutable unsigned cached_serial;
mutable hb_advance_cache_t advance_cache;
};
@ -103,7 +105,7 @@ _hb_ft_font_create (FT_Face ft_face, bool symbol, bool unref)
ft_font->load_flags = FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING;
ft_font->cached_x_scale = 0;
ft_font->cached_serial = (unsigned) -1;
ft_font->advance_cache.init ();
return ft_font;
@ -130,6 +132,58 @@ _hb_ft_font_destroy (void *data)
hb_free (ft_font);
}
/* hb_font changed, update FT_Face. */
static void _hb_ft_hb_font_changed (hb_font_t *font, FT_Face ft_face)
{
FT_Set_Char_Size (ft_face,
abs (font->x_scale), abs (font->y_scale),
0, 0);
#if 0
font->x_ppem * 72 * 64 / font->x_scale,
font->y_ppem * 72 * 64 / font->y_scale);
#endif
if (font->x_scale < 0 || font->y_scale < 0)
{
FT_Matrix matrix = { font->x_scale < 0 ? -1 : +1, 0,
0, font->y_scale < 0 ? -1 : +1};
FT_Set_Transform (ft_face, &matrix, nullptr);
}
#if defined(HAVE_FT_GET_VAR_BLEND_COORDINATES) && !defined(HB_NO_VAR)
unsigned int num_coords;
const int *coords = hb_font_get_var_coords_normalized (font, &num_coords);
if (num_coords)
{
FT_Fixed *ft_coords = (FT_Fixed *) hb_calloc (num_coords, sizeof (FT_Fixed));
if (ft_coords)
{
for (unsigned int i = 0; i < num_coords; i++)
ft_coords[i] = coords[i] * 4;
FT_Set_Var_Blend_Coordinates (ft_face, num_coords, ft_coords);
hb_free (ft_coords);
}
}
#endif
}
/* Check if hb_font changed, update FT_Face. */
static inline bool
_hb_ft_hb_font_check_changed (hb_font_t *font,
const hb_ft_font_t *ft_font)
{
if (font->serial != ft_font->cached_serial)
{
_hb_ft_hb_font_changed (font, ft_font->ft_face);
ft_font->advance_cache.clear ();
ft_font->cached_serial = font->serial;
return true;
}
return false;
}
/**
* hb_ft_font_set_load_flags:
* @font: #hb_font_t to work upon
@ -181,7 +235,7 @@ hb_ft_font_get_load_flags (hb_font_t *font)
}
/**
* hb_ft_font_get_face:
* hb_ft_font_get_face: (skip)
* @font: #hb_font_t to work upon
*
* Fetches the FT_Face associated with the specified #hb_font_t
@ -203,7 +257,7 @@ hb_ft_font_get_face (hb_font_t *font)
}
/**
* hb_ft_font_lock_face:
* hb_ft_font_lock_face: (skip)
* @font: #hb_font_t to work upon
*
* Gets the FT_Face associated with @font. This face will be kept around until

@ -246,7 +300,7 @@ hb_ft_font_unlock_face (hb_font_t *font)
static hb_bool_t
hb_ft_get_nominal_glyph (hb_font_t *font HB_UNUSED,
hb_ft_get_nominal_glyph (hb_font_t *font,
void *font_data,
hb_codepoint_t unicode,
hb_codepoint_t *glyph,
@ -258,14 +312,29 @@ hb_ft_get_nominal_glyph (hb_font_t *font HB_UNUSED,
if (unlikely (!g))
{
if (unlikely (ft_font->symbol) && unicode <= 0x00FFu)
if (unlikely (ft_font->symbol))
{
/* For symbol-encoded OpenType fonts, we duplicate the
* U+F000..F0FF range at U+0000..U+00FF. That's what
* Windows seems to do, and that's hinted about at:
* https://docs.microsoft.com/en-us/typography/opentype/spec/recom
* under "Non-Standard (Symbol) Fonts". */
g = FT_Get_Char_Index (ft_font->ft_face, 0xF000u + unicode);
switch ((unsigned) font->face->table.OS2->get_font_page ()) {
case OT::OS2::font_page_t::FONT_PAGE_NONE:
if (unicode <= 0x00FFu)
/* For symbol-encoded OpenType fonts, we duplicate the
* U+F000..F0FF range at U+0000..U+00FF. That's what
* Windows seems to do, and that's hinted about at:
* https://docs.microsoft.com/en-us/typography/opentype/spec/recom
* under "Non-Standard (Symbol) Fonts". */
g = FT_Get_Char_Index (ft_font->ft_face, 0xF000u + unicode);
break;
#ifndef HB_NO_OT_SHAPER_ARABIC_FALLBACK
case OT::OS2::font_page_t::FONT_PAGE_SIMP_ARABIC:
g = FT_Get_Char_Index (ft_font->ft_face, _hb_arabic_pua_simp_map (unicode));
break;
case OT::OS2::font_page_t::FONT_PAGE_TRAD_ARABIC:
g = FT_Get_Char_Index (ft_font->ft_face, _hb_arabic_pua_trad_map (unicode));
break;
#endif
default:
break;
}
if (!g)
return false;
}
@ -337,12 +406,6 @@ hb_ft_get_glyph_h_advances (hb_font_t* font, void* font_data,
int load_flags = ft_font->load_flags;
int mult = font->x_scale < 0 ? -1 : +1;
if (font->x_scale != ft_font->cached_x_scale)
{
ft_font->advance_cache.clear ();
ft_font->cached_x_scale = font->x_scale;
}
for (unsigned int i = 0; i < count; i++)
{
FT_Fixed v = 0;
@ -426,6 +489,7 @@ hb_ft_get_glyph_h_kerning (hb_font_t *font,
void *user_data HB_UNUSED)
{
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font_data;
hb_lock_t lock (ft_font->lock);
FT_Vector kerningv;
FT_Kerning_Mode mode = font->x_ppem ? FT_KERNING_DEFAULT : FT_KERNING_UNFITTED;
@ -556,6 +620,7 @@ hb_ft_get_font_h_extents (hb_font_t *font HB_UNUSED,
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font_data;
hb_lock_t lock (ft_font->lock);
FT_Face ft_face = ft_font->ft_face;
metrics->ascender = FT_MulFix(ft_face->ascender, ft_face->size->metrics.y_scale);
metrics->descender = FT_MulFix(ft_face->descender, ft_face->size->metrics.y_scale);
metrics->line_gap = FT_MulFix( ft_face->height, ft_face->size->metrics.y_scale ) - (metrics->ascender - metrics->descender);
@ -619,6 +684,8 @@ hb_ft_get_glyph_shape (hb_font_t *font HB_UNUSED,
hb_lock_t lock (ft_font->lock);
FT_Face ft_face = ft_font->ft_face;
_hb_ft_hb_font_check_changed (font, ft_font);
if (unlikely (FT_Load_Glyph (ft_face, glyph,
FT_LOAD_NO_BITMAP | ft_font->load_flags)))
return;
@ -963,6 +1030,31 @@ hb_ft_font_changed (hb_font_t *font)
#endif
}
/**
* hb_ft_hb_font_changed:
* @font: #hb_font_t to work upon
*
* Refreshes the state of the underlying FT_Face of @font when the hb_font_t
* @font has changed.
* This function should be called after changing the size or
* variation-axis settings on the @font.
* This call is fast if nothing has changed on @font.
*
* Return value: true if changed, false otherwise
*
* Since: 4.4.0
**/
hb_bool_t
hb_ft_hb_font_changed (hb_font_t *font)
{
if (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)
return false;
hb_ft_font_t *ft_font = (hb_ft_font_t *) font->user_data;
return _hb_ft_hb_font_check_changed (font, ft_font);
}
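A minimal usage sketch of the new entry point, assuming the font was created with hb_ft_font_create_referenced() so the FreeType funcs own the face; the variation tag and weight value are illustrative only:
hb_variation_t wght = { HB_TAG ('w','g','h','t'), 700.f };
hb_font_set_variations (font, &wght, 1);
hb_ft_hb_font_changed (font);                  /* pushes the new coords/scale onto the FT_Face */
FT_Face ft_face = hb_ft_font_lock_face (font); /* face now reflects the updated settings */
/* ... rasterize with FreeType ... */
hb_ft_font_unlock_face (font);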
/**
* hb_ft_font_create_referenced:
* @ft_face: FT_Face to work upon
@ -1081,35 +1173,7 @@ hb_ft_font_set_funcs (hb_font_t *font)
if (FT_Select_Charmap (ft_face, FT_ENCODING_MS_SYMBOL))
FT_Select_Charmap (ft_face, FT_ENCODING_UNICODE);
FT_Set_Char_Size (ft_face,
abs (font->x_scale), abs (font->y_scale),
0, 0);
#if 0
font->x_ppem * 72 * 64 / font->x_scale,
font->y_ppem * 72 * 64 / font->y_scale);
#endif
if (font->x_scale < 0 || font->y_scale < 0)
{
FT_Matrix matrix = { font->x_scale < 0 ? -1 : +1, 0,
0, font->y_scale < 0 ? -1 : +1};
FT_Set_Transform (ft_face, &matrix, nullptr);
}
#if defined(HAVE_FT_GET_VAR_BLEND_COORDINATES) && !defined(HB_NO_VAR)
unsigned int num_coords;
const int *coords = hb_font_get_var_coords_normalized (font, &num_coords);
if (num_coords)
{
FT_Fixed *ft_coords = (FT_Fixed *) hb_calloc (num_coords, sizeof (FT_Fixed));
if (ft_coords)
{
for (unsigned int i = 0; i < num_coords; i++)
ft_coords[i] = coords[i] * 4;
FT_Set_Var_Blend_Coordinates (ft_face, num_coords, ft_coords);
hb_free (ft_coords);
}
}
#endif
_hb_ft_hb_font_changed (font, ft_face);
ft_face->generic.data = blob;
ft_face->generic.finalizer = (FT_Generic_Finalizer) _release_blob;

View file

@ -122,10 +122,17 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags);
HB_EXTERN int
hb_ft_font_get_load_flags (hb_font_t *font);
/* Call when size or variations settings on underlying FT_Face change. */
/* Call when size or variation settings on the underlying FT_Face have changed,
 * and you want to update the hb_font_t from it. */
HB_EXTERN void
hb_ft_font_changed (hb_font_t *font);
/* Call when size or variation settings on the underlying hb_font_t may have
 * changed, and you want to update the FT_Face from it. This call is fast
 * if nothing changed on the hb_font_t. Returns true if changed. */
HB_EXTERN hb_bool_t
hb_ft_hb_font_changed (hb_font_t *font);
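Hypothetical sketch of the complementary direction, using hb_ft_font_changed() declared above: the FT_Face was resized directly, so the hb_font_t must be refreshed from it (face and font setup not shown):
FT_Set_Char_Size (ft_face, 0, 24 * 64, 72, 72); /* change the face behind HarfBuzz's back */
hb_ft_font_changed (font);                      /* re-reads scale/ppem/variations into the hb_font_t */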
/* Makes an hb_font_t use FreeType internally to implement font functions.
* Note: this internally creates an FT_Face. Use it when you create your
* hb_face_t using hb_face_create(). */

View file

@ -43,17 +43,12 @@
* is writable, then the iterator returns lvalues, otherwise it
* returns rvalues.
*
* TODO Document more.
*
* If iterator implementation implements operator!=, then can be
* If iterator implementation implements operator!=, then it can be
* used in range-based for loop. That already happens if the iterator
* is random-access. Otherwise, the range-based for loop incurs
* one traversal to find end(), which can be avoided if written
* as a while-style for loop, or if iterator implements a faster
* __end__() method.
* TODO When opting in for C++17, address this by changing return
* type of .end()?
*/
* __end__() method. */
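A minimal sketch of the while-style loop the note above recommends; `values` and `do_something` are assumed placeholders:
for (auto it = hb_iter (values); it; ++it)
  do_something (*it);   /* tests __more__() each round; end() is never computed */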
/*
* Base classes for iterators.
@ -75,10 +70,6 @@ struct hb_iter_t
iter_t* thiz () { return static_cast< iter_t *> (this); }
public:
/* TODO:
* Port operators below to use hb_enable_if to sniff which method implements
* an operator and use it, and remove hb_iter_fallback_mixin_t completely. */
/* Operators. */
iter_t iter () const { return *thiz(); }
iter_t operator + () const { return *thiz(); }
@ -87,8 +78,7 @@ struct hb_iter_t
explicit operator bool () const { return thiz()->__more__ (); }
unsigned len () const { return thiz()->__len__ (); }
/* The following can only be enabled if item_t is reference type. Otherwise
* it will be returning pointer to temporary rvalue.
* TODO Use a wrapper return type to fix for non-reference type. */
* it will be returning pointer to temporary rvalue. */
template <typename T = item_t,
hb_enable_if (std::is_reference<T>::value)>
hb_remove_reference<item_t>* operator -> () const { return std::addressof (**thiz()); }

View file

@ -176,7 +176,7 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
void init0 () {} /* Init, when memory is already set to 0. No-op for us. */
void init () { instance.set_relaxed (nullptr); }
void fini () { do_destroy (instance.get ()); }
void fini () { do_destroy (instance.get ()); init (); }
void free_instance ()
{

View file

@ -40,7 +40,7 @@
/**
* hb_map_create: (Xconstructor)
* hb_map_create:
*
* Creates a new, initially empty map.
*
@ -172,6 +172,25 @@ hb_map_allocation_successful (const hb_map_t *map)
return map->successful;
}
/**
* hb_map_copy:
* @map: A map
*
* Allocates a copy of @map.
*
* Return value: Newly-allocated map.
*
* Since: 4.4.0
**/
hb_map_t *
hb_map_copy (const hb_map_t *map)
{
hb_map_t *copy = hb_map_create ();
if (unlikely (!copy)) return nullptr;
copy->resize (map->population);
hb_copy (*map, *copy);
return copy;
}
/**
* hb_map_set:
@ -309,3 +328,20 @@ hb_map_is_equal (const hb_map_t *map,
return map->is_equal (*other);
}
/**
* hb_map_hash:
* @map: A map
*
* Creates a hash representing @map.
*
* Return value:
* A hash of @map.
*
* Since: 4.4.0
**/
HB_EXTERN unsigned int
hb_map_hash (const hb_map_t *map)
{
return map->hash ();
}
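A short illustrative sketch of the two additions together (error handling omitted); since the hash is accumulated from each real item, equal maps hash equally:
hb_map_t *m = hb_map_create ();
hb_map_set (m, 1u, 2u);
hb_map_t *copy = hb_map_copy (m);   /* new in 4.4.0 */
/* hb_map_is_equal (m, copy) is true, and hb_map_hash (m) == hb_map_hash (copy); hash is also new in 4.4.0. */
hb_map_destroy (copy);
hb_map_destroy (m);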

View file

@ -82,6 +82,9 @@ hb_map_get_user_data (hb_map_t *map,
HB_EXTERN hb_bool_t
hb_map_allocation_successful (const hb_map_t *map);
HB_EXTERN hb_map_t *
hb_map_copy (const hb_map_t *map);
HB_EXTERN void
hb_map_clear (hb_map_t *map);
@ -95,6 +98,9 @@ HB_EXTERN hb_bool_t
hb_map_is_equal (const hb_map_t *map,
const hb_map_t *other);
HB_EXTERN unsigned int
hb_map_hash (const hb_map_t *map);
HB_EXTERN void
hb_map_set (hb_map_t *map,
hb_codepoint_t key,

View file

@ -34,15 +34,13 @@
* hb_hashmap_t
*/
extern HB_INTERNAL const hb_codepoint_t minus_1;
template <typename K, typename V,
typename k_invalid_t = K,
typename v_invalid_t = V,
k_invalid_t kINVALID = std::is_pointer<K>::value ? 0 : std::is_signed<K>::value ? hb_int_min (K) : (K) -1,
v_invalid_t vINVALID = std::is_pointer<V>::value ? 0 : std::is_signed<V>::value ? hb_int_min (V) : (V) -1>
bool minus_one = false>
struct hb_hashmap_t
{
hb_hashmap_t () { init (); }
hb_hashmap_t (std::nullptr_t) : hb_hashmap_t () {}
~hb_hashmap_t () { fini (); }
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { resize (population); hb_copy (o, *this); }
@ -68,38 +66,44 @@ struct hb_hashmap_t
struct item_t
{
K key;
uint32_t hash : 30;
uint32_t is_used_ : 1;
uint32_t is_tombstone_ : 1;
V value;
uint32_t hash;
bool is_used () const { return is_used_; }
void set_used (bool is_used) { is_used_ = is_used; }
bool is_tombstone () const { return is_tombstone_; }
void set_tombstone (bool is_tombstone) { is_tombstone_ = is_tombstone; }
bool is_real () const { return is_used_ && !is_tombstone_; }
template <bool v = minus_one,
hb_enable_if (v == false)>
static inline const V& default_value () { return Null(V); };
template <bool v = minus_one,
hb_enable_if (v == true)>
static inline const V& default_value ()
{
static_assert (hb_is_same (V, hb_codepoint_t), "");
return minus_1;
};
void clear ()
{
new (std::addressof (key)) K ();
key = hb_coerce<K> (kINVALID);
new (std::addressof (value)) V ();
value = hb_coerce<V> (vINVALID);
hash = 0;
is_used_ = false;
is_tombstone_ = false;
}
bool operator == (const K &o) { return hb_deref (key) == hb_deref (o); }
bool operator == (const item_t &o) { return *this == o.key; }
bool is_unused () const
{
const K inv = hb_coerce<K> (kINVALID);
return key == inv;
}
bool is_tombstone () const
{
const K kinv = hb_coerce<K> (kINVALID);
const V vinv = hb_coerce<V> (vINVALID);
return key != kinv && value == vinv;
}
bool is_real () const
{
const K kinv = hb_coerce<K> (kINVALID);
const V vinv = hb_coerce<V> (vINVALID);
return key != kinv && value != vinv;
}
hb_pair_t<K, V> get_pair() const { return hb_pair_t<K, V> (key, value); }
hb_pair_t<const K &, const V &> get_pair_ref() const { return hb_pair_t<const K &, const V &> (key, value); }
uint32_t total_hash () const
{ return (hash * 31) + hb_hash (value); }
};
hb_object_header_t header;
@ -200,27 +204,39 @@ struct hb_hashmap_t
return true;
}
bool set (K key, const V& value) { return set_with_hash (key, hb_hash (key), value); }
bool set (K key, V&& value) { return set_with_hash (key, hb_hash (key), std::move (value)); }
template <typename VV>
bool set (K key, VV&& value) { return set_with_hash (key, hb_hash (key), std::forward<VV> (value)); }
V get (K key) const
const V& get (K key) const
{
if (unlikely (!items)) return hb_coerce<V> (vINVALID);
if (unlikely (!items)) return item_t::default_value ();
unsigned int i = bucket_for (key);
return items[i].is_real () && items[i] == key ? items[i].value : hb_coerce<V> (vINVALID);
return items[i].is_real () && items[i] == key ? items[i].value : item_t::default_value ();
}
void del (K key) { set (key, hb_coerce<V> (vINVALID)); }
void del (K key) { set_with_hash (key, hb_hash (key), item_t::default_value (), true); }
/* Has interface. */
typedef V value_t;
typedef const V& value_t;
value_t operator [] (K k) const { return get (k); }
bool has (K k, V *vp = nullptr) const
bool has (K key, const V **vp = nullptr) const
{
V v = (*this)[k];
if (vp) *vp = v;
const V vinv = hb_coerce<V> (vINVALID);
return v != vinv;
if (unlikely (!items))
{
if (vp) *vp = &item_t::default_value ();
return false;
}
unsigned int i = bucket_for (key);
if (items[i].is_real () && items[i] == key)
{
if (vp) *vp = &items[i].value;
return true;
}
else
{
if (vp) *vp = &item_t::default_value ();
return false;
}
}
/* Projection. */
V operator () (K k) const { return get (k); }
@ -242,8 +258,9 @@ struct hb_hashmap_t
uint32_t hash () const
{
uint32_t h = 0;
for (auto pair : iter ())
h ^= (hb_hash (pair.first) * 31) + hb_hash (pair.second);
for (const auto &item : + hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real))
h ^= item.total_hash ();
return h;
}
@ -271,6 +288,12 @@ struct hb_hashmap_t
| hb_filter (&item_t::is_real)
| hb_map (&item_t::get_pair)
)
auto iter_ref () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
| hb_filter (&item_t::is_real)
| hb_map (&item_t::get_pair_ref)
)
auto keys () const HB_AUTO_RETURN
(
+ hb_array (items, mask ? mask + 1 : 0)
@ -293,19 +316,16 @@ struct hb_hashmap_t
protected:
template <typename VV>
bool set_with_hash (K key, uint32_t hash, VV&& value)
bool set_with_hash (K key, uint32_t hash, VV&& value, bool is_delete=false)
{
if (unlikely (!successful)) return false;
const K kinv = hb_coerce<K> (kINVALID);
if (unlikely (key == kinv)) return true;
if (unlikely ((occupancy + occupancy / 2) >= mask && !resize ())) return false;
unsigned int i = bucket_for_hash (key, hash);
const V vinv = hb_coerce<V> (vINVALID);
if (value == vinv && items[i].key != key)
if (is_delete && items[i].key != key)
return true; /* Trying to delete non-existent key. */
if (!items[i].is_unused ())
if (items[i].is_used ())
{
occupancy--;
if (!items[i].is_tombstone ())
@ -313,27 +333,30 @@ struct hb_hashmap_t
}
items[i].key = key;
items[i].value = value;
items[i].value = std::forward<VV> (value);
items[i].hash = hash;
items[i].set_used (true);
items[i].set_tombstone (is_delete);
occupancy++;
if (!items[i].is_tombstone ())
if (!is_delete)
population++;
return true;
}
unsigned int bucket_for (K key) const
unsigned int bucket_for (const K &key) const
{
return bucket_for_hash (key, hb_hash (key));
}
unsigned int bucket_for_hash (K key, uint32_t hash) const
unsigned int bucket_for_hash (const K &key, uint32_t hash) const
{
hash &= 0x3FFFFFFF; // We only store lower 30bit of hash
unsigned int i = hash % prime;
unsigned int step = 0;
unsigned int tombstone = (unsigned) -1;
while (!items[i].is_unused ())
while (items[i].is_used ())
{
if (items[i].hash == hash && items[i] == key)
return i;
@ -402,21 +425,14 @@ struct hb_hashmap_t
struct hb_map_t : hb_hashmap_t<hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
HB_MAP_VALUE_INVALID,
HB_MAP_VALUE_INVALID>
true>
{
using hashmap = hb_hashmap_t<hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
HB_MAP_VALUE_INVALID,
HB_MAP_VALUE_INVALID>;
true>;
~hb_map_t () = default;
hb_map_t () : hashmap () {}
hb_map_t (std::nullptr_t) : hb_map_t () {}
hb_map_t (const hb_map_t &o) : hashmap ((hashmap &) o) {}
hb_map_t (hb_map_t &&o) : hashmap (std::move ((hashmap &) o)) {}
hb_map_t& operator= (const hb_map_t&) = default;

View file

@ -188,7 +188,7 @@ template <> struct hb_int_max<signed long long> : hb_integral_constant<signed l
template <> struct hb_int_max<unsigned long long> : hb_integral_constant<unsigned long long, ULLONG_MAX> {};
#define hb_int_max(T) hb_int_max<T>::value
#if __GNUG__ && __GNUC__ < 5
#if defined(__GNUC__) && __GNUC__ < 5
#define hb_is_trivially_copyable(T) __has_trivial_copy(T)
#define hb_is_trivially_copy_assignable(T) __has_trivial_assign(T)
#define hb_is_trivially_constructible(T) __has_trivial_constructor(T)

View file

@ -37,7 +37,7 @@
/* Global nul-content Null pool. Enlarge as necessary. */
#define HB_NULL_POOL_SIZE 384
#define HB_NULL_POOL_SIZE 448
/* Use SFINAE to sniff whether T has min_size; in which case return the larger
* of sizeof(T) and T::null_size, otherwise return sizeof(T).
@ -108,7 +108,7 @@ struct NullHelper
/* Specializations for arbitrary-content Null objects expressed in bytes. */
#define DECLARE_NULL_NAMESPACE_BYTES(Namespace, Type) \
} /* Close namespace. */ \
extern HB_INTERNAL const unsigned char _hb_Null_##Namespace##_##Type[Namespace::Type::null_size]; \
extern HB_INTERNAL const unsigned char _hb_Null_##Namespace##_##Type[hb_null_size (Namespace::Type)]; \
template <> \
struct Null<Namespace::Type> { \
static Namespace::Type const & get_null () { \
@ -118,7 +118,7 @@ struct NullHelper
namespace Namespace { \
static_assert (true, "") /* Require semicolon after. */
#define DEFINE_NULL_NAMESPACE_BYTES(Namespace, Type) \
const unsigned char _hb_Null_##Namespace##_##Type[Namespace::Type::null_size]
const unsigned char _hb_Null_##Namespace##_##Type[hb_null_size (Namespace::Type)]
/* Specializations for arbitrary-content Null objects expressed as struct initializer. */
#define DECLARE_NULL_INSTANCE(Type) \

View file

@ -406,6 +406,8 @@ struct Charset1_2 {
void collect_glyph_to_sid_map (hb_map_t *mapping, unsigned int num_glyphs) const
{
hb_codepoint_t gid = 1;
if (gid >= num_glyphs)
return;
for (unsigned i = 0;; i++)
{
hb_codepoint_t sid = ranges[i].first;
@ -1138,7 +1140,8 @@ struct cff1
cff1_top_dict_interp_env_t env (fontDictStr);
cff1_font_dict_interpreter_t font_interp (env);
font = fontDicts.push ();
if (unlikely (font == &Crap (cff1_font_dict_values_t))) { fini (); return; }
if (unlikely (fontDicts.in_error ())) { fini (); return; }
font->init ();
if (unlikely (!font_interp.interpret (*font))) { fini (); return; }
PRIVDICTVAL *priv = &privateDicts[i];
@ -1333,7 +1336,7 @@ struct cff1
if (names)
{
names->fini ();
free (names);
hb_free (names);
}
SUPER::fini ();
@ -1379,7 +1382,7 @@ struct cff1
hb_sorted_vector_t<gname_t> *names = glyph_names.get ();
if (unlikely (!names))
{
names = (hb_sorted_vector_t<gname_t> *) calloc (sizeof (hb_sorted_vector_t<gname_t>), 1);
names = (hb_sorted_vector_t<gname_t> *) hb_calloc (sizeof (hb_sorted_vector_t<gname_t>), 1);
if (likely (names))
{
names->init ();
@ -1409,14 +1412,14 @@ struct cff1
if (names)
{
names->fini ();
free (names);
hb_free (names);
}
goto retry;
}
}
}
gname_t key = { hb_bytes_t (name, len), 0 };
const gname_t *gname = glyph_names->bsearch (key);
const gname_t *gname = names ? names->bsearch (key) : nullptr;
if (!gname) return false;
hb_codepoint_t gid = sid_to_glyph (gname->sid);
if (!gid && gname->sid) return false;

View file

@ -27,6 +27,8 @@
#ifndef HB_OT_CMAP_TABLE_HH
#define HB_OT_CMAP_TABLE_HH
#include "hb-ot-os2-table.hh"
#include "hb-ot-shaper-arabic-pua.hh"
#include "hb-open-type.hh"
#include "hb-set.hh"
@ -1476,33 +1478,47 @@ struct SubtableUnicodesCache {
private:
const void* base;
hb_hashmap_t<intptr_t, hb_set_t*> cached_unicodes;
hb_hashmap_t<intptr_t, hb::unique_ptr<hb_set_t>> cached_unicodes;
public:
SubtableUnicodesCache(const void* cmap_base)
: base(cmap_base), cached_unicodes() {}
~SubtableUnicodesCache()
{
for (hb_set_t* s : cached_unicodes.values()) {
hb_set_destroy (s);
}
}
hb_set_t* set_for(const EncodingRecord* record)
hb_set_t* set_for (const EncodingRecord* record)
{
if (!cached_unicodes.has ((intptr_t) record)) {
hb_set_t* new_set = hb_set_create ();
if (!cached_unicodes.set ((intptr_t) record, new_set)) {
hb_set_destroy (new_set);
if (!cached_unicodes.has ((intptr_t) record))
{
hb_set_t *s = hb_set_create ();
if (unlikely (s->in_error ()))
return hb_set_get_empty ();
(base+record->subtable).collect_unicodes (s);
if (unlikely (!cached_unicodes.set ((intptr_t) record, hb::unique_ptr<hb_set_t> {s})))
return hb_set_get_empty ();
}
(base+record->subtable).collect_unicodes (cached_unicodes.get ((intptr_t) record));
return s;
}
return cached_unicodes.get ((intptr_t) record);
}
};
static inline uint_fast16_t
_hb_symbol_pua_map (unsigned codepoint)
{
if (codepoint <= 0x00FFu)
{
/* For symbol-encoded OpenType fonts, we duplicate the
* U+F000..F0FF range at U+0000..U+00FF. That's what
* Windows seems to do, and that's hinted about at:
* https://docs.microsoft.com/en-us/typography/opentype/spec/recom
* under "Non-Standard (Symbol) Fonts". */
return 0xF000u + codepoint;
}
return 0;
}
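A quick worked example of the helper above; the codepoints are illustrative only:
/* _hb_symbol_pua_map (0x0041u) -> 0xF041u   (retried in the Private Use Area)
 * _hb_symbol_pua_map (0x2603u) -> 0          (above U+00FF: no remap)          */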
struct cmap
{
static constexpr hb_tag_t tableTag = HB_OT_TAG_cmap;
@ -1726,7 +1742,24 @@ struct cmap
this->get_glyph_data = subtable;
if (unlikely (symbol))
this->get_glyph_funcZ = get_glyph_from_symbol<CmapSubtable>;
{
switch ((unsigned) face->table.OS2->get_font_page ()) {
case OS2::font_page_t::FONT_PAGE_NONE:
this->get_glyph_funcZ = get_glyph_from_symbol<CmapSubtable, _hb_symbol_pua_map>;
break;
#ifndef HB_NO_OT_SHAPER_ARABIC_FALLBACK
case OS2::font_page_t::FONT_PAGE_SIMP_ARABIC:
this->get_glyph_funcZ = get_glyph_from_symbol<CmapSubtable, _hb_arabic_pua_simp_map>;
break;
case OS2::font_page_t::FONT_PAGE_TRAD_ARABIC:
this->get_glyph_funcZ = get_glyph_from_symbol<CmapSubtable, _hb_arabic_pua_trad_map>;
break;
#endif
default:
this->get_glyph_funcZ = get_glyph_from<CmapSubtable>;
break;
}
}
else
{
switch (subtable->u.format) {
@ -1808,6 +1841,7 @@ struct cmap
typedef bool (*hb_cmap_get_glyph_func_t) (const void *obj,
hb_codepoint_t codepoint,
hb_codepoint_t *glyph);
typedef uint_fast16_t (*hb_pua_remap_func_t) (unsigned);
template <typename Type>
HB_INTERNAL static bool get_glyph_from (const void *obj,
@ -1818,7 +1852,7 @@ struct cmap
return typed_obj->get_glyph (codepoint, glyph);
}
template <typename Type>
template <typename Type, hb_pua_remap_func_t remap>
HB_INTERNAL static bool get_glyph_from_symbol (const void *obj,
hb_codepoint_t codepoint,
hb_codepoint_t *glyph)
@ -1827,15 +1861,8 @@ struct cmap
if (likely (typed_obj->get_glyph (codepoint, glyph)))
return true;
if (codepoint <= 0x00FFu)
{
/* For symbol-encoded OpenType fonts, we duplicate the
* U+F000..F0FF range at U+0000..U+00FF. That's what
* Windows seems to do, and that's hinted about at:
* https://docs.microsoft.com/en-us/typography/opentype/spec/recom
* under "Non-Standard (Symbol) Fonts". */
return typed_obj->get_glyph (0xF000u + codepoint, glyph);
}
if (hb_codepoint_t c = remap (codepoint))
return typed_obj->get_glyph (c, glyph);
return false;
}

View file

@ -97,9 +97,10 @@ struct CPALV1Tail
c->push ();
for (const auto _ : colorLabels)
{
if (!color_index_map->has (_)) continue;
const hb_codepoint_t *v;
if (!color_index_map->has (_, &v)) continue;
NameID new_color_idx;
new_color_idx = color_index_map->get (_);
new_color_idx = *v;
if (!c->copy<NameID> (new_color_idx))
{
c->pop_discard ();

View file

@ -298,6 +298,12 @@ struct sbix
const PNGHeader &png = *blob->as<PNGHeader>();
if (png.IHDR.height >= 65536 || png.IHDR.width >= 65536)
{
hb_blob_destroy (blob);
return false;
}
extents->x_bearing = x_offset;
extents->y_bearing = png.IHDR.height + y_offset;
extents->width = png.IHDR.width;

View file

@ -30,6 +30,7 @@
#include "hb-ot.h"
#include "hb-cache.hh"
#include "hb-font.hh"
#include "hb-machinery.hh"
#include "hb-ot-face.hh"
@ -58,6 +59,41 @@
* never need to call these functions directly.
**/
struct hb_ot_font_t
{
const hb_ot_face_t *ot_face;
/* h_advance caching */
mutable hb_atomic_int_t cached_coords_serial;
mutable hb_atomic_ptr_t<hb_advance_cache_t> advance_cache;
};
static hb_ot_font_t *
_hb_ot_font_create (hb_font_t *font)
{
hb_ot_font_t *ot_font = (hb_ot_font_t *) hb_calloc (1, sizeof (hb_ot_font_t));
if (unlikely (!ot_font))
return nullptr;
ot_font->ot_face = &font->face->table;
return ot_font;
}
static void
_hb_ot_font_destroy (void *font_data)
{
hb_ot_font_t *ot_font = (hb_ot_font_t *) font_data;
auto *cache = ot_font->advance_cache.get_relaxed ();
if (cache)
{
cache->fini ();
hb_free (cache);
}
hb_free (ot_font);
}
static hb_bool_t
hb_ot_get_nominal_glyph (hb_font_t *font HB_UNUSED,
@ -66,7 +102,8 @@ hb_ot_get_nominal_glyph (hb_font_t *font HB_UNUSED,
hb_codepoint_t *glyph,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
return ot_face->cmap->get_nominal_glyph (unicode, glyph);
}
@ -80,7 +117,8 @@ hb_ot_get_nominal_glyphs (hb_font_t *font HB_UNUSED,
unsigned int glyph_stride,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
return ot_face->cmap->get_nominal_glyphs (count,
first_unicode, unicode_stride,
first_glyph, glyph_stride);
@ -94,7 +132,8 @@ hb_ot_get_variation_glyph (hb_font_t *font HB_UNUSED,
hb_codepoint_t *glyph,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
return ot_face->cmap->get_variation_glyph (unicode, variation_selector, glyph);
}
@ -107,15 +146,83 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
unsigned advance_stride,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
const OT::hmtx_accelerator_t &hmtx = *ot_face->hmtx;
for (unsigned int i = 0; i < count; i++)
#ifndef HB_NO_VAR
const OT::HVARVVAR &HVAR = *hmtx.var_table;
const OT::VariationStore &varStore = &HVAR + HVAR.varStore;
OT::VariationStore::cache_t *varStore_cache = font->num_coords * count >= 128 ? varStore.create_cache () : nullptr;
bool use_cache = font->num_coords;
#else
OT::VariationStore::cache_t *varStore_cache = nullptr;
bool use_cache = false;
#endif
hb_advance_cache_t *cache = nullptr;
if (use_cache)
{
*first_advance = font->em_scale_x (hmtx.get_advance (*first_glyph, font));
first_glyph = &StructAtOffsetUnaligned<hb_codepoint_t> (first_glyph, glyph_stride);
first_advance = &StructAtOffsetUnaligned<hb_position_t> (first_advance, advance_stride);
retry:
cache = ot_font->advance_cache.get ();
if (unlikely (!cache))
{
cache = (hb_advance_cache_t *) hb_malloc (sizeof (hb_advance_cache_t));
if (unlikely (!cache))
{
use_cache = false;
goto out;
}
cache->init ();
if (unlikely (!ot_font->advance_cache.cmpexch (nullptr, cache)))
{
hb_free (cache);
goto retry;
}
ot_font->cached_coords_serial.set (font->serial_coords);
}
}
out:
if (!use_cache)
{
for (unsigned int i = 0; i < count; i++)
{
*first_advance = font->em_scale_x (hmtx.get_advance (*first_glyph, font, varStore_cache));
first_glyph = &StructAtOffsetUnaligned<hb_codepoint_t> (first_glyph, glyph_stride);
first_advance = &StructAtOffsetUnaligned<hb_position_t> (first_advance, advance_stride);
}
}
else
{ /* Use cache. */
if (ot_font->cached_coords_serial.get () != (int) font->serial_coords)
{
ot_font->advance_cache->init ();
ot_font->cached_coords_serial.set (font->serial_coords);
}
for (unsigned int i = 0; i < count; i++)
{
hb_position_t v;
unsigned cv;
if (ot_font->advance_cache->get (*first_glyph, &cv))
v = cv;
else
{
v = hmtx.get_advance (*first_glyph, font, varStore_cache);
ot_font->advance_cache->set (*first_glyph, v);
}
*first_advance = font->em_scale_x (v);
first_glyph = &StructAtOffsetUnaligned<hb_codepoint_t> (first_glyph, glyph_stride);
first_advance = &StructAtOffsetUnaligned<hb_position_t> (first_advance, advance_stride);
}
}
#ifndef HB_NO_VAR
OT::VariationStore::destroy_cache (varStore_cache);
#endif
}
#ifndef HB_NO_VERTICAL
@ -128,16 +235,31 @@ hb_ot_get_glyph_v_advances (hb_font_t* font, void* font_data,
unsigned advance_stride,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
const OT::vmtx_accelerator_t &vmtx = *ot_face->vmtx;
if (vmtx.has_data ())
{
#ifndef HB_NO_VAR
const OT::HVARVVAR &VVAR = *vmtx.var_table;
const OT::VariationStore &varStore = &VVAR + VVAR.varStore;
OT::VariationStore::cache_t *varStore_cache = font->num_coords ? varStore.create_cache () : nullptr;
#else
OT::VariationStore::cache_t *varStore_cache = nullptr;
#endif
for (unsigned int i = 0; i < count; i++)
{
*first_advance = font->em_scale_y (-(int) vmtx.get_advance (*first_glyph, font));
*first_advance = font->em_scale_y (-(int) vmtx.get_advance (*first_glyph, font, varStore_cache));
first_glyph = &StructAtOffsetUnaligned<hb_codepoint_t> (first_glyph, glyph_stride);
first_advance = &StructAtOffsetUnaligned<hb_position_t> (first_advance, advance_stride);
}
#ifndef HB_NO_VAR
OT::VariationStore::destroy_cache (varStore_cache);
#endif
}
else
{
hb_font_extents_t font_extents;
@ -163,7 +285,8 @@ hb_ot_get_glyph_v_origin (hb_font_t *font,
hb_position_t *y,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
*x = font->get_glyph_h_advance (glyph) / 2;
@ -208,7 +331,8 @@ hb_ot_get_glyph_extents (hb_font_t *font,
hb_glyph_extents_t *extents,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR)
if (ot_face->sbix->get_extents (font, glyph, extents)) return true;
@ -234,7 +358,9 @@ hb_ot_get_glyph_name (hb_font_t *font HB_UNUSED,
char *name, unsigned int size,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
if (ot_face->post->get_glyph_name (glyph, name, size)) return true;
#ifndef HB_NO_OT_FONT_CFF
if (ot_face->cff1->get_glyph_name (glyph, name, size)) return true;
@ -248,7 +374,9 @@ hb_ot_get_glyph_from_name (hb_font_t *font HB_UNUSED,
hb_codepoint_t *glyph,
void *user_data HB_UNUSED)
{
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
const hb_ot_font_t *ot_font = (const hb_ot_font_t *) font_data;
const hb_ot_face_t *ot_face = ot_font->ot_face;
if (ot_face->post->get_glyph_from_name (name, len, glyph)) return true;
#ifndef HB_NO_OT_FONT_CFF
if (ot_face->cff1->get_glyph_from_name (name, len, glyph)) return true;
@ -364,10 +492,14 @@ _hb_ot_get_font_funcs ()
void
hb_ot_font_set_funcs (hb_font_t *font)
{
hb_ot_font_t *ot_font = _hb_ot_font_create (font);
if (unlikely (!ot_font))
return;
hb_font_set_funcs (font,
_hb_ot_get_font_funcs (),
&font->face->table,
nullptr);
ot_font,
_hb_ot_font_destroy);
}
#ifndef HB_NO_VAR

File diff suppressed because it is too large

View file

@ -242,7 +242,7 @@ struct hmtxvmtx
return side_bearing;
if (var_table.get_length ())
return side_bearing + var_table->get_side_bearing_var (glyph, font->coords, font->num_coords); // TODO Optimize?!
return side_bearing + var_table->get_side_bearing_var (glyph, font->coords, font->num_coords);
return _glyf_get_side_bearing_var (font, glyph, T::tableTag == HB_OT_TAG_vmtx);
#else
@ -284,7 +284,8 @@ struct hmtxvmtx
}
unsigned int get_advance (hb_codepoint_t glyph,
hb_font_t *font) const
hb_font_t *font,
VariationStore::cache_t *store_cache = nullptr) const
{
unsigned int advance = get_advance (glyph);
@ -293,7 +294,7 @@ struct hmtxvmtx
return advance;
if (var_table.get_length ())
return advance + roundf (var_table->get_advance_var (glyph, font)); // TODO Optimize?!
return advance + roundf (var_table->get_advance_var (glyph, font, store_cache)); // TODO Optimize?!
return _glyf_get_advance_var (font, glyph, T::tableTag == HB_OT_TAG_vmtx);
#else
@ -310,7 +311,7 @@ struct hmtxvmtx
unsigned int default_advance;
private:
public:
hb_blob_ptr_t<hmtxvmtx> table;
hb_blob_ptr_t<HVARVVAR> var_table;
};

View file

@ -102,7 +102,7 @@ static void ClassDef_remap_and_serialize (
struct hb_prune_langsys_context_t
{
hb_prune_langsys_context_t (const void *table_,
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_,
const hb_map_t *duplicate_feature_map_,
hb_set_t *new_collected_feature_indexes_)
:table (table_),
@ -122,7 +122,7 @@ struct hb_prune_langsys_context_t
public:
const void *table;
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
const hb_map_t *duplicate_feature_map;
hb_set_t *new_feature_indexes;
@ -162,14 +162,14 @@ struct hb_subset_layout_context_t :
hb_subset_context_t *subset_context;
const hb_tag_t table_tag;
const hb_map_t *lookup_index_map;
const hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
const hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
const hb_map_t *feature_index_map;
unsigned cur_script_index;
hb_subset_layout_context_t (hb_subset_context_t *c_,
hb_tag_t tag_,
hb_map_t *lookup_map_,
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_,
hb_map_t *feature_index_map_) :
subset_context (c_),
table_tag (tag_),
@ -659,7 +659,8 @@ struct LangSys
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu;
const unsigned *v;
out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;
if (!l->visitFeatureIndex (featureIndex.len))
return_trace (false);
@ -722,12 +723,8 @@ struct Script
if (!c->script_langsys_map->has (script_index))
{
hb_set_t* empty_set = hb_set_create ();
if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
{
hb_set_destroy (empty_set);
if (unlikely (!c->script_langsys_map->set (script_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
return;
}
}
unsigned langsys_count = get_lang_sys_count ();
@ -2004,6 +2001,8 @@ struct ClassDefFormat1
return_trace (c->check_struct (this) && classValue.sanitize (c));
}
unsigned cost () const { return 1; }
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
@ -2240,6 +2239,8 @@ struct ClassDefFormat2
return_trace (rangeRecord.sanitize (c));
}
unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ }
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
@ -2480,6 +2481,15 @@ struct ClassDef
}
}
unsigned cost () const
{
switch (u.format) {
case 1: return u.format1.cost ();
case 2: return u.format2.cost ();
default:return 0u;
}
}
/* Might return false if array looks unsorted.
* Used for faster rejection of corrupt data. */
template <typename set_t>
@ -2601,14 +2611,27 @@ struct VarRegionAxis
DEFINE_SIZE_STATIC (6);
};
#define REGION_CACHE_ITEM_CACHE_INVALID 2.f
struct VarRegionList
{
using cache_t = float;
float evaluate (unsigned int region_index,
const int *coords, unsigned int coord_len) const
const int *coords, unsigned int coord_len,
cache_t *cache = nullptr) const
{
if (unlikely (region_index >= regionCount))
return 0.;
float *cached_value = nullptr;
if (cache)
{
cached_value = &(cache[region_index]);
if (likely (*cached_value != REGION_CACHE_ITEM_CACHE_INVALID))
return *cached_value;
}
const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);
float v = 1.;
@ -2618,9 +2641,16 @@ struct VarRegionList
int coord = i < coord_len ? coords[i] : 0;
float factor = axes[i].evaluate (coord);
if (factor == 0.f)
{
if (cache)
*cached_value = 0.;
return 0.;
}
v *= factor;
}
if (cache)
*cached_value = v;
return v;
}
@ -2668,7 +2698,7 @@ struct VarData
{ return regionIndices.len; }
unsigned int get_row_size () const
{ return shortCount + regionIndices.len; }
{ return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); }
unsigned int get_size () const
{ return min_size
@ -2678,13 +2708,17 @@ struct VarData
float get_delta (unsigned int inner,
const int *coords, unsigned int coord_count,
const VarRegionList &regions) const
const VarRegionList &regions,
VarRegionList::cache_t *cache = nullptr) const
{
if (unlikely (inner >= itemCount))
return 0.;
unsigned int count = regionIndices.len;
unsigned int scount = shortCount;
bool is_long = longWords ();
unsigned word_count = wordCount ();
unsigned int scount = is_long ? count - word_count : word_count;
unsigned int lcount = is_long ? word_count : 0;
const HBUINT8 *bytes = get_delta_bytes ();
const HBUINT8 *row = bytes + inner * (scount + count);
@ -2692,16 +2726,22 @@ struct VarData
float delta = 0.;
unsigned int i = 0;
const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
const HBINT16 *lcursor = reinterpret_cast<const HBINT16 *> (row);
for (; i < lcount; i++)
{
float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
delta += scalar * *lcursor++;
}
const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (lcursor);
for (; i < scount; i++)
{
float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
delta += scalar * *scursor++;
}
const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
for (; i < count; i++)
{
float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
delta += scalar * *bcursor++;
}
@ -2725,7 +2765,7 @@ struct VarData
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
regionIndices.sanitize (c) &&
shortCount <= regionIndices.len &&
wordCount () <= regionIndices.len &&
c->check_range (get_delta_bytes (),
itemCount,
get_row_size ()));
@ -2740,43 +2780,66 @@ struct VarData
if (unlikely (!c->extend_min (this))) return_trace (false);
itemCount = inner_map.get_next_value ();
/* Optimize short count */
unsigned short ri_count = src->regionIndices.len;
enum delta_size_t { kZero=0, kByte, kShort };
/* Optimize word count */
unsigned ri_count = src->regionIndices.len;
enum delta_size_t { kZero=0, kNonWord, kWord };
hb_vector_t<delta_size_t> delta_sz;
hb_vector_t<unsigned int> ri_map; /* maps old index to new index */
delta_sz.resize (ri_count);
ri_map.resize (ri_count);
unsigned int new_short_count = 0;
unsigned int new_word_count = 0;
unsigned int r;
bool has_long = false;
if (src->longWords ())
{
for (r = 0; r < ri_count; r++)
{
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
{
unsigned int old = inner_map.backward (i);
int32_t delta = src->get_item_delta (old, r);
if (delta < -65536 || 65535 < delta)
{
has_long = true;
break;
}
}
}
}
signed min_threshold = has_long ? -65536 : -128;
signed max_threshold = has_long ? +65535 : +127;
for (r = 0; r < ri_count; r++)
{
delta_sz[r] = kZero;
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
{
unsigned int old = inner_map.backward (i);
int16_t delta = src->get_item_delta (old, r);
if (delta < -128 || 127 < delta)
int32_t delta = src->get_item_delta (old, r);
if (delta < min_threshold || max_threshold < delta)
{
delta_sz[r] = kShort;
new_short_count++;
delta_sz[r] = kWord;
new_word_count++;
break;
}
else if (delta != 0)
delta_sz[r] = kByte;
delta_sz[r] = kNonWord;
}
}
unsigned int short_index = 0;
unsigned int byte_index = new_short_count;
unsigned int word_index = 0;
unsigned int non_word_index = new_word_count;
unsigned int new_ri_count = 0;
for (r = 0; r < ri_count; r++)
if (delta_sz[r])
{
ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
ri_map[r] = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
new_ri_count++;
}
shortCount = new_short_count;
wordSizeCount = new_word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);
regionIndices.len = new_ri_count;
if (unlikely (!c->extend (this))) return_trace (false);
@ -2816,28 +2879,55 @@ struct VarData
HBUINT8 *get_delta_bytes ()
{ return &StructAfter<HBUINT8> (regionIndices); }
int16_t get_item_delta (unsigned int item, unsigned int region) const
int32_t get_item_delta (unsigned int item, unsigned int region) const
{
if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
if (region < shortCount)
return ((const HBINT16 *)p)[region];
const HBINT8 *p = (const HBINT8 *) get_delta_bytes () + item * get_row_size ();
unsigned word_count = wordCount ();
bool is_long = longWords ();
if (is_long)
{
if (region < word_count)
return ((const HBINT32 *) p)[region];
else
return ((const HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count];
}
else
return (p + HBINT16::static_size * shortCount)[region - shortCount];
{
if (region < word_count)
return ((const HBINT16 *) p)[region];
else
return (p + HBINT16::static_size * word_count)[region - word_count];
}
}
void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
{
HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
if (region < shortCount)
((HBINT16 *)p)[region] = delta;
unsigned word_count = wordCount ();
bool is_long = longWords ();
if (is_long)
{
if (region < word_count)
((HBINT32 *) p)[region] = delta;
else
((HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count] = delta;
}
else
(p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
{
if (region < word_count)
((HBINT16 *) p)[region] = delta;
else
(p + HBINT16::static_size * word_count)[region - word_count] = delta;
}
}
bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; }
unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; }
protected:
HBUINT16 itemCount;
HBUINT16 shortCount;
HBUINT16 wordSizeCount;
Array16Of<HBUINT16> regionIndices;
/*UnsizedArrayOf<HBUINT8>bytesX;*/
public:
@ -2846,9 +2936,28 @@ struct VarData
struct VariationStore
{
using cache_t = VarRegionList::cache_t;
cache_t *create_cache () const
{
auto &r = this+regions;
unsigned count = r.regionCount;
float *cache = (float *) hb_malloc (sizeof (float) * count);
if (unlikely (!cache)) return nullptr;
for (unsigned i = 0; i < count; i++)
cache[i] = REGION_CACHE_ITEM_CACHE_INVALID;
return cache;
}
static void destroy_cache (cache_t *cache) { hb_free (cache); }
private:
float get_delta (unsigned int outer, unsigned int inner,
const int *coords, unsigned int coord_count) const
const int *coords, unsigned int coord_count,
VarRegionList::cache_t *cache = nullptr) const
{
#ifdef HB_NO_VAR
return 0.f;
@ -2859,16 +2968,18 @@ struct VariationStore
return (this+dataSets[outer]).get_delta (inner,
coords, coord_count,
this+regions);
this+regions,
cache);
}
public:
float get_delta (unsigned int index,
const int *coords, unsigned int coord_count) const
const int *coords, unsigned int coord_count,
VarRegionList::cache_t *cache = nullptr) const
{
unsigned int outer = index >> 16;
unsigned int inner = index & 0xFFFF;
return get_delta (outer, inner, coords, coord_count);
return get_delta (outer, inner, coords, coord_count, cache);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -2995,6 +3106,8 @@ struct VariationStore
DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
};
#undef REGION_CACHE_ITEM_CACHE_INVALID
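A minimal sketch of the caching pattern these hooks enable, mirroring the hb-ot-font.cc changes earlier in this commit; varStore, varIdx and font are assumed to be in scope:
OT::VariationStore::cache_t *cache = varStore.create_cache ();   /* one float slot per region */
float delta = varStore.get_delta (varIdx, font->coords, font->num_coords, cache);
/* ... further get_delta calls reuse the cached per-region scalars ... */
OT::VariationStore::destroy_cache (cache);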
/*
* Feature Variations
*/
@ -3462,11 +3575,15 @@ struct VariationDevice
private:
hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
{ return font->em_scalef_x (get_delta (font, store)); }
hb_position_t get_x_delta (hb_font_t *font,
const VariationStore &store,
VariationStore::cache_t *store_cache = nullptr) const
{ return font->em_scalef_x (get_delta (font, store, store_cache)); }
hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
{ return font->em_scalef_y (get_delta (font, store)); }
hb_position_t get_y_delta (hb_font_t *font,
const VariationStore &store,
VariationStore::cache_t *store_cache = nullptr) const
{ return font->em_scalef_y (get_delta (font, store, store_cache)); }
VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
{
@ -3500,9 +3617,11 @@ struct VariationDevice
private:
float get_delta (hb_font_t *font, const VariationStore &store) const
float get_delta (hb_font_t *font,
const VariationStore &store,
VariationStore::cache_t *store_cache = nullptr) const
{
return store.get_delta (varIdx, font->coords, font->num_coords);
return store.get_delta (varIdx, font->coords, font->num_coords, (VariationStore::cache_t *) store_cache);
}
protected:
@ -3525,7 +3644,9 @@ struct DeviceHeader
struct Device
{
hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
hb_position_t get_x_delta (hb_font_t *font,
const VariationStore &store=Null (VariationStore),
VariationStore::cache_t *store_cache = nullptr) const
{
switch (u.b.format)
{
@ -3535,13 +3656,15 @@ struct Device
#endif
#ifndef HB_NO_VAR
case 0x8000:
return u.variation.get_x_delta (font, store);
return u.variation.get_x_delta (font, store, store_cache);
#endif
default:
return 0;
}
}
hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
hb_position_t get_y_delta (hb_font_t *font,
const VariationStore &store=Null (VariationStore),
VariationStore::cache_t *store_cache = nullptr) const
{
switch (u.b.format)
{
@ -3551,7 +3674,7 @@ struct Device
#endif
#ifndef HB_NO_VAR
case 0x8000:
return u.variation.get_y_delta (font, store);
return u.variation.get_y_delta (font, store, store_cache);
#endif
default:
return 0;

View file

@ -571,7 +571,7 @@ struct GDEF
static_assert (((unsigned int) HB_OT_LAYOUT_GLYPH_PROPS_MARK == (unsigned int) LookupFlag::IgnoreMarks), "");
switch (klass) {
default: return 0;
default: return HB_OT_LAYOUT_GLYPH_CLASS_UNCLASSIFIED;
case BaseGlyph: return HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH;
case LigatureGlyph: return HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
case MarkGlyph:

File diff suppressed because it is too large

View file

@ -59,13 +59,16 @@ template <typename context_t>
return l.dispatch (c);
}
/*static*/ inline hb_closure_lookups_context_t::return_t SubstLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
template <>
inline hb_closure_lookups_context_t::return_t
SubstLookup::dispatch_recurse_func<hb_closure_lookups_context_t> (hb_closure_lookups_context_t *c, unsigned this_index)
{
const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (this_index);
return l.closure_lookups (c, this_index);
}
/*static*/ bool SubstLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
template <>
inline bool SubstLookup::dispatch_recurse_func<hb_ot_apply_context_t> (hb_ot_apply_context_t *c, unsigned int lookup_index)
{
const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
unsigned int saved_lookup_props = c->lookup_props;

View file

@ -109,14 +109,10 @@ struct hb_closure_context_t :
{
done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());
if (!done_lookups_glyph_set->get (lookup_index))
if (!done_lookups_glyph_set->has (lookup_index))
{
hb_set_t* empty_set = hb_set_create ();
if (unlikely (!done_lookups_glyph_set->set (lookup_index, empty_set)))
{
hb_set_destroy (empty_set);
if (unlikely (!done_lookups_glyph_set->set (lookup_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
return true;
}
}
hb_set_clear (done_lookups_glyph_set->get (lookup_index));
@ -165,21 +161,19 @@ struct hb_closure_context_t :
hb_set_t *glyphs;
hb_set_t output[1];
hb_vector_t<hb_set_t> active_glyphs_stack;
recurse_func_t recurse_func;
recurse_func_t recurse_func = nullptr;
unsigned int nesting_level_left;
hb_closure_context_t (hb_face_t *face_,
hb_set_t *glyphs_,
hb_map_t *done_lookups_glyph_count_,
hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set_,
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set_,
unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
face (face_),
glyphs (glyphs_),
recurse_func (nullptr),
nesting_level_left (nesting_level_left_),
done_lookups_glyph_count (done_lookups_glyph_count_),
done_lookups_glyph_set (done_lookups_glyph_set_),
lookup_count (0)
done_lookups_glyph_set (done_lookups_glyph_set_)
{}
~hb_closure_context_t () { flush (); }
@ -197,8 +191,8 @@ struct hb_closure_context_t :
private:
hb_map_t *done_lookups_glyph_count;
hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set;
unsigned int lookup_count;
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set;
unsigned int lookup_count = 0;
};
@ -400,7 +394,6 @@ struct hb_collect_coverage_context_t :
set_t *set;
};
struct hb_ot_apply_context_t :
hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
{
@ -416,7 +409,7 @@ struct hb_ot_apply_context_t :
match_func (nullptr),
match_data (nullptr) {}
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, const HBUINT16 &value, const void *data);
void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
@ -434,7 +427,7 @@ struct hb_ot_apply_context_t :
MATCH_MAYBE
};
may_match_t may_match (const hb_glyph_info_t &info,
may_match_t may_match (hb_glyph_info_t &info,
const HBUINT16 *glyph_data) const
{
if (!(info.mask & mask) ||
@ -442,7 +435,7 @@ struct hb_ot_apply_context_t :
return MATCH_NO;
if (match_func)
return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
return match_func (info, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
return MATCH_MAYBE;
}
@ -530,7 +523,7 @@ struct hb_ot_apply_context_t :
while (idx + num_items < end)
{
idx++;
const hb_glyph_info_t &info = c->buffer->info[idx];
hb_glyph_info_t &info = c->buffer->info[idx];
matcher_t::may_skip_t skip = matcher.may_skip (c, info);
if (unlikely (skip == matcher_t::SKIP_YES))
@ -563,7 +556,7 @@ struct hb_ot_apply_context_t :
while (idx > num_items - 1)
{
idx--;
const hb_glyph_info_t &info = c->buffer->out_info[idx];
hb_glyph_info_t &info = c->buffer->out_info[idx];
matcher_t::may_skip_t skip = matcher.may_skip (c, info);
if (unlikely (skip == matcher_t::SKIP_YES))
@ -611,7 +604,10 @@ struct hb_ot_apply_context_t :
return_t recurse (unsigned int sub_lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
{
buffer->shaping_failed = true;
return default_return_value ();
}
nesting_level_left--;
bool ret = recurse_func (this, sub_lookup_index);
@ -621,35 +617,34 @@ struct hb_ot_apply_context_t :
skipping_iterator_t iter_input, iter_context;
unsigned int table_index; /* GSUB/GPOS */
hb_font_t *font;
hb_face_t *face;
hb_buffer_t *buffer;
recurse_func_t recurse_func;
recurse_func_t recurse_func = nullptr;
const GDEF &gdef;
const VariationStore &var_store;
VariationStore::cache_t *var_store_cache;
hb_direction_t direction;
hb_mask_t lookup_mask;
unsigned int table_index; /* GSUB/GPOS */
unsigned int lookup_index;
unsigned int lookup_props;
unsigned int nesting_level_left;
hb_mask_t lookup_mask = 1;
unsigned int lookup_index = (unsigned) -1;
unsigned int lookup_props = 0;
unsigned int nesting_level_left = HB_MAX_NESTING_LEVEL;
bool has_glyph_classes;
bool auto_zwnj;
bool auto_zwj;
bool per_syllable;
bool random;
uint32_t random_state;
bool auto_zwnj = true;
bool auto_zwj = true;
bool per_syllable = false;
bool random = false;
uint32_t random_state = 1;
unsigned new_syllables = (unsigned) -1;
hb_ot_apply_context_t (unsigned int table_index_,
hb_font_t *font_,
hb_buffer_t *buffer_) :
iter_input (), iter_context (),
table_index (table_index_),
font (font_), face (font->face), buffer (buffer_),
recurse_func (nullptr),
gdef (
#ifndef HB_NO_OT_LAYOUT
*face->table.GDEF->table
@ -658,18 +653,23 @@ struct hb_ot_apply_context_t :
#endif
),
var_store (gdef.get_var_store ()),
var_store_cache (
#ifndef HB_NO_VAR
table_index == 1 && font->num_coords ? var_store.create_cache () : nullptr
#else
nullptr
#endif
),
direction (buffer_->props.direction),
lookup_mask (1),
table_index (table_index_),
lookup_index ((unsigned int) -1),
lookup_props (0),
nesting_level_left (HB_MAX_NESTING_LEVEL),
has_glyph_classes (gdef.has_glyph_classes ()),
auto_zwnj (true),
auto_zwj (true),
per_syllable (false),
random (false),
random_state (1) { init_iters (); }
has_glyph_classes (gdef.has_glyph_classes ())
{ init_iters (); }
~hb_ot_apply_context_t ()
{
#ifndef HB_NO_VAR
VariationStore::destroy_cache (var_store_cache);
#endif
}
void init_iters ()
{
@ -736,6 +736,9 @@ struct hb_ot_apply_context_t :
bool ligature = false,
bool component = false) const
{
if (new_syllables != (unsigned) -1)
buffer->cur().syllable() = new_syllables;
unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
if (ligature)
@ -790,8 +793,8 @@ struct hb_ot_apply_context_t :
};
struct hb_get_subtables_context_t :
hb_dispatch_context_t<hb_get_subtables_context_t>
struct hb_accelerate_subtables_context_t :
hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
template <typename Type>
static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
@ -800,15 +803,53 @@ struct hb_get_subtables_context_t :
return typed_obj->apply (c);
}
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
template <typename T>
static inline auto apply_cached_ (const T *obj, OT::hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply (c, true) )
template <typename T>
static inline auto apply_cached_ (const T *obj, OT::hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
template <typename Type>
static inline bool apply_cached_to (const void *obj, OT::hb_ot_apply_context_t *c)
{
const Type *typed_obj = (const Type *) obj;
return apply_cached_ (typed_obj, c, hb_prioritize);
}
template <typename T>
static inline auto cache_func_ (const T *obj, OT::hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
template <typename T>
static inline bool cache_func_ (const T *obj, OT::hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
template <typename Type>
static inline bool cache_func_to (const void *obj, OT::hb_ot_apply_context_t *c, bool enter)
{
const Type *typed_obj = (const Type *) obj;
return cache_func_ (typed_obj, c, enter, hb_prioritize);
}
#endif
typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
typedef bool (*hb_cache_func_t) (const void *obj, OT::hb_ot_apply_context_t *c, bool enter);
struct hb_applicable_t
{
friend struct hb_accelerate_subtables_context_t;
friend struct hb_ot_layout_lookup_accelerator_t;
template <typename T>
void init (const T &obj_, hb_apply_func_t apply_func_)
void init (const T &obj_,
hb_apply_func_t apply_func_
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
, hb_apply_func_t apply_cached_func_
, hb_cache_func_t cache_func_
#endif
)
{
obj = &obj_;
apply_func = apply_func_;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
apply_cached_func = apply_cached_func_;
cache_func = cache_func_;
#endif
digest.init ();
obj_.get_coverage ().collect_coverage (&digest);
}
@ -817,38 +858,93 @@ struct hb_get_subtables_context_t :
{
return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
}
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
bool apply_cached (OT::hb_ot_apply_context_t *c) const
{
return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
}
bool cache_enter (OT::hb_ot_apply_context_t *c) const
{
return cache_func (obj, c, true);
}
void cache_leave (OT::hb_ot_apply_context_t *c) const
{
cache_func (obj, c, false);
}
#endif
private:
const void *obj;
hb_apply_func_t apply_func;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
hb_apply_func_t apply_cached_func;
hb_cache_func_t cache_func;
#endif
hb_set_digest_t digest;
};
typedef hb_vector_t<hb_applicable_t> array_t;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
template <typename T>
auto cache_cost (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.cache_cost () )
template <typename T>
auto cache_cost (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( 0u )
#endif
/* Dispatch interface. */
template <typename T>
return_t dispatch (const T &obj)
{
hb_applicable_t *entry = array.push();
entry->init (obj, apply_to<T>);
hb_applicable_t entry;
entry.init (obj,
apply_to<T>
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
, apply_cached_to<T>
, cache_func_to<T>
#endif
);
array.push (entry);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
/* Cache handling
*
* We allow one subtable from each lookup to use a cache. The assumption
* being that multiple subtables of the same lookup cannot use a cache
* because the resources they would use will collide. As such, we ask
* each subtable to tell us how much it costs (which a cache would avoid),
* and we allocate the cache opportunity to the costliest subtable.
*/
unsigned cost = cache_cost (obj, hb_prioritize);
if (cost > cache_user_cost && !array.in_error ())
{
cache_user_idx = array.length - 1;
cache_user_cost = cost;
}
#endif
return hb_empty_t ();
}
static return_t default_return_value () { return hb_empty_t (); }
hb_get_subtables_context_t (array_t &array_) :
array (array_) {}
hb_accelerate_subtables_context_t (array_t &array_) :
array (array_) {}
array_t &array;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
unsigned cache_user_idx = (unsigned) -1;
unsigned cache_user_cost = 0;
#endif
};
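Editor's note: the cache-handling comment above grants the single per-lookup cache slot to whichever subtable reports the highest cost (subtables that would not benefit report 0). A minimal, self-contained sketch of that selection, using hypothetical names rather than the real HarfBuzz types, might look like this:

// Editor's sketch (not part of the upstream change): each candidate subtable
// reports a "cost" that a per-lookup cache could avoid; only the costliest
// one is granted the single cache slot.
#include <cstdio>
#include <vector>

struct subtable_t
{
  const char *name;
  unsigned cache_cost;   // 0 means "caching would not help this subtable"
};

// Mirrors the selection loop sketched above: keep the index of the costliest
// subtable seen so far, or (unsigned) -1 when no subtable benefits.
static unsigned
pick_cache_user (const std::vector<subtable_t> &subtables)
{
  unsigned best_idx = (unsigned) -1;
  unsigned best_cost = 0;
  for (unsigned i = 0; i < subtables.size (); i++)
    if (subtables[i].cache_cost > best_cost)
    {
      best_idx = i;
      best_cost = subtables[i].cache_cost;
    }
  return best_idx;
}

int
main ()
{
  std::vector<subtable_t> lookup = {
    {"ContextFormat1", 0},
    {"ContextFormat2", 12},    // large ClassDef times many rule sets
    {"ChainContextFormat3", 3},
  };
  unsigned idx = pick_cache_user (lookup);
  if (idx != (unsigned) -1)
    printf ("cache slot goes to %s\n", lookup[idx].name);
  return 0;
}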
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, const HBUINT16 &value, const void *data);
struct ContextClosureFuncs
{
@@ -863,6 +959,10 @@ struct ContextApplyFuncs
{
match_func_t match;
};
struct ChainContextApplyFuncs
{
match_func_t match[3];
};
static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
@@ -939,19 +1039,30 @@ static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
}
static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
static inline bool match_glyph (hb_glyph_info_t &info, const HBUINT16 &value, const void *data HB_UNUSED)
{
return glyph_id == value;
return info.codepoint == value;
}
static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
static inline bool match_class (hb_glyph_info_t &info, const HBUINT16 &value, const void *data)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
return class_def.get_class (glyph_id) == value;
return class_def.get_class (info.codepoint) == value;
}
static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
static inline bool match_class_cached (hb_glyph_info_t &info, const HBUINT16 &value, const void *data)
{
unsigned klass = info.syllable();
if (klass < 255)
return klass == value;
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
klass = class_def.get_class (info.codepoint);
if (likely (klass < 255))
info.syllable() = klass;
return klass == value;
}
static inline bool match_coverage (hb_glyph_info_t &info, const HBUINT16 &value, const void *data)
{
const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
return (data+coverage).get_coverage (info.codepoint) != NOT_COVERED;
}
static inline bool would_match_input (hb_would_apply_context_t *c,
@@ -964,8 +1075,12 @@ static inline bool would_match_input (hb_would_apply_context_t *c,
return false;
for (unsigned int i = 1; i < count; i++)
if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
{
hb_glyph_info_t info;
info.codepoint = c->glyphs[i];
if (likely (!match_func (info, input[i - 1], match_data)))
return false;
}
return true;
}
@@ -2125,19 +2240,54 @@ struct ContextFormat2
const Coverage &get_coverage () const { return this+coverage; }
bool apply (hb_ot_apply_context_t *c) const
unsigned cache_cost () const
{
unsigned c = (this+classDef).cost () * ruleSet.len;
return c >= 4 ? c : 0;
}
bool cache_func (hb_ot_apply_context_t *c, bool enter) const
{
if (enter)
{
if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
return false;
auto &info = c->buffer->info;
unsigned count = c->buffer->len;
for (unsigned i = 0; i < count; i++)
info[i].syllable() = 255;
c->new_syllables = 255;
return true;
}
else
{
c->new_syllables = (unsigned) -1;
HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
return true;
}
}
bool apply (hb_ot_apply_context_t *c, bool cached = false) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
const ClassDef &class_def = this+classDef;
index = class_def.get_class (c->buffer->cur().codepoint);
const RuleSet &rule_set = this+ruleSet[index];
struct ContextApplyLookupContext lookup_context = {
{match_class},
{cached ? match_class_cached : match_class},
&class_def
};
if (cached && c->buffer->cur().syllable() < 255)
index = c->buffer->cur().syllable ();
else
{
index = class_def.get_class (c->buffer->cur().codepoint);
if (cached && index < 255)
c->buffer->cur().syllable() = index;
}
const RuleSet &rule_set = this+ruleSet[index];
return_trace (rule_set.apply (c, lookup_context));
}
@@ -2411,7 +2561,7 @@ struct ChainContextCollectGlyphsLookupContext
struct ChainContextApplyLookupContext
{
ContextApplyFuncs funcs;
ChainContextApplyFuncs funcs;
const void *match_data[3];
};
@@ -2499,7 +2649,7 @@ static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c
return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
&& would_match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data[1]);
lookup_context.funcs.match[1], lookup_context.match_data[1]);
}
static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
@@ -2518,11 +2668,11 @@ static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
if (!(match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data[1],
lookup_context.funcs.match[1], lookup_context.match_data[1],
&match_end, match_positions) && (end_index = match_end)
&& match_lookahead (c,
lookaheadCount, lookahead,
lookup_context.funcs.match, lookup_context.match_data[2],
lookup_context.funcs.match[2], lookup_context.match_data[2],
match_end, &end_index)))
{
c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
@@ -2532,7 +2682,7 @@ static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
unsigned start_index = c->buffer->out_len;
if (!match_backtrack (c,
backtrackCount, backtrack,
lookup_context.funcs.match, lookup_context.match_data[0],
lookup_context.funcs.match[0], lookup_context.match_data[0],
&start_index))
{
c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
@@ -2934,7 +3084,7 @@ struct ChainContextFormat1
{
const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
struct ChainContextApplyLookupContext lookup_context = {
{match_glyph},
{{match_glyph, match_glyph, match_glyph}},
{nullptr, nullptr, nullptr}
};
return rule_set.would_apply (c, lookup_context);
@@ -2950,7 +3100,7 @@ struct ChainContextFormat1
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{match_glyph},
{{match_glyph, match_glyph, match_glyph}},
{nullptr, nullptr, nullptr}
};
return_trace (rule_set.apply (c, lookup_context));
@@ -3134,7 +3284,7 @@ struct ChainContextFormat2
unsigned int index = input_class_def.get_class (c->glyphs[0]);
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{match_class},
{{match_class, match_class, match_class}},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
@@ -3144,7 +3294,33 @@ struct ChainContextFormat2
const Coverage &get_coverage () const { return this+coverage; }
bool apply (hb_ot_apply_context_t *c) const
unsigned cache_cost () const
{
unsigned c = (this+lookaheadClassDef).cost () * ruleSet.len;
return c >= 4 ? c : 0;
}
bool cache_func (hb_ot_apply_context_t *c, bool enter) const
{
if (enter)
{
if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
return false;
auto &info = c->buffer->info;
unsigned count = c->buffer->len;
for (unsigned i = 0; i < count; i++)
info[i].syllable() = 255;
c->new_syllables = 255;
return true;
}
else
{
c->new_syllables = (unsigned) -1;
HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
return true;
}
}
bool apply (hb_ot_apply_context_t *c, bool cached = false) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
@@ -3154,14 +3330,27 @@ struct ChainContextFormat2
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
index = input_class_def.get_class (c->buffer->cur().codepoint);
const ChainRuleSet &rule_set = this+ruleSet[index];
/* For ChainContextFormat2 we cache the LookaheadClassDef instead of InputClassDef.
* The reason is that most heavy fonts want to identify a glyph in context and apply
* a lookup to it. In this scenario, the length of the input sequence is one, whereas
* the lookahead / backtrack are typically longer. The one glyph in input sequence is
* looked-up below and no input glyph is looked up in individual rules, whereas the
* lookahead and backtrack glyphs are tried. Since we match lookahead before backtrack,
* we should cache lookahead. This decision showed a 20% improvement in shaping of
* the Gulzar font.
*/
struct ChainContextApplyLookupContext lookup_context = {
{match_class},
{{cached && &backtrack_class_def == &input_class_def ? match_class_cached : match_class,
cached && &input_class_def == &lookahead_class_def ? match_class_cached : match_class,
cached ? match_class_cached : match_class}},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
index = input_class_def.get_class (c->buffer->cur().codepoint);
const ChainRuleSet &rule_set = this+ruleSet[index];
return_trace (rule_set.apply (c, lookup_context));
}
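Editor's note: the comment above motivates which ClassDef to cache; mechanically, the cached match memoizes a glyph's class in the spare per-glyph syllable byte, with 255 meaning "not computed yet". A standalone sketch of that memoization, under assumed simplified types (not the upstream ones), might be:

// Editor's sketch (not upstream code): repeated class lookups for the same
// glyph during context matching collapse to a single byte compare once the
// class has been computed and stored in the per-glyph cache byte.
#include <cstdint>
#include <cstdio>
#include <unordered_map>

struct glyph_info_t
{
  uint32_t codepoint;
  uint8_t  klass_cache;   // stand-in for the repurposed syllable byte; 255 == unset
};

// Hypothetical stand-in for ClassDef::get_class.
static unsigned
get_class (const std::unordered_map<uint32_t, unsigned> &class_def, uint32_t gid)
{
  auto it = class_def.find (gid);
  return it == class_def.end () ? 0 : it->second;
}

static bool
match_class_cached_sketch (glyph_info_t &info, unsigned value,
                           const std::unordered_map<uint32_t, unsigned> &class_def)
{
  unsigned klass = info.klass_cache;
  if (klass < 255)
    return klass == value;               // cache hit
  klass = get_class (class_def, info.codepoint);
  if (klass < 255)
    info.klass_cache = (uint8_t) klass;  // remember for the next rule that asks
  return klass == value;
}

int
main ()
{
  std::unordered_map<uint32_t, unsigned> lookahead_classes = {{0x0644u, 7}};
  glyph_info_t info = {0x0644u, 255};
  printf ("%d\n", match_class_cached_sketch (info, 7, lookahead_classes)); // computes, caches
  printf ("%d\n", match_class_cached_sketch (info, 7, lookahead_classes)); // served from cache
  return 0;
}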
@@ -3359,7 +3548,7 @@ struct ChainContextFormat3
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{match_coverage},
{{match_coverage, match_coverage, match_coverage}},
{this, this, this}
};
return chain_context_would_apply_lookup (c,
@@ -3386,7 +3575,7 @@ struct ChainContextFormat3
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{match_coverage},
{{match_coverage, match_coverage, match_coverage}},
{this, this, this}
};
return_trace (chain_context_apply_lookup (c,
@@ -3623,25 +3812,63 @@ struct hb_ot_layout_lookup_accelerator_t
lookup.collect_coverage (&digest);
subtables.init ();
OT::hb_get_subtables_context_t c_get_subtables (subtables);
lookup.dispatch (&c_get_subtables);
OT::hb_accelerate_subtables_context_t c_accelerate_subtables (subtables);
lookup.dispatch (&c_accelerate_subtables);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
cache_user_idx = c_accelerate_subtables.cache_user_idx;
for (unsigned i = 0; i < subtables.length; i++)
if (i != cache_user_idx)
subtables[i].apply_cached_func = subtables[i].apply_func;
#endif
}
void fini () { subtables.fini (); }
bool may_have (hb_codepoint_t g) const
{ return digest.may_have (g); }
bool apply (hb_ot_apply_context_t *c) const
bool apply (hb_ot_apply_context_t *c, bool use_cache) const
{
for (unsigned int i = 0; i < subtables.length; i++)
if (subtables[i].apply (c))
return true;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
if (use_cache)
{
for (unsigned int i = 0; i < subtables.length; i++)
if (subtables[i].apply_cached (c))
return true;
}
else
#endif
{
for (unsigned int i = 0; i < subtables.length; i++)
if (subtables[i].apply (c))
return true;
}
return false;
}
bool cache_enter (OT::hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
return cache_user_idx != (unsigned) -1 &&
subtables[cache_user_idx].cache_enter (c);
#else
return false;
#endif
}
void cache_leave (OT::hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
subtables[cache_user_idx].cache_leave (c);
#endif
}
private:
hb_set_digest_t digest;
hb_get_subtables_context_t::array_t subtables;
hb_accelerate_subtables_context_t::array_t subtables;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
unsigned cache_user_idx = (unsigned) -1;
#endif
};
struct GSUBGPOS
@@ -3721,6 +3948,8 @@ struct GSUBGPOS
hb_set_t visited_lookups, inactive_lookups;
OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);
for (unsigned lookup_index : + hb_iter (lookup_indexes))
reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
@@ -3729,7 +3958,7 @@ struct GSUBGPOS
}
void prune_langsys (const hb_map_t *duplicate_feature_map,
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map,
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
hb_set_t *new_feature_indexes /* OUT */) const
{
hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);
@@ -3787,7 +4016,7 @@ struct GSUBGPOS
hb_map_t *duplicate_feature_map /* OUT */) const
{
if (feature_indices->is_empty ()) return;
hb_hashmap_t<hb_tag_t, hb_set_t *> unique_features;
hb_hashmap_t<hb_tag_t, hb::unique_ptr<hb_set_t>> unique_features;
//find out duplicate features after subset
for (unsigned i : feature_indices->iter ())
{
@@ -3795,16 +4024,9 @@ struct GSUBGPOS
if (t == HB_MAP_VALUE_INVALID) continue;
if (!unique_features.has (t))
{
hb_set_t* indices = hb_set_create ();
if (unlikely (indices == hb_set_get_empty () ||
!unique_features.set (t, indices)))
{
hb_set_destroy (indices);
for (auto _ : unique_features.iter ())
hb_set_destroy (_.second);
if (unlikely (!unique_features.set (t, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
return;
}
if (unique_features.get (t))
if (unique_features.has (t))
unique_features.get (t)->add (i);
duplicate_feature_map->set (i, i);
continue;
@@ -3849,9 +4071,6 @@ struct GSUBGPOS
duplicate_feature_map->set (i, i);
}
}
for (auto _ : unique_features.iter ())
hb_set_destroy (_.second);
}
void prune_features (const hb_map_t *lookup_indices, /* IN */

View file

@@ -46,7 +46,7 @@
#include "hb-ot-layout-gdef-table.hh"
#include "hb-ot-layout-gsub-table.hh"
#include "hb-ot-layout-gpos-table.hh"
#include "hb-ot-layout-base-table.hh" // Just so we compile it; unused otherwise.
#include "hb-ot-layout-base-table.hh"
#include "hb-ot-layout-jstf-table.hh" // Just so we compile it; unused otherwise.
#include "hb-ot-name-table.hh"
#include "hb-ot-os2-table.hh"
@@ -55,6 +55,7 @@
#include "hb-aat-layout-opbd-table.hh" // Just so we compile it; unused otherwise.
using OT::Layout::GSUB::GSUB;
using OT::Layout::GPOS;
/**
* SECTION:hb-ot-layout
@@ -260,7 +261,6 @@ _hb_ot_layout_set_glyph_props (hb_font_t *font,
{
_hb_glyph_info_set_glyph_props (&buffer->info[i], gdef.get_glyph_props (buffer->info[i].codepoint));
_hb_glyph_info_clear_lig_props (&buffer->info[i]);
buffer->info[i].syllable() = 0;
}
}
@@ -401,7 +401,7 @@ GSUB::is_blocklisted (hb_blob_t *blob HB_UNUSED,
}
bool
OT::GPOS::is_blocklisted (hb_blob_t *blob HB_UNUSED,
GPOS::is_blocklisted (hb_blob_t *blob HB_UNUSED,
hb_face_t *face HB_UNUSED) const
{
#ifdef HB_NO_OT_LAYOUT_BLOCKLIST
@@ -1501,15 +1501,12 @@ hb_ot_layout_lookup_substitute_closure (hb_face_t *face,
hb_set_t *glyphs /* OUT */)
{
hb_map_t done_lookups_glyph_count;
hb_hashmap_t<unsigned, hb_set_t *> done_lookups_glyph_set;
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> done_lookups_glyph_set;
OT::hb_closure_context_t c (face, glyphs, &done_lookups_glyph_count, &done_lookups_glyph_set);
const OT::SubstLookup& l = face->table.GSUB->table->get_lookup (lookup_index);
l.closure (&c, lookup_index);
for (auto _ : done_lookups_glyph_set.iter ())
hb_set_destroy (_.second);
}
/**
@@ -1529,7 +1526,7 @@ hb_ot_layout_lookups_substitute_closure (hb_face_t *face,
hb_set_t *glyphs /* OUT */)
{
hb_map_t done_lookups_glyph_count;
hb_hashmap_t<unsigned, hb_set_t *> done_lookups_glyph_set;
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> done_lookups_glyph_set;
OT::hb_closure_context_t c (face, glyphs, &done_lookups_glyph_count, &done_lookups_glyph_set);
const GSUB& gsub = *face->table.GSUB->table;
@@ -1551,13 +1548,10 @@ hb_ot_layout_lookups_substitute_closure (hb_face_t *face,
}
} while (iteration_count++ <= HB_CLOSURE_MAX_STAGES &&
glyphs_length != glyphs->get_population ());
for (auto _ : done_lookups_glyph_set.iter ())
hb_set_destroy (_.second);
}
/*
* OT::GPOS
* GPOS
*/
@@ -1588,7 +1582,7 @@ hb_ot_layout_has_positioning (hb_face_t *face)
void
hb_ot_layout_position_start (hb_font_t *font, hb_buffer_t *buffer)
{
OT::GPOS::position_start (font, buffer);
GPOS::position_start (font, buffer);
}
@@ -1603,7 +1597,7 @@ hb_ot_layout_position_start (hb_font_t *font, hb_buffer_t *buffer)
void
hb_ot_layout_position_finish_advances (hb_font_t *font, hb_buffer_t *buffer)
{
OT::GPOS::position_finish_advances (font, buffer);
GPOS::position_finish_advances (font, buffer);
}
/**
@@ -1617,7 +1611,7 @@ hb_ot_layout_position_finish_advances (hb_font_t *font, hb_buffer_t *buffer)
void
hb_ot_layout_position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer)
{
OT::GPOS::position_finish_offsets (font, buffer);
GPOS::position_finish_offsets (font, buffer);
}
@@ -1652,7 +1646,7 @@ hb_ot_layout_get_size_params (hb_face_t *face,
unsigned int *range_start, /* OUT. May be NULL */
unsigned int *range_end /* OUT. May be NULL */)
{
const OT::GPOS &gpos = *face->table.GPOS->table;
const GPOS &gpos = *face->table.GPOS->table;
const hb_tag_t tag = HB_TAG ('s','i','z','e');
unsigned int num_features = gpos.get_feature_count ();
@@ -1803,7 +1797,7 @@ hb_ot_layout_feature_get_characters (hb_face_t *face,
struct GSUBProxy
{
static constexpr unsigned table_index = 0u;
static constexpr bool inplace = false;
static constexpr bool always_inplace = false;
typedef OT::SubstLookup Lookup;
GSUBProxy (hb_face_t *face) :
@@ -1817,14 +1811,14 @@ struct GSUBProxy
struct GPOSProxy
{
static constexpr unsigned table_index = 1u;
static constexpr bool inplace = true;
static constexpr bool always_inplace = true;
typedef OT::PosLookup Lookup;
GPOSProxy (hb_face_t *face) :
table (*face->table.GPOS->table),
accels (face->table.GPOS->accels) {}
const OT::GPOS &table;
const GPOS &table;
const OT::hb_ot_layout_lookup_accelerator_t *accels;
};
@@ -1833,6 +1827,8 @@ static inline bool
apply_forward (OT::hb_ot_apply_context_t *c,
const OT::hb_ot_layout_lookup_accelerator_t &accel)
{
bool use_cache = accel.cache_enter (c);
bool ret = false;
hb_buffer_t *buffer = c->buffer;
while (buffer->idx < buffer->len && buffer->successful)
@@ -1842,7 +1838,7 @@ apply_forward (OT::hb_ot_apply_context_t *c,
(buffer->cur().mask & c->lookup_mask) &&
c->check_glyph_property (&buffer->cur(), c->lookup_props))
{
applied = accel.apply (c);
applied = accel.apply (c, use_cache);
}
if (applied)
@@ -1850,6 +1846,10 @@ apply_forward (OT::hb_ot_apply_context_t *c,
else
(void) buffer->next_glyph ();
}
if (use_cache)
accel.cache_leave (c);
return ret;
}
@@ -1864,7 +1864,7 @@ apply_backward (OT::hb_ot_apply_context_t *c,
if (accel.may_have (buffer->cur().codepoint) &&
(buffer->cur().mask & c->lookup_mask) &&
c->check_glyph_property (&buffer->cur(), c->lookup_props))
ret |= accel.apply (c);
ret |= accel.apply (c, false);
/* The reverse lookup doesn't "advance" cursor (for good reason). */
buffer->idx--;
@@ -1890,13 +1890,13 @@ apply_string (OT::hb_ot_apply_context_t *c,
if (likely (!lookup.is_reverse ()))
{
/* in/out forward substitution/positioning */
if (!Proxy::inplace)
if (!Proxy::always_inplace)
buffer->clear_output ();
buffer->idx = 0;
apply_forward (c, accel);
if (!Proxy::inplace)
if (!Proxy::always_inplace)
buffer->sync ();
}
else
@@ -1917,7 +1917,7 @@ inline void hb_ot_map_t::apply (const Proxy &proxy,
const unsigned int table_index = proxy.table_index;
unsigned int i = 0;
OT::hb_ot_apply_context_t c (table_index, font, buffer);
c.set_recurse_func (Proxy::Lookup::apply_recurse_func);
c.set_recurse_func (Proxy::Lookup::template dispatch_recurse_func<OT::hb_ot_apply_context_t>);
for (unsigned int stage_index = 0; stage_index < stages[table_index].length; stage_index++)
{

View file

@@ -589,13 +589,11 @@ _hb_buffer_allocate_gsubgpos_vars (hb_buffer_t *buffer)
{
HB_BUFFER_ALLOCATE_VAR (buffer, glyph_props);
HB_BUFFER_ALLOCATE_VAR (buffer, lig_props);
HB_BUFFER_ALLOCATE_VAR (buffer, syllable);
}
static inline void
_hb_buffer_deallocate_gsubgpos_vars (hb_buffer_t *buffer)
{
HB_BUFFER_DEALLOCATE_VAR (buffer, syllable);
HB_BUFFER_DEALLOCATE_VAR (buffer, lig_props);
HB_BUFFER_DEALLOCATE_VAR (buffer, glyph_props);
}
@@ -605,7 +603,6 @@ _hb_buffer_assert_gsubgpos_vars (hb_buffer_t *buffer)
{
HB_BUFFER_ASSERT_VAR (buffer, glyph_props);
HB_BUFFER_ASSERT_VAR (buffer, lig_props);
HB_BUFFER_ASSERT_VAR (buffer, syllable);
}
/* Make sure no one directly touches our props... */

View file

@@ -45,7 +45,7 @@ struct hb_ot_language_map_t
};
static const hb_ot_language_map_t
hb_ms_language_map[] =
_hb_ms_language_map[] =
{
{0x0001, "ar"}, /* ??? */
{0x0004, "zh"}, /* ??? */
@@ -298,7 +298,7 @@ hb_ms_language_map[] =
};
static const hb_ot_language_map_t
hb_mac_language_map[] =
_hb_mac_language_map[] =
{
{ 0, "en"}, /* English */
{ 1, "fr"}, /* French */
@@ -441,16 +441,16 @@ hb_language_t
_hb_ot_name_language_for_ms_code (unsigned int code)
{
return _hb_ot_name_language_for (code,
hb_ms_language_map,
ARRAY_LENGTH (hb_ms_language_map));
_hb_ms_language_map,
ARRAY_LENGTH (_hb_ms_language_map));
}
hb_language_t
_hb_ot_name_language_for_mac_code (unsigned int code)
{
return _hb_ot_name_language_for (code,
hb_mac_language_map,
ARRAY_LENGTH (hb_mac_language_map));
_hb_mac_language_map,
ARRAY_LENGTH (_hb_mac_language_map));
}
#endif /* HB_OT_NAME_LANGUAGE_STATIC_HH */

View file

@@ -156,7 +156,7 @@ struct NameRecord
};
static int
_hb_ot_name_entry_cmp_key (const void *pa, const void *pb)
_hb_ot_name_entry_cmp_key (const void *pa, const void *pb, bool exact)
{
const hb_ot_name_entry_t *a = (const hb_ot_name_entry_t *) pa;
const hb_ot_name_entry_t *b = (const hb_ot_name_entry_t *) pb;
@@ -169,8 +169,23 @@ _hb_ot_name_entry_cmp_key (const void *pa, const void *pb)
if (a->language == b->language) return 0;
if (!a->language) return -1;
if (!b->language) return +1;
return strcmp (hb_language_to_string (a->language),
hb_language_to_string (b->language));
const char *astr = hb_language_to_string (a->language);
const char *bstr = hb_language_to_string (b->language);
signed c = strcmp (astr, bstr);
if (!exact && c)
{
unsigned la = strlen (astr);
unsigned lb = strlen (bstr);
// 'a' is the user request, and 'b' is the string in the font.
// If eg. user asks for "en-us" and font has "en", approve.
if (la > lb && astr[lb] == '-' && !strncmp (astr, bstr, lb))
return 0;
}
return c;
}
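Editor's note: with the relaxed comparison above, a request for "en-us" can fall back to a name record tagged plain "en" when no exact match exists. A hedged usage sketch through the public API follows; the font path is a placeholder, not from the diff.

// Editor's sketch (hypothetical usage, not upstream code).
#include <stdio.h>
#include <hb.h>
#include <hb-ot.h>

int
main (void)
{
  hb_blob_t *blob = hb_blob_create_from_file ("font.ttf");   // placeholder path
  hb_face_t *face = hb_face_create (blob, 0);

  char name[64];
  unsigned int len = sizeof (name);
  // Asks for "en-us"; with the prefix fallback an "en" record can satisfy it.
  hb_ot_name_get_utf8 (face, HB_OT_NAME_ID_FONT_FAMILY,
                       hb_language_from_string ("en-us", -1),
                       &len, name);
  printf ("family: %.*s\n", (int) len, name);

  hb_face_destroy (face);
  hb_blob_destroy (blob);
  return 0;
}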
static int
@@ -178,7 +193,7 @@ _hb_ot_name_entry_cmp (const void *pa, const void *pb)
{
/* Compare by name_id, then language, then score, then index. */
int v = _hb_ot_name_entry_cmp_key (pa, pb);
int v = _hb_ot_name_entry_cmp_key (pa, pb, true);
if (v)
return v;
@@ -330,7 +345,18 @@ struct name
const hb_ot_name_entry_t *entry = hb_bsearch (key, (const hb_ot_name_entry_t *) this->names,
this->names.length,
sizeof (hb_ot_name_entry_t),
_hb_ot_name_entry_cmp_key);
_hb_ot_name_entry_cmp_key,
true);
if (!entry)
{
entry = hb_bsearch (key, (const hb_ot_name_entry_t *) this->names,
this->names.length,
sizeof (hb_ot_name_entry_t),
_hb_ot_name_entry_cmp_key,
false);
}
if (!entry)
return -1;

View file

@@ -224,9 +224,11 @@ struct OS2
*max_cp = hb_min (0xFFFFu, codepoints->get_max ());
}
/* https://github.com/Microsoft/Font-Validator/blob/520aaae/OTFontFileVal/val_OS2.cs#L644-L681 */
/* https://github.com/Microsoft/Font-Validator/blob/520aaae/OTFontFileVal/val_OS2.cs#L644-L681
* https://docs.microsoft.com/en-us/typography/legacy/legacy_arabic_fonts */
enum font_page_t
{
FONT_PAGE_NONE = 0,
FONT_PAGE_HEBREW = 0xB100, /* Hebrew Windows 3.1 font page */
FONT_PAGE_SIMP_ARABIC = 0xB200, /* Simplified Arabic Windows 3.1 font page */
FONT_PAGE_TRAD_ARABIC = 0xB300, /* Traditional Arabic Windows 3.1 font page */

View file

@@ -52,16 +52,16 @@ HB_INTERNAL bool postV2Tail::serialize (hb_serialize_context_t *c,
{
unsigned glyph_id = _.first;
unsigned new_index = _.second;
if (new_index < 258) continue;
if (copied_indices.has (new_index)) continue;
copied_indices.add (new_index);
hb_bytes_t s = reinterpret_cast<const post::accelerator_t*> (_post)->find_glyph_name (glyph_id);
HBUINT8 *o = c->allocate_size<HBUINT8> (HBUINT8::static_size * (s.length + 1));
if (unlikely (!o)) return_trace (false);
if (!c->check_assign (o[0], s.length, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);
memcpy (o+1, s.arrayZ, HBUINT8::static_size * s.length);
hb_memcpy (o+1, s.arrayZ, HBUINT8::static_size * s.length);
}
return_trace (true);
@@ -78,17 +78,19 @@ HB_INTERNAL bool postV2Tail::subset (hb_subset_context_t *c) const
post::accelerator_t _post (c->plan->source);
hb_hashmap_t<hb_bytes_t, unsigned, std::nullptr_t, unsigned, nullptr, (unsigned)-1> glyph_name_to_new_index;
hb_hashmap_t<hb_bytes_t, unsigned, true> glyph_name_to_new_index;
for (hb_codepoint_t new_gid = 0; new_gid < num_glyphs; new_gid++)
{
hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
unsigned old_index = glyphNameIndex[old_gid];
unsigned new_index;
const unsigned *new_index2;
if (old_index <= 257) new_index = old_index;
else if (old_new_index_map.has (old_index)) new_index = old_new_index_map.get (old_index);
else
else if (old_new_index_map.has (old_index, &new_index2))
{
new_index = *new_index2;
} else {
hb_bytes_t s = _post.find_glyph_name (old_gid);
new_index = glyph_name_to_new_index.get (s);
if (new_index == (unsigned)-1)

View file

@@ -1,603 +0,0 @@
#line 1 "hb-ot-shape-complex-indic-machine.rl"
/*
* Copyright © 2011,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Behdad Esfahbod
*/
#ifndef HB_OT_SHAPE_COMPLEX_INDIC_MACHINE_HH
#define HB_OT_SHAPE_COMPLEX_INDIC_MACHINE_HH
#include "hb.hh"
enum indic_syllable_type_t {
indic_consonant_syllable,
indic_vowel_syllable,
indic_standalone_cluster,
indic_symbol_cluster,
indic_broken_cluster,
indic_non_indic_cluster,
};
#line 45 "hb-ot-shape-complex-indic-machine.hh"
#define indic_syllable_machine_ex_A 10u
#define indic_syllable_machine_ex_C 1u
#define indic_syllable_machine_ex_CM 17u
#define indic_syllable_machine_ex_CS 19u
#define indic_syllable_machine_ex_DOTTEDCIRCLE 12u
#define indic_syllable_machine_ex_H 4u
#define indic_syllable_machine_ex_M 7u
#define indic_syllable_machine_ex_N 3u
#define indic_syllable_machine_ex_PLACEHOLDER 11u
#define indic_syllable_machine_ex_RS 13u
#define indic_syllable_machine_ex_Ra 16u
#define indic_syllable_machine_ex_Repha 15u
#define indic_syllable_machine_ex_SM 8u
#define indic_syllable_machine_ex_Symbol 18u
#define indic_syllable_machine_ex_V 2u
#define indic_syllable_machine_ex_ZWJ 6u
#define indic_syllable_machine_ex_ZWNJ 5u
#line 65 "hb-ot-shape-complex-indic-machine.hh"
static const unsigned char _indic_syllable_machine_trans_keys[] = {
8u, 8u, 4u, 8u, 5u, 7u, 5u, 8u, 4u, 8u, 6u, 6u, 16u, 16u, 4u, 8u,
4u, 13u, 4u, 8u, 8u, 8u, 5u, 7u, 5u, 8u, 4u, 8u, 6u, 6u, 16u, 16u,
4u, 8u, 4u, 13u, 4u, 13u, 4u, 13u, 8u, 8u, 5u, 7u, 5u, 8u, 4u, 8u,
6u, 6u, 16u, 16u, 4u, 8u, 4u, 8u, 4u, 13u, 8u, 8u, 5u, 7u, 5u, 8u,
4u, 8u, 6u, 6u, 16u, 16u, 4u, 8u, 4u, 8u, 5u, 8u, 8u, 8u, 1u, 19u,
3u, 17u, 3u, 17u, 4u, 17u, 1u, 16u, 5u, 10u, 5u, 10u, 10u, 10u, 5u, 10u,
1u, 16u, 1u, 16u, 1u, 16u, 3u, 10u, 4u, 10u, 5u, 10u, 4u, 10u, 5u, 10u,
3u, 10u, 5u, 10u, 3u, 17u, 3u, 17u, 3u, 17u, 3u, 17u, 4u, 17u, 1u, 16u,
3u, 17u, 3u, 17u, 4u, 17u, 1u, 16u, 5u, 10u, 10u, 10u, 5u, 10u, 1u, 16u,
1u, 16u, 3u, 10u, 4u, 10u, 5u, 10u, 4u, 10u, 5u, 10u, 5u, 10u, 3u, 10u,
5u, 10u, 3u, 17u, 3u, 17u, 4u, 8u, 3u, 17u, 3u, 17u, 4u, 17u, 1u, 16u,
3u, 17u, 1u, 16u, 5u, 10u, 10u, 10u, 5u, 10u, 1u, 16u, 1u, 16u, 3u, 10u,
4u, 10u, 5u, 10u, 3u, 17u, 4u, 10u, 5u, 10u, 5u, 10u, 3u, 10u, 5u, 10u,
3u, 17u, 4u, 13u, 4u, 8u, 3u, 17u, 3u, 17u, 4u, 17u, 1u, 16u, 3u, 17u,
1u, 16u, 5u, 10u, 10u, 10u, 5u, 10u, 1u, 16u, 1u, 16u, 3u, 10u, 4u, 10u,
5u, 10u, 3u, 17u, 4u, 10u, 5u, 10u, 5u, 10u, 3u, 10u, 5u, 10u, 1u, 17u,
3u, 17u, 1u, 17u, 4u, 13u, 5u, 10u, 10u, 10u, 5u, 10u, 1u, 16u, 3u, 10u,
5u, 10u, 5u, 10u, 10u, 10u, 5u, 10u, 1u, 16u, 0
};
static const char _indic_syllable_machine_key_spans[] = {
1, 5, 3, 4, 5, 1, 1, 5,
10, 5, 1, 3, 4, 5, 1, 1,
5, 10, 10, 10, 1, 3, 4, 5,
1, 1, 5, 5, 10, 1, 3, 4,
5, 1, 1, 5, 5, 4, 1, 19,
15, 15, 14, 16, 6, 6, 1, 6,
16, 16, 16, 8, 7, 6, 7, 6,
8, 6, 15, 15, 15, 15, 14, 16,
15, 15, 14, 16, 6, 1, 6, 16,
16, 8, 7, 6, 7, 6, 6, 8,
6, 15, 15, 5, 15, 15, 14, 16,
15, 16, 6, 1, 6, 16, 16, 8,
7, 6, 15, 7, 6, 6, 8, 6,
15, 10, 5, 15, 15, 14, 16, 15,
16, 6, 1, 6, 16, 16, 8, 7,
6, 15, 7, 6, 6, 8, 6, 17,
15, 17, 10, 6, 1, 6, 16, 8,
6, 6, 1, 6, 16
};
static const short _indic_syllable_machine_index_offsets[] = {
0, 2, 8, 12, 17, 23, 25, 27,
33, 44, 50, 52, 56, 61, 67, 69,
71, 77, 88, 99, 110, 112, 116, 121,
127, 129, 131, 137, 143, 154, 156, 160,
165, 171, 173, 175, 181, 187, 192, 194,
214, 230, 246, 261, 278, 285, 292, 294,
301, 318, 335, 352, 361, 369, 376, 384,
391, 400, 407, 423, 439, 455, 471, 486,
503, 519, 535, 550, 567, 574, 576, 583,
600, 617, 626, 634, 641, 649, 656, 663,
672, 679, 695, 711, 717, 733, 749, 764,
781, 797, 814, 821, 823, 830, 847, 864,
873, 881, 888, 904, 912, 919, 926, 935,
942, 958, 969, 975, 991, 1007, 1022, 1039,
1055, 1072, 1079, 1081, 1088, 1105, 1122, 1131,
1139, 1146, 1162, 1170, 1177, 1184, 1193, 1200,
1218, 1234, 1252, 1263, 1270, 1272, 1279, 1296,
1305, 1312, 1319, 1321, 1328
};
static const unsigned char _indic_syllable_machine_indicies[] = {
1, 0, 2, 3, 3, 4, 1, 0,
3, 3, 4, 0, 3, 3, 4, 1,
0, 5, 3, 3, 4, 1, 0, 6,
0, 7, 0, 8, 3, 3, 4, 1,
0, 2, 3, 3, 4, 1, 0, 0,
0, 0, 9, 0, 11, 12, 12, 13,
14, 10, 14, 10, 12, 12, 13, 10,
12, 12, 13, 14, 10, 15, 12, 12,
13, 14, 10, 16, 10, 17, 10, 18,
12, 12, 13, 14, 10, 11, 12, 12,
13, 14, 10, 10, 10, 10, 19, 10,
11, 12, 12, 13, 14, 10, 10, 10,
10, 20, 10, 22, 23, 23, 24, 25,
21, 21, 21, 21, 26, 21, 25, 21,
23, 23, 24, 27, 23, 23, 24, 25,
21, 28, 23, 23, 24, 25, 21, 29,
21, 30, 21, 22, 23, 23, 24, 25,
21, 31, 23, 23, 24, 25, 21, 33,
34, 34, 35, 36, 32, 32, 32, 32,
37, 32, 36, 32, 34, 34, 35, 32,
34, 34, 35, 36, 32, 38, 34, 34,
35, 36, 32, 39, 32, 40, 32, 33,
34, 34, 35, 36, 32, 41, 34, 34,
35, 36, 32, 23, 23, 24, 1, 0,
43, 42, 45, 46, 47, 48, 49, 50,
24, 25, 44, 51, 52, 52, 26, 44,
53, 54, 55, 56, 57, 44, 59, 60,
61, 62, 4, 1, 58, 63, 58, 58,
9, 58, 58, 58, 64, 58, 65, 60,
66, 66, 4, 1, 58, 63, 58, 58,
58, 58, 58, 58, 64, 58, 60, 66,
66, 4, 1, 58, 63, 58, 58, 58,
58, 58, 58, 64, 58, 45, 58, 58,
58, 67, 68, 58, 1, 58, 63, 58,
58, 58, 58, 58, 45, 58, 69, 69,
58, 1, 58, 63, 58, 63, 58, 58,
70, 58, 63, 58, 63, 58, 63, 58,
58, 58, 58, 63, 58, 45, 58, 71,
58, 69, 69, 58, 1, 58, 63, 58,
58, 58, 58, 58, 45, 58, 45, 58,
58, 58, 69, 69, 58, 1, 58, 63,
58, 58, 58, 58, 58, 45, 58, 45,
58, 58, 58, 69, 68, 58, 1, 58,
63, 58, 58, 58, 58, 58, 45, 58,
72, 7, 73, 74, 4, 1, 58, 63,
58, 7, 73, 74, 4, 1, 58, 63,
58, 73, 73, 4, 1, 58, 63, 58,
75, 76, 76, 4, 1, 58, 63, 58,
67, 77, 58, 1, 58, 63, 58, 67,
58, 69, 69, 58, 1, 58, 63, 58,
69, 77, 58, 1, 58, 63, 58, 59,
60, 66, 66, 4, 1, 58, 63, 58,
58, 58, 58, 58, 58, 64, 58, 59,
60, 61, 66, 4, 1, 58, 63, 58,
58, 9, 58, 58, 58, 64, 58, 79,
80, 81, 82, 13, 14, 78, 83, 78,
78, 20, 78, 78, 78, 84, 78, 85,
80, 86, 82, 13, 14, 78, 83, 78,
78, 78, 78, 78, 78, 84, 78, 80,
86, 82, 13, 14, 78, 83, 78, 78,
78, 78, 78, 78, 84, 78, 87, 78,
78, 78, 88, 89, 78, 14, 78, 83,
78, 78, 78, 78, 78, 87, 78, 90,
80, 91, 92, 13, 14, 78, 83, 78,
78, 19, 78, 78, 78, 84, 78, 93,
80, 86, 86, 13, 14, 78, 83, 78,
78, 78, 78, 78, 78, 84, 78, 80,
86, 86, 13, 14, 78, 83, 78, 78,
78, 78, 78, 78, 84, 78, 87, 78,
78, 78, 94, 89, 78, 14, 78, 83,
78, 78, 78, 78, 78, 87, 78, 83,
78, 78, 95, 78, 83, 78, 83, 78,
83, 78, 78, 78, 78, 83, 78, 87,
78, 96, 78, 94, 94, 78, 14, 78,
83, 78, 78, 78, 78, 78, 87, 78,
87, 78, 78, 78, 94, 94, 78, 14,
78, 83, 78, 78, 78, 78, 78, 87,
78, 97, 17, 98, 99, 13, 14, 78,
83, 78, 17, 98, 99, 13, 14, 78,
83, 78, 98, 98, 13, 14, 78, 83,
78, 100, 101, 101, 13, 14, 78, 83,
78, 88, 102, 78, 14, 78, 83, 78,
94, 94, 78, 14, 78, 83, 78, 88,
78, 94, 94, 78, 14, 78, 83, 78,
94, 102, 78, 14, 78, 83, 78, 90,
80, 86, 86, 13, 14, 78, 83, 78,
78, 78, 78, 78, 78, 84, 78, 90,
80, 91, 86, 13, 14, 78, 83, 78,
78, 19, 78, 78, 78, 84, 78, 11,
12, 12, 13, 14, 78, 79, 80, 86,
82, 13, 14, 78, 83, 78, 78, 78,
78, 78, 78, 84, 78, 104, 48, 105,
105, 24, 25, 103, 51, 103, 103, 103,
103, 103, 103, 55, 103, 48, 105, 105,
24, 25, 103, 51, 103, 103, 103, 103,
103, 103, 55, 103, 106, 103, 103, 103,
107, 108, 103, 25, 103, 51, 103, 103,
103, 103, 103, 106, 103, 47, 48, 109,
110, 24, 25, 103, 51, 103, 103, 26,
103, 103, 103, 55, 103, 106, 103, 103,
103, 111, 108, 103, 25, 103, 51, 103,
103, 103, 103, 103, 106, 103, 51, 103,
103, 112, 103, 51, 103, 51, 103, 51,
103, 103, 103, 103, 51, 103, 106, 103,
113, 103, 111, 111, 103, 25, 103, 51,
103, 103, 103, 103, 103, 106, 103, 106,
103, 103, 103, 111, 111, 103, 25, 103,
51, 103, 103, 103, 103, 103, 106, 103,
114, 30, 115, 116, 24, 25, 103, 51,
103, 30, 115, 116, 24, 25, 103, 51,
103, 115, 115, 24, 25, 103, 51, 103,
47, 48, 105, 105, 24, 25, 103, 51,
103, 103, 103, 103, 103, 103, 55, 103,
117, 118, 118, 24, 25, 103, 51, 103,
107, 119, 103, 25, 103, 51, 103, 111,
111, 103, 25, 103, 51, 103, 107, 103,
111, 111, 103, 25, 103, 51, 103, 111,
119, 103, 25, 103, 51, 103, 47, 48,
109, 105, 24, 25, 103, 51, 103, 103,
26, 103, 103, 103, 55, 103, 22, 23,
23, 24, 25, 120, 120, 120, 120, 26,
120, 22, 23, 23, 24, 25, 120, 122,
123, 124, 125, 35, 36, 121, 126, 121,
121, 37, 121, 121, 121, 127, 121, 128,
123, 125, 125, 35, 36, 121, 126, 121,
121, 121, 121, 121, 121, 127, 121, 123,
125, 125, 35, 36, 121, 126, 121, 121,
121, 121, 121, 121, 127, 121, 129, 121,
121, 121, 130, 131, 121, 36, 121, 126,
121, 121, 121, 121, 121, 129, 121, 122,
123, 124, 52, 35, 36, 121, 126, 121,
121, 37, 121, 121, 121, 127, 121, 129,
121, 121, 121, 132, 131, 121, 36, 121,
126, 121, 121, 121, 121, 121, 129, 121,
126, 121, 121, 133, 121, 126, 121, 126,
121, 126, 121, 121, 121, 121, 126, 121,
129, 121, 134, 121, 132, 132, 121, 36,
121, 126, 121, 121, 121, 121, 121, 129,
121, 129, 121, 121, 121, 132, 132, 121,
36, 121, 126, 121, 121, 121, 121, 121,
129, 121, 135, 40, 136, 137, 35, 36,
121, 126, 121, 40, 136, 137, 35, 36,
121, 126, 121, 136, 136, 35, 36, 121,
126, 121, 122, 123, 125, 125, 35, 36,
121, 126, 121, 121, 121, 121, 121, 121,
127, 121, 138, 139, 139, 35, 36, 121,
126, 121, 130, 140, 121, 36, 121, 126,
121, 132, 132, 121, 36, 121, 126, 121,
130, 121, 132, 132, 121, 36, 121, 126,
121, 132, 140, 121, 36, 121, 126, 121,
45, 46, 47, 48, 109, 105, 24, 25,
103, 51, 52, 52, 26, 103, 103, 45,
55, 103, 59, 141, 61, 62, 4, 1,
58, 63, 58, 58, 9, 58, 58, 58,
64, 58, 45, 46, 47, 48, 142, 143,
24, 144, 58, 145, 58, 52, 26, 58,
58, 45, 55, 58, 22, 146, 146, 24,
144, 58, 63, 58, 58, 26, 58, 145,
58, 58, 147, 58, 145, 58, 145, 58,
145, 58, 58, 58, 58, 145, 58, 45,
58, 71, 22, 146, 146, 24, 144, 58,
63, 58, 58, 58, 58, 58, 45, 58,
149, 148, 150, 150, 148, 43, 148, 151,
148, 150, 150, 148, 43, 148, 151, 148,
151, 148, 148, 152, 148, 151, 148, 151,
148, 151, 148, 148, 148, 148, 151, 148,
45, 120, 120, 120, 120, 120, 120, 120,
120, 120, 52, 120, 120, 120, 120, 45,
120, 0
};
static const unsigned char _indic_syllable_machine_trans_targs[] = {
39, 45, 50, 2, 51, 5, 6, 53,
57, 58, 39, 67, 11, 73, 68, 14,
15, 75, 80, 81, 84, 39, 89, 21,
95, 90, 98, 39, 24, 25, 97, 103,
39, 112, 30, 118, 113, 121, 33, 34,
120, 126, 39, 137, 39, 40, 60, 85,
87, 105, 106, 91, 107, 127, 128, 99,
135, 140, 39, 41, 43, 8, 59, 46,
54, 42, 1, 44, 48, 0, 47, 49,
52, 3, 4, 55, 7, 56, 39, 61,
63, 18, 83, 69, 76, 62, 9, 64,
78, 71, 65, 17, 82, 66, 10, 70,
72, 74, 12, 13, 77, 16, 79, 39,
86, 26, 88, 101, 93, 19, 104, 20,
92, 94, 96, 22, 23, 100, 27, 102,
39, 39, 108, 110, 28, 35, 114, 122,
109, 111, 124, 116, 29, 115, 117, 119,
31, 32, 123, 36, 125, 129, 130, 134,
131, 132, 37, 133, 39, 136, 38, 138,
139
};
static const char _indic_syllable_machine_trans_actions[] = {
1, 0, 2, 0, 2, 0, 0, 2,
2, 2, 3, 2, 0, 2, 0, 0,
0, 2, 2, 2, 2, 4, 2, 0,
5, 0, 5, 6, 0, 0, 5, 2,
7, 2, 0, 2, 0, 2, 0, 0,
2, 2, 8, 0, 11, 2, 2, 5,
0, 12, 12, 0, 2, 5, 2, 5,
2, 0, 13, 2, 0, 0, 2, 0,
2, 2, 0, 2, 2, 0, 0, 2,
2, 0, 0, 0, 0, 2, 14, 2,
0, 0, 2, 0, 2, 2, 0, 2,
2, 2, 2, 0, 2, 2, 0, 0,
2, 2, 0, 0, 0, 0, 2, 15,
5, 0, 5, 2, 2, 0, 5, 0,
0, 2, 5, 0, 0, 0, 0, 2,
16, 17, 2, 0, 0, 0, 0, 2,
2, 2, 2, 2, 0, 0, 2, 2,
0, 0, 0, 0, 2, 0, 18, 18,
0, 0, 0, 0, 19, 2, 0, 0,
0
};
static const char _indic_syllable_machine_to_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 9,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0
};
static const char _indic_syllable_machine_from_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 10,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0
};
static const short _indic_syllable_machine_eof_trans[] = {
1, 1, 1, 1, 1, 1, 1, 1,
1, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 22, 22, 28, 22, 22,
22, 22, 22, 22, 33, 33, 33, 33,
33, 33, 33, 33, 33, 1, 43, 0,
59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 59, 59, 59, 59,
59, 59, 59, 59, 79, 79, 79, 79,
79, 79, 79, 79, 79, 79, 79, 79,
79, 79, 79, 79, 79, 79, 79, 79,
79, 79, 79, 79, 79, 104, 104, 104,
104, 104, 104, 104, 104, 104, 104, 104,
104, 104, 104, 104, 104, 104, 104, 104,
104, 121, 121, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 104,
59, 59, 59, 59, 59, 59, 59, 149,
149, 149, 149, 149, 121
};
static const int indic_syllable_machine_start = 39;
static const int indic_syllable_machine_first_final = 39;
static const int indic_syllable_machine_error = -1;
static const int indic_syllable_machine_en_main = 39;
#line 46 "hb-ot-shape-complex-indic-machine.rl"
#line 102 "hb-ot-shape-complex-indic-machine.rl"
#define found_syllable(syllable_type) \
HB_STMT_START { \
if (0) fprintf (stderr, "syllable %d..%d %s\n", ts, te, #syllable_type); \
for (unsigned int i = ts; i < te; i++) \
info[i].syllable() = (syllable_serial << 4) | syllable_type; \
syllable_serial++; \
if (unlikely (syllable_serial == 16)) syllable_serial = 1; \
} HB_STMT_END
static void
find_syllables_indic (hb_buffer_t *buffer)
{
unsigned int p, pe, eof, ts, te, act;
int cs;
hb_glyph_info_t *info = buffer->info;
#line 440 "hb-ot-shape-complex-indic-machine.hh"
{
cs = indic_syllable_machine_start;
ts = 0;
te = 0;
act = 0;
}
#line 122 "hb-ot-shape-complex-indic-machine.rl"
p = 0;
pe = eof = buffer->len;
unsigned int syllable_serial = 1;
#line 456 "hb-ot-shape-complex-indic-machine.hh"
{
int _slen;
int _trans;
const unsigned char *_keys;
const unsigned char *_inds;
if ( p == pe )
goto _test_eof;
_resume:
switch ( _indic_syllable_machine_from_state_actions[cs] ) {
case 10:
#line 1 "NONE"
{ts = p;}
break;
#line 470 "hb-ot-shape-complex-indic-machine.hh"
}
_keys = _indic_syllable_machine_trans_keys + (cs<<1);
_inds = _indic_syllable_machine_indicies + _indic_syllable_machine_index_offsets[cs];
_slen = _indic_syllable_machine_key_spans[cs];
_trans = _inds[ _slen > 0 && _keys[0] <=( info[p].indic_category()) &&
( info[p].indic_category()) <= _keys[1] ?
( info[p].indic_category()) - _keys[0] : _slen ];
_eof_trans:
cs = _indic_syllable_machine_trans_targs[_trans];
if ( _indic_syllable_machine_trans_actions[_trans] == 0 )
goto _again;
switch ( _indic_syllable_machine_trans_actions[_trans] ) {
case 2:
#line 1 "NONE"
{te = p+1;}
break;
case 11:
#line 98 "hb-ot-shape-complex-indic-machine.rl"
{te = p+1;{ found_syllable (indic_non_indic_cluster); }}
break;
case 13:
#line 93 "hb-ot-shape-complex-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_consonant_syllable); }}
break;
case 14:
#line 94 "hb-ot-shape-complex-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_vowel_syllable); }}
break;
case 17:
#line 95 "hb-ot-shape-complex-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_standalone_cluster); }}
break;
case 19:
#line 96 "hb-ot-shape-complex-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_symbol_cluster); }}
break;
case 15:
#line 97 "hb-ot-shape-complex-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_broken_cluster); }}
break;
case 16:
#line 98 "hb-ot-shape-complex-indic-machine.rl"
{te = p;p--;{ found_syllable (indic_non_indic_cluster); }}
break;
case 1:
#line 93 "hb-ot-shape-complex-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_consonant_syllable); }}
break;
case 3:
#line 94 "hb-ot-shape-complex-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_vowel_syllable); }}
break;
case 7:
#line 95 "hb-ot-shape-complex-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_standalone_cluster); }}
break;
case 8:
#line 96 "hb-ot-shape-complex-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_symbol_cluster); }}
break;
case 4:
#line 97 "hb-ot-shape-complex-indic-machine.rl"
{{p = ((te))-1;}{ found_syllable (indic_broken_cluster); }}
break;
case 6:
#line 1 "NONE"
{ switch( act ) {
case 1:
{{p = ((te))-1;} found_syllable (indic_consonant_syllable); }
break;
case 5:
{{p = ((te))-1;} found_syllable (indic_broken_cluster); }
break;
case 6:
{{p = ((te))-1;} found_syllable (indic_non_indic_cluster); }
break;
}
}
break;
case 18:
#line 1 "NONE"
{te = p+1;}
#line 93 "hb-ot-shape-complex-indic-machine.rl"
{act = 1;}
break;
case 5:
#line 1 "NONE"
{te = p+1;}
#line 97 "hb-ot-shape-complex-indic-machine.rl"
{act = 5;}
break;
case 12:
#line 1 "NONE"
{te = p+1;}
#line 98 "hb-ot-shape-complex-indic-machine.rl"
{act = 6;}
break;
#line 573 "hb-ot-shape-complex-indic-machine.hh"
}
_again:
switch ( _indic_syllable_machine_to_state_actions[cs] ) {
case 9:
#line 1 "NONE"
{ts = 0;}
break;
#line 582 "hb-ot-shape-complex-indic-machine.hh"
}
if ( ++p != pe )
goto _resume;
_test_eof: {}
if ( p == eof )
{
if ( _indic_syllable_machine_eof_trans[cs] > 0 ) {
_trans = _indic_syllable_machine_eof_trans[cs] - 1;
goto _eof_trans;
}
}
}
#line 130 "hb-ot-shape-complex-indic-machine.rl"
}
#undef found_syllable
#endif /* HB_OT_SHAPE_COMPLEX_INDIC_MACHINE_HH */

View file

@@ -1,501 +0,0 @@
/* == Start of generated table == */
/*
* The following table is generated by running:
*
* ./gen-indic-table.py IndicSyllabicCategory.txt IndicPositionalCategory.txt Blocks.txt
*
* on files with these headers:
*
* # IndicSyllabicCategory-14.0.0.txt
* # Date: 2021-05-22, 01:01:00 GMT [KW, RP]
* # IndicPositionalCategory-14.0.0.txt
* # Date: 2021-05-22, 01:01:00 GMT [KW, RP]
* # Blocks-14.0.0.txt
* # Date: 2021-01-22, 23:29:00 GMT [KW]
*/
#include "hb.hh"
#ifndef HB_NO_OT_SHAPE
#include "hb-ot-shape-complex-indic.hh"
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-macros"
#define ISC_A INDIC_SYLLABIC_CATEGORY_AVAGRAHA /* 17 chars; Avagraha */
#define ISC_Bi INDIC_SYLLABIC_CATEGORY_BINDU /* 91 chars; Bindu */
#define ISC_BJN INDIC_SYLLABIC_CATEGORY_BRAHMI_JOINING_NUMBER /* 20 chars; Brahmi_Joining_Number */
#define ISC_Ca INDIC_SYLLABIC_CATEGORY_CANTILLATION_MARK /* 59 chars; Cantillation_Mark */
#define ISC_C INDIC_SYLLABIC_CATEGORY_CONSONANT /* 2206 chars; Consonant */
#define ISC_CD INDIC_SYLLABIC_CATEGORY_CONSONANT_DEAD /* 14 chars; Consonant_Dead */
#define ISC_CF INDIC_SYLLABIC_CATEGORY_CONSONANT_FINAL /* 70 chars; Consonant_Final */
#define ISC_CHL INDIC_SYLLABIC_CATEGORY_CONSONANT_HEAD_LETTER /* 5 chars; Consonant_Head_Letter */
#define ISC_CIP INDIC_SYLLABIC_CATEGORY_CONSONANT_INITIAL_POSTFIXED /* 1 chars; Consonant_Initial_Postfixed */
#define ISC_CK INDIC_SYLLABIC_CATEGORY_CONSONANT_KILLER /* 2 chars; Consonant_Killer */
#define ISC_CM INDIC_SYLLABIC_CATEGORY_CONSONANT_MEDIAL /* 31 chars; Consonant_Medial */
#define ISC_CP INDIC_SYLLABIC_CATEGORY_CONSONANT_PLACEHOLDER /* 22 chars; Consonant_Placeholder */
#define ISC_CPR INDIC_SYLLABIC_CATEGORY_CONSONANT_PRECEDING_REPHA /* 3 chars; Consonant_Preceding_Repha */
#define ISC_CPrf INDIC_SYLLABIC_CATEGORY_CONSONANT_PREFIXED /* 10 chars; Consonant_Prefixed */
#define ISC_CS INDIC_SYLLABIC_CATEGORY_CONSONANT_SUBJOINED /* 94 chars; Consonant_Subjoined */
#define ISC_CSR INDIC_SYLLABIC_CATEGORY_CONSONANT_SUCCEEDING_REPHA /* 1 chars; Consonant_Succeeding_Repha */
#define ISC_CWS INDIC_SYLLABIC_CATEGORY_CONSONANT_WITH_STACKER /* 8 chars; Consonant_With_Stacker */
#define ISC_GM INDIC_SYLLABIC_CATEGORY_GEMINATION_MARK /* 3 chars; Gemination_Mark */
#define ISC_IS INDIC_SYLLABIC_CATEGORY_INVISIBLE_STACKER /* 12 chars; Invisible_Stacker */
#define ISC_ZWJ INDIC_SYLLABIC_CATEGORY_JOINER /* 1 chars; Joiner */
#define ISC_ML INDIC_SYLLABIC_CATEGORY_MODIFYING_LETTER /* 1 chars; Modifying_Letter */
#define ISC_ZWNJ INDIC_SYLLABIC_CATEGORY_NON_JOINER /* 1 chars; Non_Joiner */
#define ISC_N INDIC_SYLLABIC_CATEGORY_NUKTA /* 32 chars; Nukta */
#define ISC_Nd INDIC_SYLLABIC_CATEGORY_NUMBER /* 491 chars; Number */
#define ISC_NJ INDIC_SYLLABIC_CATEGORY_NUMBER_JOINER /* 1 chars; Number_Joiner */
#define ISC_x INDIC_SYLLABIC_CATEGORY_OTHER /* 1 chars; Other */
#define ISC_PK INDIC_SYLLABIC_CATEGORY_PURE_KILLER /* 25 chars; Pure_Killer */
#define ISC_RS INDIC_SYLLABIC_CATEGORY_REGISTER_SHIFTER /* 2 chars; Register_Shifter */
#define ISC_SM INDIC_SYLLABIC_CATEGORY_SYLLABLE_MODIFIER /* 25 chars; Syllable_Modifier */
#define ISC_TL INDIC_SYLLABIC_CATEGORY_TONE_LETTER /* 7 chars; Tone_Letter */
#define ISC_TM INDIC_SYLLABIC_CATEGORY_TONE_MARK /* 42 chars; Tone_Mark */
#define ISC_V INDIC_SYLLABIC_CATEGORY_VIRAMA /* 27 chars; Virama */
#define ISC_Vs INDIC_SYLLABIC_CATEGORY_VISARGA /* 35 chars; Visarga */
#define ISC_Vo INDIC_SYLLABIC_CATEGORY_VOWEL /* 30 chars; Vowel */
#define ISC_M INDIC_SYLLABIC_CATEGORY_VOWEL_DEPENDENT /* 686 chars; Vowel_Dependent */
#define ISC_VI INDIC_SYLLABIC_CATEGORY_VOWEL_INDEPENDENT /* 486 chars; Vowel_Independent */
#define IMC_B INDIC_MATRA_CATEGORY_BOTTOM /* 352 chars; Bottom */
#define IMC_BL INDIC_MATRA_CATEGORY_BOTTOM_AND_LEFT /* 1 chars; Bottom_And_Left */
#define IMC_BR INDIC_MATRA_CATEGORY_BOTTOM_AND_RIGHT /* 4 chars; Bottom_And_Right */
#define IMC_L INDIC_MATRA_CATEGORY_LEFT /* 64 chars; Left */
#define IMC_LR INDIC_MATRA_CATEGORY_LEFT_AND_RIGHT /* 22 chars; Left_And_Right */
#define IMC_x INDIC_MATRA_CATEGORY_NOT_APPLICABLE /* 1 chars; Not_Applicable */
#define IMC_O INDIC_MATRA_CATEGORY_OVERSTRUCK /* 10 chars; Overstruck */
#define IMC_R INDIC_MATRA_CATEGORY_RIGHT /* 290 chars; Right */
#define IMC_T INDIC_MATRA_CATEGORY_TOP /* 418 chars; Top */
#define IMC_TB INDIC_MATRA_CATEGORY_TOP_AND_BOTTOM /* 10 chars; Top_And_Bottom */
#define IMC_TBL INDIC_MATRA_CATEGORY_TOP_AND_BOTTOM_AND_LEFT /* 2 chars; Top_And_Bottom_And_Left */
#define IMC_TBR INDIC_MATRA_CATEGORY_TOP_AND_BOTTOM_AND_RIGHT /* 1 chars; Top_And_Bottom_And_Right */
#define IMC_TL INDIC_MATRA_CATEGORY_TOP_AND_LEFT /* 6 chars; Top_And_Left */
#define IMC_TLR INDIC_MATRA_CATEGORY_TOP_AND_LEFT_AND_RIGHT /* 4 chars; Top_And_Left_And_Right */
#define IMC_TR INDIC_MATRA_CATEGORY_TOP_AND_RIGHT /* 13 chars; Top_And_Right */
#define IMC_VOL INDIC_MATRA_CATEGORY_VISUAL_ORDER_LEFT /* 19 chars; Visual_Order_Left */
#pragma GCC diagnostic pop
#define _(S,M) INDIC_COMBINE_CATEGORIES (ISC_##S, IMC_##M)
static const uint16_t indic_table[] = {
#define indic_offset_0x0028u 0
/* Basic Latin */
/* 0028 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(CP,x), _(x,x), _(x,x),
/* 0030 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0038 */ _(Nd,x), _(Nd,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
#define indic_offset_0x00b0u 24
/* Latin-1 Supplement */
/* 00B0 */ _(x,x), _(x,x), _(SM,x), _(SM,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 00B8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 00C0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 00C8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 00D0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(CP,x),
#define indic_offset_0x0900u 64
/* Devanagari */
/* 0900 */ _(Bi,T), _(Bi,T), _(Bi,T), _(Vs,R), _(VI,x), _(VI,x), _(VI,x), _(VI,x),
/* 0908 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x),
/* 0910 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0918 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0920 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0928 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0930 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0938 */ _(C,x), _(C,x), _(M,T), _(M,R), _(N,B), _(A,x), _(M,R), _(M,L),
/* 0940 */ _(M,R), _(M,B), _(M,B), _(M,B), _(M,B), _(M,T), _(M,T), _(M,T),
/* 0948 */ _(M,T), _(M,R), _(M,R), _(M,R), _(M,R), _(V,B), _(M,L), _(M,R),
/* 0950 */ _(x,x), _(Ca,T), _(Ca,B), _(x,T), _(x,T), _(M,T), _(M,B), _(M,B),
/* 0958 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0960 */ _(VI,x), _(VI,x), _(M,B), _(M,B), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0968 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0970 */ _(x,x), _(x,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x),
/* 0978 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* Bengali */
/* 0980 */ _(CP,x), _(Bi,T), _(Bi,R), _(Vs,R), _(x,x), _(VI,x), _(VI,x), _(VI,x),
/* 0988 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x), _(x,x), _(VI,x),
/* 0990 */ _(VI,x), _(x,x), _(x,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0998 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 09A0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 09A8 */ _(C,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 09B0 */ _(C,x), _(x,x), _(C,x), _(x,x), _(x,x), _(x,x), _(C,x), _(C,x),
/* 09B8 */ _(C,x), _(C,x), _(x,x), _(x,x), _(N,B), _(A,x), _(M,R), _(M,L),
/* 09C0 */ _(M,R), _(M,B), _(M,B), _(M,B), _(M,B), _(x,x), _(x,x), _(M,L),
/* 09C8 */ _(M,L), _(x,x), _(x,x), _(M,LR), _(M,LR), _(V,B), _(CD,x), _(x,x),
/* 09D0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(M,R),
/* 09D8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(C,x), _(C,x), _(x,x), _(C,x),
/* 09E0 */ _(VI,x), _(VI,x), _(M,B), _(M,B), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 09E8 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 09F0 */ _(C,x), _(C,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 09F8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(Bi,x), _(x,x), _(SM,T), _(x,x),
/* Gurmukhi */
/* 0A00 */ _(x,x), _(Bi,T), _(Bi,T), _(Vs,R), _(x,x), _(VI,x), _(VI,x), _(VI,x),
/* 0A08 */ _(VI,x), _(VI,x), _(VI,x), _(x,x), _(x,x), _(x,x), _(x,x), _(VI,x),
/* 0A10 */ _(VI,x), _(x,x), _(x,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0A18 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0A20 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0A28 */ _(C,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0A30 */ _(C,x), _(x,x), _(C,x), _(C,x), _(x,x), _(C,x), _(C,x), _(x,x),
/* 0A38 */ _(C,x), _(C,x), _(x,x), _(x,x), _(N,B), _(x,x), _(M,R), _(M,L),
/* 0A40 */ _(M,R), _(M,B), _(M,B), _(x,x), _(x,x), _(x,x), _(x,x), _(M,T),
/* 0A48 */ _(M,T), _(x,x), _(x,x), _(M,T), _(M,T), _(V,B), _(x,x), _(x,x),
/* 0A50 */ _(x,x), _(Ca,B), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0A58 */ _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(x,x), _(C,x), _(x,x),
/* 0A60 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0A68 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0A70 */ _(Bi,T), _(GM,T), _(CP,x), _(CP,x), _(x,x), _(CM,B), _(x,x), _(x,x),
/* 0A78 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* Gujarati */
/* 0A80 */ _(x,x), _(Bi,T), _(Bi,T), _(Vs,R), _(x,x), _(VI,x), _(VI,x), _(VI,x),
/* 0A88 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x), _(VI,x),
/* 0A90 */ _(VI,x), _(VI,x), _(x,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0A98 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0AA0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0AA8 */ _(C,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0AB0 */ _(C,x), _(x,x), _(C,x), _(C,x), _(x,x), _(C,x), _(C,x), _(C,x),
/* 0AB8 */ _(C,x), _(C,x), _(x,x), _(x,x), _(N,B), _(A,x), _(M,R), _(M,L),
/* 0AC0 */ _(M,R), _(M,B), _(M,B), _(M,B), _(M,B), _(M,T), _(x,x), _(M,T),
/* 0AC8 */ _(M,T), _(M,TR), _(x,x), _(M,R), _(M,R), _(V,B), _(x,x), _(x,x),
/* 0AD0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0AD8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0AE0 */ _(VI,x), _(VI,x), _(M,B), _(M,B), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0AE8 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0AF0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0AF8 */ _(x,x), _(C,x), _(Ca,T), _(Ca,T), _(Ca,T), _(N,T), _(N,T), _(N,T),
/* Oriya */
/* 0B00 */ _(x,x), _(Bi,T), _(Bi,R), _(Vs,R), _(x,x), _(VI,x), _(VI,x), _(VI,x),
/* 0B08 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x), _(x,x), _(VI,x),
/* 0B10 */ _(VI,x), _(x,x), _(x,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0B18 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0B20 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0B28 */ _(C,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0B30 */ _(C,x), _(x,x), _(C,x), _(C,x), _(x,x), _(C,x), _(C,x), _(C,x),
/* 0B38 */ _(C,x), _(C,x), _(x,x), _(x,x), _(N,B), _(A,x), _(M,R), _(M,T),
/* 0B40 */ _(M,R), _(M,B), _(M,B), _(M,B), _(M,B), _(x,x), _(x,x), _(M,L),
/* 0B48 */ _(M,TL), _(x,x), _(x,x), _(M,LR),_(M,TLR), _(V,B), _(x,x), _(x,x),
/* 0B50 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(M,T), _(M,T), _(M,TR),
/* 0B58 */ _(x,x), _(x,x), _(x,x), _(x,x), _(C,x), _(C,x), _(x,x), _(C,x),
/* 0B60 */ _(VI,x), _(VI,x), _(M,B), _(M,B), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0B68 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0B70 */ _(x,x), _(C,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0B78 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* Tamil */
/* 0B80 */ _(x,x), _(x,x), _(Bi,T), _(ML,x), _(x,x), _(VI,x), _(VI,x), _(VI,x),
/* 0B88 */ _(VI,x), _(VI,x), _(VI,x), _(x,x), _(x,x), _(x,x), _(VI,x), _(VI,x),
/* 0B90 */ _(VI,x), _(x,x), _(VI,x), _(VI,x), _(VI,x), _(C,x), _(x,x), _(x,x),
/* 0B98 */ _(x,x), _(C,x), _(C,x), _(x,x), _(C,x), _(x,x), _(C,x), _(C,x),
/* 0BA0 */ _(x,x), _(x,x), _(x,x), _(C,x), _(C,x), _(x,x), _(x,x), _(x,x),
/* 0BA8 */ _(C,x), _(C,x), _(C,x), _(x,x), _(x,x), _(x,x), _(C,x), _(C,x),
/* 0BB0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0BB8 */ _(C,x), _(C,x), _(x,x), _(x,x), _(x,x), _(x,x), _(M,R), _(M,R),
/* 0BC0 */ _(M,T), _(M,R), _(M,R), _(x,x), _(x,x), _(x,x), _(M,L), _(M,L),
/* 0BC8 */ _(M,L), _(x,x), _(M,LR), _(M,LR), _(M,LR), _(V,T), _(x,x), _(x,x),
/* 0BD0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(M,R),
/* 0BD8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0BE0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0BE8 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0BF0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0BF8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* Telugu */
/* 0C00 */ _(Bi,T), _(Bi,R), _(Bi,R), _(Vs,R), _(Bi,T), _(VI,x), _(VI,x), _(VI,x),
/* 0C08 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x), _(VI,x), _(VI,x),
/* 0C10 */ _(VI,x), _(x,x), _(VI,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0C18 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0C20 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0C28 */ _(C,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0C30 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0C38 */ _(C,x), _(C,x), _(x,x), _(x,x), _(N,B), _(A,x), _(M,T), _(M,T),
/* 0C40 */ _(M,T), _(M,R), _(M,R), _(M,R), _(M,R), _(x,x), _(M,T), _(M,T),
/* 0C48 */ _(M,TB), _(x,x), _(M,T), _(M,T), _(M,T), _(V,T), _(x,x), _(x,x),
/* 0C50 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(M,T), _(M,B), _(x,x),
/* 0C58 */ _(C,x), _(C,x), _(C,x), _(x,x), _(x,x), _(CD,x), _(x,x), _(x,x),
/* 0C60 */ _(VI,x), _(VI,x), _(M,B), _(M,B), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0C68 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0C70 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0C78 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* Kannada */
/* 0C80 */ _(Bi,x), _(Bi,T), _(Bi,R), _(Vs,R), _(x,x), _(VI,x), _(VI,x), _(VI,x),
/* 0C88 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x), _(VI,x), _(VI,x),
/* 0C90 */ _(VI,x), _(x,x), _(VI,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0C98 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0CA0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0CA8 */ _(C,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0CB0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(x,x), _(C,x), _(C,x), _(C,x),
/* 0CB8 */ _(C,x), _(C,x), _(x,x), _(x,x), _(N,B), _(A,x), _(M,R), _(M,T),
/* 0CC0 */ _(M,TR), _(M,R), _(M,R), _(M,R), _(M,R), _(x,x), _(M,T), _(M,TR),
/* 0CC8 */ _(M,TR), _(x,x), _(M,TR), _(M,TR), _(M,T), _(V,T), _(x,x), _(x,x),
/* 0CD0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(M,R), _(M,R), _(x,x),
/* 0CD8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(CD,x), _(C,x), _(x,x),
/* 0CE0 */ _(VI,x), _(VI,x), _(M,B), _(M,B), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0CE8 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0CF0 */ _(x,x),_(CWS,x),_(CWS,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0CF8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* Malayalam */
/* 0D00 */ _(Bi,T), _(Bi,T), _(Bi,R), _(Vs,R), _(Bi,x), _(VI,x), _(VI,x), _(VI,x),
/* 0D08 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x), _(VI,x), _(VI,x),
/* 0D10 */ _(VI,x), _(x,x), _(VI,x), _(VI,x), _(VI,x), _(C,x), _(C,x), _(C,x),
/* 0D18 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0D20 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0D28 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0D30 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0D38 */ _(C,x), _(C,x), _(C,x), _(PK,T), _(PK,T), _(A,x), _(M,R), _(M,R),
/* 0D40 */ _(M,R), _(M,R), _(M,R), _(M,B), _(M,B), _(x,x), _(M,L), _(M,L),
/* 0D48 */ _(M,L), _(x,x), _(M,LR), _(M,LR), _(M,LR), _(V,T),_(CPR,T), _(x,x),
/* 0D50 */ _(x,x), _(x,x), _(x,x), _(x,x), _(CD,x), _(CD,x), _(CD,x), _(M,R),
/* 0D58 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(VI,x),
/* 0D60 */ _(VI,x), _(VI,x), _(M,B), _(M,B), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0D68 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0D70 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 0D78 */ _(x,x), _(x,x), _(CD,x), _(CD,x), _(CD,x), _(CD,x), _(CD,x), _(CD,x),
/* Sinhala */
/* 0D80 */ _(x,x), _(Bi,T), _(Bi,R), _(Vs,R), _(x,x), _(VI,x), _(VI,x), _(VI,x),
/* 0D88 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x),
/* 0D90 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x),
/* 0D98 */ _(x,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0DA0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0DA8 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0DB0 */ _(C,x), _(C,x), _(x,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 0DB8 */ _(C,x), _(C,x), _(C,x), _(C,x), _(x,x), _(C,x), _(x,x), _(x,x),
/* 0DC0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(x,x),
/* 0DC8 */ _(x,x), _(x,x), _(V,T), _(x,x), _(x,x), _(x,x), _(x,x), _(M,R),
/* 0DD0 */ _(M,R), _(M,R), _(M,T), _(M,T), _(M,B), _(x,x), _(M,B), _(x,x),
/* 0DD8 */ _(M,R), _(M,L), _(M,TL), _(M,L), _(M,LR),_(M,TLR), _(M,LR), _(M,R),
/* 0DE0 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(Nd,x), _(Nd,x),
/* 0DE8 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 0DF0 */ _(x,x), _(x,x), _(M,R), _(M,R), _(x,x), _(x,x), _(x,x), _(x,x),
#define indic_offset_0x1000u 1336
/* Myanmar */
/* 1000 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1008 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1010 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1018 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1020 */ _(C,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x),
/* 1028 */ _(VI,x), _(VI,x), _(VI,x), _(M,R), _(M,R), _(M,T), _(M,T), _(M,B),
/* 1030 */ _(M,B), _(M,L), _(M,T), _(M,T), _(M,T), _(M,T), _(Bi,T), _(TM,B),
/* 1038 */ _(Vs,R), _(IS,x), _(PK,T), _(CM,R),_(CM,TBL), _(CM,B), _(CM,B), _(C,x),
/* 1040 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 1048 */ _(Nd,x), _(Nd,x), _(x,x), _(CP,x), _(x,x), _(x,x), _(CP,x), _(x,x),
/* 1050 */ _(C,x), _(C,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(M,R), _(M,R),
/* 1058 */ _(M,B), _(M,B), _(C,x), _(C,x), _(C,x), _(C,x), _(CM,B), _(CM,B),
/* 1060 */ _(CM,B), _(C,x), _(M,R), _(TM,R), _(TM,R), _(C,x), _(C,x), _(M,R),
/* 1068 */ _(M,R), _(TM,R), _(TM,R), _(TM,R), _(TM,R), _(TM,R), _(C,x), _(C,x),
/* 1070 */ _(C,x), _(M,T), _(M,T), _(M,T), _(M,T), _(C,x), _(C,x), _(C,x),
/* 1078 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1080 */ _(C,x), _(C,x), _(CM,B), _(M,R), _(M,L), _(M,T), _(M,T), _(TM,R),
/* 1088 */ _(TM,R), _(TM,R), _(TM,R), _(TM,R), _(TM,R), _(TM,B), _(C,x), _(TM,R),
/* 1090 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 1098 */ _(Nd,x), _(Nd,x), _(TM,R), _(TM,R), _(M,R), _(M,T), _(x,x), _(x,x),
#define indic_offset_0x1780u 1496
/* Khmer */
/* 1780 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1788 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1790 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 1798 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* 17A0 */ _(C,x), _(C,x), _(C,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x),
/* 17A8 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(VI,x),
/* 17B0 */ _(VI,x), _(VI,x), _(VI,x), _(VI,x), _(x,x), _(x,x), _(M,R), _(M,T),
/* 17B8 */ _(M,T), _(M,T), _(M,T), _(M,B), _(M,B), _(M,B), _(M,TL),_(M,TLR),
/* 17C0 */ _(M,LR), _(M,L), _(M,L), _(M,L), _(M,LR), _(M,LR), _(Bi,T), _(Vs,R),
/* 17C8 */ _(M,R), _(RS,T), _(RS,T), _(SM,T),_(CSR,T), _(CK,T), _(SM,T), _(SM,T),
/* 17D0 */ _(SM,T), _(PK,T), _(IS,x), _(SM,T), _(x,x), _(x,x), _(x,x), _(x,x),
/* 17D8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(A,x), _(SM,T), _(x,x), _(x,x),
/* 17E0 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* 17E8 */ _(Nd,x), _(Nd,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
#define indic_offset_0x1cd0u 1608
/* Vedic Extensions */
/* 1CD0 */ _(Ca,T), _(Ca,T), _(Ca,T), _(x,x), _(Ca,O), _(Ca,B), _(Ca,B), _(Ca,B),
/* 1CD8 */ _(Ca,B), _(Ca,B), _(Ca,T), _(Ca,T), _(Ca,B), _(Ca,B), _(Ca,B), _(Ca,B),
/* 1CE0 */ _(Ca,T), _(Ca,R), _(x,O), _(x,O), _(x,O), _(x,O), _(x,O), _(x,O),
/* 1CE8 */ _(x,O), _(x,x), _(x,x), _(x,x), _(x,x), _(x,B), _(x,x), _(x,x),
/* 1CF0 */ _(x,x), _(x,x), _(CD,x), _(CD,x), _(Ca,T),_(CWS,x),_(CWS,x), _(Ca,R),
/* 1CF8 */ _(Ca,x), _(Ca,x), _(CP,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
#define indic_offset_0x2008u 1656
/* General Punctuation */
/* 2008 */ _(x,x), _(x,x), _(x,x), _(x,x),_(ZWNJ,x),_(ZWJ,x), _(x,x), _(x,x),
/* 2010 */ _(CP,x), _(CP,x), _(CP,x), _(CP,x), _(CP,x), _(x,x), _(x,x), _(x,x),
#define indic_offset_0x2070u 1672
/* Superscripts and Subscripts */
/* 2070 */ _(x,x), _(x,x), _(x,x), _(x,x), _(SM,x), _(x,x), _(x,x), _(x,x),
/* 2078 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* 2080 */ _(x,x), _(x,x), _(SM,x), _(SM,x), _(SM,x), _(x,x), _(x,x), _(x,x),
#define indic_offset_0xa8e0u 1696
/* Devanagari Extended */
/* A8E0 */ _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T),
/* A8E8 */ _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T), _(Ca,T),
/* A8F0 */ _(Ca,T), _(Ca,T), _(Bi,x), _(Bi,x), _(x,x), _(x,x), _(x,x), _(x,x),
/* A8F8 */ _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(x,x), _(VI,x), _(M,T),
#define indic_offset_0xa9e0u 1728
/* Myanmar Extended-B */
/* A9E0 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(M,T), _(x,x), _(C,x),
/* A9E8 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* A9F0 */ _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x), _(Nd,x),
/* A9F8 */ _(Nd,x), _(Nd,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(x,x),
#define indic_offset_0xaa60u 1760
/* Myanmar Extended-A */
/* AA60 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* AA68 */ _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x), _(C,x),
/* AA70 */ _(x,x), _(C,x), _(C,x), _(C,x), _(CP,x), _(CP,x), _(CP,x), _(x,x),
/* AA78 */ _(x,x), _(x,x), _(C,x), _(TM,R), _(TM,T), _(TM,R), _(C,x), _(C,x),
}; /* Table items: 1792; occupancy: 71% */
uint16_t
hb_indic_get_categories (hb_codepoint_t u)
{
switch (u >> 12)
{
case 0x0u:
if (unlikely (u == 0x00A0u)) return _(CP,x);
if (hb_in_range<hb_codepoint_t> (u, 0x0028u, 0x003Fu)) return indic_table[u - 0x0028u + indic_offset_0x0028u];
if (hb_in_range<hb_codepoint_t> (u, 0x00B0u, 0x00D7u)) return indic_table[u - 0x00B0u + indic_offset_0x00b0u];
if (hb_in_range<hb_codepoint_t> (u, 0x0900u, 0x0DF7u)) return indic_table[u - 0x0900u + indic_offset_0x0900u];
break;
case 0x1u:
if (hb_in_range<hb_codepoint_t> (u, 0x1000u, 0x109Fu)) return indic_table[u - 0x1000u + indic_offset_0x1000u];
if (hb_in_range<hb_codepoint_t> (u, 0x1780u, 0x17EFu)) return indic_table[u - 0x1780u + indic_offset_0x1780u];
if (hb_in_range<hb_codepoint_t> (u, 0x1CD0u, 0x1CFFu)) return indic_table[u - 0x1CD0u + indic_offset_0x1cd0u];
break;
case 0x2u:
if (unlikely (u == 0x25CCu)) return _(CP,x);
if (hb_in_range<hb_codepoint_t> (u, 0x2008u, 0x2017u)) return indic_table[u - 0x2008u + indic_offset_0x2008u];
if (hb_in_range<hb_codepoint_t> (u, 0x2070u, 0x2087u)) return indic_table[u - 0x2070u + indic_offset_0x2070u];
break;
case 0xAu:
if (hb_in_range<hb_codepoint_t> (u, 0xA8E0u, 0xA8FFu)) return indic_table[u - 0xA8E0u + indic_offset_0xa8e0u];
if (hb_in_range<hb_codepoint_t> (u, 0xA9E0u, 0xA9FFu)) return indic_table[u - 0xA9E0u + indic_offset_0xa9e0u];
if (hb_in_range<hb_codepoint_t> (u, 0xAA60u, 0xAA7Fu)) return indic_table[u - 0xAA60u + indic_offset_0xaa60u];
break;
default:
break;
}
return _(x,x);
}
#undef _
#undef ISC_A
#undef ISC_Bi
#undef ISC_BJN
#undef ISC_Ca
#undef ISC_C
#undef ISC_CD
#undef ISC_CF
#undef ISC_CHL
#undef ISC_CIP
#undef ISC_CK
#undef ISC_CM
#undef ISC_CP
#undef ISC_CPR
#undef ISC_CPrf
#undef ISC_CS
#undef ISC_CSR
#undef ISC_CWS
#undef ISC_GM
#undef ISC_IS
#undef ISC_ZWJ
#undef ISC_ML
#undef ISC_ZWNJ
#undef ISC_N
#undef ISC_Nd
#undef ISC_NJ
#undef ISC_x
#undef ISC_PK
#undef ISC_RS
#undef ISC_SM
#undef ISC_TL
#undef ISC_TM
#undef ISC_V
#undef ISC_Vs
#undef ISC_Vo
#undef ISC_M
#undef ISC_VI
#undef IMC_B
#undef IMC_BL
#undef IMC_BR
#undef IMC_L
#undef IMC_LR
#undef IMC_x
#undef IMC_O
#undef IMC_R
#undef IMC_T
#undef IMC_TB
#undef IMC_TBL
#undef IMC_TBR
#undef IMC_TL
#undef IMC_TLR
#undef IMC_TR
#undef IMC_VOL
#endif
/* == End of generated table == */
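/* A minimal, non-upstream usage sketch: how a caller might unpack the value
 * returned by hb_indic_get_categories().  This assumes, as the _(S,M) pairs
 * above suggest, that the low byte carries the Indic syllabic category
 * (ISC_*) and the high byte the matra position (IMC_*); the function and
 * variable names below are illustrative only and are not part of the
 * generated table. */
#if 0
static void
sketch_decode_indic_props (hb_codepoint_t u)
{
  uint16_t props = hb_indic_get_categories (u);
  unsigned category = props & 0xFFu; /* syllabic category: consonant, vowel, matra, ... */
  unsigned position = props >> 8;    /* matra position: left, right, top, bottom, ... */

  /* Example from the table above: U+0A3E (GURMUKHI VOWEL SIGN AA) is _(M,R),
   * i.e. a dependent vowel (matra) positioned to the right of its base. */
  (void) category;
  (void) position;
}
#endif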
