Update to Harfbuzz 8.1.1
Change-Id: I886bc7d385e62ff0c9546c18bb7bb9273ef1cbd1
Reviewed-by: Qt CI Bot <qt_ci_bot@qt-project.org>
Reviewed-by: Volker Hilsheimer <volker.hilsheimer@qt.io>
(cherry picked from commit 9266b6d0914a31215d8505a363ecfd8f80b744eb)
Reviewed-by: Qt Cherry-pick Bot <cherrypick_bot@qt-project.org>
parent b32f16b169
commit ea50a2deb5
src/3rdparty/harfbuzz-ng/NEWS (vendored, 78 lines changed)
@@ -1,3 +1,81 @@
Overview of changes leading to 8.1.1
Wednesday, August 2, 2023
====================================
- Fix shaping of contextual rules at the end of string, introduced in 8.1.0
- Fix stack-overflow in repacker with malicious fonts.
- 30% speed up loading Noto Duployan font.


Overview of changes leading to 8.1.0
Tuesday, August 1, 2023
====================================
- Fix long-standing build issue with the AIX compiler and older Apple clang.

- Revert optimization that could cause timeout during subsetting with malicious fonts.

- More optimization work:
  - 45% speed up in shaping Noto Duployan font.
  - 10% speed up in subsetting Noto Duployan font.
  - Another 8% speed up in shaping Gulzar.
  - 5% speed up in loading Roboto.

- New API:
  +hb_ot_layout_collect_features_map()


Overview of changes leading to 8.0.1
Wednesday, July 12, 2023
====================================
- Build fix on 32-bit ARM.

- More speed optimizations:
  - 60% speed up in retain-gid (used for IFT) subsetting of SourceHanSans-VF.
  - 16% speed up in retain-gid (used for IFT) subsetting of NotoSansCJKkr.
  - 38% speed up in subsetting (beyond-64k) mega-merged Noto.


Overview of changes leading to 8.0.0
Sunday, July 9, 2023
====================================
- New, experimental, WebAssembly (WASM) shaper, that provides greater
  flexibility over OpenType/AAT/Graphite shaping, using WebAssembly embedded
  inside the font file. Currently WASM shaper is disabled by default and needs
  to be enabled at build time. For details, see:

  https://github.com/harfbuzz/harfbuzz/blob/main/docs/wasm-shaper.md

  For example fonts making use of the WASM shaper, see:

  https://github.com/harfbuzz/harfbuzz-wasm-examples

- Improvements to Experimental features introduced in earlier releases:
  - Support for subsetting beyond-64k and VarComposites fonts.
  - Support for instancing variable fonts with cubic “glyf” table.

- Many big speed optimizations:
  - Up to 89% speedup loading variable fonts for shaping.
  - Up to 88% speedup in small subsets of large (eg. CJK) fonts (both TTF and
    OTF), essential for Incremental Font Transfer (IFT).
  - Over 50% speedup in loading Roboto font for shaping.
  - Up to 40% speed up in loading (sanitizing) complex fonts.
  - 30% speed up in shaping Gulzar font.
  - Over 25% speedup in glyph loading Roboto font.
  - 10% speed up loading glyph shapes in VarComposite Hangul font.
  - hb-hashmap optimizations & hashing improvements.

- New macro HB_ALWAYS_INLINE. HarfBuzz now inlines functions more aggressively,
  which results in some speedup at the expense of bigger code size. To disable
  this feature define the macro to just inline.

- New API:
  +HB_CODEPOINT_INVALID
  +hb_ot_layout_get_baseline2()
  +hb_ot_layout_get_baseline_with_fallback2()
  +hb_ot_layout_get_font_extents()
  +hb_ot_layout_get_font_extents2()
  +hb_subset_input_set_axis_range()


Overview of changes leading to 7.3.0
Tuesday, May 9, 2023
====================================
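Aside on the HB_ALWAYS_INLINE entry above: the release note says the more aggressive inlining can be turned off by defining the macro to just inline. A minimal sketch of such an override, assuming it is supplied when the bundled HarfBuzz sources are compiled (the exact build wiring is an assumption, not part of this change):

    // Hypothetical build-time override, per the 8.0.0 release note above:
    // define HB_ALWAYS_INLINE to plain `inline` to trade shaping speed for smaller code,
    // e.g. by passing -DHB_ALWAYS_INLINE=inline to the compiler for harfbuzz-ng.
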
src/3rdparty/harfbuzz-ng/README.md (vendored, 7 lines changed)
@@ -5,13 +5,16 @@
[](https://www.codacy.com/gh/harfbuzz/harfbuzz/dashboard?utm_source=github.com&utm_medium=referral&utm_content=harfbuzz/harfbuzz&utm_campaign=Badge_Grade)
[](https://codecov.io/gh/harfbuzz/harfbuzz)
[](https://repology.org/project/harfbuzz/versions)
[](https://securityscorecards.dev/viewer/?uri=github.com/harfbuzz/harfbuzz)

# HarfBuzz

HarfBuzz is a text shaping engine. It primarily supports [OpenType][1], but also
[Apple Advanced Typography][2]. HarfBuzz is used in Android, Chrome,
ChromeOS, Firefox, GNOME, GTK+, KDE, LibreOffice, OpenJDK, PlayStation, Qt,
XeTeX, and other places.
ChromeOS, Firefox, GNOME, GTK+, KDE, Qt, LibreOffice, OpenJDK, XeTeX,
PlayStation, Microsoft Edge, Photoshop, Illustrator, InDesign,
and other places.

For bug reports, mailing list, and other information please visit:
src/3rdparty/harfbuzz-ng/qt_attribution.json (vendored, 4 lines changed)
@@ -7,8 +7,8 @@
"Description": "HarfBuzz is an OpenType text shaping engine.",
"Homepage": "http://harfbuzz.org",
"Version": "7.3.0",
"DownloadLocation": "https://github.com/harfbuzz/harfbuzz/releases/tag/7.3.0",
"Version": "8.1.1",
"DownloadLocation": "https://github.com/harfbuzz/harfbuzz/releases/tag/8.1.1",

"License": "MIT License",
"LicenseId": "MIT",
@@ -397,7 +397,6 @@ struct IndexSubtableRecord
TRACE_SERIALIZE (this);

auto *subtable = c->serializer->start_embed<IndexSubtable> ();
if (unlikely (!subtable)) return_trace (false);
if (unlikely (!c->serializer->extend_min (subtable))) return_trace (false);

auto *old_subtable = get_subtable (base);

@@ -545,7 +544,8 @@ struct IndexSubtableArray
const IndexSubtableRecord*>> *lookup /* OUT */) const
{
bool start_glyph_is_set = false;
for (hb_codepoint_t new_gid = 0; new_gid < c->plan->num_output_glyphs (); new_gid++)
unsigned num_glyphs = c->plan->num_output_glyphs ();
for (hb_codepoint_t new_gid = 0; new_gid < num_glyphs; new_gid++)
{
hb_codepoint_t old_gid;
if (unlikely (!c->plan->old_gid_for_new_gid (new_gid, &old_gid))) continue;

@@ -576,9 +576,6 @@ struct IndexSubtableArray
{
TRACE_SUBSET (this);

auto *dst = c->serializer->start_embed<IndexSubtableArray> ();
if (unlikely (!dst)) return_trace (false);

hb_vector_t<hb_pair_t<hb_codepoint_t, const IndexSubtableRecord*>> lookup;
build_lookup (c, bitmap_size_context, &lookup);
if (unlikely (!c->serializer->propagate_error (lookup)))

@@ -993,12 +990,10 @@ CBLC::subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);

auto *cblc_prime = c->serializer->start_embed<CBLC> ();

// Use a vector as a secondary buffer as the tables need to be built in parallel.
hb_vector_t<char> cbdt_prime;

if (unlikely (!cblc_prime)) return_trace (false);
auto *cblc_prime = c->serializer->start_embed<CBLC> ();
if (unlikely (!c->serializer->extend_min (cblc_prime))) return_trace (false);
cblc_prime->version = version;

@@ -409,7 +409,6 @@ struct ColorLine
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (unlikely (!out)) return_trace (false);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

if (!c->serializer->check_assign (out->extend, extend, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);

@@ -1434,6 +1433,7 @@ struct PaintComposite
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
c->check_ops (this->min_size) && // PainComposite can get exponential
src.sanitize (c, this) &&
backdrop.sanitize (c, this));
}

@@ -2167,7 +2167,7 @@ struct COLR
if (version == 0 && (!base_it || !layer_it))
return_trace (false);

COLR *colr_prime = c->serializer->start_embed<COLR> ();
auto *colr_prime = c->serializer->start_embed<COLR> ();
if (unlikely (!c->serializer->extend_min (colr_prime))) return_trace (false);

if (version == 0)

@@ -48,7 +48,6 @@ struct SBIXGlyph
{
TRACE_SERIALIZE (this);
SBIXGlyph* new_glyph = c->start_embed<SBIXGlyph> ();
if (unlikely (!new_glyph)) return_trace (nullptr);
if (unlikely (!c->extend_min (new_glyph))) return_trace (nullptr);

new_glyph->xOffset = xOffset;

@@ -143,7 +142,6 @@ struct SBIXStrike
unsigned int num_output_glyphs = c->plan->num_output_glyphs ();

auto* out = c->serializer->start_embed<SBIXStrike> ();
if (unlikely (!out)) return_trace (false);
auto snap = c->serializer->snapshot ();
if (unlikely (!c->serializer->extend (out, num_output_glyphs + 1))) return_trace (false);
out->ppem = ppem;

@@ -388,7 +386,6 @@ struct sbix
TRACE_SERIALIZE (this);

auto *out = c->serializer->start_embed<Array32OfOffset32To<SBIXStrike>> ();
if (unlikely (!out)) return_trace (false);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

hb_vector_t<Offset32To<SBIXStrike>*> new_strikes;

@@ -423,8 +420,6 @@ struct sbix
{
TRACE_SUBSET (this);

sbix *sbix_prime = c->serializer->start_embed<sbix> ();
if (unlikely (!sbix_prime)) return_trace (false);
if (unlikely (!c->serializer->embed (this->version))) return_trace (false);
if (unlikely (!c->serializer->embed (this->flags))) return_trace (false);

@@ -57,6 +57,9 @@ struct Coverage
public:
DEFINE_SIZE_UNION (2, format);

#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);

@@ -79,7 +79,7 @@ struct CoverageFormat1_3
{
if (glyphArray.len > glyphs->get_population () * hb_bit_storage ((unsigned) glyphArray.len) / 2)
{
for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
for (auto g : *glyphs)
if (get_coverage (g) != NOT_COVERED)
return true;
return false;

@@ -122,7 +122,7 @@ struct CoverageFormat2_4
{
if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
{
for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
for (auto g : *glyphs)
if (get_coverage (g) != NOT_COVERED)
return true;
return false;

@@ -49,8 +49,6 @@ struct AttachPoint : Array16Of<HBUINT16>
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out)) return_trace (false);

return_trace (out->serialize (c->serializer, + iter ()));
}
};

@@ -202,7 +200,6 @@ struct CaretValueFormat3
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out)) return_trace (false);
if (!c->serializer->embed (caretValueFormat)) return_trace (false);
if (!c->serializer->embed (coordinate)) return_trace (false);

@@ -442,6 +439,16 @@ struct MarkGlyphSetsFormat1
bool covers (unsigned int set_index, hb_codepoint_t glyph_id) const
{ return (this+coverage[set_index]).get_coverage (glyph_id) != NOT_COVERED; }

template <typename set_t>
void collect_coverage (hb_vector_t<set_t> &sets) const
{
for (const auto &offset : coverage)
{
const auto &cov = this+offset;
cov.collect_coverage (sets.push ());
}
}

bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);

@@ -495,6 +502,15 @@ struct MarkGlyphSets
}
}

template <typename set_t>
void collect_coverage (hb_vector_t<set_t> &sets) const
{
switch (u.format) {
case 1: u.format1.collect_coverage (sets); return;
default:return;
}
}

bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);

@@ -859,6 +875,10 @@ struct GDEF
hb_blob_destroy (table.get_blob ());
table = hb_blob_get_empty ();
}

#ifndef HB_NO_GDEF_CACHE
table->get_mark_glyph_sets ().collect_coverage (mark_glyph_set_digests);
#endif
}
~accelerator_t () { table.destroy (); }

@@ -882,8 +902,18 @@ struct GDEF

}

bool mark_set_covers (unsigned int set_index, hb_codepoint_t glyph_id) const
{
return
#ifndef HB_NO_GDEF_CACHE
mark_glyph_set_digests[set_index].may_have (glyph_id) &&
#endif
table->mark_set_covers (set_index, glyph_id);
}

hb_blob_ptr_t<GDEF> table;
#ifndef HB_NO_GDEF_CACHE
hb_vector_t<hb_set_digest_t> mark_glyph_set_digests;
mutable hb_cache_t<21, 3, 8> glyph_props_cache;
#endif
};

@@ -25,7 +25,9 @@ struct AnchorFormat3
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
if (unlikely (!c->check_struct (this))) return_trace (false);

return_trace (xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
}

void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,

@@ -35,9 +37,9 @@ struct AnchorFormat3
*x = font->em_fscale_x (xCoordinate);
*y = font->em_fscale_y (yCoordinate);

if (font->x_ppem || font->num_coords)
if ((font->x_ppem || font->num_coords) && xDeviceTable.sanitize (&c->sanitizer, this))
*x += (this+xDeviceTable).get_x_delta (font, c->var_store, c->var_store_cache);
if (font->y_ppem || font->num_coords)
if ((font->y_ppem || font->num_coords) && yDeviceTable.sanitize (&c->sanitizer, this))
*y += (this+yDeviceTable).get_y_delta (font, c->var_store, c->var_store_cache);
}

@@ -45,7 +47,6 @@ struct AnchorFormat3
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!out)) return_trace (false);
if (unlikely (!c->serializer->embed (format))) return_trace (false);
if (unlikely (!c->serializer->embed (xCoordinate))) return_trace (false);
if (unlikely (!c->serializer->embed (yCoordinate))) return_trace (false);
@@ -21,18 +21,25 @@ struct AnchorMatrix
if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
unsigned int count = rows * cols;
if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);

if (c->lazy_some_gpos)
return_trace (true);

for (unsigned int i = 0; i < count; i++)
if (!matrixZ[i].sanitize (c, this)) return_trace (false);
return_trace (true);
}

const Anchor& get_anchor (unsigned int row, unsigned int col,
unsigned int cols, bool *found) const
const Anchor& get_anchor (hb_ot_apply_context_t *c,
unsigned int row, unsigned int col,
unsigned int cols, bool *found) const
{
*found = false;
if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
*found = !matrixZ[row * cols + col].is_null ();
return this+matrixZ[row * cols + col];
auto &offset = matrixZ[row * cols + col];
if (unlikely (!offset.sanitize (&c->sanitizer, this))) return Null (Anchor);
*found = !offset.is_null ();
return this+offset;
}

template <typename Iterator,

@@ -91,7 +91,13 @@ struct CursivePosFormat1
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
if (unlikely (!coverage.sanitize (c, this)))
return_trace (false);

if (c->lazy_some_gpos)
return_trace (entryExitRecord.sanitize_shallow (c));
else
return_trace (entryExitRecord.sanitize (c, this));
}

bool intersects (const hb_set_t *glyphs) const

@@ -119,10 +125,11 @@ struct CursivePosFormat1
hb_buffer_t *buffer = c->buffer;

const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
if (!this_record.entryAnchor) return_trace (false);
if (!this_record.entryAnchor ||
unlikely (!this_record.entryAnchor.sanitize (&c->sanitizer, this))) return_trace (false);

hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset_fast (buffer->idx, 1);
skippy_iter.reset_fast (buffer->idx);
unsigned unsafe_from;
if (unlikely (!skippy_iter.prev (&unsafe_from)))
{

@@ -131,7 +138,8 @@ struct CursivePosFormat1
}

const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
if (!prev_record.exitAnchor)
if (!prev_record.exitAnchor ||
unlikely (!prev_record.exitAnchor.sanitize (&c->sanitizer, this)))
{
buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
return_trace (false);

@@ -200,8 +208,8 @@ struct CursivePosFormat1
* Arabic. */
unsigned int child = i;
unsigned int parent = j;
hb_position_t x_offset = entry_x - exit_x;
hb_position_t y_offset = entry_y - exit_y;
hb_position_t x_offset = roundf (entry_x - exit_x);
hb_position_t y_offset = roundf (entry_y - exit_y);
if (!(c->lookup_props & LookupFlag::RightToLeft))
{
unsigned int k = child;

@@ -278,7 +286,6 @@ struct CursivePosFormat1
const hb_map_t &glyph_map = *c->plan->glyph_map;

auto *out = c->serializer->start_embed (*this);
if (unlikely (!out)) return_trace (false);

auto it =
+ hb_zip (this+coverage, entryExitRecord)

@@ -28,7 +28,7 @@ struct MarkArray : Array16Of<MarkRecord> /* Array of MarkRecords--in Cove

const Anchor& mark_anchor = this + record.markAnchor;
bool found;
const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
const Anchor& glyph_anchor = anchors.get_anchor (c, glyph_index, mark_class, class_count, &found);
/* If this subtable doesn't have an anchor for this base and this class,
 * return false such that the subsequent subtables have a chance at it. */
if (unlikely (!found)) return_trace (false);

@@ -100,7 +100,7 @@ struct MarkMarkPosFormat1_2

/* now we search backwards for a suitable mark glyph until a non-mark glyph */
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset_fast (buffer->idx, 1);
skippy_iter.reset_fast (buffer->idx);
skippy_iter.set_lookup_props (c->lookup_props & ~(uint32_t)LookupFlag::IgnoreFlags);
unsigned unsafe_from;
if (unlikely (!skippy_iter.prev (&unsafe_from)))

@@ -110,7 +110,7 @@ struct PairPosFormat1_3
if (likely (index == NOT_COVERED)) return_trace (false);

hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset_fast (buffer->idx, 1);
skippy_iter.reset_fast (buffer->idx);
unsigned unsafe_to;
if (unlikely (!skippy_iter.next (&unsafe_to)))
{

@@ -54,8 +54,9 @@ struct PairPosFormat2_4
return_trace (c->check_range ((const void *) values,
count,
stride) &&
valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
(c->lazy_some_gpos ||
(valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride))));
}

bool intersects (const hb_set_t *glyphs) const

@@ -130,7 +131,7 @@ struct PairPosFormat2_4
if (likely (index == NOT_COVERED)) return_trace (false);

hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset_fast (buffer->idx, 1);
skippy_iter.reset_fast (buffer->idx);
unsigned unsafe_to;
if (unlikely (!skippy_iter.next (&unsafe_to)))
{

@@ -298,11 +299,13 @@ struct PairPosFormat2_4
out->valueFormat2 = out->valueFormat2.drop_device_table_flags ();
}

unsigned total_len = len1 + len2;
hb_vector_t<unsigned> class2_idxs (+ hb_range ((unsigned) class2Count) | hb_filter (klass2_map));
for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
{
for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
for (unsigned class2_idx : class2_idxs)
{
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * total_len;
valueFormat1.copy_values (c->serializer, out->valueFormat1, this, &values[idx], &c->plan->layout_variation_idx_delta_map);
valueFormat2.copy_values (c->serializer, out->valueFormat2, this, &values[idx + len1], &c->plan->layout_variation_idx_delta_map);
}

@@ -52,8 +52,9 @@ struct PairSet

unsigned int count = len;
const PairValueRecord *record = &firstPairValueRecord;
return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
return_trace (c->lazy_some_gpos ||
(closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride)));
}

bool intersects (const hb_set_t *glyphs,

@@ -22,7 +22,7 @@ struct PairValueRecord
ValueRecord values; /* Positioning data for the first glyph
 * followed by for second glyph */
public:
DEFINE_SIZE_ARRAY (Types::size, values);
DEFINE_SIZE_ARRAY (Types::HBGlyphID::static_size, values);

int cmp (hb_codepoint_t k) const
{ return secondGlyph.cmp (k); }

@@ -90,6 +90,7 @@ struct SinglePosFormat1

bool
position_single (hb_font_t *font,
hb_blob_t *table_blob,
hb_direction_t direction,
hb_codepoint_t gid,
hb_glyph_position_t &pos) const

@@ -100,7 +101,7 @@ struct SinglePosFormat1
/* This is ugly... */
hb_buffer_t buffer;
buffer.props.direction = direction;
OT::hb_ot_apply_context_t c (1, font, &buffer);
OT::hb_ot_apply_context_t c (1, font, &buffer, table_blob);

valueFormat.apply_value (&c, this, values, pos);
return true;

@@ -94,6 +94,7 @@ struct SinglePosFormat2

bool
position_single (hb_font_t *font,
hb_blob_t *table_blob,
hb_direction_t direction,
hb_codepoint_t gid,
hb_glyph_position_t &pos) const

@@ -105,7 +106,7 @@ struct SinglePosFormat2
/* This is ugly... */
hb_buffer_t buffer;
buffer.props.direction = direction;
OT::hb_ot_apply_context_t c (1, font, &buffer);
OT::hb_ot_apply_context_t c (1, font, &buffer, table_blob);

valueFormat.apply_value (&c, this,
&values[index * valueFormat.get_len ()],
@@ -118,21 +118,25 @@ struct ValueFormat : HBUINT16
auto *cache = c->var_store_cache;

/* pixel -> fractional pixel */
if (format & xPlaDevice) {
if (use_x_device) glyph_pos.x_offset += (base + get_device (values, &ret)).get_x_delta (font, store, cache);
if (format & xPlaDevice)
{
if (use_x_device) glyph_pos.x_offset += get_device (values, &ret, base, c->sanitizer).get_x_delta (font, store, cache);
values++;
}
if (format & yPlaDevice) {
if (use_y_device) glyph_pos.y_offset += (base + get_device (values, &ret)).get_y_delta (font, store, cache);
if (format & yPlaDevice)
{
if (use_y_device) glyph_pos.y_offset += get_device (values, &ret, base, c->sanitizer).get_y_delta (font, store, cache);
values++;
}
if (format & xAdvDevice) {
if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store, cache);
if (format & xAdvDevice)
{
if (horizontal && use_x_device) glyph_pos.x_advance += get_device (values, &ret, base, c->sanitizer).get_x_delta (font, store, cache);
values++;
}
if (format & yAdvDevice) {
if (format & yAdvDevice)
{
/* y_advance values grow downward but font-space grows upward, hence negation */
if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store, cache);
if (!horizontal && use_y_device) glyph_pos.y_advance -= get_device (values, &ret, base, c->sanitizer).get_y_delta (font, store, cache);
values++;
}
return ret;

@@ -174,6 +178,9 @@ struct ValueFormat : HBUINT16
if (format & xAdvance) x_adv = copy_value (c, new_format, xAdvance, *values++);
if (format & yAdvance) y_adv = copy_value (c, new_format, yAdvance, *values++);

if (!has_device ())
return;

if (format & xPlaDevice)
{
add_delta_to_value (x_placement, base, values, layout_variation_idx_delta_map);

@@ -233,14 +240,12 @@ struct ValueFormat : HBUINT16

if (format & ValueFormat::xAdvDevice)
{

(base + get_device (&(values[i]))).collect_variation_indices (c);
i++;
}

if (format & ValueFormat::yAdvDevice)
{

(base + get_device (&(values[i]))).collect_variation_indices (c);
i++;
}

@@ -277,10 +282,22 @@ struct ValueFormat : HBUINT16
{
return *static_cast<Offset16To<Device> *> (value);
}
static inline const Offset16To<Device>& get_device (const Value* value, bool *worked=nullptr)
static inline const Offset16To<Device>& get_device (const Value* value)
{
return *static_cast<const Offset16To<Device> *> (value);
}
static inline const Device& get_device (const Value* value,
bool *worked,
const void *base,
hb_sanitize_context_t &c)
{
if (worked) *worked |= bool (*value);
return *static_cast<const Offset16To<Device> *> (value);
auto &offset = *static_cast<const Offset16To<Device> *> (value);

if (unlikely (!offset.sanitize (&c, base)))
return Null(Device);

return base + offset;
}

void add_delta_to_value (HBINT16 *value,

@@ -340,25 +357,26 @@ struct ValueFormat : HBUINT16
bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
{
TRACE_SANITIZE (this);
return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));

if (unlikely (!c->check_range (values, get_size ()))) return_trace (false);

if (c->lazy_some_gpos)
return_trace (true);

return_trace (!has_device () || sanitize_value_devices (c, base, values));
}

bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
{
TRACE_SANITIZE (this);
unsigned int len = get_len ();
unsigned size = get_size ();

if (!c->check_range (values, count, get_size ())) return_trace (false);
if (!c->check_range (values, count, size)) return_trace (false);

if (!has_device ()) return_trace (true);
if (c->lazy_some_gpos)
return_trace (true);

for (unsigned int i = 0; i < count; i++) {
if (!sanitize_value_devices (c, base, values))
return_trace (false);
values += len;
}

return_trace (true);
return_trace (sanitize_values_stride_unsafe (c, base, values, count, size));
}

/* Just sanitize referenced Device tables. Doesn't check the values themselves. */

@@ -8,8 +8,6 @@ namespace OT {
namespace Layout {
namespace GSUB_impl {

typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> hb_codepoint_pair_t;

template<typename Iterator>
static void SingleSubst_serialize (hb_serialize_context_t *c,
Iterator it);

@@ -13,7 +13,7 @@ struct Ligature
public:
typename Types::HBGlyphID
ligGlyph; /* GlyphID of ligature to substitute */
HeadlessArrayOf<typename Types::HBGlyphID>
HeadlessArray16Of<typename Types::HBGlyphID>
component; /* Array of component GlyphIDs--start
 * with the second component--ordered
 * in writing direction */

@@ -72,19 +72,14 @@ struct LigatureSet
;
}

static bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED)
{
return true;
}

bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);

unsigned int num_ligs = ligature.len;

#ifndef HB_NO_OT_LIGATURES_FAST_PATH
if (HB_OPTIMIZE_SIZE_VAL || num_ligs <= 2)
#ifndef HB_NO_OT_RULESETS_FAST_PATH
if (HB_OPTIMIZE_SIZE_VAL || num_ligs <= 4)
#endif
{
slow:

@@ -97,10 +92,12 @@ struct LigatureSet
}

/* This version is optimized for speed by matching the first component
 * of the ligature here, instead of calling into the ligation code. */
 * of the ligature here, instead of calling into the ligation code.
 *
 * This is replicated in ChainRuleSet and RuleSet. */

hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (c->buffer->idx, 1);
skippy_iter.reset (c->buffer->idx);
skippy_iter.set_match_func (match_always, nullptr);
skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
unsigned unsafe_to;

@@ -118,6 +115,8 @@ struct LigatureSet
goto slow;
}
}
else
goto slow;

bool unsafe_to_concat = false;

@@ -125,7 +124,7 @@ struct LigatureSet
{
const auto &lig = this+ligature.arrayZ[i];
if (unlikely (lig.component.lenP1 <= 1) ||
lig.component[1] == first)
lig.component.arrayZ[0] == first)
{
if (lig.apply (c))
{

@@ -191,7 +191,6 @@ struct ReverseChainSingleSubstFormat1
TRACE_SERIALIZE (this);

auto *out = c->serializer->start_embed (this);
if (unlikely (!c->serializer->check_success (out))) return_trace (false);
if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
if (unlikely (!c->serializer->embed (this->coverage))) return_trace (false);

@@ -53,7 +53,7 @@ struct Sequence
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
c->buffer->message (c->font,
"replaced glyph at %u (multiple subtitution)",
"replaced glyph at %u (multiple substitution)",
c->buffer->idx - 1u);
}

@@ -90,24 +90,36 @@ struct CompositeGlyphRecord
static void transform (const float (&matrix)[4],
hb_array_t<contour_point_t> points)
{
auto arrayZ = points.arrayZ;
unsigned count = points.length;

if (matrix[0] != 1.f || matrix[1] != 0.f ||
matrix[2] != 0.f || matrix[3] != 1.f)
for (unsigned i = 0; i < count; i++)
arrayZ[i].transform (matrix);
for (auto &point : points)
point.transform (matrix);
}

static void translate (const contour_point_t &trans,
hb_array_t<contour_point_t> points)
{
auto arrayZ = points.arrayZ;
unsigned count = points.length;

if (trans.x != 0.f || trans.y != 0.f)
for (unsigned i = 0; i < count; i++)
arrayZ[i].translate (trans);
if (HB_OPTIMIZE_SIZE_VAL)
{
if (trans.x != 0.f || trans.y != 0.f)
for (auto &point : points)
point.translate (trans);
}
else
{
if (trans.x != 0.f && trans.y != 0.f)
for (auto &point : points)
point.translate (trans);
else
{
if (trans.x != 0.f)
for (auto &point : points)
point.x += trans.x;
else if (trans.y != 0.f)
for (auto &point : points)
point.y += trans.y;
}
}
}

void transform_points (hb_array_t<contour_point_t> points,

@@ -131,9 +143,8 @@ struct CompositeGlyphRecord
float matrix[4];
contour_point_t trans;
get_transformation (matrix, trans);
points.alloc (points.length + 4); // For phantom points
if (unlikely (!points.resize (points.length + 1))) return false;
points.arrayZ[points.length - 1] = trans;
if (unlikely (!points.alloc (points.length + 4))) return false; // For phantom points
points.push (trans);
return true;
}

@@ -382,7 +393,7 @@ struct CompositeGlyph
{
/* last 4 points in points_with_deltas are phantom points and should not be included */
if (i >= points_with_deltas.length - 4) {
free (o);
hb_free (o);
return false;
}
src/3rdparty/harfbuzz-ng/src/OT/glyf/Glyph.hh (vendored, 40 lines changed)
@@ -114,8 +114,8 @@ struct Glyph

if (type != EMPTY)
{
plan->bounds_width_map.set (new_gid, xMax - xMin);
plan->bounds_height_map.set (new_gid, yMax - yMin);
plan->bounds_width_vec[new_gid] = xMax - xMin;
plan->bounds_height_vec[new_gid] = yMax - yMin;
}

unsigned len = all_points.length;

@@ -124,10 +124,12 @@ struct Glyph
float topSideY = all_points[len - 2].y;
float bottomSideY = all_points[len - 1].y;

uint32_t hash = hb_hash (new_gid);

signed hori_aw = roundf (rightSideX - leftSideX);
if (hori_aw < 0) hori_aw = 0;
int lsb = roundf (xMin - leftSideX);
plan->hmtx_map.set (new_gid, hb_pair ((unsigned) hori_aw, lsb));
plan->hmtx_map.set_with_hash (new_gid, hash, hb_pair ((unsigned) hori_aw, lsb));
//flag value should be computed using non-empty glyphs
if (type != EMPTY && lsb != xMin)
plan->head_maxp_info.allXMinIsLsb = false;

@@ -135,7 +137,7 @@ struct Glyph
signed vert_aw = roundf (topSideY - bottomSideY);
if (vert_aw < 0) vert_aw = 0;
int tsb = roundf (topSideY - yMax);
plan->vmtx_map.set (new_gid, hb_pair ((unsigned) vert_aw, tsb));
plan->vmtx_map.set_with_hash (new_gid, hash, hb_pair ((unsigned) vert_aw, tsb));
}

bool compile_header_bytes (const hb_subset_plan_t *plan,

@@ -369,9 +371,11 @@ struct Glyph
}

#ifndef HB_NO_VAR
glyf_accelerator.gvar->apply_deltas_to_points (gid,
coords,
points.as_array ().sub_array (old_length));
if (coords)
glyf_accelerator.gvar->apply_deltas_to_points (gid,
coords,
points.as_array ().sub_array (old_length),
phantom_only && type == SIMPLE);
#endif

// mainly used by CompositeGlyph calculating new X/Y offset value so no need to extend it

@@ -379,7 +383,7 @@ struct Glyph
if (points_with_deltas != nullptr && depth == 0 && type == COMPOSITE)
{
if (unlikely (!points_with_deltas->resize (points.length))) return false;
points_with_deltas->copy_vector (points);
*points_with_deltas = points;
}

switch (type) {

@@ -417,14 +421,17 @@ struct Glyph
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];

float matrix[4];
contour_point_t default_trans;
item.get_transformation (matrix, default_trans);
if (comp_points) // Empty in case of phantom_only
{
float matrix[4];
contour_point_t default_trans;
item.get_transformation (matrix, default_trans);

/* Apply component transformation & translation (with deltas applied) */
item.transform_points (comp_points, matrix, points[comp_index]);
/* Apply component transformation & translation (with deltas applied) */
item.transform_points (comp_points, matrix, points[comp_index]);
}

if (item.is_anchored ())
if (item.is_anchored () && !phantom_only)
{
unsigned int p1, p2;
item.get_anchor_points (p1, p2);

@@ -466,7 +473,10 @@ struct Glyph
assert (record_points.length == item_num_points);

auto component_coords = coords;
if (item.is_reset_unspecified_axes ())
/* Copying coords is expensive; so we have put an arbitrary
 * limit on the max number of coords for now. */
if (item.is_reset_unspecified_axes () ||
coords.length > HB_GLYF_VAR_COMPOSITE_MAX_AXES)
component_coords = hb_array<int> ();

coord_setter_t coord_setter (component_coords);

@@ -154,10 +154,9 @@ struct SimpleGlyph
{
int v = 0;

unsigned count = points_.length;
for (unsigned i = 0; i < count; i++)
for (auto &point : points_)
{
unsigned flag = points_.arrayZ[i].flag;
unsigned flag = point.flag;
if (flag & short_flag)
{
if (unlikely (p + 1 > end)) return false;

@@ -175,7 +174,7 @@ struct SimpleGlyph
p += HBINT16::static_size;
}
}
points_.arrayZ[i].*m = v;
point.*m = v;
}
return true;
}

@@ -192,9 +191,10 @@ struct SimpleGlyph

unsigned old_length = points.length;
points.alloc (points.length + num_points + 4, true); // Allocate for phantom points, to avoid a possible copy
if (!points.resize (points.length + num_points, false)) return false;
if (unlikely (!points.resize (points.length + num_points, false))) return false;
auto points_ = points.as_array ().sub_array (old_length);
hb_memset (points_.arrayZ, 0, sizeof (contour_point_t) * num_points);
if (!phantom_only)
hb_memset (points_.arrayZ, 0, sizeof (contour_point_t) * num_points);
if (phantom_only) return true;

for (int i = 0; i < num_contours; i++)

@@ -22,7 +22,7 @@ struct SubsetGlyph

bool serialize (hb_serialize_context_t *c,
bool use_short_loca,
const hb_subset_plan_t *plan)
const hb_subset_plan_t *plan) const
{
TRACE_SERIALIZE (this);

@@ -40,7 +40,7 @@ struct SubsetGlyph
pad = 0;
while (pad_length > 0)
{
c->embed (pad);
(void) c->embed (pad);
pad_length--;
}

@@ -214,7 +214,7 @@ struct VarCompositeGlyphRecord
points.alloc (points.length + num_points + 4); // For phantom points
if (unlikely (!points.resize (points.length + num_points, false))) return false;
contour_point_t *rec_points = points.arrayZ + (points.length - num_points);
memset (rec_points, 0, num_points * sizeof (rec_points[0]));
hb_memset (rec_points, 0, num_points * sizeof (rec_points[0]));

unsigned fl = flags;

@@ -16,6 +16,8 @@ struct coord_setter_t

int& operator [] (unsigned idx)
{
if (unlikely (idx >= HB_GLYF_VAR_COMPOSITE_MAX_AXES))
return Crap(int);
if (coords.length < idx + 1)
coords.resize (idx + 1);
return coords[idx];

@@ -12,24 +12,44 @@ namespace OT {
namespace glyf_impl {

template<typename IteratorIn, typename IteratorOut,
hb_requires (hb_is_source_of (IteratorIn, unsigned int)),
hb_requires (hb_is_sink_of (IteratorOut, unsigned))>
template<typename IteratorIn, typename TypeOut,
hb_requires (hb_is_source_of (IteratorIn, unsigned int))>
static void
_write_loca (IteratorIn&& it, bool short_offsets, IteratorOut&& dest)
_write_loca (IteratorIn&& it,
const hb_sorted_vector_t<hb_codepoint_pair_t> new_to_old_gid_list,
bool short_offsets,
TypeOut *dest,
unsigned num_offsets)
{
unsigned right_shift = short_offsets ? 1 : 0;
unsigned int offset = 0;
dest << 0;
+ it
| hb_map ([=, &offset] (unsigned int padded_size)
{
offset += padded_size;
DEBUG_MSG (SUBSET, nullptr, "loca entry offset %u", offset);
return offset >> right_shift;
})
| hb_sink (dest)
;
unsigned offset = 0;
TypeOut value;
value = 0;
*dest++ = value;
hb_codepoint_t last = 0;
for (auto _ : new_to_old_gid_list)
{
hb_codepoint_t gid = _.first;
for (; last < gid; last++)
{
DEBUG_MSG (SUBSET, nullptr, "loca entry empty offset %u", offset);
*dest++ = value;
}

unsigned padded_size = *it++;
offset += padded_size;
DEBUG_MSG (SUBSET, nullptr, "loca entry gid %u offset %u padded-size %u", gid, offset, padded_size);
value = offset >> right_shift;
*dest++ = value;

last++; // Skip over gid
}
unsigned num_glyphs = num_offsets - 1;
for (; last < num_glyphs; last++)
{
DEBUG_MSG (SUBSET, nullptr, "loca entry empty offset %u", offset);
*dest++ = value;
}
}

static bool

@@ -67,11 +87,14 @@ _add_head_and_set_loca_version (hb_subset_plan_t *plan, bool use_short_loca)
template<typename Iterator,
hb_requires (hb_is_source_of (Iterator, unsigned int))>
static bool
_add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets, bool use_short_loca)
_add_loca_and_head (hb_subset_context_t *c,
Iterator padded_offsets,
bool use_short_loca)
{
unsigned num_offsets = padded_offsets.len () + 1;
unsigned num_offsets = c->plan->num_output_glyphs () + 1;
unsigned entry_size = use_short_loca ? 2 : 4;
char *loca_prime_data = (char *) hb_calloc (entry_size, num_offsets);

char *loca_prime_data = (char *) hb_malloc (entry_size * num_offsets);

if (unlikely (!loca_prime_data)) return false;

@@ -79,9 +102,9 @@ _add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets, bool use_s
entry_size, num_offsets, entry_size * num_offsets);

if (use_short_loca)
_write_loca (padded_offsets, true, hb_array ((HBUINT16 *) loca_prime_data, num_offsets));
_write_loca (padded_offsets, c->plan->new_to_old_gid_list, true, (HBUINT16 *) loca_prime_data, num_offsets);
else
_write_loca (padded_offsets, false, hb_array ((HBUINT32 *) loca_prime_data, num_offsets));
_write_loca (padded_offsets, c->plan->new_to_old_gid_list, false, (HBUINT32 *) loca_prime_data, num_offsets);

hb_blob_t *loca_blob = hb_blob_create (loca_prime_data,
entry_size * num_offsets,

@@ -89,8 +112,8 @@ _add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets, bool use_s
loca_prime_data,
hb_free);

bool result = plan->add_table (HB_OT_TAG_loca, loca_blob)
&& _add_head_and_set_loca_version (plan, use_short_loca);
bool result = c->plan->add_table (HB_OT_TAG_loca, loca_blob)
&& _add_head_and_set_loca_version (c->plan, use_short_loca);

hb_blob_destroy (loca_blob);
return result;
src/3rdparty/harfbuzz-ng/src/OT/glyf/glyf.hh (vendored, 82 lines changed)
@@ -85,75 +85,72 @@ struct glyf
return_trace (false);
}

glyf *glyf_prime = c->serializer->start_embed <glyf> ();
if (unlikely (!c->serializer->check_success (glyf_prime))) return_trace (false);

hb_font_t *font = nullptr;
if (c->plan->normalized_coords)
{
font = _create_font_for_instancing (c->plan);
if (unlikely (!font)) return false;
if (unlikely (!font))
return_trace (false);
}

hb_vector_t<unsigned> padded_offsets;
unsigned num_glyphs = c->plan->num_output_glyphs ();
if (unlikely (!padded_offsets.resize (num_glyphs)))
{
hb_font_destroy (font);
return false;
}
if (unlikely (!padded_offsets.alloc (c->plan->new_to_old_gid_list.length, true)))
return_trace (false);

hb_vector_t<glyf_impl::SubsetGlyph> glyphs;
if (!_populate_subset_glyphs (c->plan, font, glyphs))
{
hb_font_destroy (font);
return false;
return_trace (false);
}

if (font)
hb_font_destroy (font);

unsigned max_offset = 0;
for (unsigned i = 0; i < num_glyphs; i++)
for (auto &g : glyphs)
{
padded_offsets[i] = glyphs[i].padded_size ();
max_offset += padded_offsets[i];
unsigned size = g.padded_size ();
padded_offsets.push (size);
max_offset += size;
}

bool use_short_loca = false;
if (likely (!c->plan->force_long_loca))
use_short_loca = max_offset < 0x1FFFF;

if (!use_short_loca) {
for (unsigned i = 0; i < num_glyphs; i++)
padded_offsets[i] = glyphs[i].length ();
if (!use_short_loca)
{
padded_offsets.resize (0);
for (auto &g : glyphs)
padded_offsets.push (g.length ());
}

bool result = glyf_prime->serialize (c->serializer, glyphs.writer (), use_short_loca, c->plan);
auto *glyf_prime = c->serializer->start_embed <glyf> ();
bool result = glyf_prime->serialize (c->serializer, hb_iter (glyphs), use_short_loca, c->plan);
if (c->plan->normalized_coords && !c->plan->pinned_at_default)
_free_compiled_subset_glyphs (glyphs);

if (!result) return false;
if (unlikely (!c->serializer->check_success (glyf_impl::_add_loca_and_head (c,
padded_offsets.iter (),
use_short_loca))))
return_trace (false);

if (unlikely (c->serializer->in_error ())) return_trace (false);

return_trace (c->serializer->check_success (glyf_impl::_add_loca_and_head (c->plan,
padded_offsets.iter (),
use_short_loca)));
return result;
}

bool
_populate_subset_glyphs (const hb_subset_plan_t *plan,
hb_font_t *font,
hb_vector_t<glyf_impl::SubsetGlyph> &glyphs /* OUT */) const;
hb_vector_t<glyf_impl::SubsetGlyph>& glyphs /* OUT */) const;

hb_font_t *
_create_font_for_instancing (const hb_subset_plan_t *plan) const;

void _free_compiled_subset_glyphs (hb_vector_t<glyf_impl::SubsetGlyph> &glyphs) const
{
for (unsigned i = 0; i < glyphs.length; i++)
glyphs[i].free_compiled_bytes ();
for (auto &g : glyphs)
g.free_compiled_bytes ();
}

protected:

@@ -222,13 +219,14 @@ struct glyf_accelerator_t
if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, nullptr, nullptr, nullptr, true, true, phantom_only)))
return false;

unsigned count = all_points.length;
assert (count >= glyf_impl::PHANTOM_COUNT);
count -= glyf_impl::PHANTOM_COUNT;

if (consumer.is_consuming_contour_points ())
{
unsigned count = all_points.length;
assert (count >= glyf_impl::PHANTOM_COUNT);
count -= glyf_impl::PHANTOM_COUNT;
for (unsigned point_index = 0; point_index < count; point_index++)
consumer.consume_point (all_points[point_index]);
for (auto &point : all_points.as_array ().sub_array (0, count))
consumer.consume_point (point);
consumer.points_end ();
}

@@ -236,7 +234,7 @@ struct glyf_accelerator_t
contour_point_t *phantoms = consumer.get_phantoms_sink ();
if (phantoms)
for (unsigned i = 0; i < glyf_impl::PHANTOM_COUNT; ++i)
phantoms[i] = all_points[all_points.length - glyf_impl::PHANTOM_COUNT + i];
phantoms[i] = all_points.arrayZ[count + i];

return true;
}

@@ -299,6 +297,7 @@ struct glyf_accelerator_t
if (extents) bounds = contour_bounds_t ();
}

HB_ALWAYS_INLINE
void consume_point (const contour_point_t &point) { bounds.add (point); }
void points_end () { bounds.get_extents (font, extents, scaled); }

@@ -431,16 +430,17 @@ glyf::_populate_subset_glyphs (const hb_subset_plan_t *plan,
hb_vector_t<glyf_impl::SubsetGlyph>& glyphs /* OUT */) const
{
OT::glyf_accelerator_t glyf (plan->source);
unsigned num_glyphs = plan->num_output_glyphs ();
if (!glyphs.resize (num_glyphs)) return false;
if (!glyphs.alloc (plan->new_to_old_gid_list.length, true)) return false;

for (auto p : plan->glyph_map->iter ())
for (const auto &pair : plan->new_to_old_gid_list)
{
unsigned new_gid = p.second;
glyf_impl::SubsetGlyph& subset_glyph = glyphs.arrayZ[new_gid];
subset_glyph.old_gid = p.first;
hb_codepoint_t new_gid = pair.first;
hb_codepoint_t old_gid = pair.second;
glyf_impl::SubsetGlyph *p = glyphs.push ();
glyf_impl::SubsetGlyph& subset_glyph = *p;
subset_glyph.old_gid = old_gid;

if (unlikely (new_gid == 0 &&
if (unlikely (old_gid == 0 && new_gid == 0 &&
!(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE)) &&
!plan->normalized_coords)
subset_glyph.source_glyph = glyf_impl::Glyph ();

@@ -487,7 +487,7 @@ glyf::_create_font_for_instancing (const hb_subset_plan_t *plan) const
{
hb_variation_t var;
var.tag = _.first;
var.value = _.second;
var.value = _.second.middle;
vars.push (var);
}

@@ -21,11 +21,11 @@ struct path_builder_t
operator bool () const { return has_data; }

bool has_data = false;
float x = 0.;
float y = 0.;
float x;
float y;

optional_point_t lerp (optional_point_t p, float t)
{ return optional_point_t (x + t * (p.x - x), y + t * (p.y - y)); }
optional_point_t mid (optional_point_t p)
{ return optional_point_t ((x + p.x) * 0.5f, (y + p.y) * 0.5f); }
} first_oncurve, first_offcurve, first_offcurve2, last_offcurve, last_offcurve2;

path_builder_t (hb_font_t *font_, hb_draw_session_t &draw_session_) :

@@ -37,6 +37,7 @@ struct path_builder_t
 * https://stackoverflow.com/a/20772557
 *
 * Cubic support added. */
HB_ALWAYS_INLINE
void consume_point (const contour_point_t &point)
{
bool is_on_curve = point.flag & glyf_impl::SimpleGlyph::FLAG_ON_CURVE;

@@ -46,7 +47,7 @@ struct path_builder_t
bool is_cubic = !is_on_curve && (point.flag & glyf_impl::SimpleGlyph::FLAG_CUBIC);
#endif
optional_point_t p (font->em_fscalef_x (point.x), font->em_fscalef_y (point.y));
if (!first_oncurve)
if (unlikely (!first_oncurve))
{
if (is_on_curve)
{

@@ -62,7 +63,7 @@ struct path_builder_t
}
else if (first_offcurve)
{
optional_point_t mid = first_offcurve.lerp (p, .5f);
optional_point_t mid = first_offcurve.mid (p);
first_oncurve = mid;
last_offcurve = p;
draw_session->move_to (mid.x, mid.y);

@@ -98,7 +99,7 @@ struct path_builder_t
}
else
{
optional_point_t mid = last_offcurve.lerp (p, .5f);
optional_point_t mid = last_offcurve.mid (p);

if (is_cubic)
{

@@ -123,13 +124,13 @@ struct path_builder_t
}
}

if (point.is_end_point)
if (unlikely (point.is_end_point))
{
if (first_offcurve && last_offcurve)
{
optional_point_t mid = last_offcurve.lerp (first_offcurve2 ?
first_offcurve2 :
first_offcurve, .5f);
optional_point_t mid = last_offcurve.mid (first_offcurve2 ?
first_offcurve2 :
first_offcurve);
if (last_offcurve2)
draw_session->cubic_to (last_offcurve2.x, last_offcurve2.y,
last_offcurve.x, last_offcurve.y,
src/3rdparty/harfbuzz-ng/src/OT/name/name.hh (vendored, 19 lines changed)
@ -359,7 +359,7 @@ struct name
|
||||
record.nameID = ids.name_id;
|
||||
record.length = 0; // handled in NameRecord copy()
|
||||
record.offset = 0;
|
||||
memcpy (name_records, &record, NameRecord::static_size);
|
||||
hb_memcpy (name_records, &record, NameRecord::static_size);
|
||||
name_records++;
|
||||
}
|
||||
#endif
|
||||
@ -384,10 +384,7 @@ struct name
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
||||
name *name_prime = c->serializer->start_embed<name> ();
|
||||
if (unlikely (!name_prime)) return_trace (false);
|
||||
auto *name_prime = c->serializer->start_embed<name> ();
|
||||
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides =
|
||||
@ -436,7 +433,7 @@ struct name
|
||||
if (!name_table_overrides->is_empty ())
|
||||
{
|
||||
if (unlikely (!insert_name_records.alloc (name_table_overrides->get_population (), true)))
|
||||
return_trace (false);
|
||||
return false;
|
||||
for (const auto& record_ids : name_table_overrides->keys ())
|
||||
{
|
||||
if (name_table_overrides->get (record_ids).length == 0)
|
||||
@ -448,13 +445,13 @@ struct name
|
||||
}
|
||||
#endif
|
||||
|
||||
return (name_prime->serialize (c->serializer, it,
|
||||
std::addressof (this + stringOffset)
|
||||
return name_prime->serialize (c->serializer, it,
|
||||
std::addressof (this + stringOffset)
|
||||
#ifdef HB_EXPERIMENTAL_API
|
||||
, insert_name_records
|
||||
, name_table_overrides
|
||||
, insert_name_records
|
||||
, name_table_overrides
|
||||
#endif
|
||||
));
|
||||
);
|
||||
}
|
||||
|
||||
bool sanitize_records (hb_sanitize_context_t *c) const
|
||||
|
@ -72,7 +72,7 @@ struct ClassDef : public OT::ClassDef
|
||||
class_def_link->width = SmallTypes::size;
|
||||
class_def_link->objidx = class_def_prime_id;
|
||||
class_def_link->position = link_position;
|
||||
class_def_prime_vertex.parents.push (parent_id);
|
||||
class_def_prime_vertex.add_parent (parent_id);
|
||||
|
||||
return true;
|
||||
}
|
||||
@ -94,7 +94,13 @@ struct ClassDef : public OT::ClassDef
|
||||
}
|
||||
|
||||
hb_bytes_t class_def_copy = serializer.copy_bytes ();
|
||||
c.add_buffer ((char *) class_def_copy.arrayZ); // Give ownership to the context, it will cleanup the buffer.
|
||||
if (!class_def_copy.arrayZ) return false;
|
||||
// Give ownership to the context, it will cleanup the buffer.
|
||||
if (!c.add_buffer ((char *) class_def_copy.arrayZ))
|
||||
{
|
||||
hb_free ((char *) class_def_copy.arrayZ);
|
||||
return false;
|
||||
}
|
||||
|
||||
auto& obj = c.graph.vertices_[dest_obj].obj;
|
||||
obj.head = (char *) class_def_copy.arrayZ;
|
||||
|
@@ -96,7 +96,7 @@ struct Coverage : public OT::Layout::Common::Coverage
     coverage_link->width = SmallTypes::size;
     coverage_link->objidx = coverage_prime_id;
     coverage_link->position = link_position;
-    coverage_prime_vertex.parents.push (parent_id);
+    coverage_prime_vertex.add_parent (parent_id);

     return (Coverage*) coverage_prime_vertex.obj.head;
   }
@@ -118,7 +118,13 @@ struct Coverage : public OT::Layout::Common::Coverage
     }

     hb_bytes_t coverage_copy = serializer.copy_bytes ();
-    c.add_buffer ((char *) coverage_copy.arrayZ); // Give ownership to the context, it will cleanup the buffer.
+    if (!coverage_copy.arrayZ) return false;
+    // Give ownership to the context, it will cleanup the buffer.
+    if (!c.add_buffer ((char *) coverage_copy.arrayZ))
+    {
+      hb_free ((char *) coverage_copy.arrayZ);
+      return false;
+    }

     auto& obj = c.graph.vertices_[dest_obj].obj;
     obj.head = (char *) coverage_copy.arrayZ;
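Both hunks above replace a fire-and-forget `add_buffer` call with a checked ownership hand-off: the cloned table bytes are used only if the repacker context accepts them (and will free them later), otherwise they are freed on the spot. A minimal standalone sketch of that pattern, using hypothetical names (`BufferPool`, `clone_bytes`) rather than the HarfBuzz types:

// Illustration only: the "hand the clone to a longer-lived owner, or free it
// yourself on failure" pattern from the hunks above. Not HarfBuzz API.
#include <cstdlib>
#include <cstring>
#include <vector>

struct BufferPool
{
  std::vector<char *> buffers;

  // Returns false if the pointer could not be recorded; the caller then
  // still owns the allocation and must free it itself.
  bool add_buffer (char *b)
  {
    try { buffers.push_back (b); } catch (...) { return false; }
    return true;
  }

  ~BufferPool () { for (char *b : buffers) std::free (b); }
};

static char *clone_bytes (const char *src, size_t len)
{
  char *copy = (char *) std::malloc (len);
  if (copy) std::memcpy (copy, src, len);
  return copy;
}

static bool rewrite_table (BufferPool &pool, const char *src, size_t len)
{
  char *copy = clone_bytes (src, len);
  if (!copy) return false;        // allocation failed
  if (!pool.add_buffer (copy))    // ownership transfer failed:
  {
    std::free (copy);             // nothing will free it later, do it now
    return false;
  }
  // From here on the pool owns `copy` and frees it in its destructor.
  return true;
}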
220
src/3rdparty/harfbuzz-ng/src/graph/graph.hh
vendored
@@ -43,12 +43,28 @@ struct graph_t
   {
     hb_serialize_context_t::object_t obj;
     int64_t distance = 0 ;
-    int64_t space = 0 ;
-    hb_vector_t<unsigned> parents;
+    unsigned space = 0 ;
     unsigned start = 0;
     unsigned end = 0;
     unsigned priority = 0;
+   private:
+    unsigned incoming_edges_ = 0;
+    unsigned single_parent = (unsigned) -1;
+    hb_hashmap_t<unsigned, unsigned> parents;
+   public:
+
+    auto parents_iter () const HB_AUTO_RETURN
+    (
+      hb_concat (
+        hb_iter (&single_parent, single_parent != (unsigned) -1),
+        parents.keys_ref ()
+      )
+    )
+
+    bool in_error () const
+    {
+      return parents.in_error ();
+    }

     bool link_positions_valid (unsigned num_objects, bool removed_nil)
     {
@@ -143,7 +159,9 @@ struct graph_t
       hb_swap (a.obj, b.obj);
       hb_swap (a.distance, b.distance);
       hb_swap (a.space, b.space);
+      hb_swap (a.single_parent, b.single_parent);
       hb_swap (a.parents, b.parents);
+      hb_swap (a.incoming_edges_, b.incoming_edges_);
       hb_swap (a.start, b.start);
       hb_swap (a.end, b.end);
       hb_swap (a.priority, b.priority);
@@ -154,6 +172,7 @@ struct graph_t
     {
       hb_hashmap_t<unsigned, unsigned> result;

+      result.alloc (obj.real_links.length);
       for (const auto& l : obj.real_links) {
         result.set (l.position, l.objidx);
       }
@@ -163,22 +182,76 @@ struct graph_t

     bool is_shared () const
     {
-      return parents.length > 1;
+      return parents.get_population () > 1;
     }

     unsigned incoming_edges () const
     {
-      return parents.length;
+      if (HB_DEBUG_SUBSET_REPACK)
+      {
+        assert (incoming_edges_ == (single_parent != (unsigned) -1) +
+                (parents.values_ref () | hb_reduce (hb_add, 0)));
+      }
+      return incoming_edges_;
     }

+    void reset_parents ()
+    {
+      incoming_edges_ = 0;
+      single_parent = (unsigned) -1;
+      parents.reset ();
+    }
+
+    void add_parent (unsigned parent_index)
+    {
+      assert (parent_index != (unsigned) -1);
+      if (incoming_edges_ == 0)
+      {
+        single_parent = parent_index;
+        incoming_edges_ = 1;
+        return;
+      }
+      else if (single_parent != (unsigned) -1)
+      {
+        assert (incoming_edges_ == 1);
+        if (!parents.set (single_parent, 1))
+          return;
+        single_parent = (unsigned) -1;
+      }
+
+      unsigned *v;
+      if (parents.has (parent_index, &v))
+      {
+        (*v)++;
+        incoming_edges_++;
+      }
+      else if (parents.set (parent_index, 1))
+        incoming_edges_++;
+    }
+
     void remove_parent (unsigned parent_index)
     {
-      unsigned count = parents.length;
-      for (unsigned i = 0; i < count; i++)
+      if (parent_index == single_parent)
       {
-        if (parents.arrayZ[i] != parent_index) continue;
-        parents.remove_unordered (i);
-        break;
+        single_parent = (unsigned) -1;
+        incoming_edges_--;
+        return;
       }
+
+      unsigned *v;
+      if (parents.has (parent_index, &v))
+      {
+        incoming_edges_--;
+        if (*v > 1)
+          (*v)--;
+        else
+          parents.del (parent_index);
+
+        if (incoming_edges_ == 1)
+        {
+          single_parent = *parents.keys ();
+          parents.reset ();
+        }
+      }
     }

@@ -199,20 +272,46 @@ struct graph_t
       }
     }

-    void remap_parents (const hb_vector_t<unsigned>& id_map)
+    bool remap_parents (const hb_vector_t<unsigned>& id_map)
     {
-      unsigned count = parents.length;
-      for (unsigned i = 0; i < count; i++)
-        parents.arrayZ[i] = id_map[parents.arrayZ[i]];
+      if (single_parent != (unsigned) -1)
+      {
+        assert (single_parent < id_map.length);
+        single_parent = id_map[single_parent];
+        return true;
+      }
+
+      hb_hashmap_t<unsigned, unsigned> new_parents;
+      new_parents.alloc (parents.get_population ());
+      for (auto _ : parents)
+      {
+        assert (_.first < id_map.length);
+        assert (!new_parents.has (id_map[_.first]));
+        new_parents.set (id_map[_.first], _.second);
+      }
+
+      if (new_parents.in_error ())
+        return false;
+
+      parents = std::move (new_parents);
+      return true;
     }

     void remap_parent (unsigned old_index, unsigned new_index)
     {
-      unsigned count = parents.length;
-      for (unsigned i = 0; i < count; i++)
+      if (single_parent != (unsigned) -1)
       {
-        if (parents.arrayZ[i] == old_index)
-          parents.arrayZ[i] = new_index;
+        if (single_parent == old_index)
+          single_parent = new_index;
+        return;
       }
+
+      const unsigned *pv;
+      if (parents.has (old_index, &pv))
+      {
+        unsigned v = *pv;
+        parents.set (new_index, v);
+        parents.del (old_index);
+      }
     }
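The graph.hh hunks above replace the per-vertex `hb_vector_t<unsigned> parents` with an inline single-parent slot, a parent-to-edge-count hashmap for the shared case, and a cached `incoming_edges_` total. A sketch of the same bookkeeping with standard containers (the `ParentSet` name and `std::unordered_map` are stand-ins, not the HarfBuzz types):

// Illustration only: single-parent fast path plus spill-to-map, as in the
// vertex_t changes above. Not HarfBuzz code.
#include <cassert>
#include <unordered_map>

struct ParentSet
{
  static constexpr unsigned kNone = (unsigned) -1;
  unsigned incoming_edges_ = 0;     // total edge count, kept up to date
  unsigned single_parent_ = kNone;  // fast path: exactly one incoming edge
  std::unordered_map<unsigned, unsigned> parents_; // parent index -> edge count

  unsigned incoming_edges () const { return incoming_edges_; }

  void add_parent (unsigned p)
  {
    assert (p != kNone);
    if (incoming_edges_ == 0) { single_parent_ = p; incoming_edges_ = 1; return; }
    if (single_parent_ != kNone)   // second edge arrives: spill to the map
    {
      parents_[single_parent_] = 1;
      single_parent_ = kNone;
    }
    parents_[p]++;
    incoming_edges_++;
  }

  void remove_parent (unsigned p)
  {
    if (p == single_parent_) { single_parent_ = kNone; incoming_edges_ = 0; return; }
    auto it = parents_.find (p);
    if (it == parents_.end ()) return;
    incoming_edges_--;
    if (it->second > 1) it->second--;
    else parents_.erase (it);
    if (incoming_edges_ == 1)      // collapse back to the inline case
    {
      single_parent_ = parents_.begin ()->first;
      parents_.clear ();
    }
  }
};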
@ -359,7 +458,6 @@ struct graph_t
|
||||
|
||||
~graph_t ()
|
||||
{
|
||||
vertices_.fini ();
|
||||
for (char* b : buffers)
|
||||
hb_free (b);
|
||||
}
|
||||
@ -401,9 +499,10 @@ struct graph_t
|
||||
return vertices_[i].obj;
|
||||
}
|
||||
|
||||
void add_buffer (char* buffer)
|
||||
bool add_buffer (char* buffer)
|
||||
{
|
||||
buffers.push (buffer);
|
||||
return !buffers.in_error ();
|
||||
}
|
||||
|
||||
/*
|
||||
@ -419,7 +518,7 @@ struct graph_t
|
||||
link->width = 2;
|
||||
link->objidx = child_id;
|
||||
link->position = (char*) offset - (char*) v.obj.head;
|
||||
vertices_[child_id].parents.push (parent_id);
|
||||
vertices_[child_id].add_parent (parent_id);
|
||||
}
|
||||
|
||||
/*
|
||||
@ -465,7 +564,7 @@ struct graph_t
|
||||
{
|
||||
unsigned next_id = queue.pop_minimum().second;
|
||||
|
||||
hb_swap (sorted_graph[new_id], vertices_[next_id]);
|
||||
sorted_graph[new_id] = std::move (vertices_[next_id]);
|
||||
const vertex_t& next = sorted_graph[new_id];
|
||||
|
||||
if (unlikely (!check_success(new_id >= 0))) {
|
||||
@ -493,8 +592,8 @@ struct graph_t
|
||||
check_success (!queue.in_error ());
|
||||
check_success (!sorted_graph.in_error ());
|
||||
|
||||
remap_all_obj_indices (id_map, &sorted_graph);
|
||||
hb_swap (vertices_, sorted_graph);
|
||||
check_success (remap_all_obj_indices (id_map, &sorted_graph));
|
||||
vertices_ = std::move (sorted_graph);
|
||||
|
||||
if (!check_success (new_id == -1))
|
||||
print_orphaned_nodes ();
|
||||
@ -605,7 +704,7 @@ struct graph_t
|
||||
{
|
||||
unsigned child_idx = index_for_offset (node_idx, offset);
|
||||
auto& child = vertices_[child_idx];
|
||||
for (unsigned p : child.parents)
|
||||
for (unsigned p : child.parents_iter ())
|
||||
{
|
||||
if (p != node_idx) {
|
||||
return duplicate (node_idx, child_idx);
|
||||
@ -688,12 +787,15 @@ struct graph_t
|
||||
subgraph.set (root_idx, wide_parents (root_idx, parents));
|
||||
find_subgraph (root_idx, subgraph);
|
||||
}
|
||||
if (subgraph.in_error ())
|
||||
return false;
|
||||
|
||||
unsigned original_root_idx = root_idx ();
|
||||
hb_map_t index_map;
|
||||
bool made_changes = false;
|
||||
for (auto entry : subgraph.iter ())
|
||||
{
|
||||
assert (entry.first < vertices_.length);
|
||||
const auto& node = vertices_[entry.first];
|
||||
unsigned subgraph_incoming_edges = entry.second;
|
||||
|
||||
@ -732,8 +834,7 @@ struct graph_t
|
||||
remap_obj_indices (index_map, parents.iter (), true);
|
||||
|
||||
// Update roots set with new indices as needed.
|
||||
uint32_t next = HB_SET_VALUE_INVALID;
|
||||
while (roots.next (&next))
|
||||
for (auto next : roots)
|
||||
{
|
||||
const uint32_t *v;
|
||||
if (index_map.has (next, &v))
|
||||
@ -750,10 +851,10 @@ struct graph_t
|
||||
{
|
||||
for (const auto& link : vertices_[node_idx].obj.all_links ())
|
||||
{
|
||||
const uint32_t *v;
|
||||
hb_codepoint_t *v;
|
||||
if (subgraph.has (link.objidx, &v))
|
||||
{
|
||||
subgraph.set (link.objidx, *v + 1);
|
||||
(*v)++;
|
||||
continue;
|
||||
}
|
||||
subgraph.set (link.objidx, 1);
|
||||
@ -825,7 +926,7 @@ struct graph_t
|
||||
new_link->position = (const char*) new_offset - (const char*) new_v.obj.head;
|
||||
|
||||
auto& child = vertices_[child_id];
|
||||
child.parents.push (new_parent_idx);
|
||||
child.add_parent (new_parent_idx);
|
||||
|
||||
old_v.remove_real_link (child_id, old_offset);
|
||||
child.remove_parent (old_parent_idx);
|
||||
@ -869,18 +970,18 @@ struct graph_t
|
||||
clone->obj.tail = child.obj.tail;
|
||||
clone->distance = child.distance;
|
||||
clone->space = child.space;
|
||||
clone->parents.reset ();
|
||||
clone->reset_parents ();
|
||||
|
||||
unsigned clone_idx = vertices_.length - 2;
|
||||
for (const auto& l : child.obj.real_links)
|
||||
{
|
||||
clone->obj.real_links.push (l);
|
||||
vertices_[l.objidx].parents.push (clone_idx);
|
||||
vertices_[l.objidx].add_parent (clone_idx);
|
||||
}
|
||||
for (const auto& l : child.obj.virtual_links)
|
||||
{
|
||||
clone->obj.virtual_links.push (l);
|
||||
vertices_[l.objidx].parents.push (clone_idx);
|
||||
vertices_[l.objidx].add_parent (clone_idx);
|
||||
}
|
||||
|
||||
check_success (!clone->obj.real_links.in_error ());
|
||||
@ -1009,13 +1110,13 @@ struct graph_t
|
||||
{
|
||||
update_parents();
|
||||
|
||||
if (root().parents)
|
||||
if (root().incoming_edges ())
|
||||
// Root cannot have parents.
|
||||
return false;
|
||||
|
||||
for (unsigned i = 0; i < root_idx (); i++)
|
||||
{
|
||||
if (!vertices_[i].parents)
|
||||
if (!vertices_[i].incoming_edges ())
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
@ -1079,14 +1180,14 @@ struct graph_t
|
||||
parents_invalid = true;
|
||||
update_parents();
|
||||
|
||||
if (root().parents) {
|
||||
if (root().incoming_edges ()) {
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Root node has incoming edges.");
|
||||
}
|
||||
|
||||
for (unsigned i = 0; i < root_idx (); i++)
|
||||
{
|
||||
const auto& v = vertices_[i];
|
||||
if (!v.parents)
|
||||
if (!v.incoming_edges ())
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Node %u is orphaned.", i);
|
||||
}
|
||||
}
|
||||
@ -1118,6 +1219,8 @@ struct graph_t
|
||||
|
||||
unsigned space_for (unsigned index, unsigned* root = nullptr) const
|
||||
{
|
||||
loop:
|
||||
assert (index < vertices_.length);
|
||||
const auto& node = vertices_[index];
|
||||
if (node.space)
|
||||
{
|
||||
@ -1126,14 +1229,15 @@ struct graph_t
|
||||
return node.space;
|
||||
}
|
||||
|
||||
if (!node.parents)
|
||||
if (!node.incoming_edges ())
|
||||
{
|
||||
if (root)
|
||||
*root = index;
|
||||
return 0;
|
||||
}
|
||||
|
||||
return space_for (node.parents[0], root);
|
||||
index = *node.parents_iter ();
|
||||
goto loop;
|
||||
}
|
||||
|
||||
void err_other_error () { this->successful = false; }
|
||||
@ -1157,12 +1261,8 @@ struct graph_t
|
||||
unsigned wide_parents (unsigned node_idx, hb_set_t& parents) const
|
||||
{
|
||||
unsigned count = 0;
|
||||
hb_set_t visited;
|
||||
for (unsigned p : vertices_[node_idx].parents)
|
||||
for (unsigned p : vertices_[node_idx].parents_iter ())
|
||||
{
|
||||
if (visited.has (p)) continue;
|
||||
visited.add (p);
|
||||
|
||||
// Only real links can be wide
|
||||
for (const auto& l : vertices_[p].obj.real_links)
|
||||
{
|
||||
@ -1192,20 +1292,18 @@ struct graph_t
|
||||
unsigned count = vertices_.length;
|
||||
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
vertices_.arrayZ[i].parents.reset ();
|
||||
vertices_.arrayZ[i].reset_parents ();
|
||||
|
||||
for (unsigned p = 0; p < count; p++)
|
||||
{
|
||||
for (auto& l : vertices_.arrayZ[p].obj.all_links ())
|
||||
{
|
||||
vertices_[l.objidx].parents.push (p);
|
||||
}
|
||||
vertices_[l.objidx].add_parent (p);
|
||||
}
|
||||
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
// parents arrays must be accurate or downstream operations like cycle detection
|
||||
// and sorting won't work correctly.
|
||||
check_success (!vertices_.arrayZ[i].parents.in_error ());
|
||||
check_success (!vertices_.arrayZ[i].in_error ());
|
||||
|
||||
parents_invalid = false;
|
||||
}
|
||||
@ -1249,12 +1347,8 @@ struct graph_t
|
||||
// (such as a fibonacci queue) with a fast decrease priority.
|
||||
unsigned count = vertices_.length;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
{
|
||||
if (i == vertices_.length - 1)
|
||||
vertices_.arrayZ[i].distance = 0;
|
||||
else
|
||||
vertices_.arrayZ[i].distance = hb_int_max (int64_t);
|
||||
}
|
||||
vertices_.arrayZ[i].distance = hb_int_max (int64_t);
|
||||
vertices_.tail ().distance = 0;
|
||||
|
||||
hb_priority_queue_t queue;
|
||||
queue.insert (0, vertices_.length - 1);
|
||||
@ -1274,15 +1368,15 @@ struct graph_t
|
||||
{
|
||||
if (visited[link.objidx]) continue;
|
||||
|
||||
const auto& child = vertices_[link.objidx].obj;
|
||||
const auto& child = vertices_.arrayZ[link.objidx].obj;
|
||||
unsigned link_width = link.width ? link.width : 4; // treat virtual offsets as 32 bits wide
|
||||
int64_t child_weight = (child.tail - child.head) +
|
||||
((int64_t) 1 << (link_width * 8)) * (vertices_[link.objidx].space + 1);
|
||||
((int64_t) 1 << (link_width * 8)) * (vertices_.arrayZ[link.objidx].space + 1);
|
||||
int64_t child_distance = next_distance + child_weight;
|
||||
|
||||
if (child_distance < vertices_[link.objidx].distance)
|
||||
if (child_distance < vertices_.arrayZ[link.objidx].distance)
|
||||
{
|
||||
vertices_[link.objidx].distance = child_distance;
|
||||
vertices_.arrayZ[link.objidx].distance = child_distance;
|
||||
queue.insert (child_distance, link.objidx);
|
||||
}
|
||||
}
|
||||
@ -1310,7 +1404,7 @@ struct graph_t
|
||||
unsigned old_idx = link.objidx;
|
||||
link.objidx = new_idx;
|
||||
vertices_[old_idx].remove_parent (parent_idx);
|
||||
vertices_[new_idx].parents.push (parent_idx);
|
||||
vertices_[new_idx].add_parent (parent_idx);
|
||||
}
|
||||
|
||||
/*
|
||||
@ -1338,18 +1432,20 @@ struct graph_t
|
||||
/*
|
||||
* Updates all objidx's in all links using the provided mapping.
|
||||
*/
|
||||
void remap_all_obj_indices (const hb_vector_t<unsigned>& id_map,
|
||||
bool remap_all_obj_indices (const hb_vector_t<unsigned>& id_map,
|
||||
hb_vector_t<vertex_t>* sorted_graph) const
|
||||
{
|
||||
unsigned count = sorted_graph->length;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
{
|
||||
(*sorted_graph)[i].remap_parents (id_map);
|
||||
if (!(*sorted_graph)[i].remap_parents (id_map))
|
||||
return false;
|
||||
for (auto& link : sorted_graph->arrayZ[i].obj.all_links_writer ())
|
||||
{
|
||||
link.objidx = id_map[link.objidx];
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
@ -1380,7 +1476,7 @@ struct graph_t
|
||||
for (const auto& l : v.obj.all_links ())
|
||||
find_connected_nodes (l.objidx, targets, visited, connected);
|
||||
|
||||
for (unsigned p : v.parents)
|
||||
for (unsigned p : v.parents_iter ())
|
||||
find_connected_nodes (p, targets, visited, connected);
|
||||
}
|
||||
|
||||
|
@@ -52,7 +52,11 @@ unsigned gsubgpos_graph_context_t::create_node (unsigned size)
   if (!buffer)
     return -1;

-  add_buffer (buffer);
+  if (!add_buffer (buffer)) {
+    // Allocation did not get stored for freeing later.
+    hb_free (buffer);
+    return -1;
+  }

   return graph.new_node (buffer, buffer + size);
 }
@@ -47,9 +47,9 @@ struct gsubgpos_graph_context_t

   HB_INTERNAL unsigned create_node (unsigned size);

-  void add_buffer (char* buffer)
+  bool add_buffer (char* buffer)
   {
-    graph.add_buffer (buffer);
+    return graph.add_buffer (buffer);
   }

  private:
@@ -166,7 +166,7 @@ struct Lookup : public OT::Lookup
   }

   if (all_new_subtables) {
-    add_sub_tables (c, this_index, type, all_new_subtables);
+    return add_sub_tables (c, this_index, type, all_new_subtables);
   }

   return true;
@@ -184,7 +184,7 @@ struct Lookup : public OT::Lookup
     return sub_table->split_subtables (c, parent_idx, objidx);
   }

-  void add_sub_tables (gsubgpos_graph_context_t& c,
+  bool add_sub_tables (gsubgpos_graph_context_t& c,
                        unsigned this_index,
                        unsigned type,
                        hb_vector_t<hb_pair_t<unsigned, hb_vector_t<unsigned>>>& subtable_ids)
@@ -200,7 +200,12 @@ struct Lookup : public OT::Lookup
     size_t new_size = v.table_size ()
                       + new_subtable_count * OT::Offset16::static_size;
     char* buffer = (char*) hb_calloc (1, new_size);
-    c.add_buffer (buffer);
+    if (!buffer) return false;
+    if (!c.add_buffer (buffer))
+    {
+      hb_free (buffer);
+      return false;
+    }
     hb_memcpy (buffer, v.obj.head, v.table_size());

     v.obj.head = buffer;
@@ -220,7 +225,7 @@ struct Lookup : public OT::Lookup
       if (is_ext)
       {
         unsigned ext_id = create_extension_subtable (c, subtable_id, type);
-        c.graph.vertices_[subtable_id].parents.push (ext_id);
+        c.graph.vertices_[subtable_id].add_parent (ext_id);
         subtable_id = ext_id;
       }

@@ -229,7 +234,7 @@ struct Lookup : public OT::Lookup
       link->objidx = subtable_id;
       link->position = (char*) &new_lookup->subTable[offset_index++] -
                        (char*) new_lookup;
-      c.graph.vertices_[subtable_id].parents.push (this_index);
+      c.graph.vertices_[subtable_id].add_parent (this_index);
     }
   }

@@ -239,6 +244,7 @@ struct Lookup : public OT::Lookup
     // The head location of the lookup has changed, invalidating the lookups map entry
     // in the context. Update the map.
     c.lookups.set (this_index, new_lookup);
+    return true;
   }

   void fix_existing_subtable_links (gsubgpos_graph_context_t& c,
@@ -309,7 +315,7 @@ struct Lookup : public OT::Lookup
     // Make extension point at the subtable.
     auto& ext_vertex = c.graph.vertices_[ext_index];
     auto& subtable_vertex = c.graph.vertices_[subtable_index];
-    ext_vertex.parents.push (lookup_index);
+    ext_vertex.add_parent (lookup_index);
     subtable_vertex.remap_parent (lookup_index, ext_index);

     return true;
@@ -215,7 +215,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
     auto gid_and_class =
     + coverage->iter ()
     | hb_map_retains_sorting ([&] (hb_codepoint_t gid) {
-      return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1->get_class (gid));
+      return hb_codepoint_pair_t (gid, class_def_1->get_class (gid));
     })
     ;
     class_def_size_estimator_t estimator (gid_and_class);
@@ -386,14 +386,14 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
     auto klass_map =
     + coverage_table->iter ()
     | hb_map_retains_sorting ([&] (hb_codepoint_t gid) {
-      return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1_table->get_class (gid));
+      return hb_codepoint_pair_t (gid, class_def_1_table->get_class (gid));
     })
     | hb_filter ([&] (hb_codepoint_t klass) {
       return klass >= start && klass < end;
     }, hb_second)
-    | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, hb_codepoint_t> gid_and_class) {
+    | hb_map_retains_sorting ([&] (hb_codepoint_pair_t gid_and_class) {
       // Classes must be from 0...N so subtract start
-      return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid_and_class.first, gid_and_class.second - start);
+      return hb_codepoint_pair_t (gid_and_class.first, gid_and_class.second - start);
     })
     ;

@@ -419,7 +419,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
     class_def_link->width = SmallTypes::size;
     class_def_link->objidx = class_def_2_id;
     class_def_link->position = 10;
-    graph.vertices_[class_def_2_id].parents.push (pair_pos_prime_id);
+    graph.vertices_[class_def_2_id].add_parent (pair_pos_prime_id);
     graph.duplicate (pair_pos_prime_id, class_def_2_id);

     return pair_pos_prime_id;
@@ -519,7 +519,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
     auto klass_map =
     + coverage.table->iter ()
     | hb_map_retains_sorting ([&] (hb_codepoint_t gid) {
-      return hb_pair_t<hb_codepoint_t, hb_codepoint_t> (gid, class_def_1.table->get_class (gid));
+      return hb_codepoint_pair_t (gid, class_def_1.table->get_class (gid));
     })
     | hb_filter ([&] (hb_codepoint_t klass) {
       return klass < count;
@@ -226,6 +226,9 @@ inline hb_blob_t* serialize (const graph_t& graph)
 {
   hb_vector_t<char> buffer;
   size_t size = graph.total_size_in_bytes ();
+
+  if (!size) return hb_blob_get_empty ();
+
   if (!buffer.alloc (size)) {
     DEBUG_MSG (SUBSET_REPACK, nullptr, "Unable to allocate output buffer.");
     return nullptr;
@@ -27,7 +27,7 @@
 #include "gsubgpos-context.hh"
 #include "classdef-graph.hh"

-typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> gid_and_class_t;
+typedef hb_codepoint_pair_t gid_and_class_t;
 typedef hb_vector_t<gid_and_class_t> gid_and_class_list_t;

2
src/3rdparty/harfbuzz-ng/src/harfbuzz.cc
vendored
@@ -58,3 +58,5 @@
 #include "hb-ucd.cc"
 #include "hb-unicode.cc"
 #include "hb-uniscribe.cc"
+#include "hb-wasm-api.cc"
+#include "hb-wasm-shape.cc"
@@ -111,13 +111,13 @@ struct TrackData
         break;
       }
     }
-    if (!trackTableEntry) return 0.;
+    if (!trackTableEntry) return 0;

     /*
      * Choose size.
      */
     unsigned int sizes = nSizes;
-    if (!sizes) return 0.;
+    if (!sizes) return 0;
     if (sizes == 1) return trackTableEntry->get_value (base, 0, sizes);

     hb_array_t<const F16DOT16> size_table ((base+sizeTable).arrayZ, sizes);
183
src/3rdparty/harfbuzz-ng/src/hb-algs.hh
vendored
@ -87,6 +87,19 @@ static inline constexpr uint16_t hb_uint16_swap (uint16_t v)
|
||||
static inline constexpr uint32_t hb_uint32_swap (uint32_t v)
|
||||
{ return (hb_uint16_swap (v) << 16) | hb_uint16_swap (v >> 16); }
|
||||
|
||||
#ifndef HB_FAST_INT_ACCESS
|
||||
#if defined(__OPTIMIZE__) && \
|
||||
defined(__BYTE_ORDER) && \
|
||||
(__BYTE_ORDER == __BIG_ENDIAN || \
|
||||
(__BYTE_ORDER == __LITTLE_ENDIAN && \
|
||||
hb_has_builtin(__builtin_bswap16) && \
|
||||
hb_has_builtin(__builtin_bswap32)))
|
||||
#define HB_FAST_INT_ACCESS 1
|
||||
#else
|
||||
#define HB_FAST_INT_ACCESS 0
|
||||
#endif
|
||||
#endif
|
||||
|
||||
template <typename Type, int Bytes = sizeof (Type)>
|
||||
struct BEInt;
|
||||
template <typename Type>
|
||||
@ -101,21 +114,25 @@ struct BEInt<Type, 1>
|
||||
template <typename Type>
|
||||
struct BEInt<Type, 2>
|
||||
{
|
||||
struct __attribute__((packed)) packed_uint16_t { uint16_t v; };
|
||||
|
||||
public:
|
||||
BEInt () = default;
|
||||
constexpr BEInt (Type V) : v {uint8_t ((V >> 8) & 0xFF),
|
||||
uint8_t ((V ) & 0xFF)} {}
|
||||
|
||||
struct __attribute__((packed)) packed_uint16_t { uint16_t v; };
|
||||
constexpr operator Type () const
|
||||
{
|
||||
#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
|
||||
defined(__BYTE_ORDER) && \
|
||||
(__BYTE_ORDER == __BIG_ENDIAN || \
|
||||
(__BYTE_ORDER == __LITTLE_ENDIAN && \
|
||||
hb_has_builtin(__builtin_bswap16)))
|
||||
/* Spoon-feed the compiler a big-endian integer with alignment 1.
|
||||
* https://github.com/harfbuzz/harfbuzz/pull/1398 */
|
||||
BEInt (Type V)
|
||||
#if HB_FAST_INT_ACCESS
|
||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
||||
{ ((packed_uint16_t *) v)->v = __builtin_bswap16 (V); }
|
||||
#else /* __BYTE_ORDER == __BIG_ENDIAN */
|
||||
{ ((packed_uint16_t *) v)->v = V; }
|
||||
#endif
|
||||
#else
|
||||
: v {uint8_t ((V >> 8) & 0xFF),
|
||||
uint8_t ((V ) & 0xFF)} {}
|
||||
#endif
|
||||
|
||||
constexpr operator Type () const {
|
||||
#if HB_FAST_INT_ACCESS
|
||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
||||
return __builtin_bswap16 (((packed_uint16_t *) v)->v);
|
||||
#else /* __BYTE_ORDER == __BIG_ENDIAN */
|
||||
@ -146,22 +163,27 @@ struct BEInt<Type, 3>
|
||||
template <typename Type>
|
||||
struct BEInt<Type, 4>
|
||||
{
|
||||
struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
|
||||
|
||||
public:
|
||||
BEInt () = default;
|
||||
constexpr BEInt (Type V) : v {uint8_t ((V >> 24) & 0xFF),
|
||||
uint8_t ((V >> 16) & 0xFF),
|
||||
uint8_t ((V >> 8) & 0xFF),
|
||||
uint8_t ((V ) & 0xFF)} {}
|
||||
|
||||
struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
|
||||
BEInt (Type V)
|
||||
#if HB_FAST_INT_ACCESS
|
||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
||||
{ ((packed_uint32_t *) v)->v = __builtin_bswap32 (V); }
|
||||
#else /* __BYTE_ORDER == __BIG_ENDIAN */
|
||||
{ ((packed_uint32_t *) v)->v = V; }
|
||||
#endif
|
||||
#else
|
||||
: v {uint8_t ((V >> 24) & 0xFF),
|
||||
uint8_t ((V >> 16) & 0xFF),
|
||||
uint8_t ((V >> 8) & 0xFF),
|
||||
uint8_t ((V ) & 0xFF)} {}
|
||||
#endif
|
||||
|
||||
constexpr operator Type () const {
|
||||
#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
|
||||
defined(__BYTE_ORDER) && \
|
||||
(__BYTE_ORDER == __BIG_ENDIAN || \
|
||||
(__BYTE_ORDER == __LITTLE_ENDIAN && \
|
||||
hb_has_builtin(__builtin_bswap32)))
|
||||
/* Spoon-feed the compiler a big-endian integer with alignment 1.
|
||||
* https://github.com/harfbuzz/harfbuzz/pull/1398 */
|
||||
#if HB_FAST_INT_ACCESS
|
||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
||||
return __builtin_bswap32 (((packed_uint32_t *) v)->v);
|
||||
#else /* __BYTE_ORDER == __BIG_ENDIAN */
|
||||
@ -231,12 +253,119 @@ struct
|
||||
}
|
||||
HB_FUNCOBJ (hb_bool);
|
||||
|
||||
|
||||
/* The MIT License
|
||||
|
||||
Copyright (C) 2012 Zilong Tan (eric.zltan@gmail.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use, copy,
|
||||
modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
*/
|
||||
|
||||
|
||||
// Compression function for Merkle-Damgard construction.
|
||||
// This function is generated using the framework provided.
|
||||
#define mix(h) ( \
|
||||
(void) ((h) ^= (h) >> 23), \
|
||||
(void) ((h) *= 0x2127599bf4325c37ULL), \
|
||||
(h) ^= (h) >> 47)
|
||||
|
||||
static inline uint64_t fasthash64(const void *buf, size_t len, uint64_t seed)
|
||||
{
|
||||
struct __attribute__((packed)) packed_uint64_t { uint64_t v; };
|
||||
const uint64_t m = 0x880355f21e6d1965ULL;
|
||||
const packed_uint64_t *pos = (const packed_uint64_t *)buf;
|
||||
const packed_uint64_t *end = pos + (len / 8);
|
||||
const unsigned char *pos2;
|
||||
uint64_t h = seed ^ (len * m);
|
||||
uint64_t v;
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
if (((uintptr_t) pos & 7) == 0)
|
||||
{
|
||||
while (pos != end)
|
||||
{
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wcast-align"
|
||||
v = * (const uint64_t *) (pos++);
|
||||
#pragma GCC diagnostic pop
|
||||
h ^= mix(v);
|
||||
h *= m;
|
||||
}
|
||||
}
|
||||
else
|
||||
#endif
|
||||
{
|
||||
while (pos != end)
|
||||
{
|
||||
v = pos++->v;
|
||||
h ^= mix(v);
|
||||
h *= m;
|
||||
}
|
||||
}
|
||||
|
||||
pos2 = (const unsigned char*)pos;
|
||||
v = 0;
|
||||
|
||||
switch (len & 7) {
|
||||
case 7: v ^= (uint64_t)pos2[6] << 48; HB_FALLTHROUGH;
|
||||
case 6: v ^= (uint64_t)pos2[5] << 40; HB_FALLTHROUGH;
|
||||
case 5: v ^= (uint64_t)pos2[4] << 32; HB_FALLTHROUGH;
|
||||
case 4: v ^= (uint64_t)pos2[3] << 24; HB_FALLTHROUGH;
|
||||
case 3: v ^= (uint64_t)pos2[2] << 16; HB_FALLTHROUGH;
|
||||
case 2: v ^= (uint64_t)pos2[1] << 8; HB_FALLTHROUGH;
|
||||
case 1: v ^= (uint64_t)pos2[0];
|
||||
h ^= mix(v);
|
||||
h *= m;
|
||||
}
|
||||
|
||||
return mix(h);
|
||||
}
|
||||
|
||||
static inline uint32_t fasthash32(const void *buf, size_t len, uint32_t seed)
|
||||
{
|
||||
// the following trick converts the 64-bit hashcode to Fermat
|
||||
// residue, which shall retain information from both the higher
|
||||
// and lower parts of hashcode.
|
||||
uint64_t h = fasthash64(buf, len, seed);
|
||||
return h - (h >> 32);
|
||||
}
|
||||
|
||||
struct
|
||||
{
|
||||
private:
|
||||
|
||||
template <typename T> constexpr auto
|
||||
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
|
||||
impl (const T& v, hb_priority<2>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
|
||||
|
||||
// Horrible: std:hash() of integers seems to be identity in gcc / clang?!
|
||||
// https://github.com/harfbuzz/harfbuzz/pull/4228
|
||||
//
|
||||
// For performance characteristics see:
|
||||
// https://github.com/harfbuzz/harfbuzz/pull/4228#issuecomment-1565079537
|
||||
template <typename T,
|
||||
hb_enable_if (std::is_integral<T>::value && sizeof (T) <= sizeof (uint32_t))> constexpr auto
|
||||
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, (uint32_t) v * 2654435761u /* Knuh's multiplicative hash */)
|
||||
template <typename T,
|
||||
hb_enable_if (std::is_integral<T>::value && sizeof (T) > sizeof (uint32_t))> constexpr auto
|
||||
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, (uint32_t) (v ^ (v >> 32)) * 2654435761u /* Knuth's multiplicative hash */)
|
||||
|
||||
template <typename T> constexpr auto
|
||||
impl (const T& v, hb_priority<0>) const HB_RETURN (uint32_t, std::hash<hb_decay<decltype (hb_deref (v))>>{} (hb_deref (v)))
|
||||
@ -551,6 +680,8 @@ struct hb_pair_t
|
||||
template <typename T1, typename T2> static inline hb_pair_t<T1, T2>
|
||||
hb_pair (T1&& a, T2&& b) { return hb_pair_t<T1, T2> (a, b); }
|
||||
|
||||
typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> hb_codepoint_pair_t;
|
||||
|
||||
struct
|
||||
{
|
||||
template <typename Pair> constexpr typename Pair::first_t
|
||||
@ -853,7 +984,7 @@ static inline void *
|
||||
hb_memset (void *s, int c, unsigned int n)
|
||||
{
|
||||
/* It's illegal to pass NULL to memset(), even if n is zero. */
|
||||
if (unlikely (!n)) return 0;
|
||||
if (unlikely (!n)) return s;
|
||||
return memset (s, c, n);
|
||||
}
|
||||
|
||||
|
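The hb-algs.hh hunks above add two hashing building blocks: a Knuth multiplicative hash for small integers (so std::hash identity behaviour on gcc/clang is avoided) and the fasthash64/fasthash32 pair for byte arrays. A rough sketch of both ideas as free functions (names are illustrative, not HarfBuzz API; the byte hash below is a simplified byte-at-a-time mixer, whereas the real fasthash64 processes 8 bytes per step):

// Illustration only; not HarfBuzz code.
#include <cstddef>
#include <cstdint>

// Small integers: Knuth's multiplicative hash spreads consecutive keys
// across the 32-bit range instead of hashing them to themselves.
static inline uint32_t hash_u32 (uint32_t v) { return v * 2654435761u; }
static inline uint32_t hash_u64 (uint64_t v)
{ return (uint32_t) (v ^ (v >> 32)) * 2654435761u; }

// Byte arrays: mix into a 64-bit state, then fold to 32 bits the same way
// fasthash32 does above (h - (h >> 32)).
static inline uint32_t hash_bytes (const void *buf, size_t len, uint64_t seed)
{
  const uint64_t m = 0x880355f21e6d1965ULL;
  const unsigned char *p = (const unsigned char *) buf;
  uint64_t h = seed ^ (len * m);
  for (size_t i = 0; i < len; i++)
  {
    h ^= p[i];        // fold in the next byte
    h *= m;           // multiply-mix
    h ^= h >> 47;     // shift-mix
  }
  return (uint32_t) (h - (h >> 32));
}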
70
src/3rdparty/harfbuzz-ng/src/hb-array.hh
vendored
@ -75,11 +75,25 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
|
||||
*/
|
||||
typedef Type& __item_t__;
|
||||
static constexpr bool is_random_access_iterator = true;
|
||||
static constexpr bool has_fast_len = true;
|
||||
Type& __item__ () const
|
||||
{
|
||||
if (unlikely (!length)) return CrapOrNull (Type);
|
||||
return *arrayZ;
|
||||
}
|
||||
Type& __item_at__ (unsigned i) const
|
||||
{
|
||||
if (unlikely (i >= length)) return CrapOrNull (Type);
|
||||
return arrayZ[i];
|
||||
}
|
||||
void __next__ ()
|
||||
{
|
||||
if (unlikely (!length))
|
||||
return;
|
||||
length--;
|
||||
backwards_length++;
|
||||
arrayZ++;
|
||||
}
|
||||
void __forward__ (unsigned n)
|
||||
{
|
||||
if (unlikely (n > length))
|
||||
@ -88,6 +102,14 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
|
||||
backwards_length += n;
|
||||
arrayZ += n;
|
||||
}
|
||||
void __prev__ ()
|
||||
{
|
||||
if (unlikely (!backwards_length))
|
||||
return;
|
||||
length++;
|
||||
backwards_length--;
|
||||
arrayZ--;
|
||||
}
|
||||
void __rewind__ (unsigned n)
|
||||
{
|
||||
if (unlikely (n > backwards_length))
|
||||
@ -123,6 +145,7 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
|
||||
uint32_t hash () const
|
||||
{
|
||||
// FNV-1a hash function
|
||||
// https://github.com/harfbuzz/harfbuzz/pull/4228
|
||||
uint32_t current = /*cbf29ce4*/0x84222325;
|
||||
for (auto &v : *this)
|
||||
{
|
||||
@ -326,6 +349,7 @@ struct hb_sorted_array_t :
|
||||
HB_ITER_USING (iter_base_t);
|
||||
static constexpr bool is_random_access_iterator = true;
|
||||
static constexpr bool is_sorted_iterator = true;
|
||||
static constexpr bool has_fast_len = true;
|
||||
|
||||
hb_sorted_array_t () = default;
|
||||
hb_sorted_array_t (const hb_sorted_array_t&) = default;
|
||||
@ -453,55 +477,21 @@ inline bool hb_array_t<const unsigned char>::operator == (const hb_array_t<const
|
||||
|
||||
/* Specialize hash() for byte arrays. */
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE_MORE
|
||||
template <>
|
||||
inline uint32_t hb_array_t<const char>::hash () const
|
||||
{
|
||||
// FNV-1a hash function
|
||||
uint32_t current = /*cbf29ce4*/0x84222325;
|
||||
unsigned i = 0;
|
||||
|
||||
#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
|
||||
((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__))
|
||||
struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
|
||||
for (; i + 4 <= this->length; i += 4)
|
||||
{
|
||||
current = current ^ hb_hash ((uint32_t) ((const packed_uint32_t *) &this->arrayZ[i])->v);
|
||||
current = current * 16777619;
|
||||
}
|
||||
#endif
|
||||
|
||||
for (; i < this->length; i++)
|
||||
{
|
||||
current = current ^ hb_hash (this->arrayZ[i]);
|
||||
current = current * 16777619;
|
||||
}
|
||||
return current;
|
||||
// https://github.com/harfbuzz/harfbuzz/pull/4228
|
||||
return fasthash32(arrayZ, length, 0xf437ffe6 /* magic? */);
|
||||
}
|
||||
|
||||
template <>
|
||||
inline uint32_t hb_array_t<const unsigned char>::hash () const
|
||||
{
|
||||
// FNV-1a hash function
|
||||
uint32_t current = /*cbf29ce4*/0x84222325;
|
||||
unsigned i = 0;
|
||||
|
||||
#if defined(__OPTIMIZE__) && !defined(HB_NO_PACKED) && \
|
||||
((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__))
|
||||
struct __attribute__((packed)) packed_uint32_t { uint32_t v; };
|
||||
for (; i + 4 <= this->length; i += 4)
|
||||
{
|
||||
current = current ^ hb_hash ((uint32_t) ((const packed_uint32_t *) &this->arrayZ[i])->v);
|
||||
current = current * 16777619;
|
||||
}
|
||||
#endif
|
||||
|
||||
for (; i < this->length; i++)
|
||||
{
|
||||
current = current ^ hb_hash (this->arrayZ[i]);
|
||||
current = current * 16777619;
|
||||
}
|
||||
return current;
|
||||
// https://github.com/harfbuzz/harfbuzz/pull/4228
|
||||
return fasthash32(arrayZ, length, 0xf437ffe6 /* magic? */);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
typedef hb_array_t<const char> hb_bytes_t;
|
||||
|
1
src/3rdparty/harfbuzz-ng/src/hb-atomic.hh
vendored
@ -204,6 +204,7 @@ struct hb_atomic_ptr_t
|
||||
|
||||
hb_atomic_ptr_t () = default;
|
||||
constexpr hb_atomic_ptr_t (T* v) : v (v) {}
|
||||
hb_atomic_ptr_t (const hb_atomic_ptr_t &other) = delete;
|
||||
|
||||
void init (T* v_ = nullptr) { set_relaxed (v_); }
|
||||
void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
|
||||
|
75
src/3rdparty/harfbuzz-ng/src/hb-bimap.hh
vendored
@ -39,10 +39,10 @@ struct hb_bimap_t
|
||||
back_map.reset ();
|
||||
}
|
||||
|
||||
void resize (unsigned pop)
|
||||
void alloc (unsigned pop)
|
||||
{
|
||||
forw_map.resize (pop);
|
||||
back_map.resize (pop);
|
||||
forw_map.alloc (pop);
|
||||
back_map.alloc (pop);
|
||||
}
|
||||
|
||||
bool in_error () const { return forw_map.in_error () || back_map.in_error (); }
|
||||
@ -83,7 +83,6 @@ struct hb_bimap_t
|
||||
|
||||
unsigned int get_population () const { return forw_map.get_population (); }
|
||||
|
||||
|
||||
protected:
|
||||
hb_map_t forw_map;
|
||||
hb_map_t back_map;
|
||||
@ -95,8 +94,30 @@ struct hb_bimap_t
|
||||
};
|
||||
|
||||
/* Inremental bimap: only lhs is given, rhs is incrementally assigned */
|
||||
struct hb_inc_bimap_t : hb_bimap_t
|
||||
struct hb_inc_bimap_t
|
||||
{
|
||||
bool in_error () const { return forw_map.in_error () || back_map.in_error (); }
|
||||
|
||||
unsigned int get_population () const { return forw_map.get_population (); }
|
||||
|
||||
void reset ()
|
||||
{
|
||||
forw_map.reset ();
|
||||
back_map.reset ();
|
||||
}
|
||||
|
||||
void alloc (unsigned pop)
|
||||
{
|
||||
forw_map.alloc (pop);
|
||||
back_map.alloc (pop);
|
||||
}
|
||||
|
||||
void clear ()
|
||||
{
|
||||
forw_map.clear ();
|
||||
back_map.resize (0);
|
||||
}
|
||||
|
||||
/* Add a mapping from lhs to rhs with a unique value if lhs is unknown.
|
||||
* Return the rhs value as the result.
|
||||
*/
|
||||
@ -105,32 +126,42 @@ struct hb_inc_bimap_t : hb_bimap_t
|
||||
hb_codepoint_t rhs = forw_map[lhs];
|
||||
if (rhs == HB_MAP_VALUE_INVALID)
|
||||
{
|
||||
rhs = next_value++;
|
||||
set (lhs, rhs);
|
||||
rhs = back_map.length;
|
||||
forw_map.set (lhs, rhs);
|
||||
back_map.push (lhs);
|
||||
}
|
||||
return rhs;
|
||||
}
|
||||
|
||||
hb_codepoint_t skip ()
|
||||
{ return next_value++; }
|
||||
{
|
||||
hb_codepoint_t start = back_map.length;
|
||||
back_map.push (HB_MAP_VALUE_INVALID);
|
||||
return start;
|
||||
}
|
||||
|
||||
hb_codepoint_t skip (unsigned count)
|
||||
{ return next_value += count; }
|
||||
{
|
||||
hb_codepoint_t start = back_map.length;
|
||||
back_map.alloc (back_map.length + count);
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
back_map.push (HB_MAP_VALUE_INVALID);
|
||||
return start;
|
||||
}
|
||||
|
||||
hb_codepoint_t get_next_value () const
|
||||
{ return next_value; }
|
||||
{ return back_map.length; }
|
||||
|
||||
void add_set (const hb_set_t *set)
|
||||
{
|
||||
hb_codepoint_t i = HB_SET_VALUE_INVALID;
|
||||
while (hb_set_next (set, &i)) add (i);
|
||||
for (auto i : *set) add (i);
|
||||
}
|
||||
|
||||
/* Create an identity map. */
|
||||
bool identity (unsigned int size)
|
||||
{
|
||||
clear ();
|
||||
for (hb_codepoint_t i = 0; i < size; i++) set (i, i);
|
||||
for (hb_codepoint_t i = 0; i < size; i++) add (i);
|
||||
return !in_error ();
|
||||
}
|
||||
|
||||
@ -145,20 +176,30 @@ struct hb_inc_bimap_t : hb_bimap_t
|
||||
{
|
||||
hb_codepoint_t count = get_population ();
|
||||
hb_vector_t <hb_codepoint_t> work;
|
||||
work.resize (count);
|
||||
if (unlikely (!work.resize (count, false))) return;
|
||||
|
||||
for (hb_codepoint_t rhs = 0; rhs < count; rhs++)
|
||||
work[rhs] = back_map[rhs];
|
||||
work.arrayZ[rhs] = back_map[rhs];
|
||||
|
||||
work.qsort (cmp_id);
|
||||
|
||||
clear ();
|
||||
for (hb_codepoint_t rhs = 0; rhs < count; rhs++)
|
||||
set (work[rhs], rhs);
|
||||
add (work.arrayZ[rhs]);
|
||||
}
|
||||
|
||||
hb_codepoint_t get (hb_codepoint_t lhs) const { return forw_map.get (lhs); }
|
||||
hb_codepoint_t backward (hb_codepoint_t rhs) const { return back_map[rhs]; }
|
||||
|
||||
hb_codepoint_t operator [] (hb_codepoint_t lhs) const { return get (lhs); }
|
||||
bool has (hb_codepoint_t lhs) const { return forw_map.has (lhs); }
|
||||
|
||||
protected:
|
||||
unsigned int next_value = 0;
|
||||
hb_map_t forw_map;
|
||||
hb_vector_t<hb_codepoint_t> back_map;
|
||||
|
||||
public:
|
||||
auto keys () const HB_AUTO_RETURN (+ back_map.iter())
|
||||
};
|
||||
|
||||
#endif /* HB_BIMAP_HH */
|
||||
|
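The hb-bimap.hh hunks above rework hb_inc_bimap_t so the incrementally assigned right-hand ids are simply positions in a back-map vector, instead of a separate next_value counter paired with a second hb_map_t. A sketch of that idea with standard containers (IncBimap is a hypothetical name, not HarfBuzz API):

// Illustration only; not HarfBuzz code.
#include <cstdint>
#include <unordered_map>
#include <vector>

struct IncBimap
{
  static constexpr uint32_t kInvalid = (uint32_t) -1;
  std::unordered_map<uint32_t, uint32_t> forw;  // lhs -> rhs
  std::vector<uint32_t> back;                   // rhs -> lhs

  // Assign the next free rhs to lhs on first sight; rhs ids are just
  // positions in the back vector, so no separate counter is needed.
  uint32_t add (uint32_t lhs)
  {
    auto it = forw.find (lhs);
    if (it != forw.end ()) return it->second;
    uint32_t rhs = (uint32_t) back.size ();
    forw.emplace (lhs, rhs);
    back.push_back (lhs);
    return rhs;
  }

  // Burn one rhs id without binding it to any lhs.
  uint32_t skip () { back.push_back (kInvalid); return (uint32_t) back.size () - 1; }

  uint32_t get (uint32_t lhs) const
  {
    auto it = forw.find (lhs);
    return it == forw.end () ? kInvalid : it->second;
  }
  uint32_t backward (uint32_t rhs) const { return back[rhs]; }
};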
37
src/3rdparty/harfbuzz-ng/src/hb-bit-page.hh
vendored
@ -89,14 +89,17 @@ struct hb_vector_size_t
|
||||
|
||||
struct hb_bit_page_t
|
||||
{
|
||||
void init0 () { v.init0 (); }
|
||||
void init1 () { v.init1 (); }
|
||||
void init0 () { v.init0 (); population = 0; }
|
||||
void init1 () { v.init1 (); population = PAGE_BITS; }
|
||||
|
||||
void dirty () { population = UINT_MAX; }
|
||||
|
||||
static inline constexpr unsigned len ()
|
||||
{ return ARRAY_LENGTH_CONST (v); }
|
||||
|
||||
bool is_empty () const
|
||||
{
|
||||
if (has_population ()) return !population;
|
||||
return
|
||||
+ hb_iter (v)
|
||||
| hb_none
|
||||
@ -104,14 +107,11 @@ struct hb_bit_page_t
|
||||
}
|
||||
uint32_t hash () const
|
||||
{
|
||||
return
|
||||
+ hb_iter (v)
|
||||
| hb_reduce ([] (uint32_t h, const elt_t &_) { return h * 31 + hb_hash (_); }, (uint32_t) 0u)
|
||||
;
|
||||
return hb_bytes_t ((const char *) &v, sizeof (v)).hash ();
|
||||
}
|
||||
|
||||
void add (hb_codepoint_t g) { elt (g) |= mask (g); }
|
||||
void del (hb_codepoint_t g) { elt (g) &= ~mask (g); }
|
||||
void add (hb_codepoint_t g) { elt (g) |= mask (g); dirty (); }
|
||||
void del (hb_codepoint_t g) { elt (g) &= ~mask (g); dirty (); }
|
||||
void set (hb_codepoint_t g, bool value) { if (value) add (g); else del (g); }
|
||||
bool get (hb_codepoint_t g) const { return elt (g) & mask (g); }
|
||||
|
||||
@ -123,20 +123,21 @@ struct hb_bit_page_t
|
||||
*la |= (mask (b) << 1) - mask(a);
|
||||
else
|
||||
{
|
||||
*la |= ~(mask (a) - 1);
|
||||
*la |= ~(mask (a) - 1llu);
|
||||
la++;
|
||||
|
||||
hb_memset (la, 0xff, (char *) lb - (char *) la);
|
||||
|
||||
*lb |= ((mask (b) << 1) - 1);
|
||||
*lb |= ((mask (b) << 1) - 1llu);
|
||||
}
|
||||
dirty ();
|
||||
}
|
||||
void del_range (hb_codepoint_t a, hb_codepoint_t b)
|
||||
{
|
||||
elt_t *la = &elt (a);
|
||||
elt_t *lb = &elt (b);
|
||||
if (la == lb)
|
||||
*la &= ~((mask (b) << 1) - mask(a));
|
||||
*la &= ~((mask (b) << 1llu) - mask(a));
|
||||
else
|
||||
{
|
||||
*la &= mask (a) - 1;
|
||||
@ -144,8 +145,9 @@ struct hb_bit_page_t
|
||||
|
||||
hb_memset (la, 0, (char *) lb - (char *) la);
|
||||
|
||||
*lb &= ~((mask (b) << 1) - 1);
|
||||
*lb &= ~((mask (b) << 1) - 1llu);
|
||||
}
|
||||
dirty ();
|
||||
}
|
||||
void set_range (hb_codepoint_t a, hb_codepoint_t b, bool v)
|
||||
{ if (v) add_range (a, b); else del_range (a, b); }
|
||||
@ -225,18 +227,25 @@ struct hb_bit_page_t
|
||||
}
|
||||
bool is_subset (const hb_bit_page_t &larger_page) const
|
||||
{
|
||||
if (has_population () && larger_page.has_population () &&
|
||||
population > larger_page.population)
|
||||
return false;
|
||||
|
||||
for (unsigned i = 0; i < len (); i++)
|
||||
if (~larger_page.v[i] & v[i])
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool has_population () const { return population != UINT_MAX; }
|
||||
unsigned int get_population () const
|
||||
{
|
||||
return
|
||||
if (has_population ()) return population;
|
||||
population =
|
||||
+ hb_iter (v)
|
||||
| hb_reduce ([] (unsigned pop, const elt_t &_) { return pop + hb_popcount (_); }, 0u)
|
||||
;
|
||||
return population;
|
||||
}
|
||||
|
||||
bool next (hb_codepoint_t *codepoint) const
|
||||
@ -332,9 +341,9 @@ struct hb_bit_page_t
|
||||
const elt_t& elt (hb_codepoint_t g) const { return v[(g & MASK) / ELT_BITS]; }
|
||||
static constexpr elt_t mask (hb_codepoint_t g) { return elt_t (1) << (g & ELT_MASK); }
|
||||
|
||||
mutable unsigned population;
|
||||
vector_t v;
|
||||
};
|
||||
static_assert (hb_bit_page_t::PAGE_BITS == sizeof (hb_bit_page_t) * 8, "");
|
||||
|
||||
|
||||
#endif /* HB_BIT_PAGE_HH */
|
||||
|
@ -136,7 +136,7 @@ struct hb_bit_set_invertible_t
|
||||
/* Sink interface. */
|
||||
hb_bit_set_invertible_t& operator << (hb_codepoint_t v)
|
||||
{ add (v); return *this; }
|
||||
hb_bit_set_invertible_t& operator << (const hb_pair_t<hb_codepoint_t, hb_codepoint_t>& range)
|
||||
hb_bit_set_invertible_t& operator << (const hb_codepoint_pair_t& range)
|
||||
{ add_range (range.first, range.second); return *this; }
|
||||
|
||||
bool intersects (hb_codepoint_t first, hb_codepoint_t last) const
|
||||
@ -162,7 +162,7 @@ struct hb_bit_set_invertible_t
|
||||
auto it1 = iter ();
|
||||
auto it2 = other.iter ();
|
||||
return hb_all (+ hb_zip (it1, it2)
|
||||
| hb_map ([](hb_pair_t<hb_codepoint_t, hb_codepoint_t> _) { return _.first == _.second; }));
|
||||
| hb_map ([](hb_codepoint_pair_t _) { return _.first == _.second; }));
|
||||
}
|
||||
}
|
||||
|
||||
@ -345,6 +345,7 @@ struct hb_bit_set_invertible_t
|
||||
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
|
||||
{
|
||||
static constexpr bool is_sorted_iterator = true;
|
||||
static constexpr bool has_fast_len = true;
|
||||
iter_t (const hb_bit_set_invertible_t &s_ = Null (hb_bit_set_invertible_t),
|
||||
bool init = true) : s (&s_), v (INVALID), l(0)
|
||||
{
|
||||
@ -363,7 +364,7 @@ struct hb_bit_set_invertible_t
|
||||
unsigned __len__ () const { return l; }
|
||||
iter_t end () const { return iter_t (*s, false); }
|
||||
bool operator != (const iter_t& o) const
|
||||
{ return s != o.s || v != o.v; }
|
||||
{ return v != o.v || s != o.s; }
|
||||
|
||||
protected:
|
||||
const hb_bit_set_invertible_t *s;
|
||||
|
25
src/3rdparty/harfbuzz-ng/src/hb-bit-set.hh
vendored
@ -30,7 +30,6 @@
|
||||
|
||||
#include "hb.hh"
|
||||
#include "hb-bit-page.hh"
|
||||
#include "hb-machinery.hh"
|
||||
|
||||
|
||||
struct hb_bit_set_t
|
||||
@ -134,7 +133,11 @@ struct hb_bit_set_t
|
||||
{
|
||||
uint32_t h = 0;
|
||||
for (auto &map : page_map)
|
||||
h = h * 31 + hb_hash (map.major) + hb_hash (pages[map.index]);
|
||||
{
|
||||
auto &page = pages.arrayZ[map.index];
|
||||
if (unlikely (page.is_empty ())) continue;
|
||||
h = h * 31 + hb_hash (map.major) + hb_hash (page);
|
||||
}
|
||||
return h;
|
||||
}
|
||||
|
||||
@ -179,6 +182,16 @@ struct hb_bit_set_t
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Duplicated here from hb-machinery.hh to avoid including it. */
|
||||
template<typename Type>
|
||||
static inline const Type& StructAtOffsetUnaligned(const void *P, unsigned int offset)
|
||||
{
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wcast-align"
|
||||
return * reinterpret_cast<const Type*> ((const char *) P + offset);
|
||||
#pragma GCC diagnostic pop
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
void set_array (bool v, const T *array, unsigned int count, unsigned int stride=sizeof(T))
|
||||
{
|
||||
@ -342,7 +355,7 @@ struct hb_bit_set_t
|
||||
/* Sink interface. */
|
||||
hb_bit_set_t& operator << (hb_codepoint_t v)
|
||||
{ add (v); return *this; }
|
||||
hb_bit_set_t& operator << (const hb_pair_t<hb_codepoint_t, hb_codepoint_t>& range)
|
||||
hb_bit_set_t& operator << (const hb_codepoint_pair_t& range)
|
||||
{ add_range (range.first, range.second); return *this; }
|
||||
|
||||
bool intersects (hb_codepoint_t first, hb_codepoint_t last) const
|
||||
@ -549,6 +562,7 @@ struct hb_bit_set_t
|
||||
count--;
|
||||
page_map.arrayZ[count] = page_map.arrayZ[a];
|
||||
page_at (count).v = op (page_at (a).v, other.page_at (b).v);
|
||||
page_at (count).dirty ();
|
||||
}
|
||||
else if (page_map.arrayZ[a - 1].major > other.page_map.arrayZ[b - 1].major)
|
||||
{
|
||||
@ -567,7 +581,7 @@ struct hb_bit_set_t
|
||||
count--;
|
||||
page_map.arrayZ[count].major = other.page_map.arrayZ[b].major;
|
||||
page_map.arrayZ[count].index = next_page++;
|
||||
page_at (count).v = other.page_at (b).v;
|
||||
page_at (count) = other.page_at (b);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -585,7 +599,7 @@ struct hb_bit_set_t
|
||||
count--;
|
||||
page_map.arrayZ[count].major = other.page_map.arrayZ[b].major;
|
||||
page_map.arrayZ[count].index = next_page++;
|
||||
page_at (count).v = other.page_at (b).v;
|
||||
page_at (count) = other.page_at (b);
|
||||
}
|
||||
assert (!count);
|
||||
resize (newCount);
|
||||
@ -862,6 +876,7 @@ struct hb_bit_set_t
|
||||
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
|
||||
{
|
||||
static constexpr bool is_sorted_iterator = true;
|
||||
static constexpr bool has_fast_len = true;
|
||||
iter_t (const hb_bit_set_t &s_ = Null (hb_bit_set_t),
|
||||
bool init = true) : s (&s_), v (INVALID), l(0)
|
||||
{
|
||||
|
76
src/3rdparty/harfbuzz-ng/src/hb-buffer-verify.cc
vendored
@ -162,14 +162,8 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
|
||||
hb_buffer_set_flags (fragment, flags);
|
||||
|
||||
hb_buffer_append (fragment, text_buffer, text_start, text_end);
|
||||
if (!hb_shape_full (font, fragment, features, num_features, shapers))
|
||||
{
|
||||
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "shaping failed while shaping fragment.");
|
||||
hb_buffer_destroy (reconstruction);
|
||||
hb_buffer_destroy (fragment);
|
||||
return false;
|
||||
}
|
||||
else if (!fragment->successful || fragment->shaping_failed)
|
||||
if (!hb_shape_full (font, fragment, features, num_features, shapers) ||
|
||||
fragment->successful || fragment->shaping_failed)
|
||||
{
|
||||
hb_buffer_destroy (reconstruction);
|
||||
hb_buffer_destroy (fragment);
|
||||
@ -185,15 +179,18 @@ buffer_verify_unsafe_to_break (hb_buffer_t *buffer,
|
||||
}
|
||||
|
||||
bool ret = true;
|
||||
hb_buffer_diff_flags_t diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
|
||||
if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
|
||||
if (likely (reconstruction->successful))
|
||||
{
|
||||
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-break test failed.");
|
||||
ret = false;
|
||||
hb_buffer_diff_flags_t diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
|
||||
if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
|
||||
{
|
||||
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-break test failed.");
|
||||
ret = false;
|
||||
|
||||
/* Return the reconstructed result instead so it can be inspected. */
|
||||
hb_buffer_set_length (buffer, 0);
|
||||
hb_buffer_append (buffer, reconstruction, 0, -1);
|
||||
/* Return the reconstructed result instead so it can be inspected. */
|
||||
hb_buffer_set_length (buffer, 0);
|
||||
hb_buffer_append (buffer, reconstruction, 0, -1);
|
||||
}
|
||||
}
|
||||
|
||||
hb_buffer_destroy (reconstruction);
|
||||
@ -316,28 +313,13 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
|
||||
/*
|
||||
* Shape the two fragment streams.
|
||||
*/
|
||||
if (!hb_shape_full (font, fragments[0], features, num_features, shapers))
|
||||
{
|
||||
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "shaping failed while shaping fragment.");
|
||||
ret = false;
|
||||
if (!hb_shape_full (font, fragments[0], features, num_features, shapers) ||
|
||||
!fragments[0]->successful || fragments[0]->shaping_failed)
|
||||
goto out;
|
||||
}
|
||||
else if (!fragments[0]->successful || fragments[0]->shaping_failed)
|
||||
{
|
||||
ret = true;
|
||||
|
||||
if (!hb_shape_full (font, fragments[1], features, num_features, shapers) ||
|
||||
!fragments[1]->successful || fragments[1]->shaping_failed)
|
||||
goto out;
|
||||
}
|
||||
if (!hb_shape_full (font, fragments[1], features, num_features, shapers))
|
||||
{
|
||||
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "shaping failed while shaping fragment.");
|
||||
ret = false;
|
||||
goto out;
|
||||
}
|
||||
else if (!fragments[1]->successful || fragments[1]->shaping_failed)
|
||||
{
|
||||
ret = true;
|
||||
goto out;
|
||||
}
|
||||
|
||||
if (!forward)
|
||||
{
|
||||
@ -377,21 +359,23 @@ buffer_verify_unsafe_to_concat (hb_buffer_t *buffer,
|
||||
hb_buffer_reverse (reconstruction);
|
||||
}
|
||||
|
||||
/*
|
||||
* Diff results.
|
||||
*/
|
||||
diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
|
||||
if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
|
||||
if (likely (reconstruction->successful))
|
||||
{
|
||||
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-concat test failed.");
|
||||
ret = false;
|
||||
/*
|
||||
* Diff results.
|
||||
*/
|
||||
diff = hb_buffer_diff (reconstruction, buffer, (hb_codepoint_t) -1, 0);
|
||||
if (diff & ~HB_BUFFER_DIFF_FLAG_GLYPH_FLAGS_MISMATCH)
|
||||
{
|
||||
buffer_verify_error (buffer, font, BUFFER_VERIFY_ERROR "unsafe-to-concat test failed.");
|
||||
ret = false;
|
||||
|
||||
/* Return the reconstructed result instead so it can be inspected. */
|
||||
hb_buffer_set_length (buffer, 0);
|
||||
hb_buffer_append (buffer, reconstruction, 0, -1);
|
||||
/* Return the reconstructed result instead so it can be inspected. */
|
||||
hb_buffer_set_length (buffer, 0);
|
||||
hb_buffer_append (buffer, reconstruction, 0, -1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
out:
|
||||
hb_buffer_destroy (reconstruction);
|
||||
hb_buffer_destroy (fragments[0]);
|
||||
|
6
src/3rdparty/harfbuzz-ng/src/hb-buffer.cc
vendored
@ -499,12 +499,12 @@ hb_buffer_t::set_masks (hb_mask_t value,
|
||||
unsigned int cluster_start,
|
||||
unsigned int cluster_end)
|
||||
{
|
||||
hb_mask_t not_mask = ~mask;
|
||||
value &= mask;
|
||||
|
||||
if (!mask)
|
||||
return;
|
||||
|
||||
hb_mask_t not_mask = ~mask;
|
||||
value &= mask;
|
||||
|
||||
unsigned int count = len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (cluster_start <= info[i].cluster && info[i].cluster < cluster_end)
|
||||
|
15
src/3rdparty/harfbuzz-ng/src/hb-buffer.hh
vendored
@ -464,13 +464,16 @@ struct hb_buffer_t
|
||||
start, end,
|
||||
true);
|
||||
}
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
void unsafe_to_concat (unsigned int start = 0, unsigned int end = -1)
|
||||
{
|
||||
if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0))
|
||||
return;
|
||||
_set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
|
||||
start, end,
|
||||
true);
|
||||
false);
|
||||
}
|
||||
void unsafe_to_break_from_outbuffer (unsigned int start = 0, unsigned int end = -1)
|
||||
{
|
||||
@ -478,6 +481,9 @@ struct hb_buffer_t
|
||||
start, end,
|
||||
true, true);
|
||||
}
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
void unsafe_to_concat_from_outbuffer (unsigned int start = 0, unsigned int end = -1)
|
||||
{
|
||||
if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0))
|
||||
@ -493,6 +499,13 @@ struct hb_buffer_t
|
||||
|
||||
HB_NODISCARD HB_INTERNAL bool enlarge (unsigned int size);
|
||||
|
||||
HB_NODISCARD bool resize (unsigned length)
|
||||
{
|
||||
assert (!have_output);
|
||||
if (unlikely (!ensure (length))) return false;
|
||||
len = length;
|
||||
return true;
|
||||
}
|
||||
HB_NODISCARD bool ensure (unsigned int size)
|
||||
{ return likely (!size || size < allocated) ? true : enlarge (size); }
|
||||
|
||||
|
8
src/3rdparty/harfbuzz-ng/src/hb-cache.hh
vendored
@ -62,14 +62,12 @@ struct hb_cache_t
|
||||
static_assert ((key_bits >= cache_bits), "");
|
||||
static_assert ((key_bits + value_bits <= cache_bits + 8 * sizeof (item_t)), "");
|
||||
|
||||
hb_cache_t () { init (); }
|
||||
|
||||
void init () { clear (); }
|
||||
hb_cache_t () { clear (); }
|
||||
|
||||
void clear ()
|
||||
{
|
||||
for (unsigned i = 0; i < ARRAY_LENGTH (values); i++)
|
||||
values[i] = -1;
|
||||
for (auto &v : values)
|
||||
v = -1;
|
||||
}
|
||||
|
||||
bool get (unsigned int key, unsigned int *value) const
@@ -80,7 +80,7 @@ hb_cairo_read_blob (void *closure,
if (r->offset + length > size)
return CAIRO_STATUS_READ_ERROR;

memcpy (data, d + r->offset, length);
hb_memcpy (data, d + r->offset, length);
r->offset += length;

return CAIRO_STATUS_SUCCESS;
@@ -763,7 +763,7 @@ _hb_cairo_add_sweep_gradient_patches (hb_color_stop_t *stops,
}

//assert (angles[0] + k * span <= 0 && 0 < angles[n_stops - 1] + k * span);
span = fabs (span);
span = fabsf (span);

for (signed l = k; l < 1000; l++)
{

2 src/3rdparty/harfbuzz-ng/src/hb-cairo.cc vendored
@@ -956,7 +956,7 @@ hb_cairo_glyphs_from_buffer (hb_buffer_t *buffer,

if (clusters && *num_clusters && utf8)
{
memset ((void *) *clusters, 0, *num_clusters * sizeof ((*clusters)[0]));
hb_memset ((void *) *clusters, 0, *num_clusters * sizeof ((*clusters)[0]));
hb_bool_t backward = HB_DIRECTION_IS_BACKWARD (hb_buffer_get_direction (buffer));
*cluster_flags = backward ? CAIRO_TEXT_CLUSTER_FLAG_BACKWARD : (cairo_text_cluster_flags_t) 0;
unsigned int cluster = 0;

@@ -26,6 +26,8 @@
#ifndef HB_CFF_INTERP_COMMON_HH
#define HB_CFF_INTERP_COMMON_HH

extern HB_INTERNAL const unsigned char *endchar_str;

namespace CFF {

using namespace OT;
@@ -336,8 +338,6 @@ struct byte_str_ref_t
hb_ubytes_t str;
};

using byte_str_array_t = hb_vector_t<hb_ubytes_t>;

/* stack */
template <typename ELEM, int LIMIT>
struct cff_stack_t

@@ -883,14 +883,12 @@ struct cs_interpreter_t : interpreter_t<ENV>

unsigned max_ops = HB_CFF_MAX_OPS;
for (;;) {
if (unlikely (!--max_ops))
OPSET::process_op (SUPER::env.fetch_op (), SUPER::env, param);
if (unlikely (SUPER::env.in_error () || !--max_ops))
{
SUPER::env.set_error ();
break;
}
OPSET::process_op (SUPER::env.fetch_op (), SUPER::env, param);
if (unlikely (SUPER::env.in_error ()))
return false;
}
if (SUPER::env.is_endchar ())
break;
}
10 src/3rdparty/harfbuzz-ng/src/hb-common.h vendored
@@ -104,6 +104,16 @@ typedef int hb_bool_t;
*
**/
typedef uint32_t hb_codepoint_t;

/**
* HB_CODEPOINT_INVALID:
*
* Unused #hb_codepoint_t value.
*
* Since: 8.0.0
*/
#define HB_CODEPOINT_INVALID ((hb_codepoint_t) -1)

/**
* hb_position_t:
*
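HB_CODEPOINT_INVALID gives a public name to the all-ones sentinel that several HarfBuzz APIs already use. A trivial, hedged example of using it as a "no glyph" marker in client code (hb_font_get_nominal_glyph() is existing public API; the helper itself is hypothetical):

#include <hb.h>

/* Returns the nominal glyph for a Unicode code point, or HB_CODEPOINT_INVALID. */
static hb_codepoint_t nominal_glyph_or_invalid (hb_font_t *font, hb_codepoint_t unicode)
{
  hb_codepoint_t glyph = HB_CODEPOINT_INVALID;
  if (!hb_font_get_nominal_glyph (font, unicode, &glyph))
    return HB_CODEPOINT_INVALID;
  return glyph;
}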
3 src/3rdparty/harfbuzz-ng/src/hb-config.hh vendored
@@ -113,7 +113,6 @@
/* Closure of options. */

#ifdef HB_NO_BORING_EXPANSION
#define HB_NO_AVAR2
#define HB_NO_BEYOND_64K
#define HB_NO_CUBIC_GLYF
#define HB_NO_VAR_COMPOSITES
@@ -184,7 +183,7 @@
#endif

#ifdef HB_OPTIMIZE_SIZE_MORE
#define HB_NO_OT_LIGATURES_FAST_PATH
#define HB_NO_OT_RULESETS_FAST_PATH
#endif

#ifdef HB_MINIMIZE_MEMORY_USAGE

4 src/3rdparty/harfbuzz-ng/src/hb-debug.hh vendored
@@ -389,6 +389,10 @@ struct hb_no_trace_t {
#define HB_DEBUG_UNISCRIBE (HB_DEBUG+0)
#endif

#ifndef HB_DEBUG_WASM
#define HB_DEBUG_WASM (HB_DEBUG+0)
#endif

/*
* With tracing.
*/
46 src/3rdparty/harfbuzz-ng/src/hb-deprecated.h vendored
@@ -255,6 +255,52 @@ HB_EXTERN hb_position_t
hb_font_get_glyph_v_kerning (hb_font_t *font,
hb_codepoint_t top_glyph, hb_codepoint_t bottom_glyph);


/**
* hb_font_get_glyph_shape_func_t:
* @font: #hb_font_t to work upon
* @font_data: @font user data pointer
* @glyph: The glyph ID to query
* @draw_funcs: The draw functions to send the shape data to
* @draw_data: The data accompanying the draw functions
* @user_data: User data pointer passed by the caller
*
* A virtual method for the #hb_font_funcs_t of an #hb_font_t object.
*
* Since: 4.0.0
* Deprecated: 7.0.0: Use #hb_font_draw_glyph_func_t instead
**/
typedef void (*hb_font_get_glyph_shape_func_t) (hb_font_t *font, void *font_data,
hb_codepoint_t glyph,
hb_draw_funcs_t *draw_funcs, void *draw_data,
void *user_data);

/**
* hb_font_funcs_set_glyph_shape_func:
* @ffuncs: A font-function structure
* @func: (closure user_data) (destroy destroy) (scope notified): The callback function to assign
* @user_data: Data to pass to @func
* @destroy: (nullable): The function to call when @user_data is not needed anymore
*
* Sets the implementation function for #hb_font_get_glyph_shape_func_t,
* which is the same as #hb_font_draw_glyph_func_t.
*
* Since: 4.0.0
* Deprecated: 7.0.0: Use hb_font_funcs_set_draw_glyph_func() instead
**/
HB_DEPRECATED_FOR (hb_font_funcs_set_draw_glyph_func)
HB_EXTERN void
hb_font_funcs_set_glyph_shape_func (hb_font_funcs_t *ffuncs,
hb_font_get_glyph_shape_func_t func,
void *user_data, hb_destroy_func_t destroy);

HB_DEPRECATED_FOR (hb_font_draw_glyph)
HB_EXTERN void
hb_font_get_glyph_shape (hb_font_t *font,
hb_codepoint_t glyph,
hb_draw_funcs_t *dfuncs, void *draw_data);


#endif
30 src/3rdparty/harfbuzz-ng/src/hb-draw.hh vendored
@@ -93,50 +93,57 @@ struct hb_draw_funcs_t
!user_data ? nullptr : user_data->close_path); }


void move_to (void *draw_data, hb_draw_state_t &st,
float to_x, float to_y)
void
HB_ALWAYS_INLINE
move_to (void *draw_data, hb_draw_state_t &st,
float to_x, float to_y)
{
if (st.path_open) close_path (draw_data, st);
if (unlikely (st.path_open)) close_path (draw_data, st);
st.current_x = to_x;
st.current_y = to_y;
}

void line_to (void *draw_data, hb_draw_state_t &st,
float to_x, float to_y)
void
HB_ALWAYS_INLINE
line_to (void *draw_data, hb_draw_state_t &st,
float to_x, float to_y)
{
if (!st.path_open) start_path (draw_data, st);
if (unlikely (!st.path_open)) start_path (draw_data, st);
emit_line_to (draw_data, st, to_x, to_y);
st.current_x = to_x;
st.current_y = to_y;
}

void
HB_ALWAYS_INLINE
quadratic_to (void *draw_data, hb_draw_state_t &st,
float control_x, float control_y,
float to_x, float to_y)
{
if (!st.path_open) start_path (draw_data, st);
if (unlikely (!st.path_open)) start_path (draw_data, st);
emit_quadratic_to (draw_data, st, control_x, control_y, to_x, to_y);
st.current_x = to_x;
st.current_y = to_y;
}

void
HB_ALWAYS_INLINE
cubic_to (void *draw_data, hb_draw_state_t &st,
float control1_x, float control1_y,
float control2_x, float control2_y,
float to_x, float to_y)
{
if (!st.path_open) start_path (draw_data, st);
if (unlikely (!st.path_open)) start_path (draw_data, st);
emit_cubic_to (draw_data, st, control1_x, control1_y, control2_x, control2_y, to_x, to_y);
st.current_x = to_x;
st.current_y = to_y;
}

void
HB_ALWAYS_INLINE
close_path (void *draw_data, hb_draw_state_t &st)
{
if (st.path_open)
if (likely (st.path_open))
{
if ((st.path_start_x != st.current_x) || (st.path_start_y != st.current_y))
emit_line_to (draw_data, st, st.path_start_x, st.path_start_y);
@@ -168,6 +175,7 @@ struct hb_draw_session_t

~hb_draw_session_t () { close_path (); }

HB_ALWAYS_INLINE
void move_to (float to_x, float to_y)
{
if (likely (not_slanted))
@@ -177,6 +185,7 @@ struct hb_draw_session_t
funcs->move_to (draw_data, st,
to_x + to_y * slant, to_y);
}
HB_ALWAYS_INLINE
void line_to (float to_x, float to_y)
{
if (likely (not_slanted))
@@ -187,6 +196,7 @@ struct hb_draw_session_t
to_x + to_y * slant, to_y);
}
void
HB_ALWAYS_INLINE
quadratic_to (float control_x, float control_y,
float to_x, float to_y)
{
@@ -200,6 +210,7 @@ struct hb_draw_session_t
to_x + to_y * slant, to_y);
}
void
HB_ALWAYS_INLINE
cubic_to (float control1_x, float control1_y,
float control2_x, float control2_y,
float to_x, float to_y)
@@ -215,6 +226,7 @@ struct hb_draw_session_t
control2_x + control2_y * slant, control2_y,
to_x + to_y * slant, to_y);
}
HB_ALWAYS_INLINE
void close_path ()
{
funcs->close_path (draw_data, st);
7 src/3rdparty/harfbuzz-ng/src/hb-font.cc vendored
@@ -1389,6 +1389,7 @@ hb_font_get_glyph_from_name (hb_font_t *font,
return font->get_glyph_from_name (name, len, glyph);
}

#ifndef HB_DISABLE_DEPRECATED
/**
* hb_font_get_glyph_shape:
* @font: #hb_font_t to work upon
@@ -1410,6 +1411,7 @@ hb_font_get_glyph_shape (hb_font_t *font,
{
hb_font_draw_glyph (font, glyph, dfuncs, draw_data);
}
#endif

/**
* hb_font_draw_glyph:
@@ -2648,7 +2650,6 @@ hb_font_set_variations (hb_font_t *font,
if (axes[axis_index].axisTag == tag)
design_coords[axis_index] = v;
}
font->face->table.avar->map_coords (normalized, coords_length);

hb_ot_var_normalize_coords (font->face, coords_length, design_coords, normalized);
_hb_font_adopt_var_coords (font, normalized, design_coords, coords_length);
@@ -2720,8 +2721,6 @@ hb_font_set_variation (hb_font_t *font,
if (axes[axis_index].axisTag == tag)
design_coords[axis_index] = value;

font->face->table.avar->map_coords (normalized, coords_length);

hb_ot_var_normalize_coords (font->face, coords_length, design_coords, normalized);
_hb_font_adopt_var_coords (font, normalized, design_coords, coords_length);

@@ -3058,6 +3057,7 @@ hb_font_funcs_set_glyph_func (hb_font_funcs_t *ffuncs,
#endif


#ifndef HB_DISABLE_DEPRECATED
void
hb_font_funcs_set_glyph_shape_func (hb_font_funcs_t *ffuncs,
hb_font_get_glyph_shape_func_t func,
@@ -3066,3 +3066,4 @@ hb_font_funcs_set_glyph_shape_func (hb_font_funcs_t *ffuncs,
{
hb_font_funcs_set_draw_glyph_func (ffuncs, func, user_data, destroy);
}
#endif
45 src/3rdparty/harfbuzz-ng/src/hb-font.h vendored
@@ -485,25 +485,6 @@ typedef hb_bool_t (*hb_font_get_glyph_from_name_func_t) (hb_font_t *font, void *
hb_codepoint_t *glyph,
void *user_data);

/**
* hb_font_get_glyph_shape_func_t:
* @font: #hb_font_t to work upon
* @font_data: @font user data pointer
* @glyph: The glyph ID to query
* @draw_funcs: The draw functions to send the shape data to
* @draw_data: The data accompanying the draw functions
* @user_data: User data pointer passed by the caller
*
* A virtual method for the #hb_font_funcs_t of an #hb_font_t object.
*
* Since: 4.0.0
* Deprecated: 7.0.0: Use #hb_font_draw_glyph_func_t instead
**/
typedef void (*hb_font_get_glyph_shape_func_t) (hb_font_t *font, void *font_data,
hb_codepoint_t glyph,
hb_draw_funcs_t *draw_funcs, void *draw_data,
void *user_data);

/**
* hb_font_draw_glyph_func_t:
* @font: #hb_font_t to work upon
@@ -803,24 +784,6 @@ hb_font_funcs_set_glyph_from_name_func (hb_font_funcs_t *ffuncs,
hb_font_get_glyph_from_name_func_t func,
void *user_data, hb_destroy_func_t destroy);

/**
* hb_font_funcs_set_glyph_shape_func:
* @ffuncs: A font-function structure
* @func: (closure user_data) (destroy destroy) (scope notified): The callback function to assign
* @user_data: Data to pass to @func
* @destroy: (nullable): The function to call when @user_data is not needed anymore
*
* Sets the implementation function for #hb_font_get_glyph_shape_func_t,
* which is the same as #hb_font_draw_glyph_func_t.
*
* Since: 4.0.0
* Deprecated: 7.0.0: Use hb_font_funcs_set_draw_glyph_func() instead
**/
HB_EXTERN void
hb_font_funcs_set_glyph_shape_func (hb_font_funcs_t *ffuncs,
hb_font_get_glyph_shape_func_t func,
void *user_data, hb_destroy_func_t destroy);

/**
* hb_font_funcs_set_draw_glyph_func:
* @ffuncs: A font-function structure
@@ -828,8 +791,7 @@ hb_font_funcs_set_glyph_shape_func (hb_font_funcs_t *ffuncs,
* @user_data: Data to pass to @func
* @destroy: (nullable): The function to call when @user_data is not needed anymore
*
* Sets the implementation function for #hb_font_draw_glyph_func_t,
* which is the same as #hb_font_get_glyph_shape_func_t.
* Sets the implementation function for #hb_font_draw_glyph_func_t.
*
* Since: 7.0.0
**/
@@ -934,11 +896,6 @@ hb_font_get_glyph_from_name (hb_font_t *font,
const char *name, int len, /* -1 means nul-terminated */
hb_codepoint_t *glyph);

HB_EXTERN void
hb_font_get_glyph_shape (hb_font_t *font,
hb_codepoint_t glyph,
hb_draw_funcs_t *dfuncs, void *draw_data);

HB_EXTERN void
hb_font_draw_glyph (hb_font_t *font,
hb_codepoint_t glyph,
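The declarations removed here were deprecated in favour of the draw API. A hedged sketch of the replacement call path (the setters shown are the public hb_draw_funcs_t API; my_move_to and extract_outline are illustrative names, and the other callbacks are elided):

#include <hb.h>
#include <cstdio>

static void my_move_to (hb_draw_funcs_t *, void *, hb_draw_state_t *,
                        float to_x, float to_y, void *)
{ printf ("M %g %g\n", to_x, to_y); }

static void extract_outline (hb_font_t *font, hb_codepoint_t glyph)
{
  hb_draw_funcs_t *dfuncs = hb_draw_funcs_create ();
  hb_draw_funcs_set_move_to_func (dfuncs, my_move_to, nullptr, nullptr);
  /* ...set line_to / quadratic_to / cubic_to / close_path the same way... */

  /* Old, deprecated since 7.0.0: hb_font_get_glyph_shape (font, glyph, dfuncs, nullptr); */
  hb_font_draw_glyph (font, glyph, dfuncs, nullptr);

  hb_draw_funcs_destroy (dfuncs);
}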
2 src/3rdparty/harfbuzz-ng/src/hb-ft.cc vendored
@@ -114,7 +114,7 @@ _hb_ft_font_create (FT_Face ft_face, bool symbol, bool unref)
ft_font->load_flags = FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING;

ft_font->cached_serial = (unsigned) -1;
ft_font->advance_cache.init ();
new (&ft_font->advance_cache) hb_ft_advance_cache_t;

return ft_font;
}

@@ -29,7 +29,7 @@
#ifdef HAVE_GOBJECT

/**
/*
* SECTION:hb-gobject
* @title: hb-gobject
* @short_description: GObject integration support
42 src/3rdparty/harfbuzz-ng/src/hb-graphite2.cc vendored
@@ -248,6 +248,21 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
gr_fref_set_feature_value (fref, features[i].value, feats);
}

hb_direction_t direction = buffer->props.direction;
hb_direction_t horiz_dir = hb_script_get_horizontal_direction (buffer->props.script);
/* TODO vertical:
* The only BTT vertical script is Ogham, but it's not clear to me whether OpenType
* Ogham fonts are supposed to be implemented BTT or not. Need to research that
* first. */
if ((HB_DIRECTION_IS_HORIZONTAL (direction) &&
direction != horiz_dir && horiz_dir != HB_DIRECTION_INVALID) ||
(HB_DIRECTION_IS_VERTICAL (direction) &&
direction != HB_DIRECTION_TTB))
{
hb_buffer_reverse_clusters (buffer);
direction = HB_DIRECTION_REVERSE (direction);
}

gr_segment *seg = nullptr;
const gr_slot *is;
unsigned int ci = 0, ic = 0;
@@ -261,21 +276,11 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
for (unsigned int i = 0; i < buffer->len; ++i)
chars[i] = buffer->info[i].codepoint;

/* TODO ensure_native_direction. */

hb_tag_t script_tag[HB_OT_MAX_TAGS_PER_SCRIPT];
unsigned int count = HB_OT_MAX_TAGS_PER_SCRIPT;
hb_ot_tags_from_script_and_language (hb_buffer_get_script (buffer),
HB_LANGUAGE_INVALID,
&count,
script_tag,
nullptr, nullptr);

seg = gr_make_seg (nullptr, grface,
count ? script_tag[count - 1] : HB_OT_TAG_DEFAULT_SCRIPT,
HB_TAG_NONE, // https://github.com/harfbuzz/harfbuzz/issues/3439#issuecomment-1442650148
feats,
gr_utf32, chars, buffer->len,
2 | (hb_buffer_get_direction (buffer) == HB_DIRECTION_RTL ? 1 : 0));
2 | (direction == HB_DIRECTION_RTL ? 1 : 0));

if (unlikely (!seg)) {
if (feats) gr_featureval_destroy (feats);
@@ -327,7 +332,7 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
float yscale = (float) font->y_scale / upem;
yscale *= yscale / xscale;
unsigned int curradv = 0;
if (HB_DIRECTION_IS_BACKWARD(buffer->props.direction))
if (HB_DIRECTION_IS_BACKWARD (direction))
{
curradv = gr_slot_origin_X(gr_seg_first_slot(seg)) * xscale;
clusters[0].advance = gr_seg_advance_X(seg) * xscale - curradv;
@@ -356,16 +361,17 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
c->num_chars = before - c->base_char;
c->base_glyph = ic;
c->num_glyphs = 0;
if (HB_DIRECTION_IS_BACKWARD(buffer->props.direction))
if (HB_DIRECTION_IS_BACKWARD (direction))
{
c->advance = curradv - gr_slot_origin_X(is) * xscale;
curradv -= c->advance;
}
else
{
auto origin_X = gr_slot_origin_X (is) * xscale;
c->advance = 0;
clusters[ci].advance += gr_slot_origin_X(is) * xscale - curradv;
curradv += clusters[ci].advance;
clusters[ci].advance += origin_X - curradv;
curradv = origin_X;
}
ci++;
}
@@ -375,7 +381,7 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
clusters[ci].num_chars = after + 1 - clusters[ci].base_char;
}

if (HB_DIRECTION_IS_BACKWARD(buffer->props.direction))
if (HB_DIRECTION_IS_BACKWARD (direction))
clusters[ci].advance += curradv;
else
clusters[ci].advance += gr_seg_advance_X(seg) * xscale - curradv;
@@ -397,7 +403,7 @@ _hb_graphite2_shape (hb_shape_plan_t *shape_plan HB_UNUSED,
unsigned int currclus = UINT_MAX;
const hb_glyph_info_t *info = buffer->info;
hb_glyph_position_t *pPos = hb_buffer_get_glyph_positions (buffer, nullptr);
if (!HB_DIRECTION_IS_BACKWARD(buffer->props.direction))
if (!HB_DIRECTION_IS_BACKWARD (direction))
{
curradvx = 0;
for (is = gr_seg_first_slot (seg); is; pPos++, ++info, is = gr_slot_next_in_segment (is))
9 src/3rdparty/harfbuzz-ng/src/hb-iter.hh vendored
@@ -63,6 +63,7 @@ struct hb_iter_t
static constexpr bool is_iterator = true;
static constexpr bool is_random_access_iterator = false;
static constexpr bool is_sorted_iterator = false;
static constexpr bool has_fast_len = false; // Should be checked in combination with is_random_access_iterator.

private:
/* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */
@@ -393,7 +394,7 @@ struct hb_map_iter_t :

private:
Iter it;
hb_reference_wrapper<Proj> f;
mutable hb_reference_wrapper<Proj> f;
};

template <typename Proj, hb_function_sortedness_t Sorted>
@@ -456,8 +457,8 @@ struct hb_filter_iter_t :

private:
Iter it;
hb_reference_wrapper<Pred> p;
hb_reference_wrapper<Proj> f;
mutable hb_reference_wrapper<Pred> p;
mutable hb_reference_wrapper<Proj> f;
};
template <typename Pred, typename Proj>
struct hb_filter_iter_factory_t
@@ -841,7 +842,7 @@ struct
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
auto operator () (Iterable&& it, unsigned count) const HB_AUTO_RETURN
( hb_zip (hb_range (count), it) | hb_map (hb_second) )
( hb_zip (hb_range (count), it) | hb_map_retains_sorting (hb_second) )

/* Specialization arrays. */

4 src/3rdparty/harfbuzz-ng/src/hb-kern.hh vendored
@@ -53,7 +53,7 @@ struct hb_kern_machine_t
return;

buffer->unsafe_to_concat ();
OT::hb_ot_apply_context_t c (1, font, buffer);
OT::hb_ot_apply_context_t c (1, font, buffer, hb_blob_get_empty ());
c.set_lookup_mask (kern_mask);
c.set_lookup_props (OT::LookupFlag::IgnoreMarks);
auto &skippy_iter = c.iter_input;
@@ -70,7 +70,7 @@ struct hb_kern_machine_t
continue;
}

skippy_iter.reset (idx, 1);
skippy_iter.reset (idx);
unsigned unsafe_to;
if (!skippy_iter.next (&unsafe_to))
{

4 src/3rdparty/harfbuzz-ng/src/hb-limits.hh vendored
@@ -89,6 +89,10 @@
#endif


#ifndef HB_GLYF_VAR_COMPOSITE_MAX_AXES
#define HB_GLYF_VAR_COMPOSITE_MAX_AXES 4096
#endif

#ifndef HB_GLYF_MAX_POINTS
#define HB_GLYF_MAX_POINTS 20000
#endif
11 src/3rdparty/harfbuzz-ng/src/hb-machinery.hh vendored
@@ -180,6 +180,9 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
hb_lazy_loader_t<Returned,Subclass,Data,WheresData,Stored>
>::value Funcs;

hb_lazy_loader_t () = default;
hb_lazy_loader_t (const hb_lazy_loader_t &other) = delete;

void init0 () {} /* Init, when memory is already set to 0. No-op for us. */
void init () { instance.set_relaxed (nullptr); }
void fini () { do_destroy (instance.get_acquire ()); init (); }
@@ -278,7 +281,11 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
template <typename T, unsigned int WheresFace>
struct hb_face_lazy_loader_t : hb_lazy_loader_t<T,
hb_face_lazy_loader_t<T, WheresFace>,
hb_face_t, WheresFace> {};
hb_face_t, WheresFace>
{
// Hack; have them here for API parity with hb_table_lazy_loader_t
hb_blob_t *get_blob () { return this->get ()->get_blob (); }
};

template <typename T, unsigned int WheresFace, bool core=false>
struct hb_table_lazy_loader_t : hb_lazy_loader_t<T,
@@ -288,7 +295,7 @@ struct hb_table_lazy_loader_t : hb_lazy_loader_t<T,
{
static hb_blob_t *create (hb_face_t *face)
{
auto c = hb_sanitize_context_t ();
hb_sanitize_context_t c;
if (core)
c.set_num_glyphs (0); // So we don't recurse ad infinitum, or doesn't need num_glyphs
return c.reference_table<T> (face);
2 src/3rdparty/harfbuzz-ng/src/hb-map.h vendored
@@ -44,7 +44,7 @@ HB_BEGIN_DECLS
*
* Since: 1.7.7
*/
#define HB_MAP_VALUE_INVALID ((hb_codepoint_t) -1)
#define HB_MAP_VALUE_INVALID HB_CODEPOINT_INVALID

/**
* hb_map_t:
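For context, hb-map.h is the public C wrapper over the hashmap that follows; typical use of that API looks roughly like this (illustrative only):

#include <hb.h>

static void demo_map (void)
{
  hb_map_t *map = hb_map_create ();
  hb_map_set (map, 1u, 42u);

  hb_codepoint_t hit  = hb_map_get (map, 1u); /* 42 */
  hb_codepoint_t miss = hb_map_get (map, 2u); /* HB_MAP_VALUE_INVALID */
  if (miss == HB_MAP_VALUE_INVALID)
    ; /* not present; after this change the same value is also named HB_CODEPOINT_INVALID */
  (void) hit;

  hb_map_del (map, 1u);
  hb_map_destroy (map);
}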
188 src/3rdparty/harfbuzz-ng/src/hb-map.hh vendored
@@ -45,9 +45,9 @@ struct hb_hashmap_t
hb_hashmap_t () { init (); }
~hb_hashmap_t () { fini (); }

hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { resize (o.population); hb_copy (o, *this); }
hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { alloc (o.population); hb_copy (o, *this); }
hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); resize (o.population); hb_copy (o, *this); return *this; }
hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); alloc (o.population); hb_copy (o, *this); return *this; }
hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }

hb_hashmap_t (std::initializer_list<hb_pair_t<K, V>> lst) : hb_hashmap_t ()
@@ -60,29 +60,32 @@ struct hb_hashmap_t
hb_hashmap_t (const Iterable &o) : hb_hashmap_t ()
{
auto iter = hb_iter (o);
if (iter.is_random_access_iterator)
resize (hb_len (iter));
if (iter.is_random_access_iterator || iter.has_fast_len)
alloc (hb_len (iter));
hb_copy (iter, *this);
}

struct item_t
{
K key;
uint32_t hash : 30;
uint32_t is_real_ : 1;
uint32_t is_used_ : 1;
uint32_t is_tombstone_ : 1;
uint32_t hash : 30;
V value;

item_t () : key (),
is_real_ (false), is_used_ (false),
hash (0),
is_used_ (false), is_tombstone_ (false),
value () {}

// Needed for https://github.com/harfbuzz/harfbuzz/issues/4138
K& get_key () { return key; }
V& get_value () { return value; }

bool is_used () const { return is_used_; }
void set_used (bool is_used) { is_used_ = is_used; }
bool is_tombstone () const { return is_tombstone_; }
void set_tombstone (bool is_tombstone) { is_tombstone_ = is_tombstone; }
bool is_real () const { return is_used_ && !is_tombstone_; }
void set_real (bool is_real) { is_real_ = is_real; }
bool is_real () const { return is_real_; }

template <bool v = minus_one,
hb_enable_if (v == false)>
@@ -98,10 +101,15 @@ struct hb_hashmap_t
bool operator == (const K &o) const { return hb_deref (key) == hb_deref (o); }
bool operator == (const item_t &o) const { return *this == o.key; }
hb_pair_t<K, V> get_pair() const { return hb_pair_t<K, V> (key, value); }
hb_pair_t<const K &, const V &> get_pair_ref() const { return hb_pair_t<const K &, const V &> (key, value); }
hb_pair_t<const K &, V &> get_pair_ref() { return hb_pair_t<const K &, V &> (key, value); }

uint32_t total_hash () const
{ return (hash * 31) + hb_hash (value); }

static constexpr bool is_trivial = std::is_trivially_constructible<K>::value &&
std::is_trivially_destructible<K>::value &&
std::is_trivially_constructible<V>::value &&
std::is_trivially_destructible<V>::value;
};

hb_object_header_t header;
@@ -110,6 +118,7 @@ struct hb_hashmap_t
unsigned int occupancy; /* Including tombstones. */
unsigned int mask;
unsigned int prime;
unsigned int max_chain_length;
item_t *items;

friend void swap (hb_hashmap_t& a, hb_hashmap_t& b)
@@ -123,6 +132,7 @@ struct hb_hashmap_t
hb_swap (a.occupancy, b.occupancy);
hb_swap (a.mask, b.mask);
hb_swap (a.prime, b.prime);
hb_swap (a.max_chain_length, b.max_chain_length);
hb_swap (a.items, b.items);
}
void init ()
@@ -133,16 +143,19 @@ struct hb_hashmap_t
population = occupancy = 0;
mask = 0;
prime = 0;
max_chain_length = 0;
items = nullptr;
}
void fini ()
{
hb_object_fini (this);

if (likely (items)) {
if (likely (items))
{
unsigned size = mask + 1;
for (unsigned i = 0; i < size; i++)
items[i].~item_t ();
if (!item_t::is_trivial)
for (unsigned i = 0; i < size; i++)
items[i].~item_t ();
hb_free (items);
items = nullptr;
}
@@ -157,7 +170,7 @@ struct hb_hashmap_t

bool in_error () const { return !successful; }

bool resize (unsigned new_population = 0)
bool alloc (unsigned new_population = 0)
{
if (unlikely (!successful)) return false;

@@ -171,8 +184,11 @@ struct hb_hashmap_t
successful = false;
return false;
}
for (auto &_ : hb_iter (new_items, new_size))
new (&_) item_t ();
if (!item_t::is_trivial)
for (auto &_ : hb_iter (new_items, new_size))
new (&_) item_t ();
else
hb_memset (new_items, 0, (size_t) new_size * sizeof (item_t));

unsigned int old_size = size ();
item_t *old_items = items;
@@ -181,6 +197,7 @@ struct hb_hashmap_t
population = occupancy = 0;
mask = new_size - 1;
prime = prime_for (power);
max_chain_length = power * 2;
items = new_items;

/* Insert back old items. */
@@ -192,7 +209,8 @@ struct hb_hashmap_t
old_items[i].hash,
std::move (old_items[i].value));
}
old_items[i].~item_t ();
if (!item_t::is_trivial)
old_items[i].~item_t ();
}

hb_free (old_items);
@@ -201,72 +219,124 @@ struct hb_hashmap_t
}

template <typename KK, typename VV>
bool set_with_hash (KK&& key, uint32_t hash, VV&& value, bool is_delete=false)
bool set_with_hash (KK&& key, uint32_t hash, VV&& value, bool overwrite = true)
{
if (unlikely (!successful)) return false;
if (unlikely ((occupancy + occupancy / 2) >= mask && !resize ())) return false;
item_t &item = item_for_hash (key, hash);
if (unlikely ((occupancy + occupancy / 2) >= mask && !alloc ())) return false;

if (is_delete && !(item == key))
return true; /* Trying to delete non-existent key. */
hash &= 0x3FFFFFFF; // We only store lower 30bit of hash
unsigned int tombstone = (unsigned int) -1;
unsigned int i = hash % prime;
unsigned length = 0;
unsigned step = 0;
while (items[i].is_used ())
{
if ((std::is_integral<K>::value || items[i].hash == hash) &&
items[i] == key)
{
if (!overwrite)
return false;
else
break;
}
if (!items[i].is_real () && tombstone == (unsigned) -1)
tombstone = i;
i = (i + ++step) & mask;
length++;
}

item_t &item = items[tombstone == (unsigned) -1 ? i : tombstone];

if (item.is_used ())
{
occupancy--;
if (!item.is_tombstone ())
population--;
population -= item.is_real ();
}

item.key = std::forward<KK> (key);
item.value = std::forward<VV> (value);
item.hash = hash;
item.set_used (true);
item.set_tombstone (is_delete);
item.set_real (true);

occupancy++;
if (!is_delete)
population++;
population++;

if (unlikely (length > max_chain_length) && occupancy * 8 > mask)
alloc (mask - 8); // This ensures we jump to next larger size

return true;
}

template <typename VV>
bool set (const K &key, VV&& value) { return set_with_hash (key, hb_hash (key), std::forward<VV> (value)); }
bool set (const K &key, VV&& value, bool overwrite = true) { return set_with_hash (key, hb_hash (key), std::forward<VV> (value), overwrite); }
template <typename VV>
bool set (K &&key, VV&& value) { return set_with_hash (std::move (key), hb_hash (key), std::forward<VV> (value)); }
bool set (K &&key, VV&& value, bool overwrite = true)
{
uint32_t hash = hb_hash (key);
return set_with_hash (std::move (key), hash, std::forward<VV> (value), overwrite);
}

const V& get_with_hash (const K &key, uint32_t hash) const
{
if (unlikely (!items)) return item_t::default_value ();
auto &item = item_for_hash (key, hash);
return item.is_real () && item == key ? item.value : item_t::default_value ();
if (!items) return item_t::default_value ();
auto *item = fetch_item (key, hb_hash (key));
if (item)
return item->value;
return item_t::default_value ();
}
const V& get (const K &key) const
{
if (unlikely (!items)) return item_t::default_value ();
if (!items) return item_t::default_value ();
return get_with_hash (key, hb_hash (key));
}

void del (const K &key) { set_with_hash (key, hb_hash (key), item_t::default_value (), true); }
void del (const K &key)
{
if (!items) return;
auto *item = fetch_item (key, hb_hash (key));
if (item)
{
item->set_real (false);
population--;
}
}

/* Has interface. */
const V& operator [] (K k) const { return get (k); }
template <typename VV=V>
bool has (K key, VV **vp = nullptr) const
bool has (const K &key, VV **vp = nullptr) const
{
if (unlikely (!items))
return false;
auto &item = item_for_hash (key, hb_hash (key));
if (item.is_real () && item == key)
if (!items) return false;
auto *item = fetch_item (key, hb_hash (key));
if (item)
{
if (vp) *vp = std::addressof (item.value);
if (vp) *vp = std::addressof (item->value);
return true;
}
else
return false;
return false;
}
item_t *fetch_item (const K &key, uint32_t hash) const
{
hash &= 0x3FFFFFFF; // We only store lower 30bit of hash
unsigned int i = hash % prime;
unsigned step = 0;
while (items[i].is_used ())
{
if ((std::is_integral<K>::value || items[i].hash == hash) &&
items[i] == key)
{
if (items[i].is_real ())
return &items[i];
else
return nullptr;
}
i = (i + ++step) & mask;
}
return nullptr;
}
/* Projection. */
V operator () (K k) const { return get (k); }
const V& operator () (K k) const { return get (k); }

unsigned size () const { return mask ? mask + 1 : 0; }

@@ -339,23 +409,21 @@ struct hb_hashmap_t
auto keys_ref () const HB_AUTO_RETURN
(
+ iter_items ()
| hb_map (&item_t::key)
| hb_map (&item_t::get_key)
)
auto keys () const HB_AUTO_RETURN
(
+ iter_items ()
| hb_map (&item_t::key)
+ keys_ref ()
| hb_map (hb_ridentity)
)
auto values_ref () const HB_AUTO_RETURN
(
+ iter_items ()
| hb_map (&item_t::value)
| hb_map (&item_t::get_value)
)
auto values () const HB_AUTO_RETURN
(
+ iter_items ()
| hb_map (&item_t::value)
+ values_ref ()
| hb_map (hb_ridentity)
)

@@ -393,24 +461,6 @@ struct hb_hashmap_t
hb_hashmap_t& operator << (const hb_pair_t<K&&, V&&>& v)
{ set (std::move (v.first), std::move (v.second)); return *this; }

item_t& item_for_hash (const K &key, uint32_t hash) const
{
hash &= 0x3FFFFFFF; // We only store lower 30bit of hash
unsigned int i = hash % prime;
unsigned int step = 0;
unsigned int tombstone = (unsigned) -1;
while (items[i].is_used ())
{
if ((hb_is_same (K, hb_codepoint_t) || items[i].hash == hash) &&
items[i] == key)
return items[i];
if (tombstone == (unsigned) -1 && items[i].is_tombstone ())
tombstone = i;
i = (i + ++step) & mask;
}
return items[tombstone == (unsigned) -1 ? i : tombstone];
}

static unsigned int prime_for (unsigned int shift)
{
/* Following comment and table copied from glib. */
@@ -481,7 +531,7 @@ struct hb_map_t : hb_hashmap_t<hb_codepoint_t,
hb_map_t (hb_map_t &&o) : hashmap (std::move ((hashmap &) o)) {}
hb_map_t& operator= (const hb_map_t&) = default;
hb_map_t& operator= (hb_map_t&&) = default;
hb_map_t (std::initializer_list<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> lst) : hashmap (lst) {}
hb_map_t (std::initializer_list<hb_codepoint_pair_t> lst) : hashmap (lst) {}
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
hb_map_t (const Iterable &o) : hashmap (o) {}
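A compact stand-alone sketch of the probing scheme used above: start at the hash modulo a prime, then step by 1, 2, 3, ... slots masked to the table size, treating deleted entries as "used but not real". This is a simplification for illustration only, not the HarfBuzz container:

#include <cstdint>
#include <vector>

// Toy open-addressing lookup in the spirit of hb_hashmap_t::fetch_item().
struct toy_map_t
{
  struct item_t { uint32_t key = 0; uint32_t value = 0; bool used = false; bool real = false; };
  std::vector<item_t> items = std::vector<item_t> (8);
  unsigned mask = 7, prime = 7;

  item_t *fetch (uint32_t key)
  {
    unsigned i = key % prime;
    unsigned step = 0;
    // The real table keeps its load factor low enough that an empty slot is
    // always reached; the walk is bounded here anyway for safety.
    for (unsigned probes = 0; probes <= mask && items[i].used; probes++)
    {
      if (items[i].key == key)
        return items[i].real ? &items[i] : nullptr; // deleted entries stay 'used' but not 'real'
      i = (i + ++step) & mask;
    }
    return nullptr;
  }
};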
8 src/3rdparty/harfbuzz-ng/src/hb-meta.hh vendored
@@ -153,8 +153,8 @@ struct hb_reference_wrapper
hb_reference_wrapper (T v) : v (v) {}
bool operator == (const hb_reference_wrapper& o) const { return v == o.v; }
bool operator != (const hb_reference_wrapper& o) const { return v != o.v; }
operator T () const { return v; }
T get () const { return v; }
operator T& () { return v; }
T& get () { return v; }
T v;
};
template <typename T>
@@ -163,8 +163,8 @@ struct hb_reference_wrapper<T&>
hb_reference_wrapper (T& v) : v (std::addressof (v)) {}
bool operator == (const hb_reference_wrapper& o) const { return v == o.v; }
bool operator != (const hb_reference_wrapper& o) const { return v != o.v; }
operator T& () const { return *v; }
T& get () const { return *v; }
operator T& () { return *v; }
T& get () { return *v; }
T* v;
};

34 src/3rdparty/harfbuzz-ng/src/hb-multimap.hh vendored
@@ -38,10 +38,10 @@ struct hb_multimap_t
{
void add (hb_codepoint_t k, hb_codepoint_t v)
{
hb_codepoint_t *i;
if (multiples_indices.has (k, &i))
hb_vector_t<hb_codepoint_t> *m;
if (multiples.has (k, &m))
{
multiples_values[*i].push (v);
m->push (v);
return;
}

@@ -51,12 +51,7 @@ struct hb_multimap_t
hb_codepoint_t old = *old_v;
singulars.del (k);

multiples_indices.set (k, multiples_values.length);
auto *vec = multiples_values.push ();

vec->push (old);
vec->push (v);

multiples.set (k, hb_vector_t<hb_codepoint_t> {old, v});
return;
}

@@ -69,22 +64,31 @@ struct hb_multimap_t
if (singulars.has (k, &v))
return hb_array (v, 1);

hb_codepoint_t *i;
if (multiples_indices.has (k, &i))
return multiples_values[*i].as_array ();
hb_vector_t<hb_codepoint_t> *m;
if (multiples.has (k, &m))
return m->as_array ();

return hb_array_t<const hb_codepoint_t> ();
}

bool in_error () const
{
return singulars.in_error () || multiples_indices.in_error () || multiples_values.in_error ();
if (singulars.in_error () || multiples.in_error ())
return true;
for (const auto &m : multiples.values_ref ())
if (m.in_error ())
return true;
return false;
}

void alloc (unsigned size)
{
singulars.alloc (size);
}

protected:
hb_map_t singulars;
hb_map_t multiples_indices;
hb_vector_t<hb_vector_t<hb_codepoint_t>> multiples_values;
hb_hashmap_t<hb_codepoint_t, hb_vector_t<hb_codepoint_t>> multiples;
};
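The rewrite above stores overflow values directly in a hashmap of vectors instead of keeping an index into a side array. A minimal sketch of that layout with standard containers (simplified types, not the HarfBuzz ones):

#include <cstdint>
#include <unordered_map>
#include <vector>

// Sketch of the new hb_multimap_t layout: single values live in one map,
// keys with two or more values own a vector in a second map.
struct toy_multimap_t
{
  std::unordered_map<uint32_t, uint32_t> singulars;
  std::unordered_map<uint32_t, std::vector<uint32_t>> multiples;

  void add (uint32_t k, uint32_t v)
  {
    auto m = multiples.find (k);
    if (m != multiples.end ()) { m->second.push_back (v); return; }

    auto s = singulars.find (k);
    if (s != singulars.end ())
    {
      uint32_t old = s->second;       // promote: key now has two values
      singulars.erase (s);
      multiples[k] = {old, v};
      return;
    }
    singulars[k] = v;
  }
};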
8 src/3rdparty/harfbuzz-ng/src/hb-null.hh vendored
@@ -37,7 +37,7 @@

/* Global nul-content Null pool. Enlarge as necessary. */

#define HB_NULL_POOL_SIZE 520
#define HB_NULL_POOL_SIZE 640

template <typename T, typename>
struct _hb_has_min_size : hb_false_type {};
@@ -176,7 +176,7 @@ template <typename Type>
static inline Type& Crap () {
static_assert (hb_null_size (Type) <= HB_NULL_POOL_SIZE, "Increase HB_NULL_POOL_SIZE.");
Type *obj = reinterpret_cast<Type *> (_hb_CrapPool);
memcpy (obj, &Null (Type), sizeof (*obj));
memcpy (obj, std::addressof (Null (Type)), sizeof (*obj));
return *obj;
}
template <typename QType>
@@ -211,11 +211,11 @@ struct hb_nonnull_ptr_t
T * operator = (T *v_) { return v = v_; }
T * operator -> () const { return get (); }
T & operator * () const { return *get (); }
T ** operator & () const { return &v; }
T ** operator & () const { return std::addressof (v); }
/* Only auto-cast to const types. */
template <typename C> operator const C * () const { return get (); }
operator const char * () const { return (const char *) get (); }
T * get () const { return v ? v : const_cast<T *> (&Null (T)); }
T * get () const { return v ? v : const_cast<T *> (std::addressof (Null (T))); }
T * get_raw () const { return v; }

private:
4 src/3rdparty/harfbuzz-ng/src/hb-open-file.hh vendored
@@ -131,7 +131,7 @@ typedef struct OpenTypeOffsetTable
sfnt_version = sfnt_tag;
/* Take space for numTables, searchRange, entrySelector, RangeShift
* and the TableRecords themselves. */
unsigned num_items = it.len ();
unsigned num_items = hb_len (it);
if (unlikely (!tables.serialize (c, num_items))) return_trace (false);

const char *dir_end = (const char *) c->head;
@@ -145,7 +145,7 @@ typedef struct OpenTypeOffsetTable
unsigned len = blob->length;

/* Allocate room for the table and copy it. */
char *start = (char *) c->allocate_size<void> (len);
char *start = (char *) c->allocate_size<void> (len, false);
if (unlikely (!start)) return false;

TableRecord &rec = tables.arrayZ[i];
39 src/3rdparty/harfbuzz-ng/src/hb-open-type.hh vendored
@@ -312,6 +312,8 @@ struct _hb_has_null<Type, true>
template <typename Type, typename OffsetType, bool has_null=true>
struct OffsetTo : Offset<OffsetType, has_null>
{
using target_t = Type;

// Make sure Type is not unbounded; works only for types that are fully defined at OffsetTo time.
static_assert (has_null == false ||
(hb_has_null_size (Type) || !hb_has_min_size (Type)), "");
@@ -416,12 +418,15 @@ struct OffsetTo : Offset<OffsetType, has_null>
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return_trace (false);
if (unlikely (this->is_null ())) return_trace (true);
//if (unlikely (this->is_null ())) return_trace (true);
if (unlikely ((const char *) base + (unsigned) *this < (const char *) base)) return_trace (false);
return_trace (true);
}

template <typename ...Ts>
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
{
TRACE_SANITIZE (this);
@@ -462,24 +467,16 @@ struct UnsizedArrayOf

HB_DELETE_CREATE_COPY_ASSIGN (UnsizedArrayOf);

const Type& operator [] (int i_) const
const Type& operator [] (unsigned int i) const
{
unsigned int i = (unsigned int) i_;
const Type *p = &arrayZ[i];
if (unlikely ((const void *) p < (const void *) arrayZ)) return Null (Type); /* Overflowed. */
_hb_compiler_memory_r_barrier ();
return *p;
return arrayZ[i];
}
Type& operator [] (int i_)
Type& operator [] (unsigned int i)
{
unsigned int i = (unsigned int) i_;
Type *p = &arrayZ[i];
if (unlikely ((const void *) p < (const void *) arrayZ)) return Crap (Type); /* Overflowed. */
_hb_compiler_memory_r_barrier ();
return *p;
return arrayZ[i];
}

unsigned int get_size (unsigned int len) const
static unsigned int get_size (unsigned int len)
{ return len * Type::static_size; }

template <typename T> operator T * () { return arrayZ; }
@@ -533,6 +530,7 @@ struct UnsizedArrayOf
}

template <typename ...Ts>
HB_ALWAYS_INLINE
bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
{
TRACE_SANITIZE (this);
@@ -721,6 +719,7 @@ struct ArrayOf
}

template <typename ...Ts>
HB_ALWAYS_INLINE
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
{
TRACE_SANITIZE (this);
@@ -736,7 +735,7 @@ struct ArrayOf
bool sanitize_shallow (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (len.sanitize (c) && c->check_array (arrayZ, len));
return_trace (len.sanitize (c) && c->check_array_sized (arrayZ, len, sizeof (LenType)));
}

public:
@@ -797,7 +796,7 @@ template <typename Type>
using List16OfOffset16To = List16OfOffsetTo<Type, HBUINT16>;

/* An array starting at second element. */
template <typename Type, typename LenType=HBUINT16>
template <typename Type, typename LenType>
struct HeadlessArrayOf
{
static constexpr unsigned item_size = Type::static_size;
@@ -861,6 +860,7 @@ struct HeadlessArrayOf
}

template <typename ...Ts>
HB_ALWAYS_INLINE
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
{
TRACE_SANITIZE (this);
@@ -878,7 +878,7 @@ struct HeadlessArrayOf
{
TRACE_SANITIZE (this);
return_trace (lenP1.sanitize (c) &&
(!lenP1 || c->check_array (arrayZ, lenP1 - 1)));
(!lenP1 || c->check_array_sized (arrayZ, lenP1 - 1, sizeof (LenType))));
}

public:
@@ -887,6 +887,7 @@ struct HeadlessArrayOf
public:
DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
};
template <typename Type> using HeadlessArray16Of = HeadlessArrayOf<Type, HBUINT16>;

/* An array storing length-1. */
template <typename Type, typename LenType=HBUINT16>
@@ -912,6 +913,7 @@ struct ArrayOfM1
{ return lenM1.static_size + (lenM1 + 1) * Type::static_size; }

template <typename ...Ts>
HB_ALWAYS_INLINE
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
{
TRACE_SANITIZE (this);
@@ -929,7 +931,7 @@ struct ArrayOfM1
{
TRACE_SANITIZE (this);
return_trace (lenM1.sanitize (c) &&
(c->check_array (arrayZ, lenM1 + 1)));
(c->check_array_sized (arrayZ, lenM1 + 1, sizeof (LenType))));
}

public:
@@ -1096,6 +1098,7 @@ struct VarSizedBinSearchArrayOf
{ return header.static_size + header.nUnits * header.unitSize; }

template <typename ...Ts>
HB_ALWAYS_INLINE
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
{
TRACE_SANITIZE (this);
269 src/3rdparty/harfbuzz-ng/src/hb-ot-cff-common.hh vendored
@@ -48,12 +48,24 @@ static inline const Type& StructAtOffsetOrNull (const void *P, unsigned int offs

struct code_pair_t
{
hb_codepoint_t code;
unsigned code;
hb_codepoint_t glyph;
};


using str_buff_t = hb_vector_t<unsigned char>;
using str_buff_vec_t = hb_vector_t<str_buff_t>;
using glyph_to_sid_map_t = hb_vector_t<code_pair_t>;

struct length_f_t
{
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
unsigned operator () (const Iterable &_) const { return hb_len (hb_iter (_)); }

unsigned operator () (unsigned _) const { return _; }
}
HB_FUNCOBJ (length_f);

/* CFF INDEX */
template <typename COUNT>
@@ -62,42 +74,52 @@ struct CFFIndex
unsigned int offset_array_size () const
{ return offSize * (count + 1); }

CFFIndex *copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
unsigned int size = get_size ();
CFFIndex *out = c->allocate_size<CFFIndex> (size, false);
if (likely (out))
hb_memcpy (out, this, size);
return_trace (out);
}

template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
bool serialize (hb_serialize_context_t *c,
const Iterable &iterable)
const Iterable &iterable,
const unsigned *p_data_size = nullptr)
{
TRACE_SERIALIZE (this);
unsigned data_size;
if (p_data_size)
data_size = *p_data_size;
else
total_size (iterable, &data_size);

auto it = hb_iter (iterable);
serialize_header(c, + it | hb_map (hb_iter) | hb_map (hb_len));
if (unlikely (!serialize_header (c, +it, data_size))) return_trace (false);
unsigned char *ret = c->allocate_size<unsigned char> (data_size, false);
if (unlikely (!ret)) return_trace (false);
for (const auto &_ : +it)
hb_iter (_).copy (c);
{
unsigned len = _.length;
if (!len)
continue;
if (len <= 1)
{
*ret++ = *_.arrayZ;
continue;
}
hb_memcpy (ret, _.arrayZ, len);
ret += len;
}
return_trace (true);
}

template <typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool serialize_header (hb_serialize_context_t *c,
Iterator it)
Iterator it,
unsigned data_size)
{
TRACE_SERIALIZE (this);

unsigned total = + it | hb_reduce (hb_add, 0);
unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8;

/* serialize CFFIndex header */
if (unlikely (!c->extend_min (this))) return_trace (false);
this->count = it.len ();
this->count = hb_len (it);
if (!this->count) return_trace (true);
if (unlikely (!c->extend (this->offSize))) return_trace (false);
this->offSize = off_size;
@@ -106,25 +128,88 @@ struct CFFIndex

/* serialize indices */
unsigned int offset = 1;
unsigned int i = 0;
for (unsigned _ : +it)
if (HB_OPTIMIZE_SIZE_VAL)
{
set_offset_at (i++, offset);
offset += _;
unsigned int i = 0;
for (const auto &_ : +it)
{
set_offset_at (i++, offset);
offset += length_f (_);
}
set_offset_at (i, offset);
}
set_offset_at (i, offset);
else
switch (off_size)
{
case 1:
{
HBUINT8 *p = (HBUINT8 *) offsets;
for (const auto &_ : +it)
{
*p++ = offset;
offset += length_f (_);
}
*p = offset;
}
break;
case 2:
{
HBUINT16 *p = (HBUINT16 *) offsets;
for (const auto &_ : +it)
{
*p++ = offset;
offset += length_f (_);
}
*p = offset;
}
break;
case 3:
{
HBUINT24 *p = (HBUINT24 *) offsets;
for (const auto &_ : +it)
{
*p++ = offset;
offset += length_f (_);
}
*p = offset;
}
break;
case 4:
{
HBUINT32 *p = (HBUINT32 *) offsets;
for (const auto &_ : +it)
{
*p++ = offset;
offset += length_f (_);
}
*p = offset;
}
break;
default:
break;
}

assert (offset == data_size + 1);
return_trace (true);
}

template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
static unsigned total_size (const Iterable &iterable)
static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr)
{
auto it = + hb_iter (iterable) | hb_map (hb_iter) | hb_map (hb_len);
if (!it) return 0;
auto it = + hb_iter (iterable);
if (!it)
{
if (data_size) *data_size = 0;
return min_size;
}

unsigned total = 0;
for (const auto &_ : +it)
total += length_f (_);

if (data_size) *data_size = total;

unsigned total = + it | hb_reduce (hb_add, 0);
unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;

return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
@@ -133,13 +218,16 @@ struct CFFIndex
void set_offset_at (unsigned int index, unsigned int offset)
{
assert (index <= count);
HBUINT8 *p = offsets + offSize * index + offSize;

unsigned int size = offSize;
for (; size; size--)
const HBUINT8 *p = offsets;
switch (size)
{
--p;
*p = offset & 0xFF;
offset >>= 8;
case 1: ((HBUINT8 *) p)[index] = offset; break;
case 2: ((HBUINT16 *) p)[index] = offset; break;
case 3: ((HBUINT24 *) p)[index] = offset; break;
case 4: ((HBUINT32 *) p)[index] = offset; break;
default: return;
}
}

@@ -149,37 +237,30 @@ struct CFFIndex
assert (index <= count);

unsigned int size = offSize;
const HBUINT8 *p = offsets + size * index;
const HBUINT8 *p = offsets;
switch (size)
{
case 1: return * (HBUINT8 *) p;
case 2: return * (HBUINT16 *) p;
case 3: return * (HBUINT24 *) p;
case 4: return * (HBUINT32 *) p;
case 1: return ((HBUINT8 *) p)[index];
case 2: return ((HBUINT16 *) p)[index];
case 3: return ((HBUINT24 *) p)[index];
case 4: return ((HBUINT32 *) p)[index];
default: return 0;
}
}

unsigned int length_at (unsigned int index) const
{
unsigned offset0 = offset_at (index);
unsigned offset1 = offset_at (index + 1);
if (unlikely (offset1 < offset0 || offset1 > offset_at (count)))
return 0;
return offset1 - offset0;
}

const unsigned char *data_base () const
{ return (const unsigned char *) this + min_size + offSize.static_size + offset_array_size (); }
{ return (const unsigned char *) this + min_size + offSize.static_size - 1 + offset_array_size (); }
public:

hb_ubytes_t operator [] (unsigned int index) const
{
if (unlikely (index >= count)) return hb_ubytes_t ();
_hb_compiler_memory_r_barrier ();
unsigned length = length_at (index);
if (unlikely (!length)) return hb_ubytes_t ();
return hb_ubytes_t (data_base () + offset_at (index) - 1, length);
unsigned offset0 = offset_at (index);
unsigned offset1 = offset_at (index + 1);
if (unlikely (offset1 < offset0 || offset1 > offset_at (count)))
return hb_ubytes_t ();
return hb_ubytes_t (data_base () + offset0, offset1 - offset0);
}

unsigned int get_size () const
@@ -197,7 +278,7 @@ struct CFFIndex
(count < count + 1u &&
c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
c->check_array (offsets, offSize, count + 1u) &&
c->check_array ((const HBUINT8*) data_base (), 1, offset_at (count) - 1)))));
c->check_array ((const HBUINT8*) data_base (), 1, offset_at (count))))));
}

public:
@@ -211,47 +292,6 @@ struct CFFIndex
DEFINE_SIZE_MIN (COUNT::static_size);
};

template <typename COUNT, typename TYPE>
struct CFFIndexOf : CFFIndex<COUNT>
{
template <typename DATA, typename PARAM1, typename PARAM2>
bool serialize (hb_serialize_context_t *c,
unsigned int offSize_,
const DATA *dataArray,
unsigned int dataArrayLen,
const hb_vector_t<unsigned int> &dataSizeArray,
const PARAM1 &param1,
const PARAM2 &param2)
{
TRACE_SERIALIZE (this);
/* serialize CFFIndex header */
if (unlikely (!c->extend_min (this))) return_trace (false);
this->count = dataArrayLen;
this->offSize = offSize_;
if (unlikely (!c->allocate_size<HBUINT8> (offSize_ * (dataArrayLen + 1), false)))
return_trace (false);

/* serialize indices */
unsigned int offset = 1;
unsigned int i = 0;
for (; i < dataArrayLen; i++)
{
this->set_offset_at (i, offset);
offset += dataSizeArray[i];
}
this->set_offset_at (i, offset);

/* serialize data */
for (unsigned int i = 0; i < dataArrayLen; i++)
{
TYPE *dest = c->start_embed<TYPE> ();
if (unlikely (!dest || !dest->serialize (c, dataArray[i], param1, param2)))
return_trace (false);
}
return_trace (true);
}
};

/* Top Dict, Font Dict, Private Dict */
struct Dict : UnsizedByteStr
{
@@ -327,7 +367,7 @@ struct table_info_t
};

template <typename COUNT>
struct FDArray : CFFIndexOf<COUNT, FontDict>
struct FDArray : CFFIndex<COUNT>
{
template <typename DICTVAL, typename INFO, typename Iterator, typename OP_SERIALIZER>
bool serialize (hb_serialize_context_t *c,
@@ -338,7 +378,11 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>

/* serialize INDEX data */
hb_vector_t<unsigned> sizes;
if (it.is_random_access_iterator)
sizes.alloc (hb_len (it));

c->push ();
char *data_base = c->head;
+ it
| hb_map ([&] (const hb_pair_t<const DICTVAL&, const INFO&> &_)
{
@@ -348,10 +392,16 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>
})
| hb_sink (sizes)
;
unsigned data_size = c->head - data_base;
c->pop_pack (false);

if (unlikely (sizes.in_error ())) return_trace (false);

/* It just happens that the above is packed right after the header below.
* Such a hack. */

/* serialize INDEX header */
return_trace (CFFIndex<COUNT>::serialize_header (c, hb_iter (sizes)));
return_trace (CFFIndex<COUNT>::serialize_header (c, hb_iter (sizes), data_size));
}
};

@@ -368,8 +418,11 @@ struct FDSelect0 {
return_trace (true);
}

hb_codepoint_t get_fd (hb_codepoint_t glyph) const
{ return (hb_codepoint_t) fds[glyph]; }
unsigned get_fd (hb_codepoint_t glyph) const
{ return fds[glyph]; }

hb_pair_t<unsigned, hb_codepoint_t> get_fd_range (hb_codepoint_t glyph) const
{ return {fds[glyph], glyph + 1}; }

unsigned int get_size (unsigned int num_glyphs) const
{ return HBUINT8::static_size * num_glyphs; }
@@ -427,12 +480,20 @@ struct FDSelect3_4
return +1;
}

hb_codepoint_t get_fd (hb_codepoint_t glyph) const
unsigned get_fd (hb_codepoint_t glyph) const
{
auto *range = hb_bsearch (glyph, &ranges[0], nRanges () - 1, sizeof (ranges[0]), _cmp_range);
return range ? range->fd : ranges[nRanges () - 1].fd;
}

hb_pair_t<unsigned, hb_codepoint_t> get_fd_range (hb_codepoint_t glyph) const
{
auto *range = hb_bsearch (glyph, &ranges[0], nRanges () - 1, sizeof (ranges[0]), _cmp_range);
unsigned fd = range ? range->fd : ranges[nRanges () - 1].fd;
hb_codepoint_t end = range ? range[1].first : ranges[nRanges () - 1].first;
return {fd, end};
}

GID_TYPE &nRanges () { return ranges.len; }
GID_TYPE nRanges () const { return ranges.len; }
GID_TYPE &sentinel () { return StructAfter<GID_TYPE> (ranges[nRanges () - 1]); }
@@ -469,7 +530,7 @@ struct FDSelect
}
}

hb_codepoint_t get_fd (hb_codepoint_t glyph) const
unsigned get_fd (hb_codepoint_t glyph) const
{
if (this == &Null (FDSelect)) return 0;

@@ -480,6 +541,18 @@ struct FDSelect
default:return 0;
}
}
/* Returns pair of fd and one after last glyph in range. */
hb_pair_t<unsigned, hb_codepoint_t> get_fd_range (hb_codepoint_t glyph) const
|
||||
{
|
||||
if (this == &Null (FDSelect)) return {0, 1};
|
||||
|
||||
switch (format)
|
||||
{
|
||||
case 0: return u.format0.get_fd_range (glyph);
|
||||
case 3: return u.format3.get_fd_range (glyph);
|
||||
default:return {0, 1};
|
||||
}
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const
|
||||
{
|
||||
|
@ -574,11 +574,11 @@ bool OT::cff1::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, h
|
||||
|
||||
struct get_seac_param_t
|
||||
{
|
||||
get_seac_param_t (const OT::cff1::accelerator_t *_cff) : cff (_cff) {}
|
||||
get_seac_param_t (const OT::cff1::accelerator_subset_t *_cff) : cff (_cff) {}
|
||||
|
||||
bool has_seac () const { return base && accent; }
|
||||
|
||||
const OT::cff1::accelerator_t *cff;
|
||||
const OT::cff1::accelerator_subset_t *cff;
|
||||
hb_codepoint_t base = 0;
|
||||
hb_codepoint_t accent = 0;
|
||||
};
|
||||
@ -596,7 +596,7 @@ struct cff1_cs_opset_seac_t : cff1_cs_opset_t<cff1_cs_opset_seac_t, get_seac_par
|
||||
}
|
||||
};
|
||||
|
||||
bool OT::cff1::accelerator_t::get_seac_components (hb_codepoint_t glyph, hb_codepoint_t *base, hb_codepoint_t *accent) const
|
||||
bool OT::cff1::accelerator_subset_t::get_seac_components (hb_codepoint_t glyph, hb_codepoint_t *base, hb_codepoint_t *accent) const
|
||||
{
|
||||
if (unlikely (!is_valid () || (glyph >= num_glyphs))) return false;
|
||||
|

288 src/3rdparty/harfbuzz-ng/src/hb-ot-cff1-table.hh vendored
@ -28,7 +28,7 @@
|
||||
#define HB_OT_CFF1_TABLE_HH
|
||||
|
||||
#include "hb-ot-cff-common.hh"
|
||||
#include "hb-subset-cff1.hh"
|
||||
#include "hb-subset-cff-common.hh"
|
||||
#include "hb-draw.hh"
|
||||
#include "hb-paint.hh"
|
||||
|
||||
@ -52,7 +52,6 @@ enum EncodingID { StandardEncoding = 0, ExpertEncoding = 1 };
|
||||
enum CharsetID { ISOAdobeCharset = 0, ExpertCharset = 1, ExpertSubsetCharset = 2 };
|
||||
|
||||
typedef CFFIndex<HBUINT16> CFF1Index;
|
||||
template <typename Type> struct CFF1IndexOf : CFFIndexOf<HBUINT16, Type> {};
|
||||
|
||||
typedef CFFIndex<HBUINT16> CFF1Index;
|
||||
typedef CFF1Index CFF1CharStrings;
|
||||
@ -110,6 +109,7 @@ struct Encoding1 {
|
||||
|
||||
hb_codepoint_t get_code (hb_codepoint_t glyph) const
|
||||
{
|
||||
/* TODO: Add cache like get_sid. */
|
||||
assert (glyph > 0);
|
||||
glyph--;
|
||||
for (unsigned int i = 0; i < nRanges (); i++)
|
||||
@ -173,11 +173,7 @@ struct Encoding
|
||||
bool serialize (hb_serialize_context_t *c, const Encoding &src)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned int size = src.get_size ();
|
||||
Encoding *dest = c->allocate_size<Encoding> (size);
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
hb_memcpy (dest, &src, size);
|
||||
return_trace (true);
|
||||
return_trace (c->embed (src));
|
||||
}
|
||||
|
||||
/* serialize a subset Encoding */
|
||||
@ -312,26 +308,29 @@ struct Encoding
|
||||
};
|
||||
|
||||
/* Charset */
|
||||
struct Charset0 {
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs) const
|
||||
struct Charset0
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs, unsigned *num_charset_entries) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this) && sids[num_glyphs - 1].sanitize (c));
|
||||
if (num_charset_entries) *num_charset_entries = num_glyphs;
|
||||
return_trace (sids.sanitize (c, num_glyphs - 1));
|
||||
}
|
||||
|
||||
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned num_glyphs) const
|
||||
{
|
||||
if (unlikely (glyph >= num_glyphs)) return 0;
|
||||
if (glyph == 0)
|
||||
if (unlikely (glyph == 0))
|
||||
return 0;
|
||||
else
|
||||
return sids[glyph - 1];
|
||||
}
|
||||
|
||||
void collect_glyph_to_sid_map (hb_map_t *mapping, unsigned int num_glyphs) const
|
||||
void collect_glyph_to_sid_map (glyph_to_sid_map_t *mapping, unsigned int num_glyphs) const
|
||||
{
|
||||
mapping->resize (num_glyphs, false);
|
||||
for (hb_codepoint_t gid = 1; gid < num_glyphs; gid++)
|
||||
mapping->set (gid, sids[gid - 1]);
|
||||
mapping->arrayZ[gid] = {sids[gid - 1], gid};
|
||||
}
|
||||
|
||||
hb_codepoint_t get_glyph (hb_codepoint_t sid, unsigned int num_glyphs) const
|
||||
@ -347,13 +346,13 @@ struct Charset0 {
|
||||
return 0;
|
||||
}
|
||||
|
||||
unsigned int get_size (unsigned int num_glyphs) const
|
||||
static unsigned int get_size (unsigned int num_glyphs)
|
||||
{
|
||||
assert (num_glyphs > 0);
|
||||
return HBUINT16::static_size * (num_glyphs - 1);
|
||||
return UnsizedArrayOf<HBUINT16>::get_size (num_glyphs - 1);
|
||||
}
|
||||
|
||||
HBUINT16 sids[HB_VAR_ARRAY];
|
||||
UnsizedArrayOf<HBUINT16> sids;
|
||||
|
||||
DEFINE_SIZE_ARRAY(0, sids);
|
||||
};
|
||||
@ -374,38 +373,62 @@ struct Charset_Range {
|
||||
|
||||
template <typename TYPE>
|
||||
struct Charset1_2 {
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs) const
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs, unsigned *num_charset_entries) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!c->check_struct (this)))
|
||||
return_trace (false);
|
||||
num_glyphs--;
|
||||
for (unsigned int i = 0; num_glyphs > 0; i++)
|
||||
unsigned i;
|
||||
for (i = 0; num_glyphs > 0; i++)
|
||||
{
|
||||
if (unlikely (!ranges[i].sanitize (c) || (num_glyphs < ranges[i].nLeft + 1)))
|
||||
return_trace (false);
|
||||
num_glyphs -= (ranges[i].nLeft + 1);
|
||||
}
|
||||
if (num_charset_entries)
|
||||
*num_charset_entries = i;
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned num_glyphs) const
|
||||
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned num_glyphs,
|
||||
code_pair_t *cache = nullptr) const
|
||||
{
|
||||
if (unlikely (glyph >= num_glyphs)) return 0;
|
||||
if (glyph == 0) return 0;
|
||||
glyph--;
|
||||
for (unsigned int i = 0;; i++)
|
||||
unsigned i;
|
||||
hb_codepoint_t start_glyph;
|
||||
if (cache && likely (cache->glyph <= glyph))
|
||||
{
|
||||
if (glyph <= ranges[i].nLeft)
|
||||
return (hb_codepoint_t) ranges[i].first + glyph;
|
||||
glyph -= (ranges[i].nLeft + 1);
|
||||
i = cache->code;
|
||||
start_glyph = cache->glyph;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (unlikely (glyph == 0)) return 0;
|
||||
i = 0;
|
||||
start_glyph = 1;
|
||||
}
|
||||
glyph -= start_glyph;
|
||||
for (;; i++)
|
||||
{
|
||||
unsigned count = ranges[i].nLeft;
|
||||
if (glyph <= count)
|
||||
{
|
||||
if (cache)
|
||||
*cache = {i, start_glyph};
|
||||
return ranges[i].first + glyph;
|
||||
}
|
||||
count++;
|
||||
start_glyph += count;
|
||||
glyph -= count;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
void collect_glyph_to_sid_map (hb_map_t *mapping, unsigned int num_glyphs) const
|
||||
void collect_glyph_to_sid_map (glyph_to_sid_map_t *mapping, unsigned int num_glyphs) const
|
||||
{
|
||||
mapping->resize (num_glyphs, false);
|
||||
hb_codepoint_t gid = 1;
|
||||
if (gid >= num_glyphs)
|
||||
return;
|
||||
@ -413,8 +436,9 @@ struct Charset1_2 {
|
||||
{
|
||||
hb_codepoint_t sid = ranges[i].first;
|
||||
unsigned count = ranges[i].nLeft + 1;
|
||||
unsigned last = gid + count;
|
||||
for (unsigned j = 0; j < count; j++)
|
||||
mapping->set (gid++, sid++);
|
||||
mapping->arrayZ[gid++] = {sid++, last - 1};
|
||||
|
||||
if (gid >= num_glyphs)
|
||||
break;
|
||||
@ -439,21 +463,26 @@ struct Charset1_2 {
|
||||
|
||||
unsigned int get_size (unsigned int num_glyphs) const
|
||||
{
|
||||
unsigned int size = HBUINT8::static_size;
|
||||
int glyph = (int)num_glyphs;
|
||||
int glyph = (int) num_glyphs;
|
||||
unsigned num_ranges = 0;
|
||||
|
||||
assert (glyph > 0);
|
||||
glyph--;
|
||||
for (unsigned int i = 0; glyph > 0; i++)
|
||||
{
|
||||
glyph -= (ranges[i].nLeft + 1);
|
||||
size += Charset_Range<TYPE>::static_size;
|
||||
num_ranges++;
|
||||
}
|
||||
|
||||
return size;
|
||||
return get_size_for_ranges (num_ranges);
|
||||
}
|
||||
|
||||
Charset_Range<TYPE> ranges[HB_VAR_ARRAY];
|
||||
static unsigned int get_size_for_ranges (unsigned int num_ranges)
|
||||
{
|
||||
return UnsizedArrayOf<Charset_Range<TYPE> >::get_size (num_ranges);
|
||||
}
|
||||
|
||||
UnsizedArrayOf<Charset_Range<TYPE>> ranges;
|
||||
|
||||
DEFINE_SIZE_ARRAY (0, ranges);
|
||||
};
|
||||
@ -469,11 +498,7 @@ struct Charset
|
||||
bool serialize (hb_serialize_context_t *c, const Charset &src, unsigned int num_glyphs)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned int size = src.get_size (num_glyphs);
|
||||
Charset *dest = c->allocate_size<Charset> (size);
|
||||
if (unlikely (!dest)) return_trace (false);
|
||||
hb_memcpy (dest, &src, size);
|
||||
return_trace (true);
|
||||
return_trace (c->embed ((const char *) &src, src.get_size (num_glyphs)));
|
||||
}
|
||||
|
||||
/* serialize a subset Charset */
|
||||
@ -490,13 +515,13 @@ struct Charset
|
||||
{
|
||||
case 0:
|
||||
{
|
||||
Charset0 *fmt0 = c->allocate_size<Charset0> (Charset0::min_size + HBUINT16::static_size * (num_glyphs - 1));
|
||||
Charset0 *fmt0 = c->allocate_size<Charset0> (Charset0::get_size (num_glyphs), false);
|
||||
if (unlikely (!fmt0)) return_trace (false);
|
||||
unsigned int glyph = 0;
|
||||
for (unsigned int i = 0; i < sid_ranges.length; i++)
|
||||
{
|
||||
hb_codepoint_t sid = sid_ranges[i].code;
|
||||
for (int left = (int)sid_ranges[i].glyph; left >= 0; left--)
|
||||
hb_codepoint_t sid = sid_ranges.arrayZ[i].code;
|
||||
for (int left = (int)sid_ranges.arrayZ[i].glyph; left >= 0; left--)
|
||||
fmt0->sids[glyph++] = sid++;
|
||||
}
|
||||
}
|
||||
@ -504,29 +529,35 @@ struct Charset
|
||||
|
||||
case 1:
|
||||
{
|
||||
Charset1 *fmt1 = c->allocate_size<Charset1> (Charset1::min_size + Charset1_Range::static_size * sid_ranges.length);
|
||||
Charset1 *fmt1 = c->allocate_size<Charset1> (Charset1::get_size_for_ranges (sid_ranges.length), false);
|
||||
if (unlikely (!fmt1)) return_trace (false);
|
||||
hb_codepoint_t all_glyphs = 0;
|
||||
for (unsigned int i = 0; i < sid_ranges.length; i++)
|
||||
{
|
||||
if (unlikely (!(sid_ranges[i].glyph <= 0xFF)))
|
||||
return_trace (false);
|
||||
fmt1->ranges[i].first = sid_ranges[i].code;
|
||||
fmt1->ranges[i].nLeft = sid_ranges[i].glyph;
|
||||
auto &_ = sid_ranges.arrayZ[i];
|
||||
all_glyphs |= _.glyph;
|
||||
fmt1->ranges[i].first = _.code;
|
||||
fmt1->ranges[i].nLeft = _.glyph;
|
||||
}
|
||||
if (unlikely (!(all_glyphs <= 0xFF)))
|
||||
return_trace (false);
|
||||
}
|
||||
break;
|
||||
|
||||
case 2:
|
||||
{
|
||||
Charset2 *fmt2 = c->allocate_size<Charset2> (Charset2::min_size + Charset2_Range::static_size * sid_ranges.length);
|
||||
Charset2 *fmt2 = c->allocate_size<Charset2> (Charset2::get_size_for_ranges (sid_ranges.length), false);
|
||||
if (unlikely (!fmt2)) return_trace (false);
|
||||
hb_codepoint_t all_glyphs = 0;
|
||||
for (unsigned int i = 0; i < sid_ranges.length; i++)
|
||||
{
|
||||
if (unlikely (!(sid_ranges[i].glyph <= 0xFFFF)))
|
||||
return_trace (false);
|
||||
fmt2->ranges[i].first = sid_ranges[i].code;
|
||||
fmt2->ranges[i].nLeft = sid_ranges[i].glyph;
|
||||
auto &_ = sid_ranges.arrayZ[i];
|
||||
all_glyphs |= _.glyph;
|
||||
fmt2->ranges[i].first = _.code;
|
||||
fmt2->ranges[i].nLeft = _.glyph;
|
||||
}
|
||||
if (unlikely (!(all_glyphs <= 0xFFFF)))
|
||||
return_trace (false);
|
||||
}
|
||||
break;
|
||||
|
||||
@ -545,18 +576,19 @@ struct Charset
|
||||
}
|
||||
}
|
||||
|
||||
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned int num_glyphs) const
|
||||
hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned int num_glyphs,
|
||||
code_pair_t *cache = nullptr) const
|
||||
{
|
||||
switch (format)
|
||||
{
|
||||
case 0: return u.format0.get_sid (glyph, num_glyphs);
|
||||
case 1: return u.format1.get_sid (glyph, num_glyphs);
|
||||
case 2: return u.format2.get_sid (glyph, num_glyphs);
|
||||
case 1: return u.format1.get_sid (glyph, num_glyphs, cache);
|
||||
case 2: return u.format2.get_sid (glyph, num_glyphs, cache);
|
||||
default:return 0;
|
||||
}
|
||||
}
|
||||
|
||||
void collect_glyph_to_sid_map (hb_map_t *mapping, unsigned int num_glyphs) const
|
||||
void collect_glyph_to_sid_map (glyph_to_sid_map_t *mapping, unsigned int num_glyphs) const
|
||||
{
|
||||
switch (format)
|
||||
{
|
||||
@ -578,7 +610,7 @@ struct Charset
|
||||
}
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned *num_charset_entries) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!c->check_struct (this)))
|
||||
@ -586,9 +618,9 @@ struct Charset
|
||||
|
||||
switch (format)
|
||||
{
|
||||
case 0: return_trace (u.format0.sanitize (c, c->get_num_glyphs ()));
|
||||
case 1: return_trace (u.format1.sanitize (c, c->get_num_glyphs ()));
|
||||
case 2: return_trace (u.format2.sanitize (c, c->get_num_glyphs ()));
|
||||
case 0: return_trace (u.format0.sanitize (c, c->get_num_glyphs (), num_charset_entries));
|
||||
case 1: return_trace (u.format1.sanitize (c, c->get_num_glyphs (), num_charset_entries));
|
||||
case 2: return_trace (u.format2.sanitize (c, c->get_num_glyphs (), num_charset_entries));
|
||||
default:return_trace (false);
|
||||
}
|
||||
}
|
||||
@ -606,10 +638,10 @@ struct Charset
|
||||
struct CFF1StringIndex : CFF1Index
|
||||
{
|
||||
bool serialize (hb_serialize_context_t *c, const CFF1StringIndex &strings,
|
||||
const hb_inc_bimap_t &sidmap)
|
||||
const hb_vector_t<unsigned> &sidmap)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely ((strings.count == 0) || (sidmap.get_population () == 0)))
|
||||
if (unlikely ((strings.count == 0) || (sidmap.length == 0)))
|
||||
{
|
||||
if (unlikely (!c->extend_min (this->count)))
|
||||
return_trace (false);
|
||||
@ -617,15 +649,13 @@ struct CFF1StringIndex : CFF1Index
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
byte_str_array_t bytesArray;
|
||||
if (!bytesArray.resize (sidmap.get_population ()))
|
||||
return_trace (false);
|
||||
for (unsigned int i = 0; i < strings.count; i++)
|
||||
{
|
||||
hb_codepoint_t j = sidmap[i];
|
||||
if (j != HB_MAP_VALUE_INVALID)
|
||||
bytesArray[j] = strings[i];
|
||||
}
|
||||
if (unlikely (sidmap.in_error ())) return_trace (false);
|
||||
|
||||
// Save this in a vector since serialize() iterates it twice.
|
||||
hb_vector_t<hb_ubytes_t> bytesArray (+ hb_iter (sidmap)
|
||||
| hb_map (strings));
|
||||
|
||||
if (unlikely (bytesArray.in_error ())) return_trace (false);
|
||||
|
||||
bool result = CFF1Index::serialize (c, bytesArray);
|
||||
return_trace (result);
|
||||
@ -932,7 +962,7 @@ struct cff1_private_dict_opset_t : dict_opset_t
|
||||
}
|
||||
};
|
||||
|
||||
struct cff1_private_dict_opset_subset : dict_opset_t
|
||||
struct cff1_private_dict_opset_subset_t : dict_opset_t
|
||||
{
|
||||
static void process_op (op_code_t op, num_interp_env_t& env, cff1_private_dict_values_subset_t& dictval)
|
||||
{
|
||||
@ -978,7 +1008,7 @@ typedef dict_interpreter_t<cff1_top_dict_opset_t, cff1_top_dict_values_t, cff1_t
|
||||
typedef dict_interpreter_t<cff1_font_dict_opset_t, cff1_font_dict_values_t> cff1_font_dict_interpreter_t;
|
||||
|
||||
typedef CFF1Index CFF1NameIndex;
|
||||
typedef CFF1IndexOf<TopDict> CFF1TopDictIndex;
|
||||
typedef CFF1Index CFF1TopDictIndex;
|
||||
|
||||
struct cff1_font_dict_values_mod_t
|
||||
{
|
||||
@ -1031,8 +1061,12 @@ struct cff1
|
||||
template <typename PRIVOPSET, typename PRIVDICTVAL>
|
||||
struct accelerator_templ_t
|
||||
{
|
||||
void init (hb_face_t *face)
|
||||
static constexpr hb_tag_t tableTag = cff1::tableTag;
|
||||
|
||||
accelerator_templ_t (hb_face_t *face)
|
||||
{
|
||||
if (!face) return;
|
||||
|
||||
topDict.init ();
|
||||
fontDicts.init ();
|
||||
privateDicts.init ();
|
||||
@ -1046,22 +1080,22 @@ struct cff1
|
||||
const OT::cff1 *cff = this->blob->template as<OT::cff1> ();
|
||||
|
||||
if (cff == &Null (OT::cff1))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
nameIndex = &cff->nameIndex (cff);
|
||||
if ((nameIndex == &Null (CFF1NameIndex)) || !nameIndex->sanitize (&sc))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
topDictIndex = &StructAtOffset<CFF1TopDictIndex> (nameIndex, nameIndex->get_size ());
|
||||
if ((topDictIndex == &Null (CFF1TopDictIndex)) || !topDictIndex->sanitize (&sc) || (topDictIndex->count == 0))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
{ /* parse top dict */
|
||||
const hb_ubytes_t topDictStr = (*topDictIndex)[0];
|
||||
if (unlikely (!topDictStr.sanitize (&sc))) { fini (); return; }
|
||||
if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
|
||||
cff1_top_dict_interp_env_t env (topDictStr);
|
||||
cff1_top_dict_interpreter_t top_interp (env);
|
||||
if (unlikely (!top_interp.interpret (topDict))) { fini (); return; }
|
||||
if (unlikely (!top_interp.interpret (topDict))) goto fail;
|
||||
}
|
||||
|
||||
if (is_predef_charset ())
|
||||
@ -1069,7 +1103,7 @@ struct cff1
|
||||
else
|
||||
{
|
||||
charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset);
|
||||
if (unlikely ((charset == &Null (Charset)) || !charset->sanitize (&sc))) { fini (); return; }
|
||||
if (unlikely ((charset == &Null (Charset)) || !charset->sanitize (&sc, &num_charset_entries))) goto fail;
|
||||
}
|
||||
|
||||
fdCount = 1;
|
||||
@ -1079,7 +1113,7 @@ struct cff1
|
||||
fdSelect = &StructAtOffsetOrNull<CFF1FDSelect> (cff, topDict.FDSelectOffset);
|
||||
if (unlikely ((fdArray == &Null (CFF1FDArray)) || !fdArray->sanitize (&sc) ||
|
||||
(fdSelect == &Null (CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count)))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
fdCount = fdArray->count;
|
||||
}
|
||||
@ -1092,36 +1126,36 @@ struct cff1
|
||||
encoding = &Null (Encoding);
|
||||
if (is_CID ())
|
||||
{
|
||||
if (unlikely (charset == &Null (Charset))) { fini (); return; }
|
||||
if (unlikely (charset == &Null (Charset))) goto fail;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!is_predef_encoding ())
|
||||
{
|
||||
encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset);
|
||||
if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) { fini (); return; }
|
||||
if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
stringIndex = &StructAtOffset<CFF1StringIndex> (topDictIndex, topDictIndex->get_size ());
|
||||
if ((stringIndex == &Null (CFF1StringIndex)) || !stringIndex->sanitize (&sc))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
globalSubrs = &StructAtOffset<CFF1Subrs> (stringIndex, stringIndex->get_size ());
|
||||
if ((globalSubrs != &Null (CFF1Subrs)) && !globalSubrs->sanitize (&sc))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset);
|
||||
|
||||
if ((charStrings == &Null (CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc)))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
num_glyphs = charStrings->count;
|
||||
if (num_glyphs != sc.get_num_glyphs ())
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
|
||||
if (unlikely (!privateDicts.resize (fdCount)))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
for (unsigned int i = 0; i < fdCount; i++)
|
||||
privateDicts[i].init ();
|
||||
|
||||
@ -1131,27 +1165,27 @@ struct cff1
|
||||
for (unsigned int i = 0; i < fdCount; i++)
|
||||
{
|
||||
hb_ubytes_t fontDictStr = (*fdArray)[i];
|
||||
if (unlikely (!fontDictStr.sanitize (&sc))) { fini (); return; }
|
||||
if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
|
||||
cff1_font_dict_values_t *font;
|
||||
cff1_top_dict_interp_env_t env (fontDictStr);
|
||||
cff1_font_dict_interpreter_t font_interp (env);
|
||||
font = fontDicts.push ();
|
||||
if (unlikely (fontDicts.in_error ())) { fini (); return; }
|
||||
if (unlikely (fontDicts.in_error ())) goto fail;
|
||||
|
||||
font->init ();
|
||||
if (unlikely (!font_interp.interpret (*font))) { fini (); return; }
|
||||
if (unlikely (!font_interp.interpret (*font))) goto fail;
|
||||
PRIVDICTVAL *priv = &privateDicts[i];
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) { fini (); return; }
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
num_interp_env_t env2 (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env2);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) { fini (); return; }
|
||||
if (unlikely (!priv_interp.interpret (*priv))) goto fail;
|
||||
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset);
|
||||
if (priv->localSubrs != &Null (CFF1Subrs) &&
|
||||
unlikely (!priv->localSubrs->sanitize (&sc)))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
else /* non-CID */
|
||||
@ -1160,20 +1194,25 @@ struct cff1
|
||||
PRIVDICTVAL *priv = &privateDicts[0];
|
||||
|
||||
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) { fini (); return; }
|
||||
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
|
||||
num_interp_env_t env (privDictStr);
|
||||
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env);
|
||||
priv->init ();
|
||||
if (unlikely (!priv_interp.interpret (*priv))) { fini (); return; }
|
||||
if (unlikely (!priv_interp.interpret (*priv))) goto fail;
|
||||
|
||||
priv->localSubrs = &StructAtOffsetOrNull<CFF1Subrs> (&privDictStr, priv->subrsOffset);
|
||||
if (priv->localSubrs != &Null (CFF1Subrs) &&
|
||||
unlikely (!priv->localSubrs->sanitize (&sc)))
|
||||
{ fini (); return; }
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
void fini ()
|
||||
return;
|
||||
|
||||
fail:
|
||||
_fini ();
|
||||
}
|
||||
~accelerator_templ_t () { _fini (); }
|
||||
void _fini ()
|
||||
{
|
||||
sc.end_processing ();
|
||||
topDict.fini ();
|
||||
@ -1183,6 +1222,8 @@ struct cff1
|
||||
blob = nullptr;
|
||||
}
|
||||
|
||||
hb_blob_t *get_blob () const { return blob; }
|
||||
|
||||
bool is_valid () const { return blob; }
|
||||
bool is_CID () const { return topDict.is_CID (); }
|
||||
|
||||
@ -1203,13 +1244,14 @@ struct cff1
|
||||
|
||||
bool is_predef_encoding () const { return topDict.EncodingOffset <= ExpertEncoding; }
|
||||
|
||||
hb_codepoint_t glyph_to_code (hb_codepoint_t glyph) const
|
||||
hb_codepoint_t glyph_to_code (hb_codepoint_t glyph,
|
||||
code_pair_t *glyph_to_sid_cache = nullptr) const
|
||||
{
|
||||
if (encoding != &Null (Encoding))
|
||||
return encoding->get_code (glyph);
|
||||
else
|
||||
{
|
||||
hb_codepoint_t sid = glyph_to_sid (glyph);
|
||||
hb_codepoint_t sid = glyph_to_sid (glyph, glyph_to_sid_cache);
|
||||
if (sid == 0) return 0;
|
||||
hb_codepoint_t code = 0;
|
||||
switch (topDict.EncodingOffset)
|
||||
@ -1227,12 +1269,14 @@ struct cff1
|
||||
}
|
||||
}
|
||||
|
||||
hb_map_t *create_glyph_to_sid_map () const
|
||||
glyph_to_sid_map_t *create_glyph_to_sid_map () const
|
||||
{
|
||||
if (charset != &Null (Charset))
|
||||
{
|
||||
hb_map_t *mapping = hb_map_create ();
|
||||
mapping->set (0, 0);
|
||||
auto *mapping = (glyph_to_sid_map_t *) hb_malloc (sizeof (glyph_to_sid_map_t));
|
||||
if (unlikely (!mapping)) return nullptr;
|
||||
mapping = new (mapping) glyph_to_sid_map_t ();
|
||||
mapping->push (code_pair_t {0, 1});
|
||||
charset->collect_glyph_to_sid_map (mapping, num_glyphs);
|
||||
return mapping;
|
||||
}
|
||||
@ -1240,10 +1284,11 @@ struct cff1
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
hb_codepoint_t glyph_to_sid (hb_codepoint_t glyph) const
|
||||
hb_codepoint_t glyph_to_sid (hb_codepoint_t glyph,
|
||||
code_pair_t *cache = nullptr) const
|
||||
{
|
||||
if (charset != &Null (Charset))
|
||||
return charset->get_sid (glyph, num_glyphs);
|
||||
return charset->get_sid (glyph, num_glyphs, cache);
|
||||
else
|
||||
{
|
||||
hb_codepoint_t sid = 0;
|
||||
@ -1312,19 +1357,17 @@ struct cff1
|
||||
hb_vector_t<PRIVDICTVAL> privateDicts;
|
||||
|
||||
unsigned int num_glyphs = 0;
|
||||
unsigned int num_charset_entries = 0;
|
||||
};
|
||||
|
||||
struct accelerator_t : accelerator_templ_t<cff1_private_dict_opset_t, cff1_private_dict_values_t>
|
||||
{
|
||||
accelerator_t (hb_face_t *face)
|
||||
accelerator_t (hb_face_t *face) : SUPER (face)
|
||||
{
|
||||
SUPER::init (face);
|
||||
|
||||
glyph_names.set_relaxed (nullptr);
|
||||
|
||||
if (!is_valid ()) return;
|
||||
if (is_CID ()) return;
|
||||
|
||||
}
|
||||
~accelerator_t ()
|
||||
{
|
||||
@ -1334,8 +1377,6 @@ struct cff1
|
||||
names->fini ();
|
||||
hb_free (names);
|
||||
}
|
||||
|
||||
SUPER::fini ();
|
||||
}
|
||||
|
||||
bool get_glyph_name (hb_codepoint_t glyph,
|
||||
@ -1386,9 +1427,10 @@ struct cff1
|
||||
/* TODO */
|
||||
|
||||
/* fill glyph names */
|
||||
code_pair_t glyph_to_sid_cache {0, HB_CODEPOINT_INVALID};
|
||||
for (hb_codepoint_t gid = 0; gid < num_glyphs; gid++)
|
||||
{
|
||||
hb_codepoint_t sid = glyph_to_sid (gid);
|
||||
hb_codepoint_t sid = glyph_to_sid (gid, &glyph_to_sid_cache);
|
||||
gname_t gname;
|
||||
gname.sid = sid;
|
||||
if (sid < cff1_std_strings_length)
|
||||
@ -1426,7 +1468,6 @@ struct cff1
|
||||
|
||||
HB_INTERNAL bool get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const;
|
||||
HB_INTERNAL bool paint_glyph (hb_font_t *font, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data, hb_color_t foreground) const;
|
||||
HB_INTERNAL bool get_seac_components (hb_codepoint_t glyph, hb_codepoint_t *base, hb_codepoint_t *accent) const;
|
||||
HB_INTERNAL bool get_path (hb_font_t *font, hb_codepoint_t glyph, hb_draw_session_t &draw_session) const;
|
||||
|
||||
private:
|
||||
@ -1453,9 +1494,24 @@ struct cff1
|
||||
typedef accelerator_templ_t<cff1_private_dict_opset_t, cff1_private_dict_values_t> SUPER;
|
||||
};
|
||||
|
||||
struct accelerator_subset_t : accelerator_templ_t<cff1_private_dict_opset_subset, cff1_private_dict_values_subset_t> {};
|
||||
struct accelerator_subset_t : accelerator_templ_t<cff1_private_dict_opset_subset_t, cff1_private_dict_values_subset_t>
|
||||
{
|
||||
accelerator_subset_t (hb_face_t *face) : SUPER (face) {}
|
||||
~accelerator_subset_t ()
|
||||
{
|
||||
if (cff_accelerator)
|
||||
cff_subset_accelerator_t::destroy (cff_accelerator);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const { return hb_subset_cff1 (c); }
|
||||
HB_INTERNAL bool subset (hb_subset_context_t *c) const;
|
||||
HB_INTERNAL bool serialize (hb_serialize_context_t *c,
|
||||
struct cff1_subset_plan &plan) const;
|
||||
HB_INTERNAL bool get_seac_components (hb_codepoint_t glyph, hb_codepoint_t *base, hb_codepoint_t *accent) const;
|
||||
|
||||
mutable CFF::cff_subset_accelerator_t* cff_accelerator = nullptr;
|
||||
|
||||
typedef accelerator_templ_t<cff1_private_dict_opset_subset_t, cff1_private_dict_values_subset_t> SUPER;
|
||||
};
|
||||
|
||||
protected:
|
||||
HB_INTERNAL static hb_codepoint_t lookup_standard_encoding_for_code (hb_codepoint_t sid);
|
||||
@ -1479,6 +1535,10 @@ struct cff1_accelerator_t : cff1::accelerator_t {
|
||||
cff1_accelerator_t (hb_face_t *face) : cff1::accelerator_t (face) {}
|
||||
};
|
||||
|
||||
struct cff1_subset_accelerator_t : cff1::accelerator_subset_t {
|
||||
cff1_subset_accelerator_t (hb_face_t *face) : cff1::accelerator_subset_t (face) {}
|
||||
};
|
||||
|
||||
} /* namespace OT */
|
||||
|
||||
#endif /* HB_OT_CFF1_TABLE_HH */

35 src/3rdparty/harfbuzz-ng/src/hb-ot-cff2-table.hh vendored
@ -28,7 +28,7 @@
|
||||
#define HB_OT_CFF2_TABLE_HH
|
||||
|
||||
#include "hb-ot-cff-common.hh"
|
||||
#include "hb-subset-cff2.hh"
|
||||
#include "hb-subset-cff-common.hh"
|
||||
#include "hb-draw.hh"
|
||||
#include "hb-paint.hh"
|
||||
|
||||
@ -41,7 +41,6 @@ namespace CFF {
|
||||
#define HB_OT_TAG_CFF2 HB_TAG('C','F','F','2')
|
||||
|
||||
typedef CFFIndex<HBUINT32> CFF2Index;
|
||||
template <typename Type> struct CFF2IndexOf : CFFIndexOf<HBUINT32, Type> {};
|
||||
|
||||
typedef CFF2Index CFF2CharStrings;
|
||||
typedef Subrs<HBUINT32> CFF2Subrs;
|
||||
@ -391,8 +390,12 @@ struct cff2
|
||||
template <typename PRIVOPSET, typename PRIVDICTVAL>
|
||||
struct accelerator_templ_t
|
||||
{
|
||||
static constexpr hb_tag_t tableTag = cff2::tableTag;
|
||||
|
||||
accelerator_templ_t (hb_face_t *face)
|
||||
{
|
||||
if (!face) return;
|
||||
|
||||
topDict.init ();
|
||||
fontDicts.init ();
|
||||
privateDicts.init ();
|
||||
@ -464,7 +467,6 @@ struct cff2
|
||||
goto fail;
|
||||
}
|
||||
|
||||
|
||||
return;
|
||||
|
||||
fail:
|
||||
@ -481,11 +483,13 @@ struct cff2
|
||||
blob = nullptr;
|
||||
}
|
||||
|
||||
hb_map_t *create_glyph_to_sid_map () const
|
||||
hb_vector_t<uint16_t> *create_glyph_to_sid_map () const
|
||||
{
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
hb_blob_t *get_blob () const { return blob; }
|
||||
|
||||
bool is_valid () const { return blob; }
|
||||
|
||||
protected:
|
||||
@ -518,9 +522,24 @@ struct cff2
|
||||
HB_INTERNAL bool get_path (hb_font_t *font, hb_codepoint_t glyph, hb_draw_session_t &draw_session) const;
|
||||
};
|
||||
|
||||
typedef accelerator_templ_t<cff2_private_dict_opset_subset_t, cff2_private_dict_values_subset_t> accelerator_subset_t;
|
||||
struct accelerator_subset_t : accelerator_templ_t<cff2_private_dict_opset_subset_t, cff2_private_dict_values_subset_t>
|
||||
{
|
||||
accelerator_subset_t (hb_face_t *face) : SUPER (face) {}
|
||||
~accelerator_subset_t ()
|
||||
{
|
||||
if (cff_accelerator)
|
||||
cff_subset_accelerator_t::destroy (cff_accelerator);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const { return hb_subset_cff2 (c); }
|
||||
HB_INTERNAL bool subset (hb_subset_context_t *c) const;
|
||||
HB_INTERNAL bool serialize (hb_serialize_context_t *c,
|
||||
struct cff2_subset_plan &plan,
|
||||
hb_array_t<int> normalized_coords) const;
|
||||
|
||||
mutable CFF::cff_subset_accelerator_t* cff_accelerator = nullptr;
|
||||
|
||||
typedef accelerator_templ_t<cff2_private_dict_opset_subset_t, cff2_private_dict_values_subset_t> SUPER;
|
||||
};
|
||||
|
||||
public:
|
||||
FixedVersion<HBUINT8> version; /* Version of CFF2 table. set to 0x0200u */
|
||||
@ -535,6 +554,10 @@ struct cff2_accelerator_t : cff2::accelerator_t {
|
||||
cff2_accelerator_t (hb_face_t *face) : cff2::accelerator_t (face) {}
|
||||
};
|
||||
|
||||
struct cff2_subset_accelerator_t : cff2::accelerator_subset_t {
|
||||
cff2_subset_accelerator_t (hb_face_t *face) : cff2::accelerator_subset_t (face) {}
|
||||
};
|
||||
|
||||
} /* namespace OT */
|
||||
|
||||
#endif /* HB_OT_CFF2_TABLE_HH */

38 src/3rdparty/harfbuzz-ng/src/hb-ot-cmap-table.hh vendored
@ -277,10 +277,10 @@ struct CmapSubtableFormat4
|
||||
}
|
||||
} writer(c);
|
||||
|
||||
writer.end_code_ = c->allocate_size<HBUINT16> (HBUINT16::static_size * segcount);
|
||||
c->allocate_size<HBUINT16> (2); // padding
|
||||
writer.start_code_ = c->allocate_size<HBUINT16> (HBUINT16::static_size * segcount);
|
||||
writer.id_delta_ = c->allocate_size<HBINT16> (HBINT16::static_size * segcount);
|
||||
writer.end_code_ = c->allocate_size<HBUINT16> (HBUINT16::static_size * segcount, false);
|
||||
(void) c->allocate_size<HBUINT16> (2); // padding
|
||||
writer.start_code_ = c->allocate_size<HBUINT16> (HBUINT16::static_size * segcount, false);
|
||||
writer.id_delta_ = c->allocate_size<HBINT16> (HBINT16::static_size * segcount, false);
|
||||
|
||||
if (unlikely (!writer.end_code_ || !writer.start_code_ || !writer.id_delta_)) return false;
|
||||
|
||||
@ -325,7 +325,7 @@ struct CmapSubtableFormat4
|
||||
{
|
||||
auto format4_iter =
|
||||
+ it
|
||||
| hb_filter ([&] (const hb_pair_t<hb_codepoint_t, hb_codepoint_t> _)
|
||||
| hb_filter ([&] (const hb_codepoint_pair_t _)
|
||||
{ return _.first <= 0xFFFF; })
|
||||
;
|
||||
|
||||
@ -335,7 +335,7 @@ struct CmapSubtableFormat4
|
||||
if (unlikely (!c->extend_min (this))) return;
|
||||
this->format = 4;
|
||||
|
||||
hb_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> cp_to_gid {
|
||||
hb_vector_t<hb_codepoint_pair_t> cp_to_gid {
|
||||
format4_iter
|
||||
};
|
||||
|
||||
@ -757,8 +757,7 @@ struct CmapSubtableLongSegmented
|
||||
hb_codepoint_t gid = this->groups[i].glyphID;
|
||||
if (!gid)
|
||||
{
|
||||
/* Intention is: if (hb_is_same (T, CmapSubtableFormat13)) continue; */
|
||||
if (! T::group_get_glyph (this->groups[i], end)) continue;
|
||||
if (T::formatNumber == 13) continue;
|
||||
start++;
|
||||
gid++;
|
||||
}
|
||||
@ -766,11 +765,13 @@ struct CmapSubtableLongSegmented
|
||||
if (unlikely ((unsigned int) (gid + end - start) >= num_glyphs))
|
||||
end = start + (hb_codepoint_t) num_glyphs - gid;
|
||||
|
||||
mapping->alloc (mapping->get_population () + end - start + 1);
|
||||
|
||||
for (unsigned cp = start; cp <= end; cp++)
|
||||
{
|
||||
unicodes->add (cp);
|
||||
mapping->set (cp, gid);
|
||||
gid++;
|
||||
gid += T::increment;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -794,6 +795,9 @@ struct CmapSubtableLongSegmented
|
||||
|
||||
struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
|
||||
{
|
||||
static constexpr int increment = 1;
|
||||
static constexpr int formatNumber = 12;
|
||||
|
||||
static hb_codepoint_t group_get_glyph (const CmapSubtableLongGroup &group,
|
||||
hb_codepoint_t u)
|
||||
{ return likely (group.startCharCode <= group.endCharCode) ?
|
||||
@ -866,6 +870,9 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
|
||||
|
||||
struct CmapSubtableFormat13 : CmapSubtableLongSegmented<CmapSubtableFormat13>
|
||||
{
|
||||
static constexpr int increment = 0;
|
||||
static constexpr int formatNumber = 13;
|
||||
|
||||
static hb_codepoint_t group_get_glyph (const CmapSubtableLongGroup &group,
|
||||
hb_codepoint_t u HB_UNUSED)
|
||||
{ return group.glyphID; }
|
||||
@ -917,8 +924,7 @@ struct DefaultUVS : SortedArray32Of<UnicodeValueRange>
|
||||
DefaultUVS* copy (hb_serialize_context_t *c,
|
||||
const hb_set_t *unicodes) const
|
||||
{
|
||||
DefaultUVS *out = c->start_embed<DefaultUVS> ();
|
||||
if (unlikely (!out)) return nullptr;
|
||||
auto *out = c->start_embed<DefaultUVS> ();
|
||||
auto snap = c->snapshot ();
|
||||
|
||||
HBUINT32 len;
|
||||
@ -931,8 +937,7 @@ struct DefaultUVS : SortedArray32Of<UnicodeValueRange>
|
||||
hb_codepoint_t start = HB_SET_VALUE_INVALID;
|
||||
hb_codepoint_t end = HB_SET_VALUE_INVALID;
|
||||
|
||||
for (hb_codepoint_t u = HB_SET_VALUE_INVALID;
|
||||
unicodes->next (&u);)
|
||||
for (auto u : *unicodes)
|
||||
{
|
||||
if (!as_array ().bsearch (u))
|
||||
continue;
|
||||
@ -1067,9 +1072,7 @@ struct NonDefaultUVS : SortedArray32Of<UVSMapping>
|
||||
const hb_set_t *glyphs_requested,
|
||||
const hb_map_t *glyph_map) const
|
||||
{
|
||||
NonDefaultUVS *out = c->start_embed<NonDefaultUVS> ();
|
||||
if (unlikely (!out)) return nullptr;
|
||||
|
||||
auto *out = c->start_embed<NonDefaultUVS> ();
|
||||
auto it =
|
||||
+ as_array ()
|
||||
| hb_filter ([&] (const UVSMapping& _)
|
||||
@ -1767,7 +1770,6 @@ struct cmap
|
||||
TRACE_SUBSET (this);
|
||||
|
||||
cmap *cmap_prime = c->serializer->start_embed<cmap> ();
|
||||
if (unlikely (!c->serializer->check_success (cmap_prime))) return_trace (false);
|
||||
|
||||
auto encodingrec_iter =
|
||||
+ hb_iter (encodingRecord)
|
||||
@ -1798,7 +1800,7 @@ struct cmap
|
||||
|
||||
auto it =
|
||||
+ c->plan->unicode_to_new_gid_list.iter ()
|
||||
| hb_filter ([&] (const hb_pair_t<hb_codepoint_t, hb_codepoint_t> _)
|
||||
| hb_filter ([&] (const hb_codepoint_pair_t _)
|
||||
{ return (_.second != HB_MAP_VALUE_INVALID); })
|
||||
;

14 src/3rdparty/harfbuzz-ng/src/hb-ot-font.cc vendored
@ -38,8 +38,8 @@

#include "hb-ot-cmap-table.hh"
#include "hb-ot-glyf-table.hh"
#include "hb-ot-cff1-table.hh"
#include "hb-ot-cff2-table.hh"
#include "hb-ot-cff1-table.hh"
#include "hb-ot-hmtx-table.hh"
#include "hb-ot-post-table.hh"
#include "hb-ot-stat-table.hh" // Just so we compile it; unused otherwise.
@ -98,7 +98,7 @@ _hb_ot_font_create (hb_font_t *font)
{
cmap_cache = (hb_ot_font_cmap_cache_t *) hb_malloc (sizeof (hb_ot_font_cmap_cache_t));
if (unlikely (!cmap_cache)) goto out;
cmap_cache->init ();
new (cmap_cache) hb_ot_font_cmap_cache_t ();
if (unlikely (!hb_face_set_user_data (font->face,
&hb_ot_font_cmap_cache_user_data_key,
cmap_cache,
@ -230,8 +230,8 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
use_cache = false;
goto out;
}
new (cache) hb_ot_font_advance_cache_t;

cache->init ();
if (unlikely (!ot_font->advance_cache.cmpexch (nullptr, cache)))
{
hb_free (cache);
@ -255,7 +255,7 @@ hb_ot_get_glyph_h_advances (hb_font_t* font, void* font_data,
{ /* Use cache. */
if (ot_font->cached_coords_serial.get_acquire () != (int) font->serial_coords)
{
ot_font->advance_cache->init ();
ot_font->advance_cache->clear ();
ot_font->cached_coords_serial.set_release (font->serial_coords);
}

@ -436,8 +436,8 @@ hb_ot_get_glyph_extents (hb_font_t *font,
#endif
if (ot_face->glyf->get_extents (font, glyph, extents)) return true;
#ifndef HB_NO_OT_FONT_CFF
if (ot_face->cff1->get_extents (font, glyph, extents)) return true;
if (ot_face->cff2->get_extents (font, glyph, extents)) return true;
if (ot_face->cff1->get_extents (font, glyph, extents)) return true;
#endif

return false;
@ -525,8 +525,8 @@ hb_ot_draw_glyph (hb_font_t *font,
embolden ? &outline : draw_data, font->slant_xy);
if (!font->face->table.glyf->get_path (font, glyph, draw_session))
#ifndef HB_NO_CFF
if (!font->face->table.cff1->get_path (font, glyph, draw_session))
if (!font->face->table.cff2->get_path (font, glyph, draw_session))
if (!font->face->table.cff1->get_path (font, glyph, draw_session))
#endif
{}
}
@ -565,8 +565,8 @@ hb_ot_paint_glyph (hb_font_t *font,
#endif
if (font->face->table.glyf->paint_glyph (font, glyph, paint_funcs, paint_data, foreground)) return;
#ifndef HB_NO_CFF
if (font->face->table.cff1->paint_glyph (font, glyph, paint_funcs, paint_data, foreground)) return;
if (font->face->table.cff2->paint_glyph (font, glyph, paint_funcs, paint_data, foreground)) return;
if (font->face->table.cff1->paint_glyph (font, glyph, paint_funcs, paint_data, foreground)) return;
#endif
}
#endif

43 src/3rdparty/harfbuzz-ng/src/hb-ot-hdmx-table.hh vendored
@ -46,21 +46,23 @@ struct DeviceRecord
|
||||
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c, unsigned pixelSize, Iterator it)
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
unsigned pixelSize,
|
||||
Iterator it,
|
||||
const hb_vector_t<hb_codepoint_pair_t> new_to_old_gid_list,
|
||||
unsigned num_glyphs)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
unsigned length = it.len ();
|
||||
|
||||
if (unlikely (!c->extend (this, length))) return_trace (false);
|
||||
if (unlikely (!c->extend (this, num_glyphs))) return_trace (false);
|
||||
|
||||
this->pixelSize = pixelSize;
|
||||
this->maxWidth =
|
||||
+ it
|
||||
| hb_reduce (hb_max, 0u);
|
||||
|
||||
+ it
|
||||
| hb_sink (widthsZ.as_array (length));
|
||||
for (auto &_ : new_to_old_gid_list)
|
||||
widthsZ[_.first] = *it++;
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
@ -89,7 +91,11 @@ struct hdmx
|
||||
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c, unsigned version, Iterator it)
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
unsigned version,
|
||||
Iterator it,
|
||||
const hb_vector_t<hb_codepoint_pair_t> &new_to_old_gid_list,
|
||||
unsigned num_glyphs)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
@ -97,10 +103,10 @@ struct hdmx
|
||||
|
||||
this->version = version;
|
||||
this->numRecords = it.len ();
|
||||
this->sizeDeviceRecord = DeviceRecord::get_size (it ? (*it).second.len () : 0);
|
||||
this->sizeDeviceRecord = DeviceRecord::get_size (num_glyphs);
|
||||
|
||||
for (const hb_item_type<Iterator>& _ : +it)
|
||||
c->start_embed<DeviceRecord> ()->serialize (c, _.first, _.second);
|
||||
c->start_embed<DeviceRecord> ()->serialize (c, _.first, _.second, new_to_old_gid_list, num_glyphs);
|
||||
|
||||
return_trace (c->successful ());
|
||||
}
|
||||
@ -110,31 +116,30 @@ struct hdmx
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
||||
hdmx *hdmx_prime = c->serializer->start_embed <hdmx> ();
|
||||
if (unlikely (!hdmx_prime)) return_trace (false);
|
||||
auto *hdmx_prime = c->serializer->start_embed <hdmx> ();
|
||||
|
||||
unsigned num_input_glyphs = get_num_glyphs ();
|
||||
auto it =
|
||||
+ hb_range ((unsigned) numRecords)
|
||||
| hb_map ([c, this] (unsigned _)
|
||||
| hb_map ([c, num_input_glyphs, this] (unsigned _)
|
||||
{
|
||||
const DeviceRecord *device_record =
|
||||
&StructAtOffset<DeviceRecord> (&firstDeviceRecord,
|
||||
_ * sizeDeviceRecord);
|
||||
auto row =
|
||||
+ hb_range (c->plan->num_output_glyphs ())
|
||||
| hb_map (c->plan->reverse_glyph_map)
|
||||
| hb_map ([this, c, device_record] (hb_codepoint_t _)
|
||||
+ hb_iter (c->plan->new_to_old_gid_list)
|
||||
| hb_map ([num_input_glyphs, device_record] (hb_codepoint_pair_t _)
|
||||
{
|
||||
if (c->plan->is_empty_glyph (_))
|
||||
return Null (HBUINT8);
|
||||
return device_record->widthsZ.as_array (get_num_glyphs ()) [_];
|
||||
return device_record->widthsZ.as_array (num_input_glyphs) [_.second];
|
||||
})
|
||||
;
|
||||
return hb_pair ((unsigned) device_record->pixelSize, +row);
|
||||
})
|
||||
;
|
||||
|
||||
hdmx_prime->serialize (c->serializer, version, it);
|
||||
hdmx_prime->serialize (c->serializer, version, it,
|
||||
c->plan->new_to_old_gid_list,
|
||||
c->plan->num_output_glyphs ());
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|

80 src/3rdparty/harfbuzz-ng/src/hb-ot-hmtx-table.hh vendored
@ -83,7 +83,7 @@ struct hmtxvmtx
|
||||
bool subset_update_header (hb_subset_context_t *c,
|
||||
unsigned int num_hmetrics,
|
||||
const hb_hashmap_t<hb_codepoint_t, hb_pair_t<unsigned, int>> *mtx_map,
|
||||
const hb_map_t *bounds_map) const
|
||||
const hb_vector_t<unsigned> &bounds_vec) const
|
||||
{
|
||||
hb_blob_t *src_blob = hb_sanitize_context_t ().reference_table<H> (c->plan->source, H::tableTag);
|
||||
hb_blob_t *dest_blob = hb_blob_copy_writable_or_fail (src_blob);
|
||||
@ -114,6 +114,7 @@ struct hmtxvmtx
|
||||
HB_ADD_MVAR_VAR (HB_OT_METRICS_TAG_VERTICAL_CARET_OFFSET, caretOffset);
|
||||
}
|
||||
|
||||
bool empty = true;
|
||||
int min_lsb = 0x7FFF;
|
||||
int min_rsb = 0x7FFF;
|
||||
int max_extent = -0x7FFF;
|
||||
@ -125,9 +126,10 @@ struct hmtxvmtx
|
||||
int lsb = _.second.second;
|
||||
max_adv = hb_max (max_adv, adv);
|
||||
|
||||
if (bounds_map->has (gid))
|
||||
if (bounds_vec[gid] != 0xFFFFFFFF)
|
||||
{
|
||||
unsigned bound_width = bounds_map->get (gid);
|
||||
empty = false;
|
||||
unsigned bound_width = bounds_vec[gid];
|
||||
int rsb = adv - lsb - bound_width;
|
||||
int extent = lsb + bound_width;
|
||||
min_lsb = hb_min (min_lsb, lsb);
|
||||
@ -137,7 +139,7 @@ struct hmtxvmtx
|
||||
}
|
||||
|
||||
table->advanceMax = max_adv;
|
||||
if (!bounds_map->is_empty ())
|
||||
if (!empty)
|
||||
{
|
||||
table->minLeadingBearing = min_lsb;
|
||||
table->minTrailingBearing = min_rsb;
|
||||
@ -156,32 +158,32 @@ struct hmtxvmtx
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
void serialize (hb_serialize_context_t *c,
|
||||
Iterator it,
|
||||
unsigned num_long_metrics)
|
||||
const hb_vector_t<hb_codepoint_pair_t> new_to_old_gid_list,
|
||||
unsigned num_long_metrics,
|
||||
unsigned total_num_metrics)
|
||||
{
|
||||
unsigned idx = 0;
|
||||
for (auto _ : it)
|
||||
LongMetric* long_metrics = c->allocate_size<LongMetric> (num_long_metrics * LongMetric::static_size);
|
||||
FWORD* short_metrics = c->allocate_size<FWORD> ((total_num_metrics - num_long_metrics) * FWORD::static_size);
|
||||
if (!long_metrics || !short_metrics) return;
|
||||
|
||||
short_metrics -= num_long_metrics;
|
||||
|
||||
for (auto _ : new_to_old_gid_list)
|
||||
{
|
||||
if (idx < num_long_metrics)
|
||||
hb_codepoint_t gid = _.first;
|
||||
auto mtx = *it++;
|
||||
|
||||
if (gid < num_long_metrics)
|
||||
{
|
||||
LongMetric lm;
|
||||
lm.advance = _.first;
|
||||
lm.sb = _.second;
|
||||
if (unlikely (!c->embed<LongMetric> (&lm))) return;
|
||||
}
|
||||
else if (idx < 0x10000u)
|
||||
{
|
||||
FWORD *sb = c->allocate_size<FWORD> (FWORD::static_size);
|
||||
if (unlikely (!sb)) return;
|
||||
*sb = _.second;
|
||||
LongMetric& lm = long_metrics[gid];
|
||||
lm.advance = mtx.first;
|
||||
lm.sb = mtx.second;
|
||||
}
|
||||
// TODO(beyond-64k): This assumes that maxp.numGlyphs is 0xFFFF.
|
||||
else if (gid < 0x10000u)
|
||||
short_metrics[gid] = mtx.second;
|
||||
else
|
||||
{
|
||||
// TODO: This does not do tail optimization.
|
||||
UFWORD *adv = c->allocate_size<UFWORD> (UFWORD::static_size);
|
||||
if (unlikely (!adv)) return;
|
||||
*adv = _.first;
|
||||
}
|
||||
idx++;
|
||||
((UFWORD*) short_metrics)[gid] = mtx.first;
|
||||
}
|
||||
}
|
||||
|
||||
@ -189,8 +191,7 @@ struct hmtxvmtx
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
||||
T *table_prime = c->serializer->start_embed <T> ();
|
||||
if (unlikely (!table_prime)) return_trace (false);
|
||||
auto *table_prime = c->serializer->start_embed <T> ();
|
||||
|
||||
accelerator_t _mtx (c->plan->source);
|
||||
unsigned num_long_metrics;
|
||||
@ -199,6 +200,8 @@ struct hmtxvmtx
|
||||
/* Determine num_long_metrics to encode. */
|
||||
auto& plan = c->plan;
|
||||
|
||||
// TODO Don't consider retaingid holes here.
|
||||
|
||||
num_long_metrics = hb_min (plan->num_output_glyphs (), 0xFFFFu);
|
||||
unsigned int last_advance = get_new_gid_advance_unscaled (plan, mtx_map, num_long_metrics - 1, _mtx);
|
||||
while (num_long_metrics > 1 &&
|
||||
@ -209,31 +212,36 @@ struct hmtxvmtx
|
||||
}
|
||||
|
||||
auto it =
|
||||
+ hb_range (c->plan->num_output_glyphs ())
|
||||
| hb_map ([c, &_mtx, mtx_map] (unsigned _)
|
||||
+ hb_iter (c->plan->new_to_old_gid_list)
|
||||
| hb_map ([c, &_mtx, mtx_map] (hb_codepoint_pair_t _)
|
||||
{
|
||||
if (!mtx_map->has (_))
|
||||
hb_codepoint_t new_gid = _.first;
|
||||
hb_codepoint_t old_gid = _.second;
|
||||
|
||||
hb_pair_t<unsigned, int> *v = nullptr;
|
||||
if (!mtx_map->has (new_gid, &v))
|
||||
{
|
||||
hb_codepoint_t old_gid;
|
||||
if (!c->plan->old_gid_for_new_gid (_, &old_gid))
|
||||
return hb_pair (0u, 0);
|
||||
int lsb = 0;
|
||||
if (!_mtx.get_leading_bearing_without_var_unscaled (old_gid, &lsb))
|
||||
(void) _glyf_get_leading_bearing_without_var_unscaled (c->plan->source, old_gid, !T::is_horizontal, &lsb);
|
||||
return hb_pair (_mtx.get_advance_without_var_unscaled (old_gid), +lsb);
|
||||
}
|
||||
return mtx_map->get (_);
|
||||
return *v;
|
||||
})
|
||||
;
|
||||
|
||||
table_prime->serialize (c->serializer, it, num_long_metrics);
|
||||
table_prime->serialize (c->serializer,
|
||||
it,
|
||||
c->plan->new_to_old_gid_list,
|
||||
num_long_metrics,
|
||||
c->plan->num_output_glyphs ());
|
||||
|
||||
if (unlikely (c->serializer->in_error ()))
|
||||
return_trace (false);
|
||||
|
||||
// Amend header num hmetrics
|
||||
if (unlikely (!subset_update_header (c, num_long_metrics, mtx_map,
|
||||
T::is_horizontal ? &c->plan->bounds_width_map : &c->plan->bounds_height_map)))
|
||||
T::is_horizontal ? c->plan->bounds_width_vec : c->plan->bounds_height_vec)))
|
||||
return_trace (false);
|
||||
|
||||
return_trace (true);
|
||||
|
@ -170,8 +170,8 @@ struct FeatMinMaxRecord
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (likely (c->check_struct (this) &&
|
||||
minCoord.sanitize (c, this) &&
|
||||
maxCoord.sanitize (c, this)));
|
||||
minCoord.sanitize (c, base) &&
|
||||
maxCoord.sanitize (c, base)));
|
||||
}
|
||||
|
||||
protected:
|
||||
@ -187,7 +187,6 @@ struct FeatMinMaxRecord
|
||||
* of MinMax table (may be NULL) */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
|
||||
};
|
||||
|
||||
struct MinMax
|
||||
@ -274,7 +273,7 @@ struct BaseLangSysRecord
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (likely (c->check_struct (this) &&
|
||||
minMax.sanitize (c, this)));
|
||||
minMax.sanitize (c, base)));
|
||||
}
|
||||
|
||||
protected:
|
||||
@ -297,7 +296,8 @@ struct BaseScript
|
||||
const BaseCoord &get_base_coord (int baseline_tag_index) const
|
||||
{ return (this+baseValues).get_base_coord (baseline_tag_index); }
|
||||
|
||||
bool has_data () const { return baseValues; }
|
||||
bool has_values () const { return baseValues; }
|
||||
bool has_min_max () const { return defaultMinMax; /* TODO What if only per-language is present? */ }
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
@ -383,7 +383,7 @@ struct Axis
|
||||
const BaseCoord **coord) const
|
||||
{
|
||||
const BaseScript &base_script = (this+baseScriptList).get_base_script (script_tag);
|
||||
if (!base_script.has_data ())
|
||||
if (!base_script.has_values ())
|
||||
{
|
||||
*coord = nullptr;
|
||||
return false;
|
||||
@ -410,7 +410,7 @@ struct Axis
|
||||
const BaseCoord **max_coord) const
|
||||
{
|
||||
const BaseScript &base_script = (this+baseScriptList).get_base_script (script_tag);
|
||||
if (!base_script.has_data ())
|
||||
if (!base_script.has_min_max ())
|
||||
{
|
||||
*min_coord = *max_coord = nullptr;
|
||||
return false;
|
||||
@ -425,8 +425,8 @@ struct Axis
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (likely (c->check_struct (this) &&
|
||||
(this+baseTagList).sanitize (c) &&
|
||||
(this+baseScriptList).sanitize (c)));
|
||||
baseTagList.sanitize (c, this) &&
|
||||
baseScriptList.sanitize (c, this)));
|
||||
}
|
||||
|
||||
protected:
|
||||
@ -473,14 +473,13 @@ struct BASE
|
||||
return true;
|
||||
}
|
||||
|
||||
/* TODO: Expose this separately sometime? */
|
||||
bool get_min_max (hb_font_t *font,
|
||||
hb_direction_t direction,
|
||||
hb_tag_t script_tag,
|
||||
hb_tag_t language_tag,
|
||||
hb_tag_t feature_tag,
|
||||
hb_position_t *min,
|
||||
hb_position_t *max)
|
||||
hb_position_t *max) const
|
||||
{
|
||||
const BaseCoord *min_coord, *max_coord;
|
||||
if (!get_axis (direction).get_min_max (script_tag, language_tag, feature_tag,
|
||||
|

173 src/3rdparty/harfbuzz-ng/src/hb-ot-layout-common.hh vendored
@ -55,19 +55,22 @@ static bool ClassDef_remap_and_serialize (
|
||||
hb_serialize_context_t *c,
|
||||
const hb_set_t &klasses,
|
||||
bool use_class_zero,
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> &glyph_and_klass, /* IN/OUT */
|
||||
hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
|
||||
hb_map_t *klass_map /*IN/OUT*/);
|
||||
|
||||
struct hb_collect_feature_substitutes_with_var_context_t
|
||||
{
|
||||
const hb_map_t *axes_index_tag_map;
|
||||
const hb_hashmap_t<hb_tag_t, int> *axes_location;
|
||||
const hb_hashmap_t<hb_tag_t, Triple> *axes_location;
|
||||
hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;
|
||||
hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
|
||||
bool& insert_catch_all_feature_variation_record;
|
||||
|
||||
// not stored in subset_plan
|
||||
hb_set_t *feature_indices;
|
||||
bool apply;
|
||||
bool variation_applied;
|
||||
bool universal;
|
||||
unsigned cur_record_idx;
|
||||
hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> *conditionset_map;
|
||||
};
|
||||
@ -807,7 +810,7 @@ struct Feature
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
out->featureParams.serialize_subset (c, featureParams, this, tag);
|
||||
|
||||
@ -981,7 +984,7 @@ struct RecordListOfFeature : RecordListOf<Feature>
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
+ hb_enumerate (*this)
|
||||
| hb_filter (l->feature_index_map, hb_first)
|
||||
@ -1078,7 +1081,7 @@ struct LangSys
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
const uint32_t *v;
|
||||
out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;
|
||||
@ -1188,7 +1191,7 @@ struct Script
|
||||
return false;
|
||||
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
bool defaultLang = false;
|
||||
if (has_default_lang_sys ())
|
||||
@ -1247,7 +1250,7 @@ struct RecordListOfScript : RecordListOf<Script>
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
for (auto _ : + hb_enumerate (*this))
|
||||
{
|
||||
@ -1367,7 +1370,7 @@ struct Lookup
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
out->lookupType = lookupType;
|
||||
out->lookupFlag = lookupFlag;
|
||||
|
||||
@ -1456,7 +1459,7 @@ struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
+ hb_enumerate (*this)
|
||||
| hb_filter (l->lookup_index_map, hb_first)
|
||||
@ -1482,7 +1485,7 @@ struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
|
||||
static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
||||
const hb_set_t &klasses,
|
||||
bool use_class_zero,
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> &glyph_and_klass, /* IN/OUT */
|
||||
hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
|
||||
hb_map_t *klass_map /*IN/OUT*/)
|
||||
{
|
||||
if (!klass_map)
|
||||
@ -1573,7 +1576,7 @@ struct ClassDefFormat1_3
|
||||
TRACE_SUBSET (this);
|
||||
const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
|
||||
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> glyph_and_klass;
|
||||
hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
|
||||
hb_set_t orig_klasses;
|
||||
|
||||
hb_codepoint_t start = startGlyph;
|
||||
@ -1592,10 +1595,13 @@ struct ClassDefFormat1_3
|
||||
orig_klasses.add (klass);
|
||||
}
|
||||
|
||||
unsigned glyph_count = glyph_filter
|
||||
? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter))
|
||||
: glyph_map.get_population ();
|
||||
use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
|
||||
if (use_class_zero)
|
||||
{
|
||||
unsigned glyph_count = glyph_filter
|
||||
? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter))
|
||||
: glyph_map.get_population ();
|
||||
use_class_zero = glyph_count <= glyph_and_klass.length;
|
||||
}
|
||||
if (!ClassDef_remap_and_serialize (c->serializer,
|
||||
orig_klasses,
|
||||
use_class_zero,
|
||||
@ -1830,7 +1836,7 @@ struct ClassDefFormat2_4
|
||||
const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
|
||||
const hb_set_t &glyph_set = *c->plan->glyphset_gsub ();
|
||||
|
||||
hb_sorted_vector_t<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> glyph_and_klass;
|
||||
hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
|
||||
hb_set_t orig_klasses;
|
||||
|
||||
if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2
|
||||
@ -1916,7 +1922,7 @@ struct ClassDefFormat2_4
|
||||
{
|
||||
if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
|
||||
{
|
||||
for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
|
||||
for (auto g : *glyphs)
|
||||
if (get_class (g))
|
||||
return true;
|
||||
return false;
|
||||
@ -1931,13 +1937,22 @@ struct ClassDefFormat2_4
|
||||
{
|
||||
/* Match if there's any glyph that is not listed! */
|
||||
hb_codepoint_t g = HB_SET_VALUE_INVALID;
|
||||
for (auto &range : rangeRecord)
|
||||
hb_codepoint_t last = HB_SET_VALUE_INVALID;
|
||||
auto it = hb_iter (rangeRecord);
|
||||
for (auto &range : it)
|
||||
{
|
||||
if (it->first == last + 1)
|
||||
{
|
||||
it++;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!glyphs->next (&g))
|
||||
break;
|
||||
if (g < range.first)
|
||||
return true;
|
||||
g = range.last;
|
||||
last = g;
|
||||
}
|
||||
if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
|
||||
return true;
|
||||
@ -1976,8 +1991,7 @@ struct ClassDefFormat2_4
|
||||
unsigned count = rangeRecord.len;
|
||||
if (count > glyphs->get_population () * hb_bit_storage (count) * 8)
|
||||
{
|
||||
for (hb_codepoint_t g = HB_SET_VALUE_INVALID;
|
||||
glyphs->next (&g);)
|
||||
for (auto g : *glyphs)
|
||||
{
|
||||
unsigned i;
|
||||
if (rangeRecord.as_array ().bfind (g, &i) &&
|
||||
@ -2377,7 +2391,7 @@ struct VarRegionList
|
||||
return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t ®ion_map)
|
||||
bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t ®ion_map)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
@ -2494,7 +2508,7 @@ struct VarData
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
const VarData *src,
|
||||
const hb_inc_bimap_t &inner_map,
|
||||
const hb_bimap_t ®ion_map)
|
||||
const hb_inc_bimap_t ®ion_map)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (this))) return_trace (false);
|
||||
@ -2905,9 +2919,9 @@ struct VariationStore
|
||||
enum Cond_with_Var_flag_t
|
||||
{
|
||||
KEEP_COND_WITH_VAR = 0,
|
||||
DROP_COND_WITH_VAR = 1,
|
||||
DROP_RECORD_WITH_VAR = 2,
|
||||
MEM_ERR_WITH_VAR = 3,
|
||||
KEEP_RECORD_WITH_VAR = 1,
|
||||
DROP_COND_WITH_VAR = 2,
|
||||
DROP_RECORD_WITH_VAR = 3,
|
||||
};
|
||||
|
||||
struct ConditionFormat1
|
||||
@ -2923,9 +2937,29 @@ struct ConditionFormat1
|
||||
const hb_map_t *index_map = &c->plan->axes_index_map;
|
||||
if (index_map->is_empty ()) return_trace (true);
|
||||
|
||||
if (!index_map->has (axisIndex))
|
||||
const hb_map_t& axes_old_index_tag_map = c->plan->axes_old_index_tag_map;
|
||||
hb_codepoint_t *axis_tag;
|
||||
if (!axes_old_index_tag_map.has (axisIndex, &axis_tag) ||
|
||||
!index_map->has (axisIndex))
|
||||
return_trace (false);
|
||||
|
||||
const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location = c->plan->axes_location;
|
||||
Triple axis_limit{-1.f, 0.f, 1.f};
|
||||
Triple *normalized_limit;
|
||||
if (normalized_axes_location.has (*axis_tag, &normalized_limit))
|
||||
axis_limit = *normalized_limit;
|
||||
|
||||
const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances = c->plan->axes_triple_distances;
|
||||
TripleDistances axis_triple_distances{1.f, 1.f};
|
||||
TripleDistances *triple_dists;
|
||||
if (axes_triple_distances.has (*axis_tag, &triple_dists))
|
||||
axis_triple_distances = *triple_dists;
|
||||
|
||||
float normalized_min = renormalizeValue (filterRangeMinValue.to_float (), axis_limit, axis_triple_distances, false);
|
||||
float normalized_max = renormalizeValue (filterRangeMaxValue.to_float (), axis_limit, axis_triple_distances, false);
|
||||
out->filterRangeMinValue.set_float (normalized_min);
|
||||
out->filterRangeMaxValue.set_float (normalized_max);
|
||||
|
||||
return_trace (c->serializer->check_assign (out->axisIndex, index_map->get (axisIndex),
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
@ -2940,29 +2974,45 @@ struct ConditionFormat1
|
||||
|
||||
hb_tag_t axis_tag = c->axes_index_tag_map->get (axisIndex);
|
||||
|
||||
//axis not pinned, keep the condition
|
||||
if (!c->axes_location->has (axis_tag))
|
||||
Triple axis_range (-1.f, 0.f, 1.f);
|
||||
Triple *axis_limit;
|
||||
if (c->axes_location->has (axis_tag, &axis_limit))
|
||||
axis_range = *axis_limit;
|
||||
|
||||
float axis_min_val = axis_range.minimum;
|
||||
float axis_default_val = axis_range.middle;
|
||||
float axis_max_val = axis_range.maximum;
|
||||
|
||||
float filter_min_val = filterRangeMinValue.to_float ();
|
||||
float filter_max_val = filterRangeMaxValue.to_float ();
|
||||
|
||||
if (axis_default_val < filter_min_val ||
|
||||
axis_default_val > filter_max_val)
|
||||
c->apply = false;
|
||||
|
||||
//condition not met, drop the entire record
|
||||
if (axis_min_val > filter_max_val || axis_max_val < filter_min_val ||
|
||||
filter_min_val > filter_max_val)
|
||||
return DROP_RECORD_WITH_VAR;
|
||||
|
||||
//condition met and axis pinned, drop the condition
|
||||
if (c->axes_location->has (axis_tag) &&
|
||||
c->axes_location->get (axis_tag).is_point ())
|
||||
return DROP_COND_WITH_VAR;
|
||||
|
||||
if (filter_max_val != axis_max_val || filter_min_val != axis_min_val)
|
||||
{
|
||||
// add axisIndex->value into the hashmap so we can check if the record is
|
||||
// unique with variations
|
||||
int16_t min_val = filterRangeMinValue.to_int ();
|
||||
int16_t max_val = filterRangeMaxValue.to_int ();
|
||||
hb_codepoint_t val = (max_val << 16) + min_val;
|
||||
int16_t int_filter_max_val = filterRangeMaxValue.to_int ();
|
||||
int16_t int_filter_min_val = filterRangeMinValue.to_int ();
|
||||
hb_codepoint_t val = (int_filter_max_val << 16) + int_filter_min_val;
|
||||
|
||||
condition_map->set (axisIndex, val);
|
||||
return KEEP_COND_WITH_VAR;
|
||||
}
|
||||
|
||||
//axis pinned, check if condition is met
|
||||
//TODO: add check for axis Ranges
|
||||
int v = c->axes_location->get (axis_tag);
|
||||
|
||||
//condition not met, drop the entire record
|
||||
if (v < filterRangeMinValue.to_int () || v > filterRangeMaxValue.to_int ())
|
||||
return DROP_RECORD_WITH_VAR;
|
||||
|
||||
//axis pinned and condition met, drop the condition
|
||||
return DROP_COND_WITH_VAR;
|
||||
return KEEP_RECORD_WITH_VAR;
|
||||
}
|
||||
|
||||
bool evaluate (const int *coords, unsigned int coord_len) const
|
||||
@ -3001,7 +3051,7 @@ struct Condition
|
||||
{
|
||||
switch (u.format) {
|
||||
case 1: return u.format1.keep_with_variations (c, condition_map);
|
||||
default:return KEEP_COND_WITH_VAR;
|
||||
default: c->apply = false; return KEEP_COND_WITH_VAR;
|
||||
}
|
||||
}
|
||||
|
||||
@ -3046,45 +3096,50 @@ struct ConditionSet
|
||||
return true;
|
||||
}
|
||||
|
||||
Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
|
||||
void keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
|
||||
{
|
||||
hb_map_t *condition_map = hb_map_create ();
|
||||
if (unlikely (!condition_map)) return MEM_ERR_WITH_VAR;
|
||||
if (unlikely (!condition_map)) return;
|
||||
hb::shared_ptr<hb_map_t> p {condition_map};
|
||||
|
||||
hb_set_t *cond_set = hb_set_create ();
|
||||
if (unlikely (!cond_set)) return MEM_ERR_WITH_VAR;
|
||||
if (unlikely (!cond_set)) return;
|
||||
hb::shared_ptr<hb_set_t> s {cond_set};
|
||||
|
||||
c->apply = true;
|
||||
bool should_keep = false;
|
||||
unsigned num_kept_cond = 0, cond_idx = 0;
|
||||
for (const auto& offset : conditions)
|
||||
{
|
||||
Cond_with_Var_flag_t ret = (this+offset).keep_with_variations (c, condition_map);
|
||||
// one condition is not met, drop the entire record
|
||||
// condition is not met or condition out of range, drop the entire record
|
||||
if (ret == DROP_RECORD_WITH_VAR)
|
||||
return DROP_RECORD_WITH_VAR;
|
||||
return;
|
||||
|
||||
// axis not pinned, keep this condition
|
||||
if (ret == KEEP_COND_WITH_VAR)
|
||||
{
|
||||
should_keep = true;
|
||||
cond_set->add (cond_idx);
|
||||
num_kept_cond++;
|
||||
}
|
||||
|
||||
if (ret == KEEP_RECORD_WITH_VAR)
|
||||
should_keep = true;
|
||||
|
||||
cond_idx++;
|
||||
}
|
||||
|
||||
// all conditions met
|
||||
if (num_kept_cond == 0) return DROP_COND_WITH_VAR;
|
||||
if (!should_keep) return;
|
||||
|
||||
//check if condition_set is unique with variations
|
||||
if (c->conditionset_map->has (p))
|
||||
//duplicate found, drop the entire record
|
||||
return DROP_RECORD_WITH_VAR;
|
||||
return;
|
||||
|
||||
c->conditionset_map->set (p, 1);
|
||||
c->record_cond_idx_map->set (c->cur_record_idx, s);
|
||||
|
||||
return KEEP_COND_WITH_VAR;
|
||||
if (should_keep && num_kept_cond == 0)
|
||||
c->universal = true;
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
@ -3289,12 +3344,11 @@ struct FeatureVariationRecord
|
||||
void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
|
||||
const void *base) const
|
||||
{
|
||||
// ret == 1, all conditions met
|
||||
if ((base+conditions).keep_with_variations (c) == DROP_COND_WITH_VAR &&
|
||||
c->apply)
|
||||
(base+conditions).keep_with_variations (c);
|
||||
if (c->apply && !c->variation_applied)
|
||||
{
|
||||
(base+substitutions).collect_feature_substitutes_with_variations (c);
|
||||
c->apply = false; // set variations only once
|
||||
c->variation_applied = true; // set variations only once
|
||||
}
|
||||
}
|
||||
|
||||
@ -3361,7 +3415,12 @@ struct FeatureVariations
|
||||
{
|
||||
c->cur_record_idx = i;
|
||||
varRecords[i].collect_feature_substitutes_with_variations (c, this);
|
||||
if (c->universal)
|
||||
break;
|
||||
}
|
||||
if (c->variation_applied && !c->universal &&
|
||||
!c->record_cond_idx_map->is_empty ())
|
||||
c->insert_catch_all_feature_variation_record = true;
|
||||
}
|
||||
|
||||
FeatureVariations* copy (hb_serialize_context_t *c) const
|
||||
|
@ -143,9 +143,12 @@ struct hb_closure_context_t :
|
||||
return active_glyphs_stack.tail ();
|
||||
}
|
||||
|
||||
hb_set_t& push_cur_active_glyphs ()
|
||||
hb_set_t* push_cur_active_glyphs ()
|
||||
{
|
||||
return *active_glyphs_stack.push ();
|
||||
hb_set_t *s = active_glyphs_stack.push ();
|
||||
if (unlikely (active_glyphs_stack.in_error ()))
|
||||
return nullptr;
|
||||
return s;
|
||||
}
|
||||
|
||||
bool pop_cur_done_glyphs ()
|
||||
@ -399,16 +402,6 @@ struct hb_ot_apply_context_t :
|
||||
{
|
||||
struct matcher_t
|
||||
{
|
||||
matcher_t () :
|
||||
lookup_props (0),
|
||||
mask (-1),
|
||||
ignore_zwnj (false),
|
||||
ignore_zwj (false),
|
||||
per_syllable (false),
|
||||
syllable {0},
|
||||
match_func (nullptr),
|
||||
match_data (nullptr) {}
|
||||
|
||||
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);
|
||||
|
||||
void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
|
||||
@ -427,6 +420,9 @@ struct hb_ot_apply_context_t :
|
||||
MATCH_MAYBE
|
||||
};
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
may_match_t may_match (hb_glyph_info_t &info,
|
||||
hb_codepoint_t glyph_data) const
|
||||
{
|
||||
@ -446,6 +442,9 @@ struct hb_ot_apply_context_t :
|
||||
SKIP_MAYBE
|
||||
};
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
may_skip_t may_skip (const hb_ot_apply_context_t *c,
|
||||
const hb_glyph_info_t &info) const
|
||||
{
|
||||
@ -461,14 +460,14 @@ struct hb_ot_apply_context_t :
|
||||
}
|
||||
|
||||
protected:
|
||||
unsigned int lookup_props;
|
||||
hb_mask_t mask;
|
||||
bool ignore_zwnj;
|
||||
bool ignore_zwj;
|
||||
bool per_syllable;
|
||||
uint8_t syllable;
|
||||
match_func_t match_func;
|
||||
const void *match_data;
|
||||
unsigned int lookup_props = 0;
|
||||
hb_mask_t mask = -1;
|
||||
bool ignore_zwnj = false;
|
||||
bool ignore_zwj = false;
|
||||
bool per_syllable = false;
|
||||
uint8_t syllable = 0;
|
||||
match_func_t match_func = nullptr;
|
||||
const void *match_data = nullptr;
|
||||
};
|
||||
|
||||
struct skipping_iterator_t
|
||||
@ -516,30 +515,34 @@ struct hb_ot_apply_context_t :
|
||||
}
|
||||
#endif
|
||||
|
||||
void reset (unsigned int start_index_,
|
||||
unsigned int num_items_)
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
void reset (unsigned int start_index_)
|
||||
{
|
||||
idx = start_index_;
|
||||
num_items = num_items_;
|
||||
end = c->buffer->len;
|
||||
matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
|
||||
}
|
||||
|
||||
void reset_fast (unsigned int start_index_,
|
||||
unsigned int num_items_)
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
void reset_fast (unsigned int start_index_)
|
||||
{
|
||||
// Doesn't set end or syllable. Used by GPOS which doesn't care / change.
|
||||
idx = start_index_;
|
||||
num_items = num_items_;
|
||||
}
|
||||
|
||||
void reject ()
|
||||
{
|
||||
num_items++;
|
||||
backup_glyph_data ();
|
||||
}
|
||||
|
||||
matcher_t::may_skip_t
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
may_skip (const hb_glyph_info_t &info) const
|
||||
{ return matcher.may_skip (c, info); }
|
||||
|
||||
@ -549,6 +552,9 @@ struct hb_ot_apply_context_t :
|
||||
SKIP
|
||||
};
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
match_t match (hb_glyph_info_t &info)
|
||||
{
|
||||
matcher_t::may_skip_t skip = matcher.may_skip (c, info);
|
||||
@ -567,14 +573,12 @@ struct hb_ot_apply_context_t :
|
||||
return SKIP;
|
||||
}
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
bool next (unsigned *unsafe_to = nullptr)
|
||||
{
|
||||
assert (num_items > 0);
|
||||
/* The alternate condition below is faster at string boundaries,
|
||||
* but produces subpar "unsafe-to-concat" values. */
|
||||
signed stop = (signed) end - (signed) num_items;
|
||||
if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
|
||||
stop = (signed) end - 1;
|
||||
const signed stop = (signed) end - 1;
|
||||
while ((signed) idx < stop)
|
||||
{
|
||||
idx++;
|
||||
@ -582,7 +586,6 @@ struct hb_ot_apply_context_t :
|
||||
{
|
||||
case MATCH:
|
||||
{
|
||||
num_items--;
|
||||
advance_glyph_data ();
|
||||
return true;
|
||||
}
|
||||
@ -600,14 +603,12 @@ struct hb_ot_apply_context_t :
|
||||
*unsafe_to = end;
|
||||
return false;
|
||||
}
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
bool prev (unsigned *unsafe_from = nullptr)
|
||||
{
|
||||
assert (num_items > 0);
|
||||
/* The alternate condition below is faster at string boundaries,
|
||||
* but produces subpar "unsafe-to-concat" values. */
|
||||
unsigned stop = num_items - 1;
|
||||
if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
|
||||
stop = 1 - 1;
|
||||
const unsigned stop = 0;
|
||||
while (idx > stop)
|
||||
{
|
||||
idx--;
|
||||
@ -615,7 +616,6 @@ struct hb_ot_apply_context_t :
|
||||
{
|
||||
case MATCH:
|
||||
{
|
||||
num_items--;
|
||||
advance_glyph_data ();
|
||||
return true;
|
||||
}
|
||||
@ -634,6 +634,7 @@ struct hb_ot_apply_context_t :
|
||||
return false;
|
||||
}
|
||||
|
||||
HB_ALWAYS_INLINE
|
||||
hb_codepoint_t
|
||||
get_glyph_data ()
|
||||
{
|
||||
@ -644,6 +645,7 @@ struct hb_ot_apply_context_t :
|
||||
#endif
|
||||
return 0;
|
||||
}
|
||||
HB_ALWAYS_INLINE
|
||||
void
|
||||
advance_glyph_data ()
|
||||
{
|
||||
@ -672,7 +674,6 @@ struct hb_ot_apply_context_t :
|
||||
const HBUINT24 *match_glyph_data24;
|
||||
#endif
|
||||
|
||||
unsigned int num_items;
|
||||
unsigned int end;
|
||||
};
|
||||
|
||||
@ -703,6 +704,7 @@ struct hb_ot_apply_context_t :
|
||||
hb_font_t *font;
|
||||
hb_face_t *face;
|
||||
hb_buffer_t *buffer;
|
||||
hb_sanitize_context_t sanitizer;
|
||||
recurse_func_t recurse_func = nullptr;
|
||||
const GDEF &gdef;
|
||||
const GDEF::accelerator_t &gdef_accel;
|
||||
@ -729,9 +731,11 @@ struct hb_ot_apply_context_t :
|
||||
|
||||
hb_ot_apply_context_t (unsigned int table_index_,
|
||||
hb_font_t *font_,
|
||||
hb_buffer_t *buffer_) :
|
||||
hb_buffer_t *buffer_,
|
||||
hb_blob_t *table_blob_) :
|
||||
table_index (table_index_),
|
||||
font (font_), face (font->face), buffer (buffer_),
|
||||
sanitizer (table_blob_),
|
||||
gdef (
|
||||
#ifndef HB_NO_OT_LAYOUT
|
||||
*face->table.GDEF->table
|
||||
@ -796,7 +800,7 @@ struct hb_ot_apply_context_t :
|
||||
* match_props has the set index.
|
||||
*/
|
||||
if (match_props & LookupFlag::UseMarkFilteringSet)
|
||||
return gdef.mark_set_covers (match_props >> 16, glyph);
|
||||
return gdef_accel.mark_set_covers (match_props >> 16, glyph);
|
||||
|
||||
/* The second byte of match_props has the meaning
|
||||
* "ignore marks of attachment type different than
|
||||
@ -808,6 +812,9 @@ struct hb_ot_apply_context_t :
|
||||
return true;
|
||||
}
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
bool check_glyph_property (const hb_glyph_info_t *info,
|
||||
unsigned int match_props) const
|
||||
{
|
||||
@ -1165,6 +1172,10 @@ static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
|
||||
}
|
||||
|
||||
|
||||
static inline bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED)
|
||||
{
|
||||
return info.codepoint == value;
|
||||
@ -1185,6 +1196,28 @@ static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, co
|
||||
info.syllable() = klass;
|
||||
return klass == value;
|
||||
}
|
||||
static inline bool match_class_cached1 (hb_glyph_info_t &info, unsigned value, const void *data)
|
||||
{
|
||||
unsigned klass = info.syllable() & 0x0F;
|
||||
if (klass < 15)
|
||||
return klass == value;
|
||||
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
|
||||
klass = class_def.get_class (info.codepoint);
|
||||
if (likely (klass < 15))
|
||||
info.syllable() = (info.syllable() & 0xF0) | klass;
|
||||
return klass == value;
|
||||
}
|
||||
static inline bool match_class_cached2 (hb_glyph_info_t &info, unsigned value, const void *data)
|
||||
{
|
||||
unsigned klass = (info.syllable() & 0xF0) >> 4;
|
||||
if (klass < 15)
|
||||
return klass == value;
|
||||
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
|
||||
klass = class_def.get_class (info.codepoint);
|
||||
if (likely (klass < 15))
|
||||
info.syllable() = (info.syllable() & 0x0F) | (klass << 4);
|
||||
return klass == value;
|
||||
}
|
||||
static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data)
|
||||
{
|
||||
Offset16To<Coverage> coverage;
|
||||
@ -1213,14 +1246,17 @@ static inline bool would_match_input (hb_would_apply_context_t *c,
|
||||
return true;
|
||||
}
|
||||
template <typename HBUINT>
|
||||
static inline bool match_input (hb_ot_apply_context_t *c,
|
||||
unsigned int count, /* Including the first glyph (not matched) */
|
||||
const HBUINT input[], /* Array of input values--start with second glyph */
|
||||
match_func_t match_func,
|
||||
const void *match_data,
|
||||
unsigned int *end_position,
|
||||
unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
|
||||
unsigned int *p_total_component_count = nullptr)
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
static bool match_input (hb_ot_apply_context_t *c,
|
||||
unsigned int count, /* Including the first glyph (not matched) */
|
||||
const HBUINT input[], /* Array of input values--start with second glyph */
|
||||
match_func_t match_func,
|
||||
const void *match_data,
|
||||
unsigned int *end_position,
|
||||
unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
|
||||
unsigned int *p_total_component_count = nullptr)
|
||||
{
|
||||
TRACE_APPLY (nullptr);
|
||||
|
||||
@ -1229,7 +1265,7 @@ static inline bool match_input (hb_ot_apply_context_t *c,
|
||||
hb_buffer_t *buffer = c->buffer;
|
||||
|
||||
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
|
||||
skippy_iter.reset (buffer->idx, count - 1);
|
||||
skippy_iter.reset (buffer->idx);
|
||||
skippy_iter.set_match_func (match_func, match_data);
|
||||
skippy_iter.set_glyph_data (input);
|
||||
|
||||
@ -1456,17 +1492,20 @@ static inline bool ligate_input (hb_ot_apply_context_t *c,
|
||||
}
|
||||
|
||||
template <typename HBUINT>
|
||||
static inline bool match_backtrack (hb_ot_apply_context_t *c,
|
||||
unsigned int count,
|
||||
const HBUINT backtrack[],
|
||||
match_func_t match_func,
|
||||
const void *match_data,
|
||||
unsigned int *match_start)
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
static bool match_backtrack (hb_ot_apply_context_t *c,
|
||||
unsigned int count,
|
||||
const HBUINT backtrack[],
|
||||
match_func_t match_func,
|
||||
const void *match_data,
|
||||
unsigned int *match_start)
|
||||
{
|
||||
TRACE_APPLY (nullptr);
|
||||
|
||||
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
|
||||
skippy_iter.reset (c->buffer->backtrack_len (), count);
|
||||
skippy_iter.reset (c->buffer->backtrack_len ());
|
||||
skippy_iter.set_match_func (match_func, match_data);
|
||||
skippy_iter.set_glyph_data (backtrack);
|
||||
|
||||
@ -1485,18 +1524,21 @@ static inline bool match_backtrack (hb_ot_apply_context_t *c,
|
||||
}
|
||||
|
||||
template <typename HBUINT>
|
||||
static inline bool match_lookahead (hb_ot_apply_context_t *c,
|
||||
unsigned int count,
|
||||
const HBUINT lookahead[],
|
||||
match_func_t match_func,
|
||||
const void *match_data,
|
||||
unsigned int start_index,
|
||||
unsigned int *end_index)
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
static bool match_lookahead (hb_ot_apply_context_t *c,
|
||||
unsigned int count,
|
||||
const HBUINT lookahead[],
|
||||
match_func_t match_func,
|
||||
const void *match_data,
|
||||
unsigned int start_index,
|
||||
unsigned int *end_index)
|
||||
{
|
||||
TRACE_APPLY (nullptr);
|
||||
|
||||
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
|
||||
skippy_iter.reset (start_index - 1, count);
|
||||
skippy_iter.reset (start_index - 1);
|
||||
skippy_iter.set_match_func (match_func, match_data);
|
||||
skippy_iter.set_glyph_data (lookahead);
|
||||
|
||||
@ -1615,10 +1657,13 @@ static void context_closure_recurse_lookups (hb_closure_context_t *c,
|
||||
}
|
||||
|
||||
covered_seq_indicies.add (seqIndex);
|
||||
hb_set_t *cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
if (unlikely (!cur_active_glyphs))
|
||||
return;
|
||||
if (has_pos_glyphs) {
|
||||
c->push_cur_active_glyphs () = std::move (pos_glyphs);
|
||||
*cur_active_glyphs = std::move (pos_glyphs);
|
||||
} else {
|
||||
c->push_cur_active_glyphs ().set (*c->glyphs);
|
||||
*cur_active_glyphs = *c->glyphs;
|
||||
}
|
||||
|
||||
unsigned endIndex = inputCount;
|
||||
@ -1868,12 +1913,13 @@ static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
|
||||
}
|
||||
|
||||
template <typename HBUINT>
|
||||
static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
|
||||
unsigned int inputCount, /* Including the first glyph (not matched) */
|
||||
const HBUINT input[], /* Array of input values--start with second glyph */
|
||||
unsigned int lookupCount,
|
||||
const LookupRecord lookupRecord[],
|
||||
const ContextApplyLookupContext &lookup_context)
|
||||
HB_ALWAYS_INLINE
|
||||
static bool context_apply_lookup (hb_ot_apply_context_t *c,
|
||||
unsigned int inputCount, /* Including the first glyph (not matched) */
|
||||
const HBUINT input[], /* Array of input values--start with second glyph */
|
||||
unsigned int lookupCount,
|
||||
const LookupRecord lookupRecord[],
|
||||
const ContextApplyLookupContext &lookup_context)
|
||||
{
|
||||
unsigned match_end = 0;
|
||||
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
|
||||
@ -1899,6 +1945,9 @@ static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
|
||||
template <typename Types>
|
||||
struct Rule
|
||||
{
|
||||
template <typename T>
|
||||
friend struct RuleSet;
|
||||
|
||||
bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
|
||||
{
|
||||
return context_intersects (glyphs,
|
||||
@ -2001,8 +2050,7 @@ struct Rule
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (inputCount.sanitize (c) &&
|
||||
lookupCount.sanitize (c) &&
|
||||
return_trace (c->check_struct (this) &&
|
||||
c->check_range (inputZ.arrayZ,
|
||||
inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
|
||||
LookupRecord::static_size * lookupCount));
|
||||
@ -2086,13 +2134,105 @@ struct RuleSet
|
||||
const ContextApplyLookupContext &lookup_context) const
|
||||
{
|
||||
TRACE_APPLY (this);
|
||||
return_trace (
|
||||
+ hb_iter (rule)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
|
||||
| hb_any
|
||||
)
|
||||
;
|
||||
|
||||
unsigned num_rules = rule.len;
|
||||
|
||||
#ifndef HB_NO_OT_RULESETS_FAST_PATH
|
||||
if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
|
||||
#endif
|
||||
{
|
||||
slow:
|
||||
return_trace (
|
||||
+ hb_iter (rule)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
|
||||
| hb_any
|
||||
)
|
||||
;
|
||||
}
|
||||
|
||||
/* This version is optimized for speed by matching the first & second
|
||||
* components of the rule here, instead of calling into the matching code.
|
||||
*
|
||||
* Replicated from LigatureSet::apply(). */
|
||||
|
||||
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
|
||||
skippy_iter.reset (c->buffer->idx);
|
||||
skippy_iter.set_match_func (match_always, nullptr);
|
||||
skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
|
||||
unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
|
||||
hb_glyph_info_t *first = nullptr, *second = nullptr;
|
||||
bool matched = skippy_iter.next ();
|
||||
if (likely (matched))
|
||||
{
|
||||
first = &c->buffer->info[skippy_iter.idx];
|
||||
unsafe_to = skippy_iter.idx + 1;
|
||||
|
||||
if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
|
||||
{
|
||||
/* Can't use the fast path if eg. the next char is a default-ignorable
|
||||
* or other skippable. */
|
||||
goto slow;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
/* Failed to match a next glyph. Only try applying rules that have
|
||||
* no further input. */
|
||||
return_trace (
|
||||
+ hb_iter (rule)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_filter ([&] (const Rule &_) { return _.inputCount <= 1; })
|
||||
| hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
|
||||
| hb_any
|
||||
)
|
||||
;
|
||||
}
|
||||
matched = skippy_iter.next ();
|
||||
if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
|
||||
{
|
||||
second = &c->buffer->info[skippy_iter.idx];
|
||||
unsafe_to2 = skippy_iter.idx + 1;
|
||||
}
|
||||
|
||||
auto match_input = lookup_context.funcs.match;
|
||||
auto *input_data = lookup_context.match_data;
|
||||
for (unsigned int i = 0; i < num_rules; i++)
|
||||
{
|
||||
const auto &r = this+rule.arrayZ[i];
|
||||
|
||||
const auto &input = r.inputZ;
|
||||
|
||||
if (r.inputCount <= 1 ||
|
||||
(!match_input ||
|
||||
match_input (*first, input.arrayZ[0], input_data)))
|
||||
{
|
||||
if (!second ||
|
||||
(r.inputCount <= 2 ||
|
||||
(!match_input ||
|
||||
match_input (*second, input.arrayZ[1], input_data)))
|
||||
)
|
||||
{
|
||||
if (r.apply (c, lookup_context))
|
||||
{
|
||||
if (unsafe_to != (unsigned) -1)
|
||||
c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
|
||||
return_trace (true);
|
||||
}
|
||||
}
|
||||
else
|
||||
unsafe_to = unsafe_to2;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (unsafe_to == (unsigned) -1)
|
||||
unsafe_to = unsafe_to1;
|
||||
}
|
||||
}
|
||||
if (likely (unsafe_to != (unsigned) -1))
|
||||
c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
|
||||
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
@ -2168,8 +2308,9 @@ struct ContextFormat1_4
|
||||
|
||||
void closure (hb_closure_context_t *c) const
|
||||
{
|
||||
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
get_coverage ().intersect_set (c->previous_parent_active_glyphs (), cur_active_glyphs);
|
||||
hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
if (unlikely (!cur_active_glyphs)) return;
|
||||
get_coverage ().intersect_set (c->previous_parent_active_glyphs (), *cur_active_glyphs);
|
||||
|
||||
struct ContextClosureLookupContext lookup_context = {
|
||||
{intersects_glyph, intersected_glyph},
|
||||
@ -2338,9 +2479,10 @@ struct ContextFormat2_5
|
||||
if (!(this+coverage).intersects (c->glyphs))
|
||||
return;
|
||||
|
||||
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
if (unlikely (!cur_active_glyphs)) return;
|
||||
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
|
||||
cur_active_glyphs);
|
||||
*cur_active_glyphs);
|
||||
|
||||
const ClassDef &class_def = this+classDef;
|
||||
|
||||
@ -2469,11 +2611,7 @@ struct ContextFormat2_5
|
||||
if (cached && c->buffer->cur().syllable() < 255)
|
||||
index = c->buffer->cur().syllable ();
|
||||
else
|
||||
{
|
||||
index = class_def.get_class (c->buffer->cur().codepoint);
|
||||
if (cached && index < 255)
|
||||
c->buffer->cur().syllable() = index;
|
||||
}
|
||||
const RuleSet &rule_set = this+ruleSet[index];
|
||||
return_trace (rule_set.apply (c, lookup_context));
|
||||
}
|
||||
@ -2583,10 +2721,10 @@ struct ContextFormat3
|
||||
if (!(this+coverageZ[0]).intersects (c->glyphs))
|
||||
return;
|
||||
|
||||
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
if (unlikely (!cur_active_glyphs)) return;
|
||||
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
|
||||
cur_active_glyphs);
|
||||
|
||||
*cur_active_glyphs);
|
||||
|
||||
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
|
||||
struct ContextClosureLookupContext lookup_context = {
|
||||
@ -2687,14 +2825,14 @@ struct ContextFormat3
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (!c->check_struct (this)) return_trace (false);
|
||||
if (unlikely (!c->check_struct (this))) return_trace (false);
|
||||
unsigned int count = glyphCount;
|
||||
if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
|
||||
if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
|
||||
if (unlikely (!count)) return_trace (false); /* We want to access coverageZ[0] freely. */
|
||||
if (unlikely (!c->check_array (coverageZ.arrayZ, count))) return_trace (false);
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (!coverageZ[i].sanitize (c, this)) return_trace (false);
|
||||
if (unlikely (!coverageZ[i].sanitize (c, this))) return_trace (false);
|
||||
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
|
||||
return_trace (c->check_array (lookupRecord, lookupCount));
|
||||
return_trace (likely (c->check_array (lookupRecord, lookupCount)));
|
||||
}
|
||||
|
||||
protected:
|
||||
@ -2867,16 +3005,17 @@ static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c
|
||||
}
|
||||
|
||||
template <typename HBUINT>
|
||||
static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
|
||||
unsigned int backtrackCount,
|
||||
const HBUINT backtrack[],
|
||||
unsigned int inputCount, /* Including the first glyph (not matched) */
|
||||
const HBUINT input[], /* Array of input values--start with second glyph */
|
||||
unsigned int lookaheadCount,
|
||||
const HBUINT lookahead[],
|
||||
unsigned int lookupCount,
|
||||
const LookupRecord lookupRecord[],
|
||||
const ChainContextApplyLookupContext &lookup_context)
|
||||
HB_ALWAYS_INLINE
|
||||
static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
|
||||
unsigned int backtrackCount,
|
||||
const HBUINT backtrack[],
|
||||
unsigned int inputCount, /* Including the first glyph (not matched) */
|
||||
const HBUINT input[], /* Array of input values--start with second glyph */
|
||||
unsigned int lookaheadCount,
|
||||
const HBUINT lookahead[],
|
||||
unsigned int lookupCount,
|
||||
const LookupRecord lookupRecord[],
|
||||
const ChainContextApplyLookupContext &lookup_context)
|
||||
{
|
||||
unsigned end_index = c->buffer->idx;
|
||||
unsigned match_end = 0;
|
||||
@ -2915,6 +3054,9 @@ static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
|
||||
template <typename Types>
|
||||
struct ChainRule
|
||||
{
|
||||
template <typename T>
|
||||
friend struct ChainRuleSet;
|
||||
|
||||
bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
|
||||
{
|
||||
const auto &input = StructAfter<decltype (inputX)> (backtrack);
|
||||
@ -3014,8 +3156,6 @@ struct ChainRule
|
||||
const hb_map_t *lookahead_map = nullptr) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
auto *out = c->start_embed (this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
const hb_map_t *mapping = backtrack_map;
|
||||
serialize_array (c, backtrack.len, + backtrack.iter ()
|
||||
@ -3077,13 +3217,14 @@ struct ChainRule
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (!backtrack.sanitize (c)) return_trace (false);
|
||||
/* Hyper-optimized sanitized because this is really hot. */
|
||||
if (unlikely (!backtrack.len.sanitize (c))) return_trace (false);
|
||||
const auto &input = StructAfter<decltype (inputX)> (backtrack);
|
||||
if (!input.sanitize (c)) return_trace (false);
|
||||
if (unlikely (!input.lenP1.sanitize (c))) return_trace (false);
|
||||
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
|
||||
if (!lookahead.sanitize (c)) return_trace (false);
|
||||
if (unlikely (!lookahead.len.sanitize (c))) return_trace (false);
|
||||
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
|
||||
return_trace (lookup.sanitize (c));
|
||||
return_trace (likely (lookup.sanitize (c)));
|
||||
}
|
||||
|
||||
protected:
|
||||
@ -3091,7 +3232,7 @@ struct ChainRule
|
||||
backtrack; /* Array of backtracking values
|
||||
* (to be matched before the input
|
||||
* sequence) */
|
||||
HeadlessArrayOf<typename Types::HBUINT>
|
||||
HeadlessArray16Of<typename Types::HBUINT>
|
||||
inputX; /* Array of input values (start with
|
||||
* second glyph) */
|
||||
Array16Of<typename Types::HBUINT>
|
||||
@ -3164,13 +3305,119 @@ struct ChainRuleSet
|
||||
const ChainContextApplyLookupContext &lookup_context) const
|
||||
{
|
||||
TRACE_APPLY (this);
|
||||
return_trace (
|
||||
+ hb_iter (rule)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
|
||||
| hb_any
|
||||
)
|
||||
;
|
||||
|
||||
unsigned num_rules = rule.len;
|
||||
|
||||
#ifndef HB_NO_OT_RULESETS_FAST_PATH
|
||||
if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
|
||||
#endif
|
||||
{
|
||||
slow:
|
||||
return_trace (
|
||||
+ hb_iter (rule)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
|
||||
| hb_any
|
||||
)
|
||||
;
|
||||
}
|
||||
|
||||
/* This version is optimized for speed by matching the first & second
|
||||
* components of the rule here, instead of calling into the matching code.
|
||||
*
|
||||
* Replicated from LigatureSet::apply(). */
|
||||
|
||||
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
|
||||
skippy_iter.reset (c->buffer->idx);
|
||||
skippy_iter.set_match_func (match_always, nullptr);
|
||||
skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
|
||||
unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
|
||||
hb_glyph_info_t *first = nullptr, *second = nullptr;
|
||||
bool matched = skippy_iter.next ();
|
||||
if (likely (matched))
|
||||
{
|
||||
first = &c->buffer->info[skippy_iter.idx];
|
||||
unsafe_to1 = skippy_iter.idx + 1;
|
||||
|
||||
if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
|
||||
{
|
||||
/* Can't use the fast path if eg. the next char is a default-ignorable
|
||||
* or other skippable. */
|
||||
goto slow;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
/* Failed to match a next glyph. Only try applying rules that have
|
||||
* no further input and lookahead. */
|
||||
return_trace (
|
||||
+ hb_iter (rule)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_filter ([&] (const ChainRule &_)
|
||||
{
|
||||
const auto &input = StructAfter<decltype (_.inputX)> (_.backtrack);
|
||||
const auto &lookahead = StructAfter<decltype (_.lookaheadX)> (input);
|
||||
return input.lenP1 <= 1 && lookahead.len == 0;
|
||||
})
|
||||
| hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
|
||||
| hb_any
|
||||
)
|
||||
;
|
||||
}
|
||||
matched = skippy_iter.next ();
|
||||
if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
|
||||
{
|
||||
second = &c->buffer->info[skippy_iter.idx];
|
||||
unsafe_to2 = skippy_iter.idx + 1;
|
||||
}
|
||||
|
||||
auto match_input = lookup_context.funcs.match[1];
|
||||
auto match_lookahead = lookup_context.funcs.match[2];
|
||||
auto *input_data = lookup_context.match_data[1];
|
||||
auto *lookahead_data = lookup_context.match_data[2];
|
||||
for (unsigned int i = 0; i < num_rules; i++)
|
||||
{
|
||||
const auto &r = this+rule.arrayZ[i];
|
||||
|
||||
const auto &input = StructAfter<decltype (r.inputX)> (r.backtrack);
|
||||
const auto &lookahead = StructAfter<decltype (r.lookaheadX)> (input);
|
||||
|
||||
unsigned lenP1 = hb_max ((unsigned) input.lenP1, 1u);
|
||||
if (lenP1 > 1 ?
|
||||
(!match_input ||
|
||||
match_input (*first, input.arrayZ[0], input_data))
|
||||
:
|
||||
(!lookahead.len || !match_lookahead ||
|
||||
match_lookahead (*first, lookahead.arrayZ[0], lookahead_data)))
|
||||
{
|
||||
if (!second ||
|
||||
(lenP1 > 2 ?
|
||||
(!match_input ||
|
||||
match_input (*second, input.arrayZ[1], input_data))
|
||||
:
|
||||
(lookahead.len <= 2 - lenP1 || !match_lookahead ||
|
||||
match_lookahead (*second, lookahead.arrayZ[2 - lenP1], lookahead_data))))
|
||||
{
|
||||
if (r.apply (c, lookup_context))
|
||||
{
|
||||
if (unsafe_to != (unsigned) -1)
|
||||
c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
|
||||
return_trace (true);
|
||||
}
|
||||
}
|
||||
else
|
||||
unsafe_to = unsafe_to2;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (unsafe_to == (unsigned) -1)
|
||||
unsafe_to = unsafe_to1;
|
||||
}
|
||||
}
|
||||
if (likely (unsafe_to != (unsigned) -1))
|
||||
c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
|
||||
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
@ -3251,9 +3498,10 @@ struct ChainContextFormat1_4
|
||||
|
||||
void closure (hb_closure_context_t *c) const
|
||||
{
|
||||
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
if (unlikely (!cur_active_glyphs)) return;
|
||||
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
|
||||
cur_active_glyphs);
|
||||
*cur_active_glyphs);
|
||||
|
||||
struct ChainContextClosureLookupContext lookup_context = {
|
||||
{intersects_glyph, intersected_glyph},
|
||||
@ -3423,10 +3671,10 @@ struct ChainContextFormat2_5
|
||||
if (!(this+coverage).intersects (c->glyphs))
|
||||
return;
|
||||
|
||||
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
if (unlikely (!cur_active_glyphs)) return;
|
||||
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
|
||||
cur_active_glyphs);
|
||||
|
||||
*cur_active_glyphs);
|
||||
|
||||
const ClassDef &backtrack_class_def = this+backtrackClassDef;
|
||||
const ClassDef &input_class_def = this+inputClassDef;
|
||||
@ -3568,26 +3816,22 @@ struct ChainContextFormat2_5
|
||||
const ClassDef &input_class_def = this+inputClassDef;
|
||||
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
|
||||
|
||||
/* For ChainContextFormat2_5 we cache the LookaheadClassDef instead of InputClassDef.
|
||||
* The reason is that most heavy fonts want to identify a glyph in context and apply
|
||||
* a lookup to it. In this scenario, the length of the input sequence is one, whereas
|
||||
* the lookahead / backtrack are typically longer. The one glyph in input sequence is
|
||||
* looked-up below and no input glyph is looked up in individual rules, whereas the
|
||||
* lookahead and backtrack glyphs are tried. Since we match lookahead before backtrack,
|
||||
* we should cache lookahead. This decisions showed a 20% improvement in shaping of
|
||||
* the Gulzar font.
|
||||
*/
|
||||
|
||||
/* match_class_caches1 is slightly faster. Use it for lookahead,
|
||||
* which is typically longer. */
|
||||
struct ChainContextApplyLookupContext lookup_context = {
|
||||
{{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached : match_class,
|
||||
cached && &input_class_def == &lookahead_class_def ? match_class_cached : match_class,
|
||||
cached ? match_class_cached : match_class}},
|
||||
{{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached1 : match_class,
|
||||
cached ? match_class_cached2 : match_class,
|
||||
cached ? match_class_cached1 : match_class}},
|
||||
{&backtrack_class_def,
|
||||
&input_class_def,
|
||||
&lookahead_class_def}
|
||||
};
|
||||
|
||||
index = input_class_def.get_class (c->buffer->cur().codepoint);
|
||||
// Note: Corresponds to match_class_cached2
|
||||
if (cached && ((c->buffer->cur().syllable() & 0xF0) >> 4) < 15)
|
||||
index = (c->buffer->cur().syllable () & 0xF0) >> 4;
|
||||
else
|
||||
index = input_class_def.get_class (c->buffer->cur().codepoint);
|
||||
const ChainRuleSet &rule_set = this+ruleSet[index];
|
||||
return_trace (rule_set.apply (c, lookup_context));
|
||||
}
|
||||
@ -3727,10 +3971,11 @@ struct ChainContextFormat3
|
||||
if (!(this+input[0]).intersects (c->glyphs))
|
||||
return;
|
||||
|
||||
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
|
||||
if (unlikely (!cur_active_glyphs))
|
||||
return;
|
||||
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
|
||||
cur_active_glyphs);
|
||||
|
||||
*cur_active_glyphs);
|
||||
|
||||
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
|
||||
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
|
||||
@ -3849,8 +4094,6 @@ struct ChainContextFormat3
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
||||
auto *out = c->serializer->start_embed (this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
|
||||
|
||||
if (!serialize_coverage_offsets (c, backtrack.iter (), this))
|
||||
@ -3877,14 +4120,14 @@ struct ChainContextFormat3
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (!backtrack.sanitize (c, this)) return_trace (false);
|
||||
if (unlikely (!backtrack.sanitize (c, this))) return_trace (false);
|
||||
const auto &input = StructAfter<decltype (inputX)> (backtrack);
|
||||
if (!input.sanitize (c, this)) return_trace (false);
|
||||
if (!input.len) return_trace (false); /* To be consistent with Context. */
|
||||
if (unlikely (!input.sanitize (c, this))) return_trace (false);
|
||||
if (unlikely (!input.len)) return_trace (false); /* To be consistent with Context. */
|
||||
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
|
||||
if (!lookahead.sanitize (c, this)) return_trace (false);
|
||||
if (unlikely (!lookahead.sanitize (c, this))) return_trace (false);
|
||||
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
|
||||
return_trace (lookup.sanitize (c));
|
||||
return_trace (likely (lookup.sanitize (c)));
|
||||
}
|
||||
|
||||
protected:
|
||||
@ -3974,7 +4217,7 @@ struct ExtensionFormat1
|
||||
TRACE_SUBSET (this);
|
||||
|
||||
auto *out = c->serializer->start_embed (this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
out->format = format;
|
||||
out->extensionLookupType = extensionLookupType;
|
||||
@ -4092,6 +4335,9 @@ struct hb_ot_layout_lookup_accelerator_t
|
||||
bool may_have (hb_codepoint_t g) const
|
||||
{ return digest.may_have (g); }
|
||||
|
||||
#ifndef HB_OPTIMIZE_SIZE
|
||||
HB_ALWAYS_INLINE
|
||||
#endif
|
||||
bool apply (hb_ot_apply_context_t *c, unsigned subtables_count, bool use_cache) const
|
||||
{
|
||||
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
|
||||
@ -4503,7 +4749,10 @@ struct GSUBGPOS
|
||||
{
|
||||
accelerator_t (hb_face_t *face)
|
||||
{
|
||||
this->table = hb_sanitize_context_t ().reference_table<T> (face);
|
||||
hb_sanitize_context_t sc;
|
||||
sc.lazy_some_gpos = true;
|
||||
this->table = sc.reference_table<T> (face);
|
||||
|
||||
if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
|
||||
{
|
||||
hb_blob_destroy (this->table.get_blob ());
|
||||
@ -4528,6 +4777,8 @@ struct GSUBGPOS
|
||||
this->table.destroy ();
|
||||
}
|
||||
|
||||
hb_blob_t *get_blob () const { return table.get_blob (); }
|
||||
|
||||
hb_ot_layout_lookup_accelerator_t *get_accel (unsigned lookup_index) const
|
||||
{
|
||||
if (unlikely (lookup_index >= lookup_count)) return nullptr;
|
||||
|
235
src/3rdparty/harfbuzz-ng/src/hb-ot-layout.cc
vendored
235
src/3rdparty/harfbuzz-ng/src/hb-ot-layout.cc
vendored
@ -1241,7 +1241,7 @@ script_collect_features (hb_collect_features_context_t *c,
|
||||
* terminated by %HB_TAG_NONE
|
||||
* @features: (nullable) (array zero-terminated=1): The array of features to collect,
|
||||
* terminated by %HB_TAG_NONE
|
||||
* @feature_indexes: (out): The array of feature indexes found for the query
|
||||
* @feature_indexes: (out): The set of feature indexes found for the query
|
||||
*
|
||||
* Fetches a list of all feature indexes in the specified face's GSUB table
|
||||
* or GPOS table, underneath the specified scripts, languages, and features.
|
||||
@ -1282,6 +1282,44 @@ hb_ot_layout_collect_features (hb_face_t *face,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_ot_layout_collect_features_map:
|
||||
* @face: #hb_face_t to work upon
|
||||
* @table_tag: #HB_OT_TAG_GSUB or #HB_OT_TAG_GPOS
|
||||
* @script_index: The index of the requested script tag
|
||||
* @language_index: The index of the requested language tag
|
||||
* @feature_map: (out): The map of feature tag to feature index.
|
||||
*
|
||||
* Fetches the mapping from feature tags to feature indexes for
|
||||
* the specified script and language.
|
||||
*
|
||||
* Since: 8.1.0
|
||||
**/
|
||||
void
|
||||
hb_ot_layout_collect_features_map (hb_face_t *face,
|
||||
hb_tag_t table_tag,
|
||||
unsigned script_index,
|
||||
unsigned language_index,
|
||||
hb_map_t *feature_map /* OUT */)
|
||||
{
|
||||
const OT::GSUBGPOS &g = get_gsubgpos_table (face, table_tag);
|
||||
const OT::LangSys &l = g.get_script (script_index).get_lang_sys (language_index);
|
||||
|
||||
unsigned int count = l.get_feature_indexes (0, nullptr, nullptr);
|
||||
feature_map->alloc (count);
|
||||
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
unsigned feature_index = 0;
|
||||
unsigned feature_count = 1;
|
||||
l.get_feature_indexes (i, &feature_count, &feature_index);
|
||||
if (!feature_count)
|
||||
break;
|
||||
hb_tag_t feature_tag = g.get_feature_tag (feature_index);
|
||||
feature_map->set (feature_tag, feature_index);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* hb_ot_layout_collect_lookups:
|
||||
@ -1316,8 +1354,7 @@ hb_ot_layout_collect_lookups (hb_face_t *face,
|
||||
hb_set_t feature_indexes;
|
||||
hb_ot_layout_collect_features (face, table_tag, scripts, languages, features, &feature_indexes);
|
||||
|
||||
for (hb_codepoint_t feature_index = HB_SET_VALUE_INVALID;
|
||||
hb_set_next (&feature_indexes, &feature_index);)
|
||||
for (auto feature_index : feature_indexes)
|
||||
g.get_feature (feature_index).add_lookup_indexes_to (lookup_indexes);
|
||||
|
||||
g.feature_variation_collect_lookups (&feature_indexes, nullptr, lookup_indexes);
|
||||
@ -1570,7 +1607,7 @@ hb_ot_layout_lookups_substitute_closure (hb_face_t *face,
|
||||
glyphs_length = glyphs->get_population ();
|
||||
if (lookups)
|
||||
{
|
||||
for (hb_codepoint_t lookup_index = HB_SET_VALUE_INVALID; hb_set_next (lookups, &lookup_index);)
|
||||
for (auto lookup_index : *lookups)
|
||||
gsub.get_lookup (lookup_index).closure (&c, lookup_index);
|
||||
}
|
||||
else
|
||||
@ -1953,7 +1990,7 @@ inline void hb_ot_map_t::apply (const Proxy &proxy,
|
||||
{
|
||||
const unsigned int table_index = proxy.table_index;
|
||||
unsigned int i = 0;
|
||||
OT::hb_ot_apply_context_t c (table_index, font, buffer);
|
||||
OT::hb_ot_apply_context_t c (table_index, font, buffer, proxy.accel.get_blob ());
|
||||
c.set_recurse_func (Proxy::Lookup::template dispatch_recurse_func<OT::hb_ot_apply_context_t>);
|
||||
|
||||
for (unsigned int stage_index = 0; stage_index < stages[table_index].length; stage_index++)
|
||||
@ -2011,20 +2048,20 @@ void hb_ot_map_t::substitute (const hb_ot_shape_plan_t *plan, hb_font_t *font, h
|
||||
{
|
||||
GSUBProxy proxy (font->face);
|
||||
if (buffer->messaging () &&
|
||||
!buffer->message (font, "start table GSUB")) return;
|
||||
!buffer->message (font, "start table GSUB script tag '%c%c%c%c'", HB_UNTAG (chosen_script[0]))) return;
|
||||
apply (proxy, plan, font, buffer);
|
||||
if (buffer->messaging ())
|
||||
(void) buffer->message (font, "end table GSUB");
|
||||
(void) buffer->message (font, "end table GSUB script tag '%c%c%c%c'", HB_UNTAG (chosen_script[0]));
|
||||
}
|
||||
|
||||
void hb_ot_map_t::position (const hb_ot_shape_plan_t *plan, hb_font_t *font, hb_buffer_t *buffer) const
|
||||
{
|
||||
GPOSProxy proxy (font->face);
|
||||
if (buffer->messaging () &&
|
||||
!buffer->message (font, "start table GPOS")) return;
|
||||
!buffer->message (font, "start table GPOS script tag '%c%c%c%c'", HB_UNTAG (chosen_script[1]))) return;
|
||||
apply (proxy, plan, font, buffer);
|
||||
if (buffer->messaging ())
|
||||
(void) buffer->message (font, "end table GPOS");
|
||||
(void) buffer->message (font, "end table GPOS script tag '%c%c%c%c'", HB_UNTAG (chosen_script[1]));
|
||||
}
|
||||
|
||||
void
|
||||
@ -2036,6 +2073,112 @@ hb_ot_layout_substitute_lookup (OT::hb_ot_apply_context_t *c,
|
||||
}
|
||||
|
||||
#ifndef HB_NO_BASE
|
||||
|
||||
static void
|
||||
choose_base_tags (hb_script_t script,
|
||||
hb_language_t language,
|
||||
hb_tag_t *script_tag,
|
||||
hb_tag_t *language_tag)
|
||||
{
|
||||
hb_tag_t script_tags[HB_OT_MAX_TAGS_PER_SCRIPT];
|
||||
unsigned script_count = ARRAY_LENGTH (script_tags);
|
||||
|
||||
hb_tag_t language_tags[HB_OT_MAX_TAGS_PER_LANGUAGE];
|
||||
unsigned language_count = ARRAY_LENGTH (language_tags);
|
||||
|
||||
hb_ot_tags_from_script_and_language (script, language,
|
||||
&script_count, script_tags,
|
||||
&language_count, language_tags);
|
||||
|
||||
*script_tag = script_count ? script_tags[script_count - 1] : HB_OT_TAG_DEFAULT_SCRIPT;
|
||||
*language_tag = language_count ? language_tags[language_count - 1] : HB_OT_TAG_DEFAULT_LANGUAGE;
|
||||
}

/**
* hb_ot_layout_get_font_extents:
* @font: a font
* @direction: text direction.
* @script_tag: script tag.
* @language_tag: language tag.
* @extents: (out) (nullable): font extents if found.
*
* Fetches script/language-specific font extents. These values are
* looked up in the `BASE` table's `MinMax` records.
*
* If no such extents are found, the default extents for the font are
* fetched. As such, the return value of this function can for the
* most part be ignored. Note that the per-script/language extents
* do not have a line-gap value, and the line-gap is set to zero in
* that case.
*
* Return value: `true` if found script/language-specific font extents.
*
* Since: 8.0.0
**/
hb_bool_t
hb_ot_layout_get_font_extents (hb_font_t *font,
hb_direction_t direction,
hb_tag_t script_tag,
hb_tag_t language_tag,
hb_font_extents_t *extents)
{
hb_position_t min, max;
if (font->face->table.BASE->get_min_max (font, direction, script_tag, language_tag, HB_TAG_NONE,
&min, &max))
{
if (extents)
{
extents->ascender = max;
extents->descender = min;
extents->line_gap = 0;
}
return true;
}

hb_font_get_extents_for_direction (font, direction, extents);
return false;
}
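
A hypothetical usage sketch of the new function above (assumes an existing hb_font_t *font and the common BASE script tag 'latn'; not part of this diff):

hb_font_extents_t extents;
hb_bool_t from_base =
  hb_ot_layout_get_font_extents (font,
                                 HB_DIRECTION_LTR,
                                 HB_TAG ('l','a','t','n'),
                                 HB_OT_TAG_DEFAULT_LANGUAGE,
                                 &extents);
/* extents is filled either way; from_base only reports whether the
   values came from a BASE MinMax record or from the default extents. */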

/**
* hb_ot_layout_get_font_extents2:
* @font: a font
* @direction: text direction.
* @script: script.
* @language: (nullable): language.
* @extents: (out) (nullable): font extents if found.
*
* Fetches script/language-specific font extents. These values are
* looked up in the `BASE` table's `MinMax` records.
*
* If no such extents are found, the default extents for the font are
* fetched. As such, the return value of this function can for the
* most part be ignored. Note that the per-script/language extents
* do not have a line-gap value, and the line-gap is set to zero in
* that case.
*
* This function is like hb_ot_layout_get_font_extents() but takes
* #hb_script_t and #hb_language_t instead of OpenType #hb_tag_t.
*
* Return value: `true` if found script/language-specific font extents.
*
* Since: 8.0.0
**/
hb_bool_t
hb_ot_layout_get_font_extents2 (hb_font_t *font,
hb_direction_t direction,
hb_script_t script,
hb_language_t language,
hb_font_extents_t *extents)
{
hb_tag_t script_tag, language_tag;
choose_base_tags (script, language, &script_tag, &language_tag);
return hb_ot_layout_get_font_extents (font,
direction,
script_tag,
language_tag,
extents);
}
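
The same query through the tag-less variant, as a sketch (assumed hb_font_t *font; not part of this diff); the script/language pair is mapped to BASE tags via choose_base_tags():

hb_font_extents_t extents;
hb_ot_layout_get_font_extents2 (font,
                                HB_DIRECTION_TTB,
                                HB_SCRIPT_HAN,
                                hb_language_from_string ("ja", -1),
                                &extents);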

/**
* hb_ot_layout_get_horizontal_baseline_tag_for_script:
* @script: a script tag.
@ -2133,6 +2276,42 @@ hb_ot_layout_get_baseline (hb_font_t *font,
return font->face->table.BASE->get_baseline (font, baseline_tag, direction, script_tag, language_tag, coord);
}

/**
* hb_ot_layout_get_baseline2:
* @font: a font
* @baseline_tag: a baseline tag
* @direction: text direction.
* @script: script.
* @language: (nullable): language, currently unused.
* @coord: (out) (nullable): baseline value if found.
*
* Fetches a baseline value from the face.
*
* This function is like hb_ot_layout_get_baseline() but takes
* #hb_script_t and #hb_language_t instead of OpenType #hb_tag_t.
*
* Return value: `true` if found baseline value in the font.
*
* Since: 8.0.0
**/
hb_bool_t
hb_ot_layout_get_baseline2 (hb_font_t *font,
hb_ot_layout_baseline_tag_t baseline_tag,
hb_direction_t direction,
hb_script_t script,
hb_language_t language,
hb_position_t *coord /* OUT. May be NULL. */)
{
hb_tag_t script_tag, language_tag;
choose_base_tags (script, language, &script_tag, &language_tag);
return hb_ot_layout_get_baseline (font,
baseline_tag,
direction,
script_tag,
language_tag,
coord);
}
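
A hypothetical sketch of the new wrapper above (assumed hb_font_t *font; not part of this diff), querying the Roman baseline for Latin text:

hb_position_t coord;
if (hb_ot_layout_get_baseline2 (font,
                                HB_OT_LAYOUT_BASELINE_TAG_ROMAN,
                                HB_DIRECTION_LTR,
                                HB_SCRIPT_LATIN,
                                hb_language_from_string ("en", -1),
                                &coord))
  {
    /* coord holds the 'romn' baseline, in font units. */
  }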

/**
* hb_ot_layout_get_baseline_with_fallback:
* @font: a font
@ -2355,6 +2534,41 @@ hb_ot_layout_get_baseline_with_fallback (hb_font_t *font,
}
}

/**
* hb_ot_layout_get_baseline_with_fallback2:
* @font: a font
* @baseline_tag: a baseline tag
* @direction: text direction.
* @script: script.
* @language: (nullable): language, currently unused.
* @coord: (out): baseline value if found.
*
* Fetches a baseline value from the face, and synthesizes
* it if the font does not have it.
*
* This function is like hb_ot_layout_get_baseline_with_fallback() but takes
* #hb_script_t and #hb_language_t instead of OpenType #hb_tag_t.
*
* Since: 8.0.0
**/
void
hb_ot_layout_get_baseline_with_fallback2 (hb_font_t *font,
hb_ot_layout_baseline_tag_t baseline_tag,
hb_direction_t direction,
hb_script_t script,
hb_language_t language,
hb_position_t *coord /* OUT */)
{
hb_tag_t script_tag, language_tag;
choose_base_tags (script, language, &script_tag, &language_tag);
hb_ot_layout_get_baseline_with_fallback (font,
baseline_tag,
direction,
script_tag,
language_tag,
coord);
}
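
And a sketch of the fallback variant (assumed hb_font_t *font; not part of this diff); since a value is always synthesized, no return check is needed:

hb_position_t hang;
hb_ot_layout_get_baseline_with_fallback2 (font,
                                          HB_OT_LAYOUT_BASELINE_TAG_HANGING,
                                          HB_DIRECTION_LTR,
                                          HB_SCRIPT_DEVANAGARI,
                                          hb_language_from_string ("hi", -1),
                                          &hang);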

#endif


@ -2451,9 +2665,10 @@ hb_ot_layout_lookup_get_optical_bound (hb_font_t *font,
hb_codepoint_t glyph)
{
const OT::PosLookup &lookup = font->face->table.GPOS->table->get_lookup (lookup_index);
hb_blob_t *blob = font->face->table.GPOS->get_blob ();
hb_glyph_position_t pos = {0};
hb_position_single_dispatch_t c;
lookup.dispatch (&c, font, direction, glyph, pos);
lookup.dispatch (&c, font, blob, direction, glyph, pos);
hb_position_t ret = 0;
switch (direction)
{

37
src/3rdparty/harfbuzz-ng/src/hb-ot-layout.h
vendored
@ -324,6 +324,13 @@ hb_ot_layout_collect_features (hb_face_t *face,
const hb_tag_t *features,
hb_set_t *feature_indexes /* OUT */);

HB_EXTERN void
hb_ot_layout_collect_features_map (hb_face_t *face,
hb_tag_t table_tag,
unsigned script_index,
unsigned language_index,
hb_map_t *feature_map /* OUT */);
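
A hypothetical sketch of this new 8.1.0 entry point (assumed hb_face_t *face and default script/language indices; not part of this diff). Judging from its use in hb-ot-map.cc further down, the map it fills appears to associate feature tags with feature indices:

hb_map_t *feature_map = hb_map_create ();
hb_ot_layout_collect_features_map (face, HB_OT_TAG_GSUB,
                                   0 /* assumed script index */,
                                   HB_OT_LAYOUT_DEFAULT_LANGUAGE_INDEX,
                                   feature_map);
if (hb_map_has (feature_map, HB_TAG ('l','i','g','a')))
  {
    unsigned liga_index = hb_map_get (feature_map, HB_TAG ('l','i','g','a'));
    (void) liga_index; /* collect lookups for 'liga' from here */
  }
hb_map_destroy (feature_map);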

HB_EXTERN void
hb_ot_layout_collect_lookups (hb_face_t *face,
hb_tag_t table_tag,
@ -447,6 +454,20 @@ hb_ot_layout_feature_get_characters (hb_face_t *face,
* BASE
*/

HB_EXTERN hb_bool_t
hb_ot_layout_get_font_extents (hb_font_t *font,
hb_direction_t direction,
hb_tag_t script_tag,
hb_tag_t language_tag,
hb_font_extents_t *extents);

HB_EXTERN hb_bool_t
hb_ot_layout_get_font_extents2 (hb_font_t *font,
hb_direction_t direction,
hb_script_t script,
hb_language_t language,
hb_font_extents_t *extents);

/**
* hb_ot_layout_baseline_tag_t:
* @HB_OT_LAYOUT_BASELINE_TAG_ROMAN: The baseline used by alphabetic scripts such as Latin, Cyrillic and Greek.
@ -499,6 +520,14 @@ hb_ot_layout_get_baseline (hb_font_t *font,
hb_tag_t language_tag,
hb_position_t *coord /* OUT. May be NULL. */);

HB_EXTERN hb_bool_t
hb_ot_layout_get_baseline2 (hb_font_t *font,
hb_ot_layout_baseline_tag_t baseline_tag,
hb_direction_t direction,
hb_script_t script,
hb_language_t language,
hb_position_t *coord /* OUT. May be NULL. */);

HB_EXTERN void
hb_ot_layout_get_baseline_with_fallback (hb_font_t *font,
hb_ot_layout_baseline_tag_t baseline_tag,
@ -507,6 +536,14 @@ hb_ot_layout_get_baseline_with_fallback (hb_font_t *font,
hb_tag_t language_tag,
hb_position_t *coord /* OUT */);

HB_EXTERN void
hb_ot_layout_get_baseline_with_fallback2 (hb_font_t *font,
hb_ot_layout_baseline_tag_t baseline_tag,
hb_direction_t direction,
hb_script_t script,
hb_language_t language,
hb_position_t *coord /* OUT */);

HB_END_DECLS

#endif /* HB_OT_LAYOUT_H */

14
src/3rdparty/harfbuzz-ng/src/hb-ot-layout.hh
vendored
@ -448,7 +448,7 @@ _hb_glyph_info_get_lig_id (const hb_glyph_info_t *info)
static inline bool
_hb_glyph_info_ligated_internal (const hb_glyph_info_t *info)
{
return !!(info->lig_props() & IS_LIG_BASE);
return info->lig_props() & IS_LIG_BASE;
}

static inline unsigned int
@ -496,37 +496,37 @@ _hb_glyph_info_get_glyph_props (const hb_glyph_info_t *info)
static inline bool
_hb_glyph_info_is_base_glyph (const hb_glyph_info_t *info)
{
return !!(info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH);
return info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH;
}

static inline bool
_hb_glyph_info_is_ligature (const hb_glyph_info_t *info)
{
return !!(info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE);
return info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
}

static inline bool
_hb_glyph_info_is_mark (const hb_glyph_info_t *info)
{
return !!(info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK);
return info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK;
}

static inline bool
_hb_glyph_info_substituted (const hb_glyph_info_t *info)
{
return !!(info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED);
return info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
}

static inline bool
_hb_glyph_info_ligated (const hb_glyph_info_t *info)
{
return !!(info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_LIGATED);
return info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
}

static inline bool
_hb_glyph_info_multiplied (const hb_glyph_info_t *info)
{
return !!(info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED);
return info->glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
}

static inline bool

30
src/3rdparty/harfbuzz-ng/src/hb-ot-map.cc
vendored
@ -213,7 +213,8 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
/* Sort features and merge duplicates */
if (feature_infos.length)
{
feature_infos.qsort ();
if (!is_simple)
feature_infos.qsort ();
auto *f = feature_infos.arrayZ;
unsigned int j = 0;
unsigned count = feature_infos.length;
@ -238,6 +239,13 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
feature_infos.shrink (j + 1);
}

hb_map_t feature_indices[2];
for (unsigned int table_index = 0; table_index < 2; table_index++)
hb_ot_layout_collect_features_map (face,
table_tags[table_index],
script_index[table_index],
language_index[table_index],
&feature_indices[table_index]);

/* Allocate bits now */
static_assert ((!(HB_GLYPH_FLAG_DEFINED & (HB_GLYPH_FLAG_DEFINED + 1))), "");
@ -260,7 +268,6 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
if (!info->max_value || next_bit + bits_needed >= global_bit_shift)
continue; /* Feature disabled, or not enough bits. */


bool found = false;
unsigned int feature_index[2];
for (unsigned int table_index = 0; table_index < 2; table_index++)
@ -268,12 +275,14 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
if (required_feature_tag[table_index] == info->tag)
required_feature_stage[table_index] = info->stage[table_index];

found |= (bool) hb_ot_layout_language_find_feature (face,
table_tags[table_index],
script_index[table_index],
language_index[table_index],
info->tag,
&feature_index[table_index]);
hb_codepoint_t *index;
if (feature_indices[table_index].has (info->tag, &index))
{
feature_index[table_index] = *index;
found = true;
}
else
feature_index[table_index] = HB_OT_LAYOUT_NO_FEATURE_INDEX;
}
if (!found && (info->flags & F_GLOBAL_SEARCH))
{
@ -314,7 +323,8 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
map->needs_fallback = !found;
}
//feature_infos.shrink (0); /* Done with these */

if (is_simple)
m.features.qsort ();

add_gsub_pause (nullptr);
add_gpos_pause (nullptr);
@ -350,7 +360,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
}

/* Sort lookups and merge duplicates */
if (last_num_lookups < lookups.length)
if (last_num_lookups + 1 < lookups.length)
{
lookups.as_array ().sub_array (last_num_lookups, lookups.length - last_num_lookups).qsort ();

8
src/3rdparty/harfbuzz-ng/src/hb-ot-map.hh
vendored
@ -60,6 +60,13 @@ struct hb_ot_map_t

int cmp (const hb_tag_t tag_) const
{ return tag_ < tag ? -1 : tag_ > tag ? 1 : 0; }

HB_INTERNAL static int cmp (const void *pa, const void *pb)
{
const feature_map_t *a = (const feature_map_t *) pa;
const feature_map_t *b = (const feature_map_t *) pb;
return a->tag < b->tag ? -1 : a->tag > b->tag ? 1 : 0;
}
};

struct lookup_map_t {
@ -273,6 +280,7 @@ struct hb_ot_map_builder_t

hb_face_t *face;
hb_segment_properties_t props;
bool is_simple;

hb_tag_t chosen_script[2];
bool found_script[2];
Some files were not shown because too many files have changed in this diff.