Index: third_party/harfbuzz-ng/src/hb-ot-layout-gpos-table.hh
diff --git a/third_party/harfbuzz-ng/src/hb-ot-layout-gpos-table.hh b/third_party/harfbuzz-ng/src/hb-ot-layout-gpos-table.hh
index da9506c76653db0a90288effbdec86e9ccf88caa..568b5f63a96013f487a861202e10c6ad637b2512 100644
--- a/third_party/harfbuzz-ng/src/hb-ot-layout-gpos-table.hh
+++ b/third_party/harfbuzz-ng/src/hb-ot-layout-gpos-table.hh
@@ -181,7 +181,7 @@ struct ValueFormat : USHORT
inline bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
+ return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
}
inline bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
@@ -189,17 +189,17 @@ struct ValueFormat : USHORT
TRACE_SANITIZE (this);
unsigned int len = get_len ();
- if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);
+ if (!c->check_array (values, get_size (), count)) return_trace (false);
- if (!has_device ()) return TRACE_RETURN (true);
+ if (!has_device ()) return_trace (true);
for (unsigned int i = 0; i < count; i++) {
if (!sanitize_value_devices (c, base, values))
- return TRACE_RETURN (false);
+ return_trace (false);
values += len;
}
- return TRACE_RETURN (true);
+ return_trace (true);
}
/* Just sanitize referenced Device tables. Doesn't check the values themselves. */
@@ -207,15 +207,15 @@ struct ValueFormat : USHORT
{
TRACE_SANITIZE (this);
- if (!has_device ()) return TRACE_RETURN (true);
+ if (!has_device ()) return_trace (true);
for (unsigned int i = 0; i < count; i++) {
if (!sanitize_value_devices (c, base, values))
- return TRACE_RETURN (false);
+ return_trace (false);
values += stride;
}
- return TRACE_RETURN (true);
+ return_trace (true);
}
};
@@ -232,7 +232,7 @@ struct AnchorFormat1
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this));
+ return_trace (c->check_struct (this));
}
protected:
@@ -262,7 +262,7 @@ struct AnchorFormat2
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this));
+ return_trace (c->check_struct (this));
}
protected:
@@ -291,7 +291,7 @@ struct AnchorFormat3
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
+ return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
}
protected:
@@ -327,12 +327,12 @@ struct Anchor
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- if (!u.format.sanitize (c)) return TRACE_RETURN (false);
+ if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
- case 1: return TRACE_RETURN (u.format1.sanitize (c));
- case 2: return TRACE_RETURN (u.format2.sanitize (c));
- case 3: return TRACE_RETURN (u.format3.sanitize (c));
- default:return TRACE_RETURN (true);
+ case 1: return_trace (u.format1.sanitize (c));
+ case 2: return_trace (u.format2.sanitize (c));
+ case 3: return_trace (u.format3.sanitize (c));
+ default:return_trace (true);
}
}
@@ -360,13 +360,13 @@ struct AnchorMatrix
inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
{
TRACE_SANITIZE (this);
- if (!c->check_struct (this)) return TRACE_RETURN (false);
- if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
+ if (!c->check_struct (this)) return_trace (false);
+ if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return_trace (false);
unsigned int count = rows * cols;
- if (!c->check_array (matrixZ, matrixZ[0].static_size, count)) return TRACE_RETURN (false);
+ if (!c->check_array (matrixZ, matrixZ[0].static_size, count)) return_trace (false);
for (unsigned int i = 0; i < count; i++)
- if (!matrixZ[i].sanitize (c, this)) return TRACE_RETURN (false);
- return TRACE_RETURN (true);
+ if (!matrixZ[i].sanitize (c, this)) return_trace (false);
+ return_trace (true);
}
USHORT rows; /* Number of rows */
@@ -386,7 +386,7 @@ struct MarkRecord
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
+ return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
}
protected:
@@ -415,7 +415,7 @@ struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage orde
const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
/* If this subtable doesn't have an anchor for this base and this class,
* return false such that the subsequent subtables have a chance at it. */
- if (unlikely (!found)) return TRACE_RETURN (false);
+ if (unlikely (!found)) return_trace (false);
hb_position_t mark_x, mark_y, base_x, base_y;
@@ -428,13 +428,13 @@ struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage orde
o.attach_lookback() = buffer->idx - glyph_pos;
buffer->idx++;
- return TRACE_RETURN (true);
+ return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
+ return_trace (ArrayOf<MarkRecord>::sanitize (c, this));
}
};
@@ -459,21 +459,21 @@ struct SinglePosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
- if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
+ if (likely (index == NOT_COVERED)) return_trace (false);
valueFormat.apply_value (c->font, c->direction, this,
values, buffer->cur_pos());
buffer->idx++;
- return TRACE_RETURN (true);
+ return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this)
- && coverage.sanitize (c, this)
- && valueFormat.sanitize_value (c, this, values));
+ return_trace (c->check_struct (this) &&
+ coverage.sanitize (c, this) &&
+ valueFormat.sanitize_value (c, this, values));
}
protected:
@@ -508,24 +508,24 @@ struct SinglePosFormat2
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
- if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
+ if (likely (index == NOT_COVERED)) return_trace (false);
- if (likely (index >= valueCount)) return TRACE_RETURN (false);
+ if (likely (index >= valueCount)) return_trace (false);
valueFormat.apply_value (c->font, c->direction, this,
&values[index * valueFormat.get_len ()],
buffer->cur_pos());
buffer->idx++;
- return TRACE_RETURN (true);
+ return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this)
- && coverage.sanitize (c, this)
- && valueFormat.sanitize_values (c, this, values, valueCount));
+ return_trace (c->check_struct (this) &&
+ coverage.sanitize (c, this) &&
+ valueFormat.sanitize_values (c, this, values, valueCount));
}
protected:
@@ -548,11 +548,11 @@ struct SinglePos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
- if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
+ if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
- case 1: return TRACE_RETURN (c->dispatch (u.format1));
- case 2: return TRACE_RETURN (c->dispatch (u.format2));
- default:return TRACE_RETURN (c->default_return_value ());
+ case 1: return_trace (c->dispatch (u.format1));
+ case 2: return_trace (c->dispatch (u.format2));
+ default:return_trace (c->default_return_value ());
}
}
@@ -615,7 +615,7 @@ struct PairSet
/* Hand-coded bsearch. */
if (unlikely (!count))
- return TRACE_RETURN (false);
+ return_trace (false);
hb_codepoint_t x = buffer->info[pos].codepoint;
int min = 0, max = (int) count - 1;
while (min <= max)
@@ -636,11 +636,11 @@ struct PairSet
if (len2)
pos++;
buffer->idx = pos;
- return TRACE_RETURN (true);
+ return_trace (true);
}
}
- return TRACE_RETURN (false);
+ return_trace (false);
}
struct sanitize_closure_t {
@@ -654,12 +654,12 @@ struct PairSet
{
TRACE_SANITIZE (this);
if (!(c->check_struct (this)
- && c->check_array (arrayZ, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);
+ && c->check_array (arrayZ, USHORT::static_size * closure->stride, len))) return_trace (false);
unsigned int count = len;
const PairValueRecord *record = CastP<PairValueRecord> (arrayZ);
- return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
- && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
+ return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride) &&
+ closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
}
protected:
@@ -691,19 +691,21 @@ struct PairPosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
- if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
+ if (likely (index == NOT_COVERED)) return_trace (false);
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
- if (!skippy_iter.next ()) return TRACE_RETURN (false);
+ if (!skippy_iter.next ()) return_trace (false);
- return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
+ return_trace ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
+ if (!c->check_struct (this)) return_trace (false);
+
unsigned int len1 = valueFormat1.get_len ();
unsigned int len2 = valueFormat2.get_len ();
PairSet::sanitize_closure_t closure = {
@@ -713,7 +715,7 @@ struct PairPosFormat1
1 + len1 + len2
};
- return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
+ return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
}
protected:
@@ -762,11 +764,11 @@ struct PairPosFormat2
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
- if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
+ if (likely (index == NOT_COVERED)) return_trace (false);
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
- if (!skippy_iter.next ()) return TRACE_RETURN (false);
+ if (!skippy_iter.next ()) return_trace (false);
unsigned int len1 = valueFormat1.get_len ();
unsigned int len2 = valueFormat2.get_len ();
@@ -774,7 +776,7 @@ struct PairPosFormat2
unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
- if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);
+ if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
valueFormat1.apply_value (c->font, c->direction, this,
@@ -786,7 +788,7 @@ struct PairPosFormat2
if (len2)
buffer->idx++;
- return TRACE_RETURN (true);
+ return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
@@ -795,16 +797,16 @@ struct PairPosFormat2
if (!(c->check_struct (this)
&& coverage.sanitize (c, this)
&& classDef1.sanitize (c, this)
- && classDef2.sanitize (c, this))) return TRACE_RETURN (false);
+ && classDef2.sanitize (c, this))) return_trace (false);
unsigned int len1 = valueFormat1.get_len ();
unsigned int len2 = valueFormat2.get_len ();
unsigned int stride = len1 + len2;
unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
- return TRACE_RETURN (c->check_array (values, record_size, count) &&
- valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
- valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
+ return_trace (c->check_array (values, record_size, count) &&
+ valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
+ valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
}
protected:
@@ -843,11 +845,11 @@ struct PairPos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
- if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
+ if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
- case 1: return TRACE_RETURN (c->dispatch (u.format1));
- case 2: return TRACE_RETURN (c->dispatch (u.format2));
- default:return TRACE_RETURN (c->default_return_value ());
+ case 1: return_trace (c->dispatch (u.format1));
+ case 2: return_trace (c->dispatch (u.format2));
+ default:return_trace (c->default_return_value ());
}
}
@@ -867,7 +869,7 @@ struct EntryExitRecord
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
+ return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
}
protected:
@@ -883,6 +885,9 @@ struct EntryExitRecord
DEFINE_SIZE_STATIC (4);
};
+static void
+reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent);
+
struct CursivePosFormat1
{
inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
@@ -902,17 +907,17 @@ struct CursivePosFormat1
hb_buffer_t *buffer = c->buffer;
/* We don't handle mark glyphs here. */
- if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return TRACE_RETURN (false);
+ if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return_trace (false);
const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
- if (!this_record.exitAnchor) return TRACE_RETURN (false);
+ if (!this_record.exitAnchor) return_trace (false);
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
- if (!skippy_iter.next ()) return TRACE_RETURN (false);
+ if (!skippy_iter.next ()) return_trace (false);
const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
- if (!next_record.entryAnchor) return TRACE_RETURN (false);
+ if (!next_record.entryAnchor) return_trace (false);
unsigned int i = buffer->idx;
unsigned int j = skippy_iter.idx;
@@ -960,28 +965,47 @@ struct CursivePosFormat1
}
/* Cross-direction adjustment */
- if (c->lookup_props & LookupFlag::RightToLeft) {
- pos[i].cursive_chain() = j - i;
- if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
- pos[i].y_offset = entry_y - exit_y;
- else
- pos[i].x_offset = entry_x - exit_x;
- } else {
- pos[j].cursive_chain() = i - j;
- if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
- pos[j].y_offset = exit_y - entry_y;
- else
- pos[j].x_offset = exit_x - entry_x;
+
+ /* We attach child to parent (think graph theory and rooted trees whereas
+ * the root stays on baseline and each node aligns itself against its
+ * parent.
+ *
+ * Optimize things for the case of RightToLeft, as that's most common in
+ * Arabic. */
+ unsigned int child = i;
+ unsigned int parent = j;
+ hb_position_t x_offset = entry_x - exit_x;
+ hb_position_t y_offset = entry_y - exit_y;
+ if (!(c->lookup_props & LookupFlag::RightToLeft))
+ {
+ unsigned int k = child;
+ child = parent;
+ parent = k;
+ x_offset = -x_offset;
+ y_offset = -y_offset;
}
+ /* If child was already connected to someone else, walk through its old
+ * chain and reverse the link direction, such that the whole tree of its
+ * previous connection now attaches to new parent. Watch out for case
+ * where new parent is on the path from old chain...
+ */
+ reverse_cursive_minor_offset (pos, child, c->direction, parent);
+
+ pos[child].cursive_chain() = parent - child;
+ if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
+ pos[child].y_offset = y_offset;
+ else
+ pos[child].x_offset = x_offset;
+
buffer->idx = j;
- return TRACE_RETURN (true);
+ return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
+ return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
}
protected:
@@ -1002,10 +1026,10 @@ struct CursivePos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
- if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
+ if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
- case 1: return TRACE_RETURN (c->dispatch (u.format1));
- default:return TRACE_RETURN (c->default_return_value ());
+ case 1: return_trace (c->dispatch (u.format1));
+ default:return_trace (c->default_return_value ());
}
}
@@ -1041,33 +1065,36 @@ struct MarkBasePosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
- if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);
+ if (likely (mark_index == NOT_COVERED)) return_trace (false);
/* now we search backwards for a non-mark glyph */
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
do {
- if (!skippy_iter.prev ()) return TRACE_RETURN (false);
+ if (!skippy_iter.prev ()) return_trace (false);
/* We only want to attach to the first of a MultipleSubst sequence. Reject others. */
if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break;
skippy_iter.reject ();
} while (1);
/* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
- if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }
+ if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return_trace (false);*/ }
unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
- if (base_index == NOT_COVERED) return TRACE_RETURN (false);
+ if (base_index == NOT_COVERED) return_trace (false);
- return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
+ return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
- markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
+ return_trace (c->check_struct (this) &&
+ markCoverage.sanitize (c, this) &&
+ baseCoverage.sanitize (c, this) &&
+ markArray.sanitize (c, this) &&
+ baseArray.sanitize (c, this, (unsigned int) classCount));
}
protected:
@@ -1095,10 +1122,10 @@ struct MarkBasePos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
- if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
+ if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
- case 1: return TRACE_RETURN (c->dispatch (u.format1));
- default:return TRACE_RETURN (c->default_return_value ());
+ case 1: return_trace (c->dispatch (u.format1));
+ default:return_trace (c->default_return_value ());
}
}
@@ -1139,27 +1166,27 @@ struct MarkLigPosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
- if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);
+ if (likely (mark_index == NOT_COVERED)) return_trace (false);
/* now we search backwards for a non-mark glyph */
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
- if (!skippy_iter.prev ()) return TRACE_RETURN (false);
+ if (!skippy_iter.prev ()) return_trace (false);
/* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
- if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }
+ if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return_trace (false);*/ }
unsigned int j = skippy_iter.idx;
unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
- if (lig_index == NOT_COVERED) return TRACE_RETURN (false);
+ if (lig_index == NOT_COVERED) return_trace (false);
const LigatureArray& lig_array = this+ligatureArray;
const LigatureAttach& lig_attach = lig_array[lig_index];
/* Find component to attach to */
unsigned int comp_count = lig_attach.rows;
- if (unlikely (!comp_count)) return TRACE_RETURN (false);
+ if (unlikely (!comp_count)) return_trace (false);
/* We must now check whether the ligature ID of the current mark glyph
* is identical to the ligature ID of the found ligature. If yes, we
@@ -1174,14 +1201,17 @@ struct MarkLigPosFormat1
else
comp_index = comp_count - 1;
- return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
+ return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
- markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
+ return_trace (c->check_struct (this) &&
+ markCoverage.sanitize (c, this) &&
+ ligatureCoverage.sanitize (c, this) &&
+ markArray.sanitize (c, this) &&
+ ligatureArray.sanitize (c, this, (unsigned int) classCount));
}
protected:
@@ -1210,10 +1240,10 @@ struct MarkLigPos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
- if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
+ if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
- case 1: return TRACE_RETURN (c->dispatch (u.format1));
- default:return TRACE_RETURN (c->default_return_value ());
+ case 1: return_trace (c->dispatch (u.format1));
+ default:return_trace (c->default_return_value ());
}
}
@@ -1249,15 +1279,15 @@ struct MarkMarkPosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
- if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);
+ if (likely (mark1_index == NOT_COVERED)) return_trace (false);
/* now we search backwards for a suitable mark glyph until a non-mark glyph */
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
- if (!skippy_iter.prev ()) return TRACE_RETURN (false);
+ if (!skippy_iter.prev ()) return_trace (false);
- if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return TRACE_RETURN (false); }
+ if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return_trace (false); }
unsigned int j = skippy_iter.idx;
@@ -1279,21 +1309,23 @@ struct MarkMarkPosFormat1
}
/* Didn't match. */
- return TRACE_RETURN (false);
+ return_trace (false);
good:
unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
- if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);
+ if (mark2_index == NOT_COVERED) return_trace (false);
- return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
+ return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
- mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
- && mark2Array.sanitize (c, this, (unsigned int) classCount));
+ return_trace (c->check_struct (this) &&
+ mark1Coverage.sanitize (c, this) &&
+ mark2Coverage.sanitize (c, this) &&
+ mark1Array.sanitize (c, this) &&
+ mark2Array.sanitize (c, this, (unsigned int) classCount));
}
protected:
@@ -1323,10 +1355,10 @@ struct MarkMarkPos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
- if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
+ if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
- case 1: return TRACE_RETURN (c->dispatch (u.format1));
- default:return TRACE_RETURN (c->default_return_value ());
+ case 1: return_trace (c->dispatch (u.format1));
+ default:return_trace (c->default_return_value ());
}
}
@@ -1374,19 +1406,18 @@ struct PosLookupSubTable
inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
{
TRACE_DISPATCH (this, lookup_type);
- /* The sub_format passed to may_dispatch is unnecessary but harmless. */
- if (unlikely (!c->may_dispatch (this, &u.sub_format))) TRACE_RETURN (c->default_return_value ());
+ if (unlikely (!c->may_dispatch (this, &u.sub_format))) return_trace (c->no_dispatch_return_value ());
switch (lookup_type) {
- case Single: return TRACE_RETURN (u.single.dispatch (c));
- case Pair: return TRACE_RETURN (u.pair.dispatch (c));
- case Cursive: return TRACE_RETURN (u.cursive.dispatch (c));
- case MarkBase: return TRACE_RETURN (u.markBase.dispatch (c));
- case MarkLig: return TRACE_RETURN (u.markLig.dispatch (c));
- case MarkMark: return TRACE_RETURN (u.markMark.dispatch (c));
- case Context: return TRACE_RETURN (u.context.dispatch (c));
- case ChainContext: return TRACE_RETURN (u.chainContext.dispatch (c));
- case Extension: return TRACE_RETURN (u.extension.dispatch (c));
- default: return TRACE_RETURN (c->default_return_value ());
+ case Single: return_trace (u.single.dispatch (c));
+ case Pair: return_trace (u.pair.dispatch (c));
+ case Cursive: return_trace (u.cursive.dispatch (c));
+ case MarkBase: return_trace (u.markBase.dispatch (c));
+ case MarkLig: return_trace (u.markLig.dispatch (c));
+ case MarkMark: return_trace (u.markMark.dispatch (c));
+ case Context: return_trace (u.context.dispatch (c));
+ case ChainContext: return_trace (u.chainContext.dispatch (c));
+ case Extension: return_trace (u.extension.dispatch (c));
+ default: return_trace (c->default_return_value ());
}
}
@@ -1421,13 +1452,13 @@ struct PosLookup : Lookup
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
- return TRACE_RETURN (dispatch (c));
+ return_trace (dispatch (c));
}
inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
- return TRACE_RETURN (dispatch (c));
+ return_trace (dispatch (c));
}
template <typename set_t>
@@ -1449,8 +1480,8 @@ struct PosLookup : Lookup
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
- return TRACE_RETURN (dispatch (c));
+ if (unlikely (!Lookup::sanitize (c))) return_trace (false);
+ return_trace (dispatch (c));
}
};
@@ -1473,9 +1504,9 @@ struct GPOS : GSUBGPOS
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
- if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
+ if (unlikely (!GSUBGPOS::sanitize (c))) return_trace (false);
const OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
- return TRACE_RETURN (list.sanitize (c, this));
+ return_trace (list.sanitize (c, this));
}
public:
DEFINE_SIZE_STATIC (10);
@@ -1483,6 +1514,30 @@ struct GPOS : GSUBGPOS
static void
+reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent)
+{
+ unsigned int j = pos[i].cursive_chain();
+ if (likely (!j))
+ return;
+
+ j += i;
+
+ pos[i].cursive_chain() = 0;
+
+ /* Stop if we see new parent in the chain. */
+ if (j == new_parent)
+ return;
+
+ reverse_cursive_minor_offset (pos, j, direction, new_parent);
+
+ if (HB_DIRECTION_IS_HORIZONTAL (direction))
+ pos[j].y_offset = -pos[i].y_offset;
+ else
+ pos[j].x_offset = -pos[i].x_offset;
+
+ pos[j].cursive_chain() = i - j;
+}
+static void
fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
unsigned int j = pos[i].cursive_chain();