OLD | NEW |
1 /* | 1 /* |
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. | 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. |
3 * Copyright © 2010,2012 Google, Inc. | 3 * Copyright © 2010,2012,2013 Google, Inc. |
4 * | 4 * |
5 * This is part of HarfBuzz, a text shaping library. | 5 * This is part of HarfBuzz, a text shaping library. |
6 * | 6 * |
7 * Permission is hereby granted, without written agreement and without | 7 * Permission is hereby granted, without written agreement and without |
8 * license or royalty fees, to use, copy, modify, and distribute this | 8 * license or royalty fees, to use, copy, modify, and distribute this |
9 * software and its documentation for any purpose, provided that the | 9 * software and its documentation for any purpose, provided that the |
10 * above copyright notice and the following two paragraphs appear in | 10 * above copyright notice and the following two paragraphs appear in |
11 * all copies of this software. | 11 * all copies of this software. |
12 * | 12 * |
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR | 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
(...skipping 369 matching lines...)
383 }; | 383 }; |
384 | 384 |
385 struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */ | 385 struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */ |
386 { | 386 { |
387 inline bool apply (hb_apply_context_t *c, | 387 inline bool apply (hb_apply_context_t *c, |
388 unsigned int mark_index, unsigned int glyph_index, | 388 unsigned int mark_index, unsigned int glyph_index, |
389 const AnchorMatrix &anchors, unsigned int class_count, | 389 const AnchorMatrix &anchors, unsigned int class_count, |
390 unsigned int glyph_pos) const | 390 unsigned int glyph_pos) const |
391 { | 391 { |
392 TRACE_APPLY (this); | 392 TRACE_APPLY (this); |
| 393 hb_buffer_t *buffer = c->buffer; |
393 const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index); | 394 const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index); |
394 unsigned int mark_class = record.klass; | 395 unsigned int mark_class = record.klass; |
395 | 396 |
396 const Anchor& mark_anchor = this + record.markAnchor; | 397 const Anchor& mark_anchor = this + record.markAnchor; |
397 bool found; | 398 bool found; |
398 const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found); | 399 const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found); |
399 /* If this subtable doesn't have an anchor for this base and this class, | 400 /* If this subtable doesn't have an anchor for this base and this class, |
400 * return false such that the subsequent subtables have a chance at it. */ | 401 * return false such that the subsequent subtables have a chance at it. */ |
401 if (unlikely (!found)) return TRACE_RETURN (false); | 402 if (unlikely (!found)) return TRACE_RETURN (false); |
402 | 403 |
403 hb_position_t mark_x, mark_y, base_x, base_y; | 404 hb_position_t mark_x, mark_y, base_x, base_y; |
404 | 405 |
405 mark_anchor.get_anchor (c->font, c->buffer->cur().codepoint, &mark_x, &mark_y); | 406 mark_anchor.get_anchor (c->font, buffer->cur().codepoint, &mark_x, &mark_y); |
406 glyph_anchor.get_anchor (c->font, c->buffer->info[glyph_pos].codepoint, &base_x, &base_y); | 407 glyph_anchor.get_anchor (c->font, buffer->info[glyph_pos].codepoint, &base_x, &base_y); |
407 | 408 |
408 hb_glyph_position_t &o = c->buffer->cur_pos(); | 409 hb_glyph_position_t &o = buffer->cur_pos(); |
409 o.x_offset = base_x - mark_x; | 410 o.x_offset = base_x - mark_x; |
410 o.y_offset = base_y - mark_y; | 411 o.y_offset = base_y - mark_y; |
411 o.attach_lookback() = c->buffer->idx - glyph_pos; | 412 o.attach_lookback() = buffer->idx - glyph_pos; |
412 | 413 |
413 c->buffer->idx++; | 414 buffer->idx++; |
414 return TRACE_RETURN (true); | 415 return TRACE_RETURN (true); |
415 } | 416 } |
416 | 417 |
417 inline bool sanitize (hb_sanitize_context_t *c) { | 418 inline bool sanitize (hb_sanitize_context_t *c) { |
418 TRACE_SANITIZE (this); | 419 TRACE_SANITIZE (this); |
419 return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this)); | 420 return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this)); |
420 } | 421 } |
421 }; | 422 }; |
422 | 423 |
423 | 424 |
424 /* Lookups */ | 425 /* Lookups */ |
425 | 426 |
426 struct SinglePosFormat1 | 427 struct SinglePosFormat1 |
427 { | 428 { |
428 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const | 429 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const |
429 { | 430 { |
430 TRACE_COLLECT_GLYPHS (this); | 431 TRACE_COLLECT_GLYPHS (this); |
431 (this+coverage).add_coverage (c->input); | 432 (this+coverage).add_coverage (c->input); |
432 } | 433 } |
433 | 434 |
434 inline const Coverage &get_coverage (void) const | 435 inline const Coverage &get_coverage (void) const |
435 { | 436 { |
436 return this+coverage; | 437 return this+coverage; |
437 } | 438 } |
438 | 439 |
439 inline bool apply (hb_apply_context_t *c) const | 440 inline bool apply (hb_apply_context_t *c) const |
440 { | 441 { |
441 TRACE_APPLY (this); | 442 TRACE_APPLY (this); |
442 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); | 443 hb_buffer_t *buffer = c->buffer; |
 | 444 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint); |
443 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); | 445 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); |
444 | 446 |
445 valueFormat.apply_value (c->font, c->direction, this, | 447 valueFormat.apply_value (c->font, c->direction, this, |
446 » » » values, c->buffer->cur_pos()); | 448 » » » values, buffer->cur_pos()); |
447 | 449 |
448 c->buffer->idx++; | 450 buffer->idx++; |
449 return TRACE_RETURN (true); | 451 return TRACE_RETURN (true); |
450 } | 452 } |
451 | 453 |
452 inline bool sanitize (hb_sanitize_context_t *c) { | 454 inline bool sanitize (hb_sanitize_context_t *c) { |
453 TRACE_SANITIZE (this); | 455 TRACE_SANITIZE (this); |
454 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values)); | 456 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values)); |
455 } | 457 } |
456 | 458 |
457 protected: | 459 protected: |
458 USHORT format; /* Format identifier--format = 1 */ | 460 USHORT format; /* Format identifier--format = 1 */ |
(...skipping 18 matching lines...)
477 } | 479 } |
478 | 480 |
479 inline const Coverage &get_coverage (void) const | 481 inline const Coverage &get_coverage (void) const |
480 { | 482 { |
481 return this+coverage; | 483 return this+coverage; |
482 } | 484 } |
483 | 485 |
484 inline bool apply (hb_apply_context_t *c) const | 486 inline bool apply (hb_apply_context_t *c) const |
485 { | 487 { |
486 TRACE_APPLY (this); | 488 TRACE_APPLY (this); |
487 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); | 489 hb_buffer_t *buffer = c->buffer; |
 | 490 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint); |
488 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); | 491 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); |
489 | 492 |
490 if (likely (index >= valueCount)) return TRACE_RETURN (false); | 493 if (likely (index >= valueCount)) return TRACE_RETURN (false); |
491 | 494 |
492 valueFormat.apply_value (c->font, c->direction, this, | 495 valueFormat.apply_value (c->font, c->direction, this, |
493 &values[index * valueFormat.get_len ()], | 496 &values[index * valueFormat.get_len ()], |
494 » » » c->buffer->cur_pos()); | 497 » » » buffer->cur_pos()); |
495 | 498 |
496 c->buffer->idx++; | 499 buffer->idx++; |
497 return TRACE_RETURN (true); | 500 return TRACE_RETURN (true); |
498 } | 501 } |
499 | 502 |
500 inline bool sanitize (hb_sanitize_context_t *c) { | 503 inline bool sanitize (hb_sanitize_context_t *c) { |
501 TRACE_SANITIZE (this); | 504 TRACE_SANITIZE (this); |
502 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount)); | 505 return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount)); |
503 } | 506 } |
504 | 507 |
505 protected: | 508 protected: |
506 USHORT format; /* Format identifier--format = 2 */ | 509 USHORT format; /* Format identifier--format = 2 */ |
(...skipping 74 matching lines...)
581 c->input->add (record->secondGlyph); | 584 c->input->add (record->secondGlyph); |
582 record = &StructAtOffset<PairValueRecord> (record, record_size); | 585 record = &StructAtOffset<PairValueRecord> (record, record_size); |
583 } | 586 } |
584 } | 587 } |
585 | 588 |
586 inline bool apply (hb_apply_context_t *c, | 589 inline bool apply (hb_apply_context_t *c, |
587 const ValueFormat *valueFormats, | 590 const ValueFormat *valueFormats, |
588 unsigned int pos) const | 591 unsigned int pos) const |
589 { | 592 { |
590 TRACE_APPLY (this); | 593 TRACE_APPLY (this); |
| 594 hb_buffer_t *buffer = c->buffer; |
591 unsigned int len1 = valueFormats[0].get_len (); | 595 unsigned int len1 = valueFormats[0].get_len (); |
592 unsigned int len2 = valueFormats[1].get_len (); | 596 unsigned int len2 = valueFormats[1].get_len (); |
593 unsigned int record_size = USHORT::static_size * (1 + len1 + len2); | 597 unsigned int record_size = USHORT::static_size * (1 + len1 + len2); |
594 | 598 |
595 const PairValueRecord *record = CastP<PairValueRecord> (array); | 599 const PairValueRecord *record = CastP<PairValueRecord> (array); |
596 unsigned int count = len; | 600 unsigned int count = len; |
597 for (unsigned int i = 0; i < count; i++) | 601 for (unsigned int i = 0; i < count; i++) |
598 { | 602 { |
599 /* TODO bsearch */ | 603 /* TODO bsearch */ |
600 if (c->buffer->info[pos].codepoint == record->secondGlyph) | 604 if (buffer->info[pos].codepoint == record->secondGlyph) |
601 { | 605 { |
602 valueFormats[0].apply_value (c->font, c->direction, this, | 606 valueFormats[0].apply_value (c->font, c->direction, this, |
603 » » » » &record->values[0], c->buffer->cur_pos()); | 607 » » » » &record->values[0], buffer->cur_pos()); |
604 valueFormats[1].apply_value (c->font, c->direction, this, | 608 valueFormats[1].apply_value (c->font, c->direction, this, |
605 » » » » &record->values[len1], c->buffer->pos[pos]); | 609 » » » » &record->values[len1], buffer->pos[pos]); |
606 if (len2) | 610 if (len2) |
607 pos++; | 611 pos++; |
608 » c->buffer->idx = pos; | 612 » buffer->idx = pos; |
609 return TRACE_RETURN (true); | 613 return TRACE_RETURN (true); |
610 } | 614 } |
611 record = &StructAtOffset<PairValueRecord> (record, record_size); | 615 record = &StructAtOffset<PairValueRecord> (record, record_size); |
612 } | 616 } |
613 | 617 |
614 return TRACE_RETURN (false); | 618 return TRACE_RETURN (false); |
615 } | 619 } |
616 | 620 |
617 struct sanitize_closure_t { | 621 struct sanitize_closure_t { |
618 void *base; | 622 void *base; |
(...skipping 33 matching lines...)
652 } | 656 } |
653 | 657 |
654 inline const Coverage &get_coverage (void) const | 658 inline const Coverage &get_coverage (void) const |
655 { | 659 { |
656 return this+coverage; | 660 return this+coverage; |
657 } | 661 } |
658 | 662 |
659 inline bool apply (hb_apply_context_t *c) const | 663 inline bool apply (hb_apply_context_t *c) const |
660 { | 664 { |
661 TRACE_APPLY (this); | 665 TRACE_APPLY (this); |
662 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1); | 666 hb_buffer_t *buffer = c->buffer; |
 | 667 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1); |
663 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); | 668 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); |
664 | 669 |
665 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); | 670 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint); |
666 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); | 671 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); |
667 | 672 |
668 if (!skippy_iter.next ()) return TRACE_RETURN (false); | 673 if (!skippy_iter.next ()) return TRACE_RETURN (false); |
669 | 674 |
670 return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx)); | 675 return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx)); |
671 } | 676 } |
672 | 677 |
673 inline bool sanitize (hb_sanitize_context_t *c) { | 678 inline bool sanitize (hb_sanitize_context_t *c) { |
674 TRACE_SANITIZE (this); | 679 TRACE_SANITIZE (this); |
675 | 680 |
(...skipping 46 matching lines...)
722 } | 727 } |
723 | 728 |
724 inline const Coverage &get_coverage (void) const | 729 inline const Coverage &get_coverage (void) const |
725 { | 730 { |
726 return this+coverage; | 731 return this+coverage; |
727 } | 732 } |
728 | 733 |
729 inline bool apply (hb_apply_context_t *c) const | 734 inline bool apply (hb_apply_context_t *c) const |
730 { | 735 { |
731 TRACE_APPLY (this); | 736 TRACE_APPLY (this); |
732 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1); | 737 hb_buffer_t *buffer = c->buffer; |
 | 738 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1); |
733 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); | 739 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); |
734 | 740 |
735 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); | 741 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint); |
736 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); | 742 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false); |
737 | 743 |
738 if (!skippy_iter.next ()) return TRACE_RETURN (false); | 744 if (!skippy_iter.next ()) return TRACE_RETURN (false); |
739 | 745 |
740 unsigned int len1 = valueFormat1.get_len (); | 746 unsigned int len1 = valueFormat1.get_len (); |
741 unsigned int len2 = valueFormat2.get_len (); | 747 unsigned int len2 = valueFormat2.get_len (); |
742 unsigned int record_len = len1 + len2; | 748 unsigned int record_len = len1 + len2; |
743 | 749 |
744 unsigned int klass1 = (this+classDef1).get_class (c->buffer->cur().codepoint); | 750 unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint); |
745 unsigned int klass2 = (this+classDef2).get_class (c->buffer->info[skippy_iter.idx].codepoint); | 751 unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint); |
746 if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false); | 752 if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false); |
747 | 753 |
748 const Value *v = &values[record_len * (klass1 * class2Count + klass2)]; | 754 const Value *v = &values[record_len * (klass1 * class2Count + klass2)]; |
749 valueFormat1.apply_value (c->font, c->direction, this, | 755 valueFormat1.apply_value (c->font, c->direction, this, |
750 » » » v, c->buffer->cur_pos()); | 756 » » » v, buffer->cur_pos()); |
751 valueFormat2.apply_value (c->font, c->direction, this, | 757 valueFormat2.apply_value (c->font, c->direction, this, |
752 » » » v + len1, c->buffer->pos[skippy_iter.idx]); | 758 » » » v + len1, buffer->pos[skippy_iter.idx]); |
753 | 759 |
754 c->buffer->idx = skippy_iter.idx; | 760 buffer->idx = skippy_iter.idx; |
755 if (len2) | 761 if (len2) |
756 c->buffer->idx++; | 762 buffer->idx++; |
757 | 763 |
758 return TRACE_RETURN (true); | 764 return TRACE_RETURN (true); |
759 } | 765 } |
760 | 766 |
761 inline bool sanitize (hb_sanitize_context_t *c) { | 767 inline bool sanitize (hb_sanitize_context_t *c) { |
762 TRACE_SANITIZE (this); | 768 TRACE_SANITIZE (this); |
763 if (!(c->check_struct (this) | 769 if (!(c->check_struct (this) |
764 && coverage.sanitize (c, this) | 770 && coverage.sanitize (c, this) |
765 && classDef1.sanitize (c, this) | 771 && classDef1.sanitize (c, this) |
766 && classDef2.sanitize (c, this))) return TRACE_RETURN (false); | 772 && classDef2.sanitize (c, this))) return TRACE_RETURN (false); |
(...skipping 101 matching lines...)
868 } | 874 } |
869 | 875 |
870 inline const Coverage &get_coverage (void) const | 876 inline const Coverage &get_coverage (void) const |
871 { | 877 { |
872 return this+coverage; | 878 return this+coverage; |
873 } | 879 } |
874 | 880 |
875 inline bool apply (hb_apply_context_t *c) const | 881 inline bool apply (hb_apply_context_t *c) const |
876 { | 882 { |
877 TRACE_APPLY (this); | 883 TRACE_APPLY (this); |
| 884 hb_buffer_t *buffer = c->buffer; |
878 | 885 |
879 /* We don't handle mark glyphs here. */ | 886 /* We don't handle mark glyphs here. */ |
880 if (c->buffer->cur().glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK) return TRACE_RETURN (false); | 887 if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return TRACE_RETURN (false); |
881 | 888 |
882 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, 1); | 889 hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1); |
883 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); | 890 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false); |
884 | 891 |
885 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->cur().codepoint)]; | 892 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)]; |
886 if (!this_record.exitAnchor) return TRACE_RETURN (false); | 893 if (!this_record.exitAnchor) return TRACE_RETURN (false); |
887 | 894 |
888 if (!skippy_iter.next ()) return TRACE_RETURN (false); | 895 if (!skippy_iter.next ()) return TRACE_RETURN (false); |
889 | 896 |
890 const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint)]; | 897 const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)]; |
891 if (!next_record.entryAnchor) return TRACE_RETURN (false); | 898 if (!next_record.entryAnchor) return TRACE_RETURN (false); |
892 | 899 |
893 unsigned int i = c->buffer->idx; | 900 unsigned int i = buffer->idx; |
894 unsigned int j = skippy_iter.idx; | 901 unsigned int j = skippy_iter.idx; |
895 | 902 |
896 hb_position_t entry_x, entry_y, exit_x, exit_y; | 903 hb_position_t entry_x, entry_y, exit_x, exit_y; |
897 (this+this_record.exitAnchor).get_anchor (c->font, c->buffer->info[i].codepoint, &exit_x, &exit_y); | 904 (this+this_record.exitAnchor).get_anchor (c->font, buffer->info[i].codepoint, &exit_x, &exit_y); |
898 (this+next_record.entryAnchor).get_anchor (c->font, c->buffer->info[j].codepoint, &entry_x, &entry_y); | 905 (this+next_record.entryAnchor).get_anchor (c->font, buffer->info[j].codepoint, &entry_x, &entry_y); |
899 | 906 |
900 hb_glyph_position_t *pos = c->buffer->pos; | 907 hb_glyph_position_t *pos = buffer->pos; |
901 | 908 |
902 hb_position_t d; | 909 hb_position_t d; |
903 /* Main-direction adjustment */ | 910 /* Main-direction adjustment */ |
904 switch (c->direction) { | 911 switch (c->direction) { |
905 case HB_DIRECTION_LTR: | 912 case HB_DIRECTION_LTR: |
906 pos[i].x_advance = exit_x + pos[i].x_offset; | 913 pos[i].x_advance = exit_x + pos[i].x_offset; |
907 | 914 |
908 d = entry_x + pos[j].x_offset; | 915 d = entry_x + pos[j].x_offset; |
909 pos[j].x_advance -= d; | 916 pos[j].x_advance -= d; |
910 pos[j].x_offset -= d; | 917 pos[j].x_offset -= d; |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
943 else | 950 else |
944 pos[i].x_offset = entry_x - exit_x; | 951 pos[i].x_offset = entry_x - exit_x; |
945 } else { | 952 } else { |
946 pos[j].cursive_chain() = i - j; | 953 pos[j].cursive_chain() = i - j; |
947 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction))) | 954 if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction))) |
948 pos[j].y_offset = exit_y - entry_y; | 955 pos[j].y_offset = exit_y - entry_y; |
949 else | 956 else |
950 pos[j].x_offset = exit_x - entry_x; | 957 pos[j].x_offset = exit_x - entry_x; |
951 } | 958 } |
952 | 959 |
953 c->buffer->idx = j; | 960 buffer->idx = j; |
954 return TRACE_RETURN (true); | 961 return TRACE_RETURN (true); |
955 } | 962 } |
956 | 963 |
957 inline bool sanitize (hb_sanitize_context_t *c) { | 964 inline bool sanitize (hb_sanitize_context_t *c) { |
958 TRACE_SANITIZE (this); | 965 TRACE_SANITIZE (this); |
959 return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this)); | 966 return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this)); |
960 } | 967 } |
961 | 968 |
962 protected: | 969 protected: |
963 USHORT format; /* Format identifier--format = 1 */ | 970 USHORT format; /* Format identifier--format = 1 */ |
(...skipping 51 matching lines...)
1015 } | 1022 } |
1016 | 1023 |
1017 inline const Coverage &get_coverage (void) const | 1024 inline const Coverage &get_coverage (void) const |
1018 { | 1025 { |
1019 return this+markCoverage; | 1026 return this+markCoverage; |
1020 } | 1027 } |
1021 | 1028 |
1022 inline bool apply (hb_apply_context_t *c) const | 1029 inline bool apply (hb_apply_context_t *c) const |
1023 { | 1030 { |
1024 TRACE_APPLY (this); | 1031 TRACE_APPLY (this); |
1025 unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint); | 1032 hb_buffer_t *buffer = c->buffer; |
 | 1033 unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint); |
1026 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false); | 1034 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false); |
1027 | 1035 |
1028 /* now we search backwards for a non-mark glyph */ | 1036 /* now we search backwards for a non-mark glyph */ |
1029 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1); | 1037 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1); |
1030 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks); | 1038 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks); |
1031 do { | 1039 do { |
1032 if (!skippy_iter.prev ()) return TRACE_RETURN (false); | 1040 if (!skippy_iter.prev ()) return TRACE_RETURN (false); |
1033 /* We only want to attach to the first of a MultipleSubst sequence. Reject others. */ | 1041 /* We only want to attach to the first of a MultipleSubst sequence. Reject others. */ |
1034 if (0 == get_lig_comp (c->buffer->info[skippy_iter.idx])) break; | 1042 if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break; |
1035 skippy_iter.reject (); | 1043 skippy_iter.reject (); |
1036 } while (1); | 1044 } while (1); |
1037 | 1045 |
1038 /* The following assertion is too strong, so we've disabled it. */ | 1046 /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */ |
1039 if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH)) {/*return TRACE_RETURN (false);*/} | 1047 if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ } |
1040 | 1048 |
1041 unsigned int base_index = (this+baseCoverage).get_coverage (c->buffer->info[skippy_iter.idx].codepoint); | 1049 unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint); |
1042 if (base_index == NOT_COVERED) return TRACE_RETURN (false); | 1050 if (base_index == NOT_COVERED) return TRACE_RETURN (false); |
1043 | 1051 |
1044 return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx)); | 1052 return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx)); |
1045 } | 1053 } |
1046 | 1054 |
1047 inline bool sanitize (hb_sanitize_context_t *c) { | 1055 inline bool sanitize (hb_sanitize_context_t *c) { |
1048 TRACE_SANITIZE (this); | 1056 TRACE_SANITIZE (this); |
1049 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) && | 1057 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) && |
1050 markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount)); | 1058 markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount)); |
1051 } | 1059 } |
(...skipping 66 matching lines...)
1118 } | 1126 } |
1119 | 1127 |
1120 inline const Coverage &get_coverage (void) const | 1128 inline const Coverage &get_coverage (void) const |
1121 { | 1129 { |
1122 return this+markCoverage; | 1130 return this+markCoverage; |
1123 } | 1131 } |
1124 | 1132 |
1125 inline bool apply (hb_apply_context_t *c) const | 1133 inline bool apply (hb_apply_context_t *c) const |
1126 { | 1134 { |
1127 TRACE_APPLY (this); | 1135 TRACE_APPLY (this); |
1128 unsigned int mark_index = (this+markCoverage).get_coverage (c->buffer->cur().codepoint); | 1136 hb_buffer_t *buffer = c->buffer; |
 | 1137 unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint); |
1129 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false); | 1138 if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false); |
1130 | 1139 |
1131 /* now we search backwards for a non-mark glyph */ | 1140 /* now we search backwards for a non-mark glyph */ |
1132 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1); | 1141 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1); |
1133 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks); | 1142 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks); |
1134 if (!skippy_iter.prev ()) return TRACE_RETURN (false); | 1143 if (!skippy_iter.prev ()) return TRACE_RETURN (false); |
1135 | 1144 |
1136 /* The following assertion is too strong, so we've disabled it. */ | 1145 /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */ |
1137 if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE)) {/*return TRACE_RETURN (false);*/} | 1146 if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ } |
1138 | 1147 |
1139 unsigned int j = skippy_iter.idx; | 1148 unsigned int j = skippy_iter.idx; |
1140 unsigned int lig_index = (this+ligatureCoverage).get_coverage (c->buffer->info[j].codepoint); | 1149 unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint); |
1141 if (lig_index == NOT_COVERED) return TRACE_RETURN (false); | 1150 if (lig_index == NOT_COVERED) return TRACE_RETURN (false); |
1142 | 1151 |
1143 const LigatureArray& lig_array = this+ligatureArray; | 1152 const LigatureArray& lig_array = this+ligatureArray; |
1144 const LigatureAttach& lig_attach = lig_array[lig_index]; | 1153 const LigatureAttach& lig_attach = lig_array[lig_index]; |
1145 | 1154 |
1146 /* Find component to attach to */ | 1155 /* Find component to attach to */ |
1147 unsigned int comp_count = lig_attach.rows; | 1156 unsigned int comp_count = lig_attach.rows; |
1148 if (unlikely (!comp_count)) return TRACE_RETURN (false); | 1157 if (unlikely (!comp_count)) return TRACE_RETURN (false); |
1149 | 1158 |
1150 /* We must now check whether the ligature ID of the current mark glyph | 1159 /* We must now check whether the ligature ID of the current mark glyph |
1151 * is identical to the ligature ID of the found ligature. If yes, we | 1160 * is identical to the ligature ID of the found ligature. If yes, we |
1152 * can directly use the component index. If not, we attach the mark | 1161 * can directly use the component index. If not, we attach the mark |
1153 * glyph to the last component of the ligature. */ | 1162 * glyph to the last component of the ligature. */ |
1154 unsigned int comp_index; | 1163 unsigned int comp_index; |
1155 unsigned int lig_id = get_lig_id (c->buffer->info[j]); | 1164 unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]); |
1156 unsigned int mark_id = get_lig_id (c->buffer->cur()); | 1165 unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
1157 unsigned int mark_comp = get_lig_comp (c->buffer->cur()); | 1166 unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
1158 if (lig_id && lig_id == mark_id && mark_comp > 0) | 1167 if (lig_id && lig_id == mark_id && mark_comp > 0) |
1159 comp_index = MIN (comp_count, get_lig_comp (c->buffer->cur())) - 1; | 1168 comp_index = MIN (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1; |
1160 else | 1169 else |
1161 comp_index = comp_count - 1; | 1170 comp_index = comp_count - 1; |
1162 | 1171 |
1163 return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j)); | 1172 return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j)); |
1164 } | 1173 } |
1165 | 1174 |
1166 inline bool sanitize (hb_sanitize_context_t *c) { | 1175 inline bool sanitize (hb_sanitize_context_t *c) { |
1167 TRACE_SANITIZE (this); | 1176 TRACE_SANITIZE (this); |
1168 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) && | 1177 return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) && |
1169 markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount)); | 1178 markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount)); |
(...skipping 63 matching lines...)
1233 } | 1242 } |
1234 | 1243 |
1235 inline const Coverage &get_coverage (void) const | 1244 inline const Coverage &get_coverage (void) const |
1236 { | 1245 { |
1237 return this+mark1Coverage; | 1246 return this+mark1Coverage; |
1238 } | 1247 } |
1239 | 1248 |
1240 inline bool apply (hb_apply_context_t *c) const | 1249 inline bool apply (hb_apply_context_t *c) const |
1241 { | 1250 { |
1242 TRACE_APPLY (this); | 1251 TRACE_APPLY (this); |
1243 unsigned int mark1_index = (this+mark1Coverage).get_coverage (c->buffer->cur().codepoint); | 1252 hb_buffer_t *buffer = c->buffer; |
 | 1253 unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint); |
1244 if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false); | 1254 if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false); |
1245 | 1255 |
1246 /* now we search backwards for a suitable mark glyph until a non-mark glyph */ | 1256 /* now we search backwards for a suitable mark glyph until a non-mark glyph */ |
1247 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->idx, 1); | 1257 hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1); |
1248 skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags); | 1258 skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags); |
1249 if (!skippy_iter.prev ()) return TRACE_RETURN (false); | 1259 if (!skippy_iter.prev ()) return TRACE_RETURN (false); |
1250 | 1260 |
1251 if (!(c->buffer->info[skippy_iter.idx].glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) { return TRACE_RETURN (false); } | 1261 if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return TRACE_RETURN (false); } |
1252 | 1262 |
1253 unsigned int j = skippy_iter.idx; | 1263 unsigned int j = skippy_iter.idx; |
1254 | 1264 |
1255 unsigned int id1 = get_lig_id (c->buffer->cur()); | 1265 unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur()); |
1256 unsigned int id2 = get_lig_id (c->buffer->info[j]); | 1266 unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]); |
1257 unsigned int comp1 = get_lig_comp (c->buffer->cur()); | 1267 unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
1258 unsigned int comp2 = get_lig_comp (c->buffer->info[j]); | 1268 unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]); |
1259 | 1269 |
1260 if (likely (id1 == id2)) { | 1270 if (likely (id1 == id2)) { |
1261 if (id1 == 0) /* Marks belonging to the same base. */ | 1271 if (id1 == 0) /* Marks belonging to the same base. */ |
1262 goto good; | 1272 goto good; |
1263 else if (comp1 == comp2) /* Marks belonging to the same ligature component. */ | 1273 else if (comp1 == comp2) /* Marks belonging to the same ligature component. */ |
1264 goto good; | 1274 goto good; |
1265 } else { | 1275 } else { |
1266 /* If ligature ids don't match, it may be the case that one of the marks | 1276 /* If ligature ids don't match, it may be the case that one of the marks |
1267 * itself is a ligature. In which case match. */ | 1277 * itself is a ligature. In which case match. */ |
1268 if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2)) | 1278 if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2)) |
1269 goto good; | 1279 goto good; |
1270 } | 1280 } |
1271 | 1281 |
1272 /* Didn't match. */ | 1282 /* Didn't match. */ |
1273 return TRACE_RETURN (false); | 1283 return TRACE_RETURN (false); |
1274 | 1284 |
1275 good: | 1285 good: |
1276 unsigned int mark2_index = (this+mark2Coverage).get_coverage (c->buffer->info[j].codepoint); | 1286 unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint); |
1277 if (mark2_index == NOT_COVERED) return TRACE_RETURN (false); | 1287 if (mark2_index == NOT_COVERED) return TRACE_RETURN (false); |
1278 | 1288 |
1279 return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j)); | 1289 return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j)); |
1280 } | 1290 } |
1281 | 1291 |
1282 inline bool sanitize (hb_sanitize_context_t *c) { | 1292 inline bool sanitize (hb_sanitize_context_t *c) { |
1283 TRACE_SANITIZE (this); | 1293 TRACE_SANITIZE (this); |
1284 return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) && | 1294 return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) && |
1285 mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this) | 1295 mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this) |
1286 && mark2Array.sanitize (c, this, (unsigned int) classCount)); | 1296 && mark2Array.sanitize (c, this, (unsigned int) classCount)); |
(...skipping 135 matching lines...)
1422 public: | 1432 public: |
1423 DEFINE_SIZE_UNION (2, header.sub_format); | 1433 DEFINE_SIZE_UNION (2, header.sub_format); |
1424 }; | 1434 }; |
1425 | 1435 |
1426 | 1436 |
1427 struct PosLookup : Lookup | 1437 struct PosLookup : Lookup |
1428 { | 1438 { |
1429 inline const PosLookupSubTable& get_subtable (unsigned int i) const | 1439 inline const PosLookupSubTable& get_subtable (unsigned int i) const |
1430 { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; } | 1440 { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; } |
1431 | 1441 |
1432 inline hb_collect_glyphs_context_t::return_t collect_glyphs_lookup (hb_collect_glyphs_context_t *c) const | 1442 inline bool is_reverse (void) const |
 | 1443 { |
 | 1444 return false; |
 | 1445 } |
 | 1446 |
 | 1447 inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const |
1433 { | 1448 { |
1434 TRACE_COLLECT_GLYPHS (this); | 1449 TRACE_COLLECT_GLYPHS (this); |
1435 c->set_recurse_func (NULL); | 1450 c->set_recurse_func (NULL); |
1436 return TRACE_RETURN (dispatch (c)); | 1451 return TRACE_RETURN (dispatch (c)); |
1437 } | 1452 } |
1438 | 1453 |
1439 template <typename set_t> | 1454 template <typename set_t> |
1440 inline void add_coverage (set_t *glyphs) const | 1455 inline void add_coverage (set_t *glyphs) const |
1441 { | 1456 { |
1442 hb_get_coverage_context_t c; | 1457 hb_get_coverage_context_t c; |
(...skipping 11 matching lines...)
1454 inline bool apply_once (hb_apply_context_t *c) const | 1469 inline bool apply_once (hb_apply_context_t *c) const |
1455 { | 1470 { |
1456 TRACE_APPLY (this); | 1471 TRACE_APPLY (this); |
1457 if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props)) | 1472 if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props)) |
1458 return TRACE_RETURN (false); | 1473 return TRACE_RETURN (false); |
1459 return TRACE_RETURN (dispatch (c)); | 1474 return TRACE_RETURN (dispatch (c)); |
1460 } | 1475 } |
1461 | 1476 |
1462 static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index); | 1477 static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index); |
1463 | 1478 |
1464 inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const | |
1465 { | |
1466 bool ret = false; | |
1467 | |
1468 if (unlikely (!c->buffer->len || !c->lookup_mask)) | |
1469 return false; | |
1470 | |
1471 c->set_recurse_func (apply_recurse_func); | |
1472 c->set_lookup (*this); | |
1473 | |
1474 c->buffer->idx = 0; | |
1475 | |
1476 while (c->buffer->idx < c->buffer->len) | |
1477 { | |
1478 if (digest->may_have (c->buffer->cur().codepoint) && | |
1479 (c->buffer->cur().mask & c->lookup_mask) && | |
1480 apply_once (c)) | |
1481 ret = true; | |
1482 else | |
1483 c->buffer->idx++; | |
1484 } | |
1485 | |
1486 return ret; | |
1487 } | |
1488 | |
1489 template <typename context_t> | 1479 template <typename context_t> |
1490 static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index); | 1480 static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index); |
1491 | 1481 |
1492 template <typename context_t> | 1482 template <typename context_t> |
1493 inline typename context_t::return_t dispatch (context_t *c) const | 1483 inline typename context_t::return_t dispatch (context_t *c) const |
1494 { | 1484 { |
1495 TRACE_DISPATCH (this); | 1485 TRACE_DISPATCH (this); |
1496 unsigned int lookup_type = get_type (); | 1486 unsigned int lookup_type = get_type (); |
1497 unsigned int count = get_subtable_count (); | 1487 unsigned int count = get_subtable_count (); |
1498 for (unsigned int i = 0; i < count; i++) { | 1488 for (unsigned int i = 0; i < count; i++) { |
(...skipping 13 matching lines...) Expand all Loading... |
1512 }; | 1502 }; |
1513 | 1503 |
1514 typedef OffsetListOf<PosLookup> PosLookupList; | 1504 typedef OffsetListOf<PosLookup> PosLookupList; |
1515 | 1505 |
1516 /* | 1506 /* |
1517 * GPOS -- The Glyph Positioning Table | 1507 * GPOS -- The Glyph Positioning Table |
1518 */ | 1508 */ |
1519 | 1509 |
1520 struct GPOS : GSUBGPOS | 1510 struct GPOS : GSUBGPOS |
1521 { | 1511 { |
1522 static const hb_tag_t Tag» = HB_OT_TAG_GPOS; | 1512 static const hb_tag_t tableTag» = HB_OT_TAG_GPOS; |
1523 | 1513 |
1524 inline const PosLookup& get_lookup (unsigned int i) const | 1514 inline const PosLookup& get_lookup (unsigned int i) const |
1525 { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); } | 1515 { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); } |
1526 | 1516 |
1527 static inline void position_start (hb_font_t *font, hb_buffer_t *buffer); | 1517 static inline void position_start (hb_font_t *font, hb_buffer_t *buffer); |
1528 static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer); | 1518 static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer); |
1529 | 1519 |
1530 inline bool sanitize (hb_sanitize_context_t *c) { | 1520 inline bool sanitize (hb_sanitize_context_t *c) { |
1531 TRACE_SANITIZE (this); | 1521 TRACE_SANITIZE (this); |
1532 if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false); | 1522 if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false); |
(...skipping 65 matching lines...)
1598 hb_direction_t direction = buffer->props.direction; | 1588 hb_direction_t direction = buffer->props.direction; |
1599 | 1589 |
1600 /* Handle cursive connections */ | 1590 /* Handle cursive connections */ |
1601 for (unsigned int i = 0; i < len; i++) | 1591 for (unsigned int i = 0; i < len; i++) |
1602 fix_cursive_minor_offset (pos, i, direction); | 1592 fix_cursive_minor_offset (pos, i, direction); |
1603 | 1593 |
1604 /* Handle attachments */ | 1594 /* Handle attachments */ |
1605 for (unsigned int i = 0; i < len; i++) | 1595 for (unsigned int i = 0; i < len; i++) |
1606 fix_mark_attachment (pos, i, direction); | 1596 fix_mark_attachment (pos, i, direction); |
1607 | 1597 |
1608 HB_BUFFER_DEALLOCATE_VAR (buffer, syllable); | 1598 _hb_buffer_deallocate_gsubgpos_vars (buffer); |
1609 HB_BUFFER_DEALLOCATE_VAR (buffer, lig_props); | |
1610 HB_BUFFER_DEALLOCATE_VAR (buffer, glyph_props); | |
1611 } | 1599 } |
1612 | 1600 |
1613 | 1601 |
1614 /* Out-of-class implementation for methods recursing */ | 1602 /* Out-of-class implementation for methods recursing */ |
1615 | 1603 |
1616 template <typename context_t> | 1604 template <typename context_t> |
1617 inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index) | 1605 inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index) |
1618 { | 1606 { |
1619 const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos); | 1607 const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos); |
1620 const PosLookup &l = gpos.get_lookup (lookup_index); | 1608 const PosLookup &l = gpos.get_lookup (lookup_index); |
(...skipping 13 matching lines...)
1634 | 1622 |
1635 | 1623 |
1636 #undef attach_lookback | 1624 #undef attach_lookback |
1637 #undef cursive_chain | 1625 #undef cursive_chain |
1638 | 1626 |
1639 | 1627 |
1640 } /* namespace OT */ | 1628 } /* namespace OT */ |
1641 | 1629 |
1642 | 1630 |
1643 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */ | 1631 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */ |
OLD | NEW |
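
The recurring pattern in the NEW column of this diff is twofold: each apply() first caches c->buffer in a local buffer pointer, and glyph properties are queried through the _hb_glyph_info_* helpers instead of testing glyph_props() bits inline. The following sketch only illustrates that shape in isolation; the glyph_info_t, buffer_t, and apply_context_t types below are simplified stand-ins invented for this example, not the real HarfBuzz structures.

/* Simplified stand-in types for illustration only; not the real HarfBuzz structs. */
struct glyph_info_t {
  unsigned int codepoint;
  unsigned int props;   /* bit 0 stands in for the "mark" property in this sketch */
};

static inline bool glyph_info_is_mark (const glyph_info_t *info)
{ return (info->props & 1u) != 0; }

struct buffer_t {
  glyph_info_t *info;
  unsigned int len;
  unsigned int idx;
  glyph_info_t &cur (void) { return info[idx]; }
};

struct apply_context_t {
  buffer_t *buffer;
};

/* Mirrors the refactor above: hoist c->buffer into a local pointer once,
 * then query properties through a helper rather than raw bit tests. */
static bool apply_skip_mark (apply_context_t *c)
{
  buffer_t *buffer = c->buffer;                   /* cached once per apply() */
  if (buffer->idx >= buffer->len) return false;
  if (glyph_info_is_mark (&buffer->cur ())) return false;
  buffer->idx++;                                  /* consume the current glyph */
  return true;
}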