diff --git a/make/modules/java.desktop/lib/Awt2dLibraries.gmk b/make/modules/java.desktop/lib/Awt2dLibraries.gmk
index 7fbd1049f895..3203378d00a4 100644
--- a/make/modules/java.desktop/lib/Awt2dLibraries.gmk
+++ b/make/modules/java.desktop/lib/Awt2dLibraries.gmk
@@ -435,7 +435,6 @@ endif
   ifeq ($(USE_EXTERNAL_HARFBUZZ), true)
     LIBHARFBUZZ_LIBS := $(HARFBUZZ_LIBS)
   else
-    HARFBUZZ_CFLAGS := -DHAVE_OT -DHAVE_FALLBACK -DHAVE_UCDN -DHAVE_ROUND
 
     # This is better than adding EXPORT_ALL_SYMBOLS
     ifneq ($(filter $(TOOLCHAIN_TYPE), gcc clang), )
@@ -493,7 +492,7 @@ else
           maybe-uninitialized class-memaccess, \
       DISABLED_WARNINGS_clang := unused-value incompatible-pointer-types \
           tautological-constant-out-of-range-compare int-to-pointer-cast \
-          undef missing-field-initializers, \
+          undef missing-field-initializers range-loop-analysis, \
       DISABLED_WARNINGS_microsoft := 4267 4244 4090 4146 4334 4819 4101 4068 4805 4138, \
       LDFLAGS := $(LDFLAGS_JDKLIB) \
           $(call SET_SHARED_LIBRARY_ORIGIN), \
diff --git a/src/java.desktop/share/legal/harfbuzz.md b/src/java.desktop/share/legal/harfbuzz.md
index 16698bc86acf..465bcf5be3c9 100644
--- a/src/java.desktop/share/legal/harfbuzz.md
+++ b/src/java.desktop/share/legal/harfbuzz.md
@@ -1,16 +1,18 @@
-## Harfbuzz v2.3.1
+## Harfbuzz v2.7.2
 
 ### Harfbuzz License
 
-http://cgit.freedesktop.org/harfbuzz/tree/COPYING
+https://github.com/harfbuzz/harfbuzz/blob/master/COPYING
 
-HarfBuzz is licensed under the so-called "Old MIT" license. Details follow.
+HarfBuzz is licensed under the so-called "Old MIT" license.  Details follow.
 For parts of HarfBuzz that are licensed under different licenses see individual
 files names COPYING in subdirectories where applicable.
 
-Copyright © 2010,2011,2012  Google, Inc.
+Copyright © 2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020  Google, Inc.
+Copyright © 2018,2019,2020  Ebrahim Byagowi
+Copyright © 2019,2020  Facebook, Inc. 
 Copyright © 2012  Mozilla Foundation
 Copyright © 2011  Codethink Limited
 Copyright © 2008,2010  Nokia Corporation and/or its subsidiary(-ies)
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-fdsc-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-fdsc-table.hh
deleted file mode 100644
index 4ee7353346d8..000000000000
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-fdsc-table.hh
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright © 2018  Ebrahim Byagowi
- *
- *  This is part of HarfBuzz, a text shaping library.
- *
- * Permission is hereby granted, without written agreement and without
- * license or royalty fees, to use, copy, modify, and distribute this
- * software and its documentation for any purpose, provided that the
- * above copyright notice and the following two paragraphs appear in
- * all copies of this software.
- *
- * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
- * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
- * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
- * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
- * DAMAGE.
- *
- * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
- * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
- * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
- * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- */
-
-#ifndef HB_AAT_FDSC_TABLE_HH
-#define HB_AAT_FDSC_TABLE_HH
-
-#include "hb-aat-layout-common.hh"
-#include "hb-open-type.hh"
-
-/*
- * fdsc -- Font descriptors
- * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6fdsc.html
- */
-#define HB_AAT_TAG_fdsc HB_TAG('f','d','s','c')
-
-
-namespace AAT {
-
-
-struct FontDescriptor
-{
-  bool has_data () const { return tag; }
-
-  int cmp (hb_tag_t a) const { return tag.cmp (a); }
-
-  float get_value () const { return u.value.to_float (); }
-
-  enum non_alphabetic_value_t {
-    Alphabetic          = 0,
-    Dingbats            = 1,
-    PiCharacters        = 2,
-    Fleurons            = 3,
-    DecorativeBorders   = 4,
-    InternationalSymbols= 5,
-    MathSymbols         = 6
-  };
-
-  bool sanitize (hb_sanitize_context_t *c) const
-  {
-    TRACE_SANITIZE (this);
-    return_trace (c->check_struct (this));
-  }
-
-  protected:
-  Tag           tag;            /* The 4-byte table tag name. */
-  union {
-  Fixed         value;          /* The value for the descriptor tag. */
-  HBUINT32      nalfType;       /* If the tag is `nalf`, see non_alphabetic_value_t */
-  } u;
-  public:
-  DEFINE_SIZE_STATIC (8);
-};
-
-struct fdsc
-{
-  static constexpr hb_tag_t tableTag = HB_AAT_TAG_fdsc;
-
-  enum {
-    Weight       = HB_TAG ('w','g','h','t'),
-                                /* Percent weight relative to regular weight.
-                                 * (defaul value: 1.0) */
-    Width        = HB_TAG ('w','d','t','h'),
-                                /* Percent width relative to regular width.
-                                 * (default value: 1.0) */
-    Slant        = HB_TAG ('s','l','n','t'),
-                                /* Angle of slant in degrees, where positive
-                                 * is clockwise from straight up.
-                                 * (default value: 0.0) */
-    OpticalSize  = HB_TAG ('o','p','s','z'),
-                                /* Point size the font was designed for.
-                                 * (default value: 12.0) */
-    NonAlphabetic= HB_TAG ('n','a','l','f')
-                                /* These values are treated as integers,
-                                 * not fixed32s. 0 means alphabetic, and greater
-                                 * integers mean the font is non-alphabetic (e.g. symbols).
-                                 * (default value: 0) */
-  };
-
-  const FontDescriptor &get_descriptor (hb_tag_t style) const
-  { return descriptors.lsearch (style); }
-
-  bool sanitize (hb_sanitize_context_t *c) const
-  {
-    TRACE_SANITIZE (this);
-    return_trace (c->check_struct (this) &&
-                  descriptors.sanitize (c));
-  }
-
-  protected:
-  Fixed         version;        /* Version number of the font descriptors
-                                 * table (0x00010000 for the current version). */
-  LArrayOf
-                descriptors;    /* List of tagged-coordinate pairs style descriptors
-                                 * that will be included to characterize this font.
-                                 * Each descriptor consists of a  pair.
-                                 * These pairs are located in the gxFontDescriptor
-                                 * array that follows. */
-  public:
-  DEFINE_SIZE_ARRAY (8, descriptors);
-};
-
-} /* namespace AAT */
-
-
-#endif /* HB_AAT_FDSC_TABLE_HH */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-ankr-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-ankr-table.hh
index f8495f384b98..90dd949a50a6 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-ankr-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-ankr-table.hh
@@ -66,7 +66,7 @@ struct ankr
   {
     const NNOffsetTo *offset = (this+lookupTable).get_value (glyph_id, num_glyphs);
     if (!offset)
-      return Null(Anchor);
+      return Null (Anchor);
     const GlyphAnchors &anchors = &(this+anchorData) + *offset;
     return anchors[i];
   }
@@ -76,13 +76,14 @@ struct ankr
     TRACE_SANITIZE (this);
     return_trace (likely (c->check_struct (this) &&
                           version == 0 &&
+                          c->check_range (this, anchorData) &&
                           lookupTable.sanitize (c, this, &(this+anchorData))));
   }
 
   protected:
   HBUINT16      version;        /* Version number (set to zero) */
   HBUINT16      flags;          /* Flags (currently unused; set to zero) */
-  LOffsetTo > >
+  LOffsetTo>>
                 lookupTable;    /* Offset to the table's lookup table */
   LNNOffsetTo
                 anchorData;     /* Offset to the glyph data table */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-bsln-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-bsln-table.hh
index 746da3ae5bce..7dcf1c3bd9d0 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-bsln-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-bsln-table.hh
@@ -82,7 +82,7 @@ struct BaselineTableFormat2Part
   }
 
   protected:
-  GlyphID       stdGlyph;       /* The specific glyph index number in this
+  HBGlyphID     stdGlyph;       /* The specific glyph index number in this
                                  * font that is used to set the baseline values.
                                  * This is the standard glyph.
                                  * This glyph must contain a set of control points
@@ -101,11 +101,11 @@ struct BaselineTableFormat3Part
   bool sanitize (hb_sanitize_context_t *c) const
   {
     TRACE_SANITIZE (this);
-    return_trace (c->check_struct (this) && lookupTable.sanitize (c));
+    return_trace (likely (c->check_struct (this) && lookupTable.sanitize (c)));
   }
 
   protected:
-  GlyphID       stdGlyph;       /* ditto */
+  HBGlyphID     stdGlyph;       /* ditto */
   HBUINT16      ctlPoints[32];  /* ditto */
   Lookup
                 lookupTable;    /* Lookup table that maps glyphs to their
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-common.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-common.hh
index 7c8e3cec16fb..e1dcd6f7102d 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-common.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-common.hh
@@ -93,8 +93,8 @@ struct LookupSegmentSingle
     return_trace (c->check_struct (this) && value.sanitize (c, base));
   }
 
-  GlyphID       last;           /* Last GlyphID in this segment */
-  GlyphID       first;          /* First GlyphID in this segment */
+  HBGlyphID     last;           /* Last GlyphID in this segment */
+  HBGlyphID     first;          /* First GlyphID in this segment */
   T             value;          /* The lookup value (only one) */
   public:
   DEFINE_SIZE_STATIC (4 + T::static_size);
@@ -125,7 +125,7 @@ struct LookupFormat2
 
   protected:
   HBUINT16      format;         /* Format identifier--format = 2 */
-  VarSizedBinSearchArrayOf >
+  VarSizedBinSearchArrayOf>
                 segments;       /* The actual segments. These must already be sorted,
                                  * according to the first word in each one (the last
                                  * glyph in each segment). */
@@ -153,18 +153,18 @@ struct LookupSegmentArray
                   first <= last &&
                   valuesZ.sanitize (c, base, last - first + 1));
   }
-  template 
-  bool sanitize (hb_sanitize_context_t *c, const void *base, T2 user_data) const
+  template 
+  bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
   {
     TRACE_SANITIZE (this);
     return_trace (c->check_struct (this) &&
                   first <= last &&
-                  valuesZ.sanitize (c, base, last - first + 1, user_data));
+                  valuesZ.sanitize (c, base, last - first + 1, hb_forward (ds)...));
   }
 
-  GlyphID       last;           /* Last GlyphID in this segment */
-  GlyphID       first;          /* First GlyphID in this segment */
-  NNOffsetTo >
+  HBGlyphID     last;           /* Last GlyphID in this segment */
+  HBGlyphID     first;          /* First GlyphID in this segment */
+  NNOffsetTo>
                 valuesZ;        /* A 16-bit offset from the start of
                                  * the table to the data. */
   public:
@@ -196,7 +196,7 @@ struct LookupFormat4
 
   protected:
   HBUINT16      format;         /* Format identifier--format = 4 */
-  VarSizedBinSearchArrayOf >
+  VarSizedBinSearchArrayOf>
                 segments;       /* The actual segments. These must already be sorted,
                                  * according to the first word in each one (the last
                                  * glyph in each segment). */
@@ -222,7 +222,7 @@ struct LookupSingle
     return_trace (c->check_struct (this) && value.sanitize (c, base));
   }
 
-  GlyphID       glyph;          /* Last GlyphID */
+  HBGlyphID     glyph;          /* Last GlyphID */
   T             value;          /* The lookup value (only one) */
   public:
   DEFINE_SIZE_STATIC (2 + T::static_size);
@@ -253,7 +253,7 @@ struct LookupFormat6
 
   protected:
   HBUINT16      format;         /* Format identifier--format = 6 */
-  VarSizedBinSearchArrayOf >
+  VarSizedBinSearchArrayOf>
                 entries;        /* The actual entries, sorted by glyph index. */
   public:
   DEFINE_SIZE_ARRAY (8, entries);
@@ -284,7 +284,7 @@ struct LookupFormat8
 
   protected:
   HBUINT16      format;         /* Format identifier--format = 8 */
-  GlyphID       firstGlyph;     /* First glyph index included in the trimmed array. */
+  HBGlyphID     firstGlyph;     /* First glyph index included in the trimmed array. */
   HBUINT16      glyphCount;     /* Total number of glyphs (equivalent to the last
                                  * glyph minus the value of firstGlyph plus 1). */
   UnsizedArrayOf
@@ -303,7 +303,7 @@ struct LookupFormat10
   const typename T::type get_value_or_null (hb_codepoint_t glyph_id) const
   {
     if (!(firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount))
-      return Null(T);
+      return Null (T);
 
     const HBUINT8 *p = &valueArrayZ[(glyph_id - firstGlyph) * valueSize];
 
@@ -326,7 +326,7 @@ struct LookupFormat10
   protected:
   HBUINT16      format;         /* Format identifier--format = 8 */
   HBUINT16      valueSize;      /* Byte size of each value. */
-  GlyphID       firstGlyph;     /* First glyph index included in the trimmed array. */
+  HBGlyphID     firstGlyph;     /* First glyph index included in the trimmed array. */
   HBUINT16      glyphCount;     /* Total number of glyphs (equivalent to the last
                                  * glyph minus the value of firstGlyph plus 1). */
   UnsizedArrayOf
@@ -358,7 +358,7 @@ struct Lookup
       case 10: return u.format10.get_value_or_null (glyph_id);
       default:
       const T *v = get_value (glyph_id, num_glyphs);
-      return v ? *v : Null(T);
+      return v ? *v : Null (T);
     }
   }
 
@@ -418,15 +418,11 @@ struct Lookup
 } /* Close namespace. */
 /* Ugly hand-coded null objects for template Lookup<> :(. */
 extern HB_INTERNAL const unsigned char _hb_Null_AAT_Lookup[2];
-template <>
-/*static*/ inline const AAT::Lookup& Null > ()
-{ return *reinterpret_cast *> (_hb_Null_AAT_Lookup); }
-template <>
-/*static*/ inline const AAT::Lookup& Null > ()
-{ return *reinterpret_cast *> (_hb_Null_AAT_Lookup); }
-template <>
-/*static*/ inline const AAT::Lookup >& Null > > ()
-{ return *reinterpret_cast > *> (_hb_Null_AAT_Lookup); }
+template 
+struct Null> {
+  static AAT::Lookup const & get_null ()
+  { return *reinterpret_cast *> (_hb_Null_AAT_Lookup); }
+};
 namespace AAT {
 
 enum { DELETED_GLYPH = 0xFFFF };
@@ -514,7 +510,7 @@ struct StateTable
   const Entry &get_entry (int state, unsigned int klass) const
   {
     if (unlikely (klass >= nClasses))
-      klass = StateTable >::CLASS_OUT_OF_BOUNDS;
+      klass = StateTable>::CLASS_OUT_OF_BOUNDS;
 
     const HBUSHORT *states = (this+stateArrayTable).arrayZ;
     const Entry *entries = (this+entryTable).arrayZ;
@@ -580,7 +576,7 @@ struct StateTable
           if (unlikely (stop > states))
             return_trace (false);
           for (const HBUSHORT *p = states; stop < p; p--)
-            num_entries = MAX (num_entries, *(p - 1) + 1);
+            num_entries = hb_max (num_entries, *(p - 1) + 1);
           state_neg = min_state;
         }
       }
@@ -601,7 +597,7 @@ struct StateTable
           if (unlikely (stop < states))
             return_trace (false);
           for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++)
-            num_entries = MAX (num_entries, *p + 1);
+            num_entries = hb_max (num_entries, *p + 1);
           state_pos = max_state + 1;
         }
       }
@@ -615,8 +611,8 @@ struct StateTable
         for (const Entry *p = &entries[entry]; p < stop; p++)
         {
           int newState = new_state (p->newState);
-          min_state = MIN (min_state, newState);
-          max_state = MAX (max_state, newState);
+          min_state = hb_min (min_state, newState);
+          max_state = hb_max (max_state, newState);
         }
         entry = num_entries;
       }
@@ -635,7 +631,7 @@ struct StateTable
                 classTable;     /* Offset to the class table. */
   NNOffsetTo, HBUINT>
                 stateArrayTable;/* Offset to the state array. */
-  NNOffsetTo >, HBUINT>
+  NNOffsetTo>, HBUINT>
                 entryTable;     /* Offset to the entry array. */
 
   public:
@@ -662,7 +658,7 @@ struct ClassTable
     return_trace (c->check_struct (this) && classArray.sanitize (c));
   }
   protected:
-  GlyphID               firstGlyph;     /* First glyph index included in the trimmed array. */
+  HBGlyphID             firstGlyph;     /* First glyph index included in the trimmed array. */
   ArrayOf      classArray;     /* The class codes (indexed by glyph index minus
                                          * firstGlyph). */
   public:
@@ -682,7 +678,7 @@ struct ObsoleteTypes
                                      const void *base,
                                      const T *array)
   {
-    return (offset - ((const char *) array - (const char *) base)) / sizeof (T);
+    return (offset - ((const char *) array - (const char *) base)) / T::static_size;
   }
   template 
   static unsigned int byteOffsetToIndex (unsigned int offset,
@@ -824,12 +820,11 @@ struct hb_aat_apply_context_t :
 
   /* Unused. For debug tracing only. */
   unsigned int lookup_index;
-  unsigned int debug_depth;
 
   HB_INTERNAL hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_,
                                       hb_font_t *font_,
                                       hb_buffer_t *buffer_,
-                                      hb_blob_t *blob = const_cast (&Null(hb_blob_t)));
+                                      hb_blob_t *blob = const_cast (&Null (hb_blob_t)));
 
   HB_INTERNAL ~hb_aat_apply_context_t ();
 
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-feat-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-feat-table.hh
index 910a94f0bc34..06c48d2f64a4 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-feat-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-feat-table.hh
@@ -47,17 +47,16 @@ struct SettingName
   hb_aat_layout_feature_selector_t get_selector () const
   { return (hb_aat_layout_feature_selector_t) (unsigned) setting; }
 
-  void get_info (hb_aat_layout_feature_selector_info_t *s,
-                        hb_aat_layout_feature_selector_t default_selector) const
+  hb_aat_layout_feature_selector_info_t get_info (hb_aat_layout_feature_selector_t default_selector) const
   {
-    s->name_id = nameIndex;
-
-    s->enable = (hb_aat_layout_feature_selector_t) (unsigned int) setting;
-    s->disable = default_selector == HB_AAT_LAYOUT_FEATURE_SELECTOR_INVALID ?
-                 (hb_aat_layout_feature_selector_t) (s->enable + 1) :
-                 default_selector;
-
-    s->reserved = 0;
+    return {
+      nameIndex,
+      (hb_aat_layout_feature_selector_t) (unsigned int) setting,
+      default_selector == HB_AAT_LAYOUT_FEATURE_SELECTOR_INVALID
+        ? (hb_aat_layout_feature_selector_t) (setting + 1)
+        : default_selector,
+      0
+    };
   }
 
   bool sanitize (hb_sanitize_context_t *c) const
@@ -117,9 +116,10 @@ struct FeatureName
 
     if (selectors_count)
     {
-      hb_array_t arr = settings_table.sub_array (start_offset, selectors_count);
-      for (unsigned int i = 0; i < arr.length; i++)
-        settings_table[start_offset + i].get_info (&selectors[i], default_selector);
+      + settings_table.sub_array (start_offset, selectors_count)
+      | hb_map ([=] (const SettingName& setting) { return setting.get_info (default_selector); })
+      | hb_sink (hb_array (selectors, *selectors_count))
+      ;
     }
     return settings_table.length;
   }
@@ -129,6 +129,11 @@ struct FeatureName
 
   hb_ot_name_id_t get_feature_name_id () const { return nameIndex; }
 
+  bool is_exclusive () const { return featureFlags & Exclusive; }
+
+  /* A FeatureName with no settings is meaningless */
+  bool has_data () const { return nSettings; }
+
   bool sanitize (hb_sanitize_context_t *c, const void *base) const
   {
     TRACE_SANITIZE (this);
@@ -139,7 +144,7 @@ struct FeatureName
   protected:
   HBUINT16      feature;        /* Feature type. */
   HBUINT16      nSettings;      /* The number of records in the setting name array. */
-  LOffsetTo, false>
+  LNNOffsetTo>
                 settingTableZ;  /* Offset in bytes from the beginning of this table to
                                  * this feature's setting name array. The actual type of
                                  * record this offset refers to will depend on the
@@ -162,21 +167,21 @@ struct feat
                                   unsigned int                 *count,
                                   hb_aat_layout_feature_type_t *features) const
   {
-    unsigned int feature_count = featureNameCount;
-    if (count && *count)
+    if (count)
     {
-      unsigned int len = MIN (feature_count - start_offset, *count);
-      for (unsigned int i = 0; i < len; i++)
-        features[i] = namesZ[i + start_offset].get_feature_type ();
-      *count = len;
+      + namesZ.as_array (featureNameCount).sub_array (start_offset, count)
+      | hb_map (&FeatureName::get_feature_type)
+      | hb_sink (hb_array (features, *count))
+      ;
     }
     return featureNameCount;
   }
 
+  bool exposes_feature (hb_aat_layout_feature_type_t feature_type) const
+  { return get_feature (feature_type).has_data (); }
+
   const FeatureName& get_feature (hb_aat_layout_feature_type_t feature_type) const
-  {
-    return namesZ.bsearch (featureNameCount, feature_type);
-  }
+  { return namesZ.bsearch (featureNameCount, feature_type); }
 
   hb_ot_name_id_t get_feature_name_id (hb_aat_layout_feature_type_t feature) const
   { return get_feature (feature).get_feature_name_id (); }
@@ -209,7 +214,7 @@ struct feat
   SortedUnsizedArrayOf
                 namesZ;         /* The feature name array. */
   public:
-  DEFINE_SIZE_STATIC (24);
+  DEFINE_SIZE_ARRAY (12, namesZ);
 };
 
 } /* namespace AAT */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-just-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-just-table.hh
index c3817ea0b9c3..7ebd6a5ec5e8 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-just-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-just-table.hh
@@ -70,9 +70,9 @@ struct DecompositionAction
 
   ActionSubrecordHeader
                 header;
-  Fixed         lowerLimit;     /* If the distance factor is less than this value,
+  HBFixed       lowerLimit;     /* If the distance factor is less than this value,
                                  * then the ligature is decomposed. */
-  Fixed         upperLimit;     /* If the distance factor is greater than this value,
+  HBFixed       upperLimit;     /* If the distance factor is greater than this value,
                                  * then the ligature is decomposed. */
   HBUINT16      order;          /* Numerical order in which this ligature will
                                  * be decomposed; you may want infrequent ligatures
@@ -100,7 +100,7 @@ struct UnconditionalAddGlyphAction
   protected:
   ActionSubrecordHeader
                 header;
-  GlyphID       addGlyph;       /* Glyph that should be added if the distance factor
+  HBGlyphID     addGlyph;       /* Glyph that should be added if the distance factor
                                  * is growing. */
 
   public:
@@ -118,14 +118,14 @@ struct ConditionalAddGlyphAction
   protected:
   ActionSubrecordHeader
                 header;
-  Fixed         substThreshold; /* Distance growth factor (in ems) at which
+  HBFixed       substThreshold; /* Distance growth factor (in ems) at which
                                  * this glyph is replaced and the growth factor
                                  * recalculated. */
-  GlyphID       addGlyph;       /* Glyph to be added as kashida. If this value is
+  HBGlyphID     addGlyph;       /* Glyph to be added as kashida. If this value is
                                  * 0xFFFF, no extra glyph will be added. Note that
                                  * generally when a glyph is added, justification
                                  * will need to be redone. */
-  GlyphID       substGlyph;     /* Glyph to be substituted for this glyph if the
+  HBGlyphID     substGlyph;     /* Glyph to be substituted for this glyph if the
                                  * growth factor equals or exceeds the value of
                                  * substThreshold. */
   public:
@@ -146,13 +146,13 @@ struct DuctileGlyphAction
   HBUINT32      variationAxis;  /* The 4-byte tag identifying the ductile axis.
                                  * This would normally be 0x64756374 ('duct'),
                                  * but you may use any axis the font contains. */
-  Fixed         minimumLimit;   /* The lowest value for the ductility axis tha
+  HBFixed       minimumLimit;   /* The lowest value for the ductility axis tha
                                  * still yields an acceptable appearance. Normally
                                  * this will be 1.0. */
-  Fixed         noStretchValue; /* This is the default value that corresponds to
+  HBFixed       noStretchValue; /* This is the default value that corresponds to
                                  * no change in appearance. Normally, this will
                                  * be 1.0. */
-  Fixed         maximumLimit;   /* The highest value for the ductility axis that
+  HBFixed       maximumLimit;   /* The highest value for the ductility axis that
                                  * still yields an acceptable appearance. */
   public:
   DEFINE_SIZE_STATIC (22);
@@ -170,7 +170,7 @@ struct RepeatedAddGlyphAction
   ActionSubrecordHeader
                 header;
   HBUINT16      flags;          /* Currently unused; set to 0. */
-  GlyphID       glyph;          /* Glyph that should be added if the distance factor
+  HBGlyphID     glyph;          /* Glyph that should be added if the distance factor
                                  * is growing. */
   public:
   DEFINE_SIZE_STATIC (10);
@@ -271,14 +271,14 @@ struct JustWidthDeltaEntry
   };
 
   protected:
-  Fixed         beforeGrowLimit;/* The ratio by which the advance width of the
+  HBFixed       beforeGrowLimit;/* The ratio by which the advance width of the
                                  * glyph is permitted to grow on the left or top side. */
-  Fixed         beforeShrinkLimit;
+  HBFixed       beforeShrinkLimit;
                                 /* The ratio by which the advance width of the
                                  * glyph is permitted to shrink on the left or top side. */
-  Fixed         afterGrowLimit; /* The ratio by which the advance width of the glyph
+  HBFixed       afterGrowLimit; /* The ratio by which the advance width of the glyph
                                  * is permitted to shrink on the left or top side. */
-  Fixed         afterShrinkLimit;
+  HBFixed       afterShrinkLimit;
                                 /* The ratio by which the advance width of the glyph
                                  * is at most permitted to shrink on the right or
                                  * bottom side. */
@@ -371,7 +371,7 @@ struct JustificationHeader
                                  * of postcompensation subtable (set to zero if none).
                                  *
                                  * The postcompensation subtable, if present in the font. */
-  Lookup >
+  Lookup>
                 lookupTable;    /* Lookup table associating glyphs with width delta
                                  * clusters. See the description of Width Delta Clusters
                                  * table for details on how to interpret the lookup values. */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-kerx-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-kerx-table.hh
index b5519480e239..76e1da06f3ef 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-kerx-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-kerx-table.hh
@@ -82,8 +82,8 @@ struct KernPair
   }
 
   protected:
-  GlyphID       left;
-  GlyphID       right;
+  HBGlyphID     left;
+  HBGlyphID     right;
   FWORD         value;
   public:
   DEFINE_SIZE_STATIC (6);
@@ -229,9 +229,7 @@ struct KerxSubTableFormat1
 
     bool is_actionable (StateTableDriver *driver HB_UNUSED,
                         const Entry &entry)
-    {
-      return Format1EntryT::performAction (entry);
-    }
+    { return Format1EntryT::performAction (entry); }
     void transition (StateTableDriver *driver,
                      const Entry &entry)
     {
@@ -251,7 +249,7 @@ struct KerxSubTableFormat1
 
       if (Format1EntryT::performAction (entry) && depth)
       {
-        unsigned int tuple_count = MAX (1u, table->header.tuple_count ());
+        unsigned int tuple_count = hb_max (1u, table->header.tuple_count ());
 
         unsigned int kern_idx = Format1EntryT::kernActionIndex (entry);
         kern_idx = Types::byteOffsetToIndex (kern_idx, &table->machine, kernAction.arrayZ);
@@ -281,35 +279,28 @@ struct KerxSubTableFormat1
 
           hb_glyph_position_t &o = buffer->pos[idx];
 
-          /* Testing shows that CoreText only applies kern (cross-stream or not)
-           * if none has been applied by previous subtables.  That is, it does
-           * NOT seem to accumulate as otherwise implied by specs. */
-
-          /* The following flag is undocumented in the spec, but described
-           * in the 'kern' table example. */
-          if (v == -0x8000)
-          {
-            o.attach_type() = ATTACH_TYPE_NONE;
-            o.attach_chain() = 0;
-            o.x_offset = o.y_offset = 0;
-          }
-          else if (HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction))
+          if (HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction))
           {
             if (crossStream)
             {
-              if (buffer->pos[idx].attach_type() && !buffer->pos[idx].y_offset)
+              /* The following flag is undocumented in the spec, but described
+               * in the 'kern' table example. */
+              if (v == -0x8000)
               {
-                o.y_offset = c->font->em_scale_y (v);
+                o.attach_type() = ATTACH_TYPE_NONE;
+                o.attach_chain() = 0;
+                o.y_offset = 0;
+              }
+              else if (o.attach_type())
+              {
+                o.y_offset += c->font->em_scale_y (v);
                 buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
               }
             }
             else if (buffer->info[idx].mask & kern_mask)
             {
-              if (!buffer->pos[idx].x_offset)
-              {
-                buffer->pos[idx].x_advance += c->font->em_scale_x (v);
-                buffer->pos[idx].x_offset += c->font->em_scale_x (v);
-              }
+              o.x_advance += c->font->em_scale_x (v);
+              o.x_offset += c->font->em_scale_x (v);
             }
           }
           else
@@ -317,19 +308,22 @@ struct KerxSubTableFormat1
             if (crossStream)
             {
               /* CoreText doesn't do crossStream kerning in vertical.  We do. */
-              if (buffer->pos[idx].attach_type() && !buffer->pos[idx].x_offset)
+              if (v == -0x8000)
               {
-                o.x_offset = c->font->em_scale_x (v);
+                o.attach_type() = ATTACH_TYPE_NONE;
+                o.attach_chain() = 0;
+                o.x_offset = 0;
+              }
+              else if (o.attach_type())
+              {
+                o.x_offset += c->font->em_scale_x (v);
                 buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
               }
             }
             else if (buffer->info[idx].mask & kern_mask)
             {
-              if (!buffer->pos[idx].y_offset)
-              {
-                buffer->pos[idx].y_advance += c->font->em_scale_y (v);
-                buffer->pos[idx].y_offset += c->font->em_scale_y (v);
-              }
+              o.y_advance += c->font->em_scale_y (v);
+              o.y_offset += c->font->em_scale_y (v);
             }
           }
         }
@@ -392,7 +386,7 @@ struct KerxSubTableFormat2
 
     const UnsizedArrayOf &arrayZ = this+array;
     unsigned int kern_idx = l + r;
-    kern_idx = Types::offsetToIndex (kern_idx, this, &arrayZ);
+    kern_idx = Types::offsetToIndex (kern_idx, this, arrayZ.arrayZ);
     const FWORD *v = &arrayZ[kern_idx];
     if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
 
@@ -488,7 +482,7 @@ struct KerxSubTableFormat4
     };
 
     driver_context_t (const KerxSubTableFormat4 *table,
-                             hb_aat_apply_context_t *c_) :
+                      hb_aat_apply_context_t *c_) :
         c (c_),
         action_type ((table->flags & ActionType) >> 30),
         ankrData ((HBUINT16 *) ((const char *) &table->machine + (table->flags & Offset))),
@@ -497,9 +491,7 @@ struct KerxSubTableFormat4
 
     bool is_actionable (StateTableDriver *driver HB_UNUSED,
                         const Entry &entry)
-    {
-      return entry.data.ankrActionIndex != 0xFFFF;
-    }
+    { return entry.data.ankrActionIndex != 0xFFFF; }
     void transition (StateTableDriver *driver,
                      const Entry &entry)
     {
@@ -512,11 +504,13 @@ struct KerxSubTableFormat4
         {
           case 0: /* Control Point Actions.*/
           {
-            /* indexed into glyph outline. */
-            const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex];
+            /* Indexed into glyph outline. */
+            /* Each action (record in ankrData) contains two 16-bit fields, so we must
+               double the ankrActionIndex to get the correct offset here. */
+            const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex * 2];
             if (!c->sanitizer.check_array (data, 2)) return;
-            HB_UNUSED unsigned int markControlPoint = *data++;
-            HB_UNUSED unsigned int currControlPoint = *data++;
+            unsigned int markControlPoint = *data++;
+            unsigned int currControlPoint = *data++;
             hb_position_t markX = 0;
             hb_position_t markY = 0;
             hb_position_t currX = 0;
@@ -538,8 +532,10 @@ struct KerxSubTableFormat4
 
           case 1: /* Anchor Point Actions. */
           {
-           /* Indexed into 'ankr' table. */
-            const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex];
+            /* Indexed into 'ankr' table. */
+            /* Each action (record in ankrData) contains two 16-bit fields, so we must
+               double the ankrActionIndex to get the correct offset here. */
+            const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex * 2];
             if (!c->sanitizer.check_array (data, 2)) return;
             unsigned int markAnchorPoint = *data++;
             unsigned int currAnchorPoint = *data++;
@@ -557,7 +553,9 @@ struct KerxSubTableFormat4
 
           case 2: /* Control Point Coordinate Actions. */
           {
-            const FWORD *data = (const FWORD *) &ankrData[entry.data.ankrActionIndex];
+            /* Each action contains four 16-bit fields, so we multiply the ankrActionIndex
+               by 4 to get the correct offset for the given action. */
+            const FWORD *data = (const FWORD *) &ankrData[entry.data.ankrActionIndex * 4];
             if (!c->sanitizer.check_array (data, 4)) return;
             int markX = *data++;
             int markY = *data++;
@@ -628,7 +626,7 @@ struct KerxSubTableFormat6
   bool is_long () const { return flags & ValuesAreLong; }
 
   int get_kerning (hb_codepoint_t left, hb_codepoint_t right,
-                          hb_aat_apply_context_t *c) const
+                   hb_aat_apply_context_t *c) const
   {
     unsigned int num_glyphs = c->sanitizer.get_num_glyphs ();
     if (is_long ())
@@ -712,18 +710,18 @@ struct KerxSubTableFormat6
   {
     struct Long
     {
-      LNNOffsetTo >            rowIndexTable;
-      LNNOffsetTo >            columnIndexTable;
-      LNNOffsetTo >     array;
+      LNNOffsetTo>             rowIndexTable;
+      LNNOffsetTo>             columnIndexTable;
+      LNNOffsetTo>      array;
     } l;
     struct Short
     {
-      LNNOffsetTo >            rowIndexTable;
-      LNNOffsetTo >            columnIndexTable;
-      LNNOffsetTo >       array;
+      LNNOffsetTo>             rowIndexTable;
+      LNNOffsetTo>             columnIndexTable;
+      LNNOffsetTo>        array;
     } s;
   } u;
-  LNNOffsetTo >   vector;
+  LNNOffsetTo>    vector;
   public:
   DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 24);
 };
@@ -733,8 +731,8 @@ struct KerxSubTableHeader
 {
   typedef ExtendedTypes Types;
 
-  unsigned int tuple_count () const { return tupleCount; }
-  bool is_horizontal () const       { return !(coverage & Vertical); }
+  unsigned   tuple_count () const { return tupleCount; }
+  bool     is_horizontal () const { return !(coverage & Vertical); }
 
   enum Coverage
   {
@@ -771,17 +769,17 @@ struct KerxSubTable
   unsigned int get_size () const { return u.header.length; }
   unsigned int get_type () const { return u.header.coverage & u.header.SubtableType; }
 
-  template 
-  typename context_t::return_t dispatch (context_t *c) const
+  template 
+  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
   {
     unsigned int subtable_type = get_type ();
     TRACE_DISPATCH (this, subtable_type);
     switch (subtable_type) {
-    case 0:     return_trace (c->dispatch (u.format0));
-    case 1:     return_trace (c->dispatch (u.format1));
-    case 2:     return_trace (c->dispatch (u.format2));
-    case 4:     return_trace (c->dispatch (u.format4));
-    case 6:     return_trace (c->dispatch (u.format6));
+    case 0:     return_trace (c->dispatch (u.format0, hb_forward (ds)...));
+    case 1:     return_trace (c->dispatch (u.format1, hb_forward (ds)...));
+    case 2:     return_trace (c->dispatch (u.format2, hb_forward (ds)...));
+    case 4:     return_trace (c->dispatch (u.format4, hb_forward (ds)...));
+    case 6:     return_trace (c->dispatch (u.format6, hb_forward (ds)...));
     default:    return_trace (c->default_return_value ());
     }
   }
@@ -891,7 +889,7 @@ struct KerxTable
       reverse = bool (st->u.header.coverage & st->u.header.Backwards) !=
                 HB_DIRECTION_IS_BACKWARD (c->buffer->props.direction);
 
-      if (!c->buffer->message (c->font, "start %c%c%c%c subtable %d", HB_UNTAG (thiz()->tableTag), c->lookup_index))
+      if (!c->buffer->message (c->font, "start subtable %d", c->lookup_index))
         goto skip;
 
       if (!seenCrossStream &&
@@ -923,7 +921,7 @@ struct KerxTable
       if (reverse)
         c->buffer->reverse ();
 
-      (void) c->buffer->message (c->font, "end %c%c%c%c subtable %d", HB_UNTAG (thiz()->tableTag), c->lookup_index);
+      (void) c->buffer->message (c->font, "end subtable %d", c->lookup_index);
 
     skip:
       st = &StructAfter (*st);
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-lcar-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-lcar-table.hh
deleted file mode 100644
index 58f1ee02fce7..000000000000
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-lcar-table.hh
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright © 2018  Ebrahim Byagowi
- *
- *  This is part of HarfBuzz, a text shaping library.
- *
- * Permission is hereby granted, without written agreement and without
- * license or royalty fees, to use, copy, modify, and distribute this
- * software and its documentation for any purpose, provided that the
- * above copyright notice and the following two paragraphs appear in
- * all copies of this software.
- *
- * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
- * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
- * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
- * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
- * DAMAGE.
- *
- * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
- * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
- * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
- * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
- */
-#ifndef HB_AAT_LAYOUT_LCAR_TABLE_HH
-#define HB_AAT_LAYOUT_LCAR_TABLE_HH
-
-#include "hb-open-type.hh"
-#include "hb-aat-layout-common.hh"
-
-/*
- * lcar -- Ligature caret
- * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6lcar.html
- */
-#define HB_AAT_TAG_lcar HB_TAG('l','c','a','r')
-
-
-namespace AAT {
-
-typedef ArrayOf LigCaretClassEntry;
-
-struct lcar
-{
-  static constexpr hb_tag_t tableTag = HB_AAT_TAG_lcar;
-
-  unsigned int get_lig_carets (hb_font_t      *font,
-                               hb_direction_t  direction,
-                               hb_codepoint_t  glyph,
-                               unsigned int    start_offset,
-                               unsigned int   *caret_count /* IN/OUT */,
-                               hb_position_t  *caret_array /* OUT */) const
-  {
-    const OffsetTo* entry_offset = lookup.get_value (glyph,
-                                                                         font->face->get_num_glyphs ());
-    const LigCaretClassEntry& array = entry_offset ? this+*entry_offset : Null (LigCaretClassEntry);
-    if (caret_count)
-    {
-      hb_array_t arr = array.sub_array (start_offset, caret_count);
-      unsigned int count = arr.length;
-      for (unsigned int i = 0; i < count; ++i)
-        switch (format)
-        {
-        case 0: caret_array[i] = font->em_scale_dir (arr[i], direction); break;
-        case 1:
-          hb_position_t x, y;
-          font->get_glyph_contour_point_for_origin (glyph, arr[i], direction, &x, &y);
-          caret_array[i] = HB_DIRECTION_IS_HORIZONTAL (direction) ? x : y;
-          break;
-        }
-    }
-    return array.len;
-  }
-
-  bool sanitize (hb_sanitize_context_t *c) const
-  {
-    TRACE_SANITIZE (this);
-    return_trace (likely (c->check_struct (this) &&
-                          version.major == 1 &&
-                          lookup.sanitize (c, this)));
-  }
-
-  protected:
-  FixedVersion<>version;        /* Version number of the ligature caret table */
-  HBUINT16      format;         /* Format of the ligature caret table. */
-  Lookup >
-                lookup;         /* data Lookup table associating glyphs */
-
-  public:
-  DEFINE_SIZE_MIN (8);
-};
-
-} /* namespace AAT */
-
-#endif /* HB_AAT_LAYOUT_LCAR_TABLE_HH */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-morx-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-morx-table.hh
index f52d2ab301bd..a0d137836b16 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-morx-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-morx-table.hh
@@ -88,7 +88,7 @@ struct RearrangementSubtable
         start = buffer->idx;
 
       if (flags & MarkLast)
-        end = MIN (buffer->idx + 1, buffer->len);
+        end = hb_min (buffer->idx + 1, buffer->len);
 
       if ((flags & Verb) && start < end)
       {
@@ -117,14 +117,14 @@ struct RearrangementSubtable
         };
 
         unsigned int m = map[flags & Verb];
-        unsigned int l = MIN (2, m >> 4);
-        unsigned int r = MIN (2, m & 0x0F);
+        unsigned int l = hb_min (2u, m >> 4);
+        unsigned int r = hb_min (2u, m & 0x0F);
         bool reverse_l = 3 == (m >> 4);
         bool reverse_r = 3 == (m & 0x0F);
 
         if (end - start >= l + r)
         {
-          buffer->merge_clusters (start, MIN (buffer->idx + 1, buffer->len));
+          buffer->merge_clusters (start, hb_min (buffer->idx + 1, buffer->len));
           buffer->merge_clusters (start, end);
 
           hb_glyph_info_t *info = buffer->info;
@@ -240,46 +240,46 @@ struct ContextualSubtable
       if (buffer->idx == buffer->len && !mark_set)
         return;
 
-      const GlyphID *replacement;
+      const HBGlyphID *replacement;
 
       replacement = nullptr;
       if (Types::extended)
       {
         if (entry.data.markIndex != 0xFFFF)
         {
-          const Lookup &lookup = subs[entry.data.markIndex];
+          const Lookup &lookup = subs[entry.data.markIndex];
           replacement = lookup.get_value (buffer->info[mark].codepoint, driver->num_glyphs);
         }
       }
       else
       {
         unsigned int offset = entry.data.markIndex + buffer->info[mark].codepoint;
-        const UnsizedArrayOf &subs_old = (const UnsizedArrayOf &) subs;
+        const UnsizedArrayOf &subs_old = (const UnsizedArrayOf &) subs;
         replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
         if (!replacement->sanitize (&c->sanitizer) || !*replacement)
           replacement = nullptr;
       }
       if (replacement)
       {
-        buffer->unsafe_to_break (mark, MIN (buffer->idx + 1, buffer->len));
+        buffer->unsafe_to_break (mark, hb_min (buffer->idx + 1, buffer->len));
         buffer->info[mark].codepoint = *replacement;
         ret = true;
       }
 
       replacement = nullptr;
-      unsigned int idx = MIN (buffer->idx, buffer->len - 1);
+      unsigned int idx = hb_min (buffer->idx, buffer->len - 1);
       if (Types::extended)
       {
         if (entry.data.currentIndex != 0xFFFF)
         {
-          const Lookup &lookup = subs[entry.data.currentIndex];
+          const Lookup &lookup = subs[entry.data.currentIndex];
           replacement = lookup.get_value (buffer->info[idx].codepoint, driver->num_glyphs);
         }
       }
       else
       {
         unsigned int offset = entry.data.currentIndex + buffer->info[idx].codepoint;
-        const UnsizedArrayOf &subs_old = (const UnsizedArrayOf &) subs;
+        const UnsizedArrayOf &subs_old = (const UnsizedArrayOf &) subs;
         replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
         if (!replacement->sanitize (&c->sanitizer) || !*replacement)
           replacement = nullptr;
@@ -304,7 +304,7 @@ struct ContextualSubtable
     bool mark_set;
     unsigned int mark;
     const ContextualSubtable *table;
-    const UnsizedOffsetListOf, HBUINT, false> &subs;
+    const UnsizedOffsetListOf, HBUINT, false> &subs;
   };
 
   bool apply (hb_aat_apply_context_t *c) const
@@ -337,9 +337,9 @@ struct ContextualSubtable
       const EntryData &data = entries[i].data;
 
       if (data.markIndex != 0xFFFF)
-        num_lookups = MAX (num_lookups, 1 + data.markIndex);
+        num_lookups = hb_max (num_lookups, 1 + data.markIndex);
       if (data.currentIndex != 0xFFFF)
-        num_lookups = MAX (num_lookups, 1 + data.currentIndex);
+        num_lookups = hb_max (num_lookups, 1 + data.currentIndex);
     }
 
     return_trace (substitutionTables.sanitize (c, this, num_lookups));
@@ -348,7 +348,7 @@ struct ContextualSubtable
   protected:
   StateTable
                 machine;
-  NNOffsetTo, HBUINT, false>, HBUINT>
+  NNOffsetTo, HBUINT, false>, HBUINT>
                 substitutionTables;
   public:
   DEFINE_SIZE_STATIC (20);
@@ -520,7 +520,7 @@ struct LigatureSubtable
           if (action & (LigActionStore | LigActionLast))
           {
             ligature_idx = Types::offsetToIndex (ligature_idx, table, ligature.arrayZ);
-            const GlyphID &ligatureData = ligature[ligature_idx];
+            const HBGlyphID &ligatureData = ligature[ligature_idx];
             if (unlikely (!ligatureData.sanitize (&c->sanitizer))) break;
             hb_codepoint_t lig = ligatureData;
 
@@ -554,7 +554,7 @@ struct LigatureSubtable
     const LigatureSubtable *table;
     const UnsizedArrayOf &ligAction;
     const UnsizedArrayOf &component;
-    const UnsizedArrayOf &ligature;
+    const UnsizedArrayOf &ligature;
     unsigned int match_length;
     unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
   };
@@ -586,7 +586,7 @@ struct LigatureSubtable
                 ligAction;      /* Offset to the ligature action table. */
   NNOffsetTo, HBUINT>
                 component;      /* Offset to the component table. */
-  NNOffsetTo, HBUINT>
+  NNOffsetTo, HBUINT>
                 ligature;       /* Offset to the actual ligature lists. */
   public:
   DEFINE_SIZE_STATIC (28);
@@ -606,7 +606,7 @@ struct NoncontextualSubtable
     unsigned int count = c->buffer->len;
     for (unsigned int i = 0; i < count; i++)
     {
-      const GlyphID *replacement = substitute.get_value (info[i].codepoint, num_glyphs);
+      const HBGlyphID *replacement = substitute.get_value (info[i].codepoint, num_glyphs);
       if (replacement)
       {
         info[i].codepoint = *replacement;
@@ -624,7 +624,7 @@ struct NoncontextualSubtable
   }
 
   protected:
-  Lookup       substitute;
+  Lookup     substitute;
   public:
   DEFINE_SIZE_MIN (2);
 };
@@ -725,8 +725,9 @@ struct InsertionSubtable
       if (entry.data.markedInsertIndex != 0xFFFF)
       {
         unsigned int count = (flags & MarkedInsertCount);
+        if (unlikely ((buffer->max_ops -= count) <= 0)) return;
         unsigned int start = entry.data.markedInsertIndex;
-        const GlyphID *glyphs = &insertionAction[start];
+        const HBGlyphID *glyphs = &insertionAction[start];
         if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0;
 
         bool before = flags & MarkedInsertBefore;
@@ -744,7 +745,7 @@ struct InsertionSubtable
 
         buffer->move_to (end + count);
 
-        buffer->unsafe_to_break_from_outbuffer (mark, MIN (buffer->idx + 1, buffer->len));
+        buffer->unsafe_to_break_from_outbuffer (mark, hb_min (buffer->idx + 1, buffer->len));
       }
 
       if (flags & SetMark)
@@ -753,8 +754,9 @@ struct InsertionSubtable
       if (entry.data.currentInsertIndex != 0xFFFF)
       {
         unsigned int count = (flags & CurrentInsertCount) >> 5;
+        if (unlikely ((buffer->max_ops -= count) <= 0)) return;
         unsigned int start = entry.data.currentInsertIndex;
-        const GlyphID *glyphs = &insertionAction[start];
+        const HBGlyphID *glyphs = &insertionAction[start];
         if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0;
 
         bool before = flags & CurrentInsertBefore;
@@ -793,7 +795,7 @@ struct InsertionSubtable
     private:
     hb_aat_apply_context_t *c;
     unsigned int mark;
-    const UnsizedArrayOf &insertionAction;
+    const UnsizedArrayOf &insertionAction;
   };
 
   bool apply (hb_aat_apply_context_t *c) const
@@ -819,7 +821,7 @@ struct InsertionSubtable
   protected:
   StateTable
                 machine;
-  NNOffsetTo, HBUINT>
+  NNOffsetTo, HBUINT>
                 insertionAction;        /* Byte offset from stateHeader to the start of
                                          * the insertion glyph table. */
   public:
@@ -883,17 +885,17 @@ struct ChainSubtable
     Insertion           = 5
   };
 
-  template 
-  typename context_t::return_t dispatch (context_t *c) const
+  template 
+  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
   {
     unsigned int subtable_type = get_type ();
     TRACE_DISPATCH (this, subtable_type);
     switch (subtable_type) {
-    case Rearrangement:         return_trace (c->dispatch (u.rearrangement));
-    case Contextual:            return_trace (c->dispatch (u.contextual));
-    case Ligature:              return_trace (c->dispatch (u.ligature));
-    case Noncontextual:         return_trace (c->dispatch (u.noncontextual));
-    case Insertion:             return_trace (c->dispatch (u.insertion));
+    case Rearrangement:         return_trace (c->dispatch (u.rearrangement, hb_forward (ds)...));
+    case Contextual:            return_trace (c->dispatch (u.contextual, hb_forward (ds)...));
+    case Ligature:              return_trace (c->dispatch (u.ligature, hb_forward (ds)...));
+    case Noncontextual:         return_trace (c->dispatch (u.noncontextual, hb_forward (ds)...));
+    case Insertion:             return_trace (c->dispatch (u.insertion, hb_forward (ds)...));
     default:                    return_trace (c->default_return_value ());
     }
   }
@@ -948,8 +950,10 @@ struct Chain
         hb_aat_layout_feature_type_t type = (hb_aat_layout_feature_type_t) (unsigned int) feature.featureType;
         hb_aat_layout_feature_selector_t setting = (hb_aat_layout_feature_selector_t) (unsigned int) feature.featureSetting;
       retry:
-        const hb_aat_map_builder_t::feature_info_t *info = map->features.bsearch (type);
-        if (info && info->setting == setting)
+        // Check whether this type/setting pair was requested in the map, and if so, apply its flags.
+        // (The search here only looks at the type and setting fields of feature_info_t.)
+        hb_aat_map_builder_t::feature_info_t info = { type, setting, false, 0 };
+        if (map->features.bsearch (info))
         {
           flags &= feature.disableFlags;
           flags |= feature.enableFlags;
@@ -967,9 +971,9 @@ struct Chain
   }
 
   void apply (hb_aat_apply_context_t *c,
-                     hb_mask_t flags) const
+              hb_mask_t flags) const
   {
-    const ChainSubtable *subtable = &StructAfter > (featureZ.as_array (featureCount));
+    const ChainSubtable *subtable = &StructAfter> (featureZ.as_array (featureCount));
     unsigned int count = subtableCount;
     for (unsigned int i = 0; i < count; i++)
     {
@@ -1015,7 +1019,7 @@ struct Chain
                 bool (subtable->get_coverage () & ChainSubtable::Backwards) !=
                 HB_DIRECTION_IS_BACKWARD (c->buffer->props.direction);
 
-      if (!c->buffer->message (c->font, "start chain subtable %d", c->lookup_index))
+      if (!c->buffer->message (c->font, "start chainsubtable %d", c->lookup_index))
         goto skip;
 
       if (reverse)
@@ -1026,12 +1030,12 @@ struct Chain
       if (reverse)
         c->buffer->reverse ();
 
-      (void) c->buffer->message (c->font, "end chain subtable %d", c->lookup_index);
+      (void) c->buffer->message (c->font, "end chainsubtable %d", c->lookup_index);
 
       if (unlikely (!c->buffer->successful)) return;
 
     skip:
-      subtable = &StructAfter > (*subtable);
+      subtable = &StructAfter> (*subtable);
       c->set_lookup_index (c->lookup_index + 1);
     }
   }
@@ -1049,13 +1053,13 @@ struct Chain
     if (!c->check_array (featureZ.arrayZ, featureCount))
       return_trace (false);
 
-    const ChainSubtable *subtable = &StructAfter > (featureZ.as_array (featureCount));
+    const ChainSubtable *subtable = &StructAfter> (featureZ.as_array (featureCount));
     unsigned int count = subtableCount;
     for (unsigned int i = 0; i < count; i++)
     {
       if (!subtable->sanitize (c))
         return_trace (false);
-      subtable = &StructAfter > (*subtable);
+      subtable = &StructAfter> (*subtable);
     }
 
     return_trace (true);
@@ -1080,10 +1084,10 @@ struct Chain
  * The 'mort'/'morx' Table
  */
 
-template 
+template 
 struct mortmorx
 {
-  static constexpr hb_tag_t tableTag = HB_AAT_TAG_morx;
+  static constexpr hb_tag_t tableTag = TAG;
 
   bool has_data () const { return version != 0; }
 
@@ -1095,7 +1099,7 @@ struct mortmorx
     for (unsigned int i = 0; i < count; i++)
     {
       map->chain_flags.push (chain->compile_flags (mapper));
-      chain = &StructAfter > (*chain);
+      chain = &StructAfter> (*chain);
     }
   }
 
@@ -1109,7 +1113,7 @@ struct mortmorx
     {
       chain->apply (c, c->plan->aat_map.chain_flags[i]);
       if (unlikely (!c->buffer->successful)) return;
-      chain = &StructAfter<Chain<Types> > (*chain);
+      chain = &StructAfter<Chain<Types>> (*chain);
     }
   }
 
@@ -1125,7 +1129,7 @@ struct mortmorx
     {
       if (!chain->sanitize (c, version))
         return_trace (false);
-      chain = &StructAfter<Chain<Types> > (*chain);
+      chain = &StructAfter<Chain<Types>> (*chain);
     }
 
     return_trace (true);
@@ -1143,14 +1147,8 @@ struct mortmorx
   DEFINE_SIZE_MIN (8);
 };
 
-struct morx : mortmorx<ExtendedTypes>
-{
-  static constexpr hb_tag_t tableTag = HB_AAT_TAG_morx;
-};
-struct mort : mortmorx<ObsoleteTypes>
-{
-  static constexpr hb_tag_t tableTag = HB_AAT_TAG_mort;
-};
+struct morx : mortmorx<ExtendedTypes, HB_AAT_TAG_morx> {};
+struct mort : mortmorx<ObsoleteTypes, HB_AAT_TAG_mort> {};
 
 
 } /* namespace AAT */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-opbd-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-opbd-table.hh
new file mode 100644
index 000000000000..bfd476c77e07
--- /dev/null
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-opbd-table.hh
@@ -0,0 +1,173 @@
+/*
+ * Copyright © 2019  Ebrahim Byagowi
+ *
+ *  This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ */
+
+#ifndef HB_AAT_LAYOUT_OPBD_TABLE_HH
+#define HB_AAT_LAYOUT_OPBD_TABLE_HH
+
+#include "hb-aat-layout-common.hh"
+#include "hb-open-type.hh"
+
+/*
+ * opbd -- Optical Bounds
+ * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6opbd.html
+ */
+#define HB_AAT_TAG_opbd HB_TAG('o','p','b','d')
+
+
+namespace AAT {
+
+struct OpticalBounds
+{
+  bool sanitize (hb_sanitize_context_t *c) const
+  {
+    TRACE_SANITIZE (this);
+    return_trace (likely (c->check_struct (this)));
+  }
+
+  FWORD         leftSide;
+  FWORD         topSide;
+  FWORD         rightSide;
+  FWORD         bottomSide;
+  public:
+  DEFINE_SIZE_STATIC (8);
+};
+
+struct opbdFormat0
+{
+  bool get_bounds (hb_font_t *font, hb_codepoint_t glyph_id,
+                   hb_glyph_extents_t *extents, const void *base) const
+  {
+    const OffsetTo<OpticalBounds> *bounds_offset = lookupTable.get_value (glyph_id, font->face->get_num_glyphs ());
+    if (!bounds_offset) return false;
+    const OpticalBounds &bounds = base+*bounds_offset;
+
+    if (extents)
+      *extents = {
+        font->em_scale_x (bounds.leftSide),
+        font->em_scale_y (bounds.topSide),
+        font->em_scale_x (bounds.rightSide),
+        font->em_scale_y (bounds.bottomSide)
+      };
+    return true;
+  }
+
+  bool sanitize (hb_sanitize_context_t *c, const void *base) const
+  {
+    TRACE_SANITIZE (this);
+    return_trace (likely (c->check_struct (this) && lookupTable.sanitize (c, base)));
+  }
+
+  protected:
+  Lookup<OffsetTo<OpticalBounds>>
+                lookupTable;    /* Lookup table associating glyphs with the four
+                                 * int16 values for the left-side, top-side,
+                                 * right-side, and bottom-side optical bounds. */
+  public:
+  DEFINE_SIZE_MIN (2);
+};
+
+struct opbdFormat1
+{
+  bool get_bounds (hb_font_t *font, hb_codepoint_t glyph_id,
+                   hb_glyph_extents_t *extents, const void *base) const
+  {
+    const OffsetTo<OpticalBounds> *bounds_offset = lookupTable.get_value (glyph_id, font->face->get_num_glyphs ());
+    if (!bounds_offset) return false;
+    const OpticalBounds &bounds = base+*bounds_offset;
+
+    hb_position_t left = 0, top = 0, right = 0, bottom = 0, ignore;
+    if (font->get_glyph_contour_point (glyph_id, bounds.leftSide, &left, &ignore) ||
+        font->get_glyph_contour_point (glyph_id, bounds.topSide, &ignore, &top) ||
+        font->get_glyph_contour_point (glyph_id, bounds.rightSide, &right, &ignore) ||
+        font->get_glyph_contour_point (glyph_id, bounds.bottomSide, &ignore, &bottom))
+    {
+      if (extents)
+        *extents = {left, top, right, bottom};
+      return true;
+    }
+    return false;
+  }
+
+  bool sanitize (hb_sanitize_context_t *c, const void *base) const
+  {
+    TRACE_SANITIZE (this);
+    return_trace (likely (c->check_struct (this) && lookupTable.sanitize (c, base)));
+  }
+
+  protected:
+  Lookup<OffsetTo<OpticalBounds>>
+                lookupTable;    /* Lookup table associating glyphs with the four
+                                 * int16 values for the left-side, top-side,
+                                 * right-side, and bottom-side optical bounds. */
+  public:
+  DEFINE_SIZE_MIN (2);
+};
+
+struct opbd
+{
+  static constexpr hb_tag_t tableTag = HB_AAT_TAG_opbd;
+
+  bool get_bounds (hb_font_t *font, hb_codepoint_t glyph_id,
+                   hb_glyph_extents_t *extents) const
+  {
+    switch (format)
+    {
+    case 0: return u.format0.get_bounds (font, glyph_id, extents, this);
+    case 1: return u.format1.get_bounds (font, glyph_id, extents, this);
+    default:return false;
+    }
+  }
+
+  bool sanitize (hb_sanitize_context_t *c) const
+  {
+    TRACE_SANITIZE (this);
+    if (unlikely (!c->check_struct (this) || version.major != 1))
+      return_trace (false);
+
+    switch (format)
+    {
+    case 0: return_trace (u.format0.sanitize (c, this));
+    case 1: return_trace (u.format1.sanitize (c, this));
+    default:return_trace (true);
+    }
+  }
+
+  protected:
+  FixedVersion<>version;        /* Version number of the optical bounds
+                                 * table (0x00010000 for the current version). */
+  HBUINT16      format;         /* Format of the optical bounds table.
+                                 * Format 0 indicates distance and Format 1 indicates
+                                 * control point. */
+  union {
+  opbdFormat0   format0;
+  opbdFormat1   format1;
+  } u;
+  public:
+  DEFINE_SIZE_MIN (8);
+};
+
+} /* namespace AAT */
+
+
+#endif /* HB_AAT_LAYOUT_OPBD_TABLE_HH */
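
A hedged usage sketch of the new table: the snippet assumes an AAT::opbd reference, an hb_font_t and a glyph id are already in hand; it is illustrative only, not an exported HarfBuzz API.

    // Illustrative only.
    hb_glyph_extents_t extents;
    if (opbd_table.get_bounds (font, glyph_id, &extents))
    {
      // Format 0 supplied em-scaled FWORD distances; format 1 resolved
      // contour-point numbers via font->get_glyph_contour_point ().
    }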
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-trak-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-trak-table.hh
index 469cae5a677b..1643e1422295 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-trak-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout-trak-table.hh
@@ -62,11 +62,11 @@ struct TrackTableEntry
   }
 
   protected:
-  Fixed         track;          /* Track value for this record. */
+  HBFixed       track;          /* Track value for this record. */
   NameID        trackNameID;    /* The 'name' table index for this track.
                                  * (a short word or phrase like "loose"
                                  * or "very tight") */
-  NNOffsetTo<UnsizedArrayOf<Fixed> >
+  NNOffsetTo<UnsizedArrayOf<HBFixed>>
                 valuesZ;        /* Offset from start of tracking table to
                                  * per-size tracking values for this track. */
 
@@ -82,7 +82,7 @@ struct TrackData
                         const void *base) const
   {
     unsigned int sizes = nSizes;
-    hb_array_t<const Fixed> size_table ((base+sizeTable).arrayZ, sizes);
+    hb_array_t<const HBFixed> size_table ((base+sizeTable).arrayZ, sizes);
 
     float s0 = size_table[idx].to_float ();
     float s1 = size_table[idx + 1].to_float ();
@@ -93,13 +93,6 @@ struct TrackData
 
   int get_tracking (const void *base, float ptem) const
   {
-    /* CoreText points are CSS pixels (96 per inch),
-     * NOT typographic points (72 per inch).
-     *
-     * https://developer.apple.com/library/content/documentation/GraphicsAnimation/Conceptual/HighResolutionOSX/Explained/Explained.html
-     */
-    float csspx = ptem * 96.f / 72.f;
-
     /*
      * Choose track.
      */
@@ -127,14 +120,14 @@ struct TrackData
     if (!sizes) return 0.;
     if (sizes == 1) return trackTableEntry->get_value (base, 0, sizes);
 
-    hb_array_t<const Fixed> size_table ((base+sizeTable).arrayZ, sizes);
+    hb_array_t<const HBFixed> size_table ((base+sizeTable).arrayZ, sizes);
     unsigned int size_index;
     for (size_index = 0; size_index < sizes - 1; size_index++)
-      if (size_table[size_index].to_float () >= csspx)
+      if (size_table[size_index].to_float () >= ptem)
         break;
 
-    return round (interpolate_at (size_index ? size_index - 1 : 0, csspx,
-                                  *trackTableEntry, base));
+    return roundf (interpolate_at (size_index ? size_index - 1 : 0, ptem,
+                                   *trackTableEntry, base));
   }
 
   bool sanitize (hb_sanitize_context_t *c, const void *base) const
@@ -148,7 +141,7 @@ struct TrackData
   protected:
   HBUINT16      nTracks;        /* Number of separate tracks included in this table. */
   HBUINT16      nSizes;         /* Number of point sizes included in this table. */
-  LOffsetTo<UnsizedArrayOf<Fixed>, false>
+  LNNOffsetTo<UnsizedArrayOf<HBFixed>>
                 sizeTable;      /* Offset from start of the tracking table to
                                  * Array[nSizes] of size values.. */
  UnsizedArrayOf<TrackTableEntry>
@@ -217,7 +210,7 @@ struct trak
 
   protected:
   FixedVersion<>version;        /* Version of the tracking table
-                                         * (0x00010000u for version 1.0). */
+                                 * (0x00010000u for version 1.0). */
   HBUINT16      format;         /* Format of the tracking table (set to 0). */
  OffsetTo<TrackData>
                 horizData;      /* Offset from start of tracking table to TrackData
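
The hunks above drop the CSS-pixel conversion, so size-table entries are now compared against the requested size in typographic points (ptem) directly; interpolate_at () still blends the two bracketing per-size values linearly. A standalone sketch of that interpolation (names are illustrative):

    // Illustrative sketch of the linear interpolation performed by interpolate_at ().
    static float lerp_track (float s0, float s1, float t0, float t1, float ptem)
    {
      float t = (s0 == s1) ? 0.f : (ptem - s0) / (s1 - s0);
      return (1.f - t) * t0 + t * t1;   // e.g. sizes 12..24 pt, values 0..-2, ptem 18 -> -1
    }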
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.cc b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.cc
index a49729dd7619..38a5c57519df 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.cc
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.cc
@@ -25,11 +25,9 @@
  * Google Author(s): Behdad Esfahbod
  */
 
-#include "hb-open-type.hh"
+#include "hb.hh"
 
-#include "hb-ot-face.hh"
 #include "hb-aat-layout.hh"
-#include "hb-aat-fdsc-table.hh" // Just so we compile it; unused otherwise.
 #include "hb-aat-layout-ankr-table.hh"
 #include "hb-aat-layout-bsln-table.hh" // Just so we compile it; unused otherwise.
 #include "hb-aat-layout-feat-table.hh"
@@ -40,6 +38,41 @@
 #include "hb-aat-ltag-table.hh"
 
 
+/*
+ * hb_aat_apply_context_t
+ */
+
+/* Note: This context is used for kerning, even without AAT, hence the condition. */
+#if !defined(HB_NO_AAT) || !defined(HB_NO_OT_KERN)
+
+AAT::hb_aat_apply_context_t::hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_,
+                                                     hb_font_t *font_,
+                                                     hb_buffer_t *buffer_,
+                                                     hb_blob_t *blob) :
+                                                       plan (plan_),
+                                                       font (font_),
+                                                       face (font->face),
+                                                       buffer (buffer_),
+                                                       sanitizer (),
+                                                       ankr_table (&Null (AAT::ankr)),
+                                                       lookup_index (0)
+{
+  sanitizer.init (blob);
+  sanitizer.set_num_glyphs (face->get_num_glyphs ());
+  sanitizer.start_processing ();
+  sanitizer.set_max_ops (HB_SANITIZE_MAX_OPS_MAX);
+}
+
+AAT::hb_aat_apply_context_t::~hb_aat_apply_context_t ()
+{ sanitizer.end_processing (); }
+
+void
+AAT::hb_aat_apply_context_t::set_ankr_table (const AAT::ankr *ankr_table_)
+{ ankr_table = ankr_table_; }
+
+#endif
+
+
 /**
  * SECTION:hb-aat-layout
  * @title: hb-aat-layout
@@ -50,6 +83,8 @@
  **/
 
 
+#if !defined(HB_NO_AAT) || defined(HAVE_CORETEXT)
+
 /* Table data courtesy of Apple.  Converted from mnemonics to integers
  * when moving to this file. */
 static const hb_aat_feature_mapping_t feature_mappings[] =
@@ -135,44 +170,12 @@ static const hb_aat_feature_mapping_t feature_mappings[] =
 const hb_aat_feature_mapping_t *
 hb_aat_layout_find_feature_mapping (hb_tag_t tag)
 {
-  return (const hb_aat_feature_mapping_t *) bsearch (&tag,
-                                                     feature_mappings,
-                                                     ARRAY_LENGTH (feature_mappings),
-                                                     sizeof (feature_mappings[0]),
-                                                     hb_aat_feature_mapping_t::cmp);
-}
-
-
-/*
- * hb_aat_apply_context_t
- */
-
-AAT::hb_aat_apply_context_t::hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_,
-                                                     hb_font_t *font_,
-                                                     hb_buffer_t *buffer_,
-                                                     hb_blob_t *blob) :
-                                                       plan (plan_),
-                                                       font (font_),
-                                                       face (font->face),
-                                                       buffer (buffer_),
-                                                       sanitizer (),
-                                                       ankr_table (&Null(AAT::ankr)),
-                                                       lookup_index (0),
-                                                       debug_depth (0)
-{
-  sanitizer.init (blob);
-  sanitizer.set_num_glyphs (face->get_num_glyphs ());
-  sanitizer.start_processing ();
-  sanitizer.set_max_ops (HB_SANITIZE_MAX_OPS_MAX);
+  return hb_sorted_array (feature_mappings).bsearch (tag);
 }
+#endif
 
-AAT::hb_aat_apply_context_t::~hb_aat_apply_context_t ()
-{ sanitizer.end_processing (); }
-
-void
-AAT::hb_aat_apply_context_t::set_ankr_table (const AAT::ankr *ankr_table_)
-{ ankr_table = ankr_table_; }
 
+#ifndef HB_NO_AAT
 
 /*
  * mort/morx/kerx/trak
@@ -311,14 +314,6 @@ hb_aat_layout_track (const hb_ot_shape_plan_t *plan,
   trak.apply (&c);
 }
 
-
-hb_language_t
-_hb_aat_language_get (hb_face_t *face,
-                      unsigned int i)
-{
-  return face->table.ltag->get_language (i);
-}
-
 /**
  * hb_aat_layout_get_feature_types:
  * @face: a face object
@@ -382,3 +377,6 @@ hb_aat_layout_feature_type_get_selector_infos (hb_face_t
 {
   return face->table.feat->get_selector_infos (feature_type, start_offset, selector_count, selectors, default_index);
 }
+
+
+#endif
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.h b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.h
index 42540f264e3a..977599e6cc49 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.h
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.h
@@ -85,7 +85,7 @@ typedef enum
   HB_AAT_LAYOUT_FEATURE_TYPE_LANGUAGE_TAG_TYPE                  = 39,
   HB_AAT_LAYOUT_FEATURE_TYPE_CJK_ROMAN_SPACING_TYPE             = 103,
 
-  _HB_AAT_LAYOUT_FEATURE_TYPE_MAX_VALUE= 0x7FFFFFFFu, /*< skip >*/
+  _HB_AAT_LAYOUT_FEATURE_TYPE_MAX_VALUE = HB_TAG_MAX_SIGNED /*< skip >*/
 } hb_aat_layout_feature_type_t;
 
 /**
@@ -424,7 +424,7 @@ typedef enum
   HB_AAT_LAYOUT_FEATURE_SELECTOR_DEFAULT_CJK_ROMAN              = 2,
   HB_AAT_LAYOUT_FEATURE_SELECTOR_FULL_WIDTH_CJK_ROMAN           = 3,
 
-  _HB_AAT_LAYOUT_FEATURE_SELECTOR_MAX_VALUE= 0x7FFFFFFFu, /*< skip >*/
+  _HB_AAT_LAYOUT_FEATURE_SELECTOR_MAX_VALUE = HB_TAG_MAX_SIGNED /*< skip >*/
 } hb_aat_layout_feature_selector_t;
 
 HB_EXTERN unsigned int
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.hh
index 80b6a1d0973d..1a95507ccee5 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-layout.hh
@@ -30,7 +30,7 @@
 #include "hb.hh"
 
 #include "hb-ot-shape.hh"
-
+#include "hb-aat-ltag-table.hh"
 
 struct hb_aat_feature_mapping_t
 {
@@ -39,14 +39,8 @@ struct hb_aat_feature_mapping_t
   hb_aat_layout_feature_selector_t selectorToEnable;
   hb_aat_layout_feature_selector_t selectorToDisable;
 
-  static int cmp (const void *key_, const void *entry_)
-  {
-    hb_tag_t key = * (unsigned int *) key_;
-    const hb_aat_feature_mapping_t * entry = (const hb_aat_feature_mapping_t *) entry_;
-    return key < entry->otFeatureTag ? -1 :
-           key > entry->otFeatureTag ? 1 :
-           0;
-  }
+  int cmp (hb_tag_t key) const
+  { return key < otFeatureTag ? -1 : key > otFeatureTag ? 1 : 0; }
 };
 
 HB_INTERNAL const hb_aat_feature_mapping_t *
@@ -77,9 +71,5 @@ hb_aat_layout_track (const hb_ot_shape_plan_t *plan,
                      hb_font_t *font,
                      hb_buffer_t *buffer);
 
-HB_INTERNAL hb_language_t
-_hb_aat_language_get (hb_face_t *face,
-                      unsigned int i);
-
 
 #endif /* HB_AAT_LAYOUT_HH */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-ltag-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-ltag-table.hh
index 23649f827ff0..f42ca23e0d87 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-ltag-table.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-ltag-table.hh
@@ -50,7 +50,7 @@ struct FTStringRange
   }
 
   protected:
-  NNOffsetTo<UnsizedArrayOf<HBUINT8> >
+  NNOffsetTo<UnsizedArrayOf<HBUINT8>>
                 tag;            /* Offset from the start of the table to
                                  * the beginning of the string */
   HBUINT16      length;         /* String length (in bytes) */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-map.cc b/src/java.desktop/share/native/libharfbuzz/hb-aat-map.cc
index c3d078dbd652..ad3eff7935fe 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-map.cc
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-map.cc
@@ -26,28 +26,55 @@
  * Google Author(s): Behdad Esfahbod
  */
 
+#include "hb.hh"
+
+#ifndef HB_NO_AAT_SHAPE
+
 #include "hb-aat-map.hh"
 
 #include "hb-aat-layout.hh"
+#include "hb-aat-layout-feat-table.hh"
 
 
-void hb_aat_map_builder_t::add_feature (hb_tag_t tag,
-                                        unsigned int value)
+void hb_aat_map_builder_t::add_feature (hb_tag_t tag, unsigned value)
 {
+  if (!face->table.feat->has_data ()) return;
+
   if (tag == HB_TAG ('a','a','l','t'))
   {
+    if (!face->table.feat->exposes_feature (HB_AAT_LAYOUT_FEATURE_TYPE_CHARACTER_ALTERNATIVES))
+      return;
     feature_info_t *info = features.push();
     info->type = HB_AAT_LAYOUT_FEATURE_TYPE_CHARACTER_ALTERNATIVES;
     info->setting = (hb_aat_layout_feature_selector_t) value;
+    info->seq = features.length;
+    info->is_exclusive = true;
     return;
   }
 
   const hb_aat_feature_mapping_t *mapping = hb_aat_layout_find_feature_mapping (tag);
   if (!mapping) return;
 
+  const AAT::FeatureName* feature = &face->table.feat->get_feature (mapping->aatFeatureType);
+  if (!feature->has_data ())
+  {
+    /* Special case: Chain::compile_flags will fall back to the deprecated version of
+     * small-caps if necessary, so we need to check for that possibility.
+     * https://github.com/harfbuzz/harfbuzz/issues/2307 */
+    if (mapping->aatFeatureType == HB_AAT_LAYOUT_FEATURE_TYPE_LOWER_CASE &&
+        mapping->selectorToEnable == HB_AAT_LAYOUT_FEATURE_SELECTOR_LOWER_CASE_SMALL_CAPS)
+    {
+      feature = &face->table.feat->get_feature (HB_AAT_LAYOUT_FEATURE_TYPE_LETTER_CASE);
+      if (!feature->has_data ()) return;
+    }
+    else return;
+  }
+
   feature_info_t *info = features.push();
   info->type = mapping->aatFeatureType;
   info->setting = value ? mapping->selectorToEnable : mapping->selectorToDisable;
+  info->seq = features.length;
+  info->is_exclusive = feature->is_exclusive ();
 }
 
 void
@@ -59,10 +86,17 @@ hb_aat_map_builder_t::compile (hb_aat_map_t  &m)
     features.qsort ();
     unsigned int j = 0;
     for (unsigned int i = 1; i < features.length; i++)
-      if (features[i].type != features[j].type)
+      if (features[i].type != features[j].type ||
+          /* Nonexclusive feature selectors come in even/odd pairs to turn a setting on/off
+           * respectively, so we mask out the low-order bit when checking for "duplicates"
+           * (selectors referring to the same feature setting) here. */
+          (!features[i].is_exclusive && ((features[i].setting & ~1) != (features[j].setting & ~1))))
         features[++j] = features[i];
     features.shrink (j + 1);
   }
 
   hb_aat_layout_compile_map (this, &m);
 }
+
+
+#endif
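
The (setting & ~1) masking in compile () relies on AAT's convention that non-exclusive selectors come in on/off pairs: an even value 2k enables a setting and 2k+1 disables it, so clearing the low bit makes the pair compare equal and only the first (highest-priority) request survives the dedup loop. For example:

    // Illustrative only.
    unsigned on  = 2;   // e.g. a "... on" selector
    unsigned off = 3;   // its matching "... off" selector
    bool same_setting = ((on & ~1u) == (off & ~1u));   // true: treated as duplicates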
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-aat-map.hh b/src/java.desktop/share/native/libharfbuzz/hb-aat-map.hh
index 594d48e7cd51..ce30daa60845 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-aat-map.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-aat-map.hh
@@ -64,19 +64,24 @@ struct hb_aat_map_builder_t
   {
     hb_aat_layout_feature_type_t  type;
     hb_aat_layout_feature_selector_t  setting;
+    bool is_exclusive;
     unsigned  seq; /* For stable sorting only. */
 
-    static int cmp (const void *pa, const void *pb)
+    HB_INTERNAL static int cmp (const void *pa, const void *pb)
     {
       const feature_info_t *a = (const feature_info_t *) pa;
       const feature_info_t *b = (const feature_info_t *) pb;
-      return (a->type != b->type) ? (a->type < b->type ? -1 : 1) :
-             (a->seq < b->seq ? -1 : a->seq > b->seq ? 1 : 0);
+      if (a->type != b->type) return (a->type < b->type ? -1 : 1);
+      if (!a->is_exclusive &&
+          (a->setting & ~1) != (b->setting & ~1)) return (a->setting < b->setting ? -1 : 1);
+            return (a->seq < b->seq ? -1 : a->seq > b->seq ? 1 : 0);
     }
 
-    int cmp (hb_aat_layout_feature_type_t ty) const
+    /* compares type & setting only, not is_exclusive flag or seq number */
+    int cmp (const feature_info_t& f) const
     {
-      return (type != ty) ? (type < ty ? -1 : 1) : 0;
+      return (f.type != type) ? (f.type < type ? -1 : 1) :
+             (f.setting != setting) ? (f.setting < setting ? -1 : 1) : 0;
     }
   };
 
@@ -84,7 +89,7 @@ struct hb_aat_map_builder_t
   hb_face_t *face;
 
   public:
-  hb_vector_t<feature_info_t> features;
+  hb_sorted_vector_t<feature_info_t> features;
 };
 
 
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-algs.hh b/src/java.desktop/share/native/libharfbuzz/hb-algs.hh
new file mode 100644
index 000000000000..e21cb5429c90
--- /dev/null
+++ b/src/java.desktop/share/native/libharfbuzz/hb-algs.hh
@@ -0,0 +1,1127 @@
+/*
+ * Copyright © 2017  Google, Inc.
+ * Copyright © 2019  Facebook, Inc.
+ *
+ *  This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Google Author(s): Behdad Esfahbod
+ * Facebook Author(s): Behdad Esfahbod
+ */
+
+#ifndef HB_ALGS_HH
+#define HB_ALGS_HH
+
+#include "hb.hh"
+#include "hb-meta.hh"
+#include "hb-null.hh"
+#include "hb-number.hh"
+
+
+/* Encodes three unsigned integers in one 64-bit number.  If the inputs have more than 21 bits,
+ * values will be truncated / overlap, and might not decode exactly. */
+#define HB_CODEPOINT_ENCODE3(x,y,z) (((uint64_t) (x) << 42) | ((uint64_t) (y) << 21) | (uint64_t) (z))
+#define HB_CODEPOINT_DECODE3_1(v) ((hb_codepoint_t) ((v) >> 42))
+#define HB_CODEPOINT_DECODE3_2(v) ((hb_codepoint_t) ((v) >> 21) & 0x1FFFFFu)
+#define HB_CODEPOINT_DECODE3_3(v) ((hb_codepoint_t) (v) & 0x1FFFFFu)
+
+/* Custom encoding used by hb-ucd. */
+#define HB_CODEPOINT_ENCODE3_11_7_14(x,y,z) (((uint32_t) ((x) & 0x07FFu) << 21) | (((uint32_t) (y) & 0x007Fu) << 14) | (uint32_t) ((z) & 0x3FFFu))
+#define HB_CODEPOINT_DECODE3_11_7_14_1(v) ((hb_codepoint_t) ((v) >> 21))
+#define HB_CODEPOINT_DECODE3_11_7_14_2(v) ((hb_codepoint_t) (((v) >> 14) & 0x007Fu) | 0x0300)
+#define HB_CODEPOINT_DECODE3_11_7_14_3(v) ((hb_codepoint_t) (v) & 0x3FFFu)
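
A quick round-trip of the 21-bit packing above, assuming only these macros; inputs wider than 21 bits would overlap, as the comment warns:

    // Illustrative check of HB_CODEPOINT_ENCODE3 / DECODE3.
    uint64_t v = HB_CODEPOINT_ENCODE3 (0x41u, 0x0301u, 0x10FFFFu);
    assert (HB_CODEPOINT_DECODE3_1 (v) == 0x41u);
    assert (HB_CODEPOINT_DECODE3_2 (v) == 0x0301u);
    assert (HB_CODEPOINT_DECODE3_3 (v) == 0x10FFFFu);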
+
+struct
+{
+  /* Note.  This is dangerous in that if it's passed an rvalue, it returns rvalue-reference. */
+  template <typename T> constexpr auto
+  operator () (T&& v) const HB_AUTO_RETURN ( hb_forward<T> (v) )
+}
+HB_FUNCOBJ (hb_identity);
+struct
+{
+  /* Like identity(), but only retains lvalue-references.  Rvalues are returned as rvalues. */
+  template <typename T> constexpr T&
+  operator () (T& v) const { return v; }
+
+  template <typename T> constexpr hb_remove_reference<T>
+  operator () (T&& v) const { return v; }
+}
+HB_FUNCOBJ (hb_lidentity);
+struct
+{
+  /* Like identity(), but always returns rvalue. */
+  template <typename T> constexpr hb_remove_reference<T>
+  operator () (T&& v) const { return v; }
+}
+HB_FUNCOBJ (hb_ridentity);
+
+struct
+{
+  template <typename T> constexpr bool
+  operator () (T&& v) const { return bool (hb_forward<T> (v)); }
+}
+HB_FUNCOBJ (hb_bool);
+
+struct
+{
+  private:
+
+  template <typename T> constexpr auto
+  impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
+
+  template <typename T> constexpr auto
+  impl (const T& v, hb_priority<0>) const HB_AUTO_RETURN
+  (
+    /* Knuth's multiplicative method: */
+    (uint32_t) v * 2654435761u
+  )
+
+  public:
+
+  template <typename T> constexpr auto
+  operator () (const T& v) const HB_RETURN (uint32_t, impl (v, hb_prioritize))
+}
+HB_FUNCOBJ (hb_hash);
+
+
+struct
+{
+  private:
+
+  /* Pointer-to-member-function. */
+  template <typename Appl, typename T, typename ...Ts> auto
+  impl (Appl&& a, hb_priority<2>, T &&v, Ts&&... ds) const HB_AUTO_RETURN
+  ((hb_deref (hb_forward<T> (v)).*hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
+
+  /* Pointer-to-member. */
+  template <typename Appl, typename T> auto
+  impl (Appl&& a, hb_priority<1>, T &&v) const HB_AUTO_RETURN
+  ((hb_deref (hb_forward<T> (v))).*hb_forward<Appl> (a))
+
+  /* Operator(). */
+  template <typename Appl, typename ...Ts> auto
+  impl (Appl&& a, hb_priority<0>, Ts&&... ds) const HB_AUTO_RETURN
+  (hb_deref (hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
+
+  public:
+
+  template <typename Appl, typename ...Ts> auto
+  operator () (Appl&& a, Ts&&... ds) const HB_AUTO_RETURN
+  (
+    impl (hb_forward<Appl> (a),
+          hb_prioritize,
+          hb_forward<Ts> (ds)...)
+  )
+}
+HB_FUNCOBJ (hb_invoke);
+
+template <unsigned Pos, typename Appl, typename V>
+struct hb_partial_t
+{
+  hb_partial_t (Appl a, V v) : a (a), v (v) {}
+
+  static_assert (Pos > 0, "");
+
+  template  auto
+  operator () (Ts&& ...ds) -> decltype (hb_invoke (hb_declval (Appl),
+                                                   hb_declval (V),
+                                                   hb_declval (Ts)...))
+  {
+    return hb_invoke (hb_forward (a),
+                      hb_forward (v),
+                      hb_forward (ds)...);
+  }
+  template  auto
+  operator () (T0&& d0, Ts&& ...ds) -> decltype (hb_invoke (hb_declval (Appl),
+                                                            hb_declval (T0),
+                                                            hb_declval (V),
+                                                            hb_declval (Ts)...))
+  {
+    return hb_invoke (hb_forward (a),
+                      hb_forward (d0),
+                      hb_forward (v),
+                      hb_forward (ds)...);
+  }
+
+  private:
+  hb_reference_wrapper<Appl> a;
+  V v;
+};
+template <unsigned Pos=1, typename Appl, typename V>
+auto hb_partial (Appl&& a, V&& v) HB_AUTO_RETURN
+(( hb_partial_t<Pos, Appl, V> (a, v) ))
+
+/* The following, HB_PARTIALIZE, macro uses a particular corner-case
+ * of C++11 that is not particularly well-supported by all compilers.
+ * What's happening is that it's using "this" in a trailing return-type
+ * via decltype().  Broken compilers deduce the type of "this" pointer
+ * in that context differently from what it resolves to in the body
+ * of the function.
+ *
+ * One probable cause of this is that at the time of trailing return
+ * type declaration, "this" points to an incomplete type, whereas in
+ * the function body the type is complete.  That doesn't justify the
+ * error in any way, but is probably what's happening.
+ *
+ * In the case of MSVC, we get around this by using C++14 "decltype(auto)"
+ * which deduces the type from the actual return statement.  For gcc 4.8
+ * we use "+this" instead of "this" which produces an rvalue that seems
+ * to be deduced as the same type with this particular compiler, and seem
+ * to be fine as default code path as well.
+ */
+#ifdef _MSC_VER
+/* https://github.com/harfbuzz/harfbuzz/issues/1730 */ \
+#define HB_PARTIALIZE(Pos) \
+  template <typename _T> \
+  decltype(auto) operator () (_T&& _v) const \
+  { return hb_partial<Pos> (this, hb_forward<_T> (_v)); } \
+  static_assert (true, "")
+#else
+/* https://github.com/harfbuzz/harfbuzz/issues/1724 */
+#define HB_PARTIALIZE(Pos) \
+  template <typename _T> \
+  auto operator () (_T&& _v) const HB_AUTO_RETURN \
+  (hb_partial<Pos> (+this, hb_forward<_T> (_v))) \
+  static_assert (true, "")
+#endif
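
hb_partial binds one argument of a callable at position Pos, and HB_PARTIALIZE(Pos) gives a function object a single-argument operator() that returns such a binder, i.e. simple currying. A standalone sketch of the idea in plain C++17, deliberately not using the HarfBuzz types:

    // Illustrative currying sketch, independent of hb_partial_t.
    #include <cassert>

    struct add_t
    {
      int operator () (int a, int b) const { return a + b; }
      auto operator () (int b) const          // like HB_PARTIALIZE(2): bind the 2nd argument
      { return [*this, b] (int a) { return (*this) (a, b); }; }
    };

    int main ()
    {
      add_t add;
      assert (add (3, 4) == 7);
      assert (add (4) (3) == 7);   // hb_add (4) (3) evaluates the same way
      return 0;
    }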
+
+
+struct
+{
+  private:
+
+  template <typename Pred, typename Val> auto
+  impl (Pred&& p, Val &&v, hb_priority<1>) const HB_AUTO_RETURN
+  (hb_deref (hb_forward<Pred> (p)).has (hb_forward<Val> (v)))
+
+  template <typename Pred, typename Val> auto
+  impl (Pred&& p, Val &&v, hb_priority<0>) const HB_AUTO_RETURN
+  (
+    hb_invoke (hb_forward<Pred> (p),
+               hb_forward<Val> (v))
+  )
+
+  public:
+
+  template <typename Pred, typename Val> auto
+  operator () (Pred&& p, Val &&v) const HB_RETURN (bool,
+    impl (hb_forward<Pred> (p),
+          hb_forward<Val> (v),
+          hb_prioritize)
+  )
+}
+HB_FUNCOBJ (hb_has);
+
+struct
+{
+  private:
+
+  template <typename Pred, typename Val> auto
+  impl (Pred&& p, Val &&v, hb_priority<1>) const HB_AUTO_RETURN
+  (
+    hb_has (hb_forward<Pred> (p),
+            hb_forward<Val> (v))
+  )
+
+  template <typename Pred, typename Val> auto
+  impl (Pred&& p, Val &&v, hb_priority<0>) const HB_AUTO_RETURN
+  (
+    hb_forward<Pred> (p) == hb_forward<Val> (v)
+  )
+
+  public:
+
+  template <typename Pred, typename Val> auto
+  operator () (Pred&& p, Val &&v) const HB_RETURN (bool,
+    impl (hb_forward<Pred> (p),
+          hb_forward<Val> (v),
+          hb_prioritize)
+  )
+}
+HB_FUNCOBJ (hb_match);
+
+struct
+{
+  private:
+
+  template <typename Proj, typename Val> auto
+  impl (Proj&& f, Val &&v, hb_priority<2>) const HB_AUTO_RETURN
+  (hb_deref (hb_forward<Proj> (f)).get (hb_forward<Val> (v)))
+
+  template <typename Proj, typename Val> auto
+  impl (Proj&& f, Val &&v, hb_priority<1>) const HB_AUTO_RETURN
+  (
+    hb_invoke (hb_forward<Proj> (f),
+               hb_forward<Val> (v))
+  )
+
+  template <typename Proj, typename Val> auto
+  impl (Proj&& f, Val &&v, hb_priority<0>) const HB_AUTO_RETURN
+  (
+    hb_forward<Proj> (f)[hb_forward<Val> (v)]
+  )
+
+  public:
+
+  template <typename Proj, typename Val> auto
+  operator () (Proj&& f, Val &&v) const HB_AUTO_RETURN
+  (
+    impl (hb_forward<Proj> (f),
+          hb_forward<Val> (v),
+          hb_prioritize)
+  )
+}
+HB_FUNCOBJ (hb_get);
+
+
+template <typename T1, typename T2>
+struct hb_pair_t
+{
+  typedef T1 first_t;
+  typedef T2 second_t;
+  typedef hb_pair_t<T1, T2> pair_t;
+
+  hb_pair_t (T1 a, T2 b) : first (a), second (b) {}
+
+  template 
+  operator hb_pair_t () { return hb_pair_t (first, second); }
+
+  hb_pair_t<T2, T1> reverse () const
+  { return hb_pair_t<T2, T1> (second, first); }
+
+  bool operator == (const pair_t& o) const { return first == o.first && second == o.second; }
+  bool operator != (const pair_t& o) const { return !(*this == o); }
+  bool operator < (const pair_t& o) const { return first < o.first || (first == o.first && second < o.second); }
+  bool operator >= (const pair_t& o) const { return !(*this < o); }
+  bool operator > (const pair_t& o) const { return first > o.first || (first == o.first && second > o.second); }
+  bool operator <= (const pair_t& o) const { return !(*this > o); }
+
+  T1 first;
+  T2 second;
+};
+#define hb_pair_t(T1,T2) hb_pair_t<T1, T2>
+template <typename T1, typename T2> static inline hb_pair_t<T1, T2>
+hb_pair (T1&& a, T2&& b) { return hb_pair_t<T1, T2> (a, b); }
+
+struct
+{
+  template <typename Pair> constexpr typename Pair::first_t
+  operator () (const Pair& pair) const { return pair.first; }
+}
+HB_FUNCOBJ (hb_first);
+
+struct
+{
+  template <typename Pair> constexpr typename Pair::second_t
+  operator () (const Pair& pair) const { return pair.second; }
+}
+HB_FUNCOBJ (hb_second);
+
+/* Note.  In min/max impl, we can use hb_type_identity for second argument.
+ * However, that would silently convert between different-signedness integers.
+ * Instead we accept two different types, such that compiler can err if
+ * comparing integers of different signedness. */
+struct
+{
+  template <typename T, typename T2> constexpr auto
+  operator () (T&& a, T2&& b) const HB_AUTO_RETURN
+  (hb_forward<T> (a) <= hb_forward<T2> (b) ? hb_forward<T> (a) : hb_forward<T2> (b))
+}
+HB_FUNCOBJ (hb_min);
+struct
+{
+  template <typename T, typename T2> constexpr auto
+  operator () (T&& a, T2&& b) const HB_AUTO_RETURN
+  (hb_forward<T> (a) >= hb_forward<T2> (b) ? hb_forward<T> (a) : hb_forward<T2> (b))
+}
+HB_FUNCOBJ (hb_max);
+struct
+{
+  template <typename T, typename T2, typename T3> constexpr auto
+  operator () (T&& x, T2&& min, T3&& max) const HB_AUTO_RETURN
+  (hb_min (hb_max (hb_forward<T> (x), hb_forward<T2> (min)), hb_forward<T3> (max)))
+}
+HB_FUNCOBJ (hb_clamp);
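
hb_min/hb_max deliberately take two type parameters so that comparing operands of different signedness stays visible to the compiler instead of being converted silently; hb_clamp just composes the two. Illustrative uses:

    // Illustrative only.
    int lo = hb_min (3, 7);         // 3
    int hi = hb_max (3, 7);         // 7
    int c  = hb_clamp (10, 0, 7);   // 7: clamped to the [0, 7] range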
+
+
+/*
+ * Bithacks.
+ */
+
+/* Return the number of 1 bits in v. */
+template <typename T>
+static inline HB_CONST_FUNC unsigned int
+hb_popcount (T v)
+{
+#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__)
+  if (sizeof (T) <= sizeof (unsigned int))
+    return __builtin_popcount (v);
+
+  if (sizeof (T) <= sizeof (unsigned long))
+    return __builtin_popcountl (v);
+
+  if (sizeof (T) <= sizeof (unsigned long long))
+    return __builtin_popcountll (v);
+#endif
+
+  if (sizeof (T) <= 4)
+  {
+    /* "HACKMEM 169" */
+    uint32_t y;
+    y = (v >> 1) &033333333333;
+    y = v - y - ((y >>1) & 033333333333);
+    return (((y + (y >> 3)) & 030707070707) % 077);
+  }
+
+  if (sizeof (T) == 8)
+  {
+    unsigned int shift = 32;
+    return hb_popcount ((uint32_t) v) + hb_popcount ((uint32_t) (v >> shift));
+  }
+
+  if (sizeof (T) == 16)
+  {
+    unsigned int shift = 64;
+    return hb_popcount ((uint64_t) v) + hb_popcount ((uint64_t) (v >> shift));
+  }
+
+  assert (0);
+  return 0; /* Shut up stupid compiler. */
+}
+
+/* Returns the number of bits needed to store number */
+template <typename T>
+static inline HB_CONST_FUNC unsigned int
+hb_bit_storage (T v)
+{
+  if (unlikely (!v)) return 0;
+
+#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__)
+  if (sizeof (T) <= sizeof (unsigned int))
+    return sizeof (unsigned int) * 8 - __builtin_clz (v);
+
+  if (sizeof (T) <= sizeof (unsigned long))
+    return sizeof (unsigned long) * 8 - __builtin_clzl (v);
+
+  if (sizeof (T) <= sizeof (unsigned long long))
+    return sizeof (unsigned long long) * 8 - __builtin_clzll (v);
+#endif
+
+#if (defined(_MSC_VER) && _MSC_VER >= 1500) || (defined(__MINGW32__) && (__GNUC__ < 4))
+  if (sizeof (T) <= sizeof (unsigned int))
+  {
+    unsigned long where;
+    _BitScanReverse (&where, v);
+    return 1 + where;
+  }
+# if defined(_WIN64)
+  if (sizeof (T) <= 8)
+  {
+    unsigned long where;
+    _BitScanReverse64 (&where, v);
+    return 1 + where;
+  }
+# endif
+#endif
+
+  if (sizeof (T) <= 4)
+  {
+    /* "bithacks" */
+    const unsigned int b[] = {0x2, 0xC, 0xF0, 0xFF00, 0xFFFF0000};
+    const unsigned int S[] = {1, 2, 4, 8, 16};
+    unsigned int r = 0;
+    for (int i = 4; i >= 0; i--)
+      if (v & b[i])
+      {
+        v >>= S[i];
+        r |= S[i];
+      }
+    return r + 1;
+  }
+  if (sizeof (T) <= 8)
+  {
+    /* "bithacks" */
+    const uint64_t b[] = {0x2ULL, 0xCULL, 0xF0ULL, 0xFF00ULL, 0xFFFF0000ULL, 0xFFFFFFFF00000000ULL};
+    const unsigned int S[] = {1, 2, 4, 8, 16, 32};
+    unsigned int r = 0;
+    for (int i = 5; i >= 0; i--)
+      if (v & b[i])
+      {
+        v >>= S[i];
+        r |= S[i];
+      }
+    return r + 1;
+  }
+  if (sizeof (T) == 16)
+  {
+    unsigned int shift = 64;
+    return (v >> shift) ? hb_bit_storage ((uint64_t) (v >> shift)) + shift :
+                          hb_bit_storage ((uint64_t) v);
+  }
+
+  assert (0);
+  return 0; /* Shut up stupid compiler. */
+}
+
+/* Returns the number of zero bits in the least significant side of v */
+template <typename T>
+static inline HB_CONST_FUNC unsigned int
+hb_ctz (T v)
+{
+  if (unlikely (!v)) return 8 * sizeof (T);
+
+#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__)
+  if (sizeof (T) <= sizeof (unsigned int))
+    return __builtin_ctz (v);
+
+  if (sizeof (T) <= sizeof (unsigned long))
+    return __builtin_ctzl (v);
+
+  if (sizeof (T) <= sizeof (unsigned long long))
+    return __builtin_ctzll (v);
+#endif
+
+#if (defined(_MSC_VER) && _MSC_VER >= 1500) || (defined(__MINGW32__) && (__GNUC__ < 4))
+  if (sizeof (T) <= sizeof (unsigned int))
+  {
+    unsigned long where;
+    _BitScanForward (&where, v);
+    return where;
+  }
+# if defined(_WIN64)
+  if (sizeof (T) <= 8)
+  {
+    unsigned long where;
+    _BitScanForward64 (&where, v);
+    return where;
+  }
+# endif
+#endif
+
+  if (sizeof (T) <= 4)
+  {
+    /* "bithacks" */
+    unsigned int c = 32;
+    v &= - (int32_t) v;
+    if (v) c--;
+    if (v & 0x0000FFFF) c -= 16;
+    if (v & 0x00FF00FF) c -= 8;
+    if (v & 0x0F0F0F0F) c -= 4;
+    if (v & 0x33333333) c -= 2;
+    if (v & 0x55555555) c -= 1;
+    return c;
+  }
+  if (sizeof (T) <= 8)
+  {
+    /* "bithacks" */
+    unsigned int c = 64;
+    v &= - (int64_t) (v);
+    if (v) c--;
+    if (v & 0x00000000FFFFFFFFULL) c -= 32;
+    if (v & 0x0000FFFF0000FFFFULL) c -= 16;
+    if (v & 0x00FF00FF00FF00FFULL) c -= 8;
+    if (v & 0x0F0F0F0F0F0F0F0FULL) c -= 4;
+    if (v & 0x3333333333333333ULL) c -= 2;
+    if (v & 0x5555555555555555ULL) c -= 1;
+    return c;
+  }
+  if (sizeof (T) == 16)
+  {
+    unsigned int shift = 64;
+    return (uint64_t) v ? hb_bit_storage ((uint64_t) v) :
+                          hb_bit_storage ((uint64_t) (v >> shift)) + shift;
+  }
+
+  assert (0);
+  return 0; /* Shut up stupid compiler. */
+}
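
All three bit helpers prefer the GCC/Clang builtins (or the MSVC intrinsics) and fall back to the classic bit-twiddling versions; for a concrete value:

    // Illustrative: v = 0x50u is binary 101'0000.
    assert (hb_popcount (0x50u)    == 2);   // two set bits
    assert (hb_bit_storage (0x50u) == 7);   // highest set bit is bit 6, so 7 bits are needed
    assert (hb_ctz (0x50u)         == 4);   // four trailing zero bits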
+
+
+/*
+ * Tiny stuff.
+ */
+
+/* ASCII tag/character handling */
+static inline bool ISALPHA (unsigned char c)
+{ return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); }
+static inline bool ISALNUM (unsigned char c)
+{ return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9'); }
+static inline bool ISSPACE (unsigned char c)
+{ return c == ' ' || c =='\f'|| c =='\n'|| c =='\r'|| c =='\t'|| c =='\v'; }
+static inline unsigned char TOUPPER (unsigned char c)
+{ return (c >= 'a' && c <= 'z') ? c - 'a' + 'A' : c; }
+static inline unsigned char TOLOWER (unsigned char c)
+{ return (c >= 'A' && c <= 'Z') ? c - 'A' + 'a' : c; }
+static inline bool ISHEX (unsigned char c)
+{ return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'); }
+static inline unsigned char TOHEX (uint8_t c)
+{ return (c & 0xF) <= 9 ? (c & 0xF) + '0' : (c & 0xF) + 'a' - 10; }
+static inline uint8_t FROMHEX (unsigned char c)
+{ return (c >= '0' && c <= '9') ? c - '0' : TOLOWER (c) - 'a' + 10; }
+
+static inline unsigned int DIV_CEIL (const unsigned int a, unsigned int b)
+{ return (a + (b - 1)) / b; }
+
+
+#undef  ARRAY_LENGTH
+template <typename Type, unsigned int n>
+static inline unsigned int ARRAY_LENGTH (const Type (&)[n]) { return n; }
+/* A const version, but does not detect erratically being called on pointers. */
+#define ARRAY_LENGTH_CONST(__array) ((signed int) (sizeof (__array) / sizeof (__array[0])))
+
+
+static inline int
+hb_memcmp (const void *a, const void *b, unsigned int len)
+{
+  /* It's illegal to pass NULL to memcmp(), even if len is zero.
+   * So, wrap it.
+   * https://sourceware.org/bugzilla/show_bug.cgi?id=23878 */
+  if (unlikely (!len)) return 0;
+  return memcmp (a, b, len);
+}
+
+static inline void *
+hb_memset (void *s, int c, unsigned int n)
+{
+  /* It's illegal to pass NULL to memset(), even if n is zero. */
+  if (unlikely (!n)) return 0;
+  return memset (s, c, n);
+}
+
+static inline unsigned int
+hb_ceil_to_4 (unsigned int v)
+{
+  return ((v - 1) | 3) + 1;
+}
+
+template <typename T> static inline bool
+hb_in_range (T u, T lo, T hi)
+{
+  static_assert (!hb_is_signed<T>::value, "");
+
+  /* The casts below are important as if T is smaller than int,
+   * the subtract results will become a signed int! */
+  return (T)(u - lo) <= (T)(hi - lo);
+}
+template <typename T> static inline bool
+hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2)
+{
+  return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2);
+}
+template <typename T> static inline bool
+hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3)
+{
+  return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3);
+}
+
+
+/*
+ * Overflow checking.
+ */
+
+/* Consider __builtin_mul_overflow use here also */
+static inline bool
+hb_unsigned_mul_overflows (unsigned int count, unsigned int size)
+{
+  return (size > 0) && (count >= ((unsigned int) -1) / size);
+}
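
The overflow check rejects any count whose product with size would not fit in an unsigned int; for example, with a 32-bit unsigned int:

    // Illustrative: UINT_MAX / 8 == 536870911 on a 32-bit unsigned int.
    assert (!hb_unsigned_mul_overflows (1000, 8));          // 8000 fits
    assert ( hb_unsigned_mul_overflows (0x20000000u, 8));   // 2^29 * 8 == 2^32 does not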
+
+
+/*
+ * Sort and search.
+ */
+
+template <typename K, typename V, typename ...Ts>
+static int
+_hb_cmp_method (const void *pkey, const void *pval, Ts... ds)
+{
+  const K& key = * (const K*) pkey;
+  const V& val = * (const V*) pval;
+
+  return val.cmp (key, ds...);
+}
+
+template <typename K, typename V, typename ...Ts>
+static inline bool
+hb_bsearch_impl (unsigned *pos, /* Out */
+                 const K& key,
+                 V* base, size_t nmemb, size_t stride,
+                 int (*compar)(const void *_key, const void *_item, Ts... _ds),
+                 Ts... ds)
+{
+  /* This is our *only* bsearch implementation. */
+
+  int min = 0, max = (int) nmemb - 1;
+  while (min <= max)
+  {
+    int mid = ((unsigned int) min + (unsigned int) max) / 2;
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wcast-align"
+    V* p = (V*) (((const char *) base) + (mid * stride));
+#pragma GCC diagnostic pop
+    int c = compar ((const void *) hb_addressof (key), (const void *) p, ds...);
+    if (c < 0)
+      max = mid - 1;
+    else if (c > 0)
+      min = mid + 1;
+    else
+    {
+      *pos = mid;
+      return true;
+    }
+  }
+  *pos = min;
+  return false;
+}
+
+template <typename K, typename V>
+static inline V*
+hb_bsearch (const K& key, V* base,
+            size_t nmemb, size_t stride = sizeof (V),
+            int (*compar)(const void *_key, const void *_item) = _hb_cmp_method<K, V>)
+{
+  unsigned pos;
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wcast-align"
+  return hb_bsearch_impl (&pos, key, base, nmemb, stride, compar) ?
+         (V*) (((const char *) base) + (pos * stride)) : nullptr;
+#pragma GCC diagnostic pop
+}
+template <typename K, typename V, typename ...Ts>
+static inline V*
+hb_bsearch (const K& key, V* base,
+            size_t nmemb, size_t stride,
+            int (*compar)(const void *_key, const void *_item, Ts... _ds),
+            Ts... ds)
+{
+  unsigned pos;
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wcast-align"
+  return hb_bsearch_impl (&pos, key, base, nmemb, stride, compar, ds...) ?
+         (V*) (((const char *) base) + (pos * stride)) : nullptr;
+#pragma GCC diagnostic pop
+}
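
hb_bsearch looks a key up in a sorted array of V; with the default comparator it routes through _hb_cmp_method, which calls V::cmp (key). A minimal standalone illustration of that contract (a hypothetical record type, not a HarfBuzz struct):

    // Illustrative only: a record type exposing the cmp (key) contract expected here.
    struct record_t
    {
      unsigned tag;
      int cmp (unsigned key) const { return key < tag ? -1 : key > tag ? 1 : 0; }
    };

    static record_t table[] = { {10}, {20}, {30} };   // must be sorted ascending by tag
    // record_t *r = hb_bsearch (20u, table, 3);      // would return &table[1]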
+
+
+/* From https://github.com/noporpoise/sort_r
+   Feb 5, 2019 (c8c65c1e)
+   Modified to support optional argument using templates */
+
+/* Isaac Turner 29 April 2014 Public Domain */
+
+/*
+hb_qsort function to be exported.
+Parameters:
+  base is the array to be sorted
+  nel is the number of elements in the array
+  width is the size in bytes of each element of the array
+  compar is the comparison function
+  arg (optional) is a pointer to be passed to the comparison function
+
+void hb_qsort(void *base, size_t nel, size_t width,
+              int (*compar)(const void *_a, const void *_b, [void *_arg]),
+              [void *arg]);
+*/
+
+#define SORT_R_SWAP(a,b,tmp) ((tmp) = (a), (a) = (b), (b) = (tmp))
+
+/* swap a and b */
+/* a and b must not be equal! */
+static inline void sort_r_swap(char *__restrict a, char *__restrict b,
+                               size_t w)
+{
+  char tmp, *end = a+w;
+  for(; a < end; a++, b++) { SORT_R_SWAP(*a, *b, tmp); }
+}
+
+/* swap a, b iff a>b */
+/* a and b must not be equal! */
+/* __restrict is same as restrict but better support on old machines */
+template <typename ...Ts>
+static inline int sort_r_cmpswap(char *__restrict a,
+                                 char *__restrict b, size_t w,
+                                 int (*compar)(const void *_a,
+                                               const void *_b,
+                                               Ts... _ds),
+                                 Ts... ds)
+{
+  if(compar(a, b, ds...) > 0) {
+    sort_r_swap(a, b, w);
+    return 1;
+  }
+  return 0;
+}
+
+/*
+Swap consecutive blocks of bytes of size na and nb starting at memory addr ptr,
+with the smallest swap so that the blocks are in the opposite order. Blocks may
+be internally re-ordered e.g.
+  12345ab  ->   ab34512
+  123abc   ->   abc123
+  12abcde  ->   deabc12
+*/
+static inline void sort_r_swap_blocks(char *ptr, size_t na, size_t nb)
+{
+  if(na > 0 && nb > 0) {
+    if(na > nb) { sort_r_swap(ptr, ptr+na, nb); }
+    else { sort_r_swap(ptr, ptr+nb, na); }
+  }
+}
+
+/* Implement recursive quicksort ourselves */
+/* Note: quicksort is not stable, equivalent values may be swapped */
+template <typename ...Ts>
+static inline void sort_r_simple(void *base, size_t nel, size_t w,
+                                 int (*compar)(const void *_a,
+                                               const void *_b,
+                                               Ts... _ds),
+                                 Ts... ds)
+{
+  char *b = (char *)base, *end = b + nel*w;
+
+  /* for(size_t i=0; i<nel; i++) {printf("%4i", *(int*)(b + i*sizeof(int)));}
+  printf("\n"); */
+
+  if(nel < 10) {
+    /* Insertion sort for arbitrarily small inputs */
+    char *pi, *pj;
+    for(pi = b+w; pi < end; pi += w) {
+      for(pj = pi; pj > b && sort_r_cmpswap(pj-w,pj,w,compar,ds...); pj -= w) {}
+    }
+  }
+  else
+  {
+    /* nel > 9; Quicksort */
+
+    int cmp;
+    char *pl, *ple, *pr, *pre, *pivot;
+    char *last = b+w*(nel-1), *tmp;
+
+    /*
+    Use median of second, middle and second-last items as pivot.
+    First and last may have been swapped with pivot and therefore be extreme
+    */
+    char *l[3];
+    l[0] = b + w;
+    l[1] = b+w*(nel/2);
+    l[2] = last - w;
+
+    /* printf("pivots: %i, %i, %i\n", *(int*)l[0], *(int*)l[1], *(int*)l[2]); */
+
+    if(compar(l[0],l[1],ds...) > 0) { SORT_R_SWAP(l[0], l[1], tmp); }
+    if(compar(l[1],l[2],ds...) > 0) {
+      SORT_R_SWAP(l[1], l[2], tmp);
+      if(compar(l[0],l[1],ds...) > 0) { SORT_R_SWAP(l[0], l[1], tmp); }
+    }
+
+    /* swap mid value (l[1]), and last element to put pivot as last element */
+    if(l[1] != last) { sort_r_swap(l[1], last, w); }
+
+    /*
+    pl is the next item on the left to be compared to the pivot
+    pr is the last item on the right that was compared to the pivot
+    ple is the left position to put the next item that equals the pivot
+    ple is the last right position where we put an item that equals the pivot
+                                           v- end (beyond the array)
+      EEEEEELLLLLLLLuuuuuuuuGGGGGGGEEEEEEEE.
+      ^- b  ^- ple  ^- pl   ^- pr  ^- pre ^- last (where the pivot is)
+    Pivot comparison key:
+      E = equal, L = less than, u = unknown, G = greater than, E = equal
+    */
+    pivot = last;
+    ple = pl = b;
+    pre = pr = last;
+
+    /*
+    Strategy:
+    Loop into the list from the left and right at the same time to find:
+    - an item on the left that is greater than the pivot
+    - an item on the right that is less than the pivot
+    Once found, they are swapped and the loop continues.
+    Meanwhile items that are equal to the pivot are moved to the edges of the
+    array.
+    */
+    while(pl < pr) {
+      /* Move left hand items which are equal to the pivot to the far left.
+         break when we find an item that is greater than the pivot */
+      for(; pl < pr; pl += w) {
+        cmp = compar(pl, pivot, ds...);
+        if(cmp > 0) { break; }
+        else if(cmp == 0) {
+          if(ple < pl) { sort_r_swap(ple, pl, w); }
+          ple += w;
+        }
+      }
+      /* break if last batch of left hand items were equal to pivot */
+      if(pl >= pr) { break; }
+      /* Move right hand items which are equal to the pivot to the far right.
+         break when we find an item that is less than the pivot */
+      for(; pl < pr; ) {
+        pr -= w; /* Move right pointer onto an unprocessed item */
+        cmp = compar(pr, pivot, ds...);
+        if(cmp == 0) {
+          pre -= w;
+          if(pr < pre) { sort_r_swap(pr, pre, w); }
+        }
+        else if(cmp < 0) {
+          if(pl < pr) { sort_r_swap(pl, pr, w); }
+          pl += w;
+          break;
+        }
+      }
+    }
+
+    pl = pr; /* pr may have gone below pl */
+
+    /*
+    Now we need to go from: EEELLLGGGGEEEE
+                        to: LLLEEEEEEEGGGG
+    Pivot comparison key:
+      E = equal, L = less than, u = unknown, G = greater than, E = equal
+    */
+    sort_r_swap_blocks(b, ple-b, pl-ple);
+    sort_r_swap_blocks(pr, pre-pr, end-pre);
+
+    /*for(size_t i=0; i static inline void
+hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *), T3 *array2)
+{
+  for (unsigned int i = 1; i < len; i++)
+  {
+    unsigned int j = i;
+    while (j && compar (&array[j - 1], &array[i]) > 0)
+      j--;
+    if (i == j)
+      continue;
+    /* Move item i to occupy place for item j, shift what's in between. */
+    {
+      T t = array[i];
+      memmove (&array[j + 1], &array[j], (i - j) * sizeof (T));
+      array[j] = t;
+    }
+    if (array2)
+    {
+      T3 t = array2[i];
+      memmove (&array2[j + 1], &array2[j], (i - j) * sizeof (T3));
+      array2[j] = t;
+    }
+  }
+}
+
+template <typename T> static inline void
+hb_stable_sort (T *array, unsigned int len, int(*compar)(const T *, const T *))
+{
+  hb_stable_sort (array, len, compar, (int *) nullptr);
+}
+
+static inline hb_bool_t
+hb_codepoint_parse (const char *s, unsigned int len, int base, hb_codepoint_t *out)
+{
+  unsigned int v;
+  const char *p = s;
+  const char *end = p + len;
+  if (unlikely (!hb_parse_uint (&p, end, &v, true/* whole buffer */, base)))
+    return false;
+
+  *out = v;
+  return true;
+}
+
+
+/* Operators. */
+
+struct hb_bitwise_and
+{ HB_PARTIALIZE(2);
+  static constexpr bool passthru_left = false;
+  static constexpr bool passthru_right = false;
+  template <typename T> constexpr auto
+  operator () (const T &a, const T &b) const HB_AUTO_RETURN (a & b)
+}
+HB_FUNCOBJ (hb_bitwise_and);
+struct hb_bitwise_or
+{ HB_PARTIALIZE(2);
+  static constexpr bool passthru_left = true;
+  static constexpr bool passthru_right = true;
+  template <typename T> constexpr auto
+  operator () (const T &a, const T &b) const HB_AUTO_RETURN (a | b)
+}
+HB_FUNCOBJ (hb_bitwise_or);
+struct hb_bitwise_xor
+{ HB_PARTIALIZE(2);
+  static constexpr bool passthru_left = true;
+  static constexpr bool passthru_right = true;
+  template <typename T> constexpr auto
+  operator () (const T &a, const T &b) const HB_AUTO_RETURN (a ^ b)
+}
+HB_FUNCOBJ (hb_bitwise_xor);
+struct hb_bitwise_sub
+{ HB_PARTIALIZE(2);
+  static constexpr bool passthru_left = true;
+  static constexpr bool passthru_right = false;
+  template <typename T> constexpr auto
+  operator () (const T &a, const T &b) const HB_AUTO_RETURN (a & ~b)
+}
+HB_FUNCOBJ (hb_bitwise_sub);
+struct
+{
+  template <typename T> constexpr auto
+  operator () (const T &a) const HB_AUTO_RETURN (~a)
+}
+HB_FUNCOBJ (hb_bitwise_neg);
+
+struct
+{ HB_PARTIALIZE(2);
+  template <typename T, typename T2> constexpr auto
+  operator () (const T &a, const T2 &b) const HB_AUTO_RETURN (a + b)
+}
+HB_FUNCOBJ (hb_add);
+struct
+{ HB_PARTIALIZE(2);
+  template <typename T, typename T2> constexpr auto
+  operator () (const T &a, const T2 &b) const HB_AUTO_RETURN (a - b)
+}
+HB_FUNCOBJ (hb_sub);
+struct
+{ HB_PARTIALIZE(2);
+  template <typename T, typename T2> constexpr auto
+  operator () (const T &a, const T2 &b) const HB_AUTO_RETURN (a * b)
+}
+HB_FUNCOBJ (hb_mul);
+struct
+{ HB_PARTIALIZE(2);
+  template <typename T, typename T2> constexpr auto
+  operator () (const T &a, const T2 &b) const HB_AUTO_RETURN (a / b)
+}
+HB_FUNCOBJ (hb_div);
+struct
+{ HB_PARTIALIZE(2);
+  template <typename T, typename T2> constexpr auto
+  operator () (const T &a, const T2 &b) const HB_AUTO_RETURN (a % b)
+}
+HB_FUNCOBJ (hb_mod);
+struct
+{
+  template <typename T> constexpr auto
+  operator () (const T &a) const HB_AUTO_RETURN (+a)
+}
+HB_FUNCOBJ (hb_pos);
+struct
+{
+  template <typename T> constexpr auto
+  operator () (const T &a) const HB_AUTO_RETURN (-a)
+}
+HB_FUNCOBJ (hb_neg);
+struct
+{
+  template <typename T> constexpr auto
+  operator () (T &a) const HB_AUTO_RETURN (++a)
+}
+HB_FUNCOBJ (hb_inc);
+struct
+{
+  template <typename T> constexpr auto
+  operator () (T &a) const HB_AUTO_RETURN (--a)
+}
+HB_FUNCOBJ (hb_dec);
+
+
+/* Compiler-assisted vectorization. */
+
+/* Type behaving similar to vectorized vars defined using __attribute__((vector_size(...))),
+ * basically a fixed-size bitset. */
+template <typename elt_t, unsigned int byte_size>
+struct hb_vector_size_t
+{
+  elt_t& operator [] (unsigned int i) { return v[i]; }
+  const elt_t& operator [] (unsigned int i) const { return v[i]; }
+
+  void clear (unsigned char v = 0) { memset (this, v, sizeof (*this)); }
+
+  template <typename Op>
+  hb_vector_size_t process (const Op& op) const
+  {
+    hb_vector_size_t r;
+    for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
+      r.v[i] = op (v[i]);
+    return r;
+  }
+  template <typename Op>
+  hb_vector_size_t process (const Op& op, const hb_vector_size_t &o) const
+  {
+    hb_vector_size_t r;
+    for (unsigned int i = 0; i < ARRAY_LENGTH (v); i++)
+      r.v[i] = op (v[i], o.v[i]);
+    return r;
+  }
+  hb_vector_size_t operator | (const hb_vector_size_t &o) const
+  { return process (hb_bitwise_or, o); }
+  hb_vector_size_t operator & (const hb_vector_size_t &o) const
+  { return process (hb_bitwise_and, o); }
+  hb_vector_size_t operator ^ (const hb_vector_size_t &o) const
+  { return process (hb_bitwise_xor, o); }
+  hb_vector_size_t operator ~ () const
+  { return process (hb_bitwise_neg); }
+
+  private:
+  static_assert (0 == byte_size % sizeof (elt_t), "");
+  elt_t v[byte_size / sizeof (elt_t)];
+};
+
+
+#endif /* HB_ALGS_HH */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-array.hh b/src/java.desktop/share/native/libharfbuzz/hb-array.hh
index 4034d92903a8..c6766e61edd4 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-array.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-array.hh
@@ -28,7 +28,7 @@
 #define HB_ARRAY_HH
 
 #include "hb.hh"
-#include "hb-dsalgs.hh"
+#include "hb-algs.hh"
 #include "hb-iter.hh"
 #include "hb-null.hh"
 
@@ -37,22 +37,31 @@ template 
 struct hb_sorted_array_t;
 
template <typename Type>
-struct hb_array_t :
-        hb_iter_t<hb_array_t<Type>, Type>,
-        hb_iter_mixin_t<hb_array_t<Type>, Type>
+struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
 {
   /*
    * Constructors.
    */
-  hb_array_t () : arrayZ (nullptr), length (0) {}
-  hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_) {}
-  template  hb_array_t (Type (&array_)[length_]) : arrayZ (array_), length (length_) {}
+  hb_array_t () : arrayZ (nullptr), length (0), backwards_length (0) {}
+  hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_), backwards_length (0) {}
+  template 
+  hb_array_t (Type (&array_)[length_]) : arrayZ (array_), length (length_), backwards_length (0) {}
 
+  template 
+  hb_array_t (const hb_array_t &o) :
+    hb_iter_with_fallback_t (),
+    arrayZ (o.arrayZ), length (o.length), backwards_length (o.backwards_length) {}
+  template 
+  hb_array_t& operator = (const hb_array_t &o)
+  { arrayZ = o.arrayZ; length = o.length; backwards_length = o.backwards_length; return *this; }
 
   /*
    * Iterator implementation.
    */
-  typedef Type __item_type__;
+  typedef Type& __item_t__;
+  static constexpr bool is_random_access_iterator = true;
   Type& __item_at__ (unsigned i) const
   {
     if (unlikely (i >= length)) return CrapOrNull (Type);
@@ -63,16 +72,25 @@ struct hb_array_t :
     if (unlikely (n > length))
       n = length;
     length -= n;
+    backwards_length += n;
     arrayZ += n;
   }
   void __rewind__ (unsigned n)
   {
-    if (unlikely (n > length))
-      n = length;
-    length -= n;
+    if (unlikely (n > backwards_length))
+      n = backwards_length;
+    length += n;
+    backwards_length -= n;
+    arrayZ -= n;
   }
   unsigned __len__ () const { return length; }
-  bool __random_access__ () const { return true; }
+  /* Ouch. The operator== compares the contents of the array.  For range-based for loops,
+   * it's best if we can just compare arrayZ, though comparing contents is still fast,
+   * but also would require that Type has operator==.  As such, we optimize this operator
+   * for range-based for loop and just compare arrayZ.  No need to compare length, as we
+   * assume we're only compared to .end(). */
+  bool operator != (const hb_array_t& o) const
+  { return arrayZ != o.arrayZ; }
 
   /* Extra operators.
    */
@@ -80,70 +98,105 @@ struct hb_array_t :
   operator hb_array_t<const Type> () { return hb_array_t<const Type> (arrayZ, length); }
   template <typename T> operator T * () const { return arrayZ; }
 
+  HB_INTERNAL bool operator == (const hb_array_t &o) const;
+
+  uint32_t hash () const {
+    uint32_t current = 0;
+    for (unsigned int i = 0; i < this->length; i++) {
+      current = current * 31 + hb_hash (this->arrayZ[i]);
+    }
+    return current;
+  }
+
   /*
    * Compare, Sort, and Search.
    */
 
   /* Note: our compare is NOT lexicographic; it also does NOT call Type::cmp. */
-  int cmp (const hb_array_t<Type> &a) const
+  int cmp (const hb_array_t &a) const
   {
     if (length != a.length)
       return (int) a.length - (int) length;
     return hb_memcmp (a.arrayZ, arrayZ, get_size ());
   }
-  static int cmp (const void *pa, const void *pb)
+  HB_INTERNAL static int cmp (const void *pa, const void *pb)
   {
-    hb_array_t<Type> *a = (hb_array_t<Type> *) pa;
-    hb_array_t<Type> *b = (hb_array_t<Type> *) pb;
+    hb_array_t *a = (hb_array_t *) pa;
+    hb_array_t *b = (hb_array_t *) pb;
     return b->cmp (*a);
   }
 
   template <typename T>
   Type *lsearch (const T &x, Type *not_found = nullptr)
   {
-    unsigned int count = length;
-    for (unsigned int i = 0; i < count; i++)
-      if (!this->arrayZ[i].cmp (x))
-        return &this->arrayZ[i];
-    return not_found;
+    unsigned i;
+    return lfind (x, &i) ? &this->arrayZ[i] : not_found;
   }
   template <typename T>
   const Type *lsearch (const T &x, const Type *not_found = nullptr) const
   {
-    unsigned int count = length;
-    for (unsigned int i = 0; i < count; i++)
+    unsigned i;
+    return lfind (x, &i) ? &this->arrayZ[i] : not_found;
+  }
+  template <typename T>
+  bool lfind (const T &x, unsigned *pos = nullptr) const
+  {
+    for (unsigned i = 0; i < length; ++i)
       if (!this->arrayZ[i].cmp (x))
-        return &this->arrayZ[i];
-    return not_found;
+      {
+        if (pos)
+          *pos = i;
+        return true;
+      }
+
+    return false;
   }
 
   hb_sorted_array_t<Type> qsort (int (*cmp_)(const void*, const void*))
   {
     if (likely (length))
-      ::qsort (arrayZ, length, this->item_size, cmp_);
+      hb_qsort (arrayZ, length, this->get_item_size (), cmp_);
     return hb_sorted_array_t<Type> (*this);
   }
   hb_sorted_array_t<Type> qsort ()
   {
     if (likely (length))
-      ::qsort (arrayZ, length, this->item_size, Type::cmp);
+      hb_qsort (arrayZ, length, this->get_item_size (), Type::cmp);
     return hb_sorted_array_t<Type> (*this);
   }
   void qsort (unsigned int start, unsigned int end)
   {
-    end = MIN (end, length);
+    end = hb_min (end, length);
     assert (start <= end);
     if (likely (start < end))
-      ::qsort (arrayZ + start, end - start, this->item_size, Type::cmp);
+      hb_qsort (arrayZ + start, end - start, this->get_item_size (), Type::cmp);
   }
 
   /*
    * Other methods.
    */
 
-  unsigned int get_size () const { return length * this->item_size; }
+  unsigned int get_size () const { return length * this->get_item_size (); }
 
-  hb_array_t<Type> sub_array (unsigned int start_offset = 0, unsigned int *seg_count = nullptr /* IN/OUT */) const
+  /*
+   * Reverse the order of items in this array in the range [start, end).
+   */
+  void reverse (unsigned start = 0, unsigned end = -1)
+  {
+    start = hb_min (start, length);
+    end = hb_min (end, length);
+
+    if (end < start + 2)
+      return;
+
+    for (unsigned lhs = start, rhs = end - 1; lhs < rhs; lhs++, rhs--) {
+      Type temp = arrayZ[rhs];
+      arrayZ[rhs] = arrayZ[lhs];
+      arrayZ[lhs] = temp;
+    }
+  }
+
+  hb_array_t sub_array (unsigned int start_offset = 0, unsigned int *seg_count = nullptr /* IN/OUT */) const
   {
     if (!start_offset && !seg_count)
       return *this;
@@ -154,16 +207,45 @@ struct hb_array_t :
     else
       count -= start_offset;
     if (seg_count)
-      count = *seg_count = MIN (count, *seg_count);
-    return hb_array_t<Type> (arrayZ + start_offset, count);
+      count = *seg_count = hb_min (count, *seg_count);
+    return hb_array_t (arrayZ + start_offset, count);
   }
-  hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int seg_count) const
+  hb_array_t sub_array (unsigned int start_offset, unsigned int seg_count) const
   { return sub_array (start_offset, &seg_count); }
 
+  hb_array_t truncate (unsigned length) const { return sub_array (0, length); }
+
+  template <typename T>
+  const T *as () const
+  { return length < hb_null_size (T) ? &Null (T) : reinterpret_cast<const T *> (arrayZ); }
+
+  template <typename T>
+  bool check_range (const T *p, unsigned int size = T::static_size) const
+  {
+    return arrayZ <= ((const char *) p)
+        && ((const char *) p) <= arrayZ + length
+        && (unsigned int) (arrayZ + length - (const char *) p) >= size;
+  }
+
   /* Only call if you allocated the underlying array using malloc() or similar. */
   void free ()
   { ::free ((void *) arrayZ); arrayZ = nullptr; length = 0; }
 
+  template <typename hb_serialize_context_t>
+  hb_array_t copy (hb_serialize_context_t *c) const
+  {
+    TRACE_SERIALIZE (this);
+    auto* out = c->start_embed (arrayZ);
+    if (unlikely (!c->extend_size (out, get_size ()))) return_trace (hb_array_t ());
+    for (unsigned i = 0; i < length; i++)
+      out[i] = arrayZ[i]; /* TODO: add version that calls c->copy() */
+    return_trace (hb_array_t (out, length));
+  }
+
   template <typename hb_sanitize_context_t>
   bool sanitize (hb_sanitize_context_t *c) const
   { return c->check_array (arrayZ, length); }
@@ -175,6 +257,7 @@ struct hb_array_t :
   public:
   Type *arrayZ;
   unsigned int length;
+  unsigned int backwards_length;
 };
 template <typename T> inline hb_array_t<T>
 hb_array (T *array, unsigned int length)
@@ -183,7 +266,6 @@ template <typename T, unsigned int length_> inline hb_array_t<T>
 hb_array (T (&array_)[length_])
 { return hb_array_t<T> (array_); }
 
-
 enum hb_bfind_not_found_t
 {
   HB_BFIND_NOT_FOUND_DONT_STORE,
@@ -193,20 +275,40 @@ enum hb_bfind_not_found_t
 
 template <typename Type>
 struct hb_sorted_array_t :
-        hb_sorted_iter_t<hb_sorted_array_t<Type>, Type>,
-        hb_array_t<Type>,
-        hb_iter_mixin_t<hb_sorted_array_t<Type>, Type>
+        hb_iter_t<hb_sorted_array_t<Type>, Type&>,
+        hb_array_t<Type>
 {
+  typedef hb_iter_t<hb_sorted_array_t, Type&> iter_base_t;
+  HB_ITER_USING (iter_base_t);
+  static constexpr bool is_random_access_iterator = true;
+  static constexpr bool is_sorted_iterator = true;
+
   hb_sorted_array_t () : hb_array_t<Type> () {}
-  hb_sorted_array_t (const hb_array_t<Type> &o) : hb_array_t<Type> (o) {}
   hb_sorted_array_t (Type *array_, unsigned int length_) : hb_array_t<Type> (array_, length_) {}
-  template <unsigned int length_> hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}
+  template <unsigned int length_>
+  hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}
+
+  template <typename U,
+            hb_enable_if (hb_is_cr_convertible(U, Type))>
+  hb_sorted_array_t (const hb_array_t<U> &o) :
+    hb_iter_t<hb_sorted_array_t, Type&> (),
+    hb_array_t<Type> (o) {}
+  template <typename U,
+            hb_enable_if (hb_is_cr_convertible(U, Type))>
+  hb_sorted_array_t& operator = (const hb_array_t<U> &o)
+  { hb_array_t<Type> (*this) = o; return *this; }
+
+  /* Iterator implementation. */
+  bool operator != (const hb_sorted_array_t& o) const
+  { return this->arrayZ != o.arrayZ || this->length != o.length; }
 
-  hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int *seg_count /* IN/OUT */) const
-  { return hb_sorted_array_t<Type> (((const hb_array_t<Type> *) (this))->sub_array (start_offset, seg_count)); }
-  hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int seg_count) const
+  hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int *seg_count /* IN/OUT */) const
+  { return hb_sorted_array_t (((const hb_array_t<Type> *) (this))->sub_array (start_offset, seg_count)); }
+  hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int seg_count) const
   { return sub_array (start_offset, &seg_count); }
 
+  hb_sorted_array_t truncate (unsigned length) const { return sub_array (0, length); }
+
   template <typename T>
   Type *bsearch (const T &x, Type *not_found = nullptr)
   {
@@ -221,26 +323,18 @@ struct hb_sorted_array_t :
   }
   template <typename T>
   bool bfind (const T &x, unsigned int *i = nullptr,
-                     hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE,
-                     unsigned int to_store = (unsigned int) -1) const
+              hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE,
+              unsigned int to_store = (unsigned int) -1) const
   {
-    int min = 0, max = (int) this->length - 1;
-    const Type *array = this->arrayZ;
-    while (min <= max)
+    unsigned pos;
+
+    if (bsearch_impl (x, &pos))
     {
-      int mid = ((unsigned int) min + (unsigned int) max) / 2;
-      int c = array[mid].cmp (x);
-      if (c < 0)
-        max = mid - 1;
-      else if (c > 0)
-        min = mid + 1;
-      else
-      {
-        if (i)
-          *i = mid;
-        return true;
-      }
+      if (i)
+        *i = pos;
+      return true;
     }
+
     if (i)
     {
       switch (not_found)
@@ -253,14 +347,22 @@ struct hb_sorted_array_t :
           break;
 
         case HB_BFIND_NOT_FOUND_STORE_CLOSEST:
-          if (max < 0 || (max < (int) this->length && array[max].cmp (x) > 0))
-            max++;
-          *i = max;
+          *i = pos;
           break;
       }
     }
     return false;
   }
+  template <typename T>
+  bool bsearch_impl (const T &x, unsigned *pos) const
+  {
+    return hb_bsearch_impl (pos,
+                            x,
+                            this->arrayZ,
+                            this->length,
+                            sizeof (Type),
+                            _hb_cmp_method<T, Type>);
+  }
 };
 template <typename T> inline hb_sorted_array_t<T>
 hb_sorted_array (T *array, unsigned int length)
@@ -269,9 +371,38 @@ template <typename T, unsigned int length_> inline hb_sorted_array_t<T>
 hb_sorted_array (T (&array_)[length_])
 { return hb_sorted_array_t<T> (array_); }
 
+template <typename T>
+bool hb_array_t<T>::operator == (const hb_array_t<T> &o) const
+{
+  if (o.length != this->length) return false;
+  for (unsigned int i = 0; i < this->length; i++) {
+    if (this->arrayZ[i] != o.arrayZ[i]) return false;
+  }
+  return true;
+}
+
+/* TODO Specialize operator== for hb_bytes_t and hb_ubytes_t. */
+
+template <>
+inline uint32_t hb_array_t<const char>::hash () const {
+  uint32_t current = 0;
+  for (unsigned int i = 0; i < this->length; i++)
+    current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
+  return current;
+}
+
+template <>
+inline uint32_t hb_array_t<const unsigned char>::hash () const {
+  uint32_t current = 0;
+  for (unsigned int i = 0; i < this->length; i++)
+    current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
+  return current;
+}
+
 
 typedef hb_array_t<const char> hb_bytes_t;
 typedef hb_array_t<const unsigned char> hb_ubytes_t;
 
 
+
 #endif /* HB_ARRAY_HH */
diff --git a/src/java.desktop/share/native/libharfbuzz/hb-atomic.hh b/src/java.desktop/share/native/libharfbuzz/hb-atomic.hh
index d1ff7a722ef9..a6877f8e6745 100644
--- a/src/java.desktop/share/native/libharfbuzz/hb-atomic.hh
+++ b/src/java.desktop/share/native/libharfbuzz/hb-atomic.hh
@@ -33,6 +33,7 @@
 #define HB_ATOMIC_HH
 
 #include "hb.hh"
+#include "hb-meta.hh"
 
 
 /*
@@ -85,11 +86,11 @@ _hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
 #define hb_atomic_int_impl_add(AI, V)           (reinterpret_cast<std::atomic<int> *> (AI)->fetch_add ((V), std::memory_order_acq_rel))
 #define hb_atomic_int_impl_set_relaxed(AI, V)   (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_relaxed))
 #define hb_atomic_int_impl_set(AI, V)           (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_release))
-#define hb_atomic_int_impl_get_relaxed(AI)      (reinterpret_cast<std::atomic<int> *> (AI)->load (std::memory_order_relaxed))
-#define hb_atomic_int_impl_get(AI)              (reinterpret_cast<std::atomic<int> *> (AI)->load (std::memory_order_acquire))
+#define hb_atomic_int_impl_get_relaxed(AI)      (reinterpret_cast<std::atomic<int> const *> (AI)->load (std::memory_order_relaxed))
+#define hb_atomic_int_impl_get(AI)              (reinterpret_cast<std::atomic<int> const *> (AI)->load (std::memory_order_acquire))
 
 #define hb_atomic_ptr_impl_set_relaxed(P, V)    (reinterpret_cast<std::atomic<void *> *> (P)->store ((V), std::memory_order_relaxed))
-#define hb_atomic_ptr_impl_get_relaxed(P)       (reinterpret_cast<std::atomic<void *> *> (P)->load (std::memory_order_relaxed))
+#define hb_atomic_ptr_impl_get_relaxed(P)       (reinterpret_cast<std::atomic<void *> const *> (P)->load (std::memory_order_relaxed))
 #define hb_atomic_ptr_impl_get(P)               (reinterpret_cast<std::atomic<void *> *> (P)->load (std::memory_order_acquire))
 static inline bool
 _hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
@@ -106,7 +107,7 @@ _hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
 
 static inline void _hb_memory_barrier ()
 {
-#if !defined(MemoryBarrier)
+#if !defined(MemoryBarrier) && !defined(__MINGW32_VERSION)
   /* MinGW has a convoluted history of supporting MemoryBarrier. */
   LONG dummy = 0;
   InterlockedExchange (&dummy, 1);
@@ -211,25 +212,19 @@ static inline bool _hb_compare_and_swaplp (long *P, long O, long N)
 static_assert ((sizeof (long) == sizeof (void *)), "");
 
 
-#elif !defined(HB_NO_MT)
-
-#define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */
-
-#define _hb_memory_barrier()
+#elif defined(HB_NO_MT)
 
 #define hb_atomic_int_impl_add(AI, V)           ((*(AI) += (V)) - (V))
 
-#define hb_atomic_ptr_impl_cmpexch(P,O,N)       (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
-
-
-#else /* HB_NO_MT */
+#define _hb_memory_barrier()                    do {} while (0)
 
-#define hb_atomic_int_impl_add(AI, V)           ((*(AI) += (V)) - (V))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N)       (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
 
-#define _hb_memory_barrier()
 
-#define hb_atomic_ptr_impl_cmpexch(P,O,N)       (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
+#else
 
+#error "Could not find any system to define atomic_int macros."
+#error "Check hb-atomic.hh for possible resolutions."
 
 #endif
 
@@ -282,7 +277,7 @@ struct hb_atomic_int_t
 template <typename P>
 struct hb_atomic_ptr_t
 {
-  typedef typename hb_remove_pointer (P) T;
+  typedef hb_remove_pointer

T; void init (T* v_ = nullptr) { set_relaxed (v_); } void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-bimap.hh b/src/java.desktop/share/native/libharfbuzz/hb-bimap.hh new file mode 100644 index 000000000000..ff47c6f2476a --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-bimap.hh @@ -0,0 +1,166 @@ +/* + * Copyright © 2019 Adobe Inc. + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Adobe Author(s): Michiharu Ariza + */ + +#ifndef HB_BIMAP_HH +#define HB_BIMAP_HH + +#include "hb.hh" +#include "hb-map.hh" + +/* Bi-directional map */ +struct hb_bimap_t +{ + hb_bimap_t () { init (); } + ~hb_bimap_t () { fini (); } + + void init () + { + forw_map.init (); + back_map.init (); + } + + void fini () + { + forw_map.fini (); + back_map.fini (); + } + + void reset () + { + forw_map.reset (); + back_map.reset (); + } + + bool in_error () const { return forw_map.in_error () || back_map.in_error (); } + + void set (hb_codepoint_t lhs, hb_codepoint_t rhs) + { + if (unlikely (lhs == HB_MAP_VALUE_INVALID)) return; + if (unlikely (rhs == HB_MAP_VALUE_INVALID)) { del (lhs); return; } + forw_map.set (lhs, rhs); + back_map.set (rhs, lhs); + } + + hb_codepoint_t get (hb_codepoint_t lhs) const { return forw_map.get (lhs); } + hb_codepoint_t backward (hb_codepoint_t rhs) const { return back_map.get (rhs); } + + hb_codepoint_t operator [] (hb_codepoint_t lhs) const { return get (lhs); } + bool has (hb_codepoint_t lhs, hb_codepoint_t *vp = nullptr) const { return forw_map.has (lhs, vp); } + + void del (hb_codepoint_t lhs) + { + back_map.del (get (lhs)); + forw_map.del (lhs); + } + + void clear () + { + forw_map.clear (); + back_map.clear (); + } + + bool is_empty () const { return get_population () == 0; } + + unsigned int get_population () const { return forw_map.get_population (); } + + protected: + hb_map_t forw_map; + hb_map_t back_map; +}; + +/* Inremental bimap: only lhs is given, rhs is incrementally assigned */ +struct hb_inc_bimap_t : hb_bimap_t +{ + hb_inc_bimap_t () { init (); } + + void init () + { + hb_bimap_t::init (); + next_value = 0; + } + + /* Add a mapping from lhs to rhs with a unique value if lhs is unknown. + * Return the rhs value as the result. 
+ */ + hb_codepoint_t add (hb_codepoint_t lhs) + { + hb_codepoint_t rhs = forw_map[lhs]; + if (rhs == HB_MAP_VALUE_INVALID) + { + rhs = next_value++; + set (lhs, rhs); + } + return rhs; + } + + hb_codepoint_t skip () + { return next_value++; } + + hb_codepoint_t get_next_value () const + { return next_value; } + + void add_set (const hb_set_t *set) + { + hb_codepoint_t i = HB_SET_VALUE_INVALID; + while (hb_set_next (set, &i)) add (i); + } + + /* Create an identity map. */ + bool identity (unsigned int size) + { + clear (); + for (hb_codepoint_t i = 0; i < size; i++) set (i, i); + return !in_error (); + } + + protected: + static int cmp_id (const void* a, const void* b) + { return (int)*(const hb_codepoint_t *)a - (int)*(const hb_codepoint_t *)b; } + + public: + /* Optional: after finished adding all mappings in a random order, + * reassign rhs to lhs so that they are in the same order. */ + void sort () + { + hb_codepoint_t count = get_population (); + hb_vector_t work; + work.resize (count); + + for (hb_codepoint_t rhs = 0; rhs < count; rhs++) + work[rhs] = back_map[rhs]; + + work.qsort (cmp_id); + + clear (); + for (hb_codepoint_t rhs = 0; rhs < count; rhs++) + set (work[rhs], rhs); + } + + protected: + unsigned int next_value; +}; + +#endif /* HB_BIMAP_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-blob.cc b/src/java.desktop/share/native/libharfbuzz/hb-blob.cc index ffeecc2a1682..fc18b61c9131 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-blob.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-blob.cc @@ -25,18 +25,6 @@ * Red Hat Author(s): Behdad Esfahbod */ - -/* https://github.com/harfbuzz/harfbuzz/issues/1308 - * http://www.gnu.org/software/libc/manual/html_node/Feature-Test-Macros.html - * https://www.oracle.com/technetwork/articles/servers-storage-dev/standardheaderfiles-453865.html - */ -#ifndef _POSIX_C_SOURCE -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-macros" -#define _POSIX_C_SOURCE 200809L -#pragma GCC diagnostic pop -#endif - #include "hb.hh" #include "hb-blob.hh" @@ -48,7 +36,6 @@ #endif /* HAVE_SYS_MMAN_H */ #include -#include #include @@ -155,7 +142,7 @@ hb_blob_create_sub_blob (hb_blob_t *parent, hb_blob_make_immutable (parent); blob = hb_blob_create (parent->data + offset, - MIN (length, parent->length - offset), + hb_min (length, parent->length - offset), HB_MEMORY_MODE_READONLY, hb_blob_reference (parent), _hb_blob_destroy); @@ -202,7 +189,7 @@ hb_blob_copy_writable_or_fail (hb_blob_t *blob) hb_blob_t * hb_blob_get_empty () { - return const_cast (&Null(hb_blob_t)); + return const_cast (&Null (hb_blob_t)); } /** @@ -487,7 +474,11 @@ hb_blob_t::try_make_writable () * Mmap */ +#ifndef HB_NO_OPEN #ifdef HAVE_MMAP +# if !defined(HB_NO_RESOURCE_FORK) && defined(__APPLE__) +# include +# endif # include # include # include @@ -532,6 +523,39 @@ _hb_mapped_file_destroy (void *file_) } #endif +#ifdef _PATH_RSRCFORKSPEC +static int +_open_resource_fork (const char *file_name, hb_mapped_file_t *file) +{ + size_t name_len = strlen (file_name); + size_t len = name_len + sizeof (_PATH_RSRCFORKSPEC); + + char *rsrc_name = (char *) malloc (len); + if (unlikely (!rsrc_name)) return -1; + + strncpy (rsrc_name, file_name, name_len); + strncpy (rsrc_name + name_len, _PATH_RSRCFORKSPEC, + sizeof (_PATH_RSRCFORKSPEC) - 1); + + int fd = open (rsrc_name, O_RDONLY | O_BINARY, 0); + free (rsrc_name); + + if (fd != -1) + { + struct stat st; + if (fstat (fd, &st) != -1) + file->length = (unsigned long) st.st_size; + else + { + close (fd); 
+ fd = -1; + } + } + + return fd; +} +#endif + /** * hb_blob_create_from_file: * @file_name: font filename. @@ -556,6 +580,19 @@ hb_blob_create_from_file (const char *file_name) if (unlikely (fstat (fd, &st) == -1)) goto fail; file->length = (unsigned long) st.st_size; + +#ifdef _PATH_RSRCFORKSPEC + if (unlikely (file->length == 0)) + { + int rfd = _open_resource_fork (file_name, file); + if (rfd != -1) + { + close (fd); + fd = rfd; + } + } +#endif + file->contents = (char *) mmap (nullptr, file->length, PROT_READ, MAP_PRIVATE | MAP_NORESERVE, fd, 0); @@ -579,9 +616,9 @@ hb_blob_create_from_file (const char *file_name) HANDLE fd; unsigned int size = strlen (file_name) + 1; wchar_t * wchar_file_name = (wchar_t *) malloc (sizeof (wchar_t) * size); - if (unlikely (wchar_file_name == nullptr)) goto fail_without_close; + if (unlikely (!wchar_file_name)) goto fail_without_close; mbstowcs (wchar_file_name, file_name, size); -#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY==WINAPI_FAMILY_PC_APP || WINAPI_FAMILY==WINAPI_FAMILY_PHONE_APP) +#if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) { CREATEFILE2_EXTENDED_PARAMETERS ceparams = { 0 }; ceparams.dwSize = sizeof(CREATEFILE2_EXTENDED_PARAMETERS); @@ -602,7 +639,7 @@ hb_blob_create_from_file (const char *file_name) if (unlikely (fd == INVALID_HANDLE_VALUE)) goto fail_without_close; -#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY==WINAPI_FAMILY_PC_APP || WINAPI_FAMILY==WINAPI_FAMILY_PHONE_APP) +#if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) { LARGE_INTEGER length; GetFileSizeEx (fd, &length); @@ -613,14 +650,14 @@ hb_blob_create_from_file (const char *file_name) file->length = (unsigned long) GetFileSize (fd, nullptr); file->mapping = CreateFileMapping (fd, nullptr, PAGE_READONLY, 0, 0, nullptr); #endif - if (unlikely (file->mapping == nullptr)) goto fail; + if (unlikely (!file->mapping)) goto fail; -#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY==WINAPI_FAMILY_PC_APP || WINAPI_FAMILY==WINAPI_FAMILY_PHONE_APP) +#if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) file->contents = (char *) MapViewOfFileFromApp (file->mapping, FILE_MAP_READ, 0, 0); #else file->contents = (char *) MapViewOfFile (file->mapping, FILE_MAP_READ, 0, 0, 0); #endif - if (unlikely (file->contents == nullptr)) goto fail; + if (unlikely (!file->contents)) goto fail; CloseHandle (fd); return hb_blob_create (file->contents, file->length, @@ -638,10 +675,10 @@ hb_blob_create_from_file (const char *file_name) It's used as a fallback for systems without mmap or to read from pipes */ unsigned long len = 0, allocated = BUFSIZ * 16; char *data = (char *) malloc (allocated); - if (unlikely (data == nullptr)) return hb_blob_get_empty (); + if (unlikely (!data)) return hb_blob_get_empty (); FILE *fp = fopen (file_name, "rb"); - if (unlikely (fp == nullptr)) goto fread_fail_without_close; + if (unlikely (!fp)) goto fread_fail_without_close; while (!feof (fp)) { @@ -652,7 +689,7 @@ hb_blob_create_from_file (const char *file_name) can cover files like that but lets limit our fallback reader */ if (unlikely (allocated > (2 << 28))) goto fread_fail; char *new_data = (char *) realloc (data, allocated); - if (unlikely (new_data == nullptr)) goto fread_fail; + if (unlikely (!new_data)) goto fread_fail; data = new_data; } @@ -666,6 +703,7 @@ hb_blob_create_from_file (const char *file_name) len += addition; } + fclose (fp); return hb_blob_create (data, len, HB_MEMORY_MODE_WRITABLE, data, (hb_destroy_func_t) free); @@ -676,3 +714,4 @@ hb_blob_create_from_file (const char *file_name) free 
(data); return hb_blob_get_empty (); } +#endif /* !HB_NO_OPEN */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-blob.h b/src/java.desktop/share/native/libharfbuzz/hb-blob.h index a714fb2057b2..ddbcd1a999c9 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-blob.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-blob.h @@ -71,6 +71,9 @@ hb_blob_create (const char *data, void *user_data, hb_destroy_func_t destroy); +HB_EXTERN hb_blob_t * +hb_blob_create_from_file (const char *file_name); + /* Always creates with MEMORY_MODE_READONLY. * Even if the parent blob is writable, we don't * want the user of the sub-blob to be able to @@ -123,9 +126,6 @@ hb_blob_get_data (hb_blob_t *blob, unsigned int *length); HB_EXTERN char * hb_blob_get_data_writable (hb_blob_t *blob, unsigned int *length); -HB_EXTERN hb_blob_t * -hb_blob_create_from_file (const char *file_name); - HB_END_DECLS #endif /* HB_BLOB_H */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-blob.hh b/src/java.desktop/share/native/libharfbuzz/hb-blob.hh index 4ea13f813701..d85bd823b004 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-blob.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-blob.hh @@ -54,13 +54,9 @@ struct hb_blob_t HB_INTERNAL bool try_make_writable_inplace (); HB_INTERNAL bool try_make_writable_inplace_unix (); + hb_bytes_t as_bytes () const { return hb_bytes_t (data, length); } template - const Type* as () const - { - return length < hb_null_size (Type) ? &Null(Type) : reinterpret_cast (data); - } - hb_bytes_t as_bytes () const - { return hb_bytes_t (data, length); } + const Type* as () const { return as_bytes ().as (); } public: hb_object_header_t header; @@ -81,7 +77,7 @@ struct hb_blob_t template struct hb_blob_ptr_t { - typedef typename hb_remove_pointer (P) T; + typedef hb_remove_pointer

T; hb_blob_ptr_t (hb_blob_t *b_ = nullptr) : b (b_) {} hb_blob_t * operator = (hb_blob_t *b_) { return b = b_; } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-buffer-serialize.cc b/src/java.desktop/share/native/libharfbuzz/hb-buffer-serialize.cc index 14a9a568c7e6..52dbb84bb3f5 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-buffer-serialize.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-buffer-serialize.cc @@ -24,6 +24,10 @@ * Google Author(s): Behdad Esfahbod */ +#include "hb.hh" + +#ifndef HB_NO_BUFFER_SERIALIZE + #include "hb-buffer.hh" @@ -85,7 +89,7 @@ hb_buffer_serialize_format_from_string (const char *str, int len) const char * hb_buffer_serialize_format_to_string (hb_buffer_serialize_format_t format) { - switch (format) + switch ((unsigned) format) { case HB_BUFFER_SERIALIZE_FORMAT_TEXT: return serialize_formats[0]; case HB_BUFFER_SERIALIZE_FORMAT_JSON: return serialize_formats[1]; @@ -138,34 +142,34 @@ _hb_buffer_serialize_glyphs_json (hb_buffer_t *buffer, *p++ = '"'; } else - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint)); if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_CLUSTERS)) { - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"cl\":%u", info[i].cluster)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"cl\":%u", info[i].cluster)); } if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_POSITIONS)) { - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"dx\":%d,\"dy\":%d", + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"dx\":%d,\"dy\":%d", x+pos[i].x_offset, y+pos[i].y_offset)); if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_ADVANCES)) - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"ax\":%d,\"ay\":%d", + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"ax\":%d,\"ay\":%d", pos[i].x_advance, pos[i].y_advance)); } if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_FLAGS) { if (info[i].mask & HB_GLYPH_FLAG_DEFINED) - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"fl\":%u", info[i].mask & HB_GLYPH_FLAG_DEFINED)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"fl\":%u", info[i].mask & HB_GLYPH_FLAG_DEFINED)); } if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_EXTENTS) { hb_glyph_extents_t extents; hb_font_get_glyph_extents(font, info[i].codepoint, &extents); - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"xb\":%d,\"yb\":%d", + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"xb\":%d,\"yb\":%d", extents.x_bearing, extents.y_bearing)); - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"w\":%d,\"h\":%d", + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"w\":%d,\"h\":%d", extents.width, extents.height)); } @@ -224,37 +228,37 @@ _hb_buffer_serialize_glyphs_text (hb_buffer_t *buffer, p += strlen (p); } else - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint)); if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_CLUSTERS)) { - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "=%u", info[i].cluster)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "=%u", info[i].cluster)); } if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_POSITIONS)) { if (x+pos[i].x_offset || y+pos[i].y_offset) - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "@%d,%d", x+pos[i].x_offset, y+pos[i].y_offset)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH 
(b) - (p - b), "@%d,%d", x+pos[i].x_offset, y+pos[i].y_offset)); if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_ADVANCES)) { *p++ = '+'; - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%d", pos[i].x_advance)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%d", pos[i].x_advance)); if (pos[i].y_advance) - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",%d", pos[i].y_advance)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",%d", pos[i].y_advance)); } } if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_FLAGS) { if (info[i].mask & HB_GLYPH_FLAG_DEFINED) - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "#%X", info[i].mask &HB_GLYPH_FLAG_DEFINED)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "#%X", info[i].mask &HB_GLYPH_FLAG_DEFINED)); } if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_EXTENTS) { hb_glyph_extents_t extents; hb_font_get_glyph_extents(font, info[i].codepoint, &extents); - p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "<%d,%d,%d,%d>", extents.x_bearing, extents.y_bearing, extents.width, extents.height)); + p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "<%d,%d,%d,%d>", extents.x_bearing, extents.y_bearing, extents.width, extents.height)); } unsigned int l = p - b; @@ -344,8 +348,8 @@ hb_buffer_serialize_glyphs (hb_buffer_t *buffer, if (buf_size) *buf = '\0'; - assert ((!buffer->len && buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID) || - buffer->content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS); + assert ((!buffer->len && (buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID)) || + (buffer->content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS)); if (!buffer->have_positions) flags |= HB_BUFFER_SERIALIZE_FLAG_NO_POSITIONS; @@ -375,43 +379,24 @@ hb_buffer_serialize_glyphs (hb_buffer_t *buffer, } } - -static hb_bool_t -parse_uint (const char *pp, const char *end, uint32_t *pv) +static bool +parse_int (const char *pp, const char *end, int32_t *pv) { - char buf[32]; - unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - pp)); - strncpy (buf, pp, len); - buf[len] = '\0'; - - char *p = buf; - char *pend = p; - uint32_t v; - - errno = 0; - v = strtol (p, &pend, 10); - if (errno || p == pend || pend - p != end - pp) + int v; + const char *p = pp; + if (unlikely (!hb_parse_int (&p, end, &v, true/* whole buffer */))) return false; *pv = v; return true; } -static hb_bool_t -parse_int (const char *pp, const char *end, int32_t *pv) +static bool +parse_uint (const char *pp, const char *end, uint32_t *pv) { - char buf[32]; - unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - pp)); - strncpy (buf, pp, len); - buf[len] = '\0'; - - char *p = buf; - char *pend = p; - int32_t v; - - errno = 0; - v = strtol (p, &pend, 10); - if (errno || p == pend || pend - p != end - pp) + unsigned int v; + const char *p = pp; + if (unlikely (!hb_parse_uint (&p, end, &v, true/* whole buffer */))) return false; *pv = v; @@ -449,8 +434,8 @@ hb_buffer_deserialize_glyphs (hb_buffer_t *buffer, end_ptr = &end; *end_ptr = buf; - assert ((!buffer->len && buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID) || - buffer->content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS); + assert ((!buffer->len && (buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID)) || + (buffer->content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS)); if (buf_len == -1) buf_len = strlen (buf); @@ -484,3 +469,6 @@ hb_buffer_deserialize_glyphs (hb_buffer_t *buffer, } } + + +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-buffer.cc 
b/src/java.desktop/share/native/libharfbuzz/hb-buffer.cc index f9a46d12b3c2..2da3c486e226 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-buffer.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-buffer.cc @@ -324,7 +324,7 @@ hb_buffer_t::clear_positions () out_len = 0; out_info = info; - memset (pos, 0, sizeof (pos[0]) * len); + hb_memset (pos, 0, sizeof (pos[0]) * len); } void @@ -438,13 +438,6 @@ hb_buffer_t::set_masks (hb_mask_t value, if (!mask) return; - if (cluster_start == 0 && cluster_end == (unsigned int)-1) { - unsigned int count = len; - for (unsigned int i = 0; i < count; i++) - info[i].mask = (info[i].mask & not_mask) | value; - return; - } - unsigned int count = len; for (unsigned int i = 0; i < count; i++) if (cluster_start <= info[i].cluster && info[i].cluster < cluster_end) @@ -455,27 +448,13 @@ void hb_buffer_t::reverse_range (unsigned int start, unsigned int end) { - unsigned int i, j; - if (end - start < 2) return; - for (i = start, j = end - 1; i < j; i++, j--) { - hb_glyph_info_t t; - - t = info[i]; - info[i] = info[j]; - info[j] = t; - } + hb_array_t (info, len).reverse (start, end); if (have_positions) { - for (i = start, j = end - 1; i < j; i++, j--) { - hb_glyph_position_t t; - - t = pos[i]; - pos[i] = pos[j]; - pos[j] = t; - } + hb_array_t (pos, len).reverse (start, end); } } @@ -524,7 +503,7 @@ hb_buffer_t::merge_clusters_impl (unsigned int start, unsigned int cluster = info[start].cluster; for (unsigned int i = start + 1; i < end; i++) - cluster = MIN (cluster, info[i].cluster); + cluster = hb_min (cluster, info[i].cluster); /* Extend end */ while (end < len && info[end - 1].cluster == info[end].cluster) @@ -555,7 +534,7 @@ hb_buffer_t::merge_out_clusters (unsigned int start, unsigned int cluster = out_info[start].cluster; for (unsigned int i = start + 1; i < end; i++) - cluster = MIN (cluster, out_info[i].cluster); + cluster = hb_min (cluster, out_info[i].cluster); /* Extend start */ while (start && out_info[start - 1].cluster == out_info[start].cluster) @@ -612,7 +591,7 @@ hb_buffer_t::delete_glyph () void hb_buffer_t::unsafe_to_break_impl (unsigned int start, unsigned int end) { - unsigned int cluster = (unsigned int) -1; + unsigned int cluster = UINT_MAX; cluster = _unsafe_to_break_find_min_cluster (info, start, end, cluster); _unsafe_to_break_set_mask (info, start, end, cluster); } @@ -628,7 +607,7 @@ hb_buffer_t::unsafe_to_break_from_outbuffer (unsigned int start, unsigned int en assert (start <= out_len); assert (idx <= end); - unsigned int cluster = (unsigned int) -1; + unsigned int cluster = UINT_MAX; cluster = _unsafe_to_break_find_min_cluster (out_info, start, out_len, cluster); cluster = _unsafe_to_break_find_min_cluster (info, idx, end, cluster); _unsafe_to_break_set_mask (out_info, start, out_len, cluster); @@ -638,8 +617,8 @@ hb_buffer_t::unsafe_to_break_from_outbuffer (unsigned int start, unsigned int en void hb_buffer_t::guess_segment_properties () { - assert (content_type == HB_BUFFER_CONTENT_TYPE_UNICODE || - (!len && content_type == HB_BUFFER_CONTENT_TYPE_INVALID)); + assert ((content_type == HB_BUFFER_CONTENT_TYPE_UNICODE) || + (!len && (content_type == HB_BUFFER_CONTENT_TYPE_INVALID))); /* If script is set to INVALID, guess from buffer contents */ if (props.script == HB_SCRIPT_INVALID) { @@ -736,7 +715,7 @@ hb_buffer_create () hb_buffer_t * hb_buffer_get_empty () { - return const_cast (&Null(hb_buffer_t)); + return const_cast (&Null (hb_buffer_t)); } /** @@ -776,8 +755,10 @@ hb_buffer_destroy (hb_buffer_t *buffer) free 
(buffer->info); free (buffer->pos); +#ifndef HB_NO_BUFFER_MESSAGE if (buffer->message_destroy) buffer->message_destroy (buffer->message_data); +#endif free (buffer); } @@ -956,7 +937,7 @@ hb_buffer_get_direction (hb_buffer_t *buffer) * * You can pass one of the predefined #hb_script_t values, or use * hb_script_from_string() or hb_script_from_iso15924_tag() to get the - * corresponding script from an ISO 15924 script tag. + * corresponding script from an ISO 15924 script tag. * * Since: 0.9.2 **/ @@ -999,7 +980,7 @@ hb_buffer_get_script (hb_buffer_t *buffer) * are orthogonal to the scripts, and though they are related, they are * different concepts and should not be confused with each other. * - * Use hb_language_from_string() to convert from BCP 47 language tags to + * Use hb_language_from_string() to convert from BCP 47 language tags to * #hb_language_t. * * Since: 0.9.2 @@ -1115,8 +1096,8 @@ hb_buffer_get_flags (hb_buffer_t *buffer) * Since: 0.9.42 **/ void -hb_buffer_set_cluster_level (hb_buffer_t *buffer, - hb_buffer_cluster_level_t cluster_level) +hb_buffer_set_cluster_level (hb_buffer_t *buffer, + hb_buffer_cluster_level_t cluster_level) { if (unlikely (hb_object_is_immutable (buffer))) return; @@ -1532,8 +1513,8 @@ hb_buffer_add_utf (hb_buffer_t *buffer, typedef typename utf_t::codepoint_t T; const hb_codepoint_t replacement = buffer->replacement; - assert (buffer->content_type == HB_BUFFER_CONTENT_TYPE_UNICODE || - (!buffer->len && buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID)); + assert ((buffer->content_type == HB_BUFFER_CONTENT_TYPE_UNICODE) || + (!buffer->len && (buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID))); if (unlikely (hb_object_is_immutable (buffer))) return; @@ -1736,7 +1717,7 @@ hb_buffer_add_codepoints (hb_buffer_t *buffer, * @buffer: an #hb_buffer_t. * @source: source #hb_buffer_t. * @start: start index into source buffer to copy. Use 0 to copy from start of buffer. - * @end: end index into source buffer to copy. Use (unsigned int) -1 to copy to end of buffer. + * @end: end index into source buffer to copy. Use @HB_FEATURE_GLOBAL_END to copy to end of buffer. * * Append (part of) contents of another buffer to this buffer. * @@ -1853,23 +1834,13 @@ void hb_buffer_normalize_glyphs (hb_buffer_t *buffer) { assert (buffer->have_positions); - assert (buffer->content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS || - (!buffer->len && buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID)); + assert ((buffer->content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS) || + (!buffer->len && (buffer->content_type == HB_BUFFER_CONTENT_TYPE_INVALID))); bool backward = HB_DIRECTION_IS_BACKWARD (buffer->props.direction); - unsigned int count = buffer->len; - if (unlikely (!count)) return; - hb_glyph_info_t *info = buffer->info; - - unsigned int start = 0; - unsigned int end; - for (end = start + 1; end < count; end++) - if (info[start].cluster != info[end].cluster) { - normalize_glyphs_cluster (buffer, start, end, backward); - start = end; - } - normalize_glyphs_cluster (buffer, start, end, backward); + foreach_cluster (buffer, start, end) + normalize_glyphs_cluster (buffer, start, end, backward); } void @@ -1993,6 +1964,7 @@ hb_buffer_diff (hb_buffer_t *buffer, * Debugging. */ +#ifndef HB_NO_BUFFER_MESSAGE /** * hb_buffer_set_message_func: * @buffer: an #hb_buffer_t. 
@@ -2022,11 +1994,11 @@ hb_buffer_set_message_func (hb_buffer_t *buffer, buffer->message_destroy = nullptr; } } - bool hb_buffer_t::message_impl (hb_font_t *font, const char *fmt, va_list ap) { char buf[100]; - vsnprintf (buf, sizeof (buf), fmt, ap); + vsnprintf (buf, sizeof (buf), fmt, ap); return (bool) this->message_func (this, font, buf, this->message_data); } +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-buffer.h b/src/java.desktop/share/native/libharfbuzz/hb-buffer.h index f5a724cfb43f..1a7ca4069e57 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-buffer.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-buffer.h @@ -284,6 +284,10 @@ hb_buffer_guess_segment_properties (hb_buffer_t *buffer); * space glyph and zeroing the advance width.) * @HB_BUFFER_FLAG_PRESERVE_DEFAULT_IGNORABLES takes * precedence over this flag. Since: 1.8.0 + * @HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE: + * flag indicating that a dotted circle should + * not be inserted in the rendering of incorrect + * character sequences (such at <0905 093E>). Since: 2.4 * * Since: 0.9.20 */ @@ -292,7 +296,8 @@ typedef enum { /*< flags >*/ HB_BUFFER_FLAG_BOT = 0x00000001u, /* Beginning-of-text */ HB_BUFFER_FLAG_EOT = 0x00000002u, /* End-of-text */ HB_BUFFER_FLAG_PRESERVE_DEFAULT_IGNORABLES = 0x00000004u, - HB_BUFFER_FLAG_REMOVE_DEFAULT_IGNORABLES = 0x00000008u + HB_BUFFER_FLAG_REMOVE_DEFAULT_IGNORABLES = 0x00000008u, + HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE = 0x00000010u } hb_buffer_flags_t; HB_EXTERN void diff --git a/src/java.desktop/share/native/libharfbuzz/hb-buffer.hh b/src/java.desktop/share/native/libharfbuzz/hb-buffer.hh index 6416a5328d9a..c4ef466b7a72 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-buffer.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-buffer.hh @@ -124,9 +124,11 @@ struct hb_buffer_t unsigned int context_len[2]; /* Debugging API */ +#ifndef HB_NO_BUFFER_MESSAGE hb_buffer_message_func_t message_func; void *message_data; hb_destroy_func_t message_destroy; +#endif /* Internal debugging. */ /* The bits here reflect current allocations of the bytes in glyph_info_t's var1 and var2. */ @@ -226,10 +228,10 @@ struct hb_buffer_t /* Makes a copy of the glyph at idx to output and replace glyph_index */ hb_glyph_info_t & output_glyph (hb_codepoint_t glyph_index) { - if (unlikely (!make_room_for (0, 1))) return Crap(hb_glyph_info_t); + if (unlikely (!make_room_for (0, 1))) return Crap (hb_glyph_info_t); if (unlikely (idx == len && !out_len)) - return Crap(hb_glyph_info_t); + return Crap (hb_glyph_info_t); out_info[out_len] = idx < len ? info[idx] : out_info[out_len - 1]; out_info[out_len].codepoint = glyph_index; @@ -316,7 +318,7 @@ struct hb_buffer_t HB_INTERNAL void delete_glyph (); void unsafe_to_break (unsigned int start, - unsigned int end) + unsigned int end) { if (end - start < 2) return; @@ -347,9 +349,19 @@ struct hb_buffer_t HB_INTERNAL void sort (unsigned int start, unsigned int end, int(*compar)(const hb_glyph_info_t *, const hb_glyph_info_t *)); - bool messaging () { return unlikely (message_func); } + bool messaging () + { +#ifdef HB_NO_BUFFER_MESSAGE + return false; +#else + return unlikely (message_func); +#endif + } bool message (hb_font_t *font, const char *fmt, ...) 
HB_PRINTF_FUNC(3, 4) { +#ifdef HB_NO_BUFFER_MESSAGE + return true; +#else if (!messaging ()) return true; va_list ap; @@ -357,6 +369,7 @@ struct hb_buffer_t bool ret = message_impl (font, fmt, ap); va_end (ap); return ret; +#endif } HB_INTERNAL bool message_impl (hb_font_t *font, const char *fmt, va_list ap) HB_PRINTF_FUNC(3, 0); @@ -373,13 +386,13 @@ struct hb_buffer_t inf.cluster = cluster; } - int + unsigned int _unsafe_to_break_find_min_cluster (const hb_glyph_info_t *infos, unsigned int start, unsigned int end, unsigned int cluster) const { for (unsigned int i = start; i < end; i++) - cluster = MIN (cluster, infos[i].cluster); + cluster = hb_min (cluster, infos[i].cluster); return cluster; } void @@ -395,8 +408,7 @@ struct hb_buffer_t } } - void unsafe_to_break_all () - { unsafe_to_break_impl (0, len); } + void unsafe_to_break_all () { unsafe_to_break_impl (0, len); } void safe_to_break_all () { for (unsigned int i = 0; i < len; i++) diff --git a/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-common.hh b/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-common.hh index a013e96dc431..3fcd5575122d 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-common.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-common.hh @@ -220,32 +220,22 @@ struct number_t void init () { set_real (0.0); } void fini () {} - void set_int (int v) { value = (double) v; } - int to_int () const { return (int) value; } + void set_int (int v) { value = v; } + int to_int () const { return value; } void set_fixed (int32_t v) { value = v / 65536.0; } - int32_t to_fixed () const { return (int32_t) (value * 65536.0); } + int32_t to_fixed () const { return value * 65536.0; } - void set_real (double v) { value = v; } + void set_real (double v) { value = v; } double to_real () const { return value; } - int ceil () const { return (int) ::ceil (value); } - int floor () const { return (int) ::floor (value); } - bool in_int_range () const { return ((double) (int16_t) to_int () == value); } - bool operator > (const number_t &n) const - { return value > n.to_real (); } - - bool operator < (const number_t &n) const - { return n > *this; } - - bool operator >= (const number_t &n) const - { return !(*this < n); } - - bool operator <= (const number_t &n) const - { return !(*this > n); } + bool operator > (const number_t &n) const { return value > n.to_real (); } + bool operator < (const number_t &n) const { return n > *this; } + bool operator >= (const number_t &n) const { return !(*this < n); } + bool operator <= (const number_t &n) const { return !(*this > n); } const number_t &operator += (const number_t &n) { @@ -255,37 +245,34 @@ struct number_t } protected: - double value; + double value; }; /* byte string */ struct UnsizedByteStr : UnsizedArrayOf { // encode 2-byte int (Dict/CharString) or 4-byte int (Dict) - template - static bool serialize_int (hb_serialize_context_t *c, op_code_t intOp, int value) + template + static bool serialize_int (hb_serialize_context_t *c, op_code_t intOp, V value) { TRACE_SERIALIZE (this); - if (unlikely ((value < minVal || value > maxVal))) - return_trace (false); - HBUINT8 *p = c->allocate_size (1); - if (unlikely (p == nullptr)) return_trace (false); - p->set (intOp); - - INTTYPE *ip = c->allocate_size (INTTYPE::static_size); - if (unlikely (ip == nullptr)) return_trace (false); - ip->set ((unsigned int)value); + if (unlikely (!p)) return_trace (false); + *p = intOp; - return_trace (true); + T *ip = c->allocate_size (T::static_size); + if (unlikely 
(!ip)) return_trace (false); + return_trace (c->check_assign (*ip, value)); } - static bool serialize_int4 (hb_serialize_context_t *c, int value) - { return serialize_int (c, OpCode_longintdict, value); } + template + static bool serialize_int4 (hb_serialize_context_t *c, V value) + { return serialize_int (c, OpCode_longintdict, value); } - static bool serialize_int2 (hb_serialize_context_t *c, int value) - { return serialize_int (c, OpCode_shortint, value); } + template + static bool serialize_int2 (hb_serialize_context_t *c, V value) + { return serialize_int (c, OpCode_shortint, value); } /* Defining null_size allows a Null object may be created. Should be safe because: * A descendent struct Dict uses a Null pointer to indicate a missing table, @@ -320,8 +307,7 @@ struct byte_str_t : hb_ubytes_t /* A byte string associated with the current offset and an error condition */ struct byte_str_ref_t { - byte_str_ref_t () - { init (); } + byte_str_ref_t () { init (); } void init () { @@ -343,13 +329,12 @@ struct byte_str_ref_t } const unsigned char& operator [] (int i) { - if (unlikely ((unsigned int)(offset + i) >= str.length)) + if (unlikely ((unsigned int) (offset + i) >= str.length)) { set_error (); - return Null(unsigned char); + return Null (unsigned char); } - else - return str[offset + i]; + return str[offset + i]; } /* Conversion to byte_str_t */ @@ -359,9 +344,7 @@ struct byte_str_ref_t { return str.sub_str (offset_, len_); } bool avail (unsigned int count=1) const - { - return (!in_error () && str.check_limit (offset, count)); - } + { return (!in_error () && str.check_limit (offset, count)); } void inc (unsigned int count=1) { if (likely (!in_error () && (offset <= str.length) && (offset + count <= str.length))) @@ -389,7 +372,7 @@ typedef hb_vector_t byte_str_array_t; /* stack */ template -struct stack_t +struct cff_stack_t { void init () { @@ -400,11 +383,7 @@ struct stack_t for (unsigned int i = 0; i < elements.length; i++) elements[i].init (); } - - void fini () - { - elements.fini_deep (); - } + void fini () { elements.fini_deep (); } ELEM& operator [] (unsigned int i) { @@ -419,7 +398,6 @@ struct stack_t else set_error (); } - ELEM &push () { if (likely (count < elements.length)) @@ -427,7 +405,7 @@ struct stack_t else { set_error (); - return Crap(ELEM); + return Crap (ELEM); } } @@ -438,10 +416,9 @@ struct stack_t else { set_error (); - return Crap(ELEM); + return Crap (ELEM); } } - void pop (unsigned int n) { if (likely (count >= n)) @@ -452,13 +429,12 @@ struct stack_t const ELEM& peek () { - if (likely (count > 0)) - return elements[count-1]; - else + if (unlikely (count < 0)) { set_error (); - return Null(ELEM); + return Null (ELEM); } + return elements[count - 1]; } void unpop () @@ -475,7 +451,7 @@ struct stack_t void set_error () { error = true; } unsigned int get_count () const { return count; } - bool is_empty () const { return count == 0; } + bool is_empty () const { return !count; } static constexpr unsigned kSizeLimit = LIMIT; @@ -487,7 +463,7 @@ struct stack_t /* argument stack */ template -struct arg_stack_t : stack_t +struct arg_stack_t : cff_stack_t { void push_int (int v) { @@ -519,7 +495,7 @@ struct arg_stack_t : stack_t i = 0; S::set_error (); } - return (unsigned)i; + return (unsigned) i; } void push_longint_from_substr (byte_str_ref_t& str_ref) @@ -538,12 +514,10 @@ struct arg_stack_t : stack_t } hb_array_t get_subarray (unsigned int start) const - { - return S::elements.sub_array (start); - } + { return S::elements.sub_array (start); } private: - typedef 
stack_t S; + typedef cff_stack_t S; }; /* an operator prefixed by its operands in a byte string */ @@ -565,7 +539,7 @@ struct op_serializer_t TRACE_SERIALIZE (this); HBUINT8 *d = c->allocate_size (opstr.str.length); - if (unlikely (d == nullptr)) return_trace (false); + if (unlikely (!d)) return_trace (false); memcpy (d, &opstr.str[0], opstr.str.length); return_trace (true); } @@ -605,7 +579,7 @@ struct parsed_values_t } unsigned get_count () const { return values.length; } - const VAL &get_value (unsigned int i) const { return values[i]; } + const VAL &get_value (unsigned int i) const { return values[i]; } const VAL &operator [] (unsigned int i) const { return get_value (i); } unsigned int opStart; @@ -644,30 +618,19 @@ struct interp_env_t return op; } - const ARG& eval_arg (unsigned int i) - { - return argStack[i]; - } + const ARG& eval_arg (unsigned int i) { return argStack[i]; } - ARG& pop_arg () - { - return argStack.pop (); - } + ARG& pop_arg () { return argStack.pop (); } + void pop_n_args (unsigned int n) { argStack.pop (n); } - void pop_n_args (unsigned int n) - { - argStack.pop (n); - } - - void clear_args () - { - pop_n_args (argStack.get_count ()); - } + void clear_args () { pop_n_args (argStack.get_count ()); } - byte_str_ref_t str_ref; - arg_stack_t argStack; + byte_str_ref_t + str_ref; + arg_stack_t + argStack; protected: - bool error; + bool error; }; typedef interp_env_t<> num_interp_env_t; @@ -691,7 +654,7 @@ struct opset_t case OpCode_TwoByteNegInt0: case OpCode_TwoByteNegInt1: case OpCode_TwoByteNegInt2: case OpCode_TwoByteNegInt3: - env.argStack.push_int ((int16_t)(-(op - OpCode_TwoByteNegInt0) * 256 - env.str_ref[0] - 108)); + env.argStack.push_int ((-(int16_t)(op - OpCode_TwoByteNegInt0) * 256 - env.str_ref[0] - 108)); env.str_ref.inc (); break; @@ -711,8 +674,8 @@ struct opset_t }; template -struct interpreter_t { - +struct interpreter_t +{ ~interpreter_t() { fini (); } void fini () { env.fini (); } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-cs-common.hh b/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-cs-common.hh index d6d7f857ecb0..1b0d795c7cb2 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-cs-common.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-cs-common.hh @@ -57,14 +57,14 @@ struct call_context_t /* call stack */ const unsigned int kMaxCallLimit = 10; -struct call_stack_t : stack_t {}; +struct call_stack_t : cff_stack_t {}; template struct biased_subrs_t { - void init (const SUBRS &subrs_) + void init (const SUBRS *subrs_) { - subrs = &subrs_; + subrs = subrs_; unsigned int nSubrs = get_count (); if (nSubrs < 1240) bias = 107; @@ -76,13 +76,13 @@ struct biased_subrs_t void fini () {} - unsigned int get_count () const { return (subrs == nullptr)? 0: subrs->count; } - unsigned int get_bias () const { return bias; } + unsigned int get_count () const { return subrs ? 
subrs->count : 0; } + unsigned int get_bias () const { return bias; } byte_str_t operator [] (unsigned int index) const { - if (unlikely ((subrs == nullptr) || index >= subrs->count)) - return Null(byte_str_t); + if (unlikely (!subrs || index >= subrs->count)) + return Null (byte_str_t); else return (*subrs)[index]; } @@ -118,7 +118,7 @@ struct point_t template struct cs_interp_env_t : interp_env_t { - void init (const byte_str_t &str, const SUBRS &globalSubrs_, const SUBRS &localSubrs_) + void init (const byte_str_t &str, const SUBRS *globalSubrs_, const SUBRS *localSubrs_) { interp_env_t::init (str); @@ -147,8 +147,9 @@ struct cs_interp_env_t : interp_env_t return callStack.in_error () || SUPER::in_error (); } - bool popSubrNum (const biased_subrs_t& biasedSubrs, unsigned int &subr_num) + bool pop_subr_num (const biased_subrs_t& biasedSubrs, unsigned int &subr_num) { + subr_num = 0; int n = SUPER::argStack.pop_int (); n += biasedSubrs.get_bias (); if (unlikely ((n < 0) || ((unsigned int)n >= biasedSubrs.get_count ()))) @@ -158,11 +159,11 @@ struct cs_interp_env_t : interp_env_t return true; } - void callSubr (const biased_subrs_t& biasedSubrs, cs_type_t type) + void call_subr (const biased_subrs_t& biasedSubrs, cs_type_t type) { - unsigned int subr_num; + unsigned int subr_num = 0; - if (unlikely (!popSubrNum (biasedSubrs, subr_num) + if (unlikely (!pop_subr_num (biasedSubrs, subr_num) || callStack.get_count () >= kMaxCallLimit)) { SUPER::set_error (); @@ -175,7 +176,7 @@ struct cs_interp_env_t : interp_env_t SUPER::str_ref = context.str_ref; } - void returnFromSubr () + void return_from_subr () { if (unlikely (SUPER::str_ref.in_error ())) SUPER::set_error (); @@ -246,7 +247,7 @@ struct path_procs_null_t static void flex1 (ENV &env, PARAM& param) {} }; -template > +template > struct cs_opset_t : opset_t { static void process_op (op_code_t op, ENV &env, PARAM& param) @@ -254,7 +255,7 @@ struct cs_opset_t : opset_t switch (op) { case OpCode_return: - env.returnFromSubr (); + env.return_from_subr (); break; case OpCode_endchar: OPSET::check_width (op, env, param); @@ -267,11 +268,11 @@ struct cs_opset_t : opset_t break; case OpCode_callsubr: - env.callSubr (env.localSubrs, CSType_LocalSubr); + env.call_subr (env.localSubrs, CSType_LocalSubr); break; case OpCode_callgsubr: - env.callSubr (env.globalSubrs, CSType_GlobalSubr); + env.call_subr (env.globalSubrs, CSType_GlobalSubr); break; case OpCode_hstem: @@ -550,8 +551,13 @@ struct path_procs_t static void rcurveline (ENV &env, PARAM& param) { + unsigned int arg_count = env.argStack.get_count (); + if (unlikely (arg_count < 8)) + return; + unsigned int i = 0; - for (; i + 6 <= env.argStack.get_count (); i += 6) + unsigned int curve_limit = arg_count - 2; + for (; i + 6 <= curve_limit; i += 6) { point_t pt1 = env.get_pt (); pt1.move (env.eval_arg (i), env.eval_arg (i+1)); @@ -561,34 +567,34 @@ struct path_procs_t pt3.move (env.eval_arg (i+4), env.eval_arg (i+5)); PATH::curve (env, param, pt1, pt2, pt3); } - for (; i + 2 <= env.argStack.get_count (); i += 2) - { - point_t pt1 = env.get_pt (); - pt1.move (env.eval_arg (i), env.eval_arg (i+1)); - PATH::line (env, param, pt1); - } + + point_t pt1 = env.get_pt (); + pt1.move (env.eval_arg (i), env.eval_arg (i+1)); + PATH::line (env, param, pt1); } static void rlinecurve (ENV &env, PARAM& param) { + unsigned int arg_count = env.argStack.get_count (); + if (unlikely (arg_count < 8)) + return; + unsigned int i = 0; - unsigned int line_limit = (env.argStack.get_count () % 6); + unsigned int line_limit 
= arg_count - 6; for (; i + 2 <= line_limit; i += 2) { point_t pt1 = env.get_pt (); pt1.move (env.eval_arg (i), env.eval_arg (i+1)); PATH::line (env, param, pt1); } - for (; i + 6 <= env.argStack.get_count (); i += 6) - { - point_t pt1 = env.get_pt (); - pt1.move (env.eval_arg (i), env.eval_arg (i+1)); - point_t pt2 = pt1; - pt2.move (env.eval_arg (i+2), env.eval_arg (i+3)); - point_t pt3 = pt2; - pt3.move (env.eval_arg (i+4), env.eval_arg (i+5)); - PATH::curve (env, param, pt1, pt2, pt3); - } + + point_t pt1 = env.get_pt (); + pt1.move (env.eval_arg (i), env.eval_arg (i+1)); + point_t pt2 = pt1; + pt2.move (env.eval_arg (i+2), env.eval_arg (i+3)); + point_t pt3 = pt2; + pt3.move (env.eval_arg (i+4), env.eval_arg (i+5)); + PATH::curve (env, param, pt1, pt2, pt3); } static void vvcurveto (ENV &env, PARAM& param) diff --git a/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-dict-common.hh b/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-dict-common.hh index 256c96c04620..a0a9429bf9bf 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-dict-common.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-cff-interp-dict-common.hh @@ -27,8 +27,6 @@ #define HB_CFF_INTERP_DICT_COMMON_HH #include "hb-cff-interp-common.hh" -#include -#include namespace CFF { @@ -58,19 +56,6 @@ struct top_dict_values_t : dict_values_t } void fini () { dict_values_t::fini (); } - unsigned int calculate_serialized_op_size (const OPSTR& opstr) const - { - switch (opstr.op) - { - case OpCode_CharStrings: - case OpCode_FDArray: - return OpCode_Size (OpCode_longintdict) + 4 + OpCode_Size (opstr.op); - - default: - return opstr.str.length; - } - } - unsigned int charStringsOffset; unsigned int FDArrayOffset; }; @@ -94,130 +79,52 @@ struct dict_opset_t : opset_t } } + /* Turns CFF's BCD format into strtod understandable string */ static double parse_bcd (byte_str_ref_t& str_ref) { - bool neg = false; - double int_part = 0; - uint64_t frac_part = 0; - uint32_t frac_count = 0; - bool exp_neg = false; - uint32_t exp_part = 0; - bool exp_overflow = false; - enum Part { INT_PART=0, FRAC_PART, EXP_PART } part = INT_PART; + if (unlikely (str_ref.in_error ())) return .0; + enum Nibble { DECIMAL=10, EXP_POS, EXP_NEG, RESERVED, NEG, END }; - const uint64_t MAX_FRACT = 0xFFFFFFFFFFFFFull; /* 1^52-1 */ - const uint32_t MAX_EXP = 0x7FFu; /* 1^11-1 */ - double value = 0.0; + char buf[32]; unsigned char byte = 0; - for (uint32_t i = 0;; i++) + for (unsigned i = 0, count = 0; count < ARRAY_LENGTH (buf); ++i, ++count) { - char d; - if ((i & 1) == 0) + unsigned nibble; + if (!(i & 1)) { - if (!str_ref.avail ()) - { - str_ref.set_error (); - return 0.0; - } + if (unlikely (!str_ref.avail ())) break; + byte = str_ref[0]; str_ref.inc (); - d = byte >> 4; + nibble = byte >> 4; } else - d = byte & 0x0F; + nibble = byte & 0x0F; - switch (d) + if (unlikely (nibble == RESERVED)) break; + else if (nibble == END) { - case RESERVED: - str_ref.set_error (); - return value; - - case END: - value = (double)(neg? -int_part: int_part); - if (frac_count > 0) - { - double frac = (frac_part / pow (10.0, (double)frac_count)); - if (neg) frac = -frac; - value += frac; - } - if (unlikely (exp_overflow)) - { - if (value == 0.0) - return value; - if (exp_neg) - return neg? -DBL_MIN: DBL_MIN; - else - return neg? 
-DBL_MAX: DBL_MAX; - } - if (exp_part != 0) - { - if (exp_neg) - value /= pow (10.0, (double)exp_part); - else - value *= pow (10.0, (double)exp_part); - } - return value; - - case NEG: - if (i != 0) - { - str_ref.set_error (); - return 0.0; - } - neg = true; - break; - - case DECIMAL: - if (part != INT_PART) - { - str_ref.set_error (); - return value; - } - part = FRAC_PART; + const char *p = buf; + double pv; + if (unlikely (!hb_parse_double (&p, p + count, &pv, true/* whole buffer */))) break; - - case EXP_NEG: - exp_neg = true; - HB_FALLTHROUGH; - - case EXP_POS: - if (part == EXP_PART) - { - str_ref.set_error (); - return value; - } - part = EXP_PART; - break; - - default: - switch (part) { - default: - case INT_PART: - int_part = (int_part * 10) + d; - break; - - case FRAC_PART: - if (likely (frac_part <= MAX_FRACT / 10)) - { - frac_part = (frac_part * 10) + (unsigned)d; - frac_count++; - } - break; - - case EXP_PART: - if (likely (exp_part * 10 + d <= MAX_EXP)) - { - exp_part = (exp_part * 10) + d; - } - else - exp_overflow = true; - break; - } + return pv; + } + else + { + buf[count] = "0123456789.EE?-?"[nibble]; + if (nibble == EXP_NEG) + { + ++count; + if (unlikely (count == ARRAY_LENGTH (buf))) break; + buf[count] = '-'; + } } } - return value; + str_ref.set_error (); + return .0; } static bool is_hint_op (op_code_t op) diff --git a/src/java.desktop/share/native/libharfbuzz/hb-cff1-interp-cs.hh b/src/java.desktop/share/native/libharfbuzz/hb-cff1-interp-cs.hh index a8208a3d1968..96718f4a6986 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-cff1-interp-cs.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-cff1-interp-cs.hh @@ -40,7 +40,7 @@ struct cff1_cs_interp_env_t : cs_interp_env_t template void init (const byte_str_t &str, ACC &acc, unsigned int fd) { - SUPER::init (str, *acc.globalSubrs, *acc.privateDicts[fd].localSubrs); + SUPER::init (str, acc.globalSubrs, acc.privateDicts[fd].localSubrs); processed_width = false; has_width = false; arg_start = 0; @@ -81,7 +81,7 @@ struct cff1_cs_interp_env_t : cs_interp_env_t typedef cs_interp_env_t SUPER; }; -template > +template > struct cff1_cs_opset_t : cs_opset_t { /* PostScript-originated legacy opcodes (OpCode_add etc) are unsupported */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-cff2-interp-cs.hh b/src/java.desktop/share/native/libharfbuzz/hb-cff2-interp-cs.hh index 6971c2eec55f..f1de1e32981a 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-cff2-interp-cs.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-cff2-interp-cs.hh @@ -52,7 +52,7 @@ struct blend_arg_t : number_t void set_real (double v) { reset_blends (); number_t::set_real (v); } void set_blends (unsigned int numValues_, unsigned int valueIndex_, - unsigned int numBlends, hb_array_t blends_) + unsigned int numBlends, hb_array_t blends_) { numValues = numValues_; valueIndex = valueIndex_; @@ -80,9 +80,9 @@ struct cff2_cs_interp_env_t : cs_interp_env_t { template void init (const byte_str_t &str, ACC &acc, unsigned int fd, - const int *coords_=nullptr, unsigned int num_coords_=0) + const int *coords_=nullptr, unsigned int num_coords_=0) { - SUPER::init (str, *acc.globalSubrs, *acc.privateDicts[fd].localSubrs); + SUPER::init (str, acc.globalSubrs, acc.privateDicts[fd].localSubrs); coords = coords_; num_coords = num_coords_; @@ -90,7 +90,7 @@ struct cff2_cs_interp_env_t : cs_interp_env_t seen_blend = false; seen_vsindex_ = false; scalars.init (); - do_blend = (coords != nullptr) && num_coords && (varStore != &Null(CFF2VariationStore)); + 
do_blend = num_coords && coords && varStore->size; set_ivs (acc.privateDicts[fd].ivs); } @@ -133,10 +133,11 @@ struct cff2_cs_interp_env_t : cs_interp_env_t region_count = varStore->varStore.get_region_index_count (get_ivs ()); if (do_blend) { - scalars.resize (region_count); - varStore->varStore.get_scalars (get_ivs (), - (int *)coords, num_coords, - &scalars[0], region_count); + if (unlikely (!scalars.resize (region_count))) + set_error (); + else + varStore->varStore.get_scalars (get_ivs (), coords, num_coords, + &scalars[0], region_count); } seen_blend = true; } @@ -193,7 +194,7 @@ struct cff2_cs_interp_env_t : cs_interp_env_t typedef cs_interp_env_t SUPER; }; -template > +template > struct cff2_cs_opset_t : cs_opset_t { static void process_op (op_code_t op, cff2_cs_interp_env_t &env, PARAM& param) diff --git a/src/java.desktop/share/native/libharfbuzz/hb-common.cc b/src/java.desktop/share/native/libharfbuzz/hb-common.cc index 890697cee1bc..4dd2479a0e60 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-common.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-common.cc @@ -27,14 +27,13 @@ */ #include "hb.hh" - #include "hb-machinery.hh" #include -#ifdef HAVE_XLOCALE_H -#include -#endif +#ifdef HB_NO_SETLOCALE +#define setlocale(Category, Locale) "C" +#endif /** * SECTION:hb-common @@ -67,10 +66,9 @@ _hb_options_init () p = c + strlen (c); #define OPTION(name, symbol) \ - if (0 == strncmp (c, name, p - c) && strlen (name) == p - c) u.opts.symbol = true; + if (0 == strncmp (c, name, p - c) && strlen (name) == static_cast(p - c)) do { u.opts.symbol = true; } while (0) OPTION ("uniscribe-bug-compatible", uniscribe_bug_compatible); - OPTION ("aat", aat); #undef OPTION @@ -334,14 +332,14 @@ lang_find_or_insert (const char *key) /** * hb_language_from_string: * @str: (array length=len) (element-type uint8_t): a string representing - * a BCP 47 language tag + * a BCP 47 language tag * @len: length of the @str, or -1 if it is %NULL-terminated. * - * Converts @str representing a BCP 47 language tag to the corresponding + * Converts @str representing a BCP 47 language tag to the corresponding * #hb_language_t. * * Return value: (transfer none): - * The #hb_language_t corresponding to the BCP 47 language tag. + * The #hb_language_t corresponding to the BCP 47 language tag. * * Since: 0.9.2 **/ @@ -356,7 +354,7 @@ hb_language_from_string (const char *str, int len) { /* NUL-terminate it. */ char strbuf[64]; - len = MIN (len, (int) sizeof (strbuf) - 1); + len = hb_min (len, (int) sizeof (strbuf) - 1); memcpy (strbuf, str, len); strbuf[len] = '\0'; item = lang_find_or_insert (strbuf); @@ -382,7 +380,8 @@ hb_language_from_string (const char *str, int len) const char * hb_language_to_string (hb_language_t language) { - /* This is actually nullptr-safe! */ + if (unlikely (!language)) return nullptr; + return language->s; } @@ -422,12 +421,12 @@ hb_language_get_default () /** * hb_script_from_iso15924_tag: - * @tag: an #hb_tag_t representing an ISO 15924 tag. + * @tag: an #hb_tag_t representing an ISO 15924 tag. * - * Converts an ISO 15924 script tag to a corresponding #hb_script_t. + * Converts an ISO 15924 script tag to a corresponding #hb_script_t. * * Return value: - * An #hb_script_t corresponding to the ISO 15924 tag. + * An #hb_script_t corresponding to the ISO 15924 tag. * * Since: 0.9.2 **/ @@ -468,15 +467,15 @@ hb_script_from_iso15924_tag (hb_tag_t tag) /** * hb_script_from_string: * @str: (array length=len) (element-type uint8_t): a string representing an - * ISO 15924 tag. 
+ * ISO 15924 tag. * @len: length of the @str, or -1 if it is %NULL-terminated. * - * Converts a string @str representing an ISO 15924 script tag to a + * Converts a string @str representing an ISO 15924 script tag to a * corresponding #hb_script_t. Shorthand for hb_tag_from_string() then * hb_script_from_iso15924_tag(). * * Return value: - * An #hb_script_t corresponding to the ISO 15924 tag. + * An #hb_script_t corresponding to the ISO 15924 tag. * * Since: 0.9.2 **/ @@ -488,12 +487,12 @@ hb_script_from_string (const char *str, int len) /** * hb_script_to_iso15924_tag: - * @script: an #hb_script_ to convert. + * @script: an #hb_script_t to convert. * * See hb_script_from_iso15924_tag(). * * Return value: - * An #hb_tag_t representing an ISO 15924 script tag. + * An #hb_tag_t representing an ISO 15924 script tag. * * Since: 0.9.2 **/ @@ -575,6 +574,13 @@ hb_script_get_horizontal_direction (hb_script_t script) case HB_SCRIPT_OLD_SOGDIAN: case HB_SCRIPT_SOGDIAN: + /* Unicode-12.0 additions */ + case HB_SCRIPT_ELYMAIC: + + /* Unicode-13.0 additions */ + case HB_SCRIPT_CHORASMIAN: + case HB_SCRIPT_YEZIDI: + return HB_DIRECTION_RTL; @@ -590,38 +596,6 @@ hb_script_get_horizontal_direction (hb_script_t script) } -/* hb_user_data_array_t */ - -bool -hb_user_data_array_t::set (hb_user_data_key_t *key, - void * data, - hb_destroy_func_t destroy, - hb_bool_t replace) -{ - if (!key) - return false; - - if (replace) { - if (!data && !destroy) { - items.remove (key, lock); - return true; - } - } - hb_user_data_item_t item = {key, data, destroy}; - bool ret = !!items.replace_or_insert (item, lock, (bool) replace); - - return ret; -} - -void * -hb_user_data_array_t::get (hb_user_data_key_t *key) -{ - hb_user_data_item_t item = {nullptr, nullptr, nullptr}; - - return items.find (key, &item, lock) ? item.data : nullptr; -} - - /* hb_version */ @@ -719,131 +693,24 @@ parse_char (const char **pp, const char *end, char c) static bool parse_uint (const char **pp, const char *end, unsigned int *pv) { - char buf[32]; - unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp)); - strncpy (buf, *pp, len); - buf[len] = '\0'; - - char *p = buf; - char *pend = p; - unsigned int v; - - /* Intentionally use strtol instead of strtoul, such that - * -1 turns into "big number"... */ - errno = 0; - v = strtol (p, &pend, 0); - if (errno || p == pend) - return false; + /* Intentionally use hb_parse_int inside instead of hb_parse_uint, + * such that -1 turns into "big number"... */ + int v; + if (unlikely (!hb_parse_int (pp, end, &v))) return false; *pv = v; - *pp += pend - p; return true; } static bool parse_uint32 (const char **pp, const char *end, uint32_t *pv) { - char buf[32]; - unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp)); - strncpy (buf, *pp, len); - buf[len] = '\0'; - - char *p = buf; - char *pend = p; - unsigned int v; - - /* Intentionally use strtol instead of strtoul, such that - * -1 turns into "big number"... */ - errno = 0; - v = strtol (p, &pend, 0); - if (errno || p == pend) - return false; + /* Intentionally use hb_parse_int inside instead of hb_parse_uint, + * such that -1 turns into "big number"... 
*/ + int v; + if (unlikely (!hb_parse_int (pp, end, &v))) return false; *pv = v; - *pp += pend - p; - return true; -} - -#if defined (HAVE_NEWLOCALE) && defined (HAVE_STRTOD_L) -#define USE_XLOCALE 1 -#define HB_LOCALE_T locale_t -#define HB_CREATE_LOCALE(locName) newlocale (LC_ALL_MASK, locName, nullptr) -#define HB_FREE_LOCALE(loc) freelocale (loc) -#elif defined(_MSC_VER) -#define USE_XLOCALE 1 -#define HB_LOCALE_T _locale_t -#define HB_CREATE_LOCALE(locName) _create_locale (LC_ALL, locName) -#define HB_FREE_LOCALE(loc) _free_locale (loc) -#define strtod_l(a, b, c) _strtod_l ((a), (b), (c)) -#endif - -#ifdef USE_XLOCALE - -#if HB_USE_ATEXIT -static void free_static_C_locale (); -#endif - -static struct hb_C_locale_lazy_loader_t : hb_lazy_loader_t -{ - static HB_LOCALE_T create () - { - HB_LOCALE_T C_locale = HB_CREATE_LOCALE ("C"); - -#if HB_USE_ATEXIT - atexit (free_static_C_locale); -#endif - - return C_locale; - } - static void destroy (HB_LOCALE_T p) - { - HB_FREE_LOCALE (p); - } - static HB_LOCALE_T get_null () - { - return nullptr; - } -} static_C_locale; - -#if HB_USE_ATEXIT -static -void free_static_C_locale () -{ - static_C_locale.free_instance (); -} -#endif - -static HB_LOCALE_T -get_C_locale () -{ - return static_C_locale.get_unconst (); -} -#endif /* USE_XLOCALE */ - -static bool -parse_float (const char **pp, const char *end, float *pv) -{ - char buf[32]; - unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp)); - strncpy (buf, *pp, len); - buf[len] = '\0'; - - char *p = buf; - char *pend = p; - float v; - - errno = 0; -#ifdef USE_XLOCALE - v = strtod_l (p, &pend, get_C_locale ()); -#else - v = strtod (p, &pend); -#endif - if (errno || p == pend) - return false; - - *pv = v; - *pp += pend - p; return true; } @@ -857,9 +724,14 @@ parse_bool (const char **pp, const char *end, uint32_t *pv) (*pp)++; /* CSS allows on/off as aliases 1/0. */ - if (*pp - p == 2 && 0 == strncmp (p, "on", 2)) + if (*pp - p == 2 + && TOLOWER (p[0]) == 'o' + && TOLOWER (p[1]) == 'n') *pv = 1; - else if (*pp - p == 3 && 0 == strncmp (p, "off", 3)) + else if (*pp - p == 3 + && TOLOWER (p[0]) == 'o' + && TOLOWER (p[1]) == 'f' + && TOLOWER (p[2]) == 'f') *pv = 0; else return false; @@ -974,7 +846,41 @@ parse_one_feature (const char **pp, const char *end, hb_feature_t *feature) * * Parses a string into a #hb_feature_t. * - * TODO: document the syntax here. + * The format for specifying feature strings follows. All valid CSS + * font-feature-settings values other than 'normal' and the global values are + * also accepted, though not documented below. CSS string escapes are not + * supported. + * + * The range indices refer to the positions between Unicode characters. The + * position before the first character is always 0. + * + * The format is Python-esque. Here is how it all works: + * + * + * + * + * Syntax Value Start End + * + * + * Setting value: + * kern 1 0 Turn feature on + * +kern 1 0 Turn feature on + * -kern 0 0 Turn feature off + * kern=0 0 0 Turn feature off + * kern=1 1 0 Turn feature on + * aalt=2 2 0 Choose 2nd alternate + * Setting index: + * kern[] 1 0 Turn feature on + * kern[:] 1 0 Turn feature on + * kern[5:] 1 5 Turn feature on, partial + * kern[:5] 1 0 5 Turn feature on, partial + * kern[3:5] 1 3 5 Turn feature on, range + * kern[3] 1 3 3+1 Turn feature on, single char + * Mixing it all: + * aalt[3:5]=2 2 3 5 Turn 2nd alternate on for range + * + * + * * * Return value: * %true if @str is successfully parsed, %false otherwise. 
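/*
 * Editor's note: the hunk above documents the feature-string syntax accepted by
 * hb_feature_from_string(). The following is an illustrative usage sketch, not
 * part of the patch itself; it relies only on the public API already present in
 * hb-common.h (hb_feature_from_string, hb_feature_to_string, hb_feature_t).
 */
#include <hb.h>
#include <stdio.h>

static void
demo_feature_parsing (void)
{
  hb_feature_t feature;

  /* "kern[3:5]=0" disables kerning for clusters 3 (inclusive) to 5 (exclusive). */
  if (hb_feature_from_string ("kern[3:5]=0", -1, &feature))
  {
    char buf[128];
    /* Round-trip back to a string; typically yields "-kern[3:5]". */
    hb_feature_to_string (&feature, buf, sizeof (buf));
    printf ("value=%u start=%u end=%u -> \"%s\"\n",
            feature.value, feature.start, feature.end, buf);
  }
}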
@@ -1028,25 +934,25 @@ hb_feature_to_string (hb_feature_t *feature, len += 4; while (len && s[len - 1] == ' ') len--; - if (feature->start != 0 || feature->end != (unsigned int) -1) + if (feature->start != HB_FEATURE_GLOBAL_START || feature->end != HB_FEATURE_GLOBAL_END) { s[len++] = '['; if (feature->start) - len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->start)); + len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->start)); if (feature->end != feature->start + 1) { s[len++] = ':'; - if (feature->end != (unsigned int) -1) - len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->end)); + if (feature->end != HB_FEATURE_GLOBAL_END) + len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->end)); } s[len++] = ']'; } if (feature->value > 1) { s[len++] = '='; - len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->value)); + len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->value)); } assert (len < ARRAY_LENGTH (s)); - len = MIN (len, size - 1); + len = hb_min (len, size - 1); memcpy (buf, s, len); buf[len] = '\0'; } @@ -1057,7 +963,11 @@ static bool parse_variation_value (const char **pp, const char *end, hb_variation_t *variation) { parse_char (pp, end, '='); /* Optional. */ - return parse_float (pp, end, &variation->value); + double v; + if (unlikely (!hb_parse_double (pp, end, &v))) return false; + + variation->value = v; + return true; } static bool @@ -1113,14 +1023,71 @@ hb_variation_to_string (hb_variation_t *variation, while (len && s[len - 1] == ' ') len--; s[len++] = '='; - len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%g", (double) variation->value)); + len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%g", (double) variation->value)); assert (len < ARRAY_LENGTH (s)); - len = MIN (len, size - 1); + len = hb_min (len, size - 1); memcpy (buf, s, len); buf[len] = '\0'; } +/** + * hb_color_get_alpha: + * color: a #hb_color_t we are interested in its channels. + * + * Return value: Alpha channel value of the given color + * + * Since: 2.1.0 + */ +uint8_t +(hb_color_get_alpha) (hb_color_t color) +{ + return hb_color_get_alpha (color); +} + +/** + * hb_color_get_red: + * color: a #hb_color_t we are interested in its channels. + * + * Return value: Red channel value of the given color + * + * Since: 2.1.0 + */ +uint8_t +(hb_color_get_red) (hb_color_t color) +{ + return hb_color_get_red (color); +} + +/** + * hb_color_get_green: + * color: a #hb_color_t we are interested in its channels. + * + * Return value: Green channel value of the given color + * + * Since: 2.1.0 + */ +uint8_t +(hb_color_get_green) (hb_color_t color) +{ + return hb_color_get_green (color); +} + +/** + * hb_color_get_blue: + * color: a #hb_color_t we are interested in its channels. + * + * Return value: Blue channel value of the given color + * + * Since: 2.1.0 + */ +uint8_t +(hb_color_get_blue) (hb_color_t color) +{ + return hb_color_get_blue (color); +} + + /* If there is no visibility control, then hb-static.cc will NOT * define anything. Instead, we get it to define one set in here * only, so only libharfbuzz.so defines them, not other libs. 
*/ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-common.h b/src/java.desktop/share/native/libharfbuzz/hb-common.h index fea193ada71b..9614e720b326 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-common.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-common.h @@ -63,6 +63,8 @@ typedef __int32 int32_t; typedef unsigned __int32 uint32_t; typedef __int64 int64_t; typedef unsigned __int64 uint64_t; +#elif defined (__KERNEL__) +# include #else # include #endif @@ -357,6 +359,22 @@ typedef enum /*11.0*/HB_SCRIPT_OLD_SOGDIAN = HB_TAG ('S','o','g','o'), /*11.0*/HB_SCRIPT_SOGDIAN = HB_TAG ('S','o','g','d'), + /* + * Since 2.4.0 + */ + /*12.0*/HB_SCRIPT_ELYMAIC = HB_TAG ('E','l','y','m'), + /*12.0*/HB_SCRIPT_NANDINAGARI = HB_TAG ('N','a','n','d'), + /*12.0*/HB_SCRIPT_NYIAKENG_PUACHUE_HMONG = HB_TAG ('H','m','n','p'), + /*12.0*/HB_SCRIPT_WANCHO = HB_TAG ('W','c','h','o'), + + /* + * Since 2.6.7 + */ + /*13.0*/HB_SCRIPT_CHORASMIAN = HB_TAG ('C','h','r','s'), + /*13.0*/HB_SCRIPT_DIVES_AKURU = HB_TAG ('D','i','a','k'), + /*13.0*/HB_SCRIPT_KHITAN_SMALL_SCRIPT = HB_TAG ('K','i','t','s'), + /*13.0*/HB_SCRIPT_YEZIDI = HB_TAG ('Y','e','z','i'), + /* No script set. */ HB_SCRIPT_INVALID = HB_TAG_NONE, @@ -415,6 +433,21 @@ typedef void (*hb_destroy_func_t) (void *user_data); */ #define HB_FEATURE_GLOBAL_END ((unsigned int) -1) +/** + * hb_feature_t: + * @tag: a feature tag + * @value: 0 disables the feature, non-zero (usually 1) enables the feature. + * For features implemented as lookup type 3 (like 'salt') the @value is a one + * based index into the alternates. + * @start: the cluster to start applying this feature setting (inclusive). + * @end: the cluster to end applying this feature setting (exclusive). + * + * The #hb_feature_t is the structure that holds information about requested + * feature application. The feature will be applied with the given value to all + * glyphs which are in clusters between @start (inclusive) and @end (exclusive). + * Setting start to @HB_FEATURE_GLOBAL_START and end to @HB_FEATURE_GLOBAL_END + * specifies that the feature always applies to the entire buffer. + */ typedef struct hb_feature_t { hb_tag_t tag; uint32_t value; @@ -459,39 +492,21 @@ typedef uint32_t hb_color_t; #define HB_COLOR(b,g,r,a) ((hb_color_t) HB_TAG ((b),(g),(r),(a))) -/** - * hb_color_get_alpha: - * - * - * - * Since: 2.1.0 - */ +HB_EXTERN uint8_t +hb_color_get_alpha (hb_color_t color); #define hb_color_get_alpha(color) ((color) & 0xFF) -/** - * hb_color_get_red: - * - * - * - * Since: 2.1.0 - */ + +HB_EXTERN uint8_t +hb_color_get_red (hb_color_t color); #define hb_color_get_red(color) (((color) >> 8) & 0xFF) -/** - * hb_color_get_green: - * - * - * - * Since: 2.1.0 - */ + +HB_EXTERN uint8_t +hb_color_get_green (hb_color_t color); #define hb_color_get_green(color) (((color) >> 16) & 0xFF) -/** - * hb_color_get_blue: - * - * - * - * Since: 2.1.0 - */ -#define hb_color_get_blue(color) (((color) >> 24) & 0xFF) +HB_EXTERN uint8_t +hb_color_get_blue (hb_color_t color); +#define hb_color_get_blue(color) (((color) >> 24) & 0xFF) HB_END_DECLS diff --git a/src/java.desktop/share/native/libharfbuzz/hb-config.hh b/src/java.desktop/share/native/libharfbuzz/hb-config.hh new file mode 100644 index 000000000000..fc8d424bfb0b --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-config.hh @@ -0,0 +1,163 @@ +/* + * Copyright © 2019 Facebook, Inc. + * + * This is part of HarfBuzz, a text shaping library. 
+ * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Facebook Author(s): Behdad Esfahbod + */ + +#ifndef HB_CONFIG_HH +#define HB_CONFIG_HH + +#if 0 /* Make test happy. */ +#include "hb.hh" +#endif + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + + +#ifdef HB_TINY +#define HB_LEAN +#define HB_MINI +#define HB_NO_MT +#define HB_NO_UCD_UNASSIGNED +#ifndef NDEBUG +#define NDEBUG +#endif +#ifndef __OPTIMIZE_SIZE__ +#define __OPTIMIZE_SIZE__ +#endif +#endif + +#ifdef HB_LEAN +#define HB_DISABLE_DEPRECATED +#define HB_NDEBUG +#define HB_NO_ATEXIT +#define HB_NO_BUFFER_MESSAGE +#define HB_NO_BUFFER_SERIALIZE +#define HB_NO_BITMAP +#define HB_NO_CFF +#define HB_NO_COLOR +#define HB_NO_DRAW +#define HB_NO_ERRNO +#define HB_NO_FACE_COLLECT_UNICODES +#define HB_NO_GETENV +#define HB_NO_HINTING +#define HB_NO_LANGUAGE_PRIVATE_SUBTAG +#define HB_NO_LAYOUT_FEATURE_PARAMS +#define HB_NO_LAYOUT_COLLECT_GLYPHS +#define HB_NO_LAYOUT_UNUSED +#define HB_NO_MATH +#define HB_NO_META +#define HB_NO_METRICS +#define HB_NO_MMAP +#define HB_NO_NAME +#define HB_NO_OPEN +#define HB_NO_SETLOCALE +#define HB_NO_OT_FONT_GLYPH_NAMES +#define HB_NO_OT_SHAPE_FRACTIONS +#define HB_NO_STYLE +#define HB_NO_SUBSET_LAYOUT +#define HB_NO_VAR +#endif + +#ifdef HB_MINI +#define HB_NO_AAT +#define HB_NO_LEGACY +#endif + + +/* Closure of options. 
*/ + +#ifdef HB_DISABLE_DEPRECATED +#define HB_IF_NOT_DEPRECATED(x) +#else +#define HB_IF_NOT_DEPRECATED(x) x +#endif + +#ifdef HB_NO_AAT +#define HB_NO_OT_NAME_LANGUAGE_AAT +#define HB_NO_AAT_SHAPE +#endif + +#ifdef HB_NO_BITMAP +#define HB_NO_OT_FONT_BITMAP +#endif + +#ifdef HB_NO_CFF +#define HB_NO_OT_FONT_CFF +#define HB_NO_SUBSET_CFF +#endif + +#ifdef HB_NO_GETENV +#define HB_NO_UNISCRIBE_BUG_COMPATIBLE +#endif + +#ifdef HB_NO_LEGACY +#define HB_NO_CMAP_LEGACY_SUBTABLES +#define HB_NO_FALLBACK_SHAPE +#define HB_NO_OT_KERN +#define HB_NO_OT_LAYOUT_BLACKLIST +#define HB_NO_OT_SHAPE_FALLBACK +#endif + +#ifdef HB_NO_NAME +#define HB_NO_OT_NAME_LANGUAGE +#endif + +#ifdef HB_NO_OT +#define HB_NO_OT_FONT +#define HB_NO_OT_LAYOUT +#define HB_NO_OT_TAG +#define HB_NO_OT_SHAPE +#endif + +#ifdef HB_NO_OT_SHAPE +#define HB_NO_AAT_SHAPE +#endif + +#ifdef HB_NO_OT_SHAPE_FALLBACK +#define HB_NO_OT_SHAPE_COMPLEX_ARABIC_FALLBACK +#define HB_NO_OT_SHAPE_COMPLEX_HEBREW_FALLBACK +#define HB_NO_OT_SHAPE_COMPLEX_THAI_FALLBACK +#define HB_NO_OT_SHAPE_COMPLEX_VOWEL_CONSTRAINTS +#endif + +#ifdef NDEBUG +#ifndef HB_NDEBUG +#define HB_NDEBUG +#endif +#endif + +#ifdef __OPTIMIZE_SIZE__ +#ifndef HB_OPTIMIZE_SIZE +#define HB_OPTIMIZE_SIZE +#endif +#endif + +#ifdef HAVE_CONFIG_OVERRIDE_H +#include "config-override.h" +#endif + + +#endif /* HB_CONFIG_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-coretext.cc b/src/java.desktop/share/native/libharfbuzz/hb-coretext.cc index f8d03085bd90..a382228f20d8 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-coretext.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-coretext.cc @@ -27,6 +27,9 @@ */ #include "hb.hh" + +#ifdef HAVE_CORETEXT + #include "hb-shaper-impl.hh" #include "hb-coretext.h" @@ -46,24 +49,6 @@ /* https://developer.apple.com/documentation/coretext/1508745-ctfontcreatewithgraphicsfont */ #define HB_CORETEXT_DEFAULT_FONT_SIZE 12.f -static CGFloat -coretext_font_size_from_ptem (float ptem) -{ - /* CoreText points are CSS pixels (96 per inch), - * NOT typographic points (72 per inch). - * - * https://developer.apple.com/library/content/documentation/GraphicsAnimation/Conceptual/HighResolutionOSX/Explained/Explained.html - */ - ptem *= 96.f / 72.f; - return ptem <= 0.f ? HB_CORETEXT_DEFAULT_FONT_SIZE : ptem; -} -static float -coretext_font_size_to_ptem (CGFloat size) -{ - size *= 72.f / 96.f; - return size <= 0.f ? 
0 : size; -} - static void release_table_data (void *user_data) { @@ -72,7 +57,7 @@ release_table_data (void *user_data) } static hb_blob_t * -reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void *user_data) +_hb_cg_reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void *user_data) { CGFontRef cg_font = reinterpret_cast (user_data); CFDataRef cf_data = CGFontCopyTableForTag (cg_font, tag); @@ -171,7 +156,7 @@ create_ct_font (CGFontRef cg_font, CGFloat font_size) if (CFStringHasPrefix (cg_postscript_name, CFSTR (".SFNSText")) || CFStringHasPrefix (cg_postscript_name, CFSTR (".SFNSDisplay"))) { -#if MAC_OS_X_VERSION_MIN_REQUIRED < 1080 +#if !(defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) && MAC_OS_X_VERSION_MIN_REQUIRED < 1080 # define kCTFontUIFontSystem kCTFontSystemFontType # define kCTFontUIFontEmphasizedSystem kCTFontEmphasizedSystemFontType #endif @@ -214,7 +199,7 @@ create_ct_font (CGFontRef cg_font, CGFloat font_size) } CFURLRef original_url = nullptr; -#if TARGET_OS_OSX && MAC_OS_X_VERSION_MIN_REQUIRED < 1060 +#if !(defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) && MAC_OS_X_VERSION_MIN_REQUIRED < 1060 ATSFontRef atsFont; FSRef fsref; OSStatus status; @@ -244,7 +229,7 @@ create_ct_font (CGFontRef cg_font, CGFloat font_size) * process in Blink. This can be detected by the new file URL location * that the newly found font points to. */ CFURLRef new_url = nullptr; -#if TARGET_OS_OSX && MAC_OS_X_VERSION_MIN_REQUIRED < 1060 +#if !(defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) && MAC_OS_X_VERSION_MIN_REQUIRED < 1060 atsFont = CTFontGetPlatformFont (new_ct_font, NULL); status = ATSFontGetFileReference (atsFont, &fsref); if (status == noErr) @@ -293,13 +278,32 @@ _hb_coretext_shaper_face_data_destroy (hb_coretext_face_data_t *data) CFRelease ((CGFontRef) data); } +/** + * hb_coretext_face_create: + * @cg_font: The CGFontRef to work upon + * + * Creates an #hb_face_t face object from the specified + * CGFontRef. + * + * Return value: the new #hb_face_t face object + * + * Since: 0.9.10 + */ hb_face_t * hb_coretext_face_create (CGFontRef cg_font) { - return hb_face_create_for_tables (reference_table, CGFontRetain (cg_font), _hb_cg_font_release); + return hb_face_create_for_tables (_hb_cg_reference_table, CGFontRetain (cg_font), _hb_cg_font_release); } -/* +/** + * hb_coretext_face_get_cg_font: + * @face: The #hb_face_t to work upon + * + * Fetches the CGFontRef associated with an #hb_face_t + * face object + * + * Return value: the CGFontRef found + * * Since: 0.9.10 */ CGFontRef @@ -317,7 +321,8 @@ _hb_coretext_shaper_font_data_create (hb_font_t *font) if (unlikely (!face_data)) return nullptr; CGFontRef cg_font = (CGFontRef) (const void *) face->data.coretext; - CTFontRef ct_font = create_ct_font (cg_font, coretext_font_size_from_ptem (font->ptem)); + CGFloat font_size = (CGFloat) (font->ptem <= 0.f ? 
HB_CORETEXT_DEFAULT_FONT_SIZE : font->ptem); + CTFontRef ct_font = create_ct_font (cg_font, font_size); if (unlikely (!ct_font)) { @@ -341,7 +346,7 @@ hb_coretext_font_data_sync (hb_font_t *font) const hb_coretext_font_data_t *data = font->data.coretext; if (unlikely (!data)) return nullptr; - if (fabs (CTFontGetSize((CTFontRef) data) - coretext_font_size_from_ptem (font->ptem)) > .5) + if (fabs (CTFontGetSize ((CTFontRef) data) - (CGFloat) font->ptem) > .5) { /* XXX-MT-bug * Note that evaluating condition above can be dangerous if another thread @@ -365,10 +370,17 @@ hb_coretext_font_data_sync (hb_font_t *font) return font->data.coretext; } - -/* +/** + * hb_coretext_font_create: + * @ct_font: The CTFontRef to work upon + * + * Creates an #hb_font_t font object from the specified + * CTFontRef. + * + * Return value: the new #hb_font_t font object + * * Since: 1.7.2 - */ + **/ hb_font_t * hb_coretext_font_create (CTFontRef ct_font) { @@ -381,7 +393,7 @@ hb_coretext_font_create (CTFontRef ct_font) if (unlikely (hb_object_is_immutable (font))) return font; - hb_font_set_ptem (font, coretext_font_size_to_ptem (CTFontGetSize(ct_font))); + hb_font_set_ptem (font, CTFontGetSize (ct_font)); /* Let there be dragons here... */ font->data.coretext.cmpexch (nullptr, (hb_coretext_font_data_t *) CFRetain (ct_font)); @@ -389,6 +401,17 @@ hb_coretext_font_create (CTFontRef ct_font) return font; } +/** + * hb_coretext_face_get_ct_font: + * @font: #hb_font_t to work upon + * + * Fetches the CTFontRef associated with the specified + * #hb_font_t font object. + * + * Return value: the CTFontRef found + * + * Since: 0.9.10 + */ CTFontRef hb_coretext_font_get_ct_font (hb_font_t *font) { @@ -410,7 +433,7 @@ struct active_feature_t { feature_record_t rec; unsigned int order; - static int cmp (const void *pa, const void *pb) { + HB_INTERNAL static int cmp (const void *pa, const void *pb) { const active_feature_t *a = (const active_feature_t *) pa; const active_feature_t *b = (const active_feature_t *) pb; return a->rec.feature < b->rec.feature ? -1 : a->rec.feature > b->rec.feature ? 1 : @@ -428,7 +451,7 @@ struct feature_event_t { bool start; active_feature_t feature; - static int cmp (const void *pa, const void *pb) { + HB_INTERNAL static int cmp (const void *pa, const void *pb) { const feature_event_t *a = (const feature_event_t *) pa; const feature_event_t *b = (const feature_event_t *) pb; return a->index < b->index ? -1 : a->index > b->index ? 1 : @@ -489,13 +512,19 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, hb_vector_t feature_events; for (unsigned int i = 0; i < num_features; i++) { + active_feature_t feature; + +#if MAC_OS_X_VERSION_MIN_REQUIRED < 101000 const hb_aat_feature_mapping_t * mapping = hb_aat_layout_find_feature_mapping (features[i].tag); if (!mapping) continue; - active_feature_t feature; feature.rec.feature = mapping->aatFeatureType; feature.rec.setting = features[i].value ? 
mapping->selectorToEnable : mapping->selectorToDisable; +#else + feature.rec.feature = features[i].tag; + feature.rec.setting = features[i].value; +#endif feature.order = i; feature_event_t *event; @@ -544,6 +573,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, /* active_features.qsort (); */ for (unsigned int j = 0; j < active_features.length; j++) { +#if MAC_OS_X_VERSION_MIN_REQUIRED < 101000 CFStringRef keys[] = { kCTFontFeatureTypeIdentifierKey, kCTFontFeatureSelectorIdentifierKey @@ -552,6 +582,17 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &active_features[j].rec.feature), CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &active_features[j].rec.setting) }; +#else + char tag[5] = {HB_UNTAG (active_features[j].rec.feature)}; + CFTypeRef keys[] = { + kCTFontOpenTypeFeatureTag, + kCTFontOpenTypeFeatureValue + }; + CFTypeRef values[] = { + CFStringCreateWithCString (kCFAllocatorDefault, tag, kCFStringEncodingASCII), + CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &active_features[j].rec.setting) + }; +#endif static_assert ((ARRAY_LENGTH_CONST (keys) == ARRAY_LENGTH_CONST (values)), ""); CFDictionaryRef dict = CFDictionaryCreate (kCFAllocatorDefault, (const void **) keys, @@ -598,7 +639,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, } else { active_feature_t *feature = active_features.find (&event->feature); if (feature) - active_features.remove (feature - active_features.arrayZ ()); + active_features.remove (feature - active_features.arrayZ); } } } @@ -608,7 +649,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, #define ALLOCATE_ARRAY(Type, name, len, on_no_room) \ Type *name = (Type *) scratch; \ - { \ + do { \ unsigned int _consumed = DIV_CEIL ((len) * sizeof (Type), sizeof (*scratch)); \ if (unlikely (_consumed > scratch_size)) \ { \ @@ -617,9 +658,9 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, } \ scratch += _consumed; \ scratch_size -= _consumed; \ - } + } while (0) - ALLOCATE_ARRAY (UniChar, pchars, buffer->len * 2, /*nothing*/); + ALLOCATE_ARRAY (UniChar, pchars, buffer->len * 2, ((void)nullptr) /*nothing*/); unsigned int chars_len = 0; for (unsigned int i = 0; i < buffer->len; i++) { hb_codepoint_t c = buffer->info[i].codepoint; @@ -633,7 +674,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, } } - ALLOCATE_ARRAY (unsigned int, log_clusters, chars_len, /*nothing*/); + ALLOCATE_ARRAY (unsigned int, log_clusters, chars_len, ((void)nullptr) /*nothing*/); chars_len = 0; for (unsigned int i = 0; i < buffer->len; i++) { @@ -649,7 +690,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, DEBUG_MSG (CORETEXT, nullptr, __VA_ARGS__); \ ret = false; \ goto fail; \ - } HB_STMT_END; + } HB_STMT_END bool ret = true; CFStringRef string_ref = nullptr; @@ -711,7 +752,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, /* What's the iOS equivalent of this check? * The symbols was introduced in iOS 7.0. * At any rate, our fallback is safe and works fine. 
*/ -#if MAC_OS_X_VERSION_MIN_REQUIRED < 1090 +#if !(defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) && MAC_OS_X_VERSION_MIN_REQUIRED < 1090 # define kCTLanguageAttributeName CFSTR ("NSLanguage") #endif CFStringRef lang = CFStringCreateWithCStringNoCopy (kCFAllocatorDefault, @@ -771,7 +812,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, feature.start < chars_len && feature.start < feature.end) { CFRange feature_range = CFRangeMake (feature.start, - MIN (feature.end, chars_len) - feature.start); + hb_min (feature.end, chars_len) - feature.start); if (feature.value) CFAttributedStringRemoveAttribute (attr_string, feature_range, kCTKernAttributeName); else @@ -783,7 +824,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, int level = HB_DIRECTION_IS_FORWARD (buffer->props.direction) ? 0 : 1; CFNumberRef level_number = CFNumberCreate (kCFAllocatorDefault, kCFNumberIntType, &level); -#if MAC_OS_X_VERSION_MIN_REQUIRED < 1060 +#if !(defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) && MAC_OS_X_VERSION_MIN_REQUIRED < 1060 extern const CFStringRef kCTTypesetterOptionForcedEmbeddingLevel; #endif CFDictionaryRef options = CFDictionaryCreate (kCFAllocatorDefault, @@ -977,7 +1018,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, #define SCRATCH_RESTORE() \ scratch_size = scratch_size_saved; \ - scratch = scratch_saved; + scratch = scratch_saved { /* Setup glyphs */ SCRATCH_SAVE(); @@ -1069,7 +1110,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, if (false) { /* Make sure all runs had the expected direction. */ - bool backward = HB_DIRECTION_IS_BACKWARD (buffer->props.direction); + HB_UNUSED bool backward = HB_DIRECTION_IS_BACKWARD (buffer->props.direction); assert (bool (status_and & kCTRunStatusRightToLeft) == backward); assert (bool (status_or & kCTRunStatusRightToLeft) == backward); } @@ -1116,7 +1157,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, unsigned int cluster = info[count - 1].cluster; for (unsigned int i = count - 1; i > 0; i--) { - cluster = MIN (cluster, info[i - 1].cluster); + cluster = hb_min (cluster, info[i - 1].cluster); info[i - 1].cluster = cluster; } } @@ -1125,7 +1166,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, unsigned int cluster = info[0].cluster; for (unsigned int i = 1; i < count; i++) { - cluster = MIN (cluster, info[i].cluster); + cluster = hb_min (cluster, info[i].cluster); info[i].cluster = cluster; } } @@ -1150,57 +1191,4 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan, } -/* - * AAT shaper - */ - -/* - * shaper face data - */ - -struct hb_coretext_aat_face_data_t {}; - -hb_coretext_aat_face_data_t * -_hb_coretext_aat_shaper_face_data_create (hb_face_t *face) -{ - return hb_aat_layout_has_substitution (face) || hb_aat_layout_has_positioning (face) ? - (hb_coretext_aat_face_data_t *) HB_SHAPER_DATA_SUCCEEDED : nullptr; -} - -void -_hb_coretext_aat_shaper_face_data_destroy (hb_coretext_aat_face_data_t *data HB_UNUSED) -{ -} - - -/* - * shaper font data - */ - -struct hb_coretext_aat_font_data_t {}; - -hb_coretext_aat_font_data_t * -_hb_coretext_aat_shaper_font_data_create (hb_font_t *font) -{ - return font->data.coretext ? 
(hb_coretext_aat_font_data_t *) HB_SHAPER_DATA_SUCCEEDED : nullptr; -} - -void -_hb_coretext_aat_shaper_font_data_destroy (hb_coretext_aat_font_data_t *data HB_UNUSED) -{ -} - - -/* - * shaper - */ - -hb_bool_t -_hb_coretext_aat_shape (hb_shape_plan_t *shape_plan, - hb_font_t *font, - hb_buffer_t *buffer, - const hb_feature_t *features, - unsigned int num_features) -{ - return _hb_coretext_shape (shape_plan, font, buffer, features, num_features); -} +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-coretext.h b/src/java.desktop/share/native/libharfbuzz/hb-coretext.h index 4b0a6f01b6f6..55cac7e294a6 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-coretext.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-coretext.h @@ -40,8 +40,40 @@ HB_BEGIN_DECLS +/** + * HB_CORETEXT_TAG_MORT: + * + * The #hb_tag_t tag for the `mort` (glyph metamorphosis) table, + * which holds AAT features. + * + * For more information, see + * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6mort.html + * + **/ #define HB_CORETEXT_TAG_MORT HB_TAG('m','o','r','t') + +/** + * HB_CORETEXT_TAG_MORX: + * + * The #hb_tag_t tag for the `morx` (extended glyph metamorphosis) + * table, which holds AAT features. + * + * For more information, see + * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6morx.html + * + **/ #define HB_CORETEXT_TAG_MORX HB_TAG('m','o','r','x') + +/** + * HB_CORETEXT_TAG_KERX: + * + * The #hb_tag_t tag for the `kerx` (extended kerning) table, which + * holds AAT kerning information. + * + * For more information, see + * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6kerx.html + * + **/ #define HB_CORETEXT_TAG_KERX HB_TAG('k','e','r','x') diff --git a/src/java.desktop/share/native/libharfbuzz/hb-debug.hh b/src/java.desktop/share/native/libharfbuzz/hb-debug.hh index d5ec94f4e471..db60837748fa 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-debug.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-debug.hh @@ -29,7 +29,7 @@ #include "hb.hh" #include "hb-atomic.hh" -#include "hb-dsalgs.hh" +#include "hb-algs.hh" #ifndef HB_DEBUG @@ -46,7 +46,6 @@ struct hb_options_t bool unused : 1; /* In-case sign bit is here. */ bool initialized : 1; bool uniscribe_bug_compatible : 1; - bool aat : 1; }; union hb_options_union_t { @@ -63,6 +62,9 @@ extern HB_INTERNAL hb_atomic_int_t _hb_options; static inline hb_options_t hb_options () { +#ifdef HB_NO_GETENV + return hb_options_t (); +#endif /* Make a local copy, so we can access bitfield threadsafely. */ hb_options_union_t u; u.i = _hb_options.get_relaxed (); @@ -158,7 +160,7 @@ _hb_debug_msg_va (const char *what, VBAR VBAR VBAR VBAR VBAR VBAR VBAR VBAR VBAR VBAR; fprintf (stderr, "%2u %s" VRBAR "%s", level, - bars + sizeof (bars) - 1 - MIN ((unsigned int) sizeof (bars) - 1, (unsigned int) (sizeof (VBAR) - 1) * level), + bars + sizeof (bars) - 1 - hb_min ((unsigned int) sizeof (bars) - 1, (unsigned int) (sizeof (VBAR) - 1) * level), level_dir ? (level_dir > 0 ? 
DLBAR : ULBAR) : LBAR); } else fprintf (stderr, " " VRBAR LBAR); @@ -246,8 +248,8 @@ struct hb_printer_t { }; template <> -struct hb_printer_t { - const char *print (hb_void_t) { return ""; } +struct hb_printer_t { + const char *print (hb_empty_t) { return ""; } }; @@ -263,7 +265,7 @@ static inline void _hb_warn_no_return (bool returned) } } template <> -/*static*/ inline void _hb_warn_no_return (bool returned HB_UNUSED) +/*static*/ inline void _hb_warn_no_return (bool returned HB_UNUSED) {} template @@ -293,22 +295,23 @@ struct hb_auto_trace_t if (plevel) --*plevel; } - ret_t ret (ret_t v, - const char *func = "", - unsigned int line = 0) + template + T ret (T&& v, + const char *func = "", + unsigned int line = 0) { if (unlikely (returned)) { fprintf (stderr, "OUCH, double calls to return_trace(). This is a bug, please report.\n"); - return v; + return hb_forward (v); } _hb_debug_msg (what, obj, func, true, plevel ? *plevel : 1, -1, "return %s (line %d)", - hb_printer_t().print (v), line); + hb_printer_t().print (v), line); if (plevel) --*plevel; plevel = nullptr; returned = true; - return v; + return hb_forward (v); } private: @@ -327,18 +330,20 @@ struct hb_auto_trace_t<0, ret_t> const char *message, ...) HB_PRINTF_FUNC(6, 7) {} - ret_t ret (ret_t v, - const char *func HB_UNUSED = nullptr, - unsigned int line HB_UNUSED = 0) { return v; } + template + T ret (T&& v, + const char *func HB_UNUSED = nullptr, + unsigned int line HB_UNUSED = 0) { return hb_forward (v); } }; /* For disabled tracing; optimize out everything. * https://github.com/harfbuzz/harfbuzz/pull/605 */ template struct hb_no_trace_t { - ret_t ret (ret_t v, - const char *func HB_UNUSED = "", - unsigned int line HB_UNUSED = 0) { return v; } + template + T ret (T&& v, + const char *func HB_UNUSED = nullptr, + unsigned int line HB_UNUSED = 0) { return hb_forward (v); } }; #define return_trace(RET) return trace.ret (RET, HB_FUNC, __LINE__) @@ -368,10 +373,6 @@ struct hb_no_trace_t { #define HB_DEBUG_FT (HB_DEBUG+0) #endif -#ifndef HB_DEBUG_GET_COVERAGE -#define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0) -#endif - #ifndef HB_DEBUG_OBJECT #define HB_DEBUG_OBJECT (HB_DEBUG+0) #endif @@ -408,7 +409,7 @@ struct hb_no_trace_t { #define TRACE_SANITIZE(this) \ hb_auto_trace_t trace \ (&c->debug_depth, c->get_name (), this, HB_FUNC, \ - " "); + " ") #else #define TRACE_SANITIZE(this) hb_no_trace_t trace #endif @@ -420,7 +421,7 @@ struct hb_no_trace_t { #define TRACE_SERIALIZE(this) \ hb_auto_trace_t trace \ (&c->debug_depth, "SERIALIZE", c, HB_FUNC, \ - " "); + " ") #else #define TRACE_SERIALIZE(this) hb_no_trace_t trace #endif @@ -432,37 +433,24 @@ struct hb_no_trace_t { #define TRACE_SUBSET(this) \ hb_auto_trace_t trace \ (&c->debug_depth, c->get_name (), this, HB_FUNC, \ - " "); + " ") #else #define TRACE_SUBSET(this) hb_no_trace_t trace #endif -#ifndef HB_DEBUG_WOULD_APPLY -#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0) -#endif -#if HB_DEBUG_WOULD_APPLY -#define TRACE_WOULD_APPLY(this) \ - hb_auto_trace_t trace \ - (&c->debug_depth, c->get_name (), this, HB_FUNC, \ - "%d glyphs", c->len); -#else -#define TRACE_WOULD_APPLY(this) hb_no_trace_t trace -#endif - #ifndef HB_DEBUG_DISPATCH #define HB_DEBUG_DISPATCH ( \ HB_DEBUG_APPLY + \ HB_DEBUG_SANITIZE + \ HB_DEBUG_SERIALIZE + \ - HB_DEBUG_SUBSET + \ - HB_DEBUG_WOULD_APPLY + \ + HB_DEBUG_SUBSET + \ 0) #endif #if HB_DEBUG_DISPATCH #define TRACE_DISPATCH(this, format) \ hb_auto_trace_t trace \ (&c->debug_depth, c->get_name (), this, HB_FUNC, \ - "format %d", (int) format); + "format %d", (int) format) 
#else #define TRACE_DISPATCH(this, format) hb_no_trace_t trace #endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-deprecated.h b/src/java.desktop/share/native/libharfbuzz/hb-deprecated.h index 9409f320781f..5dd04050b8e3 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-deprecated.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-deprecated.h @@ -63,7 +63,7 @@ typedef hb_bool_t (*hb_font_get_glyph_func_t) (hb_font_t *font, void *font_data, hb_codepoint_t *glyph, void *user_data); -HB_EXTERN HB_DEPRECATED_FOR(hb_font_funcs_set_nominal_glyph_func or hb_font_funcs_set_variation_glyph_func) void +HB_EXTERN HB_DEPRECATED_FOR(hb_font_funcs_set_nominal_glyph_func and hb_font_funcs_set_variation_glyph_func) void hb_font_funcs_set_glyph_func (hb_font_funcs_t *ffuncs, hb_font_get_glyph_func_t func, void *user_data, hb_destroy_func_t destroy); @@ -165,29 +165,8 @@ hb_unicode_decompose_compatibility (hb_unicode_funcs_t *ufuncs, hb_codepoint_t *decomposed); -typedef hb_position_t (*hb_font_get_glyph_kerning_func_t) (hb_font_t *font, void *font_data, - hb_codepoint_t first_glyph, hb_codepoint_t second_glyph, - void *user_data); -typedef hb_font_get_glyph_kerning_func_t hb_font_get_glyph_h_kerning_func_t; typedef hb_font_get_glyph_kerning_func_t hb_font_get_glyph_v_kerning_func_t; -/** - * hb_font_funcs_set_glyph_h_kerning_func: - * @ffuncs: font functions. - * @func: (closure user_data) (destroy destroy) (scope notified): - * @user_data: - * @destroy: - * - * - * - * Since: 0.9.2 - * Deprecated: 2.0.0 - **/ -HB_EXTERN void -hb_font_funcs_set_glyph_h_kerning_func (hb_font_funcs_t *ffuncs, - hb_font_get_glyph_h_kerning_func_t func, - void *user_data, hb_destroy_func_t destroy); - /** * hb_font_funcs_set_glyph_v_kerning_func: * @ffuncs: font functions. @@ -206,19 +185,9 @@ hb_font_funcs_set_glyph_v_kerning_func (hb_font_funcs_t *ffuncs, void *user_data, hb_destroy_func_t destroy); HB_EXTERN hb_position_t -hb_font_get_glyph_h_kerning (hb_font_t *font, - hb_codepoint_t left_glyph, hb_codepoint_t right_glyph); -HB_EXTERN hb_position_t hb_font_get_glyph_v_kerning (hb_font_t *font, hb_codepoint_t top_glyph, hb_codepoint_t bottom_glyph); -HB_EXTERN void -hb_font_get_glyph_kerning_for_direction (hb_font_t *font, - hb_codepoint_t first_glyph, hb_codepoint_t second_glyph, - hb_direction_t direction, - hb_position_t *x, hb_position_t *y); - - #endif HB_END_DECLS diff --git a/src/java.desktop/share/native/libharfbuzz/hb-dispatch.hh b/src/java.desktop/share/native/libharfbuzz/hb-dispatch.hh new file mode 100644 index 000000000000..e946f21c114b --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-dispatch.hh @@ -0,0 +1,61 @@ +/* + * Copyright © 2007,2008,2009,2010 Red Hat, Inc. + * Copyright © 2012,2018 Google, Inc. + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. 
+ * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Red Hat Author(s): Behdad Esfahbod + * Google Author(s): Behdad Esfahbod + */ + +#ifndef HB_DISPATCH_HH +#define HB_DISPATCH_HH + +#include "hb.hh" + +/* + * Dispatch + */ + +template +struct hb_dispatch_context_t +{ + hb_dispatch_context_t () : debug_depth (0) {} + private: + /* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */ + const Context* thiz () const { return static_cast (this); } + Context* thiz () { return static_cast< Context *> (this); } + public: + const char *get_name () { return "UNKNOWN"; } + static constexpr unsigned max_debug_depth = MaxDebugDepth; + typedef Return return_t; + template + bool may_dispatch (const T *obj HB_UNUSED, const F *format HB_UNUSED) { return true; } + template + return_t dispatch (const T &obj, Ts&&... ds) + { return obj.dispatch (thiz (), hb_forward (ds)...); } + static return_t no_dispatch_return_value () { return Context::default_return_value (); } + static bool stop_sublookup_iteration (const return_t r HB_UNUSED) { return false; } + unsigned debug_depth; +}; + + +#endif /* HB_DISPATCH_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-draw.cc b/src/java.desktop/share/native/libharfbuzz/hb-draw.cc new file mode 100644 index 000000000000..72444a94c8a9 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-draw.cc @@ -0,0 +1,261 @@ +/* + * Copyright © 2019-2020 Ebrahim Byagowi + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + */ + +#include "hb.hh" + +#ifndef HB_NO_DRAW +#ifdef HB_EXPERIMENTAL_API + +#include "hb-draw.hh" +#include "hb-ot.h" +#include "hb-ot-glyf-table.hh" +#include "hb-ot-cff1-table.hh" +#include "hb-ot-cff2-table.hh" + +/** + * hb_draw_funcs_set_move_to_func: + * @funcs: draw functions object + * @move_to: move-to callback + * + * Sets move-to callback to the draw functions object. 
+ * + * Since: EXPERIMENTAL + **/ +void +hb_draw_funcs_set_move_to_func (hb_draw_funcs_t *funcs, + hb_draw_move_to_func_t move_to) +{ + if (unlikely (hb_object_is_immutable (funcs))) return; + funcs->move_to = move_to; +} + +/** + * hb_draw_funcs_set_line_to_func: + * @funcs: draw functions object + * @line_to: line-to callback + * + * Sets line-to callback to the draw functions object. + * + * Since: EXPERIMENTAL + **/ +void +hb_draw_funcs_set_line_to_func (hb_draw_funcs_t *funcs, + hb_draw_line_to_func_t line_to) +{ + if (unlikely (hb_object_is_immutable (funcs))) return; + funcs->line_to = line_to; +} + +/** + * hb_draw_funcs_set_quadratic_to_func: + * @funcs: draw functions object + * @move_to: quadratic-to callback + * + * Sets quadratic-to callback to the draw functions object. + * + * Since: EXPERIMENTAL + **/ +void +hb_draw_funcs_set_quadratic_to_func (hb_draw_funcs_t *funcs, + hb_draw_quadratic_to_func_t quadratic_to) +{ + if (unlikely (hb_object_is_immutable (funcs))) return; + funcs->quadratic_to = quadratic_to; + funcs->is_quadratic_to_set = true; +} + +/** + * hb_draw_funcs_set_cubic_to_func: + * @funcs: draw functions + * @cubic_to: cubic-to callback + * + * Sets cubic-to callback to the draw functions object. + * + * Since: EXPERIMENTAL + **/ +void +hb_draw_funcs_set_cubic_to_func (hb_draw_funcs_t *funcs, + hb_draw_cubic_to_func_t cubic_to) +{ + if (unlikely (hb_object_is_immutable (funcs))) return; + funcs->cubic_to = cubic_to; +} + +/** + * hb_draw_funcs_set_close_path_func: + * @funcs: draw functions object + * @close_path: close-path callback + * + * Sets close-path callback to the draw functions object. + * + * Since: EXPERIMENTAL + **/ +void +hb_draw_funcs_set_close_path_func (hb_draw_funcs_t *funcs, + hb_draw_close_path_func_t close_path) +{ + if (unlikely (hb_object_is_immutable (funcs))) return; + funcs->close_path = close_path; +} + +static void +_move_to_nil (hb_position_t to_x HB_UNUSED, hb_position_t to_y HB_UNUSED, void *user_data HB_UNUSED) {} + +static void +_line_to_nil (hb_position_t to_x HB_UNUSED, hb_position_t to_y HB_UNUSED, void *user_data HB_UNUSED) {} + +static void +_quadratic_to_nil (hb_position_t control_x HB_UNUSED, hb_position_t control_y HB_UNUSED, + hb_position_t to_x HB_UNUSED, hb_position_t to_y HB_UNUSED, + void *user_data HB_UNUSED) {} + +static void +_cubic_to_nil (hb_position_t control1_x HB_UNUSED, hb_position_t control1_y HB_UNUSED, + hb_position_t control2_x HB_UNUSED, hb_position_t control2_y HB_UNUSED, + hb_position_t to_x HB_UNUSED, hb_position_t to_y HB_UNUSED, + void *user_data HB_UNUSED) {} + +static void +_close_path_nil (void *user_data HB_UNUSED) {} + +/** + * hb_draw_funcs_create: + * + * Creates a new draw callbacks object. + * + * Since: EXPERIMENTAL + **/ +hb_draw_funcs_t * +hb_draw_funcs_create () +{ + hb_draw_funcs_t *funcs; + if (unlikely (!(funcs = hb_object_create ()))) + return const_cast (&Null (hb_draw_funcs_t)); + + funcs->move_to = (hb_draw_move_to_func_t) _move_to_nil; + funcs->line_to = (hb_draw_line_to_func_t) _line_to_nil; + funcs->quadratic_to = (hb_draw_quadratic_to_func_t) _quadratic_to_nil; + funcs->is_quadratic_to_set = false; + funcs->cubic_to = (hb_draw_cubic_to_func_t) _cubic_to_nil; + funcs->close_path = (hb_draw_close_path_func_t) _close_path_nil; + return funcs; +} + +/** + * hb_draw_funcs_reference: + * @funcs: draw functions + * + * Add to callbacks object refcount. + * + * Returns: The same object. 
+ * Since: EXPERIMENTAL + **/ +hb_draw_funcs_t * +hb_draw_funcs_reference (hb_draw_funcs_t *funcs) +{ + return hb_object_reference (funcs); +} + +/** + * hb_draw_funcs_destroy: + * @funcs: draw functions + * + * Decreases refcount of callbacks object and deletes the object if it reaches + * to zero. + * + * Since: EXPERIMENTAL + **/ +void +hb_draw_funcs_destroy (hb_draw_funcs_t *funcs) +{ + if (!hb_object_destroy (funcs)) return; + + free (funcs); +} + +/** + * hb_draw_funcs_make_immutable: + * @funcs: draw functions + * + * Makes funcs object immutable. + * + * Since: EXPERIMENTAL + **/ +void +hb_draw_funcs_make_immutable (hb_draw_funcs_t *funcs) +{ + if (hb_object_is_immutable (funcs)) + return; + + hb_object_make_immutable (funcs); +} + +/** + * hb_draw_funcs_is_immutable: + * @funcs: draw functions + * + * Checks whether funcs is immutable. + * + * Returns: If is immutable. + * Since: EXPERIMENTAL + **/ +hb_bool_t +hb_draw_funcs_is_immutable (hb_draw_funcs_t *funcs) +{ + return hb_object_is_immutable (funcs); +} + +/** + * hb_font_draw_glyph: + * @font: a font object + * @glyph: a glyph id + * @funcs: draw callbacks object + * @user_data: parameter you like be passed to the callbacks when are called + * + * Draw a glyph. + * + * Returns: Whether the font had the glyph and the operation completed successfully. + * Since: EXPERIMENTAL + **/ +hb_bool_t +hb_font_draw_glyph (hb_font_t *font, hb_codepoint_t glyph, + const hb_draw_funcs_t *funcs, + void *user_data) +{ + if (unlikely (funcs == &Null (hb_draw_funcs_t) || + glyph >= font->face->get_num_glyphs ())) + return false; + + draw_helper_t draw_helper (funcs, user_data); + if (font->face->table.glyf->get_path (font, glyph, draw_helper)) return true; +#ifndef HB_NO_CFF + if (font->face->table.cff1->get_path (font, glyph, draw_helper)) return true; + if (font->face->table.cff2->get_path (font, glyph, draw_helper)) return true; +#endif + + return false; +} + +#endif +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-draw.h b/src/java.desktop/share/native/libharfbuzz/hb-draw.h new file mode 100644 index 000000000000..d5eb6ecc65b1 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-draw.h @@ -0,0 +1,98 @@ +/* + * Copyright © 2019-2020 Ebrahim Byagowi + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + */ + +#ifndef HB_H_IN +#error "Include instead." 
+#endif + +#ifndef HB_DRAW_H +#define HB_DRAW_H + +#include "hb.h" + +HB_BEGIN_DECLS + +#ifdef HB_EXPERIMENTAL_API +typedef void (*hb_draw_move_to_func_t) (hb_position_t to_x, hb_position_t to_y, void *user_data); +typedef void (*hb_draw_line_to_func_t) (hb_position_t to_x, hb_position_t to_y, void *user_data); +typedef void (*hb_draw_quadratic_to_func_t) (hb_position_t control_x, hb_position_t control_y, + hb_position_t to_x, hb_position_t to_y, + void *user_data); +typedef void (*hb_draw_cubic_to_func_t) (hb_position_t control1_x, hb_position_t control1_y, + hb_position_t control2_x, hb_position_t control2_y, + hb_position_t to_x, hb_position_t to_y, + void *user_data); +typedef void (*hb_draw_close_path_func_t) (void *user_data); + +/** + * hb_draw_funcs_t: + * + * Glyph draw callbacks. + * + * _move_to, _line_to and _cubic_to calls are nessecary to be defined but we + * translate _quadratic_to calls to _cubic_to if the callback isn't defined. + * + * Since: EXPERIMENTAL + **/ +typedef struct hb_draw_funcs_t hb_draw_funcs_t; + +HB_EXTERN void +hb_draw_funcs_set_move_to_func (hb_draw_funcs_t *funcs, + hb_draw_move_to_func_t move_to); + +HB_EXTERN void +hb_draw_funcs_set_line_to_func (hb_draw_funcs_t *funcs, + hb_draw_line_to_func_t line_to); + +HB_EXTERN void +hb_draw_funcs_set_quadratic_to_func (hb_draw_funcs_t *funcs, + hb_draw_quadratic_to_func_t quadratic_to); + +HB_EXTERN void +hb_draw_funcs_set_cubic_to_func (hb_draw_funcs_t *funcs, + hb_draw_cubic_to_func_t cubic_to); + +HB_EXTERN void +hb_draw_funcs_set_close_path_func (hb_draw_funcs_t *funcs, + hb_draw_close_path_func_t close_path); + +HB_EXTERN hb_draw_funcs_t * +hb_draw_funcs_create (void); + +HB_EXTERN hb_draw_funcs_t * +hb_draw_funcs_reference (hb_draw_funcs_t *funcs); + +HB_EXTERN void +hb_draw_funcs_destroy (hb_draw_funcs_t *funcs); + +HB_EXTERN void +hb_draw_funcs_make_immutable (hb_draw_funcs_t *funcs); + +HB_EXTERN hb_bool_t +hb_draw_funcs_is_immutable (hb_draw_funcs_t *funcs); +#endif + +HB_END_DECLS + +#endif /* HB_DRAW_H */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-draw.hh b/src/java.desktop/share/native/libharfbuzz/hb-draw.hh new file mode 100644 index 000000000000..0e5101f9cd24 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-draw.hh @@ -0,0 +1,139 @@ +/* + * Copyright © 2020 Ebrahim Byagowi + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
+ */ + +#ifndef HB_DRAW_HH +#define HB_DRAW_HH + +#include "hb.hh" + +#ifdef HB_EXPERIMENTAL_API +struct hb_draw_funcs_t +{ + hb_object_header_t header; + + hb_draw_move_to_func_t move_to; + hb_draw_line_to_func_t line_to; + hb_draw_quadratic_to_func_t quadratic_to; + bool is_quadratic_to_set; + hb_draw_cubic_to_func_t cubic_to; + hb_draw_close_path_func_t close_path; +}; + +struct draw_helper_t +{ + draw_helper_t (const hb_draw_funcs_t *funcs_, void *user_data_) + { + funcs = funcs_; + user_data = user_data_; + path_open = false; + path_start_x = current_x = path_start_y = current_y = 0; + } + ~draw_helper_t () { end_path (); } + + void move_to (hb_position_t x, hb_position_t y) + { + if (path_open) end_path (); + current_x = path_start_x = x; + current_y = path_start_y = y; + } + + void line_to (hb_position_t x, hb_position_t y) + { + if (equal_to_current (x, y)) return; + if (!path_open) start_path (); + funcs->line_to (x, y, user_data); + current_x = x; + current_y = y; + } + + void + quadratic_to (hb_position_t control_x, hb_position_t control_y, + hb_position_t to_x, hb_position_t to_y) + { + if (equal_to_current (control_x, control_y) && equal_to_current (to_x, to_y)) + return; + if (!path_open) start_path (); + if (funcs->is_quadratic_to_set) + funcs->quadratic_to (control_x, control_y, to_x, to_y, user_data); + else + funcs->cubic_to (roundf ((current_x + 2.f * control_x) / 3.f), + roundf ((current_y + 2.f * control_y) / 3.f), + roundf ((to_x + 2.f * control_x) / 3.f), + roundf ((to_y + 2.f * control_y) / 3.f), + to_x, to_y, user_data); + current_x = to_x; + current_y = to_y; + } + + void + cubic_to (hb_position_t control1_x, hb_position_t control1_y, + hb_position_t control2_x, hb_position_t control2_y, + hb_position_t to_x, hb_position_t to_y) + { + if (equal_to_current (control1_x, control1_y) && + equal_to_current (control2_x, control2_y) && + equal_to_current (to_x, to_y)) + return; + if (!path_open) start_path (); + funcs->cubic_to (control1_x, control1_y, control2_x, control2_y, to_x, to_y, user_data); + current_x = to_x; + current_y = to_y; + } + + void end_path () + { + if (path_open) + { + if ((path_start_x != current_x) || (path_start_y != current_y)) + funcs->line_to (path_start_x, path_start_y, user_data); + funcs->close_path (user_data); + } + path_open = false; + path_start_x = current_x = path_start_y = current_y = 0; + } + + protected: + bool equal_to_current (hb_position_t x, hb_position_t y) + { return current_x == x && current_y == y; } + + void start_path () + { + if (path_open) end_path (); + path_open = true; + funcs->move_to (path_start_x, path_start_y, user_data); + } + + hb_position_t path_start_x; + hb_position_t path_start_y; + + hb_position_t current_x; + hb_position_t current_y; + + bool path_open; + const hb_draw_funcs_t *funcs; + void *user_data; +}; +#endif + +#endif /* HB_DRAW_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-dsalgs.hh b/src/java.desktop/share/native/libharfbuzz/hb-dsalgs.hh deleted file mode 100644 index f5f286883272..000000000000 --- a/src/java.desktop/share/native/libharfbuzz/hb-dsalgs.hh +++ /dev/null @@ -1,632 +0,0 @@ -/* - * Copyright © 2017 Google, Inc. - * - * This is part of HarfBuzz, a text shaping library. 
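[Editor's note] The hb-draw additions above (hb_draw_funcs_t, the callback setters, and hb_font_draw_glyph) are compiled only when both the library and the client are built with HB_EXPERIMENTAL_API defined. A minimal usage sketch follows; it is not part of this patch, and the path_sink_t type, the my_* callbacks and the output format are invented for illustration. As the hb-draw.h comment states, the move-to, line-to and cubic-to callbacks must be set; quadratic segments are converted to cubics when no quadratic-to callback is installed.

    /* Sketch only: requires a HarfBuzz build with HB_EXPERIMENTAL_API. */
    #define HB_EXPERIMENTAL_API
    #include <stdio.h>
    #include <hb.h>

    typedef struct { FILE *out; } path_sink_t;   /* illustrative sink */

    static void my_move_to (hb_position_t x, hb_position_t y, void *user_data)
    { fprintf (((path_sink_t *) user_data)->out, "M %d %d\n", x, y); }

    static void my_line_to (hb_position_t x, hb_position_t y, void *user_data)
    { fprintf (((path_sink_t *) user_data)->out, "L %d %d\n", x, y); }

    static void my_cubic_to (hb_position_t c1x, hb_position_t c1y,
                             hb_position_t c2x, hb_position_t c2y,
                             hb_position_t x, hb_position_t y, void *user_data)
    { fprintf (((path_sink_t *) user_data)->out, "C %d %d  %d %d  %d %d\n",
               c1x, c1y, c2x, c2y, x, y); }

    static void my_close_path (void *user_data)
    { fprintf (((path_sink_t *) user_data)->out, "Z\n"); }

    static void dump_glyph_outline (hb_font_t *font, hb_codepoint_t glyph)
    {
      path_sink_t sink = { stdout };
      hb_draw_funcs_t *dfuncs = hb_draw_funcs_create ();
      hb_draw_funcs_set_move_to_func (dfuncs, my_move_to);
      hb_draw_funcs_set_line_to_func (dfuncs, my_line_to);
      hb_draw_funcs_set_cubic_to_func (dfuncs, my_cubic_to);
      hb_draw_funcs_set_close_path_func (dfuncs, my_close_path);

      /* Returns false if the glyph id is out of range or none of
         glyf/CFF/CFF2 produced a path. */
      if (!hb_font_draw_glyph (font, glyph, dfuncs, &sink))
        fprintf (stderr, "no outline for glyph %u\n", glyph);

      hb_draw_funcs_destroy (dfuncs);
    }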
- * - * Permission is hereby granted, without written agreement and without - * license or royalty fees, to use, copy, modify, and distribute this - * software and its documentation for any purpose, provided that the - * above copyright notice and the following two paragraphs appear in - * all copies of this software. - * - * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR - * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES - * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN - * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH - * DAMAGE. - * - * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, - * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND - * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS - * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO - * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. - * - * Google Author(s): Behdad Esfahbod - */ - -#ifndef HB_DSALGS_HH -#define HB_DSALGS_HH - -#include "hb.hh" -#include "hb-null.hh" - - -/* Void! For when we need a expression-type of void. */ -typedef const struct _hb_void_t *hb_void_t; -#define HB_VOID ((const _hb_void_t *) nullptr) - - -/* - * Bithacks. - */ - -/* Return the number of 1 bits in v. */ -template -static inline HB_CONST_FUNC unsigned int -hb_popcount (T v) -{ -#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__) - if (sizeof (T) <= sizeof (unsigned int)) - return __builtin_popcount (v); - - if (sizeof (T) <= sizeof (unsigned long)) - return __builtin_popcountl (v); - - if (sizeof (T) <= sizeof (unsigned long long)) - return __builtin_popcountll (v); -#endif - - if (sizeof (T) <= 4) - { - /* "HACKMEM 169" */ - uint32_t y; - y = (v >> 1) &033333333333; - y = v - y - ((y >>1) & 033333333333); - return (((y + (y >> 3)) & 030707070707) % 077); - } - - if (sizeof (T) == 8) - { - unsigned int shift = 32; - return hb_popcount ((uint32_t) v) + hb_popcount ((uint32_t) (v >> shift)); - } - - if (sizeof (T) == 16) - { - unsigned int shift = 64; - return hb_popcount ((uint64_t) v) + hb_popcount ((uint64_t) (v >> shift)); - } - - assert (0); - return 0; /* Shut up stupid compiler. 
*/ -} - -/* Returns the number of bits needed to store number */ -template -static inline HB_CONST_FUNC unsigned int -hb_bit_storage (T v) -{ - if (unlikely (!v)) return 0; - -#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__) - if (sizeof (T) <= sizeof (unsigned int)) - return sizeof (unsigned int) * 8 - __builtin_clz (v); - - if (sizeof (T) <= sizeof (unsigned long)) - return sizeof (unsigned long) * 8 - __builtin_clzl (v); - - if (sizeof (T) <= sizeof (unsigned long long)) - return sizeof (unsigned long long) * 8 - __builtin_clzll (v); -#endif - -#if (defined(_MSC_VER) && _MSC_VER >= 1500) || defined(__MINGW32__) - if (sizeof (T) <= sizeof (unsigned int)) - { - unsigned long where; - _BitScanReverse (&where, v); - return 1 + where; - } -# if defined(_WIN64) - if (sizeof (T) <= 8) - { - unsigned long where; - _BitScanReverse64 (&where, v); - return 1 + where; - } -# endif -#endif - - if (sizeof (T) <= 4) - { - /* "bithacks" */ - const unsigned int b[] = {0x2, 0xC, 0xF0, 0xFF00, 0xFFFF0000}; - const unsigned int S[] = {1, 2, 4, 8, 16}; - unsigned int r = 0; - for (int i = 4; i >= 0; i--) - if (v & b[i]) - { - v >>= S[i]; - r |= S[i]; - } - return r + 1; - } - if (sizeof (T) <= 8) - { - /* "bithacks" */ - const uint64_t b[] = {0x2ULL, 0xCULL, 0xF0ULL, 0xFF00ULL, 0xFFFF0000ULL, 0xFFFFFFFF00000000ULL}; - const unsigned int S[] = {1, 2, 4, 8, 16, 32}; - unsigned int r = 0; - for (int i = 5; i >= 0; i--) - if (v & b[i]) - { - v >>= S[i]; - r |= S[i]; - } - return r + 1; - } - if (sizeof (T) == 16) - { - unsigned int shift = 64; - return (v >> shift) ? hb_bit_storage ((uint64_t) (v >> shift)) + shift : - hb_bit_storage ((uint64_t) v); - } - - assert (0); - return 0; /* Shut up stupid compiler. */ -} - -/* Returns the number of zero bits in the least significant side of v */ -template -static inline HB_CONST_FUNC unsigned int -hb_ctz (T v) -{ - if (unlikely (!v)) return 0; - -#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__) - if (sizeof (T) <= sizeof (unsigned int)) - return __builtin_ctz (v); - - if (sizeof (T) <= sizeof (unsigned long)) - return __builtin_ctzl (v); - - if (sizeof (T) <= sizeof (unsigned long long)) - return __builtin_ctzll (v); -#endif - -#if (defined(_MSC_VER) && _MSC_VER >= 1500) || defined(__MINGW32__) - if (sizeof (T) <= sizeof (unsigned int)) - { - unsigned long where; - _BitScanForward (&where, v); - return where; - } -# if defined(_WIN64) - if (sizeof (T) <= 8) - { - unsigned long where; - _BitScanForward64 (&where, v); - return where; - } -# endif -#endif - - if (sizeof (T) <= 4) - { - /* "bithacks" */ - unsigned int c = 32; - v &= - (int32_t) v; - if (v) c--; - if (v & 0x0000FFFF) c -= 16; - if (v & 0x00FF00FF) c -= 8; - if (v & 0x0F0F0F0F) c -= 4; - if (v & 0x33333333) c -= 2; - if (v & 0x55555555) c -= 1; - return c; - } - if (sizeof (T) <= 8) - { - /* "bithacks" */ - unsigned int c = 64; - v &= - (int64_t) (v); - if (v) c--; - if (v & 0x00000000FFFFFFFFULL) c -= 32; - if (v & 0x0000FFFF0000FFFFULL) c -= 16; - if (v & 0x00FF00FF00FF00FFULL) c -= 8; - if (v & 0x0F0F0F0F0F0F0F0FULL) c -= 4; - if (v & 0x3333333333333333ULL) c -= 2; - if (v & 0x5555555555555555ULL) c -= 1; - return c; - } - if (sizeof (T) == 16) - { - unsigned int shift = 64; - return (uint64_t) v ? hb_bit_storage ((uint64_t) v) : - hb_bit_storage ((uint64_t) (v >> shift)) + shift; - } - - assert (0); - return 0; /* Shut up stupid compiler. */ -} - - -/* - * Tiny stuff. 
- */ - -template -static inline T* hb_addressof (T& arg) -{ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wcast-align" - /* https://en.cppreference.com/w/cpp/memory/addressof */ - return reinterpret_cast( - &const_cast( - reinterpret_cast(arg))); -#pragma GCC diagnostic pop -} - -/* ASCII tag/character handling */ -static inline bool ISALPHA (unsigned char c) -{ return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); } -static inline bool ISALNUM (unsigned char c) -{ return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9'); } -static inline bool ISSPACE (unsigned char c) -{ return c == ' ' || c =='\f'|| c =='\n'|| c =='\r'|| c =='\t'|| c =='\v'; } -static inline unsigned char TOUPPER (unsigned char c) -{ return (c >= 'a' && c <= 'z') ? c - 'a' + 'A' : c; } -static inline unsigned char TOLOWER (unsigned char c) -{ return (c >= 'A' && c <= 'Z') ? c - 'A' + 'a' : c; } - -#undef MIN -template -static inline Type MIN (const Type &a, const Type &b) { return a < b ? a : b; } - -#undef MAX -template -static inline Type MAX (const Type &a, const Type &b) { return a > b ? a : b; } - -static inline unsigned int DIV_CEIL (const unsigned int a, unsigned int b) -{ return (a + (b - 1)) / b; } - - -#undef ARRAY_LENGTH -template -static inline unsigned int ARRAY_LENGTH (const Type (&)[n]) { return n; } -/* A const version, but does not detect erratically being called on pointers. */ -#define ARRAY_LENGTH_CONST(__array) ((signed int) (sizeof (__array) / sizeof (__array[0]))) - - -static inline int -hb_memcmp (const void *a, const void *b, unsigned int len) -{ - /* It's illegal to pass NULL to memcmp(), even if len is zero. - * So, wrap it. - * https://sourceware.org/bugzilla/show_bug.cgi?id=23878 */ - if (!len) return 0; - return memcmp (a, b, len); -} - -static inline bool -hb_unsigned_mul_overflows (unsigned int count, unsigned int size) -{ - return (size > 0) && (count >= ((unsigned int) -1) / size); -} - -static inline unsigned int -hb_ceil_to_4 (unsigned int v) -{ - return ((v - 1) | 3) + 1; -} - -template struct hb_is_signed; -/* https://github.com/harfbuzz/harfbuzz/issues/1535 */ -template <> struct hb_is_signed { enum { value = true }; }; -template <> struct hb_is_signed { enum { value = true }; }; -template <> struct hb_is_signed { enum { value = true }; }; -template <> struct hb_is_signed { enum { value = true }; }; -template <> struct hb_is_signed { enum { value = false }; }; -template <> struct hb_is_signed { enum { value = false }; }; -template <> struct hb_is_signed { enum { value = false }; }; -template <> struct hb_is_signed { enum { value = false }; }; - -template static inline bool -hb_in_range (T u, T lo, T hi) -{ - /* The sizeof() is here to force template instantiation. - * I'm sure there are better ways to do this but can't think of - * one right now. Declaring a variable won't work as HB_UNUSED - * is unusable on some platforms and unused types are less likely - * to generate a warning than unused variables. */ - static_assert (!hb_is_signed::value, ""); - - /* The casts below are important as if T is smaller than int, - * the subtract results will become a signed int! 
*/ - return (T)(u - lo) <= (T)(hi - lo); -} -template static inline bool -hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2) -{ - return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2); -} -template static inline bool -hb_in_ranges (T u, T lo1, T hi1, T lo2, T hi2, T lo3, T hi3) -{ - return hb_in_range (u, lo1, hi1) || hb_in_range (u, lo2, hi2) || hb_in_range (u, lo3, hi3); -} - - -/* - * Sort and search. - */ - -static inline void * -hb_bsearch (const void *key, const void *base, - size_t nmemb, size_t size, - int (*compar)(const void *_key, const void *_item)) -{ - int min = 0, max = (int) nmemb - 1; - while (min <= max) - { - int mid = (min + max) / 2; - const void *p = (const void *) (((const char *) base) + (mid * size)); - int c = compar (key, p); - if (c < 0) - max = mid - 1; - else if (c > 0) - min = mid + 1; - else - return (void *) p; - } - return nullptr; -} - -static inline void * -hb_bsearch_r (const void *key, const void *base, - size_t nmemb, size_t size, - int (*compar)(const void *_key, const void *_item, void *_arg), - void *arg) -{ - int min = 0, max = (int) nmemb - 1; - while (min <= max) - { - int mid = ((unsigned int) min + (unsigned int) max) / 2; - const void *p = (const void *) (((const char *) base) + (mid * size)); - int c = compar (key, p, arg); - if (c < 0) - max = mid - 1; - else if (c > 0) - min = mid + 1; - else - return (void *) p; - } - return nullptr; -} - - -/* From https://github.com/noporpoise/sort_r - * With following modifications: - * - * 10 November 2018: - * https://github.com/noporpoise/sort_r/issues/7 - */ - -/* Isaac Turner 29 April 2014 Public Domain */ - -/* - -hb_sort_r function to be exported. - -Parameters: - base is the array to be sorted - nel is the number of elements in the array - width is the size in bytes of each element of the array - compar is the comparison function - arg is a pointer to be passed to the comparison function - -void hb_sort_r(void *base, size_t nel, size_t width, - int (*compar)(const void *_a, const void *_b, void *_arg), - void *arg); -*/ - - -/* swap a, b iff a>b */ -/* __restrict is same as restrict but better support on old machines */ -static int sort_r_cmpswap(char *__restrict a, char *__restrict b, size_t w, - int (*compar)(const void *_a, const void *_b, - void *_arg), - void *arg) -{ - char tmp, *end = a+w; - if(compar(a, b, arg) > 0) { - for(; a < end; a++, b++) { tmp = *a; *a = *b; *b = tmp; } - return 1; - } - return 0; -} - -/* Note: quicksort is not stable, equivalent values may be swapped */ -static inline void sort_r_simple(void *base, size_t nel, size_t w, - int (*compar)(const void *_a, const void *_b, - void *_arg), - void *arg) -{ - char *b = (char *)base, *end = b + nel*w; - if(nel < 7) { - /* Insertion sort for arbitrarily small inputs */ - char *pi, *pj; - for(pi = b+w; pi < end; pi += w) { - for(pj = pi; pj > b && sort_r_cmpswap(pj-w,pj,w,compar,arg); pj -= w) {} - } - } - else - { - /* nel > 6; Quicksort */ - - /* Use median of first, middle and last items as pivot */ - char *x, *y, *xend, ch; - char *pl, *pm, *pr; - char *last = b+w*(nel-1), *tmp; - char *l[3]; - l[0] = b; - l[1] = b+w*(nel/2); - l[2] = last; - - if(compar(l[0],l[1],arg) > 0) { tmp=l[0]; l[0]=l[1]; l[1]=tmp; } - if(compar(l[1],l[2],arg) > 0) { - tmp=l[1]; l[1]=l[2]; l[2]=tmp; /* swap(l[1],l[2]) */ - if(compar(l[0],l[1],arg) > 0) { tmp=l[0]; l[0]=l[1]; l[1]=tmp; } - } - - /* swap l[id], l[2] to put pivot as last element */ - for(x = l[1], y = last, xend = x+w; x>1); - for(; pl < pm; pl += w) { - if(sort_r_cmpswap(pl, 
pr, w, compar, arg)) { - pr -= w; /* pivot now at pl */ - break; - } - } - pm = pl+((pr-pl)>>1); - for(; pm < pr; pr -= w) { - if(sort_r_cmpswap(pl, pr, w, compar, arg)) { - pl += w; /* pivot now at pr */ - break; - } - } - } - - sort_r_simple(b, (pl-b)/w, w, compar, arg); - sort_r_simple(pl+w, (end-(pl+w))/w, w, compar, arg); - } -} - -static inline void hb_sort_r(void *base, size_t nel, size_t width, - int (*compar)(const void *_a, const void *_b, void *_arg), - void *arg) -{ - sort_r_simple(base, nel, width, compar, arg); -} - - -template static inline void -hb_stable_sort (T *array, unsigned int len, int(*compar)(const T *, const T *), T2 *array2) -{ - for (unsigned int i = 1; i < len; i++) - { - unsigned int j = i; - while (j && compar (&array[j - 1], &array[i]) > 0) - j--; - if (i == j) - continue; - /* Move item i to occupy place for item j, shift what's in between. */ - { - T t = array[i]; - memmove (&array[j + 1], &array[j], (i - j) * sizeof (T)); - array[j] = t; - } - if (array2) - { - T2 t = array2[i]; - memmove (&array2[j + 1], &array2[j], (i - j) * sizeof (T2)); - array2[j] = t; - } - } -} - -template static inline void -hb_stable_sort (T *array, unsigned int len, int(*compar)(const T *, const T *)) -{ - hb_stable_sort (array, len, compar, (int *) nullptr); -} - -static inline hb_bool_t -hb_codepoint_parse (const char *s, unsigned int len, int base, hb_codepoint_t *out) -{ - /* Pain because we don't know whether s is nul-terminated. */ - char buf[64]; - len = MIN (ARRAY_LENGTH (buf) - 1, len); - strncpy (buf, s, len); - buf[len] = '\0'; - - char *end; - errno = 0; - unsigned long v = strtoul (buf, &end, base); - if (errno) return false; - if (*end) return false; - *out = v; - return true; -} - - -struct HbOpOr -{ - static constexpr bool passthru_left = true; - static constexpr bool passthru_right = true; - template static void process (T &o, const T &a, const T &b) { o = a | b; } -}; -struct HbOpAnd -{ - static constexpr bool passthru_left = false; - static constexpr bool passthru_right = false; - template static void process (T &o, const T &a, const T &b) { o = a & b; } -}; -struct HbOpMinus -{ - static constexpr bool passthru_left = true; - static constexpr bool passthru_right = false; - template static void process (T &o, const T &a, const T &b) { o = a & ~b; } -}; -struct HbOpXor -{ - static constexpr bool passthru_left = true; - static constexpr bool passthru_right = true; - template static void process (T &o, const T &a, const T &b) { o = a ^ b; } -}; - - -/* Compiler-assisted vectorization. */ - -/* Type behaving similar to vectorized vars defined using __attribute__((vector_size(...))), - * using vectorized operations if HB_VECTOR_SIZE is set to **bit** numbers (eg 128). - * Define that to 0 to disable. 
*/ -template -struct hb_vector_size_t -{ - elt_t& operator [] (unsigned int i) { return u.v[i]; } - const elt_t& operator [] (unsigned int i) const { return u.v[i]; } - - void clear (unsigned char v = 0) { memset (this, v, sizeof (*this)); } - - template - hb_vector_size_t process (const hb_vector_size_t &o) const - { - hb_vector_size_t r; -#if HB_VECTOR_SIZE - if (HB_VECTOR_SIZE && 0 == (byte_size * 8) % HB_VECTOR_SIZE) - for (unsigned int i = 0; i < ARRAY_LENGTH (u.vec); i++) - Op::process (r.u.vec[i], u.vec[i], o.u.vec[i]); - else -#endif - for (unsigned int i = 0; i < ARRAY_LENGTH (u.v); i++) - Op::process (r.u.v[i], u.v[i], o.u.v[i]); - return r; - } - hb_vector_size_t operator | (const hb_vector_size_t &o) const - { return process (o); } - hb_vector_size_t operator & (const hb_vector_size_t &o) const - { return process (o); } - hb_vector_size_t operator ^ (const hb_vector_size_t &o) const - { return process (o); } - hb_vector_size_t operator ~ () const - { - hb_vector_size_t r; -#if HB_VECTOR_SIZE && 0 - if (HB_VECTOR_SIZE && 0 == (byte_size * 8) % HB_VECTOR_SIZE) - for (unsigned int i = 0; i < ARRAY_LENGTH (u.vec); i++) - r.u.vec[i] = ~u.vec[i]; - else -#endif - for (unsigned int i = 0; i < ARRAY_LENGTH (u.v); i++) - r.u.v[i] = ~u.v[i]; - return r; - } - - private: - static_assert (byte_size / sizeof (elt_t) * sizeof (elt_t) == byte_size, ""); - union { - elt_t v[byte_size / sizeof (elt_t)]; -#if HB_VECTOR_SIZE - hb_vector_size_impl_t vec[byte_size / sizeof (hb_vector_size_impl_t)]; -#endif - } u; -}; - - -#endif /* HB_DSALGS_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-face.cc b/src/java.desktop/share/native/libharfbuzz/hb-face.cc index e3dc46922f53..6d96dcc38485 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-face.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-face.cc @@ -200,10 +200,15 @@ hb_face_create (hb_blob_t *blob, if (unlikely (!blob)) blob = hb_blob_get_empty (); - hb_face_for_data_closure_t *closure = _hb_face_for_data_closure_create (hb_sanitize_context_t ().sanitize_blob (hb_blob_reference (blob)), index); + blob = hb_sanitize_context_t ().sanitize_blob (hb_blob_reference (blob)); + + hb_face_for_data_closure_t *closure = _hb_face_for_data_closure_create (blob, index); if (unlikely (!closure)) + { + hb_blob_destroy (blob); return hb_face_get_empty (); + } face = hb_face_create_for_tables (_hb_face_for_data_reference_table, closure, @@ -226,7 +231,7 @@ hb_face_create (hb_blob_t *blob, hb_face_t * hb_face_get_empty () { - return const_cast (&Null(hb_face_t)); + return const_cast (&Null (hb_face_t)); } @@ -367,6 +372,9 @@ hb_blob_t * hb_face_reference_table (const hb_face_t *face, hb_tag_t tag) { + if (unlikely (tag == HB_TAG_NONE)) + return hb_blob_get_empty (); + return face->reference_table (tag); } @@ -531,6 +539,7 @@ hb_face_get_table_tags (const hb_face_t *face, */ +#ifndef HB_NO_FACE_COLLECT_UNICODES /** * hb_face_collect_unicodes: * @face: font face. @@ -542,9 +551,8 @@ void hb_face_collect_unicodes (hb_face_t *face, hb_set_t *out) { - face->table.cmap->collect_unicodes (out); + face->table.cmap->collect_unicodes (out, face->get_num_glyphs ()); } - /** * hb_face_collect_variation_selectors: * @face: font face. @@ -560,7 +568,6 @@ hb_face_collect_variation_selectors (hb_face_t *face, { face->table.cmap->collect_variation_selectors (out); } - /** * hb_face_collect_variation_unicodes: * @face: font face. 
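[Editor's note] The hb-face.cc hunks above make hb_face_create() own the sanitized blob reference (freeing it if closure creation fails) and cap hb_face_collect_unicodes() at the face's glyph count; the collect_* entry points are now compiled out when HarfBuzz is built with HB_NO_FACE_COLLECT_UNICODES. A small, self-contained sketch of the public API involved — not part of this patch; the file name is a placeholder and hb_blob_create_from_file()/hb_set_* are pre-existing HarfBuzz API:

    #include <stdio.h>
    #include <hb.h>

    int main (void)
    {
      /* hb_blob_create_from_file () returns the empty blob on failure. */
      hb_blob_t *blob = hb_blob_create_from_file ("font.ttf"); /* placeholder path */
      hb_face_t *face = hb_face_create (blob, 0 /* face index */);
      hb_blob_destroy (blob);               /* the face keeps its own reference */

      hb_set_t *unicodes = hb_set_create ();
      hb_face_collect_unicodes (face, unicodes);
      printf ("cmap covers %u code points\n", hb_set_get_population (unicodes));

      hb_set_destroy (unicodes);
      hb_face_destroy (face);
      return 0;
    }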
@@ -577,7 +584,7 @@ hb_face_collect_variation_unicodes (hb_face_t *face, { face->table.cmap->collect_variation_unicodes (variation_selector, out); } - +#endif /* @@ -714,7 +721,10 @@ hb_face_builder_add_table (hb_face_t *face, hb_tag_t tag, hb_blob_t *blob) return false; hb_face_builder_data_t *data = (hb_face_builder_data_t *) face->user_data; + hb_face_builder_data_t::table_entry_t *entry = data->tables.push (); + if (data->tables.in_error()) + return false; entry->tag = tag; entry->blob = hb_blob_reference (blob); diff --git a/src/java.desktop/share/native/libharfbuzz/hb-face.hh b/src/java.desktop/share/native/libharfbuzz/hb-face.hh index b2730eb0a42c..010eaba9e6f6 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-face.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-face.hh @@ -94,7 +94,7 @@ struct hb_face_t unsigned int get_num_glyphs () const { unsigned int ret = num_glyphs.get_relaxed (); - if (unlikely (ret == (unsigned int) -1)) + if (unlikely (ret == UINT_MAX)) return load_num_glyphs (); return ret; } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-fallback-shape.cc b/src/java.desktop/share/native/libharfbuzz/hb-fallback-shape.cc index df30871ecde2..d1d26b6a9f2f 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-fallback-shape.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-fallback-shape.cc @@ -26,6 +26,7 @@ #include "hb-shaper-impl.hh" +#ifndef HB_NO_FALLBACK_SHAPE /* * shaper face data @@ -120,3 +121,5 @@ _hb_fallback_shape (hb_shape_plan_t *shape_plan HB_UNUSED, return true; } + +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-font.cc b/src/java.desktop/share/native/libharfbuzz/hb-font.cc index 855c99833285..42bf3e948d23 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-font.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-font.cc @@ -33,6 +33,9 @@ #include "hb-ot.h" +#include "hb-ot-var-avar-table.hh" +#include "hb-ot-var-fvar-table.hh" + /** * SECTION:hb-font @@ -355,6 +358,7 @@ hb_font_get_glyph_h_kerning_default (hb_font_t *font, return font->parent_scale_x_distance (font->parent->get_glyph_h_kerning (left_glyph, right_glyph)); } +#ifndef HB_DISABLE_DEPRECATED static hb_position_t hb_font_get_glyph_v_kerning_nil (hb_font_t *font HB_UNUSED, void *font_data HB_UNUSED, @@ -373,6 +377,7 @@ hb_font_get_glyph_v_kerning_default (hb_font_t *font, { return font->parent_scale_y_distance (font->parent->get_glyph_v_kerning (top_glyph, bottom_glyph)); } +#endif static hb_bool_t hb_font_get_glyph_extents_nil (hb_font_t *font HB_UNUSED, @@ -672,7 +677,8 @@ hb_font_funcs_set_##name##_func (hb_font_funcs_t *ffuncs, \ void *user_data, \ hb_destroy_func_t destroy) \ { \ - if (hb_object_is_immutable (ffuncs)) { \ + if (hb_object_is_immutable (ffuncs)) \ + { \ if (destroy) \ destroy (user_data); \ return; \ @@ -789,6 +795,29 @@ hb_font_get_nominal_glyph (hb_font_t *font, return font->get_nominal_glyph (unicode, glyph); } +/** + * hb_font_get_nominal_glyphs: + * @font: a font. + * + * + * + * Return value: + * + * Since: 2.6.3 + **/ +unsigned int +hb_font_get_nominal_glyphs (hb_font_t *font, + unsigned int count, + const hb_codepoint_t *first_unicode, + unsigned int unicode_stride, + hb_codepoint_t *first_glyph, + unsigned int glyph_stride) +{ + return font->get_nominal_glyphs (count, + first_unicode, unicode_stride, + first_glyph, glyph_stride); +} + /** * hb_font_get_variation_glyph: * @font: a font. 
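[Editor's note] hb_font_get_nominal_glyphs(), added in the hunk above, batches nominal cmap lookups with caller-controlled byte strides. A usage sketch, not part of this patch; it assumes the return value counts the leading code points that had a nominal glyph, matching the font->get_nominal_glyphs() call it forwards to:

    #include <stdio.h>
    #include <hb.h>

    /* Map a small array of code points to glyph ids in one call.  The stride
       parameters are in bytes, so tightly packed arrays pass sizeof the element. */
    static void map_codepoints (hb_font_t *font)
    {
      hb_codepoint_t unicodes[3] = { 'H', 'B', 0x00E9 /* e-acute */ };
      hb_codepoint_t glyphs[3]   = { 0, 0, 0 };

      unsigned int mapped = hb_font_get_nominal_glyphs (font, 3,
                                                        unicodes, sizeof (unicodes[0]),
                                                        glyphs,   sizeof (glyphs[0]));
      for (unsigned int i = 0; i < mapped; i++)
        printf ("U+%04X -> glyph %u\n", unicodes[i], glyphs[i]);
    }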
@@ -936,7 +965,6 @@ hb_font_get_glyph_v_origin (hb_font_t *font, * Return value: * * Since: 0.9.2 - * Deprecated: 2.0.0 **/ hb_position_t hb_font_get_glyph_h_kerning (hb_font_t *font, @@ -945,6 +973,7 @@ hb_font_get_glyph_h_kerning (hb_font_t *font, return font->get_glyph_h_kerning (left_glyph, right_glyph); } +#ifndef HB_DISABLE_DEPRECATED /** * hb_font_get_glyph_v_kerning: * @font: a font. @@ -964,6 +993,7 @@ hb_font_get_glyph_v_kerning (hb_font_t *font, { return font->get_glyph_v_kerning (top_glyph, bottom_glyph); } +#endif /** * hb_font_get_glyph_extents: @@ -1185,7 +1215,6 @@ hb_font_subtract_glyph_origin_for_direction (hb_font_t *font, * * * Since: 0.9.2 - * Deprecated: 2.0.0 **/ void hb_font_get_glyph_kerning_for_direction (hb_font_t *font, @@ -1298,6 +1327,8 @@ DEFINE_NULL_INSTANCE (hb_font_t) = 1000, /* x_scale */ 1000, /* y_scale */ + 1<<16, /* x_mult */ + 1<<16, /* y_mult */ 0, /* x_ppem */ 0, /* y_ppem */ @@ -1305,6 +1336,7 @@ DEFINE_NULL_INSTANCE (hb_font_t) = 0, /* num_coords */ nullptr, /* coords */ + nullptr, /* design_coords */ const_cast (&_hb_Null_hb_font_funcs_t), @@ -1328,6 +1360,7 @@ _hb_font_create (hb_face_t *face) font->klass = hb_font_funcs_get_empty (); font->data.init0 (font); font->x_scale = font->y_scale = hb_face_get_upem (face); + font->x_mult = font->y_mult = 1 << 16; return font; } @@ -1347,12 +1380,28 @@ hb_font_create (hb_face_t *face) { hb_font_t *font = _hb_font_create (face); +#ifndef HB_NO_OT_FONT /* Install our in-house, very lightweight, funcs. */ hb_ot_font_set_funcs (font); +#endif return font; } +static void +_hb_font_adopt_var_coords (hb_font_t *font, + int *coords, /* 2.14 normalized */ + float *design_coords, + unsigned int coords_length) +{ + free (font->coords); + free (font->design_coords); + + font->coords = coords; + font->design_coords = design_coords; + font->num_coords = coords_length; +} + /** * hb_font_create_sub_font: * @parent: parent font. 
@@ -1378,21 +1427,27 @@ hb_font_create_sub_font (hb_font_t *parent) font->x_scale = parent->x_scale; font->y_scale = parent->y_scale; + font->mults_changed (); font->x_ppem = parent->x_ppem; font->y_ppem = parent->y_ppem; font->ptem = parent->ptem; - font->num_coords = parent->num_coords; - if (!font->num_coords) - font->coords = nullptr; - else + unsigned int num_coords = parent->num_coords; + if (num_coords) { - unsigned int size = parent->num_coords * sizeof (parent->coords[0]); - font->coords = (int *) malloc (size); - if (unlikely (!font->coords)) - font->num_coords = 0; + int *coords = (int *) calloc (num_coords, sizeof (parent->coords[0])); + float *design_coords = (float *) calloc (num_coords, sizeof (parent->design_coords[0])); + if (likely (coords && design_coords)) + { + memcpy (coords, parent->coords, num_coords * sizeof (parent->coords[0])); + memcpy (design_coords, parent->design_coords, num_coords * sizeof (parent->design_coords[0])); + _hb_font_adopt_var_coords (font, coords, design_coords, num_coords); + } else - memcpy (font->coords, parent->coords, size); + { + free (coords); + free (design_coords); + } } return font; @@ -1410,7 +1465,7 @@ hb_font_create_sub_font (hb_font_t *parent) hb_font_t * hb_font_get_empty () { - return const_cast (&Null(hb_font_t)); + return const_cast (&Null (hb_font_t)); } /** @@ -1452,6 +1507,7 @@ hb_font_destroy (hb_font_t *font) hb_font_funcs_destroy (font->klass); free (font->coords); + free (font->design_coords); free (font); } @@ -1597,7 +1653,9 @@ hb_font_set_face (hb_font_t *font, hb_face_t *old = font->face; + hb_face_make_immutable (face); font->face = hb_face_reference (face); + font->mults_changed (); hb_face_destroy (old); } @@ -1707,6 +1765,7 @@ hb_font_set_scale (hb_font_t *font, font->x_scale = x_scale; font->y_scale = y_scale; + font->mults_changed (); } /** @@ -1805,21 +1864,11 @@ hb_font_get_ptem (hb_font_t *font) return font->ptem; } +#ifndef HB_NO_VAR /* * Variations */ -static void -_hb_font_adopt_var_coords_normalized (hb_font_t *font, - int *coords, /* 2.14 normalized */ - unsigned int coords_length) -{ - free (font->coords); - - font->coords = coords; - font->num_coords = coords_length; -} - /** * hb_font_set_variations: * @@ -1842,13 +1891,30 @@ hb_font_set_variations (hb_font_t *font, unsigned int coords_length = hb_ot_var_get_axis_count (font->face); int *normalized = coords_length ? (int *) calloc (coords_length, sizeof (int)) : nullptr; - if (unlikely (coords_length && !normalized)) + float *design_coords = coords_length ? 
(float *) calloc (coords_length, sizeof (float)) : nullptr; + + if (unlikely (coords_length && !(normalized && design_coords))) + { + free (normalized); + free (design_coords); return; + } - hb_ot_var_normalize_variations (font->face, - variations, variations_length, - normalized, coords_length); - _hb_font_adopt_var_coords_normalized (font, normalized, coords_length); + const OT::fvar &fvar = *font->face->table.fvar; + for (unsigned int i = 0; i < variations_length; i++) + { + hb_ot_var_axis_info_t info; + if (hb_ot_var_find_axis_info (font->face, variations[i].tag, &info) && + info.axis_index < coords_length) + { + float v = variations[i].value; + design_coords[info.axis_index] = v; + normalized[info.axis_index] = fvar.normalize_axis_value (info.axis_index, v); + } + } + font->face->table.avar->map_coords (normalized, coords_length); + + _hb_font_adopt_var_coords (font, normalized, design_coords, coords_length); } /** @@ -1865,11 +1931,47 @@ hb_font_set_var_coords_design (hb_font_t *font, return; int *normalized = coords_length ? (int *) calloc (coords_length, sizeof (int)) : nullptr; - if (unlikely (coords_length && !normalized)) + float *design_coords = coords_length ? (float *) calloc (coords_length, sizeof (float)) : nullptr; + + if (unlikely (coords_length && !(normalized && design_coords))) + { + free (normalized); + free (design_coords); return; + } + + if (coords_length) + memcpy (design_coords, coords, coords_length * sizeof (font->design_coords[0])); hb_ot_var_normalize_coords (font->face, coords_length, coords, normalized); - _hb_font_adopt_var_coords_normalized (font, normalized, coords_length); + _hb_font_adopt_var_coords (font, normalized, design_coords, coords_length); +} + +/** + * hb_font_set_var_named_instance: + * @font: a font. + * @instance_index: named instance index. + * + * Sets design coords of a font from a named instance index. + * + * Since: 2.6.0 + */ +void +hb_font_set_var_named_instance (hb_font_t *font, + unsigned instance_index) +{ + if (hb_object_is_immutable (font)) + return; + + unsigned int coords_length = hb_ot_var_named_instance_get_design_coords (font->face, instance_index, nullptr, nullptr); + + float *coords = coords_length ? (float *) calloc (coords_length, sizeof (float)) : nullptr; + if (unlikely (coords_length && !coords)) + return; + + hb_ot_var_named_instance_get_design_coords (font->face, instance_index, &coords_length, coords); + hb_font_set_var_coords_design (font, coords, coords_length); + free (coords); } /** @@ -1886,13 +1988,30 @@ hb_font_set_var_coords_normalized (hb_font_t *font, return; int *copy = coords_length ? (int *) calloc (coords_length, sizeof (coords[0])) : nullptr; - if (unlikely (coords_length && !copy)) + int *unmapped = coords_length ? (int *) calloc (coords_length, sizeof (coords[0])) : nullptr; + float *design_coords = coords_length ? 
(float *) calloc (coords_length, sizeof (design_coords[0])) : nullptr; + + if (unlikely (coords_length && !(copy && unmapped && design_coords))) + { + free (copy); + free (unmapped); + free (design_coords); return; + } if (coords_length) + { memcpy (copy, coords, coords_length * sizeof (coords[0])); + memcpy (unmapped, coords, coords_length * sizeof (coords[0])); + } + + /* Best effort design coords simulation */ + font->face->table.avar->unmap_coords (unmapped, coords_length); + for (unsigned int i = 0; i < coords_length; ++i) + design_coords[i] = font->face->table.fvar->unnormalize_axis_value (i, unmapped[i]); + free (unmapped); - _hb_font_adopt_var_coords_normalized (font, copy, coords_length); + _hb_font_adopt_var_coords (font, copy, design_coords, coords_length); } /** @@ -1913,7 +2032,28 @@ hb_font_get_var_coords_normalized (hb_font_t *font, return font->coords; } +#ifdef HB_EXPERIMENTAL_API +/** + * hb_font_get_var_coords_design: + * + * Return value is valid as long as variation coordinates of the font + * are not modified. + * + * Since: EXPERIMENTAL + */ +const float * +hb_font_get_var_coords_design (hb_font_t *font, + unsigned int *length) +{ + if (length) + *length = font->num_coords; + + return font->design_coords; +} +#endif +#endif +#ifndef HB_DISABLE_DEPRECATED /* * Deprecated get_glyph_func(): */ @@ -2015,6 +2155,13 @@ hb_font_funcs_set_glyph_func (hb_font_funcs_t *ffuncs, hb_font_get_glyph_func_t func, void *user_data, hb_destroy_func_t destroy) { + if (hb_object_is_immutable (ffuncs)) + { + if (destroy) + destroy (user_data); + return; + } + hb_font_get_glyph_trampoline_t *trampoline; trampoline = trampoline_create (func, user_data, destroy); @@ -2036,3 +2183,4 @@ hb_font_funcs_set_glyph_func (hb_font_funcs_t *ffuncs, trampoline, trampoline_destroy); } +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-font.h b/src/java.desktop/share/native/libharfbuzz/hb-font.h index 85893f97c63f..217bf33f4406 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-font.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-font.h @@ -33,6 +33,7 @@ #include "hb-common.h" #include "hb-face.h" +#include "hb-draw.h" HB_BEGIN_DECLS @@ -157,6 +158,11 @@ typedef hb_bool_t (*hb_font_get_glyph_origin_func_t) (hb_font_t *font, void *fon typedef hb_font_get_glyph_origin_func_t hb_font_get_glyph_h_origin_func_t; typedef hb_font_get_glyph_origin_func_t hb_font_get_glyph_v_origin_func_t; +typedef hb_position_t (*hb_font_get_glyph_kerning_func_t) (hb_font_t *font, void *font_data, + hb_codepoint_t first_glyph, hb_codepoint_t second_glyph, + void *user_data); +typedef hb_font_get_glyph_kerning_func_t hb_font_get_glyph_h_kerning_func_t; + typedef hb_bool_t (*hb_font_get_glyph_extents_func_t) (hb_font_t *font, void *font_data, hb_codepoint_t glyph, @@ -356,6 +362,22 @@ hb_font_funcs_set_glyph_v_origin_func (hb_font_funcs_t *ffuncs, hb_font_get_glyph_v_origin_func_t func, void *user_data, hb_destroy_func_t destroy); +/** + * hb_font_funcs_set_glyph_h_kerning_func: + * @ffuncs: font functions. + * @func: (closure user_data) (destroy destroy) (scope notified): + * @user_data: + * @destroy: + * + * + * + * Since: 0.9.2 + **/ +HB_EXTERN void +hb_font_funcs_set_glyph_h_kerning_func (hb_font_funcs_t *ffuncs, + hb_font_get_glyph_h_kerning_func_t func, + void *user_data, hb_destroy_func_t destroy); + /** * hb_font_funcs_set_glyph_extents_func: * @ffuncs: font functions. 
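[Editor's note] The hb-font.cc hunks above rework variation handling: the font now keeps both normalized (2.14) and design coordinates, hb_font_set_variations() normalizes through fvar/avar directly, and hb_font_set_var_named_instance() is new. A hedged sketch of how a client drives this — not part of the patch; hb_variation_t is the existing public struct, and the 600 weight value and instance index 0 are arbitrary examples:

    #include <stdio.h>
    #include <hb.h>

    static void set_weight_axis (hb_face_t *face)
    {
      hb_font_t *font = hb_font_create (face);

      /* Set an axis by tag/value; normalization goes through fvar + avar and
         the design-space value is now remembered on the font as well. */
      hb_variation_t wght = { HB_TAG ('w','g','h','t'), 600.0f };
      hb_font_set_variations (font, &wght, 1);

      /* Alternatively, pick a named 'fvar' instance by index. */
      /* hb_font_set_var_named_instance (font, 0); */

      unsigned int len = 0;
      const int *norm = hb_font_get_var_coords_normalized (font, &len);
      for (unsigned int i = 0; i < len; i++)
        printf ("axis %u -> %.3f (2.14 fixed)\n", i, norm[i] / 16384.0);

      hb_font_destroy (font);
    }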
@@ -438,6 +460,14 @@ hb_font_get_variation_glyph (hb_font_t *font, hb_codepoint_t unicode, hb_codepoint_t variation_selector, hb_codepoint_t *glyph); +HB_EXTERN unsigned int +hb_font_get_nominal_glyphs (hb_font_t *font, + unsigned int count, + const hb_codepoint_t *first_unicode, + unsigned int unicode_stride, + hb_codepoint_t *first_glyph, + unsigned int glyph_stride); + HB_EXTERN hb_position_t hb_font_get_glyph_h_advance (hb_font_t *font, hb_codepoint_t glyph); @@ -469,6 +499,10 @@ hb_font_get_glyph_v_origin (hb_font_t *font, hb_codepoint_t glyph, hb_position_t *x, hb_position_t *y); +HB_EXTERN hb_position_t +hb_font_get_glyph_h_kerning (hb_font_t *font, + hb_codepoint_t left_glyph, hb_codepoint_t right_glyph); + HB_EXTERN hb_bool_t hb_font_get_glyph_extents (hb_font_t *font, hb_codepoint_t glyph, @@ -531,6 +565,12 @@ hb_font_subtract_glyph_origin_for_direction (hb_font_t *font, hb_direction_t direction, hb_position_t *x, hb_position_t *y); +HB_EXTERN void +hb_font_get_glyph_kerning_for_direction (hb_font_t *font, + hb_codepoint_t first_glyph, hb_codepoint_t second_glyph, + hb_direction_t direction, + hb_position_t *x, hb_position_t *y); + HB_EXTERN hb_bool_t hb_font_get_glyph_extents_for_origin (hb_font_t *font, hb_codepoint_t glyph, @@ -665,6 +705,12 @@ hb_font_set_var_coords_design (hb_font_t *font, const float *coords, unsigned int coords_length); +#ifdef HB_EXPERIMENTAL_API +HB_EXTERN const float * +hb_font_get_var_coords_design (hb_font_t *font, + unsigned int *length); +#endif + HB_EXTERN void hb_font_set_var_coords_normalized (hb_font_t *font, const int *coords, /* 2.14 normalized */ @@ -674,6 +720,16 @@ HB_EXTERN const int * hb_font_get_var_coords_normalized (hb_font_t *font, unsigned int *length); +HB_EXTERN void +hb_font_set_var_named_instance (hb_font_t *font, + unsigned instance_index); + +#ifdef HB_EXPERIMENTAL_API +HB_EXTERN hb_bool_t +hb_font_draw_glyph (hb_font_t *font, hb_codepoint_t glyph, + const hb_draw_funcs_t *funcs, void *user_data); +#endif + HB_END_DECLS #endif /* HB_FONT_H */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-font.hh b/src/java.desktop/share/native/libharfbuzz/hb-font.hh index dd33d2f7d7cd..2fa9cea73889 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-font.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-font.hh @@ -52,7 +52,7 @@ HB_FONT_FUNC_IMPLEMENT (glyph_h_origin) \ HB_FONT_FUNC_IMPLEMENT (glyph_v_origin) \ HB_FONT_FUNC_IMPLEMENT (glyph_h_kerning) \ - HB_FONT_FUNC_IMPLEMENT (glyph_v_kerning) \ + HB_IF_NOT_DEPRECATED (HB_FONT_FUNC_IMPLEMENT (glyph_v_kerning)) \ HB_FONT_FUNC_IMPLEMENT (glyph_extents) \ HB_FONT_FUNC_IMPLEMENT (glyph_contour_point) \ HB_FONT_FUNC_IMPLEMENT (glyph_name) \ @@ -107,8 +107,10 @@ struct hb_font_t hb_font_t *parent; hb_face_t *face; - int x_scale; - int y_scale; + int32_t x_scale; + int32_t y_scale; + int64_t x_mult; + int64_t y_mult; unsigned int x_ppem; unsigned int y_ppem; @@ -118,6 +120,7 @@ struct hb_font_t /* Font variation coordinates. */ unsigned int num_coords; int *coords; + float *design_coords; hb_font_funcs_t *klass; void *user_data; @@ -127,16 +130,16 @@ struct hb_font_t /* Convert from font-space to user-space */ - int dir_scale (hb_direction_t direction) - { return HB_DIRECTION_IS_VERTICAL(direction) ? 
y_scale : x_scale; } - hb_position_t em_scale_x (int16_t v) { return em_scale (v, x_scale); } - hb_position_t em_scale_y (int16_t v) { return em_scale (v, y_scale); } - hb_position_t em_scalef_x (float v) { return em_scalef (v, this->x_scale); } - hb_position_t em_scalef_y (float v) { return em_scalef (v, this->y_scale); } + int64_t dir_mult (hb_direction_t direction) + { return HB_DIRECTION_IS_VERTICAL(direction) ? y_mult : x_mult; } + hb_position_t em_scale_x (int16_t v) { return em_mult (v, x_mult); } + hb_position_t em_scale_y (int16_t v) { return em_mult (v, y_mult); } + hb_position_t em_scalef_x (float v) { return em_scalef (v, x_scale); } + hb_position_t em_scalef_y (float v) { return em_scalef (v, y_scale); } float em_fscale_x (int16_t v) { return em_fscale (v, x_scale); } float em_fscale_y (int16_t v) { return em_fscale (v, y_scale); } hb_position_t em_scale_dir (int16_t v, hb_direction_t direction) - { return em_scale (v, dir_scale (direction)); } + { return em_mult (v, dir_mult (direction)); } /* Convert from parent-font user-space to our user-space */ hb_position_t parent_scale_x_distance (hb_position_t v) @@ -214,7 +217,7 @@ struct hb_font_t } hb_bool_t get_nominal_glyph (hb_codepoint_t unicode, - hb_codepoint_t *glyph) + hb_codepoint_t *glyph) { *glyph = 0; return klass->get.f.nominal_glyph (this, user_data, @@ -284,7 +287,7 @@ struct hb_font_t } hb_bool_t get_glyph_h_origin (hb_codepoint_t glyph, - hb_position_t *x, hb_position_t *y) + hb_position_t *x, hb_position_t *y) { *x = *y = 0; return klass->get.f.glyph_h_origin (this, user_data, @@ -304,21 +307,29 @@ struct hb_font_t hb_position_t get_glyph_h_kerning (hb_codepoint_t left_glyph, hb_codepoint_t right_glyph) { +#ifdef HB_DISABLE_DEPRECATED + return 0; +#else return klass->get.f.glyph_h_kerning (this, user_data, left_glyph, right_glyph, klass->user_data.glyph_h_kerning); +#endif } hb_position_t get_glyph_v_kerning (hb_codepoint_t top_glyph, hb_codepoint_t bottom_glyph) { +#ifdef HB_DISABLE_DEPRECATED + return 0; +#else return klass->get.f.glyph_v_kerning (this, user_data, top_glyph, bottom_glyph, klass->user_data.glyph_v_kerning); +#endif } hb_bool_t get_glyph_extents (hb_codepoint_t glyph, - hb_glyph_extents_t *extents) + hb_glyph_extents_t *extents) { memset (extents, 0, sizeof (*extents)); return klass->get.f.glyph_extents (this, user_data, @@ -328,7 +339,7 @@ struct hb_font_t } hb_bool_t get_glyph_contour_point (hb_codepoint_t glyph, unsigned int point_index, - hb_position_t *x, hb_position_t *y) + hb_position_t *x, hb_position_t *y) { *x = *y = 0; return klass->get.f.glyph_contour_point (this, user_data, @@ -599,15 +610,19 @@ struct hb_font_t return false; } - hb_position_t em_scale (int16_t v, int scale) + void mults_changed () + { + signed upem = face->get_upem (); + x_mult = ((int64_t) x_scale << 16) / upem; + y_mult = ((int64_t) y_scale << 16) / upem; + } + + hb_position_t em_mult (int16_t v, int64_t mult) { - int upem = face->get_upem (); - int64_t scaled = v * (int64_t) scale; - scaled += scaled >= 0 ? upem/2 : -upem/2; /* Round. 
*/ - return (hb_position_t) (scaled / upem); + return (hb_position_t) ((v * mult) >> 16); } hb_position_t em_scalef (float v, int scale) - { return (hb_position_t) round (v * scale / face->get_upem ()); } + { return (hb_position_t) roundf (v * scale / face->get_upem ()); } float em_fscale (int16_t v, int scale) { return (float) v * scale / face->get_upem (); } }; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ft.cc b/src/java.desktop/share/native/libharfbuzz/hb-ft.cc index d73c4aaac737..6b7b9e087711 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ft.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-ft.cc @@ -29,6 +29,8 @@ #include "hb.hh" +#ifdef HAVE_FREETYPE + #include "hb-ft.h" #include "hb-font.hh" @@ -46,8 +48,13 @@ * @short_description: FreeType integration * @include: hb-ft.h * - * Functions for using HarfBuzz with the FreeType library to provide face and + * Functions for using HarfBuzz with the FreeType library. + * + * HarfBuzz supports using FreeType to provide face and * font data. + * + * Note that FreeType is not thread-safe, therefore these + * functions are not thread-safe either. **/ @@ -85,9 +92,7 @@ static hb_ft_font_t * _hb_ft_font_create (FT_Face ft_face, bool symbol, bool unref) { hb_ft_font_t *ft_font = (hb_ft_font_t *) calloc (1, sizeof (hb_ft_font_t)); - - if (unlikely (!ft_font)) - return nullptr; + if (unlikely (!ft_font)) return nullptr; ft_font->lock.init (); ft_font->ft_face = ft_face; @@ -96,7 +101,7 @@ _hb_ft_font_create (FT_Face ft_face, bool symbol, bool unref) ft_font->load_flags = FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING; - ft_font->cached_x_scale.set (0); + ft_font->cached_x_scale.set_relaxed (0); ft_font->advance_cache.init (); return ft_font; @@ -125,10 +130,13 @@ _hb_ft_font_destroy (void *data) /** * hb_ft_font_set_load_flags: - * @font: - * @load_flags: + * @font: #hb_font_t to work upon + * @load_flags: The FreeType load flags to set * + * Sets the FT_Load_Glyph load flags for the specified #hb_font_t. * + * For more information, see + * https://www.freetype.org/freetype2/docs/reference/ft2-base_interface.html#ft_load_xxx * * Since: 1.0.5 **/ @@ -138,7 +146,7 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags) if (hb_object_is_immutable (font)) return; - if (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy) + if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)) return; hb_ft_font_t *ft_font = (hb_ft_font_t *) font->user_data; @@ -148,17 +156,21 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags) /** * hb_ft_font_get_load_flags: - * @font: + * @font: #hb_font_t to work upon * + * Fetches the FT_Load_Glyph load flags of the specified #hb_font_t. * + * For more information, see + * https://www.freetype.org/freetype2/docs/reference/ft2-base_interface.html#ft_load_xxx + * + * Return value: FT_Load_Glyph flags found * - * Return value: * Since: 1.0.5 **/ int hb_ft_font_get_load_flags (hb_font_t *font) { - if (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy) + if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)) return 0; const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data; @@ -166,17 +178,69 @@ hb_ft_font_get_load_flags (hb_font_t *font) return ft_font->load_flags; } +/** + * hb_ft_get_face: + * @font: #hb_font_t to work upon + * + * Fetches the FT_Face associated with the specified #hb_font_t + * font object. 
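[Editor's note] The hb-font.hh hunk above replaces the per-call "v * scale / upem" division with precomputed 16.16 multipliers (x_mult/y_mult, refreshed by mults_changed()), so em_scale becomes a multiply and a shift. A standalone arithmetic sketch of that trade-off — not HarfBuzz code; the scale and upem values are arbitrary — note the fast path truncates where the old path rounded, so results may differ by a unit:

    #include <stdio.h>
    #include <stdint.h>

    int main (void)
    {
      int32_t scale = 2048;        /* e.g. x_scale set via hb_font_set_scale () */
      int32_t upem  = 1000;        /* face units per em */
      int64_t mult  = ((int64_t) scale << 16) / upem;           /* like x_mult */

      int16_t v = 123;             /* a value in font units */
      int32_t fast  = (int32_t) ((v * mult) >> 16);             /* like em_mult () */
      int32_t exact = (int32_t) (((int64_t) v * scale + upem / 2) / upem); /* old path */

      printf ("fast=%d exact=%d\n", fast, exact);
      return 0;
    }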
+ * + * Return value: the FT_Face found + * + * Since: 0.9.2 + **/ FT_Face hb_ft_font_get_face (hb_font_t *font) { - if (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy) + if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)) + return nullptr; + + const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data; + + return ft_font->ft_face; +} + +/** + * hb_ft_font_lock_face: + * @font: + * + * + * + * Return value: + * Since: 2.6.5 + **/ +FT_Face +hb_ft_font_lock_face (hb_font_t *font) +{ + if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)) return nullptr; const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data; + ft_font->lock.lock (); + return ft_font->ft_face; } +/** + * hb_ft_font_unlock_face: + * @font: + * + * + * + * Return value: + * Since: 2.6.5 + **/ +void +hb_ft_font_unlock_face (hb_font_t *font) +{ + if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)) + return; + + const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data; + + ft_font->lock.unlock (); +} static hb_bool_t @@ -346,6 +410,25 @@ hb_ft_get_glyph_v_origin (hb_font_t *font, return true; } +#ifndef HB_NO_OT_SHAPE_FALLBACK +static hb_position_t +hb_ft_get_glyph_h_kerning (hb_font_t *font, + void *font_data, + hb_codepoint_t left_glyph, + hb_codepoint_t right_glyph, + void *user_data HB_UNUSED) +{ + const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font_data; + FT_Vector kerningv; + + FT_Kerning_Mode mode = font->x_ppem ? FT_KERNING_DEFAULT : FT_KERNING_UNFITTED; + if (FT_Get_Kerning (ft_font->ft_face, left_glyph, right_glyph, mode, &kerningv)) + return 0; + + return kerningv.x; +} +#endif + static hb_bool_t hb_ft_get_glyph_extents (hb_font_t *font, void *font_data, @@ -439,7 +522,7 @@ hb_ft_get_glyph_from_name (hb_font_t *font HB_UNUSED, else { /* Make a nul-terminated version. */ char buf[128]; - len = MIN (len, (int) sizeof (buf) - 1); + len = hb_min (len, (int) sizeof (buf) - 1); strncpy (buf, name, len); buf[len] = '\0'; *glyph = FT_Get_Name_Index (ft_face, buf); @@ -497,6 +580,10 @@ static struct hb_ft_font_funcs_lazy_loader_t : hb_font_funcs_lazy_loader_tcharmap && ft_face->charmap->encoding == FT_ENCODING_MS_SYMBOL; + hb_ft_font_t *ft_font = _hb_ft_font_create (ft_face, symbol, unref); + if (unlikely (!ft_font)) return; + hb_font_set_funcs (font, _hb_ft_get_font_funcs (), - _hb_ft_font_create (ft_face, symbol, unref), + ft_font, _hb_ft_font_destroy); } static hb_blob_t * -reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void *user_data) +_hb_ft_reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void *user_data) { FT_Face ft_face = (FT_Face) user_data; FT_Byte *buffer; @@ -570,12 +660,22 @@ reference_table (hb_face_t *face HB_UNUSED, hb_tag_t tag, void *user_data) /** * hb_ft_face_create: - * @ft_face: (destroy destroy) (scope notified): - * @destroy: + * @ft_face: (destroy destroy) (scope notified): FT_Face to work upon + * @destroy: A callback to call when the face object is not needed anymore + * + * Creates an #hb_face_t face object from the specified FT_Face. * + * This variant of the function does not provide any life-cycle management. * + * Most client programs should use hb_ft_face_create_referenced() + * (or, perhaps, hb_ft_face_create_cached()) instead. + * + * If you know you have valid reasons not to use hb_ft_face_create_referenced(), + * then it is the client program's responsibility to destroy @ft_face + * after the #hb_face_t face object has been destroyed. 
+ * + * Return value: (transfer full): the new #hb_face_t face object * - * Return value: (transfer full): * Since: 0.9.2 **/ hb_face_t * @@ -594,7 +694,7 @@ hb_ft_face_create (FT_Face ft_face, face = hb_face_create (blob, ft_face->face_index); hb_blob_destroy (blob); } else { - face = hb_face_create_for_tables (reference_table, ft_face, destroy); + face = hb_face_create_for_tables (_hb_ft_reference_table, ft_face, destroy); } hb_face_set_index (face, ft_face->face_index); @@ -605,11 +705,20 @@ hb_ft_face_create (FT_Face ft_face, /** * hb_ft_face_create_referenced: - * @ft_face: + * @ft_face: FT_Face to work upon + * + * Creates an #hb_face_t face object from the specified FT_Face. * + * This is the preferred variant of the hb_ft_face_create* + * function family, because it calls FT_Reference_Face() on @ft_face, + * ensuring that @ft_face remains alive as long as the resulting + * #hb_face_t face object remains alive. Also calls FT_Done_Face() + * when the #hb_face_t face object is destroyed. * + * Use this version unless you know you have good reasons not to. + * + * Return value: (transfer full): the new #hb_face_t face object * - * Return value: (transfer full): * Since: 0.9.38 **/ hb_face_t * @@ -627,11 +736,21 @@ hb_ft_face_finalize (FT_Face ft_face) /** * hb_ft_face_create_cached: - * @ft_face: + * @ft_face: FT_Face to work upon + * + * Creates an #hb_face_t face object from the specified FT_Face. + * + * This variant of the function caches the newly created #hb_face_t + * face object, using the @generic pointer of @ft_face. Subsequent function + * calls that are passed the same @ft_face parameter will have the same + * #hb_face_t returned to them, and that #hb_face_t will be correctly + * reference counted. * + * However, client programs are still responsible for destroying + * @ft_face after the last #hb_face_t face object has been destroyed. * + * Return value: (transfer full): the new #hb_face_t face object * - * Return value: (transfer full): * Since: 0.9.2 **/ hb_face_t * @@ -649,15 +768,34 @@ hb_ft_face_create_cached (FT_Face ft_face) return hb_face_reference ((hb_face_t *) ft_face->generic.data); } - /** * hb_ft_font_create: - * @ft_face: (destroy destroy) (scope notified): - * @destroy: + * @ft_face: (destroy destroy) (scope notified): FT_Face to work upon + * @destroy: (optional): A callback to call when the font object is not needed anymore + * + * Creates an #hb_font_t font object from the specified FT_Face. + * + * Note: You must set the face size on @ft_face before calling + * hb_ft_font_create() on it. Otherwise, HarfBuzz will not pick up + * the face size. * + * This variant of the function does not provide any life-cycle management. * + * Most client programs should use hb_ft_font_create_referenced() + * instead. + * + * If you know you have valid reasons not to use hb_ft_font_create_referenced(), + * then it is the client program's responsibility to destroy @ft_face + * after the #hb_font_t font object has been destroyed. + * + * HarfBuzz will use the @destroy callback on the #hb_font_t font object + * if it is supplied when you use this function. However, even if @destroy + * is provided, it is the client program's responsibility to destroy @ft_face, + * and it is the client program's responsibility to ensure that @ft_face is + * destroyed only after the #hb_font_t font object has been destroyed. 
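[Editor's note] The hb-ft documentation added above recommends the *_create_referenced() constructors, and this version introduces hb_ft_font_lock_face()/hb_ft_font_unlock_face() (declared in the hb-ft.h hunk further down) for safe access to the non-thread-safe FT_Face. A usage sketch, not part of this patch; the font path and the 16 pt size are placeholders:

    #include <stdio.h>
    #include <ft2build.h>
    #include FT_FREETYPE_H
    #include <hb.h>
    #include <hb-ft.h>

    int main (void)
    {
      FT_Library library;
      FT_Face ft_face;
      if (FT_Init_FreeType (&library) ||
          FT_New_Face (library, "font.ttf", 0, &ft_face))
        return 1;
      FT_Set_Char_Size (ft_face, 0, 16 * 64, 0, 0);  /* size must be set first */

      /* Preferred variant: the hb_font_t keeps ft_face alive via FT_Reference_Face. */
      hb_font_t *font = hb_ft_font_create_referenced (ft_face);
      FT_Done_Face (ft_face);                        /* font still holds a reference */

      /* Take the FT_Face only under the lock before poking at FreeType state. */
      FT_Face locked = hb_ft_font_lock_face (font);
      printf ("units per EM: %u\n", (unsigned) locked->units_per_EM);
      hb_ft_font_unlock_face (font);

      hb_font_destroy (font);
      FT_Done_FreeType (library);
      return 0;
    }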
+ * + * Return value: (transfer full): the new #hb_font_t font object * - * Return value: (transfer full): * Since: 0.9.2 **/ hb_font_t * @@ -675,6 +813,16 @@ hb_ft_font_create (FT_Face ft_face, return font; } +/** + * hb_ft_font_has_changed: + * @font: #hb_font_t to work upon + * + * Refreshes the state of @font when the underlying FT_Face has changed. + * This function should be called after changing the size or + * variation-axis settings on the FT_Face. + * + * Since: 1.0.5 + **/ void hb_ft_font_changed (hb_font_t *font) { @@ -682,6 +830,7 @@ hb_ft_font_changed (hb_font_t *font) return; hb_ft_font_t *ft_font = (hb_ft_font_t *) font->user_data; + FT_Face ft_face = ft_font->ft_face; hb_font_set_scale (font, @@ -693,7 +842,7 @@ hb_ft_font_changed (hb_font_t *font) ft_face->size->metrics.y_ppem); #endif -#ifdef HAVE_FT_GET_VAR_BLEND_COORDINATES +#if defined(HAVE_FT_GET_VAR_BLEND_COORDINATES) && !defined(HB_NO_VAR) FT_MM_Var *mm_var = nullptr; if (!FT_Get_MM_Var (ft_face, &mm_var)) { @@ -730,11 +879,23 @@ hb_ft_font_changed (hb_font_t *font) /** * hb_ft_font_create_referenced: - * @ft_face: + * @ft_face: FT_Face to work upon + * + * Creates an #hb_font_t font object from the specified FT_Face. + * + * Note: You must set the face size on @ft_face before calling + * hb_ft_font_create_references() on it. Otherwise, HarfBuzz will not pick up + * the face size. + * + * This is the preferred variant of the hb_ft_font_create* + * function family, because it calls FT_Reference_Face() on @ft_face, + * ensuring that @ft_face remains alive as long as the resulting + * #hb_font_t font object remains alive. * + * Use this version unless you know you have good reasons not to. * + * Return value: (transfer full): the new #hb_font_t font object * - * Return value: (transfer full): * Since: 0.9.38 **/ hb_font_t * @@ -748,7 +909,7 @@ hb_ft_font_create_referenced (FT_Face ft_face) static void free_static_ft_library (); #endif -static struct hb_ft_library_lazy_loader_t : hb_lazy_loader_t, hb_ft_library_lazy_loader_t> { static FT_Library create () @@ -793,6 +954,28 @@ _release_blob (FT_Face ft_face) hb_blob_destroy ((hb_blob_t *) ft_face->generic.data); } +/** + * hb_ft_font_set_funcs: + * @font: #hb_font_t to work upon + * + * Configures the font-functions structure of the specified + * #hb_font_t font object to use FreeType font functions. + * + * In particular, you can use this function to configure an + * existing #hb_face_t face object for use with FreeType font + * functions even if that #hb_face_t face object was initially + * created with hb_face_create(), and therefore was not + * initially configured to use FreeType font functions. + * + * An #hb_face_t face object created with hb_ft_face_create() + * is preconfigured for FreeType font functions and does not + * require this function to be used. + * + * Note: Internally, this function creates an FT_Face. 
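A sketch of the use case hb_ft_font_set_funcs() is documented for above: a font whose face was built from raw data with hb_face_create(), later switched to FreeType callbacks. `blob` is an assumed hb_blob_t holding the font file:

    hb_face_t *face = hb_face_create (blob, 0);
    hb_font_t *font = hb_font_create (face);
    hb_font_set_scale (font, 20 * 64, 20 * 64);  /* set a scale first; the internal
                                                    FT_Face is sized from it */
    hb_ft_font_set_funcs (font);
    /* ... shape ... */
    hb_font_destroy (font);
    hb_face_destroy (face);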
+* + * + * Since: 1.0.5 + **/ void hb_ft_font_set_funcs (hb_font_t *font) { @@ -815,8 +998,8 @@ hb_ft_font_set_funcs (hb_font_t *font) return; } - if (FT_Select_Charmap (ft_face, FT_ENCODING_UNICODE)) - FT_Select_Charmap (ft_face, FT_ENCODING_MS_SYMBOL); + if (FT_Select_Charmap (ft_face, FT_ENCODING_MS_SYMBOL)) + FT_Select_Charmap (ft_face, FT_ENCODING_UNICODE); FT_Set_Char_Size (ft_face, abs (font->x_scale), abs (font->y_scale), @@ -832,7 +1015,7 @@ hb_ft_font_set_funcs (hb_font_t *font) FT_Set_Transform (ft_face, &matrix, nullptr); } -#ifdef HAVE_FT_SET_VAR_BLEND_COORDINATES +#if defined(HAVE_FT_GET_VAR_BLEND_COORDINATES) && !defined(HB_NO_VAR) unsigned int num_coords; const int *coords = hb_font_get_var_coords_normalized (font, &num_coords); if (num_coords) @@ -841,7 +1024,7 @@ hb_ft_font_set_funcs (hb_font_t *font) if (ft_coords) { for (unsigned int i = 0; i < num_coords; i++) - ft_coords[i] = coords[i] << 2; + ft_coords[i] = coords[i] * 4; FT_Set_Var_Blend_Coordinates (ft_face, num_coords, ft_coords); free (ft_coords); } @@ -854,3 +1037,6 @@ hb_ft_font_set_funcs (hb_font_t *font) _hb_ft_font_set_funcs (font, ft_face, true); hb_ft_font_set_load_flags (font, FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING); } + + +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ft.h b/src/java.desktop/share/native/libharfbuzz/hb-ft.h index dda30d381574..4962eaf38d4c 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ft.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-ft.h @@ -110,6 +110,12 @@ hb_ft_font_create_referenced (FT_Face ft_face); HB_EXTERN FT_Face hb_ft_font_get_face (hb_font_t *font); +HB_EXTERN FT_Face +hb_ft_font_lock_face (hb_font_t *font); + +HB_EXTERN void +hb_ft_font_unlock_face (hb_font_t *font); + HB_EXTERN void hb_ft_font_set_load_flags (hb_font_t *font, int load_flags); diff --git a/src/java.desktop/share/native/libharfbuzz/hb-iter.hh b/src/java.desktop/share/native/libharfbuzz/hb-iter.hh index c4ab26dc0630..99a441af44f2 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-iter.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-iter.hh @@ -1,5 +1,6 @@ /* * Copyright © 2018 Google, Inc. + * Copyright © 2019 Facebook, Inc. * * This is part of HarfBuzz, a text shaping library. * @@ -22,13 +23,15 @@ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. * * Google Author(s): Behdad Esfahbod + * Facebook Author(s): Behdad Esfahbod */ #ifndef HB_ITER_HH #define HB_ITER_HH #include "hb.hh" -#include "hb-null.hh" +#include "hb-algs.hh" +#include "hb-meta.hh" /* Unified iterator object. @@ -39,16 +42,32 @@ * copied by value. If the collection / object being iterated on * is writable, then the iterator returns lvalues, otherwise it * returns rvalues. + * + * TODO Document more. + * + * If iterator implementation implements operator!=, then can be + * used in range-based for loop. That comes free if the iterator + * is random-access. Otherwise, the range-based for loop incurs + * one traversal to find end(), which can be avoided if written + * as a while-style for loop, or if iterator implements a faster + * __end__() method. + * TODO When opting in for C++17, address this by changing return + * type of .end()? + */ + +/* + * Base classes for iterators. */ /* Base class for all iterators. 
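The range-based-for support described in the comment above can be exercised with any hb iterator that provides operator!=; a sketch over a plain C array, relying on the hb_iter () entry point and the begin ()/end () support defined later in this header:

    int values[] = {1, 2, 3, 4};
    int sum = 0;
    for (int v : hb_iter (values))
      sum += v;                  /* sum == 10 */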
*/ -template +template struct hb_iter_t { - typedef Iter iter_t; - typedef iter_t const_iter_t; typedef Item item_t; - static constexpr unsigned item_size = hb_static_size (Item); + constexpr unsigned get_item_size () const { return hb_static_size (Item); } + static constexpr bool is_iterator = true; + static constexpr bool is_random_access_iterator = false; + static constexpr bool is_sorted_iterator = false; private: /* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */ @@ -56,53 +75,119 @@ struct hb_iter_t iter_t* thiz () { return static_cast< iter_t *> (this); } public: - /* Operators. */ - operator iter_t () { return iter(); } - explicit_operator bool () const { return more (); } - item_t& operator * () const { return item (); } - item_t& operator [] (signed i) const { return item_at ((unsigned) i); } - iter_t& operator += (unsigned count) { forward (count); return *thiz(); } - iter_t& operator ++ () { next (); return *thiz(); } - iter_t& operator -= (unsigned count) { rewind (count); return *thiz(); } - iter_t& operator -- () { prev (); return *thiz(); } - iter_t operator + (unsigned count) { iter_t c (*thiz()); c += count; return c; } - iter_t operator ++ (int) { iter_t c (*thiz()); ++*thiz(); return c; } - iter_t operator - (unsigned count) { iter_t c (*thiz()); c -= count; return c; } - iter_t operator -- (int) { iter_t c (*thiz()); --*thiz(); return c; } + /* TODO: + * Port operators below to use hb_enable_if to sniff which method implements + * an operator and use it, and remove hb_iter_fallback_mixin_t completely. */ - /* Methods. */ + /* Operators. */ iter_t iter () const { return *thiz(); } - const_iter_t const_iter () const { return iter (); } - item_t& item () const { return thiz()->__item__ (); } - item_t& item_at (unsigned i) const { return thiz()->__item_at__ (i); } - bool more () const { return thiz()->__more__ (); } + iter_t operator + () const { return *thiz(); } + iter_t begin () const { return *thiz(); } + iter_t end () const { return thiz()->__end__ (); } + explicit operator bool () const { return thiz()->__more__ (); } unsigned len () const { return thiz()->__len__ (); } - void next () { thiz()->__next__ (); } - void forward (unsigned n) { thiz()->__forward__ (n); } - void prev () { thiz()->__prev__ (); } - void rewind (unsigned n) { thiz()->__rewind__ (n); } - bool random_access () const { return thiz()->__random_access__ (); } + /* The following can only be enabled if item_t is reference type. Otherwise + * it will be returning pointer to temporary rvalue. + * TODO Use a wrapper return type to fix for non-reference type. 
*/ + template + hb_remove_reference* operator -> () const { return hb_addressof (**thiz()); } + item_t operator * () const { return thiz()->__item__ (); } + item_t operator * () { return thiz()->__item__ (); } + item_t operator [] (unsigned i) const { return thiz()->__item_at__ (i); } + item_t operator [] (unsigned i) { return thiz()->__item_at__ (i); } + iter_t& operator += (unsigned count) & { thiz()->__forward__ (count); return *thiz(); } + iter_t operator += (unsigned count) && { thiz()->__forward__ (count); return *thiz(); } + iter_t& operator ++ () & { thiz()->__next__ (); return *thiz(); } + iter_t operator ++ () && { thiz()->__next__ (); return *thiz(); } + iter_t& operator -= (unsigned count) & { thiz()->__rewind__ (count); return *thiz(); } + iter_t operator -= (unsigned count) && { thiz()->__rewind__ (count); return *thiz(); } + iter_t& operator -- () & { thiz()->__prev__ (); return *thiz(); } + iter_t operator -- () && { thiz()->__prev__ (); return *thiz(); } + iter_t operator + (unsigned count) const { auto c = thiz()->iter (); c += count; return c; } + friend iter_t operator + (unsigned count, const iter_t &it) { return it + count; } + iter_t operator ++ (int) { iter_t c (*thiz()); ++*thiz(); return c; } + iter_t operator - (unsigned count) const { auto c = thiz()->iter (); c -= count; return c; } + iter_t operator -- (int) { iter_t c (*thiz()); --*thiz(); return c; } + template + iter_t& operator >> (T &v) & { v = **thiz(); ++*thiz(); return *thiz(); } + template + iter_t operator >> (T &v) && { v = **thiz(); ++*thiz(); return *thiz(); } + template + iter_t& operator << (const T v) & { **thiz() = v; ++*thiz(); return *thiz(); } + template + iter_t operator << (const T v) && { **thiz() = v; ++*thiz(); return *thiz(); } protected: - hb_iter_t () {} - hb_iter_t (const hb_iter_t &o HB_UNUSED) {} - void operator = (const hb_iter_t &o HB_UNUSED) {} + hb_iter_t () = default; + hb_iter_t (const hb_iter_t &o HB_UNUSED) = default; + hb_iter_t (hb_iter_t &&o HB_UNUSED) = default; + hb_iter_t& operator = (const hb_iter_t &o HB_UNUSED) = default; + hb_iter_t& operator = (hb_iter_t &&o HB_UNUSED) = default; }; -/* Base class for sorted iterators. Does not enforce anything. - * Just for class taxonomy and requirements. */ -template -struct hb_sorted_iter_t : hb_iter_t +#define HB_ITER_USING(Name) \ + using item_t = typename Name::item_t; \ + using Name::begin; \ + using Name::end; \ + using Name::get_item_size; \ + using Name::is_iterator; \ + using Name::iter; \ + using Name::operator bool; \ + using Name::len; \ + using Name::operator ->; \ + using Name::operator *; \ + using Name::operator []; \ + using Name::operator +=; \ + using Name::operator ++; \ + using Name::operator -=; \ + using Name::operator --; \ + using Name::operator +; \ + using Name::operator -; \ + using Name::operator >>; \ + using Name::operator <<; \ + static_assert (true, "") + +/* Returns iterator / item type of a type. */ +template +using hb_iter_type = decltype (hb_deref (hb_declval (Iterable)).iter ()); +template +using hb_item_type = decltype (*hb_deref (hb_declval (Iterable)).iter ()); + + +template struct hb_array_t; +template struct hb_sorted_array_t; + +struct { - protected: - hb_sorted_iter_t () {} - hb_sorted_iter_t (const hb_sorted_iter_t &o) : hb_iter_t (o) {} - void operator = (const hb_sorted_iter_t &o HB_UNUSED) {} -}; + template hb_iter_type + operator () (T&& c) const + { return hb_deref (hb_forward (c)).iter (); } + + /* Specialization for C arrays. 
*/ + + template inline hb_array_t + operator () (Type *array, unsigned int length) const + { return hb_array_t (array, length); } + + template hb_array_t + operator () (Type (&array)[length]) const + { return hb_array_t (array, length); } + +} +HB_FUNCOBJ (hb_iter); +struct +{ + template unsigned + operator () (T&& c) const + { return c.len (); } + +} +HB_FUNCOBJ (hb_len); /* Mixin to fill in what the subclass doesn't provide. */ -template -struct hb_iter_mixin_t +template +struct hb_iter_fallback_mixin_t { private: /* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */ @@ -111,42 +196,743 @@ struct hb_iter_mixin_t public: /* Access: Implement __item__(), or __item_at__() if random-access. */ - item_t& __item__ () const { return thiz()->item_at (0); } - item_t& __item_at__ (unsigned i) const { return *(thiz() + i); } + item_t __item__ () const { return (*thiz())[0]; } + item_t __item_at__ (unsigned i) const { return *(*thiz() + i); } /* Termination: Implement __more__(), or __len__() if random-access. */ - bool __more__ () const { return thiz()->__len__ (); } + bool __more__ () const { return bool (thiz()->len ()); } unsigned __len__ () const - { iter_t c (*thiz()); unsigned l = 0; while (c) { c++; l++; }; return l; } + { iter_t c (*thiz()); unsigned l = 0; while (c) { c++; l++; } return l; } /* Advancing: Implement __next__(), or __forward__() if random-access. */ - void __next__ () { thiz()->forward (1); } - void __forward__ (unsigned n) { while (n--) thiz()->next (); } + void __next__ () { *thiz() += 1; } + void __forward__ (unsigned n) { while (*thiz() && n--) ++*thiz(); } /* Rewinding: Implement __prev__() or __rewind__() if bidirectional. */ - void __prev__ () { thiz()->rewind (1); } - void __rewind__ (unsigned n) { while (n--) thiz()->prev (); } + void __prev__ () { *thiz() -= 1; } + void __rewind__ (unsigned n) { while (*thiz() && n--) --*thiz(); } + + /* Range-based for: Implement __end__() if can be done faster, + * and operator!=. */ + iter_t __end__ () const + { + if (thiz()->is_random_access_iterator) + return *thiz() + thiz()->len (); + /* Above expression loops twice. Following loops once. */ + auto it = *thiz(); + while (it) ++it; + return it; + } + + protected: + hb_iter_fallback_mixin_t () = default; + hb_iter_fallback_mixin_t (const hb_iter_fallback_mixin_t &o HB_UNUSED) = default; + hb_iter_fallback_mixin_t (hb_iter_fallback_mixin_t &&o HB_UNUSED) = default; + hb_iter_fallback_mixin_t& operator = (const hb_iter_fallback_mixin_t &o HB_UNUSED) = default; + hb_iter_fallback_mixin_t& operator = (hb_iter_fallback_mixin_t &&o HB_UNUSED) = default; +}; + +template +struct hb_iter_with_fallback_t : + hb_iter_t, + hb_iter_fallback_mixin_t +{ + protected: + hb_iter_with_fallback_t () = default; + hb_iter_with_fallback_t (const hb_iter_with_fallback_t &o HB_UNUSED) = default; + hb_iter_with_fallback_t (hb_iter_with_fallback_t &&o HB_UNUSED) = default; + hb_iter_with_fallback_t& operator = (const hb_iter_with_fallback_t &o HB_UNUSED) = default; + hb_iter_with_fallback_t& operator = (hb_iter_with_fallback_t &&o HB_UNUSED) = default; +}; + +/* + * Meta-programming predicates. 
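A sketch of a hand-written iterator on top of hb_iter_with_fallback_t: only __item__()/__more__()/__next__() (plus operator!= so range-based for works) are supplied; everything else comes from the fallback mixin. `count_down_t` is a hypothetical example type, not part of the library:

    struct count_down_t : hb_iter_with_fallback_t<count_down_t, unsigned>
    {
      count_down_t (unsigned n) : n (n) {}

      unsigned __item__ () const { return n; }
      bool __more__ () const { return n != 0; }
      void __next__ () { n--; }
      bool operator != (const count_down_t &o) const { return n != o.n; }

      private:
      unsigned n;
    };

    /* for (unsigned v : count_down_t (3)) ...   visits 3, 2, 1 */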
+ */ + +/* hb_is_iterator() / hb_is_iterator_of() */ + +template +struct hb_is_iterator_of +{ + template + static hb_true_type impl (hb_priority<2>, hb_iter_t> *); + static hb_false_type impl (hb_priority<0>, const void *); + + public: + static constexpr bool value = decltype (impl (hb_prioritize, hb_declval (Iter*)))::value; +}; +#define hb_is_iterator_of(Iter, Item) hb_is_iterator_of::value +#define hb_is_iterator(Iter) hb_is_iterator_of (Iter, typename Iter::item_t) + +/* hb_is_iterable() */ + +template +struct hb_is_iterable +{ + private: + + template + static auto impl (hb_priority<1>) -> decltype (hb_declval (U).iter (), hb_true_type ()); + + template + static hb_false_type impl (hb_priority<0>); + + public: + static constexpr bool value = decltype (impl (hb_prioritize))::value; +}; +#define hb_is_iterable(Iterable) hb_is_iterable::value + +/* hb_is_source_of() / hb_is_sink_of() */ + +template +struct hb_is_source_of +{ + private: + template >))> + static hb_true_type impl (hb_priority<2>); + template + static auto impl (hb_priority<1>) -> decltype (hb_declval (Iter2) >> hb_declval (Item &), hb_true_type ()); + static hb_false_type impl (hb_priority<0>); + + public: + static constexpr bool value = decltype (impl (hb_prioritize))::value; +}; +#define hb_is_source_of(Iter, Item) hb_is_source_of::value + +template +struct hb_is_sink_of +{ + private: + template ))> + static hb_true_type impl (hb_priority<2>); + template + static auto impl (hb_priority<1>) -> decltype (hb_declval (Iter2) << hb_declval (Item), hb_true_type ()); + static hb_false_type impl (hb_priority<0>); + + public: + static constexpr bool value = decltype (impl (hb_prioritize))::value; +}; +#define hb_is_sink_of(Iter, Item) hb_is_sink_of::value + +/* This is commonly used, so define: */ +#define hb_is_sorted_source_of(Iter, Item) \ + (hb_is_source_of(Iter, Item) && Iter::is_sorted_iterator) + + +/* Range-based 'for' for iterables. */ + +template +static inline auto begin (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).begin ()) + +template +static inline auto end (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).end ()) + +/* begin()/end() are NOT looked up non-ADL. So each namespace must declare them. + * Do it for namespace OT. */ +namespace OT { + +template +static inline auto begin (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).begin ()) + +template +static inline auto end (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).end ()) + +} + + +/* + * Adaptors, combiners, etc. + */ + +template +static inline auto +operator | (Lhs&& lhs, Rhs&& rhs) HB_AUTO_RETURN (hb_forward (rhs) (hb_forward (lhs))) + +/* hb_map(), hb_filter(), hb_reduce() */ + +enum class hb_function_sortedness_t { + NOT_SORTED, + RETAINS_SORTING, + SORTED, +}; + +template +struct hb_map_iter_t : + hb_iter_t, + decltype (hb_get (hb_declval (Proj), *hb_declval (Iter)))> +{ + hb_map_iter_t (const Iter& it, Proj f_) : it (it), f (f_) {} + + typedef decltype (hb_get (hb_declval (Proj), *hb_declval (Iter))) __item_t__; + static constexpr bool is_random_access_iterator = Iter::is_random_access_iterator; + static constexpr bool is_sorted_iterator = + Sorted == hb_function_sortedness_t::SORTED ? true : + Sorted == hb_function_sortedness_t::RETAINS_SORTING ? 
Iter::is_sorted_iterator : + false; + __item_t__ __item__ () const { return hb_get (f.get (), *it); } + __item_t__ __item_at__ (unsigned i) const { return hb_get (f.get (), it[i]); } + bool __more__ () const { return bool (it); } + unsigned __len__ () const { return it.len (); } + void __next__ () { ++it; } + void __forward__ (unsigned n) { it += n; } + void __prev__ () { --it; } + void __rewind__ (unsigned n) { it -= n; } + hb_map_iter_t __end__ () const { return hb_map_iter_t (it.end (), f); } + bool operator != (const hb_map_iter_t& o) const + { return it != o.it; } + + private: + Iter it; + hb_reference_wrapper f; +}; + +template +struct hb_map_iter_factory_t +{ + hb_map_iter_factory_t (Proj f) : f (f) {} + + template + hb_map_iter_t + operator () (Iter it) + { return hb_map_iter_t (it, f); } + + private: + Proj f; +}; +struct +{ + template + hb_map_iter_factory_t + operator () (Proj&& f) const + { return hb_map_iter_factory_t (f); } +} +HB_FUNCOBJ (hb_map); +struct +{ + template + hb_map_iter_factory_t + operator () (Proj&& f) const + { return hb_map_iter_factory_t (f); } +} +HB_FUNCOBJ (hb_map_retains_sorting); +struct +{ + template + hb_map_iter_factory_t + operator () (Proj&& f) const + { return hb_map_iter_factory_t (f); } +} +HB_FUNCOBJ (hb_map_sorted); + +template +struct hb_filter_iter_t : + hb_iter_with_fallback_t, + typename Iter::item_t> +{ + hb_filter_iter_t (const Iter& it_, Pred p_, Proj f_) : it (it_), p (p_), f (f_) + { while (it && !hb_has (p.get (), hb_get (f.get (), *it))) ++it; } + + typedef typename Iter::item_t __item_t__; + static constexpr bool is_sorted_iterator = Iter::is_sorted_iterator; + __item_t__ __item__ () const { return *it; } + bool __more__ () const { return bool (it); } + void __next__ () { do ++it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); } + void __prev__ () { do --it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); } + hb_filter_iter_t __end__ () const { return hb_filter_iter_t (it.end (), p, f); } + bool operator != (const hb_filter_iter_t& o) const + { return it != o.it; } + + private: + Iter it; + hb_reference_wrapper p; + hb_reference_wrapper f; +}; +template +struct hb_filter_iter_factory_t +{ + hb_filter_iter_factory_t (Pred p, Proj f) : p (p), f (f) {} + + template + hb_filter_iter_t + operator () (Iter it) + { return hb_filter_iter_t (it, p, f); } + + private: + Pred p; + Proj f; +}; +struct +{ + template + hb_filter_iter_factory_t + operator () (Pred&& p = hb_identity, Proj&& f = hb_identity) const + { return hb_filter_iter_factory_t (p, f); } +} +HB_FUNCOBJ (hb_filter); + +template +struct hb_reduce_t +{ + hb_reduce_t (Redu r, InitT init_value) : r (r), init_value (init_value) {} + + template > + AccuT + operator () (Iter it) + { + AccuT value = init_value; + for (; it; ++it) + value = r (value, *it); + return value; + } + + private: + Redu r; + InitT init_value; +}; +struct +{ + template + hb_reduce_t + operator () (Redu&& r, InitT init_value) const + { return hb_reduce_t (r, init_value); } +} +HB_FUNCOBJ (hb_reduce); + + +/* hb_zip() */ - /* Random access: Return true if item_at(), len(), forward() are fast. */ - bool __random_access__ () const { return false; } +template +struct hb_zip_iter_t : + hb_iter_t, + hb_pair_t> +{ + hb_zip_iter_t () {} + hb_zip_iter_t (const A& a, const B& b) : a (a), b (b) {} + + typedef hb_pair_t __item_t__; + static constexpr bool is_random_access_iterator = + A::is_random_access_iterator && + B::is_random_access_iterator; + /* Note. 
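Taken together, hb_filter()/hb_map()/hb_reduce() above compose with the pipe operator; a sketch summing the squares of the even entries of a plain array (hb_add is the addition function object from hb-algs.hh):

    int values[] = {1, 2, 3, 4, 5, 6};
    unsigned sum =
      + hb_iter (values)
      | hb_filter ([] (int v) { return (v & 1) == 0; })
      | hb_map ([] (int v) { return (unsigned) (v * v); })
      | hb_reduce (hb_add, 0u);  /* 4 + 16 + 36 == 56 */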
The following categorization is only valid if A is strictly sorted, + * ie. does NOT have duplicates. Previously I tried to categorize sortedness + * more granularly, see commits: + * + * 513762849a683914fc266a17ddf38f133cccf072 + * 4d3cf2adb669c345cc43832d11689271995e160a + * + * However, that was not enough, since hb_sorted_array_t, hb_sorted_vector_t, + * SortedArrayOf, etc all needed to be updated to add more variants. At that + * point I saw it not worth the effort, and instead we now deem all sorted + * collections as essentially strictly-sorted for the purposes of zip. + * + * The above assumption is not as bad as it sounds. Our "sorted" comes with + * no guarantees. It's just a contract, put in place to help you remember, + * and think about, whether an iterator you receive is expected to be + * sorted or not. As such, it's not perfect by definition, and should not + * be treated so. The inaccuracy here just errs in the direction of being + * more permissive, so your code compiles instead of erring on the side of + * marking your zipped iterator unsorted in which case your code won't + * compile. + * + * This semantical limitation does NOT affect logic in any other place I + * know of as of this writing. + */ + static constexpr bool is_sorted_iterator = A::is_sorted_iterator; + + __item_t__ __item__ () const { return __item_t__ (*a, *b); } + __item_t__ __item_at__ (unsigned i) const { return __item_t__ (a[i], b[i]); } + bool __more__ () const { return bool (a) && bool (b); } + unsigned __len__ () const { return hb_min (a.len (), b.len ()); } + void __next__ () { ++a; ++b; } + void __forward__ (unsigned n) { a += n; b += n; } + void __prev__ () { --a; --b; } + void __rewind__ (unsigned n) { a -= n; b -= n; } + hb_zip_iter_t __end__ () const { return hb_zip_iter_t (a.end (), b.end ()); } + /* Note, we should stop if ANY of the iters reaches end. As such two compare + * unequal if both items are unequal, NOT if either is unequal. */ + bool operator != (const hb_zip_iter_t& o) const + { return a != o.a && b != o.b; } + + private: + A a; + B b; +}; +struct +{ HB_PARTIALIZE(2); + template + hb_zip_iter_t, hb_iter_type> + operator () (A&& a, B&& b) const + { return hb_zip_iter_t, hb_iter_type> (hb_iter (a), hb_iter (b)); } +} +HB_FUNCOBJ (hb_zip); + +/* hb_apply() */ + +template +struct hb_apply_t +{ + hb_apply_t (Appl a) : a (a) {} + + template + void operator () (Iter it) + { + for (; it; ++it) + (void) hb_invoke (a, *it); + } + + private: + Appl a; }; +struct +{ + template hb_apply_t + operator () (Appl&& a) const + { return hb_apply_t (a); } + template hb_apply_t + operator () (Appl *a) const + { return hb_apply_t (*a); } +} +HB_FUNCOBJ (hb_apply); + +/* hb_range()/hb_iota()/hb_repeat() */ + +template +struct hb_range_iter_t : + hb_iter_t, T> +{ + hb_range_iter_t (T start, T end_, S step) : v (start), end_ (end_for (start, end_, step)), step (step) {} + + typedef T __item_t__; + static constexpr bool is_random_access_iterator = true; + static constexpr bool is_sorted_iterator = true; + __item_t__ __item__ () const { return hb_ridentity (v); } + __item_t__ __item_at__ (unsigned j) const { return v + j * step; } + bool __more__ () const { return v != end_; } + unsigned __len__ () const { return !step ? 
UINT_MAX : (end_ - v) / step; } + void __next__ () { v += step; } + void __forward__ (unsigned n) { v += n * step; } + void __prev__ () { v -= step; } + void __rewind__ (unsigned n) { v -= n * step; } + hb_range_iter_t __end__ () const { return hb_range_iter_t (end_, end_, step); } + bool operator != (const hb_range_iter_t& o) const + { return v != o.v; } + + private: + static inline T end_for (T start, T end_, S step) + { + if (!step) + return end_; + auto res = (end_ - start) % step; + if (!res) + return end_; + end_ += step - res; + return end_; + } + + private: + T v; + T end_; + S step; +}; +struct +{ + template hb_range_iter_t + operator () (T end = (unsigned) -1) const + { return hb_range_iter_t (0, end, 1u); } + + template hb_range_iter_t + operator () (T start, T end, S step = 1u) const + { return hb_range_iter_t (start, end, step); } +} +HB_FUNCOBJ (hb_range); + +template +struct hb_iota_iter_t : + hb_iter_with_fallback_t, T> +{ + hb_iota_iter_t (T start, S step) : v (start), step (step) {} + + private: + + template + auto + inc (hb_type_identity s, hb_priority<1>) + -> hb_void_t (s), hb_declval ()))> + { v = hb_invoke (hb_forward (s), v); } + + void + inc (S s, hb_priority<0>) + { v += s; } + + public: + + typedef T __item_t__; + static constexpr bool is_random_access_iterator = true; + static constexpr bool is_sorted_iterator = true; + __item_t__ __item__ () const { return hb_ridentity (v); } + bool __more__ () const { return true; } + unsigned __len__ () const { return UINT_MAX; } + void __next__ () { inc (step, hb_prioritize); } + void __prev__ () { v -= step; } + hb_iota_iter_t __end__ () const { return *this; } + bool operator != (const hb_iota_iter_t& o) const { return true; } + + private: + T v; + S step; +}; +struct +{ + template hb_iota_iter_t + operator () (T start = 0u, S step = 1u) const + { return hb_iota_iter_t (start, step); } +} +HB_FUNCOBJ (hb_iota); -/* Functions operating on iterators or iteratables. */ +template +struct hb_repeat_iter_t : + hb_iter_t, T> +{ + hb_repeat_iter_t (T value) : v (value) {} + + typedef T __item_t__; + static constexpr bool is_random_access_iterator = true; + static constexpr bool is_sorted_iterator = true; + __item_t__ __item__ () const { return v; } + __item_t__ __item_at__ (unsigned j) const { return v; } + bool __more__ () const { return true; } + unsigned __len__ () const { return UINT_MAX; } + void __next__ () {} + void __forward__ (unsigned) {} + void __prev__ () {} + void __rewind__ (unsigned) {} + hb_repeat_iter_t __end__ () const { return *this; } + bool operator != (const hb_repeat_iter_t& o) const { return true; } + + private: + T v; +}; +struct +{ + template hb_repeat_iter_t + operator () (T value) const + { return hb_repeat_iter_t (value); } +} +HB_FUNCOBJ (hb_repeat); + +/* hb_enumerate()/hb_take() */ + +struct +{ + template + auto operator () (Iterable&& it, Index start = 0u) const HB_AUTO_RETURN + ( hb_zip (hb_iota (start), it) ) +} +HB_FUNCOBJ (hb_enumerate); + +struct +{ HB_PARTIALIZE(2); + template + auto operator () (Iterable&& it, unsigned count) const HB_AUTO_RETURN + ( hb_zip (hb_range (count), it) | hb_map (hb_second) ) + + /* Specialization arrays. 
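hb_range() and hb_enumerate() above behave much like Python's range()/enumerate(); a short sketch (printf is only for illustration):

    unsigned total = 0;
    for (unsigned i : hb_range (5u))             /* 0, 1, 2, 3, 4 */
      total += i;                                /* total == 10 */

    const char *names[] = {"zero", "one", "two"};
    for (auto p : hb_enumerate (hb_iter (names)))
      printf ("%u -> %s\n", p.first, p.second);  /* pairs of (index, item) */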
*/ + + template inline hb_array_t + operator () (hb_array_t array, unsigned count) const + { return array.sub_array (0, count); } + + template inline hb_sorted_array_t + operator () (hb_sorted_array_t array, unsigned count) const + { return array.sub_array (0, count); } +} +HB_FUNCOBJ (hb_take); + +struct +{ HB_PARTIALIZE(2); + template + auto operator () (Iter it, unsigned count) const HB_AUTO_RETURN + ( + + hb_iota (it, hb_add (count)) + | hb_map (hb_take (count)) + | hb_take ((hb_len (it) + count - 1) / count) + ) +} +HB_FUNCOBJ (hb_chop); + +/* hb_sink() */ + +template +struct hb_sink_t +{ + hb_sink_t (Sink s) : s (s) {} + + template + void operator () (Iter it) + { + for (; it; ++it) + s << *it; + } + + private: + Sink s; +}; +struct +{ + template hb_sink_t + operator () (Sink&& s) const + { return hb_sink_t (s); } + + template hb_sink_t + operator () (Sink *s) const + { return hb_sink_t (*s); } +} +HB_FUNCOBJ (hb_sink); + +/* hb-drain: hb_sink to void / blackhole / /dev/null. */ + +struct +{ + template + void operator () (Iter it) const + { + for (; it; ++it) + (void) *it; + } +} +HB_FUNCOBJ (hb_drain); + +/* hb_unzip(): unzip and sink to two sinks. */ + +template +struct hb_unzip_t +{ + hb_unzip_t (Sink1 s1, Sink2 s2) : s1 (s1), s2 (s2) {} + + template + void operator () (Iter it) + { + for (; it; ++it) + { + const auto &v = *it; + s1 << v.first; + s2 << v.second; + } + } + + private: + Sink1 s1; + Sink2 s2; +}; +struct +{ + template hb_unzip_t + operator () (Sink1&& s1, Sink2&& s2) const + { return hb_unzip_t (s1, s2); } + + template hb_unzip_t + operator () (Sink1 *s1, Sink2 *s2) const + { return hb_unzip_t (*s1, *s2); } +} +HB_FUNCOBJ (hb_unzip); + + +/* hb-all, hb-any, hb-none. */ + +struct +{ + template + bool operator () (Iterable&& c, + Pred&& p = hb_identity, + Proj&& f = hb_identity) const + { + for (auto it = hb_iter (c); it; ++it) + if (!hb_match (hb_forward (p), hb_get (hb_forward (f), *it))) + return false; + return true; + } +} +HB_FUNCOBJ (hb_all); +struct +{ + template + bool operator () (Iterable&& c, + Pred&& p = hb_identity, + Proj&& f = hb_identity) const + { + for (auto it = hb_iter (c); it; ++it) + if (hb_match (hb_forward (p), hb_get (hb_forward (f), *it))) + return true; + return false; + } +} +HB_FUNCOBJ (hb_any); +struct +{ + template + bool operator () (Iterable&& c, + Pred&& p = hb_identity, + Proj&& f = hb_identity) const + { + for (auto it = hb_iter (c); it; ++it) + if (hb_match (hb_forward (p), hb_get (hb_forward (f), *it))) + return false; + return true; + } +} +HB_FUNCOBJ (hb_none); + +/* + * Algorithms operating on iterators. 
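A sketch of the sink and predicate helpers above: collecting a pipeline into an hb_vector_t (which implements the << sink interface) and testing the result with hb_all():

    int values[] = {1, 2, 3, 4};
    hb_vector_t<unsigned> squares;

    + hb_iter (values)
    | hb_map ([] (int v) { return (unsigned) (v * v); })
    | hb_sink (squares);

    bool ok = hb_all (squares, [] (unsigned v) { return v > 0; });  /* true */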
+ */ -template inline void -hb_fill (const C& c, const V &v) +template +inline void +hb_fill (C& c, const V &v) { - for (typename C::iter_t i (c); i; i++) - hb_assign (*i, v); + for (auto i = hb_iter (c); i; i++) + *i = v; } -template inline bool -hb_copy (hb_iter_t &id, hb_iter_t &is) +template +inline void +hb_copy (S&& is, D&& id) { - for (; id && is; ++id, ++is) - *id = *is; - return !is; + hb_iter (is) | hb_sink (id); } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-kern.hh b/src/java.desktop/share/native/libharfbuzz/hb-kern.hh index 43d70d7f2216..42e549364105 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-kern.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-kern.hh @@ -52,8 +52,7 @@ struct hb_kern_machine_t OT::hb_ot_apply_context_t c (1, font, buffer); c.set_lookup_mask (kern_mask); c.set_lookup_props (OT::LookupFlag::IgnoreMarks); - OT::hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c.iter_input; - skippy_iter.init (&c); + auto &skippy_iter = c.iter_input; bool horizontal = HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction); unsigned int count = buffer->len; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-machinery.hh b/src/java.desktop/share/native/libharfbuzz/hb-machinery.hh index 2ae288494f44..0c820e7429fc 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-machinery.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-machinery.hh @@ -32,30 +32,15 @@ #include "hb.hh" #include "hb-blob.hh" -#include "hb-array.hh" -#include "hb-vector.hh" +#include "hb-dispatch.hh" +#include "hb-sanitize.hh" +#include "hb-serialize.hh" /* * Casts */ -/* Cast to struct T, reference to reference */ -template -static inline const Type& CastR(const TObject &X) -{ return reinterpret_cast (X); } -template -static inline Type& CastR(TObject &X) -{ return reinterpret_cast (X); } - -/* Cast to struct T, pointer to pointer */ -template -static inline const Type* CastP(const TObject *X) -{ return reinterpret_cast (X); } -template -static inline Type* CastP(TObject *X) -{ return reinterpret_cast (X); } - /* StructAtOffset(P,Ofs) returns the struct T& that is placed at memory * location pointed to by P plus Ofs bytes. 
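In terms of the rewritten hb_fill()/hb_copy() helpers above, usage now reads:

    int src[] = {1, 2, 3};
    hb_vector_t<int> dst;
    hb_copy (src, dst);          /* same as: hb_iter (src) | hb_sink (dst) */

    int buf[4];
    hb_fill (buf, 7);            /* buf becomes {7, 7, 7, 7} */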
*/ template @@ -69,7 +54,7 @@ static inline const Type& StructAtOffsetUnaligned(const void *P, unsigned int of { #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wcast-align" - return * reinterpret_cast ((char *) P + offset); + return * reinterpret_cast ((const char *) P + offset); #pragma GCC diagnostic pop } template @@ -134,7 +119,7 @@ static inline Type& StructAfter(TObject &X) #define DEFINE_SIZE_ARRAY(size, array) \ DEFINE_COMPILES_ASSERTION ((void) (array)[0].static_size) \ - DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + VAR * sizeof ((array)[0])) \ + DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + (HB_VAR_ARRAY+0) * sizeof ((array)[0])) \ static constexpr unsigned null_size = (size); \ static constexpr unsigned min_size = (size) @@ -143,615 +128,6 @@ static inline Type& StructAfter(TObject &X) DEFINE_SIZE_ARRAY(size, array) -/* - * Dispatch - */ - -template -struct hb_dispatch_context_t -{ - static constexpr unsigned max_debug_depth = MaxDebugDepth; - typedef Return return_t; - template - bool may_dispatch (const T *obj HB_UNUSED, const F *format HB_UNUSED) { return true; } - static return_t no_dispatch_return_value () { return Context::default_return_value (); } - static bool stop_sublookup_iteration (const return_t r HB_UNUSED) { return false; } -}; - - -/* - * Sanitize - * - * - * === Introduction === - * - * The sanitize machinery is at the core of our zero-cost font loading. We - * mmap() font file into memory and create a blob out of it. Font subtables - * are returned as a readonly sub-blob of the main font blob. These table - * blobs are then sanitized before use, to ensure invalid memory access does - * not happen. The toplevel sanitize API use is like, eg. to load the 'head' - * table: - * - * hb_blob_t *head_blob = hb_sanitize_context_t ().reference_table (face); - * - * The blob then can be converted to a head table struct with: - * - * const head *head_table = head_blob->as (); - * - * What the reference_table does is, to call hb_face_reference_table() to load - * the table blob, sanitize it and return either the sanitized blob, or empty - * blob if sanitization failed. The blob->as() function returns the null - * object of its template type argument if the blob is empty. Otherwise, it - * just casts the blob contents to the desired type. - * - * Sanitizing a blob of data with a type T works as follows (with minor - * simplification): - * - * - Cast blob content to T*, call sanitize() method of it, - * - If sanitize succeeded, return blob. - * - Otherwise, if blob is not writable, try making it writable, - * or copy if cannot be made writable in-place, - * - Call sanitize() again. Return blob if sanitize succeeded. - * - Return empty blob otherwise. - * - * - * === The sanitize() contract === - * - * The sanitize() method of each object type shall return true if it's safe to - * call other methods of the object, and false otherwise. - * - * Note that what sanitize() checks for might align with what the specification - * describes as valid table data, but does not have to be. In particular, we - * do NOT want to be pedantic and concern ourselves with validity checks that - * are irrelevant to our use of the table. On the contrary, we want to be - * lenient with error handling and accept invalid data to the extent that it - * does not impose extra burden on us. - * - * Based on the sanitize contract, one can see that what we check for depends - * on how we use the data in other table methods. Ie. 
if other table methods - * assume that offsets do NOT point out of the table data block, then that's - * something sanitize() must check for (GSUB/GPOS/GDEF/etc work this way). On - * the other hand, if other methods do such checks themselves, then sanitize() - * does not have to bother with them (glyf/local work this way). The choice - * depends on the table structure and sanitize() performance. For example, to - * check glyf/loca offsets in sanitize() would cost O(num-glyphs). We try hard - * to avoid such costs during font loading. By postponing such checks to the - * actual glyph loading, we reduce the sanitize cost to O(1) and total runtime - * cost to O(used-glyphs). As such, this is preferred. - * - * The same argument can be made re GSUB/GPOS/GDEF, but there, the table - * structure is so complicated that by checking all offsets at sanitize() time, - * we make the code much simpler in other methods, as offsets and referenced - * objects do not need to be validated at each use site. - */ - -/* This limits sanitizing time on really broken fonts. */ -#ifndef HB_SANITIZE_MAX_EDITS -#define HB_SANITIZE_MAX_EDITS 32 -#endif -#ifndef HB_SANITIZE_MAX_OPS_FACTOR -#define HB_SANITIZE_MAX_OPS_FACTOR 8 -#endif -#ifndef HB_SANITIZE_MAX_OPS_MIN -#define HB_SANITIZE_MAX_OPS_MIN 16384 -#endif -#ifndef HB_SANITIZE_MAX_OPS_MAX -#define HB_SANITIZE_MAX_OPS_MAX 0x3FFFFFFF -#endif - -struct hb_sanitize_context_t : - hb_dispatch_context_t -{ - hb_sanitize_context_t () : - debug_depth (0), - start (nullptr), end (nullptr), - max_ops (0), - writable (false), edit_count (0), - blob (nullptr), - num_glyphs (65536), - num_glyphs_set (false) {} - - const char *get_name () { return "SANITIZE"; } - template - bool may_dispatch (const T *obj HB_UNUSED, const F *format) - { return format->sanitize (this); } - template - return_t dispatch (const T &obj) { return obj.sanitize (this); } - static return_t default_return_value () { return true; } - static return_t no_dispatch_return_value () { return false; } - bool stop_sublookup_iteration (const return_t r) const { return !r; } - - void init (hb_blob_t *b) - { - this->blob = hb_blob_reference (b); - this->writable = false; - } - - void set_num_glyphs (unsigned int num_glyphs_) - { - num_glyphs = num_glyphs_; - num_glyphs_set = true; - } - unsigned int get_num_glyphs () { return num_glyphs; } - - void set_max_ops (int max_ops_) { max_ops = max_ops_; } - - template - void set_object (const T *obj) - { - reset_object (); - - if (!obj) return; - - const char *obj_start = (const char *) obj; - if (unlikely (obj_start < this->start || this->end <= obj_start)) - this->start = this->end = nullptr; - else - { - this->start = obj_start; - this->end = obj_start + MIN (this->end - obj_start, obj->get_size ()); - } - } - - void reset_object () - { - this->start = this->blob->data; - this->end = this->start + this->blob->length; - assert (this->start <= this->end); /* Must not overflow. 
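The comment block above (which this update moves out of hb-machinery.hh together with the sanitize machinery itself) describes the toplevel flow; with the template arguments spelled out, and `face` standing for an hb_face_t *, it reads:

    hb_blob_t *head_blob = hb_sanitize_context_t ().reference_table<OT::head> (face);
    const OT::head *head_table = head_blob->as<OT::head> ();
    /* head_table is the null object of OT::head if sanitization failed */
    /* ... use head_table ... */
    hb_blob_destroy (head_blob);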
*/ - } - - void start_processing () - { - reset_object (); - this->max_ops = MAX ((unsigned int) (this->end - this->start) * HB_SANITIZE_MAX_OPS_FACTOR, - (unsigned) HB_SANITIZE_MAX_OPS_MIN); - this->edit_count = 0; - this->debug_depth = 0; - - DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1, - "start [%p..%p] (%lu bytes)", - this->start, this->end, - (unsigned long) (this->end - this->start)); - } - - void end_processing () - { - DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1, - "end [%p..%p] %u edit requests", - this->start, this->end, this->edit_count); - - hb_blob_destroy (this->blob); - this->blob = nullptr; - this->start = this->end = nullptr; - } - - bool check_range (const void *base, - unsigned int len) const - { - const char *p = (const char *) base; - bool ok = this->start <= p && - p <= this->end && - (unsigned int) (this->end - p) >= len && - this->max_ops-- > 0; - - DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0, - "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s", - p, p + len, len, - this->start, this->end, - ok ? "OK" : "OUT-OF-RANGE"); - - return likely (ok); - } - - template - bool check_range (const T *base, - unsigned int a, - unsigned int b) const - { - return !hb_unsigned_mul_overflows (a, b) && - this->check_range (base, a * b); - } - - template - bool check_range (const T *base, - unsigned int a, - unsigned int b, - unsigned int c) const - { - return !hb_unsigned_mul_overflows (a, b) && - this->check_range (base, a * b, c); - } - - template - bool check_array (const T *base, unsigned int len) const - { - return this->check_range (base, len, hb_static_size (T)); - } - - template - bool check_array (const T *base, - unsigned int a, - unsigned int b) const - { - return this->check_range (base, a, b, hb_static_size (T)); - } - - template - bool check_struct (const Type *obj) const - { return likely (this->check_range (obj, obj->min_size)); } - - bool may_edit (const void *base, unsigned int len) - { - if (this->edit_count >= HB_SANITIZE_MAX_EDITS) - return false; - - const char *p = (const char *) base; - this->edit_count++; - - DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0, - "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s", - this->edit_count, - p, p + len, len, - this->start, this->end, - this->writable ? "GRANTED" : "DENIED"); - - return this->writable; - } - - template - bool try_set (const Type *obj, const ValueType &v) - { - if (this->may_edit (obj, hb_static_size (Type))) - { - hb_assign (* const_cast (obj), v); - return true; - } - return false; - } - - template - hb_blob_t *sanitize_blob (hb_blob_t *blob) - { - bool sane; - - init (blob); - - retry: - DEBUG_MSG_FUNC (SANITIZE, start, "start"); - - start_processing (); - - if (unlikely (!start)) - { - end_processing (); - return blob; - } - - Type *t = CastP (const_cast (start)); - - sane = t->sanitize (this); - if (sane) - { - if (edit_count) - { - DEBUG_MSG_FUNC (SANITIZE, start, "passed first round with %d edits; going for second round", edit_count); - - /* sanitize again to ensure no toe-stepping */ - edit_count = 0; - sane = t->sanitize (this); - if (edit_count) { - DEBUG_MSG_FUNC (SANITIZE, start, "requested %d edits in second round; FAILLING", edit_count); - sane = false; - } - } - } - else - { - if (edit_count && !writable) { - start = hb_blob_get_data_writable (blob, nullptr); - end = start + blob->length; - - if (start) - { - writable = true; - /* ok, we made it writable by relocating. 
try again */ - DEBUG_MSG_FUNC (SANITIZE, start, "retry"); - goto retry; - } - } - } - - end_processing (); - - DEBUG_MSG_FUNC (SANITIZE, start, sane ? "PASSED" : "FAILED"); - if (sane) - { - hb_blob_make_immutable (blob); - return blob; - } - else - { - hb_blob_destroy (blob); - return hb_blob_get_empty (); - } - } - - template - hb_blob_t *reference_table (const hb_face_t *face, hb_tag_t tableTag = Type::tableTag) - { - if (!num_glyphs_set) - set_num_glyphs (hb_face_get_glyph_count (face)); - return sanitize_blob (hb_face_reference_table (face, tableTag)); - } - - mutable unsigned int debug_depth; - const char *start, *end; - mutable int max_ops; - private: - bool writable; - unsigned int edit_count; - hb_blob_t *blob; - unsigned int num_glyphs; - bool num_glyphs_set; -}; - -struct hb_sanitize_with_object_t -{ - template - hb_sanitize_with_object_t (hb_sanitize_context_t *c, - const T& obj) : c (c) - { c->set_object (obj); } - ~hb_sanitize_with_object_t () - { c->reset_object (); } - - private: - hb_sanitize_context_t *c; -}; - - -/* - * Serialize - */ - -struct hb_serialize_context_t -{ - hb_serialize_context_t (void *start_, unsigned int size) - { - this->start = (char *) start_; - this->end = this->start + size; - reset (); - } - - bool in_error () const { return !this->successful; } - - void reset () - { - this->successful = true; - this->head = this->start; - this->debug_depth = 0; - } - - bool propagate_error (bool e) - { return this->successful = this->successful && e; } - template bool propagate_error (const T &obj) - { return this->successful = this->successful && !obj.in_error (); } - template bool propagate_error (const T *obj) - { return this->successful = this->successful && !obj->in_error (); } - template bool propagate_error (T1 &o1, T2 &o2) - { return propagate_error (o1) && propagate_error (o2); } - template bool propagate_error (T1 *o1, T2 *o2) - { return propagate_error (o1) && propagate_error (o2); } - template - bool propagate_error (T1 &o1, T2 &o2, T3 &o3) - { return propagate_error (o1) && propagate_error (o2, o3); } - template - bool propagate_error (T1 *o1, T2 *o2, T3 *o3) - { return propagate_error (o1) && propagate_error (o2, o3); } - - /* To be called around main operation. */ - template - Type *start_serialize () - { - DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1, - "start [%p..%p] (%lu bytes)", - this->start, this->end, - (unsigned long) (this->end - this->start)); - - return start_embed (); - } - void end_serialize () - { - DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1, - "end [%p..%p] serialized %d bytes; %s", - this->start, this->end, - (int) (this->head - this->start), - this->successful ? 
"successful" : "UNSUCCESSFUL"); - } - - unsigned int length () const { return this->head - this->start; } - - void align (unsigned int alignment) - { - unsigned int l = length () % alignment; - if (l) - allocate_size (alignment - l); - } - - template - Type *start_embed (const Type *_ HB_UNUSED = nullptr) const - { - Type *ret = reinterpret_cast (this->head); - return ret; - } - - template - Type *allocate_size (unsigned int size) - { - if (unlikely (!this->successful || this->end - this->head < ptrdiff_t (size))) { - this->successful = false; - return nullptr; - } - memset (this->head, 0, size); - char *ret = this->head; - this->head += size; - return reinterpret_cast (ret); - } - - template - Type *allocate_min () - { - return this->allocate_size (Type::min_size); - } - - template - Type *embed (const Type &obj) - { - unsigned int size = obj.get_size (); - Type *ret = this->allocate_size (size); - if (unlikely (!ret)) return nullptr; - memcpy (ret, &obj, size); - return ret; - } - template - hb_serialize_context_t &operator << (const Type &obj) { embed (obj); return *this; } - - template - Type *extend_size (Type &obj, unsigned int size) - { - assert (this->start <= (char *) &obj); - assert ((char *) &obj <= this->head); - assert ((char *) &obj + size >= this->head); - if (unlikely (!this->allocate_size (((char *) &obj) + size - this->head))) return nullptr; - return reinterpret_cast (&obj); - } - - template - Type *extend_min (Type &obj) { return extend_size (obj, obj.min_size); } - - template - Type *extend (Type &obj) { return extend_size (obj, obj.get_size ()); } - - /* Output routines. */ - template - Type *copy () const - { - assert (this->successful); - unsigned int len = this->head - this->start; - void *p = malloc (len); - if (p) - memcpy (p, this->start, len); - return reinterpret_cast (p); - } - hb_bytes_t copy_bytes () const - { - assert (this->successful); - unsigned int len = this->head - this->start; - void *p = malloc (len); - if (p) - memcpy (p, this->start, len); - else - return hb_bytes_t (); - return hb_bytes_t ((char *) p, len); - } - hb_blob_t *copy_blob () const - { - assert (this->successful); - return hb_blob_create (this->start, - this->head - this->start, - HB_MEMORY_MODE_DUPLICATE, - nullptr, nullptr); - } - - public: - unsigned int debug_depth; - char *start, *end, *head; - bool successful; -}; - - - -/* - * Big-endian integers. - */ - -template struct BEInt; - -template -struct BEInt -{ - public: - void set (Type V) { v = V; } - operator Type () const { return v; } - private: uint8_t v; -}; -template -struct BEInt -{ - public: - void set (Type V) - { - v[0] = (V >> 8) & 0xFF; - v[1] = (V ) & 0xFF; - } - operator Type () const - { -#if ((defined(__GNUC__) && __GNUC__ >= 5) || defined(__clang__)) && \ - defined(__BYTE_ORDER) && \ - (__BYTE_ORDER == __LITTLE_ENDIAN || __BYTE_ORDER == __BIG_ENDIAN) - /* Spoon-feed the compiler a big-endian integer with alignment 1. 
- * https://github.com/harfbuzz/harfbuzz/pull/1398 */ - struct __attribute__((packed)) packed_uint16_t { uint16_t v; }; -#if __BYTE_ORDER == __LITTLE_ENDIAN - return __builtin_bswap16 (((packed_uint16_t *) this)->v); -#else /* __BYTE_ORDER == __BIG_ENDIAN */ - return ((packed_uint16_t *) this)->v; -#endif -#endif - return (v[0] << 8) - + (v[1] ); - } - private: uint8_t v[2]; -}; -template -struct BEInt -{ - public: - void set (Type V) - { - v[0] = (V >> 16) & 0xFF; - v[1] = (V >> 8) & 0xFF; - v[2] = (V ) & 0xFF; - } - operator Type () const - { - return (v[0] << 16) - + (v[1] << 8) - + (v[2] ); - } - private: uint8_t v[3]; -}; -template -struct BEInt -{ - public: - typedef Type type; - void set (Type V) - { - v[0] = (V >> 24) & 0xFF; - v[1] = (V >> 16) & 0xFF; - v[2] = (V >> 8) & 0xFF; - v[3] = (V ) & 0xFF; - } - operator Type () const - { - return (v[0] << 24) - + (v[1] << 16) - + (v[2] << 8) - + (v[3] ); - } - private: uint8_t v[4]; -}; - /* * Lazy loaders. @@ -814,7 +190,7 @@ struct hb_lazy_loader_t : hb_data_wrapper_t const Returned * operator -> () const { return get (); } const Returned & operator * () const { return *get (); } - explicit_operator bool () const + explicit operator bool () const { return get_stored () != Funcs::get_null (); } template operator const C * () const { return get (); } @@ -858,7 +234,7 @@ struct hb_lazy_loader_t : hb_data_wrapper_t static Returned* convert (Stored *p) { return p; } /* By default null/init/fini the object. */ - static const Stored* get_null () { return &Null(Stored); } + static const Stored* get_null () { return &Null (Stored); } static Stored *create (Data *data) { Stored *p = (Stored *) calloc (1, sizeof (Stored)); diff --git a/src/java.desktop/share/native/libharfbuzz/hb-map.cc b/src/java.desktop/share/native/libharfbuzz/hb-map.cc index 3fccfa0fa01b..114efcb3d75d 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-map.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-map.cc @@ -69,7 +69,7 @@ hb_map_create () hb_map_t * hb_map_get_empty () { - return const_cast (&Null(hb_map_t)); + return const_cast (&Null (hb_map_t)); } /** diff --git a/src/java.desktop/share/native/libharfbuzz/hb-map.hh b/src/java.desktop/share/native/libharfbuzz/hb-map.hh index c04a8dff9663..a5c997c32e83 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-map.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-map.hh @@ -30,31 +30,36 @@ #include "hb.hh" -template -inline uint32_t Hash (const T &v) -{ - /* Knuth's multiplicative method: */ - return (uint32_t) v * 2654435761u; -} - - /* - * hb_map_t + * hb_hashmap_t */ -struct hb_map_t +template +struct hb_hashmap_t { - HB_NO_COPY_ASSIGN (hb_map_t); - hb_map_t () { init (); } - ~hb_map_t () { fini (); } + HB_DELETE_COPY_ASSIGN (hb_hashmap_t); + hb_hashmap_t () { init (); } + ~hb_hashmap_t () { fini (); } + + static_assert (hb_is_integral (K) || hb_is_pointer (K), ""); + static_assert (hb_is_integral (V) || hb_is_pointer (V), ""); struct item_t { - hb_codepoint_t key; - hb_codepoint_t value; - - bool is_unused () const { return key == INVALID; } - bool is_tombstone () const { return key != INVALID && value == INVALID; } + K key; + V value; + uint32_t hash; + + void clear () { key = kINVALID; value = vINVALID; hash = 0; } + + bool operator == (const K &o) { return hb_deref (key) == hb_deref (o); } + bool operator == (const item_t &o) { return *this == o.key; } + bool is_unused () const { return key == kINVALID; } + bool is_tombstone () const { return key != kINVALID && value == vINVALID; } + bool is_real 
() const { return key != kINVALID && value != vINVALID; } + hb_pair_t get_pair() const { return hb_pair_t (key, value); } }; hb_object_header_t header; @@ -82,14 +87,22 @@ struct hb_map_t { free (items); items = nullptr; + population = occupancy = 0; } void fini () { - population = occupancy = 0; hb_object_fini (this); fini_shallow (); } + void reset () + { + if (unlikely (hb_object_is_immutable (this))) + return; + successful = true; + clear (); + } + bool in_error () const { return !successful; } bool resize () @@ -104,7 +117,8 @@ struct hb_map_t successful = false; return false; } - memset (new_items, 0xFF, (size_t) new_size * sizeof (item_t)); + for (auto &_ : hb_iter (new_items, new_size)) + _.clear (); unsigned int old_size = mask + 1; item_t *old_items = items; @@ -118,22 +132,96 @@ struct hb_map_t /* Insert back old items. */ if (old_items) for (unsigned int i = 0; i < old_size; i++) - if (old_items[i].key != INVALID && old_items[i].value != INVALID) - set (old_items[i].key, old_items[i].value); + if (old_items[i].is_real ()) + set_with_hash (old_items[i].key, + old_items[i].hash, + old_items[i].value); free (old_items); return true; } - void set (hb_codepoint_t key, hb_codepoint_t value) + void set (K key, V value) + { + set_with_hash (key, hb_hash (key), value); + } + + V get (K key) const + { + if (unlikely (!items)) return vINVALID; + unsigned int i = bucket_for (key); + return items[i].is_real () && items[i] == key ? items[i].value : vINVALID; + } + + void del (K key) { set (key, vINVALID); } + + /* Has interface. */ + static constexpr V SENTINEL = vINVALID; + typedef V value_t; + value_t operator [] (K k) const { return get (k); } + bool has (K k, V *vp = nullptr) const + { + V v = (*this)[k]; + if (vp) *vp = v; + return v != SENTINEL; + } + /* Projection. */ + V operator () (K k) const { return get (k); } + + void clear () + { + if (unlikely (hb_object_is_immutable (this))) + return; + if (items) + for (auto &_ : hb_iter (items, mask + 1)) + _.clear (); + + population = occupancy = 0; + } + + bool is_empty () const { return population == 0; } + + unsigned int get_population () const { return population; } + + /* + * Iterator + */ + auto iter () const HB_AUTO_RETURN + ( + + hb_array (items, mask ? mask + 1 : 0) + | hb_filter (&item_t::is_real) + | hb_map (&item_t::get_pair) + ) + auto keys () const HB_AUTO_RETURN + ( + + hb_array (items, mask ? mask + 1 : 0) + | hb_filter (&item_t::is_real) + | hb_map (&item_t::key) + | hb_map (hb_ridentity) + ) + auto values () const HB_AUTO_RETURN + ( + + hb_array (items, mask ? mask + 1 : 0) + | hb_filter (&item_t::is_real) + | hb_map (&item_t::value) + | hb_map (hb_ridentity) + ) + + /* Sink interface. */ + hb_hashmap_t& operator << (const hb_pair_t& v) + { set (v.first, v.second); return *this; } + + protected: + + void set_with_hash (K key, uint32_t hash, V value) { if (unlikely (!successful)) return; - if (unlikely (key == INVALID)) return; + if (unlikely (key == kINVALID)) return; if ((occupancy + occupancy / 2) >= mask && !resize ()) return; - unsigned int i = bucket_for (key); + unsigned int i = bucket_for_hash (key, hash); - if (value == INVALID && items[i].key != key) + if (value == vINVALID && items[i].key != key) return; /* Trying to delete non-existent key. 
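In terms of the interface above, typical usage of the hb_codepoint_t instantiation (the hb_map_t alias declared at the end of this header) looks like:

    hb_map_t m;
    m.set (10u, 25u);
    m.set (11u, 26u);

    hb_codepoint_t v = m.get (11u);          /* 26 */
    bool found = m.has (10u);                /* true */
    m.del (10u);                             /* leaves a tombstone slot behind */

    for (auto p : m.iter ())                 /* pairs of (key, value) */
      use (p.first, p.second);               /* `use` is a placeholder */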
*/ if (!items[i].is_unused ()) @@ -145,55 +233,32 @@ struct hb_map_t items[i].key = key; items[i].value = value; + items[i].hash = hash; occupancy++; if (!items[i].is_tombstone ()) population++; - - } - hb_codepoint_t get (hb_codepoint_t key) const - { - if (unlikely (!items)) return INVALID; - unsigned int i = bucket_for (key); - return items[i].key == key ? items[i].value : INVALID; } - void del (hb_codepoint_t key) { set (key, INVALID); } - - bool has (hb_codepoint_t key) const - { return get (key) != INVALID; } - - hb_codepoint_t operator [] (unsigned int key) const - { return get (key); } - - static constexpr hb_codepoint_t INVALID = HB_MAP_VALUE_INVALID; - - void clear () + unsigned int bucket_for (K key) const { - memset (items, 0xFF, ((size_t) mask + 1) * sizeof (item_t)); - population = occupancy = 0; + return bucket_for_hash (key, hb_hash (key)); } - bool is_empty () const { return population == 0; } - - unsigned int get_population () const { return population; } - - protected: - - unsigned int bucket_for (hb_codepoint_t key) const + unsigned int bucket_for_hash (K key, uint32_t hash) const { - unsigned int i = Hash (key) % prime; + unsigned int i = hash % prime; unsigned int step = 0; - unsigned int tombstone = INVALID; + unsigned int tombstone = (unsigned) -1; while (!items[i].is_unused ()) { - if (items[i].key == key) + if (items[i].hash == hash && items[i] == key) return i; - if (tombstone == INVALID && items[i].is_tombstone ()) + if (tombstone == (unsigned) -1 && items[i].is_tombstone ()) tombstone = i; i = (i + ++step) & mask; } - return tombstone == INVALID ? i : tombstone; + return tombstone == (unsigned) -1 ? i : tombstone; } static unsigned int prime_for (unsigned int shift) @@ -248,5 +313,14 @@ struct hb_map_t } }; +/* + * hb_map_t + */ + +struct hb_map_t : hb_hashmap_t {}; + #endif /* HB_MAP_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-meta.hh b/src/java.desktop/share/native/libharfbuzz/hb-meta.hh new file mode 100644 index 000000000000..ea64416f0d29 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-meta.hh @@ -0,0 +1,410 @@ +/* + * Copyright © 2018 Google, Inc. + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Google Author(s): Behdad Esfahbod + */ + +#ifndef HB_META_HH +#define HB_META_HH + +#include "hb.hh" + + +/* + * C++ template meta-programming & fundamentals used with them. + */ + +/* Void! For when we need a expression-type of void. 
*/ +struct hb_empty_t {}; + +/* https://en.cppreference.com/w/cpp/types/void_t */ +template struct _hb_void_t { typedef void type; }; +template using hb_void_t = typename _hb_void_t::type; + +template struct _hb_head_t { typedef Head type; }; +template using hb_head_t = typename _hb_head_t::type; + +template struct hb_integral_constant { static constexpr T value = v; }; +template using hb_bool_constant = hb_integral_constant; +using hb_true_type = hb_bool_constant; +using hb_false_type = hb_bool_constant; + + +/* Basic type SFINAE. */ + +template struct hb_enable_if {}; +template struct hb_enable_if { typedef T type; }; +#define hb_enable_if(Cond) typename hb_enable_if<(Cond)>::type* = nullptr +/* Concepts/Requires alias: */ +#define hb_requires(Cond) hb_enable_if((Cond)) + +template struct hb_is_same : hb_false_type {}; +template struct hb_is_same : hb_true_type {}; +#define hb_is_same(T, T2) hb_is_same::value + +/* Function overloading SFINAE and priority. */ + +#define HB_RETURN(Ret, E) -> hb_head_t { return (E); } +#define HB_AUTO_RETURN(E) -> decltype ((E)) { return (E); } +#define HB_VOID_RETURN(E) -> hb_void_t { (E); } + +template struct hb_priority : hb_priority {}; +template <> struct hb_priority<0> {}; +#define hb_prioritize hb_priority<16> () + +#define HB_FUNCOBJ(x) static_const x HB_UNUSED + + +template struct hb_type_identity_t { typedef T type; }; +template using hb_type_identity = typename hb_type_identity_t::type; + +struct +{ + template constexpr T* + operator () (T& arg) const + { +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wcast-align" + /* https://en.cppreference.com/w/cpp/memory/addressof */ + return reinterpret_cast ( + &const_cast ( + reinterpret_cast (arg))); +#pragma GCC diagnostic pop + } +} +HB_FUNCOBJ (hb_addressof); + +template static inline T hb_declval (); +#define hb_declval(T) (hb_declval ()) + +template struct hb_match_const : hb_type_identity_t, hb_bool_constant{}; +template struct hb_match_const : hb_type_identity_t, hb_bool_constant {}; +template using hb_remove_const = typename hb_match_const::type; +template using hb_add_const = const T; +#define hb_is_const(T) hb_match_const::value +template struct hb_match_reference : hb_type_identity_t, hb_bool_constant{}; +template struct hb_match_reference : hb_type_identity_t, hb_bool_constant {}; +template struct hb_match_reference : hb_type_identity_t, hb_bool_constant {}; +template using hb_remove_reference = typename hb_match_reference::type; +template auto _hb_try_add_lvalue_reference (hb_priority<1>) -> hb_type_identity; +template auto _hb_try_add_lvalue_reference (hb_priority<0>) -> hb_type_identity; +template using hb_add_lvalue_reference = decltype (_hb_try_add_lvalue_reference (hb_prioritize)); +template auto _hb_try_add_rvalue_reference (hb_priority<1>) -> hb_type_identity; +template auto _hb_try_add_rvalue_reference (hb_priority<0>) -> hb_type_identity; +template using hb_add_rvalue_reference = decltype (_hb_try_add_rvalue_reference (hb_prioritize)); +#define hb_is_reference(T) hb_match_reference::value +template struct hb_match_pointer : hb_type_identity_t, hb_bool_constant{}; +template struct hb_match_pointer : hb_type_identity_t, hb_bool_constant {}; +template using hb_remove_pointer = typename hb_match_pointer::type; +template auto _hb_try_add_pointer (hb_priority<1>) -> hb_type_identity*>; +template auto _hb_try_add_pointer (hb_priority<1>) -> hb_type_identity; +template using hb_add_pointer = decltype (_hb_try_add_pointer (hb_prioritize)); +#define hb_is_pointer(T) 
hb_match_pointer::value + + +/* TODO Add feature-parity to std::decay. */ +template using hb_decay = hb_remove_const>; + + +template +struct _hb_conditional { typedef T type; }; +template +struct _hb_conditional { typedef F type; }; +template +using hb_conditional = typename _hb_conditional::type; + + +template +struct hb_is_convertible +{ + private: + static constexpr bool from_void = hb_is_same (void, hb_decay); + static constexpr bool to_void = hb_is_same (void, hb_decay ); + static constexpr bool either_void = from_void || to_void; + static constexpr bool both_void = from_void && to_void; + + static hb_true_type impl2 (hb_conditional); + + template + static auto impl (hb_priority<1>) -> decltype (impl2 (hb_declval (T))); + template + static hb_false_type impl (hb_priority<0>); + public: + static constexpr bool value = both_void || + (!either_void && + decltype (impl> (hb_prioritize))::value); +}; +#define hb_is_convertible(From,To) hb_is_convertible::value + +template +using hb_is_base_of = hb_is_convertible *, hb_decay *>; +#define hb_is_base_of(Base,Derived) hb_is_base_of::value + +template +using hb_is_cr_convertible = hb_bool_constant< + hb_is_same (hb_decay, hb_decay) && + (!hb_is_const (From) || hb_is_const (To)) && + (!hb_is_reference (To) || hb_is_const (To) || hb_is_reference (To)) +>; +#define hb_is_cr_convertible(From,To) hb_is_cr_convertible::value + +/* std::move and std::forward */ + +template +static constexpr hb_remove_reference&& hb_move (T&& t) { return (hb_remove_reference&&) (t); } + +template +static constexpr T&& hb_forward (hb_remove_reference& t) { return (T&&) t; } +template +static constexpr T&& hb_forward (hb_remove_reference&& t) { return (T&&) t; } + +struct +{ + template constexpr auto + operator () (T&& v) const HB_AUTO_RETURN (hb_forward (v)) + + template constexpr auto + operator () (T *v) const HB_AUTO_RETURN (*v) +} +HB_FUNCOBJ (hb_deref); + +struct +{ + template constexpr auto + operator () (T&& v) const HB_AUTO_RETURN (hb_forward (v)) + + template constexpr auto + operator () (T& v) const HB_AUTO_RETURN (hb_addressof (v)) +} +HB_FUNCOBJ (hb_ref); + +template +struct hb_reference_wrapper +{ + hb_reference_wrapper (T v) : v (v) {} + bool operator == (const hb_reference_wrapper& o) const { return v == o.v; } + bool operator != (const hb_reference_wrapper& o) const { return v != o.v; } + operator T () const { return v; } + T get () const { return v; } + T v; +}; +template +struct hb_reference_wrapper +{ + hb_reference_wrapper (T& v) : v (hb_addressof (v)) {} + bool operator == (const hb_reference_wrapper& o) const { return v == o.v; } + bool operator != (const hb_reference_wrapper& o) const { return v != o.v; } + operator T& () const { return *v; } + T& get () const { return *v; } + T* v; +}; + + +template +using hb_is_integral = hb_bool_constant< + hb_is_same (hb_decay, char) || + hb_is_same (hb_decay, signed char) || + hb_is_same (hb_decay, unsigned char) || + hb_is_same (hb_decay, signed int) || + hb_is_same (hb_decay, unsigned int) || + hb_is_same (hb_decay, signed short) || + hb_is_same (hb_decay, unsigned short) || + hb_is_same (hb_decay, signed long) || + hb_is_same (hb_decay, unsigned long) || + hb_is_same (hb_decay, signed long long) || + hb_is_same (hb_decay, unsigned long long) || + false +>; +#define hb_is_integral(T) hb_is_integral::value +template +using hb_is_floating_point = hb_bool_constant< + hb_is_same (hb_decay, float) || + hb_is_same (hb_decay, double) || + hb_is_same (hb_decay, long double) || + false +>; +#define 
hb_is_floating_point(T) hb_is_floating_point::value +template +using hb_is_arithmetic = hb_bool_constant< + hb_is_integral (T) || + hb_is_floating_point (T) || + false +>; +#define hb_is_arithmetic(T) hb_is_arithmetic::value + + +template +using hb_is_signed = hb_conditional, + hb_false_type>; +#define hb_is_signed(T) hb_is_signed::value +template +using hb_is_unsigned = hb_conditional, + hb_false_type>; +#define hb_is_unsigned(T) hb_is_unsigned::value + +template struct hb_int_min; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +template <> struct hb_int_min : hb_integral_constant {}; +#define hb_int_min(T) hb_int_min::value +template struct hb_int_max; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +template <> struct hb_int_max : hb_integral_constant {}; +#define hb_int_max(T) hb_int_max::value + + + +template +struct _hb_is_destructible : hb_false_type {}; +template +struct _hb_is_destructible> : hb_true_type {}; +template +using hb_is_destructible = _hb_is_destructible; +#define hb_is_destructible(T) hb_is_destructible::value + +template +struct _hb_is_constructible : hb_false_type {}; +template +struct _hb_is_constructible, Ts...> : hb_true_type {}; +template +using hb_is_constructible = _hb_is_constructible; +#define hb_is_constructible(...) hb_is_constructible<__VA_ARGS__>::value + +template +using hb_is_default_constructible = hb_is_constructible; +#define hb_is_default_constructible(T) hb_is_default_constructible::value + +template +using hb_is_copy_constructible = hb_is_constructible>>; +#define hb_is_copy_constructible(T) hb_is_copy_constructible::value + +template +using hb_is_move_constructible = hb_is_constructible>>; +#define hb_is_move_constructible(T) hb_is_move_constructible::value + +template +struct _hb_is_assignable : hb_false_type {}; +template +struct _hb_is_assignable> : hb_true_type {}; +template +using hb_is_assignable = _hb_is_assignable; +#define hb_is_assignable(T,U) hb_is_assignable::value + +template +using hb_is_copy_assignable = hb_is_assignable, + hb_add_lvalue_reference>>; +#define hb_is_copy_assignable(T) hb_is_copy_assignable::value + +template +using hb_is_move_assignable = hb_is_assignable, + hb_add_rvalue_reference>; +#define hb_is_move_assignable(T) hb_is_move_assignable::value + +/* Trivial versions. 
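The constructible/destructible/assignable traits above are built on expression SFINAE. A minimal sketch of that detection idiom follows, written with standard-library names (std::void_t, std::declval) as an illustration rather than HarfBuzz's exact template signatures.

#include <type_traits>
#include <utility>

template <typename, typename = void>
struct is_destructible_sketch : std::false_type {};

// The partial specialization is only viable when the destructor-call
// expression is well-formed; otherwise SFINAE removes it from consideration.
template <typename T>
struct is_destructible_sketch<T, std::void_t<decltype (std::declval<T &> ().~T ())>>
  : std::true_type {};

struct has_dtor { ~has_dtor () {} };
struct no_dtor  { ~no_dtor () = delete; };

static_assert (is_destructible_sketch<has_dtor>::value, "");
static_assert (!is_destructible_sketch<no_dtor>::value, "");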
*/ + +template union hb_trivial { T value; }; + +template +using hb_is_trivially_destructible= hb_is_destructible>; +#define hb_is_trivially_destructible(T) hb_is_trivially_destructible::value + +/* Don't know how to do the following. */ +//template +//using hb_is_trivially_constructible= hb_is_constructible, hb_trivial...>; +//#define hb_is_trivially_constructible(...) hb_is_trivially_constructible<__VA_ARGS__>::value + +template +using hb_is_trivially_default_constructible= hb_is_default_constructible>; +#define hb_is_trivially_default_constructible(T) hb_is_trivially_default_constructible::value + +template +using hb_is_trivially_copy_constructible= hb_is_copy_constructible>; +#define hb_is_trivially_copy_constructible(T) hb_is_trivially_copy_constructible::value + +template +using hb_is_trivially_move_constructible= hb_is_move_constructible>; +#define hb_is_trivially_move_constructible(T) hb_is_trivially_move_constructible::value + +/* Don't know how to do the following. */ +//template +//using hb_is_trivially_assignable= hb_is_assignable, hb_trivial>; +//#define hb_is_trivially_assignable(T,U) hb_is_trivially_assignable::value + +template +using hb_is_trivially_copy_assignable= hb_is_copy_assignable>; +#define hb_is_trivially_copy_assignable(T) hb_is_trivially_copy_assignable::value + +template +using hb_is_trivially_move_assignable= hb_is_move_assignable>; +#define hb_is_trivially_move_assignable(T) hb_is_trivially_move_assignable::value + +template +using hb_is_trivially_copyable= hb_bool_constant< + hb_is_trivially_destructible (T) && + (!hb_is_move_assignable (T) || hb_is_trivially_move_assignable (T)) && + (!hb_is_move_constructible (T) || hb_is_trivially_move_constructible (T)) && + (!hb_is_copy_assignable (T) || hb_is_trivially_copy_assignable (T)) && + (!hb_is_copy_constructible (T) || hb_is_trivially_copy_constructible (T)) && + true +>; +#define hb_is_trivially_copyable(T) hb_is_trivially_copyable::value + +template +using hb_is_trivial= hb_bool_constant< + hb_is_trivially_copyable (T) && + hb_is_trivially_default_constructible (T) +>; +#define hb_is_trivial(T) hb_is_trivial::value + +/* hb_unwrap_type (T) + * If T has no T::type, returns T. Otherwise calls itself on T::type recursively. + */ + +template +struct _hb_unwrap_type : hb_type_identity_t {}; +template +struct _hb_unwrap_type> : _hb_unwrap_type {}; +template +using hb_unwrap_type = _hb_unwrap_type; +#define hb_unwrap_type(T) typename hb_unwrap_type::type + +#endif /* HB_META_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-mutex.hh b/src/java.desktop/share/native/libharfbuzz/hb-mutex.hh index 1582b40cb048..f2d2962b7138 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-mutex.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-mutex.hh @@ -48,12 +48,22 @@ /* Defined externally, i.e. in config.h; must have typedef'ed hb_mutex_impl_t as well. 
*/ +#elif !defined(HB_NO_MT) && (defined(HAVE_PTHREAD) || defined(__APPLE__)) + +#include +typedef pthread_mutex_t hb_mutex_impl_t; +#define HB_MUTEX_IMPL_INIT PTHREAD_MUTEX_INITIALIZER +#define hb_mutex_impl_init(M) pthread_mutex_init (M, nullptr) +#define hb_mutex_impl_lock(M) pthread_mutex_lock (M) +#define hb_mutex_impl_unlock(M) pthread_mutex_unlock (M) +#define hb_mutex_impl_finish(M) pthread_mutex_destroy (M) + + #elif !defined(HB_NO_MT) && defined(_WIN32) -#include typedef CRITICAL_SECTION hb_mutex_impl_t; #define HB_MUTEX_IMPL_INIT {0} -#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY==WINAPI_FAMILY_PC_APP || WINAPI_FAMILY==WINAPI_FAMILY_PHONE_APP) +#if !WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) #define hb_mutex_impl_init(M) InitializeCriticalSectionEx (M, 0, 0) #else #define hb_mutex_impl_init(M) InitializeCriticalSection (M) @@ -63,17 +73,6 @@ typedef CRITICAL_SECTION hb_mutex_impl_t; #define hb_mutex_impl_finish(M) DeleteCriticalSection (M) -#elif !defined(HB_NO_MT) && (defined(HAVE_PTHREAD) || defined(__APPLE__)) - -#include -typedef pthread_mutex_t hb_mutex_impl_t; -#define HB_MUTEX_IMPL_INIT PTHREAD_MUTEX_INITIALIZER -#define hb_mutex_impl_init(M) pthread_mutex_init (M, nullptr) -#define hb_mutex_impl_lock(M) pthread_mutex_lock (M) -#define hb_mutex_impl_unlock(M) pthread_mutex_unlock (M) -#define hb_mutex_impl_finish(M) pthread_mutex_destroy (M) - - #elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES) #if defined(HAVE_SCHED_H) && defined(HAVE_SCHED_YIELD) @@ -92,25 +91,7 @@ typedef volatile int hb_mutex_impl_t; #define hb_mutex_impl_finish(M) HB_STMT_START {} HB_STMT_END -#elif !defined(HB_NO_MT) - -#if defined(HAVE_SCHED_H) && defined(HAVE_SCHED_YIELD) -# include -# define HB_SCHED_YIELD() sched_yield () -#else -# define HB_SCHED_YIELD() HB_STMT_START {} HB_STMT_END -#endif - -#define HB_MUTEX_INT_NIL 1 /* Warn that fallback implementation is in use. */ -typedef volatile int hb_mutex_impl_t; -#define HB_MUTEX_IMPL_INIT 0 -#define hb_mutex_impl_init(M) *(M) = 0 -#define hb_mutex_impl_lock(M) HB_STMT_START { while (*(M)) HB_SCHED_YIELD (); (*(M))++; } HB_STMT_END -#define hb_mutex_impl_unlock(M) (*(M))--; -#define hb_mutex_impl_finish(M) HB_STMT_START {} HB_STMT_END - - -#else /* HB_NO_MT */ +#elif defined(HB_NO_MT) typedef int hb_mutex_impl_t; #define HB_MUTEX_IMPL_INIT 0 @@ -120,6 +101,11 @@ typedef int hb_mutex_impl_t; #define hb_mutex_impl_finish(M) HB_STMT_START {} HB_STMT_END +#else + +#error "Could not find any system to define mutex macros." +#error "Check hb-mutex.hh for possible resolutions." + #endif @@ -127,8 +113,6 @@ typedef int hb_mutex_impl_t; struct hb_mutex_t { - /* TODO Add tracing. */ - hb_mutex_impl_t m; void init () { hb_mutex_impl_init (&m); } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-null.hh b/src/java.desktop/share/native/libharfbuzz/hb-null.hh index 8a0e2d7dd05f..d2bc3322e53c 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-null.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-null.hh @@ -28,6 +28,7 @@ #define HB_NULL_HH #include "hb.hh" +#include "hb-meta.hh" /* @@ -36,7 +37,7 @@ /* Global nul-content Null pool. Enlarge as necessary. */ -#define HB_NULL_POOL_SIZE 9880 +#define HB_NULL_POOL_SIZE 384 /* Use SFINAE to sniff whether T has min_size; in which case return T::null_size, * otherwise return sizeof(T). 
*/ @@ -45,18 +46,13 @@ * https://stackoverflow.com/questions/7776448/sfinae-tried-with-bool-gives-compiler-error-template-argument-tvalue-invol */ -template struct _hb_bool_type {}; - -template -struct _hb_null_size -{ enum { value = sizeof (T) }; }; +template +struct _hb_null_size : hb_integral_constant {}; template -struct _hb_null_size > -{ enum { value = T::null_size }; }; +struct _hb_null_size> : hb_integral_constant {}; template -struct hb_null_size -{ enum { value = _hb_null_size >::value }; }; +using hb_null_size = _hb_null_size; #define hb_null_size(T) hb_null_size::value /* These doesn't belong here, but since is copy/paste from above, put it here. */ @@ -64,56 +60,36 @@ struct hb_null_size /* hb_static_size (T) * Returns T::static_size if T::min_size is defined, or sizeof (T) otherwise. */ -template -struct _hb_static_size -{ enum { value = sizeof (T) }; }; +template +struct _hb_static_size : hb_integral_constant {}; template -struct _hb_static_size > -{ enum { value = T::static_size }; }; - +struct _hb_static_size> : hb_integral_constant {}; template -struct hb_static_size -{ enum { value = _hb_static_size >::value }; }; +using hb_static_size = _hb_static_size; #define hb_static_size(T) hb_static_size::value -/* hb_assign (obj, value) - * Calls obj.set (value) if obj.min_size is defined and value has different type - * from obj, or obj = v otherwise. */ - -template -struct _hb_assign -{ static inline void value (T &o, const V v) { o = v; } }; -template -struct _hb_assign > -{ static inline void value (T &o, const V v) { o.set (v); } }; -template -struct _hb_assign > -{ static inline void value (T &o, const T v) { o = v; } }; - -template -static inline void hb_assign (T &o, const V v) -{ _hb_assign >::value (o, v); } - - /* * Null() */ extern HB_INTERNAL -hb_vector_size_impl_t const _hb_NullPool[(HB_NULL_POOL_SIZE + sizeof (hb_vector_size_impl_t) - 1) / sizeof (hb_vector_size_impl_t)]; +uint64_t const _hb_NullPool[(HB_NULL_POOL_SIZE + sizeof (uint64_t) - 1) / sizeof (uint64_t)]; /* Generic nul-content Null objects. */ template -static inline Type const & Null () { - static_assert (hb_null_size (Type) <= HB_NULL_POOL_SIZE, "Increase HB_NULL_POOL_SIZE."); - return *reinterpret_cast (_hb_NullPool); -} +struct Null { + static Type const & get_null () + { + static_assert (hb_null_size (Type) <= HB_NULL_POOL_SIZE, "Increase HB_NULL_POOL_SIZE."); + return *reinterpret_cast (_hb_NullPool); + } +}; template struct NullHelper { - typedef typename hb_remove_const (typename hb_remove_reference (QType)) Type; - static const Type & get_null () { return Null (); } + typedef hb_remove_const> Type; + static const Type & get_null () { return Null::get_null (); } }; #define Null(Type) NullHelper::get_null () @@ -122,11 +98,13 @@ struct NullHelper } /* Close namespace. */ \ extern HB_INTERNAL const unsigned char _hb_Null_##Namespace##_##Type[Namespace::Type::null_size]; \ template <> \ - /*static*/ inline const Namespace::Type& Null () { \ - return *reinterpret_cast (_hb_Null_##Namespace##_##Type); \ - } \ + struct Null { \ + static Namespace::Type const & get_null () { \ + return *reinterpret_cast (_hb_Null_##Namespace##_##Type); \ + } \ + }; \ namespace Namespace { \ - static_assert (true, "Just so we take semicolon after.") + static_assert (true, "") /* Require semicolon after. 
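A simplified model of what the null pool provides (hypothetical names, not the real declarations): one shared, zero-filled, suitably aligned buffer is reinterpreted as a read-only "null instance" of any plain-data type small enough to fit.

#include <cstdint>
#include <cstdio>

static const uint64_t null_pool_sketch[384 / sizeof (uint64_t)] = {};  // all zero bytes

template <typename Type>
static const Type &null_of ()
{
  static_assert (sizeof (Type) <= sizeof (null_pool_sketch),
                 "Increase the pool size.");
  return *reinterpret_cast<const Type *> (null_pool_sketch);
}

struct TableRecordSketch { uint32_t tag, checksum, offset, length; };

int main ()
{
  // Every field of the shared null instance reads as zero.
  const TableRecordSketch &rec = null_of<TableRecordSketch> ();
  std::printf ("%u %u\n", rec.tag, rec.length);   // prints "0 0"
  return 0;
}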
*/ #define DEFINE_NULL_NAMESPACE_BYTES(Namespace, Type) \ const unsigned char _hb_Null_##Namespace##_##Type[Namespace::Type::null_size] @@ -134,10 +112,12 @@ struct NullHelper #define DECLARE_NULL_INSTANCE(Type) \ extern HB_INTERNAL const Type _hb_Null_##Type; \ template <> \ - /*static*/ inline const Type& Null () { \ - return _hb_Null_##Type; \ - } \ -static_assert (true, "Just so we take semicolon after.") + struct Null { \ + static Type const & get_null () { \ + return _hb_Null_##Type; \ + } \ + }; \ + static_assert (true, "") /* Require semicolon after. */ #define DEFINE_NULL_INSTANCE(Type) \ const Type _hb_Null_##Type @@ -148,31 +128,31 @@ static_assert (true, "Just so we take semicolon after.") * causing bad memory access. So, races there are not actually introducing incorrectness * in the code. Has ~12kb binary size overhead to have it, also clang build fails with it. */ extern HB_INTERNAL -/*thread_local*/ hb_vector_size_impl_t _hb_CrapPool[(HB_NULL_POOL_SIZE + sizeof (hb_vector_size_impl_t) - 1) / sizeof (hb_vector_size_impl_t)]; +/*thread_local*/ uint64_t _hb_CrapPool[(HB_NULL_POOL_SIZE + sizeof (uint64_t) - 1) / sizeof (uint64_t)]; /* CRAP pool: Common Region for Access Protection. */ template static inline Type& Crap () { static_assert (hb_null_size (Type) <= HB_NULL_POOL_SIZE, "Increase HB_NULL_POOL_SIZE."); Type *obj = reinterpret_cast (_hb_CrapPool); - memcpy (obj, &Null(Type), sizeof (*obj)); + memcpy (obj, &Null (Type), sizeof (*obj)); return *obj; } template struct CrapHelper { - typedef typename hb_remove_const (typename hb_remove_reference (QType)) Type; + typedef hb_remove_const> Type; static Type & get_crap () { return Crap (); } }; #define Crap(Type) CrapHelper::get_crap () template struct CrapOrNullHelper { - static Type & get () { return Crap(Type); } + static Type & get () { return Crap (Type); } }; template struct CrapOrNullHelper { - static const Type & get () { return Null(Type); } + static const Type & get () { return Null (Type); } }; #define CrapOrNull(Type) CrapOrNullHelper::get () @@ -184,7 +164,7 @@ struct CrapOrNullHelper { template struct hb_nonnull_ptr_t { - typedef typename hb_remove_pointer (P) T; + typedef hb_remove_pointer
<P>
T; hb_nonnull_ptr_t (T *v_ = nullptr) : v (v_) {} T * operator = (T *v_) { return v = v_; } @@ -194,7 +174,7 @@ struct hb_nonnull_ptr_t /* Only auto-cast to const types. */ template operator const C * () const { return get (); } operator const char * () const { return (const char *) get (); } - T * get () const { return v ? v : const_cast (&Null(T)); } + T * get () const { return v ? v : const_cast (&Null (T)); } T * get_raw () const { return v; } T *v; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-number-parser.hh b/src/java.desktop/share/native/libharfbuzz/hb-number-parser.hh new file mode 100644 index 000000000000..9d2867e48355 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-number-parser.hh @@ -0,0 +1,237 @@ + +#line 1 "hb-number-parser.rl" +/* + * Copyright © 2019 Ebrahim Byagowi + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
+ * + */ + +#ifndef HB_NUMBER_PARSER_HH +#define HB_NUMBER_PARSER_HH + +#include "hb.hh" + + +#line 35 "hb-number-parser.hh" +static const unsigned char _double_parser_trans_keys[] = { + 0u, 0u, 43u, 57u, 46u, 57u, 48u, 57u, 43u, 57u, 48u, 57u, 48u, 101u, 48u, 57u, + 46u, 101u, 0 +}; + +static const char _double_parser_key_spans[] = { + 0, 15, 12, 10, 15, 10, 54, 10, + 56 +}; + +static const unsigned char _double_parser_index_offsets[] = { + 0, 0, 16, 29, 40, 56, 67, 122, + 133 +}; + +static const char _double_parser_indicies[] = { + 0, 1, 2, 3, 1, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, + 1, 3, 1, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 1, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, + 1, 6, 1, 7, 1, 1, 8, 8, + 8, 8, 8, 8, 8, 8, 8, 8, + 1, 8, 8, 8, 8, 8, 8, 8, + 8, 8, 8, 1, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 9, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 9, 1, 8, 8, 8, 8, 8, + 8, 8, 8, 8, 8, 1, 3, 1, + 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 9, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 9, 1, 0 +}; + +static const char _double_parser_trans_targs[] = { + 2, 0, 2, 3, 8, 6, 5, 5, + 7, 4 +}; + +static const char _double_parser_trans_actions[] = { + 0, 0, 1, 0, 2, 3, 0, 4, + 5, 0 +}; + +static const int double_parser_start = 1; +static const int double_parser_first_final = 6; +static const int double_parser_error = 0; + +static const int double_parser_en_main = 1; + + +#line 68 "hb-number-parser.rl" + + +/* Works only for n < 512 */ +static inline double +_pow10 (unsigned exponent) +{ + static const double _powers_of_10[] = + { + 1.0e+256, + 1.0e+128, + 1.0e+64, + 1.0e+32, + 1.0e+16, + 1.0e+8, + 10000., + 100., + 10. + }; + unsigned mask = 1 << (ARRAY_LENGTH (_powers_of_10) - 1); + double result = 1; + for (const double *power = _powers_of_10; mask; ++power, mask >>= 1) + if (exponent & mask) result *= *power; + return result; +} + +/* a variant of strtod that also gets end of buffer in its second argument */ +static inline double +strtod_rl (const char *p, const char **end_ptr /* IN/OUT */) +{ + double value = 0; + double frac = 0; + double frac_count = 0; + unsigned exp = 0; + bool neg = false, exp_neg = false, exp_overflow = false; + const unsigned long long MAX_FRACT = 0xFFFFFFFFFFFFFull; /* 2^52-1 */ + const unsigned MAX_EXP = 0x7FFu; /* 2^11-1 */ + + const char *pe = *end_ptr; + while (p < pe && ISSPACE (*p)) + p++; + + int cs; + +#line 139 "hb-number-parser.hh" + { + cs = double_parser_start; + } + +#line 144 "hb-number-parser.hh" + { + int _slen; + int _trans; + const unsigned char *_keys; + const char *_inds; + if ( p == pe ) + goto _test_eof; + if ( cs == 0 ) + goto _out; +_resume: + _keys = _double_parser_trans_keys + (cs<<1); + _inds = _double_parser_indicies + _double_parser_index_offsets[cs]; + + _slen = _double_parser_key_spans[cs]; + _trans = _inds[ _slen > 0 && _keys[0] <=(*p) && + (*p) <= _keys[1] ? + (*p) - _keys[0] : _slen ]; + + cs = _double_parser_trans_targs[_trans]; + + if ( _double_parser_trans_actions[_trans] == 0 ) + goto _again; + + switch ( _double_parser_trans_actions[_trans] ) { + case 1: +#line 37 "hb-number-parser.rl" + { neg = true; } + break; + case 4: +#line 38 "hb-number-parser.rl" + { exp_neg = true; } + break; + case 2: +#line 40 "hb-number-parser.rl" + { + value = value * 10. + ((*p) - '0'); +} + break; + case 3: +#line 43 "hb-number-parser.rl" + { + if (likely (frac <= MAX_FRACT / 10)) + { + frac = frac * 10. 
+ ((*p) - '0'); + ++frac_count; + } +} + break; + case 5: +#line 50 "hb-number-parser.rl" + { + if (likely (exp * 10 + ((*p) - '0') <= MAX_EXP)) + exp = exp * 10 + ((*p) - '0'); + else + exp_overflow = true; +} + break; +#line 202 "hb-number-parser.hh" + } + +_again: + if ( cs == 0 ) + goto _out; + if ( ++p != pe ) + goto _resume; + _test_eof: {} + _out: {} + } + +#line 113 "hb-number-parser.rl" + + + *end_ptr = p; + + if (frac_count) value += frac / _pow10 (frac_count); + if (neg) value *= -1.; + + if (unlikely (exp_overflow)) + { + if (value == 0) return value; + if (exp_neg) return neg ? -DBL_MIN : DBL_MIN; + else return neg ? -DBL_MAX : DBL_MAX; + } + + if (exp) + { + if (exp_neg) value /= _pow10 (exp); + else value *= _pow10 (exp); + } + + return value; +} + +#endif /* HB_NUMBER_PARSER_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-number.cc b/src/java.desktop/share/native/libharfbuzz/hb-number.cc new file mode 100644 index 000000000000..f4ce693d8ed8 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-number.cc @@ -0,0 +1,80 @@ +/* + * Copyright © 2019 Ebrahim Byagowi + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
+ * + */ + +#include "hb.hh" +#include "hb-machinery.hh" +#include "hb-number.hh" +#include "hb-number-parser.hh" + +template +static bool +_parse_number (const char **pp, const char *end, T *pv, + bool whole_buffer, Func f) +{ + char buf[32]; + unsigned len = hb_min (ARRAY_LENGTH (buf) - 1, (unsigned) (end - *pp)); + strncpy (buf, *pp, len); + buf[len] = '\0'; + + char *p = buf; + char *pend = p; + + errno = 0; + *pv = f (p, &pend); + if (unlikely (errno || p == pend || + /* Check if consumed whole buffer if is requested */ + (whole_buffer && pend - p != end - *pp))) + return false; + + *pp += pend - p; + return true; +} + +bool +hb_parse_int (const char **pp, const char *end, int *pv, bool whole_buffer) +{ + return _parse_number (pp, end, pv, whole_buffer, + [] (const char *p, char **end) + { return strtol (p, end, 10); }); +} + +bool +hb_parse_uint (const char **pp, const char *end, unsigned *pv, + bool whole_buffer, int base) +{ + return _parse_number (pp, end, pv, whole_buffer, + [base] (const char *p, char **end) + { return strtoul (p, end, base); }); +} + +bool +hb_parse_double (const char **pp, const char *end, double *pv, bool whole_buffer) +{ + const char *pend = end; + *pv = strtod_rl (*pp, &pend); + if (unlikely (*pp == pend)) return false; + *pp = pend; + return !whole_buffer || end == pend; +} diff --git a/src/java.desktop/share/native/libharfbuzz/hb-number.hh b/src/java.desktop/share/native/libharfbuzz/hb-number.hh new file mode 100644 index 000000000000..47d902cf3e74 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-number.hh @@ -0,0 +1,41 @@ +/* + * Copyright © 2019 Ebrahim Byagowi + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
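A standalone sketch of the pointer-pair parsing contract these helpers follow, assuming the same bounded-buffer-plus-strtoul approach as _parse_number above; the names here are hypothetical.

#include <cerrno>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <algorithm>

static bool parse_uint_sketch (const char **pp, const char *end, unsigned *pv)
{
  char buf[32];
  unsigned len = std::min<unsigned> (sizeof (buf) - 1, (unsigned) (end - *pp));
  memcpy (buf, *pp, len);
  buf[len] = '\0';                            // the input range is not NUL-terminated

  char *pend = buf;
  errno = 0;
  unsigned long v = strtoul (buf, &pend, 10);
  if (errno || pend == buf) return false;     // nothing consumed, or overflow

  *pv = (unsigned) v;
  *pp += pend - buf;                          // advance the caller's cursor past the digits
  return true;
}

int main ()
{
  const char *s = "123 rest";
  const char *p = s;
  unsigned v = 0;
  if (parse_uint_sketch (&p, s + strlen (s), &v))
    std::printf ("%u, remaining \"%s\"\n", v, p);   // 123, remaining " rest"
  return 0;
}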
+ * + */ + +#ifndef HB_NUMBER_HH +#define HB_NUMBER_HH + +HB_INTERNAL bool +hb_parse_int (const char **pp, const char *end, int *pv, + bool whole_buffer = false); + +HB_INTERNAL bool +hb_parse_uint (const char **pp, const char *end, unsigned int *pv, + bool whole_buffer = false, int base = 10); + +HB_INTERNAL bool +hb_parse_double (const char **pp, const char *end, double *pv, + bool whole_buffer = false); + +#endif /* HB_NUMBER_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-object.hh b/src/java.desktop/share/native/libharfbuzz/hb-object.hh index 1d7b6bf4f0b9..f01508ed40ba 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-object.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-object.hh @@ -168,8 +168,8 @@ struct hb_user_data_array_t void *data; hb_destroy_func_t destroy; - bool operator == (hb_user_data_key_t *other_key) const { return key == other_key; } - bool operator == (hb_user_data_item_t &other) const { return key == other.key; } + bool operator == (const hb_user_data_key_t *other_key) const { return key == other_key; } + bool operator == (const hb_user_data_item_t &other) const { return key == other.key; } void fini () { if (destroy) destroy (data); } }; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-open-file.hh b/src/java.desktop/share/native/libharfbuzz/hb-open-file.hh index 72b203041d69..95a8f75ff205 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-open-file.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-open-file.hh @@ -56,7 +56,7 @@ typedef struct TableRecord { int cmp (Tag t) const { return -t.cmp (tag); } - static int cmp (const void *pa, const void *pb) + HB_INTERNAL static int cmp (const void *pa, const void *pb) { const TableRecord *a = (const TableRecord *) pa; const TableRecord *b = (const TableRecord *) pb; @@ -86,27 +86,22 @@ typedef struct OffsetTable const TableRecord& get_table (unsigned int i) const { return tables[i]; } unsigned int get_table_tags (unsigned int start_offset, - unsigned int *table_count, /* IN/OUT */ - hb_tag_t *table_tags /* OUT */) const + unsigned int *table_count, /* IN/OUT */ + hb_tag_t *table_tags /* OUT */) const { if (table_count) { - if (start_offset >= tables.len) - *table_count = 0; - else - *table_count = MIN (*table_count, tables.len - start_offset); - - const TableRecord *sub_tables = tables.arrayZ + start_offset; - unsigned int count = *table_count; - for (unsigned int i = 0; i < count; i++) - table_tags[i] = sub_tables[i].tag; + + tables.sub_array (start_offset, table_count) + | hb_map (&TableRecord::tag) + | hb_sink (hb_array (table_tags, *table_count)) + ; } return tables.len; } bool find_table_index (hb_tag_t tag, unsigned int *table_index) const { Tag t; - t.set (tag); + t = tag; return tables.bfind (t, table_index, HB_BFIND_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX); } const TableRecord& get_table_by_tag (hb_tag_t tag) const @@ -127,7 +122,7 @@ typedef struct OffsetTable /* Alloc 12 for the OTHeader. */ if (unlikely (!c->extend_min (*this))) return_trace (false); /* Write sfntVersion (bytes 0..3). */ - sfnt_version.set (sfnt_tag); + sfnt_version = sfnt_tag; /* Take space for numTables, searchRange, entrySelector, RangeShift * and the TableRecords themselves. 
*/ if (unlikely (!tables.serialize (c, items.length))) return_trace (false); @@ -140,15 +135,16 @@ typedef struct OffsetTable { TableRecord &rec = tables.arrayZ[i]; hb_blob_t *blob = items[i].blob; - rec.tag.set (items[i].tag); - rec.length.set (hb_blob_get_length (blob)); + rec.tag = items[i].tag; + rec.length = blob->length; rec.offset.serialize (c, this); /* Allocate room for the table and copy it. */ char *start = (char *) c->allocate_size (rec.length); - if (unlikely (!start)) {return false;} + if (unlikely (!start)) return false; - memcpy (start, hb_blob_get_data (blob, nullptr), rec.length); + if (likely (rec.length)) + memcpy (start, blob->data, rec.length); /* 4-byte alignment. */ c->align (4); @@ -159,7 +155,7 @@ typedef struct OffsetTable { head *h = (head *) start; checksum_adjustment = &h->checkSumAdjustment; - checksum_adjustment->set (0); + *checksum_adjustment = 0; } rec.checkSum.set_for_data (start, end - start); @@ -177,10 +173,10 @@ typedef struct OffsetTable for (unsigned int i = 0; i < items.length; i++) { TableRecord &rec = tables.arrayZ[i]; - checksum.set (checksum + rec.checkSum); + checksum = checksum + rec.checkSum; } - checksum_adjustment->set (0xB1B0AFBAu - checksum); + *checksum_adjustment = 0xB1B0AFBAu - checksum; } return_trace (true); @@ -222,7 +218,7 @@ struct TTCHeaderVersion1 Tag ttcTag; /* TrueType Collection ID string: 'ttcf' */ FixedVersion<>version; /* Version of the TTC Header (1.0), * 0x00010000u */ - LArrayOf > + LArrayOf> table; /* Array of offsets to the OffsetTable for each font * from the beginning of the file */ public: @@ -248,7 +244,7 @@ struct TTCHeader switch (u.header.version.major) { case 2: /* version 2 is compatible with version 1 */ case 1: return u.version1.get_face (i); - default:return Null(OpenTypeFontFace); + default:return Null (OpenTypeFontFace); } } @@ -283,10 +279,10 @@ struct TTCHeader struct ResourceRecord { const OpenTypeFontFace & get_face (const void *data_base) const - { return CastR ((data_base+offset).arrayZ); } + { return * reinterpret_cast ((data_base+offset).arrayZ); } bool sanitize (hb_sanitize_context_t *c, - const void *data_base) const + const void *data_base) const { TRACE_SANITIZE (this); return_trace (c->check_struct (this) && @@ -334,7 +330,7 @@ struct ResourceTypeRecord protected: Tag tag; /* Resource type. */ HBUINT16 resCountM1; /* Number of resources minus 1. */ - NNOffsetTo > + NNOffsetTo> resourcesZ; /* Offset from beginning of resource type list * to reference item list for this type. 
*/ public: @@ -390,7 +386,7 @@ struct ResourceMap HBUINT32 reserved1; /* Reserved for handle to next resource map */ HBUINT16 resreved2; /* Reserved for file reference number */ HBUINT16 attrs; /* Resource fork attribute */ - NNOffsetTo > + NNOffsetTo> typeList; /* Offset from beginning of map to * resource type list */ Offset16 nameList; /* Offset from beginning of map to @@ -422,7 +418,7 @@ struct ResourceForkHeader } protected: - LNNOffsetTo > + LNNOffsetTo> data; /* Offset from beginning of resource fork * to resource data */ LNNOffsetTo @@ -477,7 +473,7 @@ struct OpenTypeFontFile case TrueTypeTag: return u.fontFace; case TTCTag: return u.ttcHeader.get_face (i); case DFontTag: return u.rfHeader.get_face (i, base_offset); - default: return Null(OpenTypeFontFace); + default: return Null (OpenTypeFontFace); } } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-open-type.hh b/src/java.desktop/share/native/libharfbuzz/hb-open-type.hh index 596099305c16..624194651d3a 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-open-type.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-open-type.hh @@ -52,22 +52,34 @@ namespace OT { * Int types */ -template struct hb_signedness_int; -template <> struct hb_signedness_int { typedef unsigned int value; }; -template <> struct hb_signedness_int { typedef signed int value; }; - /* Integer types in big-endian order and no alignment requirement */ template struct IntType { typedef Type type; - typedef typename hb_signedness_int::value>::value wide_type; + typedef hb_conditional wide_type; - void set (wide_type i) { v.set (i); } + IntType& operator = (wide_type i) { v = i; return *this; } operator wide_type () const { return v; } - bool operator == (const IntType &o) const { return (Type) v == (Type) o.v; } - bool operator != (const IntType &o) const { return !(*this == o); } - static int cmp (const IntType *a, const IntType *b) { return b->cmp (*a); } + bool operator == (const IntType &o) const { return (Type) v == (Type) o.v; } + bool operator != (const IntType &o) const { return !(*this == o); } + + IntType& operator += (unsigned count) { *this = *this + count; return *this; } + IntType& operator -= (unsigned count) { *this = *this - count; return *this; } + IntType& operator ++ () { *this += 1; return *this; } + IntType& operator -- () { *this -= 1; return *this; } + IntType operator ++ (int) { IntType c (*this); ++*this; return c; } + IntType operator -- (int) { IntType c (*this); --*this; return c; } + + HB_INTERNAL static int cmp (const IntType *a, const IntType *b) + { return b->cmp (*a); } + HB_INTERNAL static int cmp (const void *a, const void *b) + { + IntType *pa = (IntType *) a; + IntType *pb = (IntType *) b; + + return pb->cmp (*pa); + } template int cmp (Type2 a) const { @@ -110,19 +122,21 @@ typedef HBUINT16 UFWORD; /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */ struct F2DOT14 : HBINT16 { + F2DOT14& operator = (uint16_t i ) { HBINT16::operator= (i); return *this; } // 16384 means 1<<14 float to_float () const { return ((int32_t) v) / 16384.f; } - void set_float (float f) { v.set (round (f * 16384.f)); } + void set_float (float f) { v = roundf (f * 16384.f); } public: DEFINE_SIZE_STATIC (2); }; /* 32-bit signed fixed-point number (16.16). 
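A short worked example of the 2.14 and 16.16 conversions performed by F2DOT14 and HBFixed, using plain arithmetic only; the big-endian wrapper types themselves are omitted.

#include <cassert>
#include <cmath>
#include <cstdint>

static float   f2dot14_to_float (int16_t v) { return v / 16384.f; }                  // 1 << 14
static int16_t float_to_f2dot14 (float f)   { return (int16_t) roundf (f * 16384.f); }

static float   fixed_to_float (int32_t v)   { return v / 65536.f; }                  // 1 << 16
static int32_t float_to_fixed (float f)     { return (int32_t) roundf (f * 65536.f); }

int main ()
{
  assert (f2dot14_to_float (0x7000) == 1.75f);        // 28672 / 16384
  assert (float_to_f2dot14 (-0.5f) == -8192);         // stored as 0xE000
  assert (fixed_to_float (0x00018000) == 1.5f);       // 98304 / 65536
  assert (float_to_fixed (1.5f) == 0x00018000);
  return 0;
}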
*/ -struct Fixed : HBINT32 +struct HBFixed : HBINT32 { + HBFixed& operator = (uint32_t i) { HBINT32::operator= (i); return *this; } // 65536 means 1<<16 float to_float () const { return ((int32_t) v) / 65536.f; } - void set_float (float f) { v.set (round (f * 65536.f)); } + void set_float (float f) { v = roundf (f * 65536.f); } public: DEFINE_SIZE_STATIC (4); }; @@ -147,6 +161,7 @@ struct LONGDATETIME * system, feature, or baseline */ struct Tag : HBUINT32 { + Tag& operator = (hb_tag_t i) { HBUINT32::operator= (i); return *this; } /* What the char* converters return is NOT nul-terminated. Print using "%.4s" */ operator const char* () const { return reinterpret_cast (&this->v); } operator char* () { return reinterpret_cast (&this->v); } @@ -155,11 +170,15 @@ struct Tag : HBUINT32 }; /* Glyph index number, same as uint16 (length = 16 bits) */ -typedef HBUINT16 GlyphID; +struct HBGlyphID : HBUINT16 +{ + HBGlyphID& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; } +}; /* Script/language-system/feature index */ struct Index : HBUINT16 { static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFu; + Index& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; } }; DECLARE_NULL_NAMESPACE_BYTES (OT, Index); @@ -169,6 +188,8 @@ typedef Index NameID; template struct Offset : Type { + Offset& operator = (typename Type::type i) { Type::operator= (i); return *this; } + typedef Type type; bool is_null () const { return has_null && 0 == *this; } @@ -176,7 +197,7 @@ struct Offset : Type void *serialize (hb_serialize_context_t *c, const void *base) { void *t = c->start_embed (); - this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */ + c->check_assign (*this, (unsigned) ((char *) t - (char *) base)); return t; } @@ -191,6 +212,8 @@ typedef Offset Offset32; /* CheckSum */ struct CheckSum : HBUINT32 { + CheckSum& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; } + /* This is reference implementation from the spec. */ static uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length) { @@ -205,7 +228,7 @@ struct CheckSum : HBUINT32 /* Note: data should be 4byte aligned and have 4byte padding at the end. 
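A sketch of the table checksum being computed here: each table is summed as big-endian 32-bit words modulo 2^32 (hence the 4-byte alignment and padding note), and, as in the serialize code earlier, the head table's checkSumAdjustment is written last so that it equals 0xB1B0AFBA minus the checksum of the whole font.

#include <cstdint>
#include <cstdio>

static uint32_t calc_table_checksum_sketch (const uint8_t *data, uint32_t length)
{
  uint32_t sum = 0;                       // wraps modulo 2^32 by definition
  for (uint32_t i = 0; i < length; i += 4)   // length assumed padded to a multiple of 4
    sum += (uint32_t) data[i] << 24 | (uint32_t) data[i + 1] << 16 |
           (uint32_t) data[i + 2] << 8  | (uint32_t) data[i + 3];
  return sum;
}

int main ()
{
  // Two words, 0x00010000 and 0x00000001, sum to 0x00010001.
  const uint8_t table[8] = { 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01 };
  std::printf ("0x%08X\n", (unsigned) calc_table_checksum_sketch (table, sizeof (table)));
  return 0;
}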
*/ void set_for_data (const void *data, unsigned int length) - { set (CalcTableChecksum ((const HBUINT32 *) data, length)); } + { *this = CalcTableChecksum ((const HBUINT32 *) data, length); } public: DEFINE_SIZE_STATIC (4); @@ -248,13 +271,18 @@ struct _hb_has_null template struct _hb_has_null { - static const Type *get_null () { return &Null(Type); } - static Type *get_crap () { return &Crap(Type); } + static const Type *get_null () { return &Null (Type); } + static Type *get_crap () { return &Crap (Type); } }; template struct OffsetTo : Offset { + HB_DELETE_COPY_ASSIGN (OffsetTo); + OffsetTo () = default; + + OffsetTo& operator = (typename OffsetType::type i) { OffsetType::operator= (i); return *this; } + const Type& operator () (const void *base) const { if (unlikely (this->is_null ())) return *_hb_has_null::get_null (); @@ -266,24 +294,73 @@ struct OffsetTo : Offset return StructAtOffset (base, *this); } + template + friend const Type& operator + (const Base &base, const OffsetTo &offset) { return offset ((const void *) base); } + template + friend const Type& operator + (const OffsetTo &offset, const Base &base) { return offset ((const void *) base); } + template + friend Type& operator + (Base &&base, OffsetTo &offset) { return offset ((void *) base); } + template + friend Type& operator + (OffsetTo &offset, Base &&base) { return offset ((void *) base); } + Type& serialize (hb_serialize_context_t *c, const void *base) { return * (Type *) Offset::serialize (c, base); } - template - void serialize_subset (hb_subset_context_t *c, const T &src, const void *base) + template + bool serialize_subset (hb_subset_context_t *c, const OffsetTo& src, + const void *src_base, Ts&&... ds) { - if (&src == &Null (T)) - { - this->set (0); - return; - } - serialize (c->serializer, base); - if (!src.subset (c)) - this->set (0); + *this = 0; + if (src.is_null ()) + return false; + + auto *s = c->serializer; + + s->push (); + + bool ret = c->dispatch (src_base+src, hb_forward (ds)...); + + if (ret || !has_null) + s->add_link (*this, s->pop_pack ()); + else + s->pop_discard (); + + return ret; + } + + /* TODO: Somehow merge this with previous function into a serialize_dispatch(). */ + /* Workaround clang bug: https://bugs.llvm.org/show_bug.cgi?id=23029 + * Can't compile: whence = hb_serialize_context_t::Head followed by Ts&&... + */ + template + bool serialize_copy (hb_serialize_context_t *c, const OffsetTo& src, + const void *src_base, unsigned dst_bias, + hb_serialize_context_t::whence_t whence, + Ts&&... 
ds) + { + *this = 0; + if (src.is_null ()) + return false; + + c->push (); + + bool ret = c->copy (src_base+src, hb_forward (ds)...); + + c->add_link (*this, c->pop_pack (), whence, dst_bias); + + return ret; } + bool serialize_copy (hb_serialize_context_t *c, const OffsetTo& src, + const void *src_base, unsigned dst_bias = 0) + { return serialize_copy (c, src, src_base, dst_bias, hb_serialize_context_t::Head); } + bool sanitize_shallow (hb_sanitize_context_t *c, const void *base) const { TRACE_SANITIZE (this); @@ -293,39 +370,13 @@ struct OffsetTo : Offset return_trace (true); } - bool sanitize (hb_sanitize_context_t *c, const void *base) const - { - TRACE_SANITIZE (this); - return_trace (sanitize_shallow (c, base) && - (this->is_null () || - StructAtOffset (base, *this).sanitize (c) || - neuter (c))); - } - template - bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1) const - { - TRACE_SANITIZE (this); - return_trace (sanitize_shallow (c, base) && - (this->is_null () || - StructAtOffset (base, *this).sanitize (c, d1) || - neuter (c))); - } - template - bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2) const - { - TRACE_SANITIZE (this); - return_trace (sanitize_shallow (c, base) && - (this->is_null () || - StructAtOffset (base, *this).sanitize (c, d1, d2) || - neuter (c))); - } - template - bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2, T3 d3) const + template + bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const { TRACE_SANITIZE (this); return_trace (sanitize_shallow (c, base) && (this->is_null () || - StructAtOffset (base, *this).sanitize (c, d1, d2, d3) || + c->dispatch (StructAtOffset (base, *this), hb_forward (ds)...) || neuter (c))); } @@ -338,14 +389,12 @@ struct OffsetTo : Offset DEFINE_SIZE_STATIC (sizeof (OffsetType)); }; /* Partial specializations. 
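A simplified model of the base-plus-offset dereference that the new operator + overloads spell as base+offset (illustrative types, not the real OT:: structs): the stored offset is resolved relative to the address of an enclosing base structure.

#include <cassert>
#include <cstddef>
#include <cstdint>

template <typename Type>
struct OffsetToSketch
{
  uint16_t offset;   // byte offset from the base passed in by the caller
  const Type &operator () (const void *base) const
  { return *reinterpret_cast<const Type *> ((const char *) base + offset); }
};

// Convenience operator mirroring the base+offset spelling used in the diff.
template <typename Base, typename Type>
const Type &operator + (const Base &base, const OffsetToSketch<Type> &o)
{ return o ((const void *) &base); }

struct Value { uint16_t x; };
struct Table
{
  OffsetToSketch<Value> valueOffset;   // offset field at the start of the table
  Value value;                         // the data it points at
};

int main ()
{
  Table t;
  t.valueOffset.offset = offsetof (Table, value);
  t.value.x = 42;
  assert ((t + t.valueOffset).x == 42);
  return 0;
}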
*/ -template struct LOffsetTo : OffsetTo {}; -template struct NNOffsetTo : OffsetTo {}; -template struct LNNOffsetTo : OffsetTo {}; - -template -static inline const Type& operator + (const Base &base, const OffsetTo &offset) { return offset (base); } -template -static inline Type& operator + (Base &base, OffsetTo &offset) { return offset (base); } +template +using LOffsetTo = OffsetTo; +template +using NNOffsetTo = OffsetTo; +template +using LNNOffsetTo = LOffsetTo; /* @@ -358,7 +407,7 @@ struct UnsizedArrayOf typedef Type item_t; static constexpr unsigned item_size = hb_static_size (Type); - HB_NO_CREATE_COPY_ASSIGN_TEMPLATE (UnsizedArrayOf, Type); + HB_DELETE_CREATE_COPY_ASSIGN (UnsizedArrayOf); const Type& operator [] (int i_) const { @@ -384,7 +433,7 @@ struct UnsizedArrayOf { return hb_array (arrayZ, len); } hb_array_t as_array (unsigned int len) const { return hb_array (arrayZ, len); } - operator hb_array_t () { return as_array (); } + operator hb_array_t< Type> () { return as_array (); } operator hb_array_t () const { return as_array (); } template @@ -393,42 +442,49 @@ struct UnsizedArrayOf template const Type &lsearch (unsigned int len, const T &x, const Type ¬_found = Null (Type)) const { return *as_array (len).lsearch (x, ¬_found); } + template + bool lfind (unsigned int len, const T &x, unsigned *pos = nullptr) const + { return as_array (len).lfind (x, pos); } void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1) { as_array (len).qsort (start, end); } - bool sanitize (hb_sanitize_context_t *c, unsigned int count) const + bool serialize (hb_serialize_context_t *c, unsigned int items_len) { - TRACE_SANITIZE (this); - if (unlikely (!sanitize_shallow (c, count))) return_trace (false); - - /* Note: for structs that do not reference other structs, - * we do not need to call their sanitize() as we already did - * a bound check on the aggregate array size. We just include - * a small unreachable expression to make sure the structs - * pointed to do have a simple sanitize(), ie. they do not - * reference other structs via offsets. - */ - (void) (false && arrayZ[0].sanitize (c)); - + TRACE_SERIALIZE (this); + if (unlikely (!c->extend (*this, items_len))) return_trace (false); return_trace (true); } - bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base) const + template + bool serialize (hb_serialize_context_t *c, Iterator items) { - TRACE_SANITIZE (this); - if (unlikely (!sanitize_shallow (c, count))) return_trace (false); - for (unsigned int i = 0; i < count; i++) - if (unlikely (!arrayZ[i].sanitize (c, base))) - return_trace (false); + TRACE_SERIALIZE (this); + unsigned count = items.len (); + if (unlikely (!serialize (c, count))) return_trace (false); + /* TODO Umm. Just exhaust the iterator instead? Being extra + * cautious right now.. */ + for (unsigned i = 0; i < count; i++, ++items) + arrayZ[i] = *items; return_trace (true); } - template - bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base, T user_data) const + + UnsizedArrayOf* copy (hb_serialize_context_t *c, unsigned count) const + { + TRACE_SERIALIZE (this); + auto *out = c->start_embed (this); + if (unlikely (!as_array (count).copy (c))) return_trace (nullptr); + return_trace (out); + } + + template + bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const { TRACE_SANITIZE (this); if (unlikely (!sanitize_shallow (c, count))) return_trace (false); + if (!sizeof... 
(Ts) && hb_is_trivially_copyable (Type)) return_trace (true); for (unsigned int i = 0; i < count; i++) - if (unlikely (!arrayZ[i].sanitize (c, base, user_data))) + if (unlikely (!c->dispatch (arrayZ[i], hb_forward (ds)...))) return_trace (false); return_trace (true); } @@ -440,14 +496,14 @@ struct UnsizedArrayOf } public: - Type arrayZ[VAR]; + Type arrayZ[HB_VAR_ARRAY]; public: DEFINE_SIZE_UNBOUNDED (0); }; /* Unsized array of offset's */ template -struct UnsizedOffsetArrayOf : UnsizedArrayOf > {}; +using UnsizedOffsetArrayOf = UnsizedArrayOf>; /* Unsized array of offsets relative to the beginning of the array itself. */ template @@ -468,17 +524,12 @@ struct UnsizedOffsetListOf : UnsizedOffsetArrayOf return this+*p; } - - bool sanitize (hb_sanitize_context_t *c, unsigned int count) const - { - TRACE_SANITIZE (this); - return_trace ((UnsizedOffsetArrayOf::sanitize (c, count, this))); - } - template - bool sanitize (hb_sanitize_context_t *c, unsigned int count, T user_data) const + template + bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const { TRACE_SANITIZE (this); - return_trace ((UnsizedOffsetArrayOf::sanitize (c, count, this, user_data))); + return_trace ((UnsizedOffsetArrayOf + ::sanitize (c, count, this, hb_forward (ds)...))); } }; @@ -501,8 +552,8 @@ struct SortedUnsizedArrayOf : UnsizedArrayOf { return *as_array (len).bsearch (x, ¬_found); } template bool bfind (unsigned int len, const T &x, unsigned int *i = nullptr, - hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE, - unsigned int to_store = (unsigned int) -1) const + hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE, + unsigned int to_store = (unsigned int) -1) const { return as_array (len).bfind (x, i, not_found, to_store); } }; @@ -514,7 +565,7 @@ struct ArrayOf typedef Type item_t; static constexpr unsigned item_size = hb_static_size (Type); - HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (ArrayOf, Type, LenType); + HB_DELETE_CREATE_COPY_ASSIGN (ArrayOf); const Type& operator [] (int i_) const { @@ -532,74 +583,83 @@ struct ArrayOf unsigned int get_size () const { return len.static_size + len * Type::static_size; } - hb_array_t as_array () - { return hb_array (arrayZ, len); } - hb_array_t as_array () const - { return hb_array (arrayZ, len); } - operator hb_array_t (void) { return as_array (); } - operator hb_array_t (void) const { return as_array (); } + explicit operator bool () const { return len; } + + void pop () { len--; } + + hb_array_t< Type> as_array () { return hb_array (arrayZ, len); } + hb_array_t as_array () const { return hb_array (arrayZ, len); } + + /* Iterator. 
*/ + typedef hb_array_t iter_t; + typedef hb_array_t< Type> writer_t; + iter_t iter () const { return as_array (); } + writer_t writer () { return as_array (); } + operator iter_t () const { return iter (); } + operator writer_t () { return writer (); } hb_array_t sub_array (unsigned int start_offset, unsigned int count) const - { return as_array ().sub_array (start_offset, count);} + { return as_array ().sub_array (start_offset, count); } hb_array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const - { return as_array ().sub_array (start_offset, count);} + { return as_array ().sub_array (start_offset, count); } hb_array_t sub_array (unsigned int start_offset, unsigned int count) - { return as_array ().sub_array (start_offset, count);} + { return as_array ().sub_array (start_offset, count); } hb_array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) - { return as_array ().sub_array (start_offset, count);} + { return as_array ().sub_array (start_offset, count); } - bool serialize (hb_serialize_context_t *c, unsigned int items_len) + hb_success_t serialize (hb_serialize_context_t *c, unsigned items_len) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - len.set (items_len); /* TODO(serialize) Overflow? */ + c->check_assign (len, items_len); if (unlikely (!c->extend (*this))) return_trace (false); return_trace (true); } - template - bool serialize (hb_serialize_context_t *c, hb_array_t items) + template + hb_success_t serialize (hb_serialize_context_t *c, Iterator items) { TRACE_SERIALIZE (this); - if (unlikely (!serialize (c, items.length))) return_trace (false); - for (unsigned int i = 0; i < items.length; i++) - hb_assign (arrayZ[i], items[i]); + unsigned count = items.len (); + if (unlikely (!serialize (c, count))) return_trace (false); + /* TODO Umm. Just exhaust the iterator instead? Being extra + * cautious right now.. */ + for (unsigned i = 0; i < count; i++, ++items) + arrayZ[i] = *items; return_trace (true); } - bool sanitize (hb_sanitize_context_t *c) const + Type* serialize_append (hb_serialize_context_t *c) { - TRACE_SANITIZE (this); - if (unlikely (!sanitize_shallow (c))) return_trace (false); - - /* Note: for structs that do not reference other structs, - * we do not need to call their sanitize() as we already did - * a bound check on the aggregate array size. We just include - * a small unreachable expression to make sure the structs - * pointed to do have a simple sanitize(), ie. they do not - * reference other structs via offsets. 
- */ - (void) (false && arrayZ[0].sanitize (c)); - - return_trace (true); + TRACE_SERIALIZE (this); + len++; + if (unlikely (!len || !c->extend (*this))) + { + len--; + return_trace (nullptr); + } + return_trace (&arrayZ[len - 1]); } - bool sanitize (hb_sanitize_context_t *c, const void *base) const + + ArrayOf* copy (hb_serialize_context_t *c) const { - TRACE_SANITIZE (this); - if (unlikely (!sanitize_shallow (c))) return_trace (false); - unsigned int count = len; - for (unsigned int i = 0; i < count; i++) - if (unlikely (!arrayZ[i].sanitize (c, base))) - return_trace (false); - return_trace (true); + TRACE_SERIALIZE (this); + auto *out = c->start_embed (this); + if (unlikely (!c->extend_min (out))) return_trace (nullptr); + c->check_assign (out->len, len); + if (unlikely (!as_array ().copy (c))) return_trace (nullptr); + return_trace (out); } - template - bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const + + template + bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const { TRACE_SANITIZE (this); if (unlikely (!sanitize_shallow (c))) return_trace (false); + if (!sizeof... (Ts) && hb_is_trivially_copyable (Type)) return_trace (true); unsigned int count = len; for (unsigned int i = 0; i < count; i++) - if (unlikely (!arrayZ[i].sanitize (c, base, user_data))) + if (unlikely (!c->dispatch (arrayZ[i], hb_forward (ds)...))) return_trace (false); return_trace (true); } @@ -610,6 +670,9 @@ struct ArrayOf template const Type &lsearch (const T &x, const Type ¬_found = Null (Type)) const { return *as_array ().lsearch (x, ¬_found); } + template + bool lfind (const T &x, unsigned *pos = nullptr) const + { return as_array ().lfind (x, pos); } void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1) { as_array ().qsort (start, end); } @@ -622,20 +685,21 @@ struct ArrayOf public: LenType len; - Type arrayZ[VAR]; + Type arrayZ[HB_VAR_ARRAY]; public: DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); }; -template struct LArrayOf : ArrayOf {}; -typedef ArrayOf PString; +template +using LArrayOf = ArrayOf; +using PString = ArrayOf; /* Array of Offset's */ template -struct OffsetArrayOf : ArrayOf > {}; +using OffsetArrayOf = ArrayOf>; template -struct LOffsetArrayOf : ArrayOf > {}; +using LOffsetArrayOf = ArrayOf>; template -struct LOffsetLArrayOf : ArrayOf, HBUINT32> {}; +using LOffsetLArrayOf = ArrayOf, HBUINT32>; /* Array of offsets relative to the beginning of the array itself. */ template @@ -661,20 +725,15 @@ struct OffsetListOf : OffsetArrayOf if (unlikely (!out)) return_trace (false); unsigned int count = this->len; for (unsigned int i = 0; i < count; i++) - out->arrayZ[i].serialize_subset (c, (*this)[i], out); + out->arrayZ[i].serialize_subset (c, this->arrayZ[i], this, out); return_trace (true); } - bool sanitize (hb_sanitize_context_t *c) const - { - TRACE_SANITIZE (this); - return_trace (OffsetArrayOf::sanitize (c, this)); - } - template - bool sanitize (hb_sanitize_context_t *c, T user_data) const + template + bool sanitize (hb_sanitize_context_t *c, Ts&&... 
ds) const { TRACE_SANITIZE (this); - return_trace (OffsetArrayOf::sanitize (c, this, user_data)); + return_trace (OffsetArrayOf::sanitize (c, this, hb_forward (ds)...)); } }; @@ -684,7 +743,7 @@ struct HeadlessArrayOf { static constexpr unsigned item_size = Type::static_size; - HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (HeadlessArrayOf, Type, LenType); + HB_DELETE_CREATE_COPY_ASSIGN (HeadlessArrayOf); const Type& operator [] (int i_) const { @@ -699,34 +758,53 @@ struct HeadlessArrayOf return arrayZ[i-1]; } unsigned int get_size () const - { return lenP1.static_size + (lenP1 ? lenP1 - 1 : 0) * Type::static_size; } + { return lenP1.static_size + get_length () * Type::static_size; } - bool serialize (hb_serialize_context_t *c, - hb_array_t items) + unsigned get_length () const { return lenP1 ? lenP1 - 1 : 0; } + + hb_array_t< Type> as_array () { return hb_array (arrayZ, get_length ()); } + hb_array_t as_array () const { return hb_array (arrayZ, get_length ()); } + + /* Iterator. */ + typedef hb_array_t iter_t; + typedef hb_array_t< Type> writer_t; + iter_t iter () const { return as_array (); } + writer_t writer () { return as_array (); } + operator iter_t () const { return iter (); } + operator writer_t () { return writer (); } + + bool serialize (hb_serialize_context_t *c, unsigned int items_len) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - lenP1.set (items.length + 1); /* TODO(serialize) Overflow? */ + c->check_assign (lenP1, items_len + 1); if (unlikely (!c->extend (*this))) return_trace (false); - for (unsigned int i = 0; i < items.length; i++) - arrayZ[i] = items[i]; + return_trace (true); + } + template + bool serialize (hb_serialize_context_t *c, Iterator items) + { + TRACE_SERIALIZE (this); + unsigned count = items.len (); + if (unlikely (!serialize (c, count))) return_trace (false); + /* TODO Umm. Just exhaust the iterator instead? Being extra + * cautious right now.. */ + for (unsigned i = 0; i < count; i++, ++items) + arrayZ[i] = *items; return_trace (true); } - bool sanitize (hb_sanitize_context_t *c) const + template + bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const { TRACE_SANITIZE (this); if (unlikely (!sanitize_shallow (c))) return_trace (false); - - /* Note: for structs that do not reference other structs, - * we do not need to call their sanitize() as we already did - * a bound check on the aggregate array size. We just include - * a small unreachable expression to make sure the structs - * pointed to do have a simple sanitize(), ie. they do not - * reference other structs via offsets. - */ - (void) (false && arrayZ[0].sanitize (c)); - + if (!sizeof... 
(Ts) && hb_is_trivially_copyable (Type)) return_trace (true); + unsigned int count = get_length (); + for (unsigned int i = 0; i < count; i++) + if (unlikely (!c->dispatch (arrayZ[i], hb_forward (ds)...))) + return_trace (false); return_trace (true); } @@ -740,7 +818,7 @@ struct HeadlessArrayOf public: LenType lenP1; - Type arrayZ[VAR]; + Type arrayZ[HB_VAR_ARRAY]; public: DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); }; @@ -749,7 +827,7 @@ struct HeadlessArrayOf template struct ArrayOfM1 { - HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (ArrayOfM1, Type, LenType); + HB_DELETE_CREATE_COPY_ASSIGN (ArrayOfM1); const Type& operator [] (int i_) const { @@ -766,14 +844,14 @@ struct ArrayOfM1 unsigned int get_size () const { return lenM1.static_size + (lenM1 + 1) * Type::static_size; } - template - bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const + template + bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const { TRACE_SANITIZE (this); if (unlikely (!sanitize_shallow (c))) return_trace (false); unsigned int count = lenM1 + 1; for (unsigned int i = 0; i < count; i++) - if (unlikely (!arrayZ[i].sanitize (c, base, user_data))) + if (unlikely (!c->dispatch (arrayZ[i], hb_forward (ds)...))) return_trace (false); return_trace (true); } @@ -788,7 +866,7 @@ struct ArrayOfM1 public: LenType lenM1; - Type arrayZ[VAR]; + Type arrayZ[HB_VAR_ARRAY]; public: DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); }; @@ -797,21 +875,40 @@ struct ArrayOfM1 template struct SortedArrayOf : ArrayOf { - hb_sorted_array_t as_array () - { return hb_sorted_array (this->arrayZ, this->len); } - hb_sorted_array_t as_array () const - { return hb_sorted_array (this->arrayZ, this->len); } - operator hb_sorted_array_t () { return as_array (); } - operator hb_sorted_array_t () const { return as_array (); } + hb_sorted_array_t< Type> as_array () { return hb_sorted_array (this->arrayZ, this->len); } + hb_sorted_array_t as_array () const { return hb_sorted_array (this->arrayZ, this->len); } + + /* Iterator. 
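/*
 * A minimal sketch of the sanitize pattern the rewritten array types above
 * (ArrayOf, HeadlessArrayOf, ArrayOfM1) now share: bounds-check the array as
 * a whole, stop there when the element type is trivially copyable (it cannot
 * hold offsets that need following), and only otherwise visit each element.
 * Plain C++; check_element is a hypothetical stand-in for
 * c->dispatch (arrayZ[i], hb_forward (ds)...).
 */
#include <cstddef>
#include <type_traits>

template <typename T, typename ElementCheck>
static bool sanitize_array (const char *blob, size_t blob_len,
                            size_t array_offset, size_t count,
                            ElementCheck check_element)
{
  /* shallow check: the whole array must lie inside the blob */
  if (array_offset > blob_len || count > (blob_len - array_offset) / sizeof (T))
    return false;

  /* trivially copyable elements carry no internal offsets: nothing more to do */
  if (std::is_trivially_copyable<T>::value)
    return true;

  const T *arr = (const T *) (blob + array_offset);
  for (size_t i = 0; i < count; i++)
    if (!check_element (arr[i]))      /* per-element dispatch, as in the patch */
      return false;
  return true;
}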
*/ + typedef hb_sorted_array_t iter_t; + typedef hb_sorted_array_t< Type> writer_t; + iter_t iter () const { return as_array (); } + writer_t writer () { return as_array (); } + operator iter_t () const { return iter (); } + operator writer_t () { return writer (); } + + hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int count) const + { return as_array ().sub_array (start_offset, count); } + hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const + { return as_array ().sub_array (start_offset, count); } + hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int count) + { return as_array ().sub_array (start_offset, count); } + hb_sorted_array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) + { return as_array ().sub_array (start_offset, count); } - hb_array_t sub_array (unsigned int start_offset, unsigned int count) const - { return as_array ().sub_array (start_offset, count);} - hb_array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const - { return as_array ().sub_array (start_offset, count);} - hb_array_t sub_array (unsigned int start_offset, unsigned int count) - { return as_array ().sub_array (start_offset, count);} - hb_array_t sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) - { return as_array ().sub_array (start_offset, count);} + bool serialize (hb_serialize_context_t *c, unsigned int items_len) + { + TRACE_SERIALIZE (this); + bool ret = ArrayOf::serialize (c, items_len); + return_trace (ret); + } + template + bool serialize (hb_serialize_context_t *c, Iterator items) + { + TRACE_SERIALIZE (this); + bool ret = ArrayOf::serialize (c, items); + return_trace (ret); + } template Type &bsearch (const T &x, Type ¬_found = Crap (Type)) @@ -821,8 +918,8 @@ struct SortedArrayOf : ArrayOf { return *as_array ().bsearch (x, ¬_found); } template bool bfind (const T &x, unsigned int *i = nullptr, - hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE, - unsigned int to_store = (unsigned int) -1) const + hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE, + unsigned int to_store = (unsigned int) -1) const { return as_array ().bfind (x, i, not_found, to_store); } }; @@ -841,15 +938,16 @@ struct BinSearchHeader return_trace (c->check_struct (this)); } - void set (unsigned int v) + BinSearchHeader& operator = (unsigned int v) { - len.set (v); + len = v; assert (len == v); - entrySelector.set (MAX (1u, hb_bit_storage (v)) - 1); - searchRange.set (16 * (1u << entrySelector)); - rangeShift.set (v * 16 > searchRange - ? 16 * v - searchRange - : 0); + entrySelector = hb_max (1u, hb_bit_storage (v)) - 1; + searchRange = 16 * (1u << entrySelector); + rangeShift = v * 16 > searchRange + ? 
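/*
 * A standalone sketch of the arithmetic behind the new
 * BinSearchHeader::operator= above, assuming the record size of 16 that the
 * patch hard-codes; bit_storage below is a plain helper standing in for
 * hb_bit_storage.
 */
static unsigned bit_storage (unsigned v)                /* bits needed to hold v */
{ unsigned n = 0; while (v) { n++; v >>= 1; } return n; }

struct bin_search_fields { unsigned len, entrySelector, searchRange, rangeShift; };

static bin_search_fields compute_bin_search_fields (unsigned count)
{
  bin_search_fields f;
  f.len = count;
  unsigned bits = bit_storage (count);
  f.entrySelector = (bits > 1 ? bits : 1) - 1;          /* floor(log2(count)) for count >= 1 */
  f.searchRange   = 16 * (1u << f.entrySelector);       /* largest power-of-two block, in bytes */
  f.rangeShift    = 16 * count > f.searchRange ? 16 * count - f.searchRange : 0;
  return f;
}
/* e.g. count = 10: entrySelector = 3, searchRange = 128, rangeShift = 32. */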
16 * v - searchRange + : 0; + return *this; } protected: @@ -863,7 +961,7 @@ struct BinSearchHeader }; template -struct BinSearchArrayOf : SortedArrayOf > {}; +using BinSearchArrayOf = SortedArrayOf>; struct VarSizedBinSearchHeader @@ -893,7 +991,7 @@ struct VarSizedBinSearchArrayOf { static constexpr unsigned item_size = Type::static_size; - HB_NO_CREATE_COPY_ASSIGN_TEMPLATE (VarSizedBinSearchArrayOf, Type); + HB_DELETE_CREATE_COPY_ASSIGN (VarSizedBinSearchArrayOf); bool last_is_terminator () const { @@ -928,40 +1026,15 @@ struct VarSizedBinSearchArrayOf unsigned int get_size () const { return header.static_size + header.nUnits * header.unitSize; } - bool sanitize (hb_sanitize_context_t *c) const - { - TRACE_SANITIZE (this); - if (unlikely (!sanitize_shallow (c))) return_trace (false); - - /* Note: for structs that do not reference other structs, - * we do not need to call their sanitize() as we already did - * a bound check on the aggregate array size. We just include - * a small unreachable expression to make sure the structs - * pointed to do have a simple sanitize(), ie. they do not - * reference other structs via offsets. - */ - (void) (false && StructAtOffset (&bytesZ, 0).sanitize (c)); - - return_trace (true); - } - bool sanitize (hb_sanitize_context_t *c, const void *base) const + template + bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const { TRACE_SANITIZE (this); if (unlikely (!sanitize_shallow (c))) return_trace (false); + if (!sizeof... (Ts) && hb_is_trivially_copyable (Type)) return_trace (true); unsigned int count = get_length (); for (unsigned int i = 0; i < count; i++) - if (unlikely (!(*this)[i].sanitize (c, base))) - return_trace (false); - return_trace (true); - } - template - bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const - { - TRACE_SANITIZE (this); - if (unlikely (!sanitize_shallow (c))) return_trace (false); - unsigned int count = get_length (); - for (unsigned int i = 0; i < count; i++) - if (unlikely (!(*this)[i].sanitize (c, base, user_data))) + if (unlikely (!(*this)[i].sanitize (c, hb_forward (ds)...))) return_trace (false); return_trace (true); } @@ -969,18 +1042,15 @@ struct VarSizedBinSearchArrayOf template const Type *bsearch (const T &key) const { - unsigned int size = header.unitSize; - int min = 0, max = (int) get_length () - 1; - while (min <= max) - { - int mid = ((unsigned int) min + (unsigned int) max) / 2; - const Type *p = (const Type *) (((const char *) &bytesZ) + (mid * size)); - int c = p->cmp (key); - if (c < 0) max = mid - 1; - else if (c > 0) min = mid + 1; - else return p; - } - return nullptr; + unsigned pos; + return hb_bsearch_impl (&pos, + key, + (const void *) bytesZ, + get_length (), + header.unitSize, + _hb_cmp_method) + ? 
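/*
 * A minimal sketch of the search that replaces the hand-rolled loop above:
 * binary search over records of a fixed byte size through a comparison
 * callback, in the spirit of hb_bsearch_impl.  Plain C++; the names below
 * are illustrative only.
 */
#include <cstddef>

template <typename Key, typename Rec>
static const Rec *bsearch_fixed_size (const Key &key, const void *base,
                                      unsigned count, unsigned record_size,
                                      int (*cmp) (const Key &, const Rec &))
{
  int lo = 0, hi = (int) count - 1;
  while (lo <= hi)
  {
    int mid = lo + (hi - lo) / 2;
    const Rec *p = (const Rec *) ((const char *) base + (size_t) mid * record_size);
    int c = cmp (key, *p);
    if (c < 0)      hi = mid - 1;
    else if (c > 0) lo = mid + 1;
    else            return p;           /* records may be wider than sizeof (Rec) */
  }
  return nullptr;
}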
(const Type *) (((const char *) &bytesZ) + (pos * header.unitSize)) + : nullptr; } private: diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff-common.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff-common.hh index d278e03d930a..28073724efc9 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff-common.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff-common.hh @@ -27,6 +27,7 @@ #define HB_OT_CFF_COMMON_HH #include "hb-open-type.hh" +#include "hb-bimap.hh" #include "hb-ot-layout-common.hh" #include "hb-cff-interp-dict-common.hh" #include "hb-subset-plan.hh" @@ -37,16 +38,19 @@ using namespace OT; #define CFF_UNDEF_CODE 0xFFFFFFFF +using objidx_t = hb_serialize_context_t::objidx_t; +using whence_t = hb_serialize_context_t::whence_t; + /* utility macro */ template -static inline const Type& StructAtOffsetOrNull(const void *P, unsigned int offset) -{ return offset? (* reinterpret_cast ((const char *) P + offset)): Null(Type); } +static inline const Type& StructAtOffsetOrNull (const void *P, unsigned int offset) +{ return offset ? StructAtOffset (P, offset) : Null (Type); } -inline unsigned int calcOffSize(unsigned int dataSize) +inline unsigned int calcOffSize (unsigned int dataSize) { unsigned int size = 1; unsigned int offset = dataSize + 1; - while ((offset & ~0xFF) != 0) + while (offset & ~0xFF) { size++; offset >>= 8; @@ -57,8 +61,8 @@ inline unsigned int calcOffSize(unsigned int dataSize) struct code_pair_t { - hb_codepoint_t code; - hb_codepoint_t glyph; + hb_codepoint_t code; + hb_codepoint_t glyph; }; typedef hb_vector_t str_buff_t; @@ -82,27 +86,20 @@ struct str_buff_vec_t : hb_vector_t template struct CFFIndex { - bool sanitize (hb_sanitize_context_t *c) const - { - TRACE_SANITIZE (this); - return_trace (likely ((count.sanitize (c) && count == 0) || /* empty INDEX */ - (c->check_struct (this) && offSize >= 1 && offSize <= 4 && - c->check_array (offsets, offSize, count + 1) && - c->check_array ((const HBUINT8*)data_base (), 1, max_offset () - 1)))); - } - static unsigned int calculate_offset_array_size (unsigned int offSize, unsigned int count) { return offSize * (count + 1); } unsigned int offset_array_size () const { return calculate_offset_array_size (offSize, count); } - static unsigned int calculate_serialized_size (unsigned int offSize, unsigned int count, unsigned int dataSize) + CFFIndex *copy (hb_serialize_context_t *c) const { - if (count == 0) - return COUNT::static_size; - else - return min_size + calculate_offset_array_size (offSize, count) + dataSize; + TRACE_SERIALIZE (this); + unsigned int size = get_size (); + CFFIndex *out = c->allocate_size (size); + if (likely (out)) + memcpy (out, this, size); + return_trace (out); } bool serialize (hb_serialize_context_t *c, const CFFIndex &src) @@ -110,7 +107,7 @@ struct CFFIndex TRACE_SERIALIZE (this); unsigned int size = src.get_size (); CFFIndex *dest = c->allocate_size (size); - if (unlikely (dest == nullptr)) return_trace (false); + if (unlikely (!dest)) return_trace (false); memcpy (dest, &src, size); return_trace (true); } @@ -123,16 +120,16 @@ struct CFFIndex if (byteArray.length == 0) { COUNT *dest = c->allocate_min (); - if (unlikely (dest == nullptr)) return_trace (false); - dest->set (0); + if (unlikely (!dest)) return_trace (false); + *dest = 0; } else { /* serialize CFFIndex header */ if (unlikely (!c->extend_min (*this))) return_trace (false); - this->count.set (byteArray.length); - this->offSize.set (offSize_); - if (!unlikely (c->allocate_size (offSize_ * 
(byteArray.length + 1)))) + this->count = byteArray.length; + this->offSize = offSize_; + if (unlikely (!c->allocate_size (offSize_ * (byteArray.length + 1)))) return_trace (false); /* serialize indices */ @@ -149,9 +146,8 @@ struct CFFIndex for (unsigned int i = 0; i < byteArray.length; i++) { const byte_str_t &bs = byteArray[i]; - unsigned char *dest = c->allocate_size (bs.length); - if (unlikely (dest == nullptr)) - return_trace (false); + unsigned char *dest = c->allocate_size (bs.length); + if (unlikely (!dest)) return_trace (false); memcpy (dest, &bs[0], bs.length); } } @@ -166,14 +162,77 @@ struct CFFIndex byteArray.init (); byteArray.resize (buffArray.length); for (unsigned int i = 0; i < byteArray.length; i++) - { - byteArray[i] = byte_str_t (buffArray[i].arrayZ (), buffArray[i].length); - } + byteArray[i] = byte_str_t (buffArray[i].arrayZ, buffArray[i].length); bool result = this->serialize (c, offSize_, byteArray); byteArray.fini (); return result; } + template + bool serialize (hb_serialize_context_t *c, + Iterator it) + { + TRACE_SERIALIZE (this); + if (it.len () == 0) + { + COUNT *dest = c->allocate_min (); + if (unlikely (!dest)) return_trace (false); + *dest = 0; + } + else + { + serialize_header(c, + it | hb_map ([] (const byte_str_t &_) { return _.length; })); + for (const byte_str_t &_ : +it) + _.copy (c); + } + return_trace (true); + } + + bool serialize (hb_serialize_context_t *c, + const byte_str_array_t &byteArray) + { return serialize (c, + hb_iter (byteArray)); } + + bool serialize (hb_serialize_context_t *c, + const str_buff_vec_t &buffArray) + { + auto it = + + hb_iter (buffArray) + | hb_map ([] (const str_buff_t &_) { return byte_str_t (_.arrayZ, _.length); }) + ; + return serialize (c, it); + } + + template + bool serialize_header (hb_serialize_context_t *c, + Iterator it) + { + TRACE_SERIALIZE (this); + + unsigned total = + it | hb_reduce (hb_add, 0); + unsigned off_size = calcOffSize (total); + + /* serialize CFFIndex header */ + if (unlikely (!c->extend_min (*this))) return_trace (false); + this->count = it.len (); + this->offSize = off_size; + if (unlikely (!c->allocate_size (off_size * (it.len () + 1)))) + return_trace (false); + + /* serialize indices */ + unsigned int offset = 1; + unsigned int i = 0; + for (unsigned _ : +it) + { + CFFIndex::set_offset_at (i++, offset); + offset += _; + } + CFFIndex::set_offset_at (i, offset); + + return_trace (true); + } + void set_offset_at (unsigned int index, unsigned int offset) { HBUINT8 *p = offsets + offSize * index + offSize; @@ -181,7 +240,7 @@ struct CFFIndex for (; size; size--) { --p; - p->set (offset & 0xFF); + *p = offset & 0xFF; offset >>= 8; } } @@ -199,37 +258,38 @@ struct CFFIndex unsigned int length_at (unsigned int index) const { - if (likely ((offset_at (index + 1) >= offset_at (index)) && - (offset_at (index + 1) <= offset_at (count)))) - return offset_at (index + 1) - offset_at (index); - else - return 0; + if (unlikely ((offset_at (index + 1) < offset_at (index)) || + (offset_at (index + 1) > offset_at (count)))) + return 0; + return offset_at (index + 1) - offset_at (index); } const unsigned char *data_base () const - { return (const unsigned char *)this + min_size + offset_array_size (); } + { return (const unsigned char *) this + min_size + offset_array_size (); } unsigned int data_size () const { return HBINT8::static_size; } byte_str_t operator [] (unsigned int index) const { - if (likely (index < count)) - return byte_str_t (data_base () + offset_at (index) - 1, length_at (index)); - else - 
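/*
 * A minimal sketch of the offset encoding that set_offset_at above writes:
 * a CFF INDEX stores (count + 1) one-based offsets, each offSize bytes wide
 * and big-endian, followed by the concatenated object data.  Plain C++
 * illustration, not the HarfBuzz implementation.
 */
#include <vector>
#include <cstdint>

static unsigned off_size_for (unsigned data_size)       /* same idea as calcOffSize */
{
  unsigned size = 1;
  for (unsigned offset = data_size + 1; offset & ~0xFFu; offset >>= 8)
    size++;
  return size;
}

static void write_offset (std::vector<uint8_t> &out, unsigned offset, unsigned off_size)
{
  for (unsigned i = 0; i < off_size; i++)               /* most significant byte first */
    out.push_back ((offset >> (8 * (off_size - 1 - i))) & 0xFF);
}
/* e.g. three objects of sizes 3, 5 and 2 get offsets 1, 4, 9, 11 with off_size 1. */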
return Null(byte_str_t); + if (unlikely (index >= count)) return Null (byte_str_t); + return byte_str_t (data_base () + offset_at (index) - 1, length_at (index)); } unsigned int get_size () const { - if (this != &Null(CFFIndex)) - { - if (count > 0) - return min_size + offset_array_size () + (offset_at (count) - 1); - else - return count.static_size; /* empty CFFIndex contains count only */ - } - else - return 0; + if (this == &Null (CFFIndex)) return 0; + if (count > 0) + return min_size + offset_array_size () + (offset_at (count) - 1); + return count.static_size; /* empty CFFIndex contains count only */ + } + + bool sanitize (hb_sanitize_context_t *c) const + { + TRACE_SANITIZE (this); + return_trace (likely ((c->check_struct (this) && count == 0) || /* empty INDEX */ + (c->check_struct (this) && offSize >= 1 && offSize <= 4 && + c->check_array (offsets, offSize, count + 1) && + c->check_array ((const HBUINT8*) data_base (), 1, max_offset () - 1)))); } protected: @@ -245,10 +305,11 @@ struct CFFIndex } public: - COUNT count; /* Number of object data. Note there are (count+1) offsets */ - HBUINT8 offSize; /* The byte size of each offset in the offsets array. */ - HBUINT8 offsets[VAR]; /* The array of (count + 1) offsets into objects array (1-base). */ - /* HBUINT8 data[VAR]; Object data */ + COUNT count; /* Number of object data. Note there are (count+1) offsets */ + HBUINT8 offSize; /* The byte size of each offset in the offsets array. */ + HBUINT8 offsets[HB_VAR_ARRAY]; + /* The array of (count + 1) offsets into objects array (1-base). */ + /* HBUINT8 data[HB_VAR_ARRAY]; Object data */ public: DEFINE_SIZE_ARRAY (COUNT::static_size + HBUINT8::static_size, offsets); }; @@ -260,7 +321,7 @@ struct CFFIndexOf : CFFIndex { if (likely (index < CFFIndex::count)) return byte_str_t (CFFIndex::data_base () + CFFIndex::offset_at (index) - 1, CFFIndex::length_at (index)); - return Null(byte_str_t); + return Null (byte_str_t); } template @@ -275,9 +336,9 @@ struct CFFIndexOf : CFFIndex TRACE_SERIALIZE (this); /* serialize CFFIndex header */ if (unlikely (!c->extend_min (*this))) return_trace (false); - this->count.set (dataArrayLen); - this->offSize.set (offSize_); - if (!unlikely (c->allocate_size (offSize_ * (dataArrayLen + 1)))) + this->count = dataArrayLen; + this->offSize = offSize_; + if (unlikely (!c->allocate_size (offSize_ * (dataArrayLen + 1)))) return_trace (false); /* serialize indices */ @@ -293,112 +354,74 @@ struct CFFIndexOf : CFFIndex /* serialize data */ for (unsigned int i = 0; i < dataArrayLen; i++) { - TYPE *dest = c->start_embed (); - if (unlikely (dest == nullptr || - !dest->serialize (c, dataArray[i], param1, param2))) + TYPE *dest = c->start_embed (); + if (unlikely (!dest || !dest->serialize (c, dataArray[i], param1, param2))) return_trace (false); } return_trace (true); } - - /* in parallel to above */ - template - static unsigned int calculate_serialized_size (unsigned int &offSize_ /* OUT */, - const DATA *dataArray, - unsigned int dataArrayLen, - hb_vector_t &dataSizeArray, /* OUT */ - const PARAM ¶m) - { - /* determine offset size */ - unsigned int totalDataSize = 0; - for (unsigned int i = 0; i < dataArrayLen; i++) - { - unsigned int dataSize = TYPE::calculate_serialized_size (dataArray[i], param); - dataSizeArray[i] = dataSize; - totalDataSize += dataSize; - } - offSize_ = calcOffSize (totalDataSize); - - return CFFIndex::calculate_serialized_size (offSize_, dataArrayLen, totalDataSize); - } }; /* Top Dict, Font Dict, Private Dict */ struct Dict : UnsizedByteStr { - 
template + template bool serialize (hb_serialize_context_t *c, const DICTVAL &dictval, OP_SERIALIZER& opszr, - PARAM& param) + Ts&&... ds) { TRACE_SERIALIZE (this); for (unsigned int i = 0; i < dictval.get_count (); i++) - { - if (unlikely (!opszr.serialize (c, dictval[i], param))) + if (unlikely (!opszr.serialize (c, dictval[i], hb_forward (ds)...))) return_trace (false); - } - return_trace (true); - } - /* in parallel to above */ - template - static unsigned int calculate_serialized_size (const DICTVAL &dictval, - OP_SERIALIZER& opszr, - PARAM& param) - { - unsigned int size = 0; - for (unsigned int i = 0; i < dictval.get_count (); i++) - size += opszr.calculate_serialized_size (dictval[i], param); - return size; - } - - template - static unsigned int calculate_serialized_size (const DICTVAL &dictval, - OP_SERIALIZER& opszr) - { - unsigned int size = 0; - for (unsigned int i = 0; i < dictval.get_count (); i++) - size += opszr.calculate_serialized_size (dictval[i]); - return size; + return_trace (true); } - template - static bool serialize_int_op (hb_serialize_context_t *c, op_code_t op, int value, op_code_t intOp) + template + static bool serialize_int_op (hb_serialize_context_t *c, op_code_t op, V value, op_code_t intOp) { // XXX: not sure why but LLVM fails to compile the following 'unlikely' macro invocation - if (/*unlikely*/ (!serialize_int (c, intOp, value))) + if (/*unlikely*/ (!serialize_int (c, intOp, value))) return false; TRACE_SERIALIZE (this); /* serialize the opcode */ HBUINT8 *p = c->allocate_size (OpCode_Size (op)); - if (unlikely (p == nullptr)) return_trace (false); + if (unlikely (!p)) return_trace (false); if (Is_OpCode_ESC (op)) { - p->set (OpCode_escape); + *p = OpCode_escape; op = Unmake_OpCode_ESC (op); p++; } - p->set (op); + *p = op; return_trace (true); } - static bool serialize_uint4_op (hb_serialize_context_t *c, op_code_t op, int value) - { return serialize_int_op (c, op, value, OpCode_longintdict); } + template + static bool serialize_int4_op (hb_serialize_context_t *c, op_code_t op, V value) + { return serialize_int_op (c, op, value, OpCode_longintdict); } - static bool serialize_uint2_op (hb_serialize_context_t *c, op_code_t op, int value) - { return serialize_int_op (c, op, value, OpCode_shortint); } + template + static bool serialize_int2_op (hb_serialize_context_t *c, op_code_t op, V value) + { return serialize_int_op (c, op, value, OpCode_shortint); } - static bool serialize_offset4_op (hb_serialize_context_t *c, op_code_t op, int value) + template + static bool serialize_link_op (hb_serialize_context_t *c, op_code_t op, objidx_t link, whence_t whence) { - return serialize_uint4_op (c, op, value); + T &ofs = *(T *) (c->head + OpCode_Size (int_op)); + if (unlikely (!serialize_int_op (c, op, 0, int_op))) return false; + c->add_link (ofs, link, whence); + return true; } - static bool serialize_offset2_op (hb_serialize_context_t *c, op_code_t op, int value) - { - return serialize_uint2_op (c, op, value); - } + static bool serialize_link4_op (hb_serialize_context_t *c, op_code_t op, objidx_t link, whence_t whence = whence_t::Head) + { return serialize_link_op (c, op, link, whence); } + + static bool serialize_link2_op (hb_serialize_context_t *c, op_code_t op, objidx_t link, whence_t whence = whence_t::Head) + { return serialize_link_op (c, op, link, whence); } }; struct TopDict : Dict {}; @@ -407,155 +430,39 @@ struct PrivateDict : Dict {}; struct table_info_t { - void init () { offSize = offset = size = 0; } + void init () { offset = size = 0; link = 0; } 
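/*
 * A minimal sketch of the DICT operator encoding used by serialize_int_op and
 * serialize_link_op above: the operand is written first, then the operator,
 * with two-byte operators introduced by an escape byte.  The values 29
 * (longint operand prefix) and 12 (escape) come from the CFF spec; the rest
 * is a plain C++ illustration.
 */
#include <vector>
#include <cstdint>

static void emit_longint_operand (std::vector<uint8_t> &out, int32_t v)
{
  out.push_back (29);                 /* CFF DICT "longint": 4 big-endian bytes follow */
  out.push_back ((v >> 24) & 0xFF);
  out.push_back ((v >> 16) & 0xFF);
  out.push_back ((v >> 8) & 0xFF);
  out.push_back (v & 0xFF);
}

static void emit_op (std::vector<uint8_t> &out, unsigned op)
{
  if (op >= 0x100)                    /* escaped (two-byte) operator: 12 <op> */
  {
    out.push_back (12);
    out.push_back (op & 0xFF);
  }
  else
    out.push_back (op & 0xFF);
}
/* usage sketch: emit_longint_operand (out, offset); emit_op (out, op); */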
unsigned int offset; unsigned int size; - unsigned int offSize; -}; - -/* used to remap font index or SID from fullset to subset. - * set to CFF_UNDEF_CODE if excluded from subset */ -struct remap_t : hb_vector_t -{ - void init () { SUPER::init (); } - - void fini () { SUPER::fini (); } - - bool reset (unsigned int size) - { - if (unlikely (!SUPER::resize (size))) - return false; - for (unsigned int i = 0; i < length; i++) - (*this)[i] = CFF_UNDEF_CODE; - count = 0; - return true; - } - - bool identity (unsigned int size) - { - if (unlikely (!SUPER::resize (size))) - return false; - unsigned int i; - for (i = 0; i < length; i++) - (*this)[i] = i; - count = i; - return true; - } - - bool excludes (hb_codepoint_t id) const - { return (id < length) && ((*this)[id] == CFF_UNDEF_CODE); } - - bool includes (hb_codepoint_t id) const - { return !excludes (id); } - - unsigned int add (unsigned int i) - { - if ((*this)[i] == CFF_UNDEF_CODE) - (*this)[i] = count++; - return (*this)[i]; - } - - hb_codepoint_t get_count () const { return count; } - - protected: - hb_codepoint_t count; - - private: - typedef hb_vector_t SUPER; + objidx_t link; }; template struct FDArray : CFFIndexOf { - /* used by CFF1 */ - template + template bool serialize (hb_serialize_context_t *c, - unsigned int offSize_, - const hb_vector_t &fontDicts, + Iterator it, OP_SERIALIZER& opszr) { TRACE_SERIALIZE (this); - if (unlikely (!c->extend_min (*this))) return_trace (false); - this->count.set (fontDicts.length); - this->offSize.set (offSize_); - if (!unlikely (c->allocate_size (offSize_ * (fontDicts.length + 1)))) - return_trace (false); - - /* serialize font dict offsets */ - unsigned int offset = 1; - unsigned int fid = 0; - for (; fid < fontDicts.length; fid++) - { - CFFIndexOf::set_offset_at (fid, offset); - offset += FontDict::calculate_serialized_size (fontDicts[fid], opszr); - } - CFFIndexOf::set_offset_at (fid, offset); - /* serialize font dicts */ - for (unsigned int i = 0; i < fontDicts.length; i++) + /* serialize INDEX data */ + hb_vector_t sizes; + c->push (); + + it + | hb_map ([&] (const hb_pair_t &_) { FontDict *dict = c->start_embed (); - if (unlikely (!dict->serialize (c, fontDicts[i], opszr, fontDicts[i]))) - return_trace (false); - } - return_trace (true); - } - - /* used by CFF2 */ - template - bool serialize (hb_serialize_context_t *c, - unsigned int offSize_, - const hb_vector_t &fontDicts, - unsigned int fdCount, - const remap_t &fdmap, - OP_SERIALIZER& opszr, - const hb_vector_t &privateInfos) - { - TRACE_SERIALIZE (this); - if (unlikely (!c->extend_min (*this))) return_trace (false); - this->count.set (fdCount); - this->offSize.set (offSize_); - if (!unlikely (c->allocate_size (offSize_ * (fdCount + 1)))) - return_trace (false); - - /* serialize font dict offsets */ - unsigned int offset = 1; - unsigned int fid = 0; - for (unsigned i = 0; i < fontDicts.length; i++) - if (fdmap.includes (i)) - { - CFFIndexOf::set_offset_at (fid++, offset); - offset += FontDict::calculate_serialized_size (fontDicts[i], opszr); - } - CFFIndexOf::set_offset_at (fid, offset); + dict->serialize (c, _.first, opszr, _.second); + return c->head - (const char*)dict; + }) + | hb_sink (sizes) + ; + c->pop_pack (false); - /* serialize font dicts */ - for (unsigned int i = 0; i < fontDicts.length; i++) - if (fdmap.includes (i)) - { - FontDict *dict = c->start_embed (); - if (unlikely (!dict->serialize (c, fontDicts[i], opszr, privateInfos[fdmap[i]]))) - return_trace (false); - } - return_trace (true); - } - - /* in parallel to above */ 
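/*
 * A rough plain-C++ analogue of the pipeline in the new FDArray::serialize
 * above: serialize each font dict, record its byte size (the hb_map /
 * hb_sink step), and feed the sizes to the INDEX header writer.  DictBytes
 * is a hypothetical stand-in for an already-serialized FontDict.
 */
#include <vector>
#include <cstdint>

using DictBytes = std::vector<uint8_t>;

static std::vector<unsigned> collect_dict_sizes (const std::vector<DictBytes> &dicts,
                                                 std::vector<uint8_t> &data /* OUT */)
{
  std::vector<unsigned> sizes;                  /* analogue of hb_sink (sizes) */
  for (const DictBytes &d : dicts)
  {
    sizes.push_back ((unsigned) d.size ());
    data.insert (data.end (), d.begin (), d.end ());
  }
  return sizes;                                 /* later handed to serialize_header () */
}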
- template - static unsigned int calculate_serialized_size (unsigned int &offSize_ /* OUT */, - const hb_vector_t &fontDicts, - unsigned int fdCount, - const remap_t &fdmap, - OP_SERIALIZER& opszr) - { - unsigned int dictsSize = 0; - for (unsigned int i = 0; i < fontDicts.len; i++) - if (fdmap.includes (i)) - dictsSize += FontDict::calculate_serialized_size (fontDicts[i], opszr); - - offSize_ = calcOffSize (dictsSize); - return CFFIndex::calculate_serialized_size (offSize_, fdCount, dictsSize); + /* serialize INDEX header */ + return_trace (CFFIndex::serialize_header (c, hb_iter (sizes))); } }; @@ -574,21 +481,20 @@ struct FDSelect0 { } hb_codepoint_t get_fd (hb_codepoint_t glyph) const - { - return (hb_codepoint_t)fds[glyph]; - } + { return (hb_codepoint_t) fds[glyph]; } unsigned int get_size (unsigned int num_glyphs) const { return HBUINT8::static_size * num_glyphs; } - HBUINT8 fds[VAR]; + HBUINT8 fds[HB_VAR_ARRAY]; - DEFINE_SIZE_MIN (1); + DEFINE_SIZE_MIN (0); }; template -struct FDSelect3_4_Range { - bool sanitize (hb_sanitize_context_t *c, const void */*nullptr*/, unsigned int fdcount) const +struct FDSelect3_4_Range +{ + bool sanitize (hb_sanitize_context_t *c, const void * /*nullptr*/, unsigned int fdcount) const { TRACE_SANITIZE (this); return_trace (first < c->get_num_glyphs () && (fd < fdcount)); @@ -596,12 +502,13 @@ struct FDSelect3_4_Range { GID_TYPE first; FD_TYPE fd; - + public: DEFINE_SIZE_STATIC (GID_TYPE::static_size + FD_TYPE::static_size); }; template -struct FDSelect3_4 { +struct FDSelect3_4 +{ unsigned int get_size () const { return GID_TYPE::static_size * 2 + ranges.get_size (); } @@ -613,10 +520,8 @@ struct FDSelect3_4 { return_trace (false); for (unsigned int i = 1; i < nRanges (); i++) - { if (unlikely (ranges[i - 1].first >= ranges[i].first)) - return_trace (false); - } + return_trace (false); if (unlikely (!sentinel().sanitize (c) || (sentinel() != c->get_num_glyphs ()))) return_trace (false); @@ -631,13 +536,13 @@ struct FDSelect3_4 { if (glyph < ranges[i].first) break; - return (hb_codepoint_t)ranges[i - 1].fd; + return (hb_codepoint_t) ranges[i - 1].fd; } - GID_TYPE &nRanges () { return ranges.len; } - GID_TYPE nRanges () const { return ranges.len; } - GID_TYPE &sentinel () { return StructAfter (ranges[nRanges () - 1]); } - const GID_TYPE &sentinel () const { return StructAfter (ranges[nRanges () - 1]); } + GID_TYPE &nRanges () { return ranges.len; } + GID_TYPE nRanges () const { return ranges.len; } + GID_TYPE &sentinel () { return StructAfter (ranges[nRanges () - 1]); } + const GID_TYPE &sentinel () const { return StructAfter (ranges[nRanges () - 1]); } ArrayOf, GID_TYPE> ranges; /* GID_TYPE sentinel */ @@ -648,56 +553,60 @@ struct FDSelect3_4 { typedef FDSelect3_4 FDSelect3; typedef FDSelect3_4_Range FDSelect3_Range; -struct FDSelect { - bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const - { - TRACE_SANITIZE (this); - - return_trace (likely (c->check_struct (this) && (format == 0 || format == 3) && - (format == 0)? 
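/*
 * A minimal sketch of the two FDSelect lookups that the switch-based rewrite
 * of get_fd below dispatches between: format 0 stores one FD byte per glyph,
 * format 3 stores sorted (first glyph, fd) ranges closed by a sentinel.
 * Plain C++ with illustrative types; both assume the glyph is covered.
 */
#include <vector>
#include <cstdint>

struct fd_range { uint16_t first; uint8_t fd; };

static uint8_t fd_for_glyph_format0 (const std::vector<uint8_t> &fds, unsigned glyph)
{ return fds[glyph]; }

static uint8_t fd_for_glyph_format3 (const std::vector<fd_range> &ranges, unsigned glyph)
{
  unsigned i = 1;
  while (i < ranges.size () && glyph >= ranges[i].first)  /* ranges sorted by first glyph */
    i++;
  return ranges[i - 1].fd;                                /* last range whose first <= glyph */
}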
- u.format0.sanitize (c, fdcount): - u.format3.sanitize (c, fdcount))); - } - +struct FDSelect +{ bool serialize (hb_serialize_context_t *c, const FDSelect &src, unsigned int num_glyphs) { TRACE_SERIALIZE (this); unsigned int size = src.get_size (num_glyphs); FDSelect *dest = c->allocate_size (size); - if (unlikely (dest == nullptr)) return_trace (false); + if (unlikely (!dest)) return_trace (false); memcpy (dest, &src, size); return_trace (true); } - unsigned int calculate_serialized_size (unsigned int num_glyphs) const - { return get_size (num_glyphs); } - unsigned int get_size (unsigned int num_glyphs) const { - unsigned int size = format.static_size; - if (format == 0) - size += u.format0.get_size (num_glyphs); - else - size += u.format3.get_size (); - return size; + switch (format) + { + case 0: return format.static_size + u.format0.get_size (num_glyphs); + case 3: return format.static_size + u.format3.get_size (); + default:return 0; + } } hb_codepoint_t get_fd (hb_codepoint_t glyph) const { - if (this == &Null(FDSelect)) - return 0; - if (format == 0) - return u.format0.get_fd (glyph); - else - return u.format3.get_fd (glyph); + if (this == &Null (FDSelect)) return 0; + + switch (format) + { + case 0: return u.format0.get_fd (glyph); + case 3: return u.format3.get_fd (glyph); + default:return 0; + } + } + + bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const + { + TRACE_SANITIZE (this); + if (unlikely (!c->check_struct (this))) + return_trace (false); + + switch (format) + { + case 0: return_trace (u.format0.sanitize (c, fdcount)); + case 3: return_trace (u.format3.sanitize (c, fdcount)); + default:return_trace (false); + } } HBUINT8 format; union { - FDSelect0 format0; - FDSelect3 format3; + FDSelect0 format0; + FDSelect3 format3; } u; - + public: DEFINE_SIZE_MIN (1); }; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-std-str.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-std-str.hh new file mode 100644 index 000000000000..65d56ae18b5d --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-std-str.hh @@ -0,0 +1,425 @@ +/* + * Copyright © 2019 Adobe, Inc. + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. + * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Adobe Author(s): Michiharu Ariza + */ + +#ifndef HB_OT_CFF1_STD_STR_HH +#if 0 /* Make checks happy. 
*/ +#define HB_OT_CFF1_STD_STR_HH +#include "hb.hh" +#endif + +_S(".notdef") +_S("space") +_S("exclam") +_S("quotedbl") +_S("numbersign") +_S("dollar") +_S("percent") +_S("ampersand") +_S("quoteright") +_S("parenleft") +_S("parenright") +_S("asterisk") +_S("plus") +_S("comma") +_S("hyphen") +_S("period") +_S("slash") +_S("zero") +_S("one") +_S("two") +_S("three") +_S("four") +_S("five") +_S("six") +_S("seven") +_S("eight") +_S("nine") +_S("colon") +_S("semicolon") +_S("less") +_S("equal") +_S("greater") +_S("question") +_S("at") +_S("A") +_S("B") +_S("C") +_S("D") +_S("E") +_S("F") +_S("G") +_S("H") +_S("I") +_S("J") +_S("K") +_S("L") +_S("M") +_S("N") +_S("O") +_S("P") +_S("Q") +_S("R") +_S("S") +_S("T") +_S("U") +_S("V") +_S("W") +_S("X") +_S("Y") +_S("Z") +_S("bracketleft") +_S("backslash") +_S("bracketright") +_S("asciicircum") +_S("underscore") +_S("quoteleft") +_S("a") +_S("b") +_S("c") +_S("d") +_S("e") +_S("f") +_S("g") +_S("h") +_S("i") +_S("j") +_S("k") +_S("l") +_S("m") +_S("n") +_S("o") +_S("p") +_S("q") +_S("r") +_S("s") +_S("t") +_S("u") +_S("v") +_S("w") +_S("x") +_S("y") +_S("z") +_S("braceleft") +_S("bar") +_S("braceright") +_S("asciitilde") +_S("exclamdown") +_S("cent") +_S("sterling") +_S("fraction") +_S("yen") +_S("florin") +_S("section") +_S("currency") +_S("quotesingle") +_S("quotedblleft") +_S("guillemotleft") +_S("guilsinglleft") +_S("guilsinglright") +_S("fi") +_S("fl") +_S("endash") +_S("dagger") +_S("daggerdbl") +_S("periodcentered") +_S("paragraph") +_S("bullet") +_S("quotesinglbase") +_S("quotedblbase") +_S("quotedblright") +_S("guillemotright") +_S("ellipsis") +_S("perthousand") +_S("questiondown") +_S("grave") +_S("acute") +_S("circumflex") +_S("tilde") +_S("macron") +_S("breve") +_S("dotaccent") +_S("dieresis") +_S("ring") +_S("cedilla") +_S("hungarumlaut") +_S("ogonek") +_S("caron") +_S("emdash") +_S("AE") +_S("ordfeminine") +_S("Lslash") +_S("Oslash") +_S("OE") +_S("ordmasculine") +_S("ae") +_S("dotlessi") +_S("lslash") +_S("oslash") +_S("oe") +_S("germandbls") +_S("onesuperior") +_S("logicalnot") +_S("mu") +_S("trademark") +_S("Eth") +_S("onehalf") +_S("plusminus") +_S("Thorn") +_S("onequarter") +_S("divide") +_S("brokenbar") +_S("degree") +_S("thorn") +_S("threequarters") +_S("twosuperior") +_S("registered") +_S("minus") +_S("eth") +_S("multiply") +_S("threesuperior") +_S("copyright") +_S("Aacute") +_S("Acircumflex") +_S("Adieresis") +_S("Agrave") +_S("Aring") +_S("Atilde") +_S("Ccedilla") +_S("Eacute") +_S("Ecircumflex") +_S("Edieresis") +_S("Egrave") +_S("Iacute") +_S("Icircumflex") +_S("Idieresis") +_S("Igrave") +_S("Ntilde") +_S("Oacute") +_S("Ocircumflex") +_S("Odieresis") +_S("Ograve") +_S("Otilde") +_S("Scaron") +_S("Uacute") +_S("Ucircumflex") +_S("Udieresis") +_S("Ugrave") +_S("Yacute") +_S("Ydieresis") +_S("Zcaron") +_S("aacute") +_S("acircumflex") +_S("adieresis") +_S("agrave") +_S("aring") +_S("atilde") +_S("ccedilla") +_S("eacute") +_S("ecircumflex") +_S("edieresis") +_S("egrave") +_S("iacute") +_S("icircumflex") +_S("idieresis") +_S("igrave") +_S("ntilde") +_S("oacute") +_S("ocircumflex") +_S("odieresis") +_S("ograve") +_S("otilde") +_S("scaron") +_S("uacute") +_S("ucircumflex") +_S("udieresis") +_S("ugrave") +_S("yacute") +_S("ydieresis") +_S("zcaron") +_S("exclamsmall") +_S("Hungarumlautsmall") +_S("dollaroldstyle") +_S("dollarsuperior") +_S("ampersandsmall") +_S("Acutesmall") +_S("parenleftsuperior") +_S("parenrightsuperior") +_S("twodotenleader") +_S("onedotenleader") +_S("zerooldstyle") +_S("oneoldstyle") +_S("twooldstyle") 
+_S("threeoldstyle") +_S("fouroldstyle") +_S("fiveoldstyle") +_S("sixoldstyle") +_S("sevenoldstyle") +_S("eightoldstyle") +_S("nineoldstyle") +_S("commasuperior") +_S("threequartersemdash") +_S("periodsuperior") +_S("questionsmall") +_S("asuperior") +_S("bsuperior") +_S("centsuperior") +_S("dsuperior") +_S("esuperior") +_S("isuperior") +_S("lsuperior") +_S("msuperior") +_S("nsuperior") +_S("osuperior") +_S("rsuperior") +_S("ssuperior") +_S("tsuperior") +_S("ff") +_S("ffi") +_S("ffl") +_S("parenleftinferior") +_S("parenrightinferior") +_S("Circumflexsmall") +_S("hyphensuperior") +_S("Gravesmall") +_S("Asmall") +_S("Bsmall") +_S("Csmall") +_S("Dsmall") +_S("Esmall") +_S("Fsmall") +_S("Gsmall") +_S("Hsmall") +_S("Ismall") +_S("Jsmall") +_S("Ksmall") +_S("Lsmall") +_S("Msmall") +_S("Nsmall") +_S("Osmall") +_S("Psmall") +_S("Qsmall") +_S("Rsmall") +_S("Ssmall") +_S("Tsmall") +_S("Usmall") +_S("Vsmall") +_S("Wsmall") +_S("Xsmall") +_S("Ysmall") +_S("Zsmall") +_S("colonmonetary") +_S("onefitted") +_S("rupiah") +_S("Tildesmall") +_S("exclamdownsmall") +_S("centoldstyle") +_S("Lslashsmall") +_S("Scaronsmall") +_S("Zcaronsmall") +_S("Dieresissmall") +_S("Brevesmall") +_S("Caronsmall") +_S("Dotaccentsmall") +_S("Macronsmall") +_S("figuredash") +_S("hypheninferior") +_S("Ogoneksmall") +_S("Ringsmall") +_S("Cedillasmall") +_S("questiondownsmall") +_S("oneeighth") +_S("threeeighths") +_S("fiveeighths") +_S("seveneighths") +_S("onethird") +_S("twothirds") +_S("zerosuperior") +_S("foursuperior") +_S("fivesuperior") +_S("sixsuperior") +_S("sevensuperior") +_S("eightsuperior") +_S("ninesuperior") +_S("zeroinferior") +_S("oneinferior") +_S("twoinferior") +_S("threeinferior") +_S("fourinferior") +_S("fiveinferior") +_S("sixinferior") +_S("seveninferior") +_S("eightinferior") +_S("nineinferior") +_S("centinferior") +_S("dollarinferior") +_S("periodinferior") +_S("commainferior") +_S("Agravesmall") +_S("Aacutesmall") +_S("Acircumflexsmall") +_S("Atildesmall") +_S("Adieresissmall") +_S("Aringsmall") +_S("AEsmall") +_S("Ccedillasmall") +_S("Egravesmall") +_S("Eacutesmall") +_S("Ecircumflexsmall") +_S("Edieresissmall") +_S("Igravesmall") +_S("Iacutesmall") +_S("Icircumflexsmall") +_S("Idieresissmall") +_S("Ethsmall") +_S("Ntildesmall") +_S("Ogravesmall") +_S("Oacutesmall") +_S("Ocircumflexsmall") +_S("Otildesmall") +_S("Odieresissmall") +_S("OEsmall") +_S("Oslashsmall") +_S("Ugravesmall") +_S("Uacutesmall") +_S("Ucircumflexsmall") +_S("Udieresissmall") +_S("Yacutesmall") +_S("Thornsmall") +_S("Ydieresissmall") +_S("001.000") +_S("001.001") +_S("001.002") +_S("001.003") +_S("Black") +_S("Bold") +_S("Book") +_S("Light") +_S("Medium") +_S("Regular") +_S("Roman") +_S("Semibold") + +#endif /* HB_OT_CFF1_STD_STR_HH */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.cc b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.cc index a5ee1f31f6f9..24287364ba2c 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.cc @@ -24,11 +24,29 @@ * Adobe Author(s): Michiharu Ariza */ +#include "hb.hh" + +#ifndef HB_NO_CFF + +#include "hb-draw.hh" +#include "hb-algs.hh" #include "hb-ot-cff1-table.hh" #include "hb-cff1-interp-cs.hh" using namespace CFF; +struct sid_to_gid_t +{ + uint16_t sid; + uint8_t gid; + + int cmp (uint16_t a) const + { + if (a == sid) return 0; + return (a < sid) ? 
-1 : 1; + } +}; + /* SID to code */ static const uint8_t standard_encoding_to_code [] = { @@ -100,6 +118,80 @@ static const uint16_t expert_subset_charset_to_sid [] = 340, 341, 342, 343, 344, 345, 346 }; +/* SID to glyph ID */ +static const sid_to_gid_t expert_charset_sid_to_gid [] = +{ + { 1, 1 }, { 13, 12 }, { 14, 13 }, { 15, 14 }, + { 27, 26 }, { 28, 27 }, { 99, 15 }, { 109, 46 }, + { 110, 47 }, { 150, 111 }, { 155, 101 }, { 158, 100 }, + { 163, 102 }, { 164, 112 }, { 169, 113 }, { 229, 2 }, + { 230, 3 }, { 231, 4 }, { 232, 5 }, { 233, 6 }, + { 234, 7 }, { 235, 8 }, { 236, 9 }, { 237, 10 }, + { 238, 11 }, { 239, 16 }, { 240, 17 }, { 241, 18 }, + { 242, 19 }, { 243, 20 }, { 244, 21 }, { 245, 22 }, + { 246, 23 }, { 247, 24 }, { 248, 25 }, { 249, 28 }, + { 250, 29 }, { 251, 30 }, { 252, 31 }, { 253, 32 }, + { 254, 33 }, { 255, 34 }, { 256, 35 }, { 257, 36 }, + { 258, 37 }, { 259, 38 }, { 260, 39 }, { 261, 40 }, + { 262, 41 }, { 263, 42 }, { 264, 43 }, { 265, 44 }, + { 266, 45 }, { 267, 48 }, { 268, 49 }, { 269, 50 }, + { 270, 51 }, { 271, 52 }, { 272, 53 }, { 273, 54 }, + { 274, 55 }, { 275, 56 }, { 276, 57 }, { 277, 58 }, + { 278, 59 }, { 279, 60 }, { 280, 61 }, { 281, 62 }, + { 282, 63 }, { 283, 64 }, { 284, 65 }, { 285, 66 }, + { 286, 67 }, { 287, 68 }, { 288, 69 }, { 289, 70 }, + { 290, 71 }, { 291, 72 }, { 292, 73 }, { 293, 74 }, + { 294, 75 }, { 295, 76 }, { 296, 77 }, { 297, 78 }, + { 298, 79 }, { 299, 80 }, { 300, 81 }, { 301, 82 }, + { 302, 83 }, { 303, 84 }, { 304, 85 }, { 305, 86 }, + { 306, 87 }, { 307, 88 }, { 308, 89 }, { 309, 90 }, + { 310, 91 }, { 311, 92 }, { 312, 93 }, { 313, 94 }, + { 314, 95 }, { 315, 96 }, { 316, 97 }, { 317, 98 }, + { 318, 99 }, { 319, 103 }, { 320, 104 }, { 321, 105 }, + { 322, 106 }, { 323, 107 }, { 324, 108 }, { 325, 109 }, + { 326, 110 }, { 327, 114 }, { 328, 115 }, { 329, 116 }, + { 330, 117 }, { 331, 118 }, { 332, 119 }, { 333, 120 }, + { 334, 121 }, { 335, 122 }, { 336, 123 }, { 337, 124 }, + { 338, 125 }, { 339, 126 }, { 340, 127 }, { 341, 128 }, + { 342, 129 }, { 343, 130 }, { 344, 131 }, { 345, 132 }, + { 346, 133 }, { 347, 134 }, { 348, 135 }, { 349, 136 }, + { 350, 137 }, { 351, 138 }, { 352, 139 }, { 353, 140 }, + { 354, 141 }, { 355, 142 }, { 356, 143 }, { 357, 144 }, + { 358, 145 }, { 359, 146 }, { 360, 147 }, { 361, 148 }, + { 362, 149 }, { 363, 150 }, { 364, 151 }, { 365, 152 }, + { 366, 153 }, { 367, 154 }, { 368, 155 }, { 369, 156 }, + { 370, 157 }, { 371, 158 }, { 372, 159 }, { 373, 160 }, + { 374, 161 }, { 375, 162 }, { 376, 163 }, { 377, 164 }, + { 378, 165 } +}; + +/* SID to glyph ID */ +static const sid_to_gid_t expert_subset_charset_sid_to_gid [] = +{ + { 1, 1 }, { 13, 8 }, { 14, 9 }, { 15, 10 }, + { 27, 22 }, { 28, 23 }, { 99, 11 }, { 109, 41 }, + { 110, 42 }, { 150, 64 }, { 155, 55 }, { 158, 54 }, + { 163, 56 }, { 164, 65 }, { 169, 66 }, { 231, 2 }, + { 232, 3 }, { 235, 4 }, { 236, 5 }, { 237, 6 }, + { 238, 7 }, { 239, 12 }, { 240, 13 }, { 241, 14 }, + { 242, 15 }, { 243, 16 }, { 244, 17 }, { 245, 18 }, + { 246, 19 }, { 247, 20 }, { 248, 21 }, { 249, 24 }, + { 250, 25 }, { 251, 26 }, { 253, 27 }, { 254, 28 }, + { 255, 29 }, { 256, 30 }, { 257, 31 }, { 258, 32 }, + { 259, 33 }, { 260, 34 }, { 261, 35 }, { 262, 36 }, + { 263, 37 }, { 264, 38 }, { 265, 39 }, { 266, 40 }, + { 267, 43 }, { 268, 44 }, { 269, 45 }, { 270, 46 }, + { 272, 47 }, { 300, 48 }, { 301, 49 }, { 302, 50 }, + { 305, 51 }, { 314, 52 }, { 315, 53 }, { 320, 57 }, + { 321, 58 }, { 322, 59 }, { 323, 60 }, { 324, 61 }, + { 325, 62 }, { 326, 63 }, { 327, 67 }, { 
328, 68 }, + { 329, 69 }, { 330, 70 }, { 331, 71 }, { 332, 72 }, + { 333, 73 }, { 334, 74 }, { 335, 75 }, { 336, 76 }, + { 337, 77 }, { 338, 78 }, { 339, 79 }, { 340, 80 }, + { 341, 81 }, { 342, 82 }, { 343, 83 }, { 344, 84 }, + { 345, 85 }, { 346, 86 } +}; + /* code to SID */ static const uint8_t standard_encoding_to_sid [] = { @@ -153,6 +245,18 @@ hb_codepoint_t OT::cff1::lookup_expert_subset_charset_for_sid (hb_codepoint_t gl return 0; } +hb_codepoint_t OT::cff1::lookup_expert_charset_for_glyph (hb_codepoint_t sid) +{ + const auto *pair = hb_sorted_array (expert_charset_sid_to_gid).bsearch (sid); + return pair ? pair->gid : 0; +} + +hb_codepoint_t OT::cff1::lookup_expert_subset_charset_for_glyph (hb_codepoint_t sid) +{ + const auto *pair = hb_sorted_array (expert_subset_charset_sid_to_gid).bsearch (sid); + return pair ? pair->gid : 0; +} + hb_codepoint_t OT::cff1::lookup_standard_encoding_for_sid (hb_codepoint_t code) { if (code < ARRAY_LENGTH (standard_encoding_to_sid)) @@ -165,8 +269,8 @@ struct bounds_t { void init () { - min.set_int (0x7FFFFFFF, 0x7FFFFFFF); - max.set_int (-0x80000000, -0x80000000); + min.set_int (INT_MAX, INT_MAX); + max.set_int (INT_MIN, INT_MIN); } void update (const point_t &pt) @@ -199,14 +303,13 @@ struct bounds_t } } - bool empty () const - { return (min.x >= max.x) || (min.y >= max.y); } + bool empty () const { return (min.x >= max.x) || (min.y >= max.y); } point_t min; point_t max; }; -struct extents_param_t +struct cff1_extents_param_t { void init (const OT::cff1::accelerator_t *_cff) { @@ -215,25 +318,25 @@ struct extents_param_t bounds.init (); } - void start_path () { path_open = true; } - void end_path () { path_open = false; } + void start_path () { path_open = true; } + void end_path () { path_open = false; } bool is_path_open () const { return path_open; } - bool path_open; - bounds_t bounds; + bool path_open; + bounds_t bounds; const OT::cff1::accelerator_t *cff; }; -struct cff1_path_procs_extents_t : path_procs_t +struct cff1_path_procs_extents_t : path_procs_t { - static void moveto (cff1_cs_interp_env_t &env, extents_param_t& param, const point_t &pt) + static void moveto (cff1_cs_interp_env_t &env, cff1_extents_param_t& param, const point_t &pt) { param.end_path (); env.moveto (pt); } - static void line (cff1_cs_interp_env_t &env, extents_param_t& param, const point_t &pt1) + static void line (cff1_cs_interp_env_t &env, cff1_extents_param_t& param, const point_t &pt1) { if (!param.is_path_open ()) { @@ -244,7 +347,7 @@ struct cff1_path_procs_extents_t : path_procs_t +struct cff1_cs_opset_extents_t : cff1_cs_opset_t { - static void process_seac (cff1_cs_interp_env_t &env, extents_param_t& param) + static void process_seac (cff1_cs_interp_env_t &env, cff1_extents_param_t& param) { unsigned int n = env.argStack.get_count (); point_t delta; @@ -292,20 +395,25 @@ bool _get_bounds (const OT::cff1::accelerator_t *cff, hb_codepoint_t glyph, boun if (unlikely (!cff->is_valid () || (glyph >= cff->num_glyphs))) return false; unsigned int fd = cff->fdSelect->get_fd (glyph); - cff1_cs_interpreter_t interp; + cff1_cs_interpreter_t interp; const byte_str_t str = (*cff->charStrings)[glyph]; interp.env.init (str, *cff, fd); interp.env.set_in_seac (in_seac); - extents_param_t param; + cff1_extents_param_t param; param.init (cff); if (unlikely (!interp.interpret (param))) return false; bounds = param.bounds; return true; } -bool OT::cff1::accelerator_t::get_extents (hb_codepoint_t glyph, hb_glyph_extents_t *extents) const +bool 
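/*
 * A minimal sketch of the reverse lookup the sorted sid-to-gid tables above
 * enable: the (sid, gid) pairs are ordered by sid, so
 * lookup_expert_charset_for_glyph is a binary search that falls back to 0
 * (.notdef) when the SID is absent.  Plain C++ analogue of
 * hb_sorted_array (...).bsearch (sid).
 */
#include <cstdint>
#include <cstddef>

struct sid_gid { uint16_t sid; uint8_t gid; };

static uint8_t gid_for_sid (const sid_gid *table, size_t count, uint16_t sid)
{
  size_t lo = 0, hi = count;
  while (lo < hi)
  {
    size_t mid = lo + (hi - lo) / 2;
    if (table[mid].sid < sid)      lo = mid + 1;
    else if (table[mid].sid > sid) hi = mid;
    else                           return table[mid].gid;
  }
  return 0;                                   /* SID not in this charset */
}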
OT::cff1::accelerator_t::get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const { - bounds_t bounds; +#ifdef HB_NO_OT_FONT_CFF + /* XXX Remove check when this code moves to .hh file. */ + return true; +#endif + + bounds_t bounds; if (!_get_bounds (this, glyph, bounds)) return false; @@ -317,8 +425,8 @@ bool OT::cff1::accelerator_t::get_extents (hb_codepoint_t glyph, hb_glyph_extent } else { - extents->x_bearing = (int32_t)bounds.min.x.floor (); - extents->width = (int32_t)bounds.max.x.ceil () - extents->x_bearing; + extents->x_bearing = font->em_scalef_x (bounds.min.x.to_real ()); + extents->width = font->em_scalef_x (bounds.max.x.to_real () - bounds.min.x.to_real ()); } if (bounds.min.y >= bounds.max.y) { @@ -327,13 +435,137 @@ bool OT::cff1::accelerator_t::get_extents (hb_codepoint_t glyph, hb_glyph_extent } else { - extents->y_bearing = (int32_t)bounds.max.y.ceil (); - extents->height = (int32_t)bounds.min.y.floor () - extents->y_bearing; + extents->y_bearing = font->em_scalef_y (bounds.max.y.to_real ()); + extents->height = font->em_scalef_y (bounds.min.y.to_real () - bounds.max.y.to_real ()); } return true; } +#ifdef HB_EXPERIMENTAL_API +struct cff1_path_param_t +{ + cff1_path_param_t (const OT::cff1::accelerator_t *cff_, hb_font_t *font_, + draw_helper_t &draw_helper_, point_t *delta_) + { + draw_helper = &draw_helper_; + cff = cff_; + font = font_; + delta = delta_; + } + + void move_to (const point_t &p) + { + point_t point = p; + if (delta) point.move (*delta); + draw_helper->move_to (font->em_scalef_x (point.x.to_real ()), font->em_scalef_y (point.y.to_real ())); + } + + void line_to (const point_t &p) + { + point_t point = p; + if (delta) point.move (*delta); + draw_helper->line_to (font->em_scalef_x (point.x.to_real ()), font->em_scalef_y (point.y.to_real ())); + } + + void cubic_to (const point_t &p1, const point_t &p2, const point_t &p3) + { + point_t point1 = p1, point2 = p2, point3 = p3; + if (delta) + { + point1.move (*delta); + point2.move (*delta); + point3.move (*delta); + } + draw_helper->cubic_to (font->em_scalef_x (point1.x.to_real ()), font->em_scalef_y (point1.y.to_real ()), + font->em_scalef_x (point2.x.to_real ()), font->em_scalef_y (point2.y.to_real ()), + font->em_scalef_x (point3.x.to_real ()), font->em_scalef_y (point3.y.to_real ())); + } + + void end_path () { draw_helper->end_path (); } + + hb_font_t *font; + draw_helper_t *draw_helper; + point_t *delta; + + const OT::cff1::accelerator_t *cff; +}; + +struct cff1_path_procs_path_t : path_procs_t +{ + static void moveto (cff1_cs_interp_env_t &env, cff1_path_param_t& param, const point_t &pt) + { + param.move_to (pt); + env.moveto (pt); + } + + static void line (cff1_cs_interp_env_t &env, cff1_path_param_t ¶m, const point_t &pt1) + { + param.line_to (pt1); + env.moveto (pt1); + } + + static void curve (cff1_cs_interp_env_t &env, cff1_path_param_t ¶m, const point_t &pt1, const point_t &pt2, const point_t &pt3) + { + param.cubic_to (pt1, pt2, pt3); + env.moveto (pt3); + } +}; + +static bool _get_path (const OT::cff1::accelerator_t *cff, hb_font_t *font, hb_codepoint_t glyph, + draw_helper_t &draw_helper, bool in_seac = false, point_t *delta = nullptr); + +struct cff1_cs_opset_path_t : cff1_cs_opset_t +{ + static void process_seac (cff1_cs_interp_env_t &env, cff1_path_param_t& param) + { + /* End previous path */ + param.end_path (); + + unsigned int n = env.argStack.get_count (); + point_t delta; + delta.x = env.argStack[n-4]; + delta.y = env.argStack[n-3]; + hb_codepoint_t base = 
param.cff->std_code_to_glyph (env.argStack[n-2].to_int ()); + hb_codepoint_t accent = param.cff->std_code_to_glyph (env.argStack[n-1].to_int ()); + + if (unlikely (!(!env.in_seac && base && accent + && _get_path (param.cff, param.font, base, *param.draw_helper, true) + && _get_path (param.cff, param.font, accent, *param.draw_helper, true, &delta)))) + env.set_error (); + } +}; + +bool _get_path (const OT::cff1::accelerator_t *cff, hb_font_t *font, hb_codepoint_t glyph, + draw_helper_t &draw_helper, bool in_seac, point_t *delta) +{ + if (unlikely (!cff->is_valid () || (glyph >= cff->num_glyphs))) return false; + + unsigned int fd = cff->fdSelect->get_fd (glyph); + cff1_cs_interpreter_t interp; + const byte_str_t str = (*cff->charStrings)[glyph]; + interp.env.init (str, *cff, fd); + interp.env.set_in_seac (in_seac); + cff1_path_param_t param (cff, font, draw_helper, delta); + if (unlikely (!interp.interpret (param))) return false; + + /* Let's end the path specially since it is called inside seac also */ + param.end_path (); + + return true; +} + +bool OT::cff1::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, draw_helper_t &draw_helper) const +{ +#ifdef HB_NO_OT_FONT_CFF + /* XXX Remove check when this code moves to .hh file. */ + return true; +#endif + + return _get_path (this, font, glyph, draw_helper); +} +#endif + struct get_seac_param_t { void init (const OT::cff1::accelerator_t *_cff) @@ -383,3 +615,6 @@ bool OT::cff1::accelerator_t::get_seac_components (hb_codepoint_t glyph, hb_code } return false; } + + +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.hh index 73258e79007c..6768e4ea1077 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff1-table.hh @@ -27,15 +27,21 @@ #ifndef HB_OT_CFF1_TABLE_HH #define HB_OT_CFF1_TABLE_HH -#include "hb-ot-head-table.hh" #include "hb-ot-cff-common.hh" #include "hb-subset-cff1.hh" +#include "hb-draw.hh" + +#define HB_STRING_ARRAY_NAME cff1_std_strings +#define HB_STRING_ARRAY_LIST "hb-ot-cff1-std-str.hh" +#include "hb-string-array.hh" +#undef HB_STRING_ARRAY_LIST +#undef HB_STRING_ARRAY_NAME namespace CFF { /* * CFF -- Compact Font Format (CFF) - * http://www.adobe.com/content/dam/acom/en/devnet/font/pdfs/5176.CFF.pdf + * https://www.adobe.com/content/dam/acom/en/devnet/font/pdfs/5176.CFF.pdf */ #define HB_OT_TAG_cff1 HB_TAG('C','F','F',' ') @@ -49,7 +55,6 @@ template struct CFF1IndexOf : CFFIndexOf {}; typedef CFFIndex CFF1Index; typedef CFF1Index CFF1CharStrings; -typedef FDArray CFF1FDArray; typedef Subrs CFF1Subrs; struct CFF1FDSelect : FDSelect {}; @@ -59,14 +64,14 @@ struct Encoding0 { bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); - return_trace (c->check_struct (this) && codes[nCodes - 1].sanitize (c)); + return_trace (codes.sanitize (c)); } hb_codepoint_t get_code (hb_codepoint_t glyph) const { assert (glyph > 0); glyph--; - if (glyph < nCodes) + if (glyph < nCodes ()) { return (hb_codepoint_t)codes[glyph]; } @@ -74,13 +79,12 @@ struct Encoding0 { return CFF_UNDEF_CODE; } - unsigned int get_size () const - { return HBUINT8::static_size * (nCodes + 1); } + HBUINT8 &nCodes () { return codes.len; } + HBUINT8 nCodes () const { return codes.len; } - HBUINT8 nCodes; - HBUINT8 codes[VAR]; + ArrayOf codes; - DEFINE_SIZE_ARRAY(1, codes); + DEFINE_SIZE_ARRAY_SIZED (1, codes); }; struct Encoding1_Range { @@ -97,34 +101,34 @@ struct 
Encoding1_Range { }; struct Encoding1 { - unsigned int get_size () const - { return HBUINT8::static_size + Encoding1_Range::static_size * nRanges; } - bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); - return_trace (c->check_struct (this) && ((nRanges == 0) || (ranges[nRanges - 1]).sanitize (c))); + return_trace (ranges.sanitize (c)); } hb_codepoint_t get_code (hb_codepoint_t glyph) const { assert (glyph > 0); glyph--; - for (unsigned int i = 0; i < nRanges; i++) + for (unsigned int i = 0; i < nRanges (); i++) { if (glyph <= ranges[i].nLeft) { - return (hb_codepoint_t)ranges[i].first + glyph; + hb_codepoint_t code = (hb_codepoint_t) ranges[i].first + glyph; + return (likely (code < 0x100) ? code: CFF_UNDEF_CODE); } glyph -= (ranges[i].nLeft + 1); } return CFF_UNDEF_CODE; } - HBUINT8 nRanges; - Encoding1_Range ranges[VAR]; + HBUINT8 &nRanges () { return ranges.len; } + HBUINT8 nRanges () const { return ranges.len; } + + ArrayOf ranges; - DEFINE_SIZE_ARRAY (1, ranges); + DEFINE_SIZE_ARRAY_SIZED (1, ranges); }; struct SuppEncoding { @@ -144,47 +148,33 @@ struct CFF1SuppEncData { bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); - return_trace (c->check_struct (this) && ((nSups == 0) || (supps[nSups - 1]).sanitize (c))); + return_trace (supps.sanitize (c)); } void get_codes (hb_codepoint_t sid, hb_vector_t &codes) const { - for (unsigned int i = 0; i < nSups; i++) + for (unsigned int i = 0; i < nSups (); i++) if (sid == supps[i].glyph) codes.push (supps[i].code); } - unsigned int get_size () const - { return HBUINT8::static_size + SuppEncoding::static_size * nSups; } + HBUINT8 &nSups () { return supps.len; } + HBUINT8 nSups () const { return supps.len; } - HBUINT8 nSups; - SuppEncoding supps[VAR]; + ArrayOf supps; - DEFINE_SIZE_ARRAY (1, supps); + DEFINE_SIZE_ARRAY_SIZED (1, supps); }; -struct Encoding { - bool sanitize (hb_sanitize_context_t *c) const - { - TRACE_SANITIZE (this); - - if (unlikely (!c->check_struct (this))) - return_trace (false); - unsigned int fmt = format & 0x7F; - if (unlikely (fmt > 1)) - return_trace (false); - if (unlikely (!((fmt == 0)? u.format0.sanitize (c): u.format1.sanitize (c)))) - return_trace (false); - return_trace (((format & 0x80) == 0) || suppEncData ().sanitize (c)); - } - +struct Encoding +{ /* serialize a fullset Encoding */ bool serialize (hb_serialize_context_t *c, const Encoding &src) { TRACE_SERIALIZE (this); unsigned int size = src.get_size (); Encoding *dest = c->allocate_size (size); - if (unlikely (dest == nullptr)) return_trace (false); + if (unlikely (!dest)) return_trace (false); memcpy (dest, &src, size); return_trace (true); } @@ -198,72 +188,66 @@ struct Encoding { { TRACE_SERIALIZE (this); Encoding *dest = c->extend_min (*this); - if (unlikely (dest == nullptr)) return_trace (false); - dest->format.set (format | ((supp_codes.length > 0)? 0x80: 0)); - if (format == 0) + if (unlikely (!dest)) return_trace (false); + dest->format = format | ((supp_codes.length > 0) ? 
0x80 : 0); + switch (format) { + case 0: { Encoding0 *fmt0 = c->allocate_size (Encoding0::min_size + HBUINT8::static_size * enc_count); - if (unlikely (fmt0 == nullptr)) return_trace (false); - fmt0->nCodes.set (enc_count); + if (unlikely (!fmt0)) return_trace (false); + fmt0->nCodes () = enc_count; unsigned int glyph = 0; for (unsigned int i = 0; i < code_ranges.length; i++) { hb_codepoint_t code = code_ranges[i].code; for (int left = (int)code_ranges[i].glyph; left >= 0; left--) - fmt0->codes[glyph++].set (code++); + fmt0->codes[glyph++] = code++; if (unlikely (!((glyph <= 0x100) && (code <= 0x100)))) return_trace (false); } } - else + break; + + case 1: { Encoding1 *fmt1 = c->allocate_size (Encoding1::min_size + Encoding1_Range::static_size * code_ranges.length); - if (unlikely (fmt1 == nullptr)) return_trace (false); - fmt1->nRanges.set (code_ranges.length); + if (unlikely (!fmt1)) return_trace (false); + fmt1->nRanges () = code_ranges.length; for (unsigned int i = 0; i < code_ranges.length; i++) { if (unlikely (!((code_ranges[i].code <= 0xFF) && (code_ranges[i].glyph <= 0xFF)))) return_trace (false); - fmt1->ranges[i].first.set (code_ranges[i].code); - fmt1->ranges[i].nLeft.set (code_ranges[i].glyph); + fmt1->ranges[i].first = code_ranges[i].code; + fmt1->ranges[i].nLeft = code_ranges[i].glyph; } } - if (supp_codes.length > 0) + break; + + } + + if (supp_codes.length) { CFF1SuppEncData *suppData = c->allocate_size (CFF1SuppEncData::min_size + SuppEncoding::static_size * supp_codes.length); - if (unlikely (suppData == nullptr)) return_trace (false); - suppData->nSups.set (supp_codes.length); + if (unlikely (!suppData)) return_trace (false); + suppData->nSups () = supp_codes.length; for (unsigned int i = 0; i < supp_codes.length; i++) { - suppData->supps[i].code.set (supp_codes[i].code); - suppData->supps[i].glyph.set (supp_codes[i].glyph); /* actually SID */ + suppData->supps[i].code = supp_codes[i].code; + suppData->supps[i].glyph = supp_codes[i].glyph; /* actually SID */ } } - return_trace (true); - } - /* parallel to above: calculate the size of a subset Encoding */ - static unsigned int calculate_serialized_size (uint8_t format, - unsigned int enc_count, - unsigned int supp_count) - { - unsigned int size = min_size; - if (format == 0) - size += Encoding0::min_size + HBUINT8::static_size * enc_count; - else - size += Encoding1::min_size + Encoding1_Range::static_size * enc_count; - if (supp_count > 0) - size += CFF1SuppEncData::min_size + SuppEncoding::static_size * supp_count; - return size; + return_trace (true); } unsigned int get_size () const { unsigned int size = min_size; - if (table_format () == 0) - size += u.format0.get_size (); - else - size += u.format1.get_size (); + switch (table_format ()) + { + case 0: size += u.format0.get_size (); break; + case 1: size += u.format1.get_size (); break; + } if (has_supplement ()) size += suppEncData ().get_size (); return size; @@ -271,14 +255,16 @@ struct Encoding { hb_codepoint_t get_code (hb_codepoint_t glyph) const { - if (table_format () == 0) - return u.format0.get_code (glyph); - else - return u.format1.get_code (glyph); + switch (table_format ()) + { + case 0: return u.format0.get_code (glyph); + case 1: return u.format1.get_code (glyph); + default:return 0; + } } - uint8_t table_format () const { return (format & 0x7F); } - bool has_supplement () const { return (format & 0x80) != 0; } + uint8_t table_format () const { return format & 0x7F; } + bool has_supplement () const { return format & 0x80; } void get_supplement_codes 
(hb_codepoint_t sid, hb_vector_t &codes) const { @@ -287,21 +273,37 @@ struct Encoding { suppEncData().get_codes (sid, codes); } + bool sanitize (hb_sanitize_context_t *c) const + { + TRACE_SANITIZE (this); + if (unlikely (!c->check_struct (this))) + return_trace (false); + + switch (table_format ()) + { + case 0: if (unlikely (!u.format0.sanitize (c))) { return_trace (false); } break; + case 1: if (unlikely (!u.format1.sanitize (c))) { return_trace (false); } break; + default:return_trace (false); + } + return_trace (likely (!has_supplement () || suppEncData ().sanitize (c))); + } + protected: const CFF1SuppEncData &suppEncData () const { - if ((format & 0x7F) == 0) - return StructAfter (u.format0.codes[u.format0.nCodes-1]); - else - return StructAfter (u.format1.ranges[u.format1.nRanges-1]); + switch (table_format ()) + { + case 0: return StructAfter (u.format0.codes[u.format0.nCodes ()-1]); + case 1: return StructAfter (u.format1.ranges[u.format1.nRanges ()-1]); + default:return Null (CFF1SuppEncData); + } } public: HBUINT8 format; - union { - Encoding0 format0; - Encoding1 format1; + Encoding0 format0; + Encoding1 format1; } u; /* CFF1SuppEncData suppEncData; */ @@ -343,7 +345,7 @@ struct Charset0 { return HBUINT16::static_size * (num_glyphs - 1); } - HBUINT16 sids[VAR]; + HBUINT16 sids[HB_VAR_ARRAY]; DEFINE_SIZE_ARRAY(0, sids); }; @@ -425,7 +427,7 @@ struct Charset1_2 { return size; } - Charset_Range ranges[VAR]; + Charset_Range ranges[HB_VAR_ARRAY]; DEFINE_SIZE_ARRAY (0, ranges); }; @@ -435,30 +437,15 @@ typedef Charset1_2 Charset2; typedef Charset_Range Charset1_Range; typedef Charset_Range Charset2_Range; -struct Charset { - bool sanitize (hb_sanitize_context_t *c) const - { - TRACE_SANITIZE (this); - - if (unlikely (!c->check_struct (this))) - return_trace (false); - if (format == 0) - return_trace (u.format0.sanitize (c, c->get_num_glyphs ())); - else if (format == 1) - return_trace (u.format1.sanitize (c, c->get_num_glyphs ())); - else if (likely (format == 2)) - return_trace (u.format2.sanitize (c, c->get_num_glyphs ())); - else - return_trace (false); - } - +struct Charset +{ /* serialize a fullset Charset */ bool serialize (hb_serialize_context_t *c, const Charset &src, unsigned int num_glyphs) { TRACE_SERIALIZE (this); unsigned int size = src.get_size (num_glyphs); Charset *dest = c->allocate_size (size); - if (unlikely (dest == nullptr)) return_trace (false); + if (unlikely (!dest)) return_trace (false); memcpy (dest, &src, size); return_trace (true); } @@ -471,93 +458,103 @@ struct Charset { { TRACE_SERIALIZE (this); Charset *dest = c->extend_min (*this); - if (unlikely (dest == nullptr)) return_trace (false); - dest->format.set (format); - if (format == 0) + if (unlikely (!dest)) return_trace (false); + dest->format = format; + switch (format) + { + case 0: { Charset0 *fmt0 = c->allocate_size (Charset0::min_size + HBUINT16::static_size * (num_glyphs - 1)); - if (unlikely (fmt0 == nullptr)) return_trace (false); + if (unlikely (!fmt0)) return_trace (false); unsigned int glyph = 0; for (unsigned int i = 0; i < sid_ranges.length; i++) { hb_codepoint_t sid = sid_ranges[i].code; for (int left = (int)sid_ranges[i].glyph; left >= 0; left--) - fmt0->sids[glyph++].set (sid++); + fmt0->sids[glyph++] = sid++; } } - else if (format == 1) + break; + + case 1: { Charset1 *fmt1 = c->allocate_size (Charset1::min_size + Charset1_Range::static_size * sid_ranges.length); - if (unlikely (fmt1 == nullptr)) return_trace (false); + if (unlikely (!fmt1)) return_trace (false); for (unsigned int i 
= 0; i < sid_ranges.length; i++) { if (unlikely (!(sid_ranges[i].glyph <= 0xFF))) return_trace (false); - fmt1->ranges[i].first.set (sid_ranges[i].code); - fmt1->ranges[i].nLeft.set (sid_ranges[i].glyph); + fmt1->ranges[i].first = sid_ranges[i].code; + fmt1->ranges[i].nLeft = sid_ranges[i].glyph; } } - else /* format 2 */ + break; + + case 2: { Charset2 *fmt2 = c->allocate_size (Charset2::min_size + Charset2_Range::static_size * sid_ranges.length); - if (unlikely (fmt2 == nullptr)) return_trace (false); + if (unlikely (!fmt2)) return_trace (false); for (unsigned int i = 0; i < sid_ranges.length; i++) { if (unlikely (!(sid_ranges[i].glyph <= 0xFFFF))) return_trace (false); - fmt2->ranges[i].first.set (sid_ranges[i].code); - fmt2->ranges[i].nLeft.set (sid_ranges[i].glyph); + fmt2->ranges[i].first = sid_ranges[i].code; + fmt2->ranges[i].nLeft = sid_ranges[i].glyph; } + } + break; + } return_trace (true); } - /* parallel to above: calculate the size of a subset Charset */ - static unsigned int calculate_serialized_size ( - uint8_t format, - unsigned int count) + unsigned int get_size (unsigned int num_glyphs) const { - unsigned int size = min_size; - if (format == 0) - size += Charset0::min_size + HBUINT16::static_size * (count - 1); - else if (format == 1) - size += Charset1::min_size + Charset1_Range::static_size * count; - else - size += Charset2::min_size + Charset2_Range::static_size * count; - - return size; + switch (format) + { + case 0: return min_size + u.format0.get_size (num_glyphs); + case 1: return min_size + u.format1.get_size (num_glyphs); + case 2: return min_size + u.format2.get_size (num_glyphs); + default:return 0; + } } - unsigned int get_size (unsigned int num_glyphs) const + hb_codepoint_t get_sid (hb_codepoint_t glyph, unsigned int num_glyphs) const { - unsigned int size = min_size; - if (format == 0) - size += u.format0.get_size (num_glyphs); - else if (format == 1) - size += u.format1.get_size (num_glyphs); - else - size += u.format2.get_size (num_glyphs); - return size; + if (unlikely (glyph >= num_glyphs)) return 0; + switch (format) + { + case 0: return u.format0.get_sid (glyph); + case 1: return u.format1.get_sid (glyph); + case 2: return u.format2.get_sid (glyph); + default:return 0; + } } - hb_codepoint_t get_sid (hb_codepoint_t glyph) const + hb_codepoint_t get_glyph (hb_codepoint_t sid, unsigned int num_glyphs) const { - if (format == 0) - return u.format0.get_sid (glyph); - else if (format == 1) - return u.format1.get_sid (glyph); - else - return u.format2.get_sid (glyph); + switch (format) + { + case 0: return u.format0.get_glyph (sid, num_glyphs); + case 1: return u.format1.get_glyph (sid, num_glyphs); + case 2: return u.format2.get_glyph (sid, num_glyphs); + default:return 0; + } } - hb_codepoint_t get_glyph (hb_codepoint_t sid, unsigned int num_glyphs) const + bool sanitize (hb_sanitize_context_t *c) const { - if (format == 0) - return u.format0.get_glyph (sid, num_glyphs); - else if (format == 1) - return u.format1.get_glyph (sid, num_glyphs); - else - return u.format2.get_glyph (sid, num_glyphs); + TRACE_SANITIZE (this); + if (unlikely (!c->check_struct (this))) + return_trace (false); + + switch (format) + { + case 0: return_trace (u.format0.sanitize (c, c->get_num_glyphs ())); + case 1: return_trace (u.format1.sanitize (c, c->get_num_glyphs ())); + case 2: return_trace (u.format2.sanitize (c, c->get_num_glyphs ())); + default:return_trace (false); + } } HBUINT8 format; @@ -573,48 +570,32 @@ struct Charset { struct CFF1StringIndex : CFF1Index { bool 
serialize (hb_serialize_context_t *c, const CFF1StringIndex &strings, - unsigned int offSize_, const remap_t &sidmap) + const hb_inc_bimap_t &sidmap) { TRACE_SERIALIZE (this); - if (unlikely ((strings.count == 0) || (sidmap.get_count () == 0))) + if (unlikely ((strings.count == 0) || (sidmap.get_population () == 0))) { - if (!unlikely (c->extend_min (this->count))) + if (unlikely (!c->extend_min (this->count))) return_trace (false); - count.set (0); + count = 0; return_trace (true); } byte_str_array_t bytesArray; bytesArray.init (); - if (!bytesArray.resize (sidmap.get_count ())) + if (!bytesArray.resize (sidmap.get_population ())) return_trace (false); for (unsigned int i = 0; i < strings.count; i++) { hb_codepoint_t j = sidmap[i]; - if (j != CFF_UNDEF_CODE) + if (j != HB_MAP_VALUE_INVALID) bytesArray[j] = strings[i]; } - bool result = CFF1Index::serialize (c, offSize_, bytesArray); + bool result = CFF1Index::serialize (c, bytesArray); bytesArray.fini (); return_trace (result); } - - /* in parallel to above */ - unsigned int calculate_serialized_size (unsigned int &offSize /*OUT*/, const remap_t &sidmap) const - { - offSize = 0; - if ((count == 0) || (sidmap.get_count () == 0)) - return count.static_size; - - unsigned int dataSize = 0; - for (unsigned int i = 0; i < count; i++) - if (sidmap[i] != CFF_UNDEF_CODE) - dataSize += length_at (i); - - offSize = calcOffSize(dataSize); - return CFF1Index::calculate_serialized_size (offSize, sidmap.get_count (), dataSize); - } }; struct cff1_top_dict_interp_env_t : num_interp_env_t @@ -717,7 +698,7 @@ struct cff1_top_dict_values_t : top_dict_values_t unsigned int EncodingOffset; unsigned int CharsetOffset; unsigned int FDSelectOffset; - table_info_t privateDictInfo; + table_info_t privateDictInfo; }; struct cff1_top_dict_opset_t : top_dict_opset_t @@ -859,21 +840,10 @@ struct cff1_private_dict_values_base_t : dict_values_t { dict_values_t::init (); subrsOffset = 0; - localSubrs = &Null(CFF1Subrs); + localSubrs = &Null (CFF1Subrs); } void fini () { dict_values_t::fini (); } - unsigned int calculate_serialized_size () const - { - unsigned int size = 0; - for (unsigned int i = 0; i < dict_values_t::get_count; i++) - if (dict_values_t::get_value (i).op == OpCode_Subrs) - size += OpCode_Size (OpCode_shortint) + 2 + OpCode_Size (OpCode_Subrs); - else - size += dict_values_t::get_value (i).str.length; - return size; - } - unsigned int subrsOffset; const CFF1Subrs *localSubrs; }; @@ -976,6 +946,37 @@ typedef dict_interpreter_t cff1 typedef CFF1Index CFF1NameIndex; typedef CFF1IndexOf CFF1TopDictIndex; +struct cff1_font_dict_values_mod_t +{ + cff1_font_dict_values_mod_t() { init (); } + + void init () { init ( &Null (cff1_font_dict_values_t), CFF_UNDEF_SID ); } + + void init (const cff1_font_dict_values_t *base_, + unsigned int fontName_) + { + base = base_; + fontName = fontName_; + privateDictInfo.init (); + } + + unsigned get_count () const { return base->get_count (); } + + const op_str_t &operator [] (unsigned int i) const { return (*base)[i]; } + + const cff1_font_dict_values_t *base; + table_info_t privateDictInfo; + unsigned int fontName; +}; + +struct CFF1FDArray : FDArray +{ + /* FDArray::serialize() requires this partial specialization to compile */ + template + bool serialize (hb_serialize_context_t *c, ITER it, OP_SERIALIZER& opszr) + { return FDArray::serialize (c, it, opszr); } +}; + } /* namespace CFF */ namespace OT { @@ -1010,7 +1011,7 @@ struct cff1 const OT::cff1 *cff = this->blob->template as (); - if (cff == &Null(OT::cff1)) + if (cff 
== &Null (OT::cff1)) { fini (); return; } nameIndex = &cff->nameIndex (cff); @@ -1031,7 +1032,7 @@ struct cff1 } if (is_predef_charset ()) - charset = &Null(Charset); + charset = &Null (Charset); else { charset = &StructAtOffsetOrNull (cff, topDict.CharsetOffset); @@ -1043,16 +1044,30 @@ struct cff1 { fdArray = &StructAtOffsetOrNull (cff, topDict.FDArrayOffset); fdSelect = &StructAtOffsetOrNull (cff, topDict.FDSelectOffset); - if (unlikely ((fdArray == &Null(CFF1FDArray)) || !fdArray->sanitize (&sc) || - (fdSelect == &Null(CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count))) + if (unlikely ((fdArray == &Null (CFF1FDArray)) || !fdArray->sanitize (&sc) || + (fdSelect == &Null (CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count))) { fini (); return; } fdCount = fdArray->count; } else { - fdArray = &Null(CFF1FDArray); - fdSelect = &Null(CFF1FDSelect); + fdArray = &Null (CFF1FDArray); + fdSelect = &Null (CFF1FDSelect); + } + + encoding = &Null (Encoding); + if (is_CID ()) + { + if (unlikely (charset == &Null (Charset))) { fini (); return; } + } + else + { + if (!is_predef_encoding ()) + { + encoding = &StructAtOffsetOrNull (cff, topDict.EncodingOffset); + if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) { fini (); return; } + } } stringIndex = &StructAtOffset (topDictIndex, topDictIndex->get_size ()); @@ -1065,14 +1080,15 @@ struct cff1 charStrings = &StructAtOffsetOrNull (cff, topDict.charStringsOffset); - if ((charStrings == &Null(CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc))) + if ((charStrings == &Null (CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc))) { fini (); return; } num_glyphs = charStrings->count; if (num_glyphs != sc.get_num_glyphs ()) { fini (); return; } - privateDicts.resize (fdCount); + if (unlikely (!privateDicts.resize (fdCount))) + { fini (); return; } for (unsigned int i = 0; i < fdCount; i++) privateDicts[i].init (); @@ -1083,14 +1099,14 @@ struct cff1 { byte_str_t fontDictStr = (*fdArray)[i]; if (unlikely (!fontDictStr.sanitize (&sc))) { fini (); return; } - cff1_font_dict_values_t *font; + cff1_font_dict_values_t *font; cff1_font_dict_interpreter_t font_interp; font_interp.env.init (fontDictStr); font = fontDicts.push (); - if (unlikely (font == &Crap(cff1_font_dict_values_t))) { fini (); return; } + if (unlikely (font == &Crap (cff1_font_dict_values_t))) { fini (); return; } font->init (); if (unlikely (!font_interp.interpret (*font))) { fini (); return; } - PRIVDICTVAL *priv = &privateDicts[i]; + PRIVDICTVAL *priv = &privateDicts[i]; const byte_str_t privDictStr (StructAtOffset (cff, font->privateDictInfo.offset), font->privateDictInfo.size); if (unlikely (!privDictStr.sanitize (&sc))) { fini (); return; } dict_interpreter_t priv_interp; @@ -1099,15 +1115,15 @@ struct cff1 if (unlikely (!priv_interp.interpret (*priv))) { fini (); return; } priv->localSubrs = &StructAtOffsetOrNull (&privDictStr, priv->subrsOffset); - if (priv->localSubrs != &Null(CFF1Subrs) && + if (priv->localSubrs != &Null (CFF1Subrs) && unlikely (!priv->localSubrs->sanitize (&sc))) { fini (); return; } } } else /* non-CID */ { - cff1_top_dict_values_t *font = &topDict; - PRIVDICTVAL *priv = &privateDicts[0]; + cff1_top_dict_values_t *font = &topDict; + PRIVDICTVAL *priv = &privateDicts[0]; const byte_str_t privDictStr (StructAtOffset (cff, font->privateDictInfo.offset), font->privateDictInfo.size); if (unlikely (!privDictStr.sanitize (&sc))) { fini (); return; } @@ -1117,7 +1133,7 @@ struct cff1 if (unlikely 
(!priv_interp.interpret (*priv))) { fini (); return; } priv->localSubrs = &StructAtOffsetOrNull (&privDictStr, priv->subrsOffset); - if (priv->localSubrs != &Null(CFF1Subrs) && + if (priv->localSubrs != &Null (CFF1Subrs) && unlikely (!priv->localSubrs->sanitize (&sc))) { fini (); return; } } @@ -1133,8 +1149,8 @@ struct cff1 blob = nullptr; } - bool is_valid () const { return blob != nullptr; } - bool is_CID () const { return topDict.is_CID (); } + bool is_valid () const { return blob; } + bool is_CID () const { return topDict.is_CID (); } bool is_predef_charset () const { return topDict.CharsetOffset <= ExpertSubsetCharset; } @@ -1144,144 +1160,232 @@ struct cff1 if (unlikely (sid == CFF_UNDEF_SID)) return 0; - if (charset != &Null(Charset)) + if (charset != &Null (Charset)) return charset->get_glyph (sid, num_glyphs); else if ((topDict.CharsetOffset == ISOAdobeCharset) && (code <= 228 /*zcaron*/)) return sid; return 0; } - protected: - hb_blob_t *blob; - hb_sanitize_context_t sc; - - public: - const Charset *charset; - const CFF1NameIndex *nameIndex; - const CFF1TopDictIndex *topDictIndex; - const CFF1StringIndex *stringIndex; - const CFF1Subrs *globalSubrs; - const CFF1CharStrings *charStrings; - const CFF1FDArray *fdArray; - const CFF1FDSelect *fdSelect; - unsigned int fdCount; - - cff1_top_dict_values_t topDict; - hb_vector_t fontDicts; - hb_vector_t privateDicts; - - unsigned int num_glyphs; - }; - - struct accelerator_t : accelerator_templ_t - { - HB_INTERNAL bool get_extents (hb_codepoint_t glyph, hb_glyph_extents_t *extents) const; - HB_INTERNAL bool get_seac_components (hb_codepoint_t glyph, hb_codepoint_t *base, hb_codepoint_t *accent) const; - }; + bool is_predef_encoding () const { return topDict.EncodingOffset <= ExpertEncoding; } - struct accelerator_subset_t : accelerator_templ_t - { - void init (hb_face_t *face) + hb_codepoint_t glyph_to_code (hb_codepoint_t glyph) const { - SUPER::init (face); - if (blob == nullptr) return; - - const OT::cff1 *cff = this->blob->as (); - encoding = &Null(Encoding); - if (is_CID ()) - { - if (unlikely (charset == &Null(Charset))) { fini (); return; } - } + if (encoding != &Null (Encoding)) + return encoding->get_code (glyph); else { - if (!is_predef_encoding ()) + hb_codepoint_t sid = glyph_to_sid (glyph); + if (sid == 0) return 0; + hb_codepoint_t code = 0; + switch (topDict.EncodingOffset) { - encoding = &StructAtOffsetOrNull (cff, topDict.EncodingOffset); - if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) { fini (); return; } + case StandardEncoding: + code = lookup_standard_encoding_for_code (sid); + break; + case ExpertEncoding: + code = lookup_expert_encoding_for_code (sid); + break; + default: + break; } + return code; } } - bool is_predef_encoding () const { return topDict.EncodingOffset <= ExpertEncoding; } - - hb_codepoint_t glyph_to_code (hb_codepoint_t glyph) const + hb_codepoint_t glyph_to_sid (hb_codepoint_t glyph) const { - if (encoding != &Null(Encoding)) - return encoding->get_code (glyph); + if (charset != &Null (Charset)) + return charset->get_sid (glyph, num_glyphs); else { - hb_codepoint_t sid = glyph_to_sid (glyph); - if (sid == 0) return 0; - hb_codepoint_t code = 0; - switch (topDict.EncodingOffset) + hb_codepoint_t sid = 0; + switch (topDict.CharsetOffset) { - case StandardEncoding: - code = lookup_standard_encoding_for_code (sid); + case ISOAdobeCharset: + if (glyph <= 228 /*zcaron*/) sid = glyph; + break; + case ExpertCharset: + sid = lookup_expert_charset_for_sid (glyph); break; - case 
ExpertEncoding: - code = lookup_expert_encoding_for_code (sid); + case ExpertSubsetCharset: + sid = lookup_expert_subset_charset_for_sid (glyph); break; default: break; } - return code; + return sid; } } - hb_codepoint_t glyph_to_sid (hb_codepoint_t glyph) const + hb_codepoint_t sid_to_glyph (hb_codepoint_t sid) const { - if (charset != &Null(Charset)) - return charset->get_sid (glyph); + if (charset != &Null (Charset)) + return charset->get_glyph (sid, num_glyphs); else { - hb_codepoint_t sid = 0; + hb_codepoint_t glyph = 0; switch (topDict.CharsetOffset) { - case ISOAdobeCharset: - if (glyph <= 228 /*zcaron*/) sid = glyph; + case ISOAdobeCharset: + if (sid <= 228 /*zcaron*/) glyph = sid; break; - case ExpertCharset: - sid = lookup_expert_charset_for_sid (glyph); + case ExpertCharset: + glyph = lookup_expert_charset_for_glyph (sid); break; - case ExpertSubsetCharset: - sid = lookup_expert_subset_charset_for_sid (glyph); + case ExpertSubsetCharset: + glyph = lookup_expert_subset_charset_for_glyph (sid); break; default: break; } - return sid; + return glyph; } } - const Encoding *encoding; + protected: + hb_blob_t *blob; + hb_sanitize_context_t sc; - private: - typedef accelerator_templ_t SUPER; + public: + const Encoding *encoding; + const Charset *charset; + const CFF1NameIndex *nameIndex; + const CFF1TopDictIndex *topDictIndex; + const CFF1StringIndex *stringIndex; + const CFF1Subrs *globalSubrs; + const CFF1CharStrings *charStrings; + const CFF1FDArray *fdArray; + const CFF1FDSelect *fdSelect; + unsigned int fdCount; + + cff1_top_dict_values_t topDict; + hb_vector_t + fontDicts; + hb_vector_t privateDicts; + + unsigned int num_glyphs; }; - bool subset (hb_subset_plan_t *plan) const + struct accelerator_t : accelerator_templ_t { - hb_blob_t *cff_prime = nullptr; - - bool success = true; - if (hb_subset_cff1 (plan, &cff_prime)) { - success = success && plan->add_table (HB_OT_TAG_cff1, cff_prime); - hb_blob_t *head_blob = hb_sanitize_context_t().reference_table (plan->source); - success = success && head_blob && plan->add_table (HB_OT_TAG_head, head_blob); - hb_blob_destroy (head_blob); - } else { - success = false; + void init (hb_face_t *face) + { + SUPER::init (face); + + if (!is_valid ()) return; + if (is_CID ()) return; + + /* fill glyph_names */ + for (hb_codepoint_t gid = 0; gid < num_glyphs; gid++) + { + hb_codepoint_t sid = glyph_to_sid (gid); + gname_t gname; + gname.sid = sid; + if (sid < cff1_std_strings_length) + gname.name = cff1_std_strings (sid); + else + { + byte_str_t ustr = (*stringIndex)[sid - cff1_std_strings_length]; + gname.name = hb_bytes_t ((const char*)ustr.arrayZ, ustr.length); + } + if (unlikely (!gname.name.arrayZ)) { fini (); return; } + glyph_names.push (gname); + } + glyph_names.qsort (); } - hb_blob_destroy (cff_prime); - return success; - } + void fini () + { + glyph_names.fini (); + + SUPER::fini (); + } + + bool get_glyph_name (hb_codepoint_t glyph, + char *buf, unsigned int buf_len) const + { + if (!buf) return true; + if (unlikely (!is_valid ())) return false; + if (is_CID()) return false; + hb_codepoint_t sid = glyph_to_sid (glyph); + const char *str; + size_t str_len; + if (sid < cff1_std_strings_length) + { + hb_bytes_t byte_str = cff1_std_strings (sid); + str = byte_str.arrayZ; + str_len = byte_str.length; + } + else + { + byte_str_t ubyte_str = (*stringIndex)[sid - cff1_std_strings_length]; + str = (const char *)ubyte_str.arrayZ; + str_len = ubyte_str.length; + } + if (!str_len) return false; + unsigned int len = hb_min (buf_len - 1, str_len); + 
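The glyph-name machinery added to the CFF1 accelerator here (the sorted glyph_names vector, get_glyph_name and get_glyph_from_name) is what ultimately serves HarfBuzz's public name queries for CFF fonts. A minimal sketch of exercising it through the C API follows; "font.otf" and the glyph id are placeholder values, and error handling is kept to the bare minimum.

    // Minimal sketch: round-tripping a glyph name through HarfBuzz's public API,
    // which for CFF fonts is backed by the glyph_names table built above.
    #include <hb.h>
    #include <cstdio>

    int main ()
    {
      hb_blob_t *blob = hb_blob_create_from_file ("font.otf"); // placeholder path
      hb_face_t *face = hb_face_create (blob, 0);
      hb_font_t *font = hb_font_create (face);

      char name[64];
      if (hb_font_get_glyph_name (font, 3 /* placeholder gid */, name, sizeof (name)))
      {
        hb_codepoint_t gid;
        if (hb_font_get_glyph_from_name (font, name, -1, &gid))
          printf ("glyph %u is named '%s'\n", gid, name);
      }

      hb_font_destroy (font);
      hb_face_destroy (face);
      hb_blob_destroy (blob);
      return 0;
    }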
strncpy (buf, (const char*)str, len); + buf[len] = '\0'; + return true; + } + + bool get_glyph_from_name (const char *name, int len, + hb_codepoint_t *glyph) const + { + if (len < 0) len = strlen (name); + if (unlikely (!len)) return false; + + gname_t key = { hb_bytes_t (name, len), 0 }; + const gname_t *gname = glyph_names.bsearch (key); + if (!gname) return false; + hb_codepoint_t gid = sid_to_glyph (gname->sid); + if (!gid && gname->sid) return false; + *glyph = gid; + return true; + } + + HB_INTERNAL bool get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const; + HB_INTERNAL bool get_seac_components (hb_codepoint_t glyph, hb_codepoint_t *base, hb_codepoint_t *accent) const; +#ifdef HB_EXPERIMENTAL_API + HB_INTERNAL bool get_path (hb_font_t *font, hb_codepoint_t glyph, draw_helper_t &draw_helper) const; +#endif + + private: + struct gname_t + { + hb_bytes_t name; + uint16_t sid; + + static int cmp (const void *a_, const void *b_) + { + const gname_t *a = (const gname_t *)a_; + const gname_t *b = (const gname_t *)b_; + int minlen = hb_min (a->name.length, b->name.length); + int ret = strncmp (a->name.arrayZ, b->name.arrayZ, minlen); + if (ret) return ret; + return a->name.length - b->name.length; + } + + int cmp (const gname_t &a) const { return cmp (&a, this); } + }; + + hb_sorted_vector_t glyph_names; + + typedef accelerator_templ_t SUPER; + }; + + struct accelerator_subset_t : accelerator_templ_t {}; + + bool subset (hb_subset_context_t *c) const { return hb_subset_cff1 (c); } protected: HB_INTERNAL static hb_codepoint_t lookup_standard_encoding_for_code (hb_codepoint_t sid); HB_INTERNAL static hb_codepoint_t lookup_expert_encoding_for_code (hb_codepoint_t sid); HB_INTERNAL static hb_codepoint_t lookup_expert_charset_for_sid (hb_codepoint_t glyph); HB_INTERNAL static hb_codepoint_t lookup_expert_subset_charset_for_sid (hb_codepoint_t glyph); + HB_INTERNAL static hb_codepoint_t lookup_expert_charset_for_glyph (hb_codepoint_t sid); + HB_INTERNAL static hb_codepoint_t lookup_expert_subset_charset_for_glyph (hb_codepoint_t sid); HB_INTERNAL static hb_codepoint_t lookup_standard_encoding_for_sid (hb_codepoint_t code); public: diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.cc b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.cc index 45eb8bd27610..56f331ed4476 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.cc @@ -24,24 +24,29 @@ * Adobe Author(s): Michiharu Ariza */ +#include "hb.hh" + +#ifndef HB_NO_OT_FONT_CFF + #include "hb-ot-cff2-table.hh" #include "hb-cff2-interp-cs.hh" +#include "hb-draw.hh" using namespace CFF; -struct extents_param_t +struct cff2_extents_param_t { void init () { path_open = false; - min_x.set_int (0x7FFFFFFF); - min_y.set_int (0x7FFFFFFF); - max_x.set_int (-0x80000000); - max_y.set_int (-0x80000000); + min_x.set_int (INT_MAX); + min_y.set_int (INT_MAX); + max_x.set_int (INT_MIN); + max_y.set_int (INT_MIN); } - void start_path () { path_open = true; } - void end_path () { path_open = false; } + void start_path () { path_open = true; } + void end_path () { path_open = false; } bool is_path_open () const { return path_open; } void update_bounds (const point_t &pt) @@ -59,15 +64,15 @@ struct extents_param_t number_t max_y; }; -struct cff2_path_procs_extents_t : path_procs_t +struct cff2_path_procs_extents_t : path_procs_t { - static void moveto (cff2_cs_interp_env_t &env, extents_param_t& param, const point_t 
&pt) + static void moveto (cff2_cs_interp_env_t &env, cff2_extents_param_t& param, const point_t &pt) { param.end_path (); env.moveto (pt); } - static void line (cff2_cs_interp_env_t &env, extents_param_t& param, const point_t &pt1) + static void line (cff2_cs_interp_env_t &env, cff2_extents_param_t& param, const point_t &pt1) { if (!param.is_path_open ()) { @@ -78,7 +83,7 @@ struct cff2_path_procs_extents_t : path_procs_t {}; +struct cff2_cs_opset_extents_t : cff2_cs_opset_t {}; bool OT::cff2::accelerator_t::get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const { +#ifdef HB_NO_OT_FONT_CFF + /* XXX Remove check when this code moves to .hh file. */ + return true; +#endif + if (unlikely (!is_valid () || (glyph >= num_glyphs))) return false; - unsigned int num_coords; - const int *coords = hb_font_get_var_coords_normalized (font, &num_coords); unsigned int fd = fdSelect->get_fd (glyph); - cff2_cs_interpreter_t interp; + cff2_cs_interpreter_t interp; const byte_str_t str = (*charStrings)[glyph]; - interp.env.init (str, *this, fd, coords, num_coords); - extents_param_t param; + interp.env.init (str, *this, fd, font->coords, font->num_coords); + cff2_extents_param_t param; param.init (); if (unlikely (!interp.interpret (param))) return false; @@ -118,8 +126,8 @@ bool OT::cff2::accelerator_t::get_extents (hb_font_t *font, } else { - extents->x_bearing = (int32_t)param.min_x.floor (); - extents->width = (int32_t)param.max_x.ceil () - extents->x_bearing; + extents->x_bearing = font->em_scalef_x (param.min_x.to_real ()); + extents->width = font->em_scalef_x (param.max_x.to_real () - param.min_x.to_real ()); } if (param.min_y >= param.max_y) { @@ -128,9 +136,80 @@ bool OT::cff2::accelerator_t::get_extents (hb_font_t *font, } else { - extents->y_bearing = (int32_t)param.max_y.ceil (); - extents->height = (int32_t)param.min_y.floor () - extents->y_bearing; + extents->y_bearing = font->em_scalef_y (param.max_y.to_real ()); + extents->height = font->em_scalef_y (param.min_y.to_real () - param.max_y.to_real ()); + } + + return true; +} + +#ifdef HB_EXPERIMENTAL_API +struct cff2_path_param_t +{ + cff2_path_param_t (hb_font_t *font_, draw_helper_t &draw_helper_) + { + draw_helper = &draw_helper_; + font = font_; + } + + void move_to (const point_t &p) + { draw_helper->move_to (font->em_scalef_x (p.x.to_real ()), font->em_scalef_y (p.y.to_real ())); } + + void line_to (const point_t &p) + { draw_helper->line_to (font->em_scalef_x (p.x.to_real ()), font->em_scalef_y (p.y.to_real ())); } + + void cubic_to (const point_t &p1, const point_t &p2, const point_t &p3) + { + draw_helper->cubic_to (font->em_scalef_x (p1.x.to_real ()), font->em_scalef_y (p1.y.to_real ()), + font->em_scalef_x (p2.x.to_real ()), font->em_scalef_y (p2.y.to_real ()), + font->em_scalef_x (p3.x.to_real ()), font->em_scalef_y (p3.y.to_real ())); + } + + protected: + draw_helper_t *draw_helper; + hb_font_t *font; +}; + +struct cff2_path_procs_path_t : path_procs_t +{ + static void moveto (cff2_cs_interp_env_t &env, cff2_path_param_t& param, const point_t &pt) + { + param.move_to (pt); + env.moveto (pt); + } + + static void line (cff2_cs_interp_env_t &env, cff2_path_param_t& param, const point_t &pt1) + { + param.line_to (pt1); + env.moveto (pt1); } + static void curve (cff2_cs_interp_env_t &env, cff2_path_param_t& param, const point_t &pt1, const point_t &pt2, const point_t &pt3) + { + param.cubic_to (pt1, pt2, pt3); + env.moveto (pt3); + } +}; + +struct cff2_cs_opset_path_t : cff2_cs_opset_t {}; + +bool 
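Note that get_extents above now pushes the accumulated min/max values through the font's em-scaling helpers instead of returning raw rounded design units, so callers see extents in whatever scale the font object is configured with. A minimal sketch of observing that through the public API, with a placeholder font path and glyph id:

    #include <hb.h>
    #include <cstdio>

    int main ()
    {
      hb_blob_t *blob = hb_blob_create_from_file ("font.otf"); // placeholder path
      hb_face_t *face = hb_face_create (blob, 0);
      hb_font_t *font = hb_font_create (face);
      hb_font_set_scale (font, 1000, 1000);   // extents come back in this scale

      hb_glyph_extents_t extents;
      if (hb_font_get_glyph_extents (font, 42 /* placeholder gid */, &extents))
        printf ("bearing (%d,%d) size %dx%d\n",
                (int) extents.x_bearing, (int) extents.y_bearing,
                (int) extents.width, (int) extents.height);

      hb_font_destroy (font);
      hb_face_destroy (face);
      hb_blob_destroy (blob);
      return 0;
    }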
OT::cff2::accelerator_t::get_path (hb_font_t *font, hb_codepoint_t glyph, draw_helper_t &draw_helper) const +{ +#ifdef HB_NO_OT_FONT_CFF + /* XXX Remove check when this code moves to .hh file. */ + return true; +#endif + + if (unlikely (!is_valid () || (glyph >= num_glyphs))) return false; + + unsigned int fd = fdSelect->get_fd (glyph); + cff2_cs_interpreter_t interp; + const byte_str_t str = (*charStrings)[glyph]; + interp.env.init (str, *this, fd, font->coords, font->num_coords); + cff2_path_param_t param (font, draw_helper); + if (unlikely (!interp.interpret (param))) return false; return true; } +#endif + +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.hh index 8111d503844c..90c0b5b9cf68 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-cff2-table.hh @@ -27,9 +27,9 @@ #ifndef HB_OT_CFF2_TABLE_HH #define HB_OT_CFF2_TABLE_HH -#include "hb-ot-head-table.hh" #include "hb-ot-cff-common.hh" #include "hb-subset-cff2.hh" +#include "hb-draw.hh" namespace CFF { @@ -43,7 +43,6 @@ typedef CFFIndex CFF2Index; template struct CFF2IndexOf : CFFIndexOf {}; typedef CFF2Index CFF2CharStrings; -typedef FDArray CFF2FDArray; typedef Subrs CFF2Subrs; typedef FDSelect3_4 FDSelect4; @@ -51,62 +50,63 @@ typedef FDSelect3_4_Range FDSelect4_Range; struct CFF2FDSelect { - bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const - { - TRACE_SANITIZE (this); - - return_trace (likely (c->check_struct (this) && (format == 0 || format == 3 || format == 4) && - (format == 0)? - u.format0.sanitize (c, fdcount): - ((format == 3)? - u.format3.sanitize (c, fdcount): - u.format4.sanitize (c, fdcount)))); - } - bool serialize (hb_serialize_context_t *c, const CFF2FDSelect &src, unsigned int num_glyphs) { TRACE_SERIALIZE (this); unsigned int size = src.get_size (num_glyphs); CFF2FDSelect *dest = c->allocate_size (size); - if (unlikely (dest == nullptr)) return_trace (false); + if (unlikely (!dest)) return_trace (false); memcpy (dest, &src, size); return_trace (true); } - unsigned int calculate_serialized_size (unsigned int num_glyphs) const - { return get_size (num_glyphs); } - unsigned int get_size (unsigned int num_glyphs) const { - unsigned int size = format.static_size; - if (format == 0) - size += u.format0.get_size (num_glyphs); - else if (format == 3) - size += u.format3.get_size (); - else - size += u.format4.get_size (); - return size; + switch (format) + { + case 0: return format.static_size + u.format0.get_size (num_glyphs); + case 3: return format.static_size + u.format3.get_size (); + case 4: return format.static_size + u.format4.get_size (); + default:return 0; + } } hb_codepoint_t get_fd (hb_codepoint_t glyph) const { - if (this == &Null(CFF2FDSelect)) + if (this == &Null (CFF2FDSelect)) return 0; - if (format == 0) - return u.format0.get_fd (glyph); - else if (format == 3) - return u.format3.get_fd (glyph); - else - return u.format4.get_fd (glyph); + + switch (format) + { + case 0: return u.format0.get_fd (glyph); + case 3: return u.format3.get_fd (glyph); + case 4: return u.format4.get_fd (glyph); + default:return 0; + } + } + + bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const + { + TRACE_SANITIZE (this); + if (unlikely (!c->check_struct (this))) + return_trace (false); + + switch (format) + { + case 0: return_trace (u.format0.sanitize (c, fdcount)); + case 3: return_trace (u.format3.sanitize (c, 
fdcount)); + case 4: return_trace (u.format4.sanitize (c, fdcount)); + default:return_trace (false); + } } HBUINT8 format; union { - FDSelect0 format0; - FDSelect3 format3; - FDSelect4 format4; + FDSelect0 format0; + FDSelect3 format3; + FDSelect4 format4; } u; - + public: DEFINE_SIZE_MIN (2); }; @@ -123,7 +123,7 @@ struct CFF2VariationStore TRACE_SERIALIZE (this); unsigned int size_ = varStore->get_size (); CFF2VariationStore *dest = c->allocate_size (size_); - if (unlikely (dest == nullptr)) return_trace (false); + if (unlikely (!dest)) return_trace (false); memcpy (dest, varStore, size_); return_trace (true); } @@ -146,26 +146,6 @@ struct cff2_top_dict_values_t : top_dict_values_t<> } void fini () { top_dict_values_t<>::fini (); } - unsigned int calculate_serialized_size () const - { - unsigned int size = 0; - for (unsigned int i = 0; i < get_count (); i++) - { - op_code_t op = get_value (i).op; - switch (op) - { - case OpCode_vstore: - case OpCode_FDSelect: - size += OpCode_Size (OpCode_longintdict) + 4 + OpCode_Size (op); - break; - default: - size += top_dict_values_t<>::calculate_serialized_op_size (get_value (i)); - break; - } - } - return size; - } - unsigned int vstoreOffset; unsigned int FDSelectOffset; }; @@ -252,22 +232,11 @@ struct cff2_private_dict_values_base_t : dict_values_t { dict_values_t::init (); subrsOffset = 0; - localSubrs = &Null(CFF2Subrs); + localSubrs = &Null (CFF2Subrs); ivs = 0; } void fini () { dict_values_t::fini (); } - unsigned int calculate_serialized_size () const - { - unsigned int size = 0; - for (unsigned int i = 0; i < dict_values_t::get_count; i++) - if (dict_values_t::get_value (i).op == OpCode_Subrs) - size += OpCode_Size (OpCode_shortint) + 2 + OpCode_Size (OpCode_Subrs); - else - size += dict_values_t::get_value (i).str.length; - return size; - } - unsigned int subrsOffset; const CFF2Subrs *localSubrs; unsigned int ivs; @@ -400,6 +369,14 @@ struct cff2_private_dict_opset_subset_t : dict_opset_t typedef dict_interpreter_t cff2_top_dict_interpreter_t; typedef dict_interpreter_t cff2_font_dict_interpreter_t; +struct CFF2FDArray : FDArray +{ + /* FDArray::serialize does not compile without this partial specialization */ + template + bool serialize (hb_serialize_context_t *c, ITER it, OP_SERIALIZER& opszr) + { return FDArray::serialize (c, it, opszr); } +}; + } /* namespace CFF */ namespace OT { @@ -434,7 +411,7 @@ struct cff2 const OT::cff2 *cff2 = this->blob->template as (); - if (cff2 == &Null(OT::cff2)) + if (cff2 == &Null (OT::cff2)) { fini (); return; } { /* parse top dict */ @@ -452,11 +429,11 @@ struct cff2 fdArray = &StructAtOffsetOrNull (cff2, topDict.FDArrayOffset); fdSelect = &StructAtOffsetOrNull (cff2, topDict.FDSelectOffset); - if (((varStore != &Null(CFF2VariationStore)) && unlikely (!varStore->sanitize (&sc))) || - (charStrings == &Null(CFF2CharStrings)) || unlikely (!charStrings->sanitize (&sc)) || - (globalSubrs == &Null(CFF2Subrs)) || unlikely (!globalSubrs->sanitize (&sc)) || - (fdArray == &Null(CFF2FDArray)) || unlikely (!fdArray->sanitize (&sc)) || - (((fdSelect != &Null(CFF2FDSelect)) && unlikely (!fdSelect->sanitize (&sc, fdArray->count))))) + if (((varStore != &Null (CFF2VariationStore)) && unlikely (!varStore->sanitize (&sc))) || + (charStrings == &Null (CFF2CharStrings)) || unlikely (!charStrings->sanitize (&sc)) || + (globalSubrs == &Null (CFF2Subrs)) || unlikely (!globalSubrs->sanitize (&sc)) || + (fdArray == &Null (CFF2FDArray)) || unlikely (!fdArray->sanitize (&sc)) || + (((fdSelect != &Null (CFF2FDSelect)) && 
unlikely (!fdSelect->sanitize (&sc, fdArray->count))))) { fini (); return; } num_glyphs = charStrings->count; @@ -464,7 +441,8 @@ struct cff2 { fini (); return; } fdCount = fdArray->count; - privateDicts.resize (fdCount); + if (!privateDicts.resize (fdCount)) + { fini (); return; } /* parse font dicts and gather private dicts */ for (unsigned int i = 0; i < fdCount; i++) @@ -475,7 +453,7 @@ struct cff2 cff2_font_dict_interpreter_t font_interp; font_interp.env.init (fontDictStr); font = fontDicts.push (); - if (unlikely (font == &Crap(cff2_font_dict_values_t))) { fini (); return; } + if (unlikely (font == &Crap (cff2_font_dict_values_t))) { fini (); return; } font->init (); if (unlikely (!font_interp.interpret (*font))) { fini (); return; } @@ -487,7 +465,7 @@ struct cff2 if (unlikely (!priv_interp.interpret (privateDicts[i]))) { fini (); return; } privateDicts[i].localSubrs = &StructAtOffsetOrNull (&privDictStr[0], privateDicts[i].subrsOffset); - if (privateDicts[i].localSubrs != &Null(CFF2Subrs) && + if (privateDicts[i].localSubrs != &Null (CFF2Subrs) && unlikely (!privateDicts[i].localSubrs->sanitize (&sc))) { fini (); return; } } @@ -503,7 +481,7 @@ struct cff2 blob = nullptr; } - bool is_valid () const { return blob != nullptr; } + bool is_valid () const { return blob; } protected: hb_blob_t *blob; @@ -529,27 +507,14 @@ struct cff2 HB_INTERNAL bool get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const; +#ifdef HB_EXPERIMENTAL_API + HB_INTERNAL bool get_path (hb_font_t *font, hb_codepoint_t glyph, draw_helper_t &draw_helper) const; +#endif }; typedef accelerator_templ_t accelerator_subset_t; - bool subset (hb_subset_plan_t *plan) const - { - hb_blob_t *cff2_prime = nullptr; - - bool success = true; - if (hb_subset_cff2 (plan, &cff2_prime)) { - success = success && plan->add_table (HB_OT_TAG_cff2, cff2_prime); - hb_blob_t *head_blob = hb_sanitize_context_t().reference_table (plan->source); - success = success && head_blob && plan->add_table (HB_OT_TAG_head, head_blob); - hb_blob_destroy (head_blob); - } else { - success = false; - } - hb_blob_destroy (cff2_prime); - - return success; - } + bool subset (hb_subset_context_t *c) const { return hb_subset_cff2 (c); } public: FixedVersion version; /* Version of CFF2 table. 
set to 0x0200u */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-cmap-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-cmap-table.hh index 9e2ada67c4a5..7160b168a7a9 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-cmap-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-cmap-table.hh @@ -56,6 +56,18 @@ struct CmapSubtableFormat0 out->add (i); } + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping /* OUT */) const + { + for (unsigned i = 0; i < 256; i++) + if (glyphIdArray[i]) + { + hb_codepoint_t glyph = glyphIdArray[i]; + unicodes->add (i); + mapping->set (i, glyph); + } + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -74,154 +86,203 @@ struct CmapSubtableFormat0 struct CmapSubtableFormat4 { - struct segment_plan - { - HBUINT16 start_code; - HBUINT16 end_code; - bool use_delta; - }; - bool serialize (hb_serialize_context_t *c, - const hb_subset_plan_t *plan, - const hb_vector_t &segments) + template + HBUINT16* serialize_endcode_array (hb_serialize_context_t *c, + Iterator it) { - TRACE_SERIALIZE (this); - - if (unlikely (!c->extend_min (*this))) return_trace (false); + HBUINT16 *endCode = c->start_embed (); + hb_codepoint_t prev_endcp = 0xFFFF; - this->format.set (4); - this->length.set (get_sub_table_size (segments)); - - this->segCountX2.set (segments.length * 2); - this->entrySelector.set (MAX (1u, hb_bit_storage (segments.length)) - 1); - this->searchRange.set (2 * (1u << this->entrySelector)); - this->rangeShift.set (segments.length * 2 > this->searchRange - ? 2 * segments.length - this->searchRange - : 0); - - HBUINT16 *end_count = c->allocate_size (HBUINT16::static_size * segments.length); - c->allocate_size (HBUINT16::static_size); // 2 bytes of padding. - HBUINT16 *start_count = c->allocate_size (HBUINT16::static_size * segments.length); - HBINT16 *id_delta = c->allocate_size (HBUINT16::static_size * segments.length); - HBUINT16 *id_range_offset = c->allocate_size (HBUINT16::static_size * segments.length); - - if (id_range_offset == nullptr) - return_trace (false); + for (const hb_item_type _ : +it) + { + if (prev_endcp != 0xFFFF && prev_endcp + 1u != _.first) + { + HBUINT16 end_code; + end_code = prev_endcp; + c->copy (end_code); + } + prev_endcp = _.first; + } - for (unsigned int i = 0; i < segments.length; i++) { - end_count[i].set (segments[i].end_code); - start_count[i].set (segments[i].start_code); - if (segments[i].use_delta) + // last endCode + HBUINT16 endcode; + endcode = prev_endcp; + if (unlikely (!c->copy (endcode))) return nullptr; + // There must be a final entry with end_code == 0xFFFF. 
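The serialize_* helpers above and below rebuild a cmap format 4 subtable directly from a sorted (codepoint, new glyph id) iterator: a segment closes whenever the codepoints stop being consecutive, idDelta is only usable while the glyph ids stay consecutive within a segment, and a final 0xFFFF sentinel segment is always present. A standalone sketch of that segmentation rule, using plain standard-library types rather than HarfBuzz's iterator framework:

    // Standalone sketch (not HarfBuzz code): deriving cmap format 4 style segments
    // from a sorted (codepoint -> glyph id) mapping, mirroring the logic above.
    #include <cstdint>
    #include <vector>
    #include <utility>

    struct Segment { uint32_t start, end; bool use_delta; };

    static std::vector<Segment>
    build_segments (const std::vector<std::pair<uint32_t, uint32_t>> &cp_to_gid)
    {
      std::vector<Segment> segs;
      uint32_t last_gid = 0;
      for (auto &m : cp_to_gid)
      {
        if (m.first > 0xFFFFu) break;                 // format 4 covers the BMP only
        if (segs.empty () || m.first != segs.back ().end + 1)
          segs.push_back ({m.first, m.first, true});  // open a new segment
        else
        {
          segs.back ().end = m.first;
          if (m.second != last_gid + 1)
            segs.back ().use_delta = false;           // gids not consecutive: needs glyphIdArray
        }
        last_gid = m.second;
      }
      if (segs.empty () || segs.back ().end != 0xFFFFu)
        segs.push_back ({0xFFFFu, 0xFFFFu, true});    // mandatory final 0xFFFF sentinel
      return segs;
    }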
+ if (prev_endcp != 0xFFFF) { - hb_codepoint_t cp = segments[i].start_code; - hb_codepoint_t start_gid = 0; - if (unlikely (!plan->new_gid_for_codepoint (cp, &start_gid) && cp != 0xFFFF)) - return_trace (false); - id_delta[i].set (start_gid - segments[i].start_code); - } else { - id_delta[i].set (0); - unsigned int num_codepoints = segments[i].end_code - segments[i].start_code + 1; - HBUINT16 *glyph_id_array = c->allocate_size (HBUINT16::static_size * num_codepoints); - if (glyph_id_array == nullptr) - return_trace (false); - // From the cmap spec: - // - // id_range_offset[i]/2 - // + (cp - segments[i].start_code) - // + (id_range_offset + i) - // = - // glyph_id_array + (cp - segments[i].start_code) - // - // So, solve for id_range_offset[i]: - // - // id_range_offset[i] - // = - // 2 * (glyph_id_array - id_range_offset - i) - id_range_offset[i].set (2 * ( - glyph_id_array - id_range_offset - i)); - for (unsigned int j = 0; j < num_codepoints; j++) - { - hb_codepoint_t cp = segments[i].start_code + j; - hb_codepoint_t new_gid; - if (unlikely (!plan->new_gid_for_codepoint (cp, &new_gid))) - return_trace (false); - glyph_id_array[j].set (new_gid); - } + HBUINT16 finalcode; + finalcode = 0xFFFF; + if (unlikely (!c->copy (finalcode))) return nullptr; } } - return_trace (true); + return endCode; } - static size_t get_sub_table_size (const hb_vector_t &segments) + template + HBUINT16* serialize_startcode_array (hb_serialize_context_t *c, + Iterator it) { - size_t segment_size = 0; - for (unsigned int i = 0; i < segments.length; i++) + HBUINT16 *startCode = c->start_embed (); + hb_codepoint_t prev_cp = 0xFFFF; + + for (const hb_item_type _ : +it) { - // Parallel array entries - segment_size += - 2 // end count - + 2 // start count - + 2 // delta - + 2; // range offset - - if (!segments[i].use_delta) - // Add bytes for the glyph index array entries for this segment. - segment_size += (segments[i].end_code - segments[i].start_code + 1) * 2; + if (prev_cp == 0xFFFF || prev_cp + 1u != _.first) + { + HBUINT16 start_code; + start_code = _.first; + c->copy (start_code); + } + + prev_cp = _.first; } - return min_size - + 2 // Padding - + segment_size; + // There must be a final entry with end_code == 0xFFFF. + if (it.len () == 0 || prev_cp != 0xFFFF) + { + HBUINT16 finalcode; + finalcode = 0xFFFF; + if (unlikely (!c->copy (finalcode))) return nullptr; + } + + return startCode; } - static bool create_sub_table_plan (const hb_subset_plan_t *plan, - hb_vector_t *segments) + template + HBINT16* serialize_idDelta_array (hb_serialize_context_t *c, + Iterator it, + HBUINT16 *endCode, + HBUINT16 *startCode, + unsigned segcount) { - segment_plan *segment = nullptr; - hb_codepoint_t last_gid = 0; + unsigned i = 0; + hb_codepoint_t last_gid = 0, start_gid = 0, last_cp = 0xFFFF; + bool use_delta = true; - hb_codepoint_t cp = HB_SET_VALUE_INVALID; - while (plan->unicodes->next (&cp)) { - hb_codepoint_t new_gid; - if (unlikely (!plan->new_gid_for_codepoint (cp, &new_gid))) + HBINT16 *idDelta = c->start_embed (); + if ((char *)idDelta - (char *)startCode != (int) segcount * (int) HBINT16::static_size) + return nullptr; + + for (const hb_item_type _ : +it) + { + if (_.first == startCode[i]) { - DEBUG_MSG(SUBSET, nullptr, "Unable to find new gid for %04x", cp); - return false; + use_delta = true; + start_gid = _.second; } + else if (_.second != last_gid + 1) use_delta = false; - /* Stop adding to cmap if we are now outside of unicode BMP. 
*/ - if (cp > 0xFFFF) break; - - if (!segment || - cp != segment->end_code + 1u) + if (_.first == endCode[i]) { - segment = segments->push (); - segment->start_code.set (cp); - segment->end_code.set (cp); - segment->use_delta = true; - } else { - segment->end_code.set (cp); - if (last_gid + 1u != new_gid) - // gid's are not consecutive in this segment so delta - // cannot be used. - segment->use_delta = false; + HBINT16 delta; + if (use_delta) delta = (int)start_gid - (int)startCode[i]; + else delta = 0; + c->copy (delta); + + i++; } - last_gid = new_gid; + last_gid = _.second; + last_cp = _.first; } - // There must be a final entry with end_code == 0xFFFF. Check if we need to add one. - if (segment == nullptr || segment->end_code != 0xFFFF) + if (it.len () == 0 || last_cp != 0xFFFF) { - segment = segments->push (); - segment->start_code.set (0xFFFF); - segment->end_code.set (0xFFFF); - segment->use_delta = true; + HBINT16 delta; + delta = 1; + if (unlikely (!c->copy (delta))) return nullptr; } - return true; + return idDelta; + } + + template + HBUINT16* serialize_rangeoffset_glyid (hb_serialize_context_t *c, + Iterator it, + HBUINT16 *endCode, + HBUINT16 *startCode, + HBINT16 *idDelta, + unsigned segcount) + { + HBUINT16 *idRangeOffset = c->allocate_size (HBUINT16::static_size * segcount); + if (unlikely (!c->check_success (idRangeOffset))) return nullptr; + if (unlikely ((char *)idRangeOffset - (char *)idDelta != (int) segcount * (int) HBINT16::static_size)) return nullptr; + + + hb_range (segcount) + | hb_filter ([&] (const unsigned _) { return idDelta[_] == 0; }) + | hb_apply ([&] (const unsigned i) + { + idRangeOffset[i] = 2 * (c->start_embed () - idRangeOffset - i); + + + it + | hb_filter ([&] (const hb_item_type _) { return _.first >= startCode[i] && _.first <= endCode[i]; }) + | hb_apply ([&] (const hb_item_type _) + { + HBUINT16 glyID; + glyID = _.second; + c->copy (glyID); + }) + ; + + + }) + ; + + return idRangeOffset; + } + + template + void serialize (hb_serialize_context_t *c, + Iterator it) + { + auto format4_iter = + + it + | hb_filter ([&] (const hb_pair_t _) + { return _.first <= 0xFFFF; }) + ; + + if (format4_iter.len () == 0) return; + + unsigned table_initpos = c->length (); + if (unlikely (!c->extend_min (*this))) return; + this->format = 4; + + //serialize endCode[] + HBUINT16 *endCode = serialize_endcode_array (c, format4_iter); + if (unlikely (!endCode)) return; + + unsigned segcount = (c->length () - min_size) / HBUINT16::static_size; + + // 2 bytes of padding. + if (unlikely (!c->allocate_size (HBUINT16::static_size))) return; // 2 bytes of padding. + + // serialize startCode[] + HBUINT16 *startCode = serialize_startcode_array (c, format4_iter); + if (unlikely (!startCode)) return; + + //serialize idDelta[] + HBINT16 *idDelta = serialize_idDelta_array (c, format4_iter, endCode, startCode, segcount); + if (unlikely (!idDelta)) return; + + HBUINT16 *idRangeOffset = serialize_rangeoffset_glyid (c, format4_iter, endCode, startCode, idDelta, segcount); + if (unlikely (!c->check_success (idRangeOffset))) return; + + if (unlikely (!c->check_assign(this->length, c->length () - table_initpos))) return; + this->segCountX2 = segcount * 2; + this->entrySelector = hb_max (1u, hb_bit_storage (segcount)) - 1; + this->searchRange = 2 * (1u << this->entrySelector); + this->rangeShift = segcount * 2 > this->searchRange + ? 
2 * segcount - this->searchRange + : 0; } struct accelerator_t @@ -244,27 +305,28 @@ struct CmapSubtableFormat4 bool get_glyph (hb_codepoint_t codepoint, hb_codepoint_t *glyph) const { - /* Custom two-array bsearch. */ - int min = 0, max = (int) this->segCount - 1; - const HBUINT16 *startCount = this->startCount; - const HBUINT16 *endCount = this->endCount; - unsigned int i; - while (min <= max) + struct CustomRange { - int mid = ((unsigned int) min + (unsigned int) max) / 2; - if (codepoint < startCount[mid]) - max = mid - 1; - else if (codepoint > endCount[mid]) - min = mid + 1; - else + int cmp (hb_codepoint_t k, + unsigned distance) const { - i = mid; - goto found; + if (k > last) return +1; + if (k < (&last)[distance]) return -1; + return 0; } - } - return false; + HBUINT16 last; + }; + + const HBUINT16 *found = hb_bsearch (codepoint, + this->endCount, + this->segCount, + 2, + _hb_cmp_method, + this->segCount + 1); + if (!found) + return false; + unsigned int i = found - endCount; - found: hb_codepoint_t gid; unsigned int rangeOffset = this->idRangeOffset[i]; if (rangeOffset == 0) @@ -286,10 +348,10 @@ struct CmapSubtableFormat4 *glyph = gid; return true; } - static bool get_glyph_func (const void *obj, hb_codepoint_t codepoint, hb_codepoint_t *glyph) - { - return ((const accelerator_t *) obj)->get_glyph (codepoint, glyph); - } + + HB_INTERNAL static bool get_glyph_func (const void *obj, hb_codepoint_t codepoint, hb_codepoint_t *glyph) + { return ((const accelerator_t *) obj)->get_glyph (codepoint, glyph); } + void collect_unicodes (hb_set_t *out) const { unsigned int count = this->segCount; @@ -297,14 +359,22 @@ struct CmapSubtableFormat4 count--; /* Skip sentinel segment. */ for (unsigned int i = 0; i < count; i++) { + hb_codepoint_t start = this->startCount[i]; + hb_codepoint_t end = this->endCount[i]; unsigned int rangeOffset = this->idRangeOffset[i]; if (rangeOffset == 0) - out->add_range (this->startCount[i], this->endCount[i]); + { + for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++) + { + hb_codepoint_t gid = (codepoint + this->idDelta[i]) & 0xFFFFu; + if (unlikely (!gid)) + continue; + out->add (codepoint); + } + } else { - for (hb_codepoint_t codepoint = this->startCount[i]; - codepoint <= this->endCount[i]; - codepoint++) + for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++) { unsigned int index = rangeOffset / 2 + (codepoint - this->startCount[i]) + i - this->segCount; if (unlikely (index >= this->glyphIdArrayLength)) @@ -318,6 +388,45 @@ struct CmapSubtableFormat4 } } + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping /* OUT */) const + { + unsigned count = this->segCount; + if (count && this->startCount[count - 1] == 0xFFFFu) + count--; /* Skip sentinel segment. 
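The format 4 lookup above replaces the hand-rolled two-array binary search with hb_bsearch over endCount followed by a startCount check. A simplified sketch of the same lookup using std::lower_bound, assuming the parallel startCount/endCount arrays have already been located:

    // Simplified sketch: binary-search the first segment whose endCode >= codepoint,
    // then make sure the codepoint is not below that segment's startCode.
    #include <cstdint>
    #include <algorithm>

    static bool
    find_segment (const uint16_t *startCount, const uint16_t *endCount,
                  unsigned seg_count, uint16_t codepoint, unsigned *seg_out)
    {
      const uint16_t *found = std::lower_bound (endCount, endCount + seg_count, codepoint);
      if (found == endCount + seg_count) return false;  // past the 0xFFFF sentinel
      unsigned i = (unsigned) (found - endCount);
      if (codepoint < startCount[i]) return false;       // falls in a gap between segments
      *seg_out = i;
      return true;
    }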
*/ + for (unsigned i = 0; i < count; i++) + { + hb_codepoint_t start = this->startCount[i]; + hb_codepoint_t end = this->endCount[i]; + unsigned rangeOffset = this->idRangeOffset[i]; + if (rangeOffset == 0) + { + for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++) + { + hb_codepoint_t gid = (codepoint + this->idDelta[i]) & 0xFFFFu; + if (unlikely (!gid)) + continue; + unicodes->add (codepoint); + mapping->set (codepoint, gid); + } + } + else + { + for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++) + { + unsigned index = rangeOffset / 2 + (codepoint - this->startCount[i]) + i - this->segCount; + if (unlikely (index >= this->glyphIdArrayLength)) + break; + hb_codepoint_t gid = this->glyphIdArray[index]; + if (unlikely (!gid)) + continue; + unicodes->add (codepoint); + mapping->set (codepoint, gid); + } + } + } + } + const HBUINT16 *endCount; const HBUINT16 *startCount; const HBUINT16 *idDelta; @@ -338,6 +447,13 @@ struct CmapSubtableFormat4 accel.collect_unicodes (out); } + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping /* OUT */) const + { + accelerator_t accel (this); + accel.collect_mapping (unicodes, mapping); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -349,9 +465,9 @@ struct CmapSubtableFormat4 /* Some broken fonts have too long of a "length" value. * If that is the case, just change the value to truncate * the subtable at the end of the blob. */ - uint16_t new_length = (uint16_t) MIN ((uintptr_t) 65535, - (uintptr_t) (c->end - - (char *) this)); + uint16_t new_length = (uint16_t) hb_min ((uintptr_t) 65535, + (uintptr_t) (c->end - + (char *) this)); if (!c->try_set (&length, new_length)) return_trace (false); } @@ -440,6 +556,21 @@ struct CmapSubtableTrimmed out->add (start + i); } + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping /* OUT */) const + { + hb_codepoint_t start_cp = startCharCode; + unsigned count = glyphIdArray.len; + for (unsigned i = 0; i < count; i++) + if (glyphIdArray[i]) + { + hb_codepoint_t unicode = start_cp + i; + hb_codepoint_t glyphid = glyphIdArray[i]; + unicodes->add (unicode); + mapping->set (unicode, glyphid); + } + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -451,7 +582,7 @@ struct CmapSubtableTrimmed UINT length; /* Byte length of this subtable. */ UINT language; /* Ignore. */ UINT startCharCode; /* First character code covered. */ - ArrayOf + ArrayOf glyphIdArray; /* Array of glyph index values for character * codes in the range. */ public: @@ -475,12 +606,56 @@ struct CmapSubtableLongSegmented return true; } - void collect_unicodes (hb_set_t *out) const + void collect_unicodes (hb_set_t *out, unsigned int num_glyphs) const + { + for (unsigned int i = 0; i < this->groups.len; i++) + { + hb_codepoint_t start = this->groups[i].startCharCode; + hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups[i].endCharCode, + (hb_codepoint_t) HB_UNICODE_MAX); + hb_codepoint_t gid = this->groups[i].glyphID; + if (!gid) + { + /* Intention is: if (hb_is_same (T, CmapSubtableFormat13)) continue; */ + if (! 
T::group_get_glyph (this->groups[i], end)) continue; + start++; + gid++; + } + if (unlikely ((unsigned int) gid >= num_glyphs)) continue; + if (unlikely ((unsigned int) (gid + end - start) >= num_glyphs)) + end = start + (hb_codepoint_t) num_glyphs - gid; + + out->add_range (start, end); + } + } + + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping, /* OUT */ + unsigned num_glyphs) const { - for (unsigned int i = 0; i < this->groups.len; i++) { - out->add_range (this->groups[i].startCharCode, - MIN ((hb_codepoint_t) this->groups[i].endCharCode, - (hb_codepoint_t) HB_UNICODE_MAX)); + for (unsigned i = 0; i < this->groups.len; i++) + { + hb_codepoint_t start = this->groups[i].startCharCode; + hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups[i].endCharCode, + (hb_codepoint_t) HB_UNICODE_MAX); + hb_codepoint_t gid = this->groups[i].glyphID; + if (!gid) + { + /* Intention is: if (hb_is_same (T, CmapSubtableFormat13)) continue; */ + if (! T::group_get_glyph (this->groups[i], end)) continue; + start++; + gid++; + } + if (unlikely ((unsigned int) gid >= num_glyphs)) continue; + if (unlikely ((unsigned int) (gid + end - start) >= num_glyphs)) + end = start + (hb_codepoint_t) num_glyphs - gid; + + for (unsigned cp = start; cp <= end; cp++) + { + unicodes->add (cp); + mapping->set (cp, gid); + gid++; + } } } @@ -490,15 +665,6 @@ struct CmapSubtableLongSegmented return_trace (c->check_struct (this) && groups.sanitize (c)); } - bool serialize (hb_serialize_context_t *c, - const hb_vector_t &group_data) - { - TRACE_SERIALIZE (this); - if (unlikely (!c->extend_min (*this))) return_trace (false); - if (unlikely (!groups.serialize (c, group_data.as_array ()))) return_trace (false); - return true; - } - protected: HBUINT16 format; /* Subtable format; set to 12. */ HBUINT16 reserved; /* Reserved; set to 0. 
*/ @@ -518,63 +684,66 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented group.glyphID + (u - group.startCharCode) : 0; } - bool serialize (hb_serialize_context_t *c, - const hb_vector_t &groups) + template + void serialize (hb_serialize_context_t *c, + Iterator it) { - if (unlikely (!c->extend_min (*this))) return false; - - this->format.set (12); - this->reserved.set (0); - this->length.set (get_sub_table_size (groups)); + if (it.len () == 0) return; + unsigned table_initpos = c->length (); + if (unlikely (!c->extend_min (*this))) return; - return CmapSubtableLongSegmented::serialize (c, groups); - } + hb_codepoint_t startCharCode = 0xFFFF, endCharCode = 0xFFFF; + hb_codepoint_t glyphID = 0; - static size_t get_sub_table_size (const hb_vector_t &groups) - { - return 16 + 12 * groups.length; - } - - static bool create_sub_table_plan (const hb_subset_plan_t *plan, - hb_vector_t *groups) - { - CmapSubtableLongGroup *group = nullptr; - - hb_codepoint_t cp = HB_SET_VALUE_INVALID; - while (plan->unicodes->next (&cp)) { - hb_codepoint_t new_gid; - if (unlikely (!plan->new_gid_for_codepoint (cp, &new_gid))) + for (const hb_item_type _ : +it) + { + if (startCharCode == 0xFFFF) { - DEBUG_MSG(SUBSET, nullptr, "Unable to find new gid for %04x", cp); - return false; + startCharCode = _.first; + endCharCode = _.first; + glyphID = _.second; } - - if (!group || !_is_gid_consecutive (group, cp, new_gid)) + else if (!_is_gid_consecutive (endCharCode, startCharCode, glyphID, _.first, _.second)) { - group = groups->push (); - group->startCharCode.set (cp); - group->endCharCode.set (cp); - group->glyphID.set (new_gid); + CmapSubtableLongGroup grouprecord; + grouprecord.startCharCode = startCharCode; + grouprecord.endCharCode = endCharCode; + grouprecord.glyphID = glyphID; + c->copy (grouprecord); + + startCharCode = _.first; + endCharCode = _.first; + glyphID = _.second; } - else group->endCharCode.set (cp); + else + endCharCode = _.first; } - DEBUG_MSG(SUBSET, nullptr, "cmap"); - for (unsigned int i = 0; i < groups->length; i++) { - CmapSubtableLongGroup& group = (*groups)[i]; - DEBUG_MSG(SUBSET, nullptr, " %d: U+%04X-U+%04X, gid %d-%d", i, (uint32_t) group.startCharCode, (uint32_t) group.endCharCode, (uint32_t) group.glyphID, (uint32_t) group.glyphID + ((uint32_t) group.endCharCode - (uint32_t) group.startCharCode)); - } + CmapSubtableLongGroup record; + record.startCharCode = startCharCode; + record.endCharCode = endCharCode; + record.glyphID = glyphID; + c->copy (record); - return true; + this->format = 12; + this->reserved = 0; + this->length = c->length () - table_initpos; + this->groups.len = (this->length - min_size)/CmapSubtableLongGroup::static_size; } - private: - static bool _is_gid_consecutive (CmapSubtableLongGroup *group, + static size_t get_sub_table_size (const hb_sorted_vector_t &groups_data) + { return 16 + 12 * groups_data.length; } + + private: + static bool _is_gid_consecutive (hb_codepoint_t endCharCode, + hb_codepoint_t startCharCode, + hb_codepoint_t glyphID, hb_codepoint_t cp, hb_codepoint_t new_gid) { - return (cp - 1 == group->endCharCode) && - new_gid == group->glyphID + (cp - group->startCharCode); + return (cp - 1 == endCharCode) && + new_gid == glyphID + (cp - startCharCode); } }; @@ -623,12 +792,69 @@ struct DefaultUVS : SortedArrayOf for (unsigned int i = 0; i < count; i++) { hb_codepoint_t first = arrayZ[i].startUnicodeValue; - hb_codepoint_t last = MIN ((hb_codepoint_t) (first + arrayZ[i].additionalCount), - (hb_codepoint_t) HB_UNICODE_MAX); + hb_codepoint_t last = 
hb_min ((hb_codepoint_t) (first + arrayZ[i].additionalCount), + (hb_codepoint_t) HB_UNICODE_MAX); out->add_range (first, last); } } + DefaultUVS* copy (hb_serialize_context_t *c, + const hb_set_t *unicodes) const + { + DefaultUVS *out = c->start_embed (); + if (unlikely (!out)) return nullptr; + auto snap = c->snapshot (); + + HBUINT32 len; + len = 0; + if (unlikely (!c->copy (len))) return nullptr; + unsigned init_len = c->length (); + + hb_codepoint_t lastCode = HB_MAP_VALUE_INVALID; + int count = -1; + + for (const UnicodeValueRange& _ : as_array ()) + { + for (const unsigned addcnt : hb_range ((unsigned) _.additionalCount + 1)) + { + unsigned curEntry = (unsigned) _.startUnicodeValue + addcnt; + if (!unicodes->has (curEntry)) continue; + count += 1; + if (lastCode == HB_MAP_VALUE_INVALID) + lastCode = curEntry; + else if (lastCode + count != curEntry) + { + UnicodeValueRange rec; + rec.startUnicodeValue = lastCode; + rec.additionalCount = count - 1; + c->copy (rec); + + lastCode = curEntry; + count = 0; + } + } + } + + if (lastCode != HB_MAP_VALUE_INVALID) + { + UnicodeValueRange rec; + rec.startUnicodeValue = lastCode; + rec.additionalCount = count; + c->copy (rec); + } + + if (c->length () - init_len == 0) + { + c->revert (snap); + return nullptr; + } + else + { + if (unlikely (!c->check_assign (out->len, (c->length () - init_len) / UnicodeValueRange::static_size))) return nullptr; + return out; + } + } + public: DEFINE_SIZE_ARRAY (4, *this); }; @@ -636,9 +862,7 @@ struct DefaultUVS : SortedArrayOf struct UVSMapping { int cmp (const hb_codepoint_t &codepoint) const - { - return unicodeValue.cmp (codepoint); - } + { return unicodeValue.cmp (codepoint); } bool sanitize (hb_sanitize_context_t *c) const { @@ -647,7 +871,7 @@ struct UVSMapping } HBUINT24 unicodeValue; /* Base Unicode value of the UVS */ - GlyphID glyphID; /* Glyph ID of the UVS */ + HBGlyphID glyphID; /* Glyph ID of the UVS */ public: DEFINE_SIZE_STATIC (5); }; @@ -658,7 +882,63 @@ struct NonDefaultUVS : SortedArrayOf { unsigned int count = len; for (unsigned int i = 0; i < count; i++) - out->add (arrayZ[i].glyphID); + out->add (arrayZ[i].unicodeValue); + } + + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping /* OUT */) const + { + unsigned count = len; + for (unsigned i = 0; i < count; i++) + { + hb_codepoint_t unicode = arrayZ[i].unicodeValue; + hb_codepoint_t glyphid = arrayZ[i].glyphID; + unicodes->add (unicode); + mapping->set (unicode, glyphid); + } + } + + void closure_glyphs (const hb_set_t *unicodes, + hb_set_t *glyphset) const + { + + as_array () + | hb_filter (unicodes, &UVSMapping::unicodeValue) + | hb_map (&UVSMapping::glyphID) + | hb_sink (glyphset) + ; + } + + NonDefaultUVS* copy (hb_serialize_context_t *c, + const hb_set_t *unicodes, + const hb_set_t *glyphs_requested, + const hb_map_t *glyph_map) const + { + NonDefaultUVS *out = c->start_embed (); + if (unlikely (!out)) return nullptr; + + auto it = + + as_array () + | hb_filter ([&] (const UVSMapping& _) + { + return unicodes->has (_.unicodeValue) || glyphs_requested->has (_.glyphID); + }) + ; + + if (!it) return nullptr; + + HBUINT32 len; + len = it.len (); + if (unlikely (!c->copy (len))) return nullptr; + + for (const UVSMapping& _ : it) + { + UVSMapping mapping; + mapping.unicodeValue = _.unicodeValue; + mapping.glyphID = glyph_map->get (_.glyphID); + c->copy (mapping); + } + + return out; } public: @@ -682,17 +962,37 @@ struct VariationSelectorRecord return GLYPH_VARIANT_NOT_FOUND; } + VariationSelectorRecord(const 
VariationSelectorRecord& other) + { + *this = other; + } + + void operator= (const VariationSelectorRecord& other) + { + varSelector = other.varSelector; + HBUINT32 offset = other.defaultUVS; + defaultUVS = offset; + offset = other.nonDefaultUVS; + nonDefaultUVS = offset; + } + void collect_unicodes (hb_set_t *out, const void *base) const { (base+defaultUVS).collect_unicodes (out); (base+nonDefaultUVS).collect_unicodes (out); } - int cmp (const hb_codepoint_t &variation_selector) const + void collect_mapping (const void *base, + hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping /* OUT */) const { - return varSelector.cmp (variation_selector); + (base+defaultUVS).collect_unicodes (unicodes); + (base+nonDefaultUVS).collect_mapping (unicodes, mapping); } + int cmp (const hb_codepoint_t &variation_selector) const + { return varSelector.cmp (variation_selector); } + bool sanitize (hb_sanitize_context_t *c, const void *base) const { TRACE_SANITIZE (this); @@ -701,6 +1001,45 @@ struct VariationSelectorRecord nonDefaultUVS.sanitize (c, base)); } + hb_pair_t + copy (hb_serialize_context_t *c, + const hb_set_t *unicodes, + const hb_set_t *glyphs_requested, + const hb_map_t *glyph_map, + const void *base) const + { + auto snap = c->snapshot (); + auto *out = c->embed (*this); + if (unlikely (!out)) return hb_pair (0, 0); + + out->defaultUVS = 0; + out->nonDefaultUVS = 0; + + unsigned non_default_uvs_objidx = 0; + if (nonDefaultUVS != 0) + { + c->push (); + if (c->copy (base+nonDefaultUVS, unicodes, glyphs_requested, glyph_map)) + non_default_uvs_objidx = c->pop_pack (); + else c->pop_discard (); + } + + unsigned default_uvs_objidx = 0; + if (defaultUVS != 0) + { + c->push (); + if (c->copy (base+defaultUVS, unicodes)) + default_uvs_objidx = c->pop_pack (); + else c->pop_discard (); + } + + + if (!default_uvs_objidx && !non_default_uvs_objidx) + c->revert (snap); + + return hb_pair (default_uvs_objidx, non_default_uvs_objidx); + } + HBUINT24 varSelector; /* Variation selector. */ LOffsetTo defaultUVS; /* Offset to Default UVS Table. May be 0. */ @@ -715,9 +1054,7 @@ struct CmapSubtableFormat14 glyph_variant_t get_glyph_variant (hb_codepoint_t codepoint, hb_codepoint_t variation_selector, hb_codepoint_t *glyph) const - { - return record.bsearch (variation_selector).get_glyph (codepoint, glyph, this); - } + { return record.bsearch (variation_selector).get_glyph (codepoint, glyph, this); } void collect_variation_selectors (hb_set_t *out) const { @@ -727,8 +1064,110 @@ struct CmapSubtableFormat14 } void collect_variation_unicodes (hb_codepoint_t variation_selector, hb_set_t *out) const + { record.bsearch (variation_selector).collect_unicodes (out, this); } + + void serialize (hb_serialize_context_t *c, + const hb_set_t *unicodes, + const hb_set_t *glyphs_requested, + const hb_map_t *glyph_map, + const void *base) + { + auto snap = c->snapshot (); + unsigned table_initpos = c->length (); + const char* init_tail = c->tail; + + if (unlikely (!c->extend_min (*this))) return; + this->format = 14; + + auto src_tbl = reinterpret_cast (base); + + /* + * Some versions of OTS require that offsets are in order. Due to the use + * of push()/pop_pack() serializing the variation records in order results + * in the offsets being in reverse order (first record has the largest + * offset). While this is perfectly valid, it will cause some versions of + * OTS to consider this table bad. 
+ * + * So to prevent this issue we serialize the variation records in reverse + * order, so that the offsets are ordered from small to large. Since + * variation records are supposed to be in increasing order of varSelector + * we then have to reverse the order of the written variation selector + * records after everything is finalized. + */ + hb_vector_t> obj_indices; + for (int i = src_tbl->record.len - 1; i >= 0; i--) + { + hb_pair_t result = src_tbl->record[i].copy (c, unicodes, glyphs_requested, glyph_map, base); + if (result.first || result.second) + obj_indices.push (result); + } + + if (c->length () - table_initpos == CmapSubtableFormat14::min_size) + { + c->revert (snap); + return; + } + + if (unlikely (!c->check_success (!obj_indices.in_error ()))) + return; + + int tail_len = init_tail - c->tail; + c->check_assign (this->length, c->length () - table_initpos + tail_len); + c->check_assign (this->record.len, + (c->length () - table_initpos - CmapSubtableFormat14::min_size) / + VariationSelectorRecord::static_size); + + /* Correct the incorrect write order by reversing the order of the variation + records array. */ + _reverse_variation_records (); + + /* Now that records are in the right order, we can set up the offsets. */ + _add_links_to_variation_records (c, obj_indices); + } + + void _reverse_variation_records () + { + record.as_array ().reverse (); + } + + void _add_links_to_variation_records (hb_serialize_context_t *c, + const hb_vector_t>& obj_indices) + { + for (unsigned i = 0; i < obj_indices.length; i++) + { + /* + * Since the record array has been reversed (see comments in copy()) + * but obj_indices has not been, the indices at obj_indices[i] + * are for the variation record at record[j]. + */ + int j = obj_indices.length - 1 - i; + c->add_link (record[j].defaultUVS, obj_indices[i].first); + c->add_link (record[j].nonDefaultUVS, obj_indices[i].second); + } + } + + void closure_glyphs (const hb_set_t *unicodes, + hb_set_t *glyphset) const + { + + hb_iter (record) + | hb_filter (hb_bool, &VariationSelectorRecord::nonDefaultUVS) + | hb_map (&VariationSelectorRecord::nonDefaultUVS) + | hb_map (hb_add (this)) + | hb_apply ([=] (const NonDefaultUVS& _) { _.closure_glyphs (unicodes, glyphset); }) + ; + } + + void collect_unicodes (hb_set_t *out) const + { + for (const VariationSelectorRecord& _ : record) + _.collect_unicodes (out, this); + } + + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping /* OUT */) const { - record.bsearch (variation_selector).collect_unicodes (out, this); + for (const VariationSelectorRecord& _ : record) + _.collect_mapping (this, unicodes, mapping); } bool sanitize (hb_sanitize_context_t *c) const @@ -766,20 +1205,52 @@ struct CmapSubtable default: return false; } } - void collect_unicodes (hb_set_t *out) const + void collect_unicodes (hb_set_t *out, unsigned int num_glyphs = UINT_MAX) const { switch (u.format) { case 0: u.format0 .collect_unicodes (out); return; case 4: u.format4 .collect_unicodes (out); return; case 6: u.format6 .collect_unicodes (out); return; case 10: u.format10.collect_unicodes (out); return; - case 12: u.format12.collect_unicodes (out); return; - case 13: u.format13.collect_unicodes (out); return; + case 12: u.format12.collect_unicodes (out, num_glyphs); return; + case 13: u.format13.collect_unicodes (out, num_glyphs); return; case 14: default: return; } } + void collect_mapping (hb_set_t *unicodes, /* OUT */ + hb_map_t *mapping, /* OUT */ + unsigned num_glyphs = UINT_MAX) const + { + switch (u.format) { + 
case 0: u.format0 .collect_mapping (unicodes, mapping); return; + case 4: u.format4 .collect_mapping (unicodes, mapping); return; + case 6: u.format6 .collect_mapping (unicodes, mapping); return; + case 10: u.format10.collect_mapping (unicodes, mapping); return; + case 12: u.format12.collect_mapping (unicodes, mapping, num_glyphs); return; + case 13: u.format13.collect_mapping (unicodes, mapping, num_glyphs); return; + case 14: + default: return; + } + } + + template + void serialize (hb_serialize_context_t *c, + Iterator it, + unsigned format, + const hb_subset_plan_t *plan, + const void *base) + { + switch (format) { + case 4: return u.format4.serialize (c, it); + case 12: return u.format12.serialize (c, it); + case 14: return u.format14.serialize (c, plan->unicodes, plan->glyphs_requested, plan->glyph_map, base); + default: return; + } + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -831,6 +1302,40 @@ struct EncodingRecord subtable.sanitize (c, base)); } + template + EncodingRecord* copy (hb_serialize_context_t *c, + Iterator it, + unsigned format, + const void *base, + const hb_subset_plan_t *plan, + /* INOUT */ unsigned *objidx) const + { + TRACE_SERIALIZE (this); + auto snap = c->snapshot (); + auto *out = c->embed (this); + if (unlikely (!out)) return_trace (nullptr); + out->subtable = 0; + + if (*objidx == 0) + { + CmapSubtable *cmapsubtable = c->push (); + unsigned origin_length = c->length (); + cmapsubtable->serialize (c, it, format, plan, &(base+subtable)); + if (c->length () - origin_length > 0) *objidx = c->pop_pack (); + else c->pop_discard (); + } + + if (*objidx == 0) + { + c->revert (snap); + return_trace (nullptr); + } + + c->add_link (out->subtable, *objidx); + return_trace (out); + } + HBUINT16 platformID; /* Platform ID. */ HBUINT16 encodingID; /* Platform-specific encoding ID. 
*/ LOffsetTo @@ -843,124 +1348,128 @@ struct cmap { static constexpr hb_tag_t tableTag = HB_OT_TAG_cmap; - struct subset_plan + template + void serialize (hb_serialize_context_t *c, + Iterator it, + EncodingRecIter encodingrec_iter, + const void *base, + const hb_subset_plan_t *plan) { - size_t final_size () const + if (unlikely (!c->extend_min ((*this)))) return; + this->version = 0; + + unsigned format4objidx = 0, format12objidx = 0, format14objidx = 0; + + for (const EncodingRecord& _ : encodingrec_iter) { - return 4 // header - + 8 * 3 // 3 EncodingRecord - + CmapSubtableFormat4::get_sub_table_size (this->format4_segments) - + CmapSubtableFormat12::get_sub_table_size (this->format12_groups); + unsigned format = (base+_.subtable).u.format; + if (!plan->glyphs_requested->is_empty ()) + { + hb_set_t unicodes_set; + hb_map_t cp_glyphid_map; + (base+_.subtable).collect_mapping (&unicodes_set, &cp_glyphid_map); + + auto table_iter = + + hb_zip (unicodes_set.iter(), unicodes_set.iter() | hb_map(cp_glyphid_map)) + | hb_filter (plan->_glyphset, hb_second) + | hb_filter ([plan] (const hb_pair_t& p) + { + return plan->unicodes->has (p.first) || + plan->glyphs_requested->has (p.second); + }) + | hb_map ([plan] (const hb_pair_t& p_org) + { + return hb_pair_t (p_org.first, plan->glyph_map->get(p_org.second)); + }) + ; + + if (format == 4) c->copy (_, table_iter, 4u, base, plan, &format4objidx); + else if (format == 12) c->copy (_, table_iter, 12u, base, plan, &format12objidx); + else if (format == 14) c->copy (_, table_iter, 14u, base, plan, &format14objidx); + } + /* when --gids option is not used, we iterate input unicodes instead of + * all codepoints in each subtable, which is more efficient */ + else + { + hb_set_t unicodes_set; + (base+_.subtable).collect_unicodes (&unicodes_set); + + if (format == 4) c->copy (_, + it | hb_filter (unicodes_set, hb_first), 4u, base, plan, &format4objidx); + else if (format == 12) c->copy (_, + it | hb_filter (unicodes_set, hb_first), 12u, base, plan, &format12objidx); + else if (format == 14) c->copy (_, it, 14u, base, plan, &format14objidx); + } } - hb_vector_t format4_segments; - hb_vector_t format12_groups; - }; + c->check_assign(this->encodingRecord.len, (c->length () - cmap::min_size)/EncodingRecord::static_size); + } - bool _create_plan (const hb_subset_plan_t *plan, - subset_plan *cmap_plan) const + void closure_glyphs (const hb_set_t *unicodes, + hb_set_t *glyphset) const { - if (unlikely (!CmapSubtableFormat4::create_sub_table_plan (plan, &cmap_plan->format4_segments))) - return false; - - return CmapSubtableFormat12::create_sub_table_plan (plan, &cmap_plan->format12_groups); + + hb_iter (encodingRecord) + | hb_map (&EncodingRecord::subtable) + | hb_map (hb_add (this)) + | hb_filter ([&] (const CmapSubtable& _) { return _.u.format == 14; }) + | hb_apply ([=] (const CmapSubtable& _) { _.u.format14.closure_glyphs (unicodes, glyphset); }) + ; } - bool _subset (const hb_subset_plan_t *plan, - const subset_plan &cmap_subset_plan, - size_t dest_sz, - void *dest) const + bool subset (hb_subset_context_t *c) const { - hb_serialize_context_t c (dest, dest_sz); - - cmap *table = c.start_serialize (); - if (unlikely (!c.extend_min (*table))) - { - return false; - } + TRACE_SUBSET (this); - table->version.set (0); + cmap *cmap_prime = c->serializer->start_embed (); + if (unlikely (!c->serializer->check_success (cmap_prime))) return_trace (false); - if (unlikely (!table->encodingRecord.serialize (&c, /* numTables */ 3))) - return false; - - // TODO(grieger): 
Convert the below to a for loop + auto encodingrec_iter = + + hb_iter (encodingRecord) + | hb_filter ([&] (const EncodingRecord& _) + { + if ((_.platformID == 0 && _.encodingID == 3) || + (_.platformID == 0 && _.encodingID == 4) || + (_.platformID == 3 && _.encodingID == 1) || + (_.platformID == 3 && _.encodingID == 10) || + (this + _.subtable).u.format == 14) + return true; - // Format 4, Plat 0 Encoding Record - EncodingRecord &format4_plat0_rec = table->encodingRecord[0]; - format4_plat0_rec.platformID.set (0); // Unicode - format4_plat0_rec.encodingID.set (3); + return false; + }) + ; - // Format 4, Plat 3 Encoding Record - EncodingRecord &format4_plat3_rec = table->encodingRecord[1]; - format4_plat3_rec.platformID.set (3); // Windows - format4_plat3_rec.encodingID.set (1); // Unicode BMP + if (unlikely (!encodingrec_iter.len ())) return_trace (false); - // Format 12 Encoding Record - EncodingRecord &format12_rec = table->encodingRecord[2]; - format12_rec.platformID.set (3); // Windows - format12_rec.encodingID.set (10); // Unicode UCS-4 + const EncodingRecord *unicode_bmp= nullptr, *unicode_ucs4 = nullptr, *ms_bmp = nullptr, *ms_ucs4 = nullptr; + bool has_format12 = false; - // Write out format 4 sub table + for (const EncodingRecord& _ : encodingrec_iter) { - CmapSubtable &subtable = format4_plat0_rec.subtable.serialize (&c, table); - format4_plat3_rec.subtable.set (format4_plat0_rec.subtable); - subtable.u.format.set (4); - - CmapSubtableFormat4 &format4 = subtable.u.format4; - if (unlikely (!format4.serialize (&c, plan, cmap_subset_plan.format4_segments))) - return false; + unsigned format = (this + _.subtable).u.format; + if (format == 12) has_format12 = true; + + const EncodingRecord *table = hb_addressof (_); + if (_.platformID == 0 && _.encodingID == 3) unicode_bmp = table; + else if (_.platformID == 0 && _.encodingID == 4) unicode_ucs4 = table; + else if (_.platformID == 3 && _.encodingID == 1) ms_bmp = table; + else if (_.platformID == 3 && _.encodingID == 10) ms_ucs4 = table; } - // Write out format 12 sub table. 
- { - CmapSubtable &subtable = format12_rec.subtable.serialize (&c, table); - subtable.u.format.set (12); - - CmapSubtableFormat12 &format12 = subtable.u.format12; - if (unlikely (!format12.serialize (&c, cmap_subset_plan.format12_groups))) - return false; - } - - c.end_serialize (); - - return true; - } - - bool subset (hb_subset_plan_t *plan) const - { - subset_plan cmap_subset_plan; - - if (unlikely (!_create_plan (plan, &cmap_subset_plan))) - { - DEBUG_MSG(SUBSET, nullptr, "Failed to generate a cmap subsetting plan."); - return false; - } - - // We now know how big our blob needs to be - size_t dest_sz = cmap_subset_plan.final_size (); - void *dest = malloc (dest_sz); - if (unlikely (!dest)) { - DEBUG_MSG(SUBSET, nullptr, "Unable to alloc %lu for cmap subset output", (unsigned long) dest_sz); - return false; - } - - if (unlikely (!_subset (plan, cmap_subset_plan, dest_sz, dest))) - { - DEBUG_MSG(SUBSET, nullptr, "Failed to perform subsetting of cmap."); - free (dest); - return false; - } - - // all done, write the blob into dest - hb_blob_t *cmap_prime = hb_blob_create ((const char *) dest, - dest_sz, - HB_MEMORY_MODE_READONLY, - dest, - free); - bool result = plan->add_table (HB_OT_TAG_cmap, cmap_prime); - hb_blob_destroy (cmap_prime); - return result; + if (unlikely (!has_format12 && !unicode_bmp && !ms_bmp)) return_trace (false); + if (unlikely (has_format12 && (!unicode_ucs4 && !ms_ucs4))) return_trace (false); + + auto it = + + hb_iter (c->plan->unicodes) + | hb_map ([&] (hb_codepoint_t _) + { + hb_codepoint_t new_gid = HB_MAP_VALUE_INVALID; + c->plan->new_gid_for_codepoint (_, &new_gid); + return hb_pair_t (_, new_gid); + }) + | hb_filter ([&] (const hb_pair_t _) + { return (_.second != HB_MAP_VALUE_INVALID); }) + ; + cmap_prime->serialize (c->serializer, it, encodingrec_iter, this, c->plan); + return_trace (true); } const CmapSubtable *find_best_subtable (bool *symbol = nullptr) const @@ -969,6 +1478,15 @@ struct cmap const CmapSubtable *subtable; + /* Symbol subtable. + * Prefer symbol if available. + * https://github.com/harfbuzz/harfbuzz/issues/1918 */ + if ((subtable = this->find_subtable (3, 0))) + { + if (symbol) *symbol = true; + return subtable; + } + /* 32-bit subtables. */ if ((subtable = this->find_subtable (3, 10))) return subtable; if ((subtable = this->find_subtable (0, 6))) return subtable; @@ -981,13 +1499,6 @@ struct cmap if ((subtable = this->find_subtable (0, 1))) return subtable; if ((subtable = this->find_subtable (0, 0))) return subtable; - /* Symbol subtable. */ - if ((subtable = this->find_subtable (3, 0))) - { - if (symbol) *symbol = true; - return subtable; - } - /* Meh. */ return &Null (CmapSubtable); } @@ -1008,9 +1519,9 @@ struct cmap this->get_glyph_data = subtable; if (unlikely (symbol)) - { this->get_glyph_funcZ = get_glyph_from_symbol; - } else { + else + { switch (subtable->u.format) { /* Accelerate format 4 and format 12. 
*/ default: @@ -1020,20 +1531,20 @@ struct cmap this->get_glyph_funcZ = get_glyph_from; break; case 4: - { - this->format4_accel.init (&subtable->u.format4); - this->get_glyph_data = &this->format4_accel; - this->get_glyph_funcZ = this->format4_accel.get_glyph_func; - } + { + this->format4_accel.init (&subtable->u.format4); + this->get_glyph_data = &this->format4_accel; + this->get_glyph_funcZ = this->format4_accel.get_glyph_func; break; } + } } } void fini () { this->table.destroy (); } bool get_nominal_glyph (hb_codepoint_t unicode, - hb_codepoint_t *glyph) const + hb_codepoint_t *glyph) const { if (unlikely (!this->get_glyph_funcZ)) return false; return this->get_glyph_funcZ (this->get_glyph_data, unicode, glyph); @@ -1076,19 +1587,16 @@ struct cmap return get_nominal_glyph (unicode, glyph); } - void collect_unicodes (hb_set_t *out) const - { - subtable->collect_unicodes (out); - } + void collect_unicodes (hb_set_t *out, unsigned int num_glyphs) const + { subtable->collect_unicodes (out, num_glyphs); } + void collect_mapping (hb_set_t *unicodes, hb_map_t *mapping, + unsigned num_glyphs = UINT_MAX) const + { subtable->collect_mapping (unicodes, mapping, num_glyphs); } void collect_variation_selectors (hb_set_t *out) const - { - subtable_uvs->collect_variation_selectors (out); - } + { subtable_uvs->collect_variation_selectors (out); } void collect_variation_unicodes (hb_codepoint_t variation_selector, hb_set_t *out) const - { - subtable_uvs->collect_variation_unicodes (variation_selector, out); - } + { subtable_uvs->collect_variation_unicodes (variation_selector, out); } protected: typedef bool (*hb_cmap_get_glyph_func_t) (const void *obj, @@ -1096,18 +1604,18 @@ struct cmap hb_codepoint_t *glyph); template - static bool get_glyph_from (const void *obj, - hb_codepoint_t codepoint, - hb_codepoint_t *glyph) + HB_INTERNAL static bool get_glyph_from (const void *obj, + hb_codepoint_t codepoint, + hb_codepoint_t *glyph) { const Type *typed_obj = (const Type *) obj; return typed_obj->get_glyph (codepoint, glyph); } template - static bool get_glyph_from_symbol (const void *obj, - hb_codepoint_t codepoint, - hb_codepoint_t *glyph) + HB_INTERNAL static bool get_glyph_from_symbol (const void *obj, + hb_codepoint_t codepoint, + hb_codepoint_t *glyph) { const Type *typed_obj = (const Type *) obj; if (likely (typed_obj->get_glyph (codepoint, glyph))) @@ -1135,6 +1643,7 @@ struct cmap CmapSubtableFormat4::accelerator_t format4_accel; + public: hb_blob_ptr_t table; }; @@ -1144,8 +1653,8 @@ struct cmap unsigned int encoding_id) const { EncodingRecord key; - key.platformID.set (platform_id); - key.encodingID.set (encoding_id); + key.platformID = platform_id; + key.encodingID = encoding_id; const EncodingRecord &result = encodingRecord.bsearch (key); if (!result.subtable) @@ -1154,6 +1663,28 @@ struct cmap return &(this+result.subtable); } + const EncodingRecord *find_encodingrec (unsigned int platform_id, + unsigned int encoding_id) const + { + EncodingRecord key; + key.platformID = platform_id; + key.encodingID = encoding_id; + + return encodingRecord.as_array ().bsearch (key); + } + + bool find_subtable (unsigned format) const + { + auto it = + + hb_iter (encodingRecord) + | hb_map (&EncodingRecord::subtable) + | hb_map (hb_add (this)) + | hb_filter ([&] (const CmapSubtable& _) { return _.u.format == format; }) + ; + + return it.len (); + } + public: bool sanitize (hb_sanitize_context_t *c) const @@ -1165,9 +1696,9 @@ struct cmap } protected: - HBUINT16 version; /* Table version number (0). 
*/ + HBUINT16 version; /* Table version number (0). */ SortedArrayOf - encodingRecord; /* Encoding tables. */ + encodingRecord; /* Encoding tables. */ public: DEFINE_SIZE_ARRAY (4, encodingRecord); }; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cbdt-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cbdt-table.hh index 36ec2be984fb..3e619bd40356 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cbdt-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cbdt-table.hh @@ -21,7 +21,7 @@ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. * - * Google Author(s): Seigo Nonaka + * Google Author(s): Seigo Nonaka, Calder Kitagawa */ #ifndef HB_OT_COLOR_CBDT_TABLE_HH @@ -43,6 +43,35 @@ namespace OT { +struct cblc_bitmap_size_subset_context_t +{ + const char *cbdt; + unsigned int cbdt_length; + hb_vector_t *cbdt_prime; + unsigned int size; /* INOUT + * Input: old size of IndexSubtable + * Output: new size of IndexSubtable + */ + unsigned int num_tables; /* INOUT + * Input: old number of subtables. + * Output: new number of subtables. + */ + hb_codepoint_t start_glyph; /* OUT */ + hb_codepoint_t end_glyph; /* OUT */ +}; + +static inline bool +_copy_data_to_cbdt (hb_vector_t *cbdt_prime, + const void *data, + unsigned length) +{ + unsigned int new_len = cbdt_prime->length + length; + if (unlikely (!cbdt_prime->alloc (new_len))) return false; + memcpy (cbdt_prime->arrayZ + cbdt_prime->length, data, length); + cbdt_prime->length = new_len; + return true; +} + struct SmallGlyphMetrics { bool sanitize (hb_sanitize_context_t *c) const @@ -51,12 +80,12 @@ struct SmallGlyphMetrics return_trace (c->check_struct (this)); } - void get_extents (hb_glyph_extents_t *extents) const + void get_extents (hb_font_t *font, hb_glyph_extents_t *extents) const { - extents->x_bearing = bearingX; - extents->y_bearing = bearingY; - extents->width = width; - extents->height = - (hb_position_t) height; + extents->x_bearing = font->em_scale_x (bearingX); + extents->y_bearing = font->em_scale_y (bearingY); + extents->width = font->em_scale_x (width); + extents->height = font->em_scale_y (-static_cast(height)); } HBUINT8 height; @@ -65,7 +94,7 @@ struct SmallGlyphMetrics HBINT8 bearingY; HBUINT8 advance; public: - DEFINE_SIZE_STATIC(5); + DEFINE_SIZE_STATIC (5); }; struct BigGlyphMetrics : SmallGlyphMetrics @@ -74,7 +103,7 @@ struct BigGlyphMetrics : SmallGlyphMetrics HBINT8 vertBearingY; HBUINT8 vertAdvance; public: - DEFINE_SIZE_STATIC(8); + DEFINE_SIZE_STATIC (8); }; struct SBitLineMetrics @@ -98,7 +127,7 @@ struct SBitLineMetrics HBINT8 padding1; HBINT8 padding2; public: - DEFINE_SIZE_STATIC(12); + DEFINE_SIZE_STATIC (12); }; @@ -118,7 +147,7 @@ struct IndexSubtableHeader HBUINT16 imageFormat; HBUINT32 imageDataOffset; public: - DEFINE_SIZE_STATIC(8); + DEFINE_SIZE_STATIC (8); }; template @@ -143,11 +172,23 @@ struct IndexSubtableFormat1Or3 return true; } + bool add_offset (hb_serialize_context_t *c, + unsigned int offset, + unsigned int *size /* OUT (accumulated) */) + { + TRACE_SERIALIZE (this); + Offset embedded_offset; + embedded_offset = offset; + *size += sizeof (OffsetType); + auto *o = c->embed (embedded_offset); + return_trace ((bool) o); + } + IndexSubtableHeader header; - UnsizedArrayOf > + UnsizedArrayOf> offsetArrayZ; public: - DEFINE_SIZE_ARRAY(8, offsetArrayZ); + DEFINE_SIZE_ARRAY (8, offsetArrayZ); }; struct IndexSubtableFormat1 : IndexSubtableFormat1Or3 
{}; @@ -159,35 +200,153 @@ struct IndexSubtable { TRACE_SANITIZE (this); if (!u.header.sanitize (c)) return_trace (false); - switch (u.header.indexFormat) { + switch (u.header.indexFormat) + { case 1: return_trace (u.format1.sanitize (c, glyph_count)); case 3: return_trace (u.format3.sanitize (c, glyph_count)); default:return_trace (true); } } + bool + finish_subtable (hb_serialize_context_t *c, + unsigned int cbdt_prime_len, + unsigned int num_glyphs, + unsigned int *size /* OUT (accumulated) */) + { + TRACE_SERIALIZE (this); + + unsigned int local_offset = cbdt_prime_len - u.header.imageDataOffset; + switch (u.header.indexFormat) + { + case 1: return_trace (u.format1.add_offset (c, local_offset, size)); + case 3: { + if (!u.format3.add_offset (c, local_offset, size)) + return_trace (false); + if (!(num_glyphs & 0x01)) // Pad to 32-bit alignment if needed. + return_trace (u.format3.add_offset (c, 0, size)); + return_trace (true); + } + // TODO: implement 2, 4, 5. + case 2: case 4: // No-op. + case 5: // Pad to 32-bit aligned. + default: return_trace (false); + } + } + + bool + fill_missing_glyphs (hb_serialize_context_t *c, + unsigned int cbdt_prime_len, + unsigned int num_missing, + unsigned int *size /* OUT (accumulated) */, + unsigned int *num_glyphs /* OUT (accumulated) */) + { + TRACE_SERIALIZE (this); + + unsigned int local_offset = cbdt_prime_len - u.header.imageDataOffset; + switch (u.header.indexFormat) + { + case 1: { + for (unsigned int i = 0; i < num_missing; i++) + { + if (unlikely (!u.format1.add_offset (c, local_offset, size))) + return_trace (false); + *num_glyphs += 1; + } + return_trace (true); + } + case 3: { + for (unsigned int i = 0; i < num_missing; i++) + { + if (unlikely (!u.format3.add_offset (c, local_offset, size))) + return_trace (false); + *num_glyphs += 1; + } + return_trace (true); + } + // TODO: implement 2, 4, 5. + case 2: // Add empty space in cbdt_prime?. + case 4: case 5: // No-op as sparse is supported. + default: return_trace (false); + } + } + + bool + copy_glyph_at_idx (hb_serialize_context_t *c, unsigned int idx, + const char *cbdt, unsigned int cbdt_length, + hb_vector_t *cbdt_prime /* INOUT */, + IndexSubtable *subtable_prime /* INOUT */, + unsigned int *size /* OUT (accumulated) */) const + { + TRACE_SERIALIZE (this); + + unsigned int offset, length, format; + if (unlikely (!get_image_data (idx, &offset, &length, &format))) return_trace (false); + if (unlikely (offset > cbdt_length || cbdt_length - offset < length)) return_trace (false); + + auto *header_prime = subtable_prime->get_header (); + unsigned int new_local_offset = cbdt_prime->length - (unsigned int) header_prime->imageDataOffset; + if (unlikely (!_copy_data_to_cbdt (cbdt_prime, cbdt + offset, length))) return_trace (false); + + return_trace (subtable_prime->add_offset (c, new_local_offset, size)); + } + + bool + add_offset (hb_serialize_context_t *c, unsigned int local_offset, + unsigned int *size /* OUT (accumulated) */) + { + TRACE_SERIALIZE (this); + switch (u.header.indexFormat) + { + case 1: return_trace (u.format1.add_offset (c, local_offset, size)); + case 3: return_trace (u.format3.add_offset (c, local_offset, size)); + // TODO: Implement tables 2, 4, 5 + case 2: // Should be a no-op. + case 4: case 5: // Handle sparse cases. 
+ default: return_trace (false); + } + } + bool get_extents (hb_glyph_extents_t *extents HB_UNUSED) const { - switch (u.header.indexFormat) { + switch (u.header.indexFormat) + { case 2: case 5: /* TODO */ case 1: case 3: case 4: /* Variable-metrics formats do not have metrics here. */ default:return (false); } } - bool get_image_data (unsigned int idx, - unsigned int *offset, - unsigned int *length, - unsigned int *format) const + bool + get_image_data (unsigned int idx, unsigned int *offset, + unsigned int *length, unsigned int *format) const { *format = u.header.imageFormat; - switch (u.header.indexFormat) { + switch (u.header.indexFormat) + { case 1: return u.format1.get_image_data (idx, offset, length); case 3: return u.format3.get_image_data (idx, offset, length); default: return false; } } + const IndexSubtableHeader* get_header () const { return &u.header; } + + void populate_header (unsigned index_format, + unsigned image_format, + unsigned int image_data_offset, + unsigned int *size) + { + u.header.indexFormat = index_format; + u.header.imageFormat = image_format; + u.header.imageDataOffset = image_data_offset; + switch (u.header.indexFormat) + { + case 1: *size += IndexSubtableFormat1::min_size; break; + case 3: *size += IndexSubtableFormat3::min_size; break; + } + } + protected: union { IndexSubtableHeader header; @@ -209,12 +368,135 @@ struct IndexSubtableRecord offsetToSubtable.sanitize (c, base, lastGlyphIndex - firstGlyphIndex + 1)); } - bool get_extents (hb_glyph_extents_t *extents, - const void *base) const + const IndexSubtable* get_subtable (const void *base) const + { + return &(base+offsetToSubtable); + } + + bool add_new_subtable (hb_subset_context_t* c, + cblc_bitmap_size_subset_context_t *bitmap_size_context, + IndexSubtableRecord *record, + const hb_vector_t> *lookup, /* IN */ + const void *base, + unsigned int *start /* INOUT */) const + { + TRACE_SERIALIZE (this); + + auto *subtable = c->serializer->start_embed (); + if (unlikely (!subtable)) return_trace (false); + if (unlikely (!c->serializer->extend_min (subtable))) return_trace (false); + + auto *old_subtable = get_subtable (base); + auto *old_header = old_subtable->get_header (); + + subtable->populate_header (old_header->indexFormat, + old_header->imageFormat, + bitmap_size_context->cbdt_prime->length, + &bitmap_size_context->size); + + unsigned int num_glyphs = 0; + bool early_exit = false; + for (unsigned int i = *start; i < lookup->length; i++) + { + hb_codepoint_t new_gid = (*lookup)[i].first; + const IndexSubtableRecord *next_record = (*lookup)[i].second; + const IndexSubtable *next_subtable = next_record->get_subtable (base); + auto *next_header = next_subtable->get_header (); + if (next_header != old_header) + { + *start = i; + early_exit = true; + break; + } + unsigned int num_missing = record->add_glyph_for_subset (new_gid); + if (unlikely (!subtable->fill_missing_glyphs (c->serializer, + bitmap_size_context->cbdt_prime->length, + num_missing, + &bitmap_size_context->size, + &num_glyphs))) + return_trace (false); + + hb_codepoint_t old_gid = 0; + c->plan->old_gid_for_new_gid (new_gid, &old_gid); + if (old_gid < next_record->firstGlyphIndex) + return_trace (false); + + unsigned int old_idx = (unsigned int) old_gid - next_record->firstGlyphIndex; + if (unlikely (!next_subtable->copy_glyph_at_idx (c->serializer, + old_idx, + bitmap_size_context->cbdt, + bitmap_size_context->cbdt_length, + bitmap_size_context->cbdt_prime, + subtable, + &bitmap_size_context->size))) + return_trace (false); + num_glyphs 
+= 1; + } + if (!early_exit) + *start = lookup->length; + if (unlikely (!subtable->finish_subtable (c->serializer, + bitmap_size_context->cbdt_prime->length, + num_glyphs, + &bitmap_size_context->size))) + return_trace (false); + return_trace (true); + } + + bool add_new_record (hb_subset_context_t *c, + cblc_bitmap_size_subset_context_t *bitmap_size_context, + const hb_vector_t> *lookup, /* IN */ + const void *base, + unsigned int *start, /* INOUT */ + hb_vector_t* records /* INOUT */) const + { + TRACE_SERIALIZE (this); + auto snap = c->serializer->snapshot (); + unsigned int old_size = bitmap_size_context->size; + unsigned int old_cbdt_prime_length = bitmap_size_context->cbdt_prime->length; + + // Set to invalid state to indicate filling glyphs is not yet started. + if (unlikely (!records->resize (records->length + 1))) + return_trace (c->serializer->check_success (false)); + + (*records)[records->length - 1].firstGlyphIndex = 1; + (*records)[records->length - 1].lastGlyphIndex = 0; + bitmap_size_context->size += IndexSubtableRecord::min_size; + + c->serializer->push (); + + if (unlikely (!add_new_subtable (c, bitmap_size_context, &((*records)[records->length - 1]), lookup, base, start))) + { + c->serializer->pop_discard (); + c->serializer->revert (snap); + bitmap_size_context->cbdt_prime->shrink (old_cbdt_prime_length); + bitmap_size_context->size = old_size; + records->resize (records->length - 1); + return_trace (false); + } + + bitmap_size_context->num_tables += 1; + return_trace (true); + } + + unsigned int add_glyph_for_subset (hb_codepoint_t gid) { - return (base+offsetToSubtable).get_extents (extents); + if (firstGlyphIndex > lastGlyphIndex) + { + firstGlyphIndex = gid; + lastGlyphIndex = gid; + return 0; + } + // TODO maybe assert? this shouldn't occur. + if (lastGlyphIndex > gid) + return 0; + unsigned int num_missing = (unsigned int) (gid - lastGlyphIndex - 1); + lastGlyphIndex = gid; + return num_missing; } + bool get_extents (hb_glyph_extents_t *extents, const void *base) const + { return (base+offsetToSubtable).get_extents (extents); } + bool get_image_data (unsigned int gid, const void *base, unsigned int *offset, @@ -226,11 +508,11 @@ struct IndexSubtableRecord offset, length, format); } - GlyphID firstGlyphIndex; - GlyphID lastGlyphIndex; + HBGlyphID firstGlyphIndex; + HBGlyphID lastGlyphIndex; LOffsetTo offsetToSubtable; public: - DEFINE_SIZE_STATIC(8); + DEFINE_SIZE_STATIC (8); }; struct IndexSubtableArray @@ -243,6 +525,79 @@ struct IndexSubtableArray return_trace (indexSubtablesZ.sanitize (c, count, this)); } + void + build_lookup (hb_subset_context_t *c, cblc_bitmap_size_subset_context_t *bitmap_size_context, + hb_vector_t> *lookup /* OUT */) const + { + bool start_glyph_is_set = false; + for (hb_codepoint_t new_gid = 0; new_gid < c->plan->num_output_glyphs (); new_gid++) + { + hb_codepoint_t old_gid; + if (unlikely (!c->plan->old_gid_for_new_gid (new_gid, &old_gid))) continue; + + const IndexSubtableRecord* record = find_table (old_gid, bitmap_size_context->num_tables); + if (unlikely (!record)) continue; + + // Don't add gaps to the lookup. The best way to determine if a glyph is a + // gap is that it has no image data. 
+ unsigned int offset, length, format; + if (unlikely (!record->get_image_data (old_gid, this, &offset, &length, &format))) continue; + + lookup->push (hb_pair_t (new_gid, record)); + + if (!start_glyph_is_set) + { + bitmap_size_context->start_glyph = new_gid; + start_glyph_is_set = true; + } + + bitmap_size_context->end_glyph = new_gid; + } + } + + bool + subset (hb_subset_context_t *c, + cblc_bitmap_size_subset_context_t *bitmap_size_context) const + { + TRACE_SUBSET (this); + + auto *dst = c->serializer->start_embed (); + if (unlikely (!dst)) return_trace (false); + + hb_vector_t> lookup; + build_lookup (c, bitmap_size_context, &lookup); + if (unlikely (lookup.in_error ())) + return c->serializer->check_success (false); + + bitmap_size_context->size = 0; + bitmap_size_context->num_tables = 0; + hb_vector_t records; + for (unsigned int start = 0; start < lookup.length;) + { + if (unlikely (!lookup[start].second->add_new_record (c, bitmap_size_context, &lookup, this, &start, &records))) + { + // Discard any leftover pushes to the serializer from successful records. + for (unsigned int i = 0; i < records.length; i++) + c->serializer->pop_discard (); + return_trace (false); + } + } + + /* Workaround to ensure offset ordering is from least to greatest when + * resolving links. */ + hb_vector_t objidxs; + for (unsigned int i = 0; i < records.length; i++) + objidxs.push (c->serializer->pop_pack ()); + for (unsigned int i = 0; i < records.length; i++) + { + IndexSubtableRecord* record = c->serializer->embed (records[i]); + if (unlikely (!record)) return_trace (false); + c->serializer->add_link (record->offsetToSubtable, objidxs[records.length - 1 - i]); + } + return_trace (true); + } + public: const IndexSubtableRecord* find_table (hb_codepoint_t glyph, unsigned int numTables) const { @@ -274,14 +629,48 @@ struct BitmapSizeTable vertical.sanitize (c)); } - const IndexSubtableRecord *find_table (hb_codepoint_t glyph, - const void *base, - const void **out_base) const + const IndexSubtableRecord * + find_table (hb_codepoint_t glyph, const void *base, const void **out_base) const { *out_base = &(base+indexSubtableArrayOffset); return (base+indexSubtableArrayOffset).find_table (glyph, numberOfIndexSubtables); } + bool + subset (hb_subset_context_t *c, const void *base, + const char *cbdt, unsigned int cbdt_length, + hb_vector_t *cbdt_prime /* INOUT */) const + { + TRACE_SUBSET (this); + auto *out_table = c->serializer->embed (this); + if (unlikely (!out_table)) return_trace (false); + + cblc_bitmap_size_subset_context_t bitmap_size_context; + bitmap_size_context.cbdt = cbdt; + bitmap_size_context.cbdt_length = cbdt_length; + bitmap_size_context.cbdt_prime = cbdt_prime; + bitmap_size_context.size = indexTablesSize; + bitmap_size_context.num_tables = numberOfIndexSubtables; + bitmap_size_context.start_glyph = 1; + bitmap_size_context.end_glyph = 0; + + if (!out_table->indexSubtableArrayOffset.serialize_subset (c, + indexSubtableArrayOffset, + base, + &bitmap_size_context)) + return_trace (false); + if (!bitmap_size_context.size || + !bitmap_size_context.num_tables || + bitmap_size_context.start_glyph > bitmap_size_context.end_glyph) + return_trace (false); + + out_table->indexTablesSize = bitmap_size_context.size; + out_table->numberOfIndexSubtables = bitmap_size_context.num_tables; + out_table->startGlyphIndex = bitmap_size_context.start_glyph; + out_table->endGlyphIndex = bitmap_size_context.end_glyph; + return_trace (true); + } + protected: LNNOffsetTo indexSubtableArrayOffset; @@ -290,14 +679,14 
@@ struct BitmapSizeTable HBUINT32 colorRef; SBitLineMetrics horizontal; SBitLineMetrics vertical; - GlyphID startGlyphIndex; - GlyphID endGlyphIndex; + HBGlyphID startGlyphIndex; + HBGlyphID endGlyphIndex; HBUINT8 ppemX; HBUINT8 ppemY; HBUINT8 bitDepth; HBINT8 flags; public: - DEFINE_SIZE_STATIC(48); + DEFINE_SIZE_STATIC (48); }; @@ -310,7 +699,7 @@ struct GlyphBitmapDataFormat17 SmallGlyphMetrics glyphMetrics; LArrayOf data; public: - DEFINE_SIZE_ARRAY(9, data); + DEFINE_SIZE_ARRAY (9, data); }; struct GlyphBitmapDataFormat18 @@ -318,14 +707,14 @@ struct GlyphBitmapDataFormat18 BigGlyphMetrics glyphMetrics; LArrayOf data; public: - DEFINE_SIZE_ARRAY(12, data); + DEFINE_SIZE_ARRAY (12, data); }; struct GlyphBitmapDataFormat19 { LArrayOf data; public: - DEFINE_SIZE_ARRAY(4, data); + DEFINE_SIZE_ARRAY (4, data); }; struct CBLC @@ -342,22 +731,60 @@ struct CBLC sizeTables.sanitize (c, this)); } + static bool + sink_cbdt (hb_subset_context_t *c, hb_vector_t* cbdt_prime) + { + hb_blob_t *cbdt_prime_blob = hb_blob_create (cbdt_prime->arrayZ, + cbdt_prime->length, + HB_MEMORY_MODE_WRITABLE, + cbdt_prime->arrayZ, + free); + cbdt_prime->init (); // Leak arrayZ to the blob. + bool ret = c->plan->add_table (HB_OT_TAG_CBDT, cbdt_prime_blob); + hb_blob_destroy (cbdt_prime_blob); + return ret; + } + + bool + subset_size_table (hb_subset_context_t *c, const BitmapSizeTable& table, + const char *cbdt /* IN */, unsigned int cbdt_length, + CBLC *cblc_prime /* INOUT */, hb_vector_t *cbdt_prime /* INOUT */) const + { + TRACE_SUBSET (this); + cblc_prime->sizeTables.len++; + + auto snap = c->serializer->snapshot (); + auto cbdt_prime_len = cbdt_prime->length; + + if (!table.subset (c, this, cbdt, cbdt_length, cbdt_prime)) + { + cblc_prime->sizeTables.len--; + c->serializer->revert (snap); + cbdt_prime->shrink (cbdt_prime_len); + return_trace (false); + } + return_trace (true); + } + + // Implemented in cc file as it depends on definition of CBDT. + HB_INTERNAL bool subset (hb_subset_context_t *c) const; + protected: const BitmapSizeTable &choose_strike (hb_font_t *font) const { unsigned count = sizeTables.len; if (unlikely (!count)) - return Null(BitmapSizeTable); + return Null (BitmapSizeTable); - unsigned int requested_ppem = MAX (font->x_ppem, font->y_ppem); + unsigned int requested_ppem = hb_max (font->x_ppem, font->y_ppem); if (!requested_ppem) requested_ppem = 1<<30; /* Choose largest strike. 
*/ unsigned int best_i = 0; - unsigned int best_ppem = MAX (sizeTables[0].ppemX, sizeTables[0].ppemY); + unsigned int best_ppem = hb_max (sizeTables[0].ppemX, sizeTables[0].ppemY); for (unsigned int i = 1; i < count; i++) { - unsigned int ppem = MAX (sizeTables[i].ppemX, sizeTables[i].ppemY); + unsigned int ppem = hb_max (sizeTables[i].ppemX, sizeTables[i].ppemY); if ((requested_ppem <= ppem && ppem < best_ppem) || (requested_ppem > best_ppem && ppem > best_ppem)) { @@ -373,7 +800,7 @@ struct CBLC FixedVersion<> version; LArrayOf sizeTables; public: - DEFINE_SIZE_ARRAY(8, sizeTables); + DEFINE_SIZE_ARRAY (8, sizeTables); }; struct CBDT @@ -384,8 +811,8 @@ struct CBDT { void init (hb_face_t *face) { - cblc = hb_sanitize_context_t().reference_table (face); - cbdt = hb_sanitize_context_t().reference_table (face); + cblc = hb_sanitize_context_t ().reference_table (face); + cbdt = hb_sanitize_context_t ().reference_table (face); upem = hb_face_get_upem (face); } @@ -396,8 +823,8 @@ struct CBDT this->cbdt.destroy (); } - bool get_extents (hb_font_t *font, hb_codepoint_t glyph, - hb_glyph_extents_t *extents) const + bool + get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const { const void *base; const BitmapSizeTable &strike = this->cblc->choose_strike (font); @@ -412,48 +839,42 @@ struct CBDT if (!subtable_record->get_image_data (glyph, base, &image_offset, &image_length, &image_format)) return false; + unsigned int cbdt_len = cbdt.get_length (); + if (unlikely (image_offset > cbdt_len || cbdt_len - image_offset < image_length)) + return false; + + switch (image_format) { - unsigned int cbdt_len = cbdt.get_length (); - if (unlikely (image_offset > cbdt_len || cbdt_len - image_offset < image_length)) + case 17: { + if (unlikely (image_length < GlyphBitmapDataFormat17::min_size)) return false; - - switch (image_format) - { - case 17: { - if (unlikely (image_length < GlyphBitmapDataFormat17::min_size)) - return false; - const GlyphBitmapDataFormat17& glyphFormat17 = - StructAtOffset (this->cbdt, image_offset); - glyphFormat17.glyphMetrics.get_extents (extents); - break; - } - case 18: { - if (unlikely (image_length < GlyphBitmapDataFormat18::min_size)) - return false; - const GlyphBitmapDataFormat18& glyphFormat18 = - StructAtOffset (this->cbdt, image_offset); - glyphFormat18.glyphMetrics.get_extents (extents); - break; - } - default: - // TODO: Support other image formats. - return false; - } + auto &glyphFormat17 = StructAtOffset (this->cbdt, image_offset); + glyphFormat17.glyphMetrics.get_extents (font, extents); + break; + } + case 18: { + if (unlikely (image_length < GlyphBitmapDataFormat18::min_size)) + return false; + auto &glyphFormat18 = StructAtOffset (this->cbdt, image_offset); + glyphFormat18.glyphMetrics.get_extents (font, extents); + break; + } + default: return false; /* TODO: Support other image formats. */ } /* Convert to font units. 
*/ - double x_scale = upem / (double) strike.ppemX; - double y_scale = upem / (double) strike.ppemY; - extents->x_bearing = round (extents->x_bearing * x_scale); - extents->y_bearing = round (extents->y_bearing * y_scale); - extents->width = round (extents->width * x_scale); - extents->height = round (extents->height * y_scale); + float x_scale = upem / (float) strike.ppemX; + float y_scale = upem / (float) strike.ppemY; + extents->x_bearing = roundf (extents->x_bearing * x_scale); + extents->y_bearing = roundf (extents->y_bearing * y_scale); + extents->width = roundf (extents->width * x_scale); + extents->height = roundf (extents->height * y_scale); return true; } - hb_blob_t* reference_png (hb_font_t *font, - hb_codepoint_t glyph) const + hb_blob_t* + reference_png (hb_font_t *font, hb_codepoint_t glyph) const { const void *base; const BitmapSizeTable &strike = this->cblc->choose_strike (font); @@ -465,44 +886,41 @@ struct CBDT if (!subtable_record->get_image_data (glyph, base, &image_offset, &image_length, &image_format)) return hb_blob_get_empty (); + unsigned int cbdt_len = cbdt.get_length (); + if (unlikely (image_offset > cbdt_len || cbdt_len - image_offset < image_length)) + return hb_blob_get_empty (); + + switch (image_format) + { + case 17: { - unsigned int cbdt_len = cbdt.get_length (); - if (unlikely (image_offset > cbdt_len || cbdt_len - image_offset < image_length)) + if (unlikely (image_length < GlyphBitmapDataFormat17::min_size)) return hb_blob_get_empty (); - - switch (image_format) - { - case 17: { - if (unlikely (image_length < GlyphBitmapDataFormat17::min_size)) - return hb_blob_get_empty (); - const GlyphBitmapDataFormat17& glyphFormat17 = - StructAtOffset (this->cbdt, image_offset); - return hb_blob_create_sub_blob (cbdt.get_blob (), - image_offset + GlyphBitmapDataFormat17::min_size, - glyphFormat17.data.len); - } - case 18: { - if (unlikely (image_length < GlyphBitmapDataFormat18::min_size)) - return hb_blob_get_empty (); - const GlyphBitmapDataFormat18& glyphFormat18 = - StructAtOffset (this->cbdt, image_offset); - return hb_blob_create_sub_blob (cbdt.get_blob (), - image_offset + GlyphBitmapDataFormat18::min_size, - glyphFormat18.data.len); - } - case 19: { - if (unlikely (image_length < GlyphBitmapDataFormat19::min_size)) - return hb_blob_get_empty (); - const GlyphBitmapDataFormat19& glyphFormat19 = - StructAtOffset (this->cbdt, image_offset); - return hb_blob_create_sub_blob (cbdt.get_blob (), - image_offset + GlyphBitmapDataFormat19::min_size, - glyphFormat19.data.len); - } - } + auto &glyphFormat17 = StructAtOffset (this->cbdt, image_offset); + return hb_blob_create_sub_blob (cbdt.get_blob (), + image_offset + GlyphBitmapDataFormat17::min_size, + glyphFormat17.data.len); + } + case 18: + { + if (unlikely (image_length < GlyphBitmapDataFormat18::min_size)) + return hb_blob_get_empty (); + auto &glyphFormat18 = StructAtOffset (this->cbdt, image_offset); + return hb_blob_create_sub_blob (cbdt.get_blob (), + image_offset + GlyphBitmapDataFormat18::min_size, + glyphFormat18.data.len); + } + case 19: + { + if (unlikely (image_length < GlyphBitmapDataFormat19::min_size)) + return hb_blob_get_empty (); + auto &glyphFormat19 = StructAtOffset (this->cbdt, image_offset); + return hb_blob_create_sub_blob (cbdt.get_blob (), + image_offset + GlyphBitmapDataFormat19::min_size, + glyphFormat19.data.len); + } + default: return hb_blob_get_empty (); /* TODO: Support other image formats. 
*/ } - - return hb_blob_get_empty (); } bool has_data () const { return cbdt.get_length (); } @@ -525,9 +943,41 @@ struct CBDT FixedVersion<> version; UnsizedArrayOf dataZ; public: - DEFINE_SIZE_ARRAY(4, dataZ); + DEFINE_SIZE_ARRAY (4, dataZ); }; +inline bool +CBLC::subset (hb_subset_context_t *c) const +{ + TRACE_SUBSET (this); + + auto *cblc_prime = c->serializer->start_embed (); + + // Use a vector as a secondary buffer as the tables need to be built in parallel. + hb_vector_t cbdt_prime; + + if (unlikely (!cblc_prime)) return_trace (false); + if (unlikely (!c->serializer->extend_min (cblc_prime))) return_trace (false); + cblc_prime->version = version; + + hb_blob_t* cbdt_blob = hb_sanitize_context_t ().reference_table (c->plan->source); + unsigned int cbdt_length; + CBDT* cbdt = (CBDT *) hb_blob_get_data (cbdt_blob, &cbdt_length); + if (unlikely (cbdt_length < CBDT::min_size)) + { + hb_blob_destroy (cbdt_blob); + return_trace (false); + } + _copy_data_to_cbdt (&cbdt_prime, cbdt, CBDT::min_size); + + for (const BitmapSizeTable& table : + sizeTables.iter ()) + subset_size_table (c, table, (const char *) cbdt, cbdt_length, cblc_prime, &cbdt_prime); + + hb_blob_destroy (cbdt_blob); + + return_trace (CBLC::sink_cbdt (c, &cbdt_prime)); +} + struct CBDT_accelerator_t : CBDT::accelerator_t {}; } /* namespace OT */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-colr-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-colr-table.hh index 362b4a14de6c..21821d458a54 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-colr-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-colr-table.hh @@ -1,5 +1,6 @@ /* * Copyright © 2018 Ebrahim Byagowi + * Copyright © 2020 Google, Inc. * * This is part of HarfBuzz, a text shaping library. * @@ -20,6 +21,8 @@ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Google Author(s): Calder Kitagawa */ #ifndef HB_OT_COLOR_COLR_TABLE_HH @@ -39,6 +42,8 @@ namespace OT { struct LayerRecord { + operator hb_ot_color_layer_t () const { return {glyphId, colorIdx}; } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -46,7 +51,7 @@ struct LayerRecord } public: - GlyphID glyphId; /* Glyph ID of layer glyph */ + HBGlyphID glyphId; /* Glyph ID of layer glyph */ Index colorIdx; /* Index value to use with a * selected color palette. * An index value of 0xFFFF @@ -73,7 +78,7 @@ struct BaseGlyphRecord } public: - GlyphID glyphId; /* Glyph ID of reference glyph */ + HBGlyphID glyphId; /* Glyph ID of reference glyph */ HBUINT16 firstLayerIdx; /* Index (from beginning of * the Layer Records) to the * layer record. 
There will be @@ -98,22 +103,50 @@ struct COLR { const BaseGlyphRecord &record = (this+baseGlyphsZ).bsearch (numBaseGlyphs, glyph); - hb_array_t all_layers ((this+layersZ).arrayZ, numLayers); + hb_array_t all_layers = (this+layersZ).as_array (numLayers); hb_array_t glyph_layers = all_layers.sub_array (record.firstLayerIdx, record.numLayers); if (count) { - hb_array_t segment_layers = glyph_layers.sub_array (start_offset, *count); - *count = segment_layers.length; - for (unsigned int i = 0; i < segment_layers.length; i++) - { - layers[i].glyph = segment_layers.arrayZ[i].glyphId; - layers[i].color_index = segment_layers.arrayZ[i].colorIdx; - } + + glyph_layers.sub_array (start_offset, count) + | hb_sink (hb_array (layers, *count)) + ; } return glyph_layers.length; } + struct accelerator_t + { + accelerator_t () {} + ~accelerator_t () { fini (); } + + void init (hb_face_t *face) + { colr = hb_sanitize_context_t ().reference_table (face); } + + void fini () { this->colr.destroy (); } + + bool is_valid () { return colr.get_blob ()->length; } + + void closure_glyphs (hb_codepoint_t glyph, + hb_set_t *related_ids /* OUT */) const + { colr->closure_glyphs (glyph, related_ids); } + + private: + hb_blob_ptr_t colr; + }; + + void closure_glyphs (hb_codepoint_t glyph, + hb_set_t *related_ids /* OUT */) const + { + const BaseGlyphRecord *record = get_base_glyph_record (glyph); + if (!record) return; + + auto glyph_layers = (this+layersZ).as_array (numLayers).sub_array (record->firstLayerIdx, + record->numLayers); + if (!glyph_layers.length) return; + related_ids->add_array (&glyph_layers[0].glyphId, glyph_layers.length, LayerRecord::min_size); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -122,12 +155,117 @@ struct COLR (this+layersZ).sanitize (c, numLayers))); } + template + bool serialize (hb_serialize_context_t *c, + unsigned version, + BaseIterator base_it, + LayerIterator layer_it) + { + TRACE_SERIALIZE (this); + if (unlikely (base_it.len () != layer_it.len ())) + return_trace (false); + + if (unlikely (!c->extend_min (this))) return_trace (false); + this->version = version; + numLayers = 0; + numBaseGlyphs = base_it.len (); + baseGlyphsZ = COLR::min_size; + layersZ = COLR::min_size + numBaseGlyphs * BaseGlyphRecord::min_size; + + for (const hb_item_type _ : + base_it.iter ()) + { + auto* record = c->embed (_); + if (unlikely (!record)) return_trace (false); + record->firstLayerIdx = numLayers; + numLayers += record->numLayers; + } + + for (const hb_item_type& _ : + layer_it.iter ()) + _.as_array ().copy (c); + + return_trace (true); + } + + const BaseGlyphRecord* get_base_glyph_record (hb_codepoint_t gid) const + { + if ((unsigned int) gid == 0) // Ignore notdef. 
+ return nullptr; + const BaseGlyphRecord* record = &(this+baseGlyphsZ).bsearch (numBaseGlyphs, (unsigned int) gid); + if ((record && (hb_codepoint_t) record->glyphId != gid)) + record = nullptr; + return record; + } + + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + + const hb_map_t &reverse_glyph_map = *c->plan->reverse_glyph_map; + + auto base_it = + + hb_range (c->plan->num_output_glyphs ()) + | hb_map_retains_sorting ([&](hb_codepoint_t new_gid) + { + hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid); + + const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid); + if (unlikely (!old_record)) + return hb_pair_t (false, Null (BaseGlyphRecord)); + + BaseGlyphRecord new_record; + new_record.glyphId = new_gid; + new_record.numLayers = old_record->numLayers; + return hb_pair_t (true, new_record); + }) + | hb_filter (hb_first) + | hb_map_retains_sorting (hb_second) + ; + + auto layer_it = + + hb_range (c->plan->num_output_glyphs ()) + | hb_map (reverse_glyph_map) + | hb_map_retains_sorting ([&](hb_codepoint_t old_gid) + { + const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid); + hb_vector_t out_layers; + + if (unlikely (!old_record || + old_record->firstLayerIdx >= numLayers || + old_record->firstLayerIdx + old_record->numLayers > numLayers)) + return hb_pair_t> (false, out_layers); + + auto layers = (this+layersZ).as_array (numLayers).sub_array (old_record->firstLayerIdx, + old_record->numLayers); + out_layers.resize (layers.length); + for (unsigned int i = 0; i < layers.length; i++) { + out_layers[i] = layers[i]; + hb_codepoint_t new_gid = 0; + if (unlikely (!c->plan->new_gid_for_old_gid (out_layers[i].glyphId, &new_gid))) + return hb_pair_t> (false, out_layers); + out_layers[i].glyphId = new_gid; + } + + return hb_pair_t> (true, out_layers); + }) + | hb_filter (hb_first) + | hb_map_retains_sorting (hb_second) + ; + + if (unlikely (!base_it || !layer_it || base_it.len () != layer_it.len ())) + return_trace (false); + + COLR *colr_prime = c->serializer->start_embed (); + return_trace (colr_prime->serialize (c->serializer, version, base_it, layer_it)); + } + protected: HBUINT16 version; /* Table version number (starts at 0). */ HBUINT16 numBaseGlyphs; /* Number of Base Glyph Records. */ - LNNOffsetTo > + LNNOffsetTo> baseGlyphsZ; /* Offset to Base Glyph records. */ - LNNOffsetTo > + LNNOffsetTo> layersZ; /* Offset to Layer Records. */ HBUINT16 numLayers; /* Number of Layer Records. */ public: diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cpal-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cpal-table.hh index f4ef69734178..f5f642d6bfac 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cpal-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-cpal-table.hh @@ -87,15 +87,15 @@ struct CPALV1Tail } protected: - LNNOffsetTo > + LNNOffsetTo> paletteFlagsZ; /* Offset from the beginning of CPAL table to * the Palette Type Array. Set to 0 if no array * is provided. */ - LNNOffsetTo > + LNNOffsetTo> paletteLabelsZ; /* Offset from the beginning of CPAL table to * the palette labels array. Set to 0 if no * array is provided. */ - LNNOffsetTo > + LNNOffsetTo> colorLabelsZ; /* Offset from the beginning of CPAL table to * the color labels array. Set to 0 * if no array is provided. 
*/ @@ -115,7 +115,7 @@ struct CPAL { return min_size + numPalettes * sizeof (colorRecordIndicesZ[0]); } unsigned int get_palette_count () const { return numPalettes; } - unsigned int get_color_count () const { return numColors; } + unsigned int get_color_count () const { return numColors; } hb_ot_color_palette_flags_t get_palette_flags (unsigned int palette_index) const { return v1 ().get_palette_flags (this, palette_index, numPalettes); } @@ -142,12 +142,9 @@ struct CPAL numColors); if (color_count) { - hb_array_t segment_colors = palette_colors.sub_array (start_offset, *color_count); - /* Always return numColors colors per palette even if it has out-of-bounds start index. */ - unsigned int count = MIN (MAX (numColors - start_offset, 0), *color_count); - *color_count = count; - for (unsigned int i = 0; i < count; i++) - colors[i] = segment_colors[i]; /* Bound-checked read. */ + + palette_colors.sub_array (start_offset, color_count) + | hb_sink (hb_array (colors, *color_count)) + ; } return numColors; } @@ -155,7 +152,7 @@ struct CPAL private: const CPALV1Tail& v1 () const { - if (version == 0) return Null(CPALV1Tail); + if (version == 0) return Null (CPALV1Tail); return StructAfter (*this); } @@ -176,7 +173,7 @@ struct CPAL HBUINT16 numPalettes; /* Number of palettes in the table. */ HBUINT16 numColorRecords; /* Total number of color records, combined for * all palettes. */ - LNNOffsetTo > + LNNOffsetTo> colorRecordsZ; /* Offset from the beginning of CPAL table to * the first ColorRecord. */ UnsizedArrayOf diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-sbix-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-sbix-table.hh index 5b89796d561b..27b935edbbcc 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-sbix-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-sbix-table.hh @@ -1,5 +1,6 @@ /* * Copyright © 2018 Ebrahim Byagowi + * Copyright © 2020 Google, Inc. * * This is part of HarfBuzz, a text shaping library. * @@ -20,12 +21,15 @@ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Google Author(s): Calder Kitagawa */ #ifndef HB_OT_COLOR_SBIX_TABLE_HH #define HB_OT_COLOR_SBIX_TABLE_HH #include "hb-open-type.hh" +#include "hb-ot-layout-common.hh" /* * sbix -- Standard Bitmap Graphics @@ -40,6 +44,20 @@ namespace OT { struct SBIXGlyph { + SBIXGlyph* copy (hb_serialize_context_t *c, unsigned int data_length) const + { + TRACE_SERIALIZE (this); + SBIXGlyph* new_glyph = c->start_embed (); + if (unlikely (!new_glyph)) return_trace (nullptr); + if (unlikely (!c->extend_min (new_glyph))) return_trace (nullptr); + + new_glyph->xOffset = xOffset; + new_glyph->yOffset = yOffset; + new_glyph->graphicType = graphicType; + data.copy (c, data_length); + return_trace (new_glyph); + } + HBINT16 xOffset; /* The horizontal (x-axis) offset from the left * edge of the graphic to the glyph’s origin. 
* That is, the x-coordinate of the point on the @@ -62,6 +80,9 @@ struct SBIXGlyph struct SBIXStrike { + static unsigned int get_size (unsigned num_glyphs) + { return min_size + num_glyphs * HBUINT32::static_size; } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -116,16 +137,59 @@ struct SBIXStrike return hb_blob_create_sub_blob (sbix_blob, glyph_offset, glyph_length); } + bool subset (hb_subset_context_t *c, unsigned int available_len) const + { + TRACE_SUBSET (this); + unsigned int num_output_glyphs = c->plan->num_output_glyphs (); + + auto* out = c->serializer->start_embed (); + if (unlikely (!out)) return_trace (false); + auto snap = c->serializer->snapshot (); + if (unlikely (!c->serializer->extend (*out, num_output_glyphs + 1))) return_trace (false); + out->ppem = ppem; + out->resolution = resolution; + HBUINT32 head; + head = get_size (num_output_glyphs + 1); + + bool has_glyphs = false; + for (unsigned new_gid = 0; new_gid < num_output_glyphs; new_gid++) + { + hb_codepoint_t old_gid; + if (!c->plan->old_gid_for_new_gid (new_gid, &old_gid) || + unlikely (imageOffsetsZ[old_gid].is_null () || + imageOffsetsZ[old_gid + 1].is_null () || + imageOffsetsZ[old_gid + 1] <= imageOffsetsZ[old_gid] || + imageOffsetsZ[old_gid + 1] - imageOffsetsZ[old_gid] <= SBIXGlyph::min_size) || + (unsigned int) imageOffsetsZ[old_gid + 1] > available_len) + { + out->imageOffsetsZ[new_gid] = head; + continue; + } + has_glyphs = true; + unsigned int delta = imageOffsetsZ[old_gid + 1] - imageOffsetsZ[old_gid]; + unsigned int glyph_data_length = delta - SBIXGlyph::min_size; + if (!(this+imageOffsetsZ[old_gid]).copy (c->serializer, glyph_data_length)) + return_trace (false); + out->imageOffsetsZ[new_gid] = head; + head += delta; + } + if (has_glyphs) + out->imageOffsetsZ[num_output_glyphs] = head; + else + c->serializer->revert (snap); + return_trace (has_glyphs); + } + public: HBUINT16 ppem; /* The PPEM size for which this strike was designed. */ HBUINT16 resolution; /* The device pixel density (in PPI) for which this * strike was designed. (E.g., 96 PPI, 192 PPI.) */ protected: - UnsizedArrayOf > + UnsizedArrayOf> imageOffsetsZ; /* Offset from the beginning of the strike data header * to bitmap data for an individual glyph ID. */ public: - DEFINE_SIZE_STATIC (8); + DEFINE_SIZE_ARRAY (4, imageOffsetsZ); }; struct sbix @@ -140,7 +204,7 @@ struct sbix { void init (hb_face_t *face) { - table = hb_sanitize_context_t().reference_table (face); + table = hb_sanitize_context_t ().reference_table (face); num_glyphs = face->get_num_glyphs (); } void fini () { table.destroy (); } @@ -173,9 +237,9 @@ struct sbix { unsigned count = table->strikes.len; if (unlikely (!count)) - return Null(SBIXStrike); + return Null (SBIXStrike); - unsigned int requested_ppem = MAX (font->x_ppem, font->y_ppem); + unsigned int requested_ppem = hb_max (font->x_ppem, font->y_ppem); if (!requested_ppem) requested_ppem = 1<<30; /* Choose largest strike. */ /* TODO Add DPI sensitivity as well? */ @@ -235,18 +299,25 @@ struct sbix const PNGHeader &png = *blob->as(); extents->x_bearing = x_offset; - extents->y_bearing = y_offset; + extents->y_bearing = png.IHDR.height + y_offset; extents->width = png.IHDR.width; - extents->height = png.IHDR.height; + extents->height = -1 * png.IHDR.height; /* Convert to font units. 
*/ if (strike_ppem) { - double scale = font->face->get_upem () / (double) strike_ppem; - extents->x_bearing = round (extents->x_bearing * scale); - extents->y_bearing = round (extents->y_bearing * scale); - extents->width = round (extents->width * scale); - extents->height = round (extents->height * scale); + float scale = font->face->get_upem () / (float) strike_ppem; + extents->x_bearing = font->em_scalef_x (extents->x_bearing * scale); + extents->y_bearing = font->em_scalef_y (extents->y_bearing * scale); + extents->width = font->em_scalef_x (extents->width * scale); + extents->height = font->em_scalef_y (extents->height * scale); + } + else + { + extents->x_bearing = font->em_scale_x (extents->x_bearing); + extents->y_bearing = font->em_scale_y (extents->y_bearing); + extents->width = font->em_scale_x (extents->width); + extents->height = font->em_scale_y (extents->height); } hb_blob_destroy (blob); @@ -268,6 +339,63 @@ struct sbix strikes.sanitize (c, this))); } + bool + add_strike (hb_subset_context_t *c, unsigned i) const + { + if (strikes[i].is_null () || c->source_blob->length < (unsigned) strikes[i]) + return false; + + return (this+strikes[i]).subset (c, c->source_blob->length - (unsigned) strikes[i]); + } + + bool serialize_strike_offsets (hb_subset_context_t *c) const + { + TRACE_SERIALIZE (this); + + auto *out = c->serializer->start_embed> (); + if (unlikely (!out)) return_trace (false); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + + hb_vector_t*> new_strikes; + hb_vector_t objidxs; + for (int i = strikes.len - 1; i >= 0; --i) + { + auto* o = out->serialize_append (c->serializer); + if (unlikely (!o)) return_trace (false); + *o = 0; + auto snap = c->serializer->snapshot (); + c->serializer->push (); + bool ret = add_strike (c, i); + if (!ret) + { + c->serializer->pop_discard (); + out->pop (); + c->serializer->revert (snap); + } + else + { + objidxs.push (c->serializer->pop_pack ()); + new_strikes.push (o); + } + } + for (unsigned int i = 0; i < new_strikes.length; ++i) + c->serializer->add_link (*new_strikes[i], objidxs[new_strikes.length - 1 - i]); + + return_trace (true); + } + + bool subset (hb_subset_context_t* c) const + { + TRACE_SUBSET (this); + + sbix *sbix_prime = c->serializer->start_embed (); + if (unlikely (!sbix_prime)) return_trace (false); + if (unlikely (!c->serializer->embed (this->version))) return_trace (false); + if (unlikely (!c->serializer->embed (this->flags))) return_trace (false); + + return_trace (serialize_strike_offsets (c)); + } + protected: HBUINT16 version; /* Table version number — set to 1 */ HBUINT16 flags; /* Bit 0: Set to 1. Bit 1: Draw outlines. diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-svg-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-svg-table.hh index eb0ba22debc1..ccf9ed3365c2 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-color-svg-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-color-svg-table.hh @@ -62,7 +62,7 @@ struct SVGDocumentIndexEntry * this index entry. */ HBUINT16 endGlyphID; /* The last glyph ID in the range described by * this index entry. Must be >= startGlyphID. */ - LNNOffsetTo > + LNNOffsetTo> svgDoc; /* Offset from the beginning of the SVG Document Index * to an SVG document. Must be non-zero. */ HBUINT32 svgDocLength; /* Length of the SVG document. 
@@ -80,7 +80,7 @@ struct SVG struct accelerator_t { void init (hb_face_t *face) - { table = hb_sanitize_context_t().reference_table (face); } + { table = hb_sanitize_context_t ().reference_table (face); } void fini () { table.destroy (); } hb_blob_t *reference_blob_for_glyph (hb_codepoint_t glyph_id) const @@ -107,7 +107,7 @@ struct SVG protected: HBUINT16 version; /* Table version (starting at 0). */ - LOffsetTo > + LOffsetTo> svgDocEntries; /* Offset (relative to the start of the SVG table) to the * SVG Documents Index. Must be non-zero. */ /* Array of SVG Document Index Entries. */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-color.cc b/src/java.desktop/share/native/libharfbuzz/hb-ot-color.cc index 84aeb96126e8..d37e134c08ba 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-color.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-color.cc @@ -25,20 +25,21 @@ * Google Author(s): Sascha Brawer, Behdad Esfahbod */ -#include "hb-open-type.hh" +#include "hb.hh" + +#ifndef HB_NO_COLOR + +#include "hb-ot.h" + #include "hb-ot-color-cbdt-table.hh" #include "hb-ot-color-colr-table.hh" #include "hb-ot-color-cpal-table.hh" #include "hb-ot-color-sbix-table.hh" #include "hb-ot-color-svg-table.hh" -#include "hb-ot-face.hh" -#include "hb-ot.h" #include #include -#include "hb-ot-layout.hh" - /** * SECTION:hb-ot-color @@ -47,6 +48,8 @@ * @include: hb-ot.h * * Functions for fetching color-font information from OpenType font faces. + * + * HarfBuzz supports `COLR`/`CPAL`, `sbix`, `CBDT`, and `SVG` color fonts. **/ @@ -57,9 +60,11 @@ /** * hb_ot_color_has_palettes: - * @face: a font face. + * @face: #hb_face_t to work upon + * + * Tests whether a face includes a `CPAL` color-palette table. * - * Returns: whether CPAL table is available. + * Return value: true if data found, false otherwise * * Since: 2.1.0 */ @@ -71,10 +76,11 @@ hb_ot_color_has_palettes (hb_face_t *face) /** * hb_ot_color_palette_get_count: - * @face: a font face. + * @face: #hb_face_t to work upon * - * Returns: the number of color palettes in @face, or zero if @face has - * no colors. + * Fetches the number of color palettes in a face. + * + * Return value: the number of palettes found * * Since: 2.1.0 */ @@ -86,13 +92,16 @@ hb_ot_color_palette_get_count (hb_face_t *face) /** * hb_ot_color_palette_get_name_id: - * @face: a font face. - * @palette_index: the index of the color palette whose name is being requested. + * @face: #hb_face_t to work upon + * @palette_index: The index of the color palette + * + * Fetches the `name` table Name ID that provides display names for + * a `CPAL` color palette. * - * Retrieves the name id of a color palette. For example, a color font can - * have themed palettes like "Spring", "Summer", "Fall", and "Winter". + * Palette display names can be generic (e.g., "Default") or provide + * specific, themed names (e.g., "Spring", "Summer", "Fall", and "Winter"). * - * Returns: an identifier within @face's `name` table. + * Return value: the Named ID found for the palette. * If the requested palette has no name the result is #HB_OT_NAME_ID_INVALID. * * Since: 2.1.0 @@ -106,10 +115,16 @@ hb_ot_color_palette_get_name_id (hb_face_t *face, /** * hb_ot_color_palette_color_get_name_id: - * @face: a font face. - * @color_index: palette entry index. + * @face: #hb_face_t to work upon + * @color_index: The index of the color * - * Returns: Name ID associated with a palette entry, e.g. 
eye color + * Fetches the `name` table Name ID that provides display names for + * the specificed color in a face's `CPAL` color palette. + * + * Display names can be generic (e.g., "Background") or specific + * (e.g., "Eye color"). + * + * Return value: the Name ID found for the color. * * Since: 2.1.0 */ @@ -122,10 +137,12 @@ hb_ot_color_palette_color_get_name_id (hb_face_t *face, /** * hb_ot_color_palette_get_flags: - * @face: a font face - * @palette_index: the index of the color palette whose flags are being requested + * @face: #hb_face_t to work upon + * @palette_index: The index of the color palette + * + * Fetches the flags defined for a color palette. * - * Returns: the flags for the requested color palette. + * Return value: the #hb_ot_color_palette_flags_t of the requested color palette * * Since: 2.1.0 */ @@ -138,25 +155,22 @@ hb_ot_color_palette_get_flags (hb_face_t *face, /** * hb_ot_color_palette_get_colors: - * @face: a font face. - * @palette_index:the index of the color palette whose colors - * are being requested. - * @start_offset: the index of the first color being requested. - * @color_count: (inout) (optional): on input, how many colors - * can be maximally stored into the @colors array; - * on output, how many colors were actually stored. - * @colors: (array length=color_count) (out) (optional): - * an array of #hb_color_t records. After calling - * this function, @colors will be filled with - * the palette colors. If @colors is NULL, the function - * will just return the number of total colors - * without storing any actual colors; this can be used - * for allocating a buffer of suitable size before calling - * hb_ot_color_palette_get_colors() a second time. - * - * Retrieves the colors in a color palette. - * - * Returns: the total number of colors in the palette. + * @face: #hb_face_t to work upon + * @palette_index: the index of the color palette to query + * @start_offset: offset of the first color to retrieve + * @color_count: (inout) (optional): Input = the maximum number of colors to return; + * Output = the actual number of colors returned (may be zero) + * @colors: (out) (array length=color_count) (nullable): The array of #hb_color_t records found + * + * Fetches a list of the colors in a color palette. + * + * After calling this function, @colors will be filled with the palette + * colors. If @colors is NULL, the function will just return the number + * of total colors without storing any actual colors; this can be used + * for allocating a buffer of suitable size before calling + * hb_ot_color_palette_get_colors() a second time. + * + * Return value: the total number of colors in the palette * * Since: 2.1.0 */ @@ -177,9 +191,11 @@ hb_ot_color_palette_get_colors (hb_face_t *face, /** * hb_ot_color_has_layers: - * @face: a font face. + * @face: #hb_face_t to work upon + * + * Tests whether a face includes any `COLR` color layers. * - * Returns: whether COLR table is available. + * Return value: true if data found, false otherwise * * Since: 2.1.0 */ @@ -191,14 +207,17 @@ hb_ot_color_has_layers (hb_face_t *face) /** * hb_ot_color_glyph_get_layers: - * @face: a font face. - * @glyph: a layered color glyph id. - * @start_offset: starting offset of layers. - * @count: (inout) (optional): gets number of layers available to be written on buffer - * and returns number of written layers. - * @layers: (array length=count) (out) (optional): layers buffer to buffer. 
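The documentation above spells out the two-call pattern for hb_ot_color_palette_get_colors(); here is a short sketch of that pattern, assuming palette index 0 and a caller-supplied face (the helper name dump_palette is illustrative):

```c++
#include <hb.h>
#include <hb-ot.h>
#include <cstdio>
#include <vector>

/* Fetch every color of palette 0 using the two-call pattern described above. */
static void
dump_palette (hb_face_t *face)
{
  if (!hb_ot_color_has_palettes (face)) return;

  /* Pass no buffer first to learn the palette size. */
  unsigned int total = hb_ot_color_palette_get_colors (face, 0, 0, nullptr, nullptr);
  std::vector<hb_color_t> colors (total);

  unsigned int count = total;
  hb_ot_color_palette_get_colors (face, 0, 0, &count, colors.data ());

  for (unsigned int i = 0; i < count; i++)
    printf ("color %u: #%02x%02x%02x alpha=%u\n", i,
            (unsigned) hb_color_get_red (colors[i]),
            (unsigned) hb_color_get_green (colors[i]),
            (unsigned) hb_color_get_blue (colors[i]),
            (unsigned) hb_color_get_alpha (colors[i]));
}
```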
+ * @face: #hb_face_t to work upon + * @glyph: The glyph index to query + * @start_offset: offset of the first layer to retrieve + * @layer_count: (inout) (optional): Input = the maximum number of layers to return; + * Output = the actual number of layers returned (may be zero) + * @layers: (out) (array length=layer_count) (nullable): The array of layers found + * + * Fetches a list of all color layers for the specified glyph index in the specified + * face. The list returned will begin at the offset provided. * - * Returns: Total number of layers a layered color glyph have. + * Return value: Total number of layers available for the glyph index queried * * Since: 2.1.0 */ @@ -206,10 +225,10 @@ unsigned int hb_ot_color_glyph_get_layers (hb_face_t *face, hb_codepoint_t glyph, unsigned int start_offset, - unsigned int *count, /* IN/OUT. May be NULL. */ + unsigned int *layer_count, /* IN/OUT. May be NULL. */ hb_ot_color_layer_t *layers /* OUT. May be NULL. */) { - return face->table.COLR->get_glyph_layers (glyph, start_offset, count, layers); + return face->table.COLR->get_glyph_layers (glyph, start_offset, layer_count, layers); } @@ -219,11 +238,11 @@ hb_ot_color_glyph_get_layers (hb_face_t *face, /** * hb_ot_color_has_svg: - * @face: a font face. + * @face: #hb_face_t to work upon. * - * Check whether @face has SVG glyph images. + * Tests whether a face includes any `SVG` glyph images. * - * Returns true if available, false otherwise. + * Return value: true if data found, false otherwise. * * Since: 2.1.0 */ @@ -235,12 +254,12 @@ hb_ot_color_has_svg (hb_face_t *face) /** * hb_ot_color_glyph_reference_svg: - * @face: a font face. - * @glyph: a svg glyph index. + * @face: #hb_face_t to work upon + * @glyph: a svg glyph index * - * Get SVG document for a glyph. The blob may be either plain text or gzip-encoded. + * Fetches the SVG document for a glyph. The blob may be either plain text or gzip-encoded. * - * Returns: (transfer full): respective svg blob of the glyph, if available. + * Return value: (transfer full): An #hb_blob_t containing the SVG document of the glyph, if available * * Since: 2.1.0 */ @@ -257,11 +276,11 @@ hb_ot_color_glyph_reference_svg (hb_face_t *face, hb_codepoint_t glyph) /** * hb_ot_color_has_png: - * @face: a font face. + * @face: #hb_face_t to work upon * - * Check whether @face has PNG glyph images (either CBDT or sbix tables). + * Tests whether a face has PNG glyph images (either in `CBDT` or `sbix` tables). * - * Returns true if available, false otherwise. + * Return value: true if data found, false otherwise * * Since: 2.1.0 */ @@ -273,14 +292,14 @@ hb_ot_color_has_png (hb_face_t *face) /** * hb_ot_color_glyph_reference_png: - * @font: a font object, not face. upem should be set on - * that font object if one wants to get optimal png blob, otherwise - * return the biggest one - * @glyph: a glyph index. + * @font: #hb_font_t to work upon + * @glyph: a glyph index * - * Get PNG image for a glyph. + * Fetches the PNG image for a glyph. This function takes a font object, not a face object, + * as input. To get an optimally sized PNG blob, the UPEM value must be set on the @font + * object. If UPEM is unset, the blob returned will be the largest PNG available. * - * Returns: (transfer full): respective PNG blob of the glyph, if available. 
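hb_ot_color_glyph_reference_png() takes a font rather than a face so that the ppem set on the font can steer strike selection. A hedged sketch of typical use follows (glyph_png and its ppem handling are illustrative, not part of the patch):

```c++
#include <hb.h>
#include <hb-ot.h>

/* Fetch the PNG image for a glyph; ppem on the font steers strike selection. */
static hb_blob_t *
glyph_png (hb_face_t *face, hb_codepoint_t glyph, unsigned int ppem)
{
  if (!hb_ot_color_has_png (face))
    return hb_blob_get_empty ();

  hb_font_t *font = hb_font_create (face);
  hb_font_set_ppem (font, ppem, ppem);   /* leaving this 0 selects the largest strike */

  hb_blob_t *png = hb_ot_color_glyph_reference_png (font, glyph); /* transfer full */
  hb_font_destroy (font);
  return png;   /* caller releases it with hb_blob_destroy () */
}
```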
+ * Return value: (transfer full): An #hb_blob_t containing the PNG image for the glyph, if available * * Since: 2.1.0 */ @@ -297,3 +316,6 @@ hb_ot_color_glyph_reference_png (hb_font_t *font, hb_codepoint_t glyph) return blob; } + + +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-color.h b/src/java.desktop/share/native/libharfbuzz/hb-ot-color.h index 5736890fbe53..593447568dd4 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-color.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-color.h @@ -59,11 +59,11 @@ hb_ot_color_palette_color_get_name_id (hb_face_t *face, /** * hb_ot_color_palette_flags_t: - * @HB_OT_COLOR_PALETTE_FLAG_DEFAULT: default indicating that there is nothing special + * @HB_OT_COLOR_PALETTE_FLAG_DEFAULT: Default indicating that there is nothing special * to note about a color palette. - * @HB_OT_COLOR_PALETTE_FLAG_USABLE_WITH_LIGHT_BACKGROUND: flag indicating that the color + * @HB_OT_COLOR_PALETTE_FLAG_USABLE_WITH_LIGHT_BACKGROUND: Flag indicating that the color * palette is appropriate to use when displaying the font on a light background such as white. - * @HB_OT_COLOR_PALETTE_FLAG_USABLE_WITH_DARK_BACKGROUND: flag indicating that the color + * @HB_OT_COLOR_PALETTE_FLAG_USABLE_WITH_DARK_BACKGROUND: Flag indicating that the color * palette is appropriate to use when displaying the font on a dark background such as black. * * Since: 2.1.0 @@ -110,7 +110,7 @@ HB_EXTERN unsigned int hb_ot_color_glyph_get_layers (hb_face_t *face, hb_codepoint_t glyph, unsigned int start_offset, - unsigned int *count, /* IN/OUT. May be NULL. */ + unsigned int *layer_count, /* IN/OUT. May be NULL. */ hb_ot_color_layer_t *layers /* OUT. May be NULL. */); /* diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-deprecated.h b/src/java.desktop/share/native/libharfbuzz/hb-ot-deprecated.h index 2a31b3206713..4fdb2b36ab9b 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-deprecated.h +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-deprecated.h @@ -40,6 +40,10 @@ HB_BEGIN_DECLS #ifndef HB_DISABLE_DEPRECATED +/* https://github.com/harfbuzz/harfbuzz/issues/1734 */ +#define HB_MATH_GLYPH_PART_FLAG_EXTENDER HB_OT_MATH_GLYPH_PART_FLAG_EXTENDER + + /* Like hb_ot_layout_table_find_script, but takes zero-terminated array of scripts to test */ HB_EXTERN HB_DEPRECATED_FOR (hb_ot_layout_table_select_script) hb_bool_t hb_ot_layout_table_choose_script (hb_face_t *face, diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-face-table-list.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-face-table-list.hh new file mode 100644 index 000000000000..367e143fdfb9 --- /dev/null +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-face-table-list.hh @@ -0,0 +1,138 @@ +/* + * Copyright © 2007,2008,2009 Red Hat, Inc. + * Copyright © 2012,2013 Google, Inc. + * Copyright © 2019, Facebook Inc. + * + * This is part of HarfBuzz, a text shaping library. + * + * Permission is hereby granted, without written agreement and without + * license or royalty fees, to use, copy, modify, and distribute this + * software and its documentation for any purpose, provided that the + * above copyright notice and the following two paragraphs appear in + * all copies of this software. 
+ * + * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR + * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN + * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + * + * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, + * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS + * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO + * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + * + * Red Hat Author(s): Behdad Esfahbod + * Google Author(s): Behdad Esfahbod + * Facebook Author(s): Behdad Esfahbod + */ + +#ifndef HB_OT_FACE_TABLE_LIST_HH +#define HB_OT_FACE_TABLE_LIST_HH +#endif /* HB_OT_FACE_TABLE_LIST_HH */ /* Dummy header guards */ + +#ifndef HB_OT_ACCELERATOR +#define HB_OT_ACCELERATOR(Namespace, Type) HB_OT_TABLE (Namespace, Type) +#define _HB_OT_ACCELERATOR_UNDEF +#endif + + +/* This lists font tables that the hb_face_t will contain and lazily + * load. Don't add a table unless it's used though. This is not + * exactly free. */ + +/* v--- Add new tables in the right place here. */ + + +/* OpenType fundamentals. */ +HB_OT_TABLE (OT, head) +#if !defined(HB_NO_FACE_COLLECT_UNICODES) || !defined(HB_NO_OT_FONT) +HB_OT_ACCELERATOR (OT, cmap) +#endif +HB_OT_TABLE (OT, hhea) +HB_OT_ACCELERATOR (OT, hmtx) +HB_OT_TABLE (OT, OS2) +#if !defined(HB_NO_OT_FONT_GLYPH_NAMES) || !defined(HB_NO_METRICS) || !defined(HB_NO_STYLE) +HB_OT_ACCELERATOR (OT, post) +#endif +#ifndef HB_NO_NAME +HB_OT_ACCELERATOR (OT, name) +#endif +#ifndef HB_NO_STYLE +HB_OT_TABLE (OT, STAT) +#endif +#ifndef HB_NO_META +HB_OT_ACCELERATOR (OT, meta) +#endif + +/* Vertical layout. */ +HB_OT_TABLE (OT, vhea) +HB_OT_ACCELERATOR (OT, vmtx) + +/* TrueType outlines. */ +HB_OT_ACCELERATOR (OT, glyf) + +/* CFF outlines. */ +#ifndef HB_NO_CFF +HB_OT_ACCELERATOR (OT, cff1) +HB_OT_ACCELERATOR (OT, cff2) +HB_OT_TABLE (OT, VORG) +#endif + +/* OpenType variations. */ +#ifndef HB_NO_VAR +HB_OT_TABLE (OT, fvar) +HB_OT_TABLE (OT, avar) +HB_OT_ACCELERATOR (OT, gvar) +HB_OT_TABLE (OT, MVAR) +#endif + +/* Legacy kern. */ +#ifndef HB_NO_OT_KERN +HB_OT_TABLE (OT, kern) +#endif + +/* OpenType shaping. */ +#ifndef HB_NO_OT_LAYOUT +HB_OT_ACCELERATOR (OT, GDEF) +HB_OT_ACCELERATOR (OT, GSUB) +HB_OT_ACCELERATOR (OT, GPOS) +//HB_OT_TABLE (OT, JSTF) +#endif + +/* OpenType baseline. */ +#ifndef HB_NO_BASE +HB_OT_TABLE (OT, BASE) +#endif + +/* AAT shaping. */ +#ifndef HB_NO_AAT +HB_OT_TABLE (AAT, morx) +HB_OT_TABLE (AAT, mort) +HB_OT_TABLE (AAT, kerx) +HB_OT_TABLE (AAT, ankr) +HB_OT_TABLE (AAT, trak) +HB_OT_TABLE (AAT, ltag) +HB_OT_TABLE (AAT, feat) +// HB_OT_TABLE (AAT, opbd) +#endif + +/* OpenType color fonts. */ +#ifndef HB_NO_COLOR +HB_OT_TABLE (OT, COLR) +HB_OT_TABLE (OT, CPAL) +HB_OT_ACCELERATOR (OT, CBDT) +HB_OT_ACCELERATOR (OT, sbix) +HB_OT_ACCELERATOR (OT, SVG) +#endif + +/* OpenType math. 
*/ +#ifndef HB_NO_MATH +HB_OT_TABLE (OT, MATH) +#endif + + +#ifdef _HB_OT_ACCELERATOR_UNDEF +#undef HB_OT_ACCELERATOR +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-face.cc b/src/java.desktop/share/native/libharfbuzz/hb-ot-face.cc index 9b17526b7e40..5ef8df43ce7c 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-face.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-face.cc @@ -32,6 +32,7 @@ #include "hb-ot-cff2-table.hh" #include "hb-ot-hmtx-table.hh" #include "hb-ot-kern-table.hh" +#include "hb-ot-meta-table.hh" #include "hb-ot-name-table.hh" #include "hb-ot-post-table.hh" #include "hb-ot-color-cbdt-table.hh" @@ -46,16 +47,12 @@ void hb_ot_face_t::init0 (hb_face_t *face) { this->face = face; #define HB_OT_TABLE(Namespace, Type) Type.init0 (); -#define HB_OT_ACCELERATOR(Namespace, Type) HB_OT_TABLE (Namespace, Type) - HB_OT_TABLES -#undef HB_OT_ACCELERATOR +#include "hb-ot-face-table-list.hh" #undef HB_OT_TABLE } void hb_ot_face_t::fini () { #define HB_OT_TABLE(Namespace, Type) Type.fini (); -#define HB_OT_ACCELERATOR(Namespace, Type) HB_OT_TABLE (Namespace, Type) - HB_OT_TABLES -#undef HB_OT_ACCELERATOR +#include "hb-ot-face-table-list.hh" #undef HB_OT_TABLE } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-face.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-face.hh index 7f47ba6cb8ed..e24d380bca8c 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-face.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-face.hh @@ -38,54 +38,10 @@ * hb_ot_face_t */ -#define HB_OT_TABLES \ - /* OpenType fundamentals. */ \ - HB_OT_TABLE(OT, head) \ - HB_OT_ACCELERATOR(OT, cmap) \ - HB_OT_ACCELERATOR(OT, hmtx) \ - HB_OT_ACCELERATOR(OT, vmtx) \ - HB_OT_ACCELERATOR(OT, post) \ - HB_OT_TABLE(OT, kern) \ - HB_OT_ACCELERATOR(OT, glyf) \ - HB_OT_ACCELERATOR(OT, cff1) \ - HB_OT_ACCELERATOR(OT, cff2) \ - HB_OT_TABLE(OT, VORG) \ - HB_OT_ACCELERATOR(OT, name) \ - HB_OT_TABLE(OT, OS2) \ - HB_OT_TABLE(OT, STAT) \ - /* OpenType shaping. */ \ - HB_OT_ACCELERATOR(OT, GDEF) \ - HB_OT_ACCELERATOR(OT, GSUB) \ - HB_OT_ACCELERATOR(OT, GPOS) \ - HB_OT_TABLE(OT, BASE) \ - HB_OT_TABLE(OT, JSTF) \ - /* AAT shaping. */ \ - HB_OT_TABLE(AAT, mort) \ - HB_OT_TABLE(AAT, morx) \ - HB_OT_TABLE(AAT, kerx) \ - HB_OT_TABLE(AAT, ankr) \ - HB_OT_TABLE(AAT, trak) \ - HB_OT_TABLE(AAT, lcar) \ - HB_OT_TABLE(AAT, ltag) \ - HB_OT_TABLE(AAT, feat) \ - /* OpenType variations. */ \ - HB_OT_TABLE(OT, fvar) \ - HB_OT_TABLE(OT, avar) \ - HB_OT_TABLE(OT, MVAR) \ - /* OpenType math. */ \ - HB_OT_TABLE(OT, MATH) \ - /* OpenType color fonts. */ \ - HB_OT_TABLE(OT, COLR) \ - HB_OT_TABLE(OT, CPAL) \ - HB_OT_ACCELERATOR(OT, CBDT) \ - HB_OT_ACCELERATOR(OT, sbix) \ - HB_OT_ACCELERATOR(OT, SVG) \ - /* */ - /* Declare tables. 
*/ #define HB_OT_TABLE(Namespace, Type) namespace Namespace { struct Type; } #define HB_OT_ACCELERATOR(Namespace, Type) HB_OT_TABLE (Namespace, Type##_accelerator_t) -HB_OT_TABLES +#include "hb-ot-face-table-list.hh" #undef HB_OT_ACCELERATOR #undef HB_OT_TABLE @@ -100,9 +56,7 @@ struct hb_ot_face_t { ORDER_ZERO, #define HB_OT_TABLE(Namespace, Type) HB_OT_TABLE_ORDER (Namespace, Type), -#define HB_OT_ACCELERATOR(Namespace, Type) HB_OT_TABLE (Namespace, Type) - HB_OT_TABLES -#undef HB_OT_ACCELERATOR +#include "hb-ot-face-table-list.hh" #undef HB_OT_TABLE }; @@ -111,7 +65,7 @@ struct hb_ot_face_t hb_table_lazy_loader_t Type; #define HB_OT_ACCELERATOR(Namespace, Type) \ hb_face_lazy_loader_t Type; - HB_OT_TABLES +#include "hb-ot-face-table-list.hh" #undef HB_OT_ACCELERATOR #undef HB_OT_TABLE }; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-font.cc b/src/java.desktop/share/native/libharfbuzz/hb-ot-font.cc index b290c4977626..f28de2af628f 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-font.cc +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-font.cc @@ -26,6 +26,8 @@ #include "hb.hh" +#ifndef HB_NO_OT_FONT + #include "hb-ot.h" #include "hb-font.hh" @@ -37,7 +39,6 @@ #include "hb-ot-cff1-table.hh" #include "hb-ot-cff2-table.hh" #include "hb-ot-hmtx-table.hh" -#include "hb-ot-kern-table.hh" #include "hb-ot-os2-table.hh" #include "hb-ot-post-table.hh" #include "hb-ot-stat-table.hh" // Just so we compile it; unused otherwise. @@ -52,7 +53,7 @@ * @short_description: OpenType font implementation * @include: hb-ot.h * - * Functions for using OpenType fonts with hb_shape(). Not that fonts returned + * Functions for using OpenType fonts with hb_shape(). Note that fonts returned * by hb_font_create() default to using these functions, so most clients would * never need to call these functions directly. 
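The hunks above replace the monolithic HB_OT_TABLES macro with an #include-able table list that each consumer expands with its own definition of HB_OT_TABLE. Below is a single-file analogue of that X-macro pattern, using hypothetical DEMO_* names rather than the real HarfBuzz macros:

```c++
/* One authoritative list, expanded differently by each consumer that
 * defines the macro before expanding it (hypothetical names). */
#define DEMO_TABLE_LIST \
  DEMO_TABLE (OT, head) \
  DEMO_TABLE (OT, hmtx) \
  DEMO_TABLE (OT, GSUB)

/* Expansion 1: forward-declare one struct per table. */
#define DEMO_TABLE(Namespace, Type) namespace Namespace { struct Type; }
DEMO_TABLE_LIST
#undef DEMO_TABLE

/* Expansion 2: an ordering enum, as hb_ot_face_t builds for lazy loading. */
enum demo_table_order_t
{
#define DEMO_TABLE(Namespace, Type) DEMO_ORDER_##Type,
  DEMO_TABLE_LIST
#undef DEMO_TABLE
  DEMO_TABLE_COUNT
};
```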
**/ @@ -149,19 +150,21 @@ hb_ot_get_glyph_v_origin (hb_font_t *font, *x = font->get_glyph_h_advance (glyph) / 2; +#ifndef HB_NO_OT_FONT_CFF const OT::VORG &VORG = *ot_face->VORG; if (VORG.has_data ()) { *y = font->em_scale_y (VORG.get_y_origin (glyph)); return true; } +#endif hb_glyph_extents_t extents = {0}; - if (ot_face->glyf->get_extents (glyph, &extents)) + if (ot_face->glyf->get_extents (font, glyph, &extents)) { const OT::vmtx_accelerator_t &vmtx = *ot_face->vmtx; - hb_position_t tsb = vmtx.get_side_bearing (glyph); - *y = font->em_scale_y (extents.y_bearing + tsb); + hb_position_t tsb = vmtx.get_side_bearing (font, glyph); + *y = extents.y_bearing + font->em_scale_y (tsb); return true; } @@ -180,23 +183,24 @@ hb_ot_get_glyph_extents (hb_font_t *font, void *user_data HB_UNUSED) { const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data; - bool ret = ot_face->sbix->get_extents (font, glyph, extents); - if (!ret) - ret = ot_face->glyf->get_extents (glyph, extents); - if (!ret) - ret = ot_face->cff1->get_extents (glyph, extents); - if (!ret) - ret = ot_face->cff2->get_extents (font, glyph, extents); - if (!ret) - ret = ot_face->CBDT->get_extents (font, glyph, extents); + +#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR) + if (ot_face->sbix->get_extents (font, glyph, extents)) return true; +#endif + if (ot_face->glyf->get_extents (font, glyph, extents)) return true; +#ifndef HB_NO_OT_FONT_CFF + if (ot_face->cff1->get_extents (font, glyph, extents)) return true; + if (ot_face->cff2->get_extents (font, glyph, extents)) return true; +#endif +#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR) + if (ot_face->CBDT->get_extents (font, glyph, extents)) return true; +#endif + // TODO Hook up side-bearings variations. - extents->x_bearing = font->em_scale_x (extents->x_bearing); - extents->y_bearing = font->em_scale_y (extents->y_bearing); - extents->width = font->em_scale_x (extents->width); - extents->height = font->em_scale_y (extents->height); - return ret; + return false; } +#ifndef HB_NO_OT_FONT_GLYPH_NAMES static hb_bool_t hb_ot_get_glyph_name (hb_font_t *font HB_UNUSED, void *font_data, @@ -205,9 +209,12 @@ hb_ot_get_glyph_name (hb_font_t *font HB_UNUSED, void *user_data HB_UNUSED) { const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data; - return ot_face->post->get_glyph_name (glyph, name, size); + if (ot_face->post->get_glyph_name (glyph, name, size)) return true; +#ifndef HB_NO_OT_FONT_CFF + if (ot_face->cff1->get_glyph_name (glyph, name, size)) return true; +#endif + return false; } - static hb_bool_t hb_ot_get_glyph_from_name (hb_font_t *font HB_UNUSED, void *font_data, @@ -216,37 +223,34 @@ hb_ot_get_glyph_from_name (hb_font_t *font HB_UNUSED, void *user_data HB_UNUSED) { const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data; - return ot_face->post->get_glyph_from_name (name, len, glyph); + if (ot_face->post->get_glyph_from_name (name, len, glyph)) return true; +#ifndef HB_NO_OT_FONT_CFF + if (ot_face->cff1->get_glyph_from_name (name, len, glyph)) return true; +#endif + return false; } +#endif static hb_bool_t hb_ot_get_font_h_extents (hb_font_t *font, - void *font_data, + void *font_data HB_UNUSED, hb_font_extents_t *metrics, void *user_data HB_UNUSED) { - const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data; - const OT::hmtx_accelerator_t &hmtx = *ot_face->hmtx; - metrics->ascender = font->em_scale_y (hmtx.ascender); - metrics->descender = font->em_scale_y (hmtx.descender); - metrics->line_gap = font->em_scale_y (hmtx.line_gap); 
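With this change the horizontal and vertical font extents are resolved through the metrics machinery instead of reading hmtx/vmtx fields directly. Externally the same values remain reachable via the public API; a small sketch, assuming a ready hb_font_t (print_extents is an illustrative helper):

```c++
#include <hb.h>
#include <hb-ot.h>
#include <cstdio>

/* Font-wide extents through the public API; both routes end up in the
 * metrics code that the rewritten callbacks rely on. */
static void
print_extents (hb_font_t *font)
{
  hb_font_extents_t extents;
  if (hb_font_get_h_extents (font, &extents))
    printf ("ascender=%d descender=%d line_gap=%d\n",
            (int) extents.ascender, (int) extents.descender, (int) extents.line_gap);

  hb_position_t ascender;
  if (hb_ot_metrics_get_position (font, HB_OT_METRICS_TAG_HORIZONTAL_ASCENDER, &ascender))
    printf ("horizontal ascender: %d\n", (int) ascender);
}
```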
- // TODO Hook up variations. - return hmtx.has_font_extents; + return _hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_HORIZONTAL_ASCENDER, &metrics->ascender) && + _hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_HORIZONTAL_DESCENDER, &metrics->descender) && + _hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_HORIZONTAL_LINE_GAP, &metrics->line_gap); } static hb_bool_t hb_ot_get_font_v_extents (hb_font_t *font, - void *font_data, + void *font_data HB_UNUSED, hb_font_extents_t *metrics, void *user_data HB_UNUSED) { - const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data; - const OT::vmtx_accelerator_t &vmtx = *ot_face->vmtx; - metrics->ascender = font->em_scale_x (vmtx.ascender); - metrics->descender = font->em_scale_x (vmtx.descender); - metrics->line_gap = font->em_scale_x (vmtx.line_gap); - // TODO Hook up variations. - return vmtx.has_font_extents; + return _hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_VERTICAL_ASCENDER, &metrics->ascender) && + _hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_VERTICAL_DESCENDER, &metrics->descender) && + _hb_ot_metrics_get_position_common (font, HB_OT_METRICS_TAG_VERTICAL_LINE_GAP, &metrics->line_gap); } #if HB_USE_ATEXIT @@ -270,8 +274,10 @@ static struct hb_ot_font_funcs_lazy_loader_t : hb_font_funcs_lazy_loader_tface->table, nullptr); } + +#ifndef HB_NO_VAR +int +_glyf_get_side_bearing_var (hb_font_t *font, hb_codepoint_t glyph, bool is_vertical) +{ + return font->face->table.glyf->get_side_bearing_var (font, glyph, is_vertical); +} + +unsigned +_glyf_get_advance_var (hb_font_t *font, hb_codepoint_t glyph, bool is_vertical) +{ + return font->face->table.glyf->get_advance_var (font, glyph, is_vertical); +} +#endif + + +#endif diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-glyf-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-glyf-table.hh index 252d0b4eb1a4..cd95828e2f5a 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-glyf-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-glyf-table.hh @@ -1,5 +1,7 @@ /* * Copyright © 2015 Google, Inc. + * Copyright © 2019 Adobe Inc. + * Copyright © 2019 Ebrahim Byagowi * * This is part of HarfBuzz, a text shaping library. * @@ -21,7 +23,8 @@ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. * - * Google Author(s): Behdad Esfahbod + * Google Author(s): Behdad Esfahbod, Garret Rieger, Roderick Sheeter + * Adobe Author(s): Michiharu Ariza */ #ifndef HB_OT_GLYF_TABLE_HH @@ -29,7 +32,9 @@ #include "hb-open-type.hh" #include "hb-ot-head-table.hh" -#include "hb-subset-glyf.hh" +#include "hb-ot-hmtx-table.hh" +#include "hb-ot-var-gvar-table.hh" +#include "hb-draw.hh" namespace OT { @@ -54,11 +59,12 @@ struct loca } protected: - UnsizedArrayOf dataZ; /* Location data. */ + UnsizedArrayOf + dataZ; /* Location data. */ public: - DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always - * check the size externally, allow Null() object of it by - * defining it MIN() instead. */ + DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always + * check the size externally, allow Null() object of it by + * defining it _MIN instead. */ }; @@ -76,29 +82,143 @@ struct glyf bool sanitize (hb_sanitize_context_t *c HB_UNUSED) const { TRACE_SANITIZE (this); - /* We don't check for anything specific here. The users of the - * struct do all the hard work... 
*/ + /* Runtime checks as eager sanitizing each glyph is costy */ return_trace (true); } - bool subset (hb_subset_plan_t *plan) const + template + static bool + _add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets) + { + unsigned max_offset = + + padded_offsets + | hb_reduce (hb_add, 0) + ; + unsigned num_offsets = padded_offsets.len () + 1; + bool use_short_loca = max_offset < 0x1FFFF; + unsigned entry_size = use_short_loca ? 2 : 4; + char *loca_prime_data = (char *) calloc (entry_size, num_offsets); + + if (unlikely (!loca_prime_data)) return false; + + DEBUG_MSG (SUBSET, nullptr, "loca entry_size %d num_offsets %d " + "max_offset %d size %d", + entry_size, num_offsets, max_offset, entry_size * num_offsets); + + if (use_short_loca) + _write_loca (padded_offsets, 1, hb_array ((HBUINT16 *) loca_prime_data, num_offsets)); + else + _write_loca (padded_offsets, 0, hb_array ((HBUINT32 *) loca_prime_data, num_offsets)); + + hb_blob_t *loca_blob = hb_blob_create (loca_prime_data, + entry_size * num_offsets, + HB_MEMORY_MODE_WRITABLE, + loca_prime_data, + free); + + bool result = plan->add_table (HB_OT_TAG_loca, loca_blob) + && _add_head_and_set_loca_version (plan, use_short_loca); + + hb_blob_destroy (loca_blob); + return result; + } + + template + static void + _write_loca (IteratorIn it, unsigned right_shift, IteratorOut dest) { - hb_blob_t *glyf_prime = nullptr; - hb_blob_t *loca_prime = nullptr; - - bool success = true; - bool use_short_loca = false; - if (hb_subset_glyf_and_loca (plan, &use_short_loca, &glyf_prime, &loca_prime)) { - success = success && plan->add_table (HB_OT_TAG_glyf, glyf_prime); - success = success && plan->add_table (HB_OT_TAG_loca, loca_prime); - success = success && _add_head_and_set_loca_version (plan, use_short_loca); - } else { - success = false; + unsigned int offset = 0; + dest << 0; + + it + | hb_map ([=, &offset] (unsigned int padded_size) + { + offset += padded_size; + DEBUG_MSG (SUBSET, nullptr, "loca entry offset %d", offset); + return offset >> right_shift; + }) + | hb_sink (dest) + ; + } + + /* requires source of SubsetGlyph complains the identifier isn't declared */ + template + bool serialize (hb_serialize_context_t *c, + Iterator it, + const hb_subset_plan_t *plan) + { + TRACE_SERIALIZE (this); + unsigned init_len = c->length (); + for (const auto &_ : it) _.serialize (c, plan); + + /* As a special case when all glyph in the font are empty, add a zero byte + * to the table, so that OTS doesn’t reject it, and to make the table work + * on Windows as well. 
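_add_loca_and_head()/_write_loca() above choose between the short and long loca format and emit running offsets. The following standalone sketch shows just that encoding rule, with hypothetical demo_loca/build_loca names and plain std::vector storage in place of the serializer:

```c++
#include <cstdint>
#include <vector>

/* Short-format loca stores halved offsets in 16 bits when the total size
 * stays under 0x1FFFF; otherwise raw offsets are stored in 32 bits. */
struct demo_loca
{
  bool use_short = false;
  std::vector<uint16_t> short_entries;   /* offset >> 1 */
  std::vector<uint32_t> long_entries;    /* offset */
};

static demo_loca
build_loca (const std::vector<unsigned> &padded_glyph_sizes)
{
  unsigned total = 0;
  for (unsigned size : padded_glyph_sizes) total += size;

  demo_loca loca;
  loca.use_short = total < 0x1FFFFu;

  unsigned offset = 0;
  auto emit = [&] (unsigned o)
  {
    if (loca.use_short) loca.short_entries.push_back ((uint16_t) (o >> 1));
    else                loca.long_entries.push_back (o);
  };
  emit (0);                               /* loca always starts with offset 0 */
  for (unsigned size : padded_glyph_sizes)
  {
    offset += size;
    emit (offset);                        /* num_glyphs + 1 entries in total */
  }
  return loca;
}
```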
+ * See https://github.com/khaledhosny/ots/issues/52 */ + if (init_len == c->length ()) + { + HBUINT8 empty_byte; + empty_byte = 0; + c->copy (empty_byte); } - hb_blob_destroy (loca_prime); - hb_blob_destroy (glyf_prime); + return_trace (true); + } - return success; + /* Byte region(s) per glyph to output + unpadded, hints removed if so requested + If we fail to process a glyph we produce an empty (0-length) glyph */ + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + + glyf *glyf_prime = c->serializer->start_embed (); + if (unlikely (!c->serializer->check_success (glyf_prime))) return_trace (false); + + hb_vector_t glyphs; + _populate_subset_glyphs (c->plan, &glyphs); + + glyf_prime->serialize (c->serializer, hb_iter (glyphs), c->plan); + + auto padded_offsets = + + hb_iter (glyphs) + | hb_map (&SubsetGlyph::padded_size) + ; + + if (c->serializer->in_error ()) return_trace (false); + return_trace (c->serializer->check_success (_add_loca_and_head (c->plan, + padded_offsets))); + } + + template + void + _populate_subset_glyphs (const hb_subset_plan_t *plan, + hb_vector_t *glyphs /* OUT */) const + { + OT::glyf::accelerator_t glyf; + glyf.init (plan->source); + + + hb_range (plan->num_output_glyphs ()) + | hb_map ([&] (hb_codepoint_t new_gid) + { + SubsetGlyph subset_glyph = {0}; + subset_glyph.new_gid = new_gid; + + /* should never fail: all old gids should be mapped */ + if (!plan->old_gid_for_new_gid (new_gid, &subset_glyph.old_gid)) + return subset_glyph; + + subset_glyph.source_glyph = glyf.glyph_for_gid (subset_glyph.old_gid, true); + if (plan->drop_hints) subset_glyph.drop_hints_bytes (); + else subset_glyph.dest_start = subset_glyph.source_glyph.get_bytes (); + + return subset_glyph; + }) + | hb_sink (glyphs) + ; + + glyf.fini (); } static bool @@ -112,218 +232,292 @@ struct glyf return false; head *head_prime = (head *) hb_blob_get_data_writable (head_prime_blob, nullptr); - head_prime->indexToLocFormat.set (use_short_loca ? 0 : 1); + head_prime->indexToLocFormat = use_short_loca ? 0 : 1; bool success = plan->add_table (HB_OT_TAG_head, head_prime_blob); hb_blob_destroy (head_prime_blob); return success; } - struct GlyphHeader - { - HBINT16 numberOfContours; /* If the number of contours is - * greater than or equal to zero, - * this is a simple glyph; if negative, - * this is a composite glyph. */ - FWORD xMin; /* Minimum x for coordinate data. */ - FWORD yMin; /* Minimum y for coordinate data. */ - FWORD xMax; /* Maximum x for coordinate data. */ - FWORD yMax; /* Maximum y for coordinate data. 
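CompositeGlyphChain::get_size() above sizes each component record from its flags. The same arithmetic, extracted into a self-contained sketch (the names and the reduced flag set are illustrative):

```c++
#include <cstdint>

/* Every composite component starts with flags + glyphIndex (4 bytes) and
 * grows with the argument and transform variants selected by the flags. */
enum : uint16_t
{
  ARG_1_AND_2_ARE_WORDS    = 0x0001,
  WE_HAVE_A_SCALE          = 0x0008,
  WE_HAVE_AN_X_AND_Y_SCALE = 0x0040,
  WE_HAVE_A_TWO_BY_TWO     = 0x0080,
};

static unsigned
composite_record_size (uint16_t flags)
{
  unsigned size = 4;                                  /* flags + glyphIndex */
  size += (flags & ARG_1_AND_2_ARE_WORDS) ? 4 : 2;    /* arg1, arg2 */
  if      (flags & WE_HAVE_A_SCALE)          size += 2;   /* scale */
  else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) size += 4;   /* xscale, yscale */
  else if (flags & WE_HAVE_A_TWO_BY_TWO)     size += 8;   /* 2x2 matrix */
  return size;
}
```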
*/ - - DEFINE_SIZE_STATIC (10); - }; - - struct CompositeGlyphHeader + struct CompositeGlyphChain { - enum composite_glyph_flag_t { - ARG_1_AND_2_ARE_WORDS = 0x0001, - ARGS_ARE_XY_VALUES = 0x0002, - ROUND_XY_TO_GRID = 0x0004, - WE_HAVE_A_SCALE = 0x0008, - MORE_COMPONENTS = 0x0020, - WE_HAVE_AN_X_AND_Y_SCALE = 0x0040, - WE_HAVE_A_TWO_BY_TWO = 0x0080, - WE_HAVE_INSTRUCTIONS = 0x0100, - USE_MY_METRICS = 0x0200, - OVERLAP_COMPOUND = 0x0400, - SCALED_COMPONENT_OFFSET = 0x0800, - UNSCALED_COMPONENT_OFFSET = 0x1000 + protected: + enum composite_glyph_flag_t + { + ARG_1_AND_2_ARE_WORDS = 0x0001, + ARGS_ARE_XY_VALUES = 0x0002, + ROUND_XY_TO_GRID = 0x0004, + WE_HAVE_A_SCALE = 0x0008, + MORE_COMPONENTS = 0x0020, + WE_HAVE_AN_X_AND_Y_SCALE = 0x0040, + WE_HAVE_A_TWO_BY_TWO = 0x0080, + WE_HAVE_INSTRUCTIONS = 0x0100, + USE_MY_METRICS = 0x0200, + OVERLAP_COMPOUND = 0x0400, + SCALED_COMPONENT_OFFSET = 0x0800, + UNSCALED_COMPONENT_OFFSET = 0x1000 }; - HBUINT16 flags; - GlyphID glyphIndex; - + public: unsigned int get_size () const { unsigned int size = min_size; - // arg1 and 2 are int16 + /* arg1 and 2 are int16 */ if (flags & ARG_1_AND_2_ARE_WORDS) size += 4; - // arg1 and 2 are int8 + /* arg1 and 2 are int8 */ else size += 2; - // One x 16 bit (scale) + /* One x 16 bit (scale) */ if (flags & WE_HAVE_A_SCALE) size += 2; - // Two x 16 bit (xscale, yscale) + /* Two x 16 bit (xscale, yscale) */ else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) size += 4; - // Four x 16 bit (xscale, scale01, scale10, yscale) + /* Four x 16 bit (xscale, scale01, scale10, yscale) */ else if (flags & WE_HAVE_A_TWO_BY_TWO) size += 8; return size; } - struct Iterator + void set_glyph_index (hb_codepoint_t new_gid) { glyphIndex = new_gid; } + hb_codepoint_t get_glyph_index () const { return glyphIndex; } + + void drop_instructions_flag () { flags = (uint16_t) flags & ~WE_HAVE_INSTRUCTIONS; } + bool has_instructions () const { return flags & WE_HAVE_INSTRUCTIONS; } + + bool has_more () const { return flags & MORE_COMPONENTS; } + bool is_use_my_metrics () const { return flags & USE_MY_METRICS; } + bool is_anchored () const { return !(flags & ARGS_ARE_XY_VALUES); } + void get_anchor_points (unsigned int &point1, unsigned int &point2) const { - const char *glyph_start; - const char *glyph_end; - const CompositeGlyphHeader *current; + const HBUINT8 *p = &StructAfter (glyphIndex); + if (flags & ARG_1_AND_2_ARE_WORDS) + { + point1 = ((const HBUINT16 *) p)[0]; + point2 = ((const HBUINT16 *) p)[1]; + } + else + { + point1 = p[0]; + point2 = p[1]; + } + } - bool move_to_next () + void transform_points (contour_point_vector_t &points) const + { + float matrix[4]; + contour_point_t trans; + if (get_transformation (matrix, trans)) { - if (current->flags & CompositeGlyphHeader::MORE_COMPONENTS) + if (scaled_offsets ()) { - const CompositeGlyphHeader *possible = - &StructAfter (*current); - if (!in_range (possible)) - return false; - current = possible; - return true; + points.translate (trans); + points.transform (matrix); + } + else + { + points.transform (matrix); + points.translate (trans); } - return false; } + } - bool in_range (const CompositeGlyphHeader *composite) const - { - return (const char *) composite >= glyph_start - && ((const char *) composite + CompositeGlyphHeader::min_size) <= glyph_end - && ((const char *) composite + composite->get_size ()) <= glyph_end; - } - }; + protected: + bool scaled_offsets () const + { return (flags & (SCALED_COMPONENT_OFFSET | UNSCALED_COMPONENT_OFFSET)) == SCALED_COMPONENT_OFFSET; } - static bool 
get_iterator (const char * glyph_data, - unsigned int length, - CompositeGlyphHeader::Iterator *iterator /* OUT */) + bool get_transformation (float (&matrix)[4], contour_point_t &trans) const { - if (length < GlyphHeader::static_size) - return false; /* Empty glyph; zero extents. */ + matrix[0] = matrix[3] = 1.f; + matrix[1] = matrix[2] = 0.f; - const GlyphHeader &glyph_header = StructAtOffset (glyph_data, 0); - if (glyph_header.numberOfContours < 0) + int tx, ty; + const HBINT8 *p = &StructAfter (glyphIndex); + if (flags & ARG_1_AND_2_ARE_WORDS) { - const CompositeGlyphHeader *possible = - &StructAfter (glyph_header); - - iterator->glyph_start = glyph_data; - iterator->glyph_end = (const char *) glyph_data + length; - if (!iterator->in_range (possible)) - return false; - iterator->current = possible; - return true; + tx = *(const HBINT16 *) p; + p += HBINT16::static_size; + ty = *(const HBINT16 *) p; + p += HBINT16::static_size; } + else + { + tx = *p++; + ty = *p++; + } + if (is_anchored ()) tx = ty = 0; - return false; + trans.init ((float) tx, (float) ty); + + { + const F2DOT14 *points = (const F2DOT14 *) p; + if (flags & WE_HAVE_A_SCALE) + { + matrix[0] = matrix[3] = points[0].to_float (); + return true; + } + else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) + { + matrix[0] = points[0].to_float (); + matrix[3] = points[1].to_float (); + return true; + } + else if (flags & WE_HAVE_A_TWO_BY_TWO) + { + matrix[0] = points[0].to_float (); + matrix[1] = points[1].to_float (); + matrix[2] = points[2].to_float (); + matrix[3] = points[3].to_float (); + return true; + } + } + return tx || ty; } + protected: + HBUINT16 flags; + HBGlyphID glyphIndex; + public: DEFINE_SIZE_MIN (4); }; - struct accelerator_t + struct composite_iter_t : hb_iter_with_fallback_t { - void init (hb_face_t *face) + typedef const CompositeGlyphChain *__item_t__; + composite_iter_t (hb_bytes_t glyph_, __item_t__ current_) : + glyph (glyph_), current (current_) + { if (!check_range (current)) current = nullptr; } + composite_iter_t () : glyph (hb_bytes_t ()), current (nullptr) {} + + const CompositeGlyphChain &__item__ () const { return *current; } + bool __more__ () const { return current; } + void __next__ () { - memset (this, 0, sizeof (accelerator_t)); + if (!current->has_more ()) { current = nullptr; return; } - const OT::head &head = *face->table.head; - if (head.indexToLocFormat > 1 || head.glyphDataFormat != 0) - /* Unknown format. Leave num_glyphs=0, that takes care of disabling us. */ - return; - short_offset = 0 == head.indexToLocFormat; - - loca_table = hb_sanitize_context_t ().reference_table (face); - glyf_table = hb_sanitize_context_t ().reference_table (face); - - num_glyphs = MAX (1u, loca_table.get_length () / (short_offset ? 2 : 4)) - 1; + const CompositeGlyphChain *possible = &StructAfter (*current); + if (!check_range (possible)) { current = nullptr; return; } + current = possible; } + bool operator != (const composite_iter_t& o) const + { return glyph != o.glyph || current != o.current; } - void fini () + bool check_range (const CompositeGlyphChain *composite) const { - loca_table.destroy (); - glyf_table.destroy (); + return glyph.check_range (composite, CompositeGlyphChain::min_size) + && glyph.check_range (composite, composite->get_size ()); } - /* - * Returns true if the referenced glyph is a valid glyph and a composite glyph. - * If true is returned a pointer to the composite glyph will be written into - * composite. 
- */ - bool get_composite (hb_codepoint_t glyph, - CompositeGlyphHeader::Iterator *composite /* OUT */) const - { - if (unlikely (!num_glyphs)) - return false; + private: + hb_bytes_t glyph; + __item_t__ current; + }; - unsigned int start_offset, end_offset; - if (!get_offsets (glyph, &start_offset, &end_offset)) - return false; /* glyph not found */ + enum phantom_point_index_t + { + PHANTOM_LEFT = 0, + PHANTOM_RIGHT = 1, + PHANTOM_TOP = 2, + PHANTOM_BOTTOM = 3, + PHANTOM_COUNT = 4 + }; - return CompositeGlyphHeader::get_iterator ((const char *) this->glyf_table + start_offset, - end_offset - start_offset, - composite); - } + struct accelerator_t; - enum simple_glyph_flag_t { - FLAG_ON_CURVE = 0x01, - FLAG_X_SHORT = 0x02, - FLAG_Y_SHORT = 0x04, - FLAG_REPEAT = 0x08, - FLAG_X_SAME = 0x10, - FLAG_Y_SAME = 0x20, + struct Glyph + { + enum simple_glyph_flag_t + { + FLAG_ON_CURVE = 0x01, + FLAG_X_SHORT = 0x02, + FLAG_Y_SHORT = 0x04, + FLAG_REPEAT = 0x08, + FLAG_X_SAME = 0x10, + FLAG_Y_SAME = 0x20, FLAG_RESERVED1 = 0x40, FLAG_RESERVED2 = 0x80 }; - /* based on FontTools _g_l_y_f.py::trim */ - bool remove_padding (unsigned int start_offset, - unsigned int *end_offset) const + private: + struct GlyphHeader { - if (*end_offset - start_offset < GlyphHeader::static_size) return true; + bool has_data () const { return numberOfContours; } - const char *glyph = ((const char *) glyf_table) + start_offset; - const char * const glyph_end = glyph + (*end_offset - start_offset); - const GlyphHeader &glyph_header = StructAtOffset (glyph, 0); - int16_t num_contours = (int16_t) glyph_header.numberOfContours; + bool get_extents (hb_font_t *font, const accelerator_t &glyf_accelerator, + hb_codepoint_t gid, hb_glyph_extents_t *extents) const + { + /* Undocumented rasterizer behavior: shift glyph to the left by (lsb - xMin), i.e., xMin = lsb */ + /* extents->x_bearing = hb_min (glyph_header.xMin, glyph_header.xMax); */ + extents->x_bearing = font->em_scale_x (glyf_accelerator.hmtx->get_side_bearing (gid)); + extents->y_bearing = font->em_scale_y (hb_max (yMin, yMax)); + extents->width = font->em_scale_x (hb_max (xMin, xMax) - hb_min (xMin, xMax)); + extents->height = font->em_scale_y (hb_min (yMin, yMax) - hb_max (yMin, yMax)); - if (num_contours < 0) - /* Trimming for composites not implemented. - * If removing hints it falls out of that. */ return true; - else if (num_contours > 0) + } + + HBINT16 numberOfContours; + /* If the number of contours is + * greater than or equal to zero, + * this is a simple glyph; if negative, + * this is a composite glyph. */ + FWORD xMin; /* Minimum x for coordinate data. */ + FWORD yMin; /* Minimum y for coordinate data. */ + FWORD xMax; /* Maximum x for coordinate data. */ + FWORD yMax; /* Maximum y for coordinate data. 
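The simple-glyph trimming and point-reading code above walks the flags array to learn how many coordinate bytes follow. A compact sketch of that per-flag sizing rule (coord_bytes_for_flag is an illustrative helper, not part of the patch):

```c++
#include <cstdint>

/* Each flag byte says whether the x and y deltas are 1 byte, 2 bytes, or
 * omitted (same as the previous point); FLAG_REPEAT compresses runs. */
enum : uint8_t
{
  FLAG_X_SHORT = 0x02,
  FLAG_Y_SHORT = 0x04,
  FLAG_REPEAT  = 0x08,
  FLAG_X_SAME  = 0x10,
  FLAG_Y_SAME  = 0x20,
};

static unsigned
coord_bytes_for_flag (uint8_t flag)
{
  unsigned x = (flag & FLAG_X_SHORT) ? 1 : ((flag & FLAG_X_SAME) ? 0 : 2);
  unsigned y = (flag & FLAG_Y_SHORT) ? 1 : ((flag & FLAG_Y_SAME) ? 0 : 2);
  return x + y;   /* multiplied by the repeat count when FLAG_REPEAT is set */
}
```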
*/ + public: + DEFINE_SIZE_STATIC (10); + }; + + struct SimpleGlyph + { + const GlyphHeader &header; + hb_bytes_t bytes; + SimpleGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) : + header (header_), bytes (bytes_) {} + + unsigned int instruction_len_offset () const + { return GlyphHeader::static_size + 2 * header.numberOfContours; } + + unsigned int length (unsigned int instruction_len) const + { return instruction_len_offset () + 2 + instruction_len; } + + unsigned int instructions_length () const { + unsigned int instruction_length_offset = instruction_len_offset (); + if (unlikely (instruction_length_offset + 2 > bytes.length)) return 0; + + const HBUINT16 &instructionLength = StructAtOffset (&bytes, instruction_length_offset); + /* Out of bounds of the current glyph */ + if (unlikely (length (instructionLength) > bytes.length)) return 0; + return instructionLength; + } + + const Glyph trim_padding () const + { + /* based on FontTools _g_l_y_f.py::trim */ + const char *glyph = bytes.arrayZ; + const char *glyph_end = glyph + bytes.length; /* simple glyph w/contours, possibly trimmable */ - glyph += GlyphHeader::static_size + 2 * num_contours; + glyph += instruction_len_offset (); - if (unlikely (glyph + 2 >= glyph_end)) return false; - uint16_t nCoordinates = (uint16_t) StructAtOffset (glyph - 2, 0) + 1; - uint16_t nInstructions = (uint16_t) StructAtOffset (glyph, 0); + if (unlikely (glyph + 2 >= glyph_end)) return Glyph (); + unsigned int num_coordinates = StructAtOffset (glyph - 2, 0) + 1; + unsigned int num_instructions = StructAtOffset (glyph, 0); - glyph += 2 + nInstructions; - if (unlikely (glyph + 2 >= glyph_end)) return false; + glyph += 2 + num_instructions; - unsigned int coordBytes = 0; - unsigned int coordsWithFlags = 0; + unsigned int coord_bytes = 0; + unsigned int coords_with_flags = 0; while (glyph < glyph_end) { - uint8_t flag = (uint8_t) *glyph; + uint8_t flag = *glyph; glyph++; unsigned int repeat = 1; if (flag & FLAG_REPEAT) { - if (glyph >= glyph_end) - { - DEBUG_MSG(SUBSET, nullptr, "Bad flag"); - return false; - } - repeat = ((uint8_t) *glyph) + 1; + if (unlikely (glyph >= glyph_end)) return Glyph (); + repeat = *glyph + 1; glyph++; } @@ -335,143 +529,728 @@ struct glyf if (flag & FLAG_Y_SHORT) yBytes = 1; else if ((flag & FLAG_Y_SAME) == 0) yBytes = 2; - coordBytes += (xBytes + yBytes) * repeat; - coordsWithFlags += repeat; - if (coordsWithFlags >= nCoordinates) - break; + coord_bytes += (xBytes + yBytes) * repeat; + coords_with_flags += repeat; + if (coords_with_flags >= num_coordinates) break; } - if (coordsWithFlags != nCoordinates) + if (unlikely (coords_with_flags != num_coordinates)) return Glyph (); + return Glyph (bytes.sub_array (0, bytes.length + coord_bytes - (glyph_end - glyph))); + } + + /* zero instruction length */ + void drop_hints () + { + GlyphHeader &glyph_header = const_cast (header); + (HBUINT16 &) StructAtOffset (&glyph_header, instruction_len_offset ()) = 0; + } + + void drop_hints_bytes (hb_bytes_t &dest_start, hb_bytes_t &dest_end) const + { + unsigned int instructions_len = instructions_length (); + unsigned int glyph_length = length (instructions_len); + dest_start = bytes.sub_array (0, glyph_length - instructions_len); + dest_end = bytes.sub_array (glyph_length, bytes.length - glyph_length); + } + + static bool read_points (const HBUINT8 *&p /* IN/OUT */, + contour_point_vector_t &points_ /* IN/OUT */, + const hb_bytes_t &bytes, + void (* setter) (contour_point_t &_, float v), + const simple_glyph_flag_t short_flag, + const 
simple_glyph_flag_t same_flag) + { + float v = 0; + for (unsigned i = 0; i < points_.length; i++) { - DEBUG_MSG(SUBSET, nullptr, "Expect %d coords to have flags, got flags for %d", nCoordinates, coordsWithFlags); - return false; + uint8_t flag = points_[i].flag; + if (flag & short_flag) + { + if (unlikely (!bytes.check_range (p))) return false; + if (flag & same_flag) + v += *p++; + else + v -= *p++; + } + else + { + if (!(flag & same_flag)) + { + if (unlikely (!bytes.check_range ((const HBUINT16 *) p))) return false; + v += *(const HBINT16 *) p; + p += HBINT16::static_size; + } + } + setter (points_[i], v); } - glyph += coordBytes; + return true; + } + + bool get_contour_points (contour_point_vector_t &points_ /* OUT */, + bool phantom_only = false) const + { + const HBUINT16 *endPtsOfContours = &StructAfter (header); + int num_contours = header.numberOfContours; + if (unlikely (!bytes.check_range (&endPtsOfContours[num_contours + 1]))) return false; + unsigned int num_points = endPtsOfContours[num_contours - 1] + 1; + + points_.resize (num_points); + for (unsigned int i = 0; i < points_.length; i++) points_[i].init (); + if (phantom_only) return true; - if (glyph < glyph_end) - *end_offset -= glyph_end - glyph; + for (int i = 0; i < num_contours; i++) + points_[endPtsOfContours[i]].is_end_point = true; + + /* Skip instructions */ + const HBUINT8 *p = &StructAtOffset (&endPtsOfContours[num_contours + 1], + endPtsOfContours[num_contours]); + + /* Read flags */ + for (unsigned int i = 0; i < num_points; i++) + { + if (unlikely (!bytes.check_range (p))) return false; + uint8_t flag = *p++; + points_[i].flag = flag; + if (flag & FLAG_REPEAT) + { + if (unlikely (!bytes.check_range (p))) return false; + unsigned int repeat_count = *p++; + while ((repeat_count-- > 0) && (++i < num_points)) + points_[i].flag = flag; + } + } + + /* Read x & y coordinates */ + return read_points (p, points_, bytes, [] (contour_point_t &p, float v) { p.x = v; }, + FLAG_X_SHORT, FLAG_X_SAME) + && read_points (p, points_, bytes, [] (contour_point_t &p, float v) { p.y = v; }, + FLAG_Y_SHORT, FLAG_Y_SAME); } - return true; - } + }; - bool get_offsets (hb_codepoint_t glyph, - unsigned int *start_offset /* OUT */, - unsigned int *end_offset /* OUT */) const + struct CompositeGlyph { - if (unlikely (glyph >= num_glyphs)) - return false; + const GlyphHeader &header; + hb_bytes_t bytes; + CompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) : + header (header_), bytes (bytes_) {} - if (short_offset) + composite_iter_t get_iterator () const + { return composite_iter_t (bytes, &StructAfter (header)); } + + unsigned int instructions_length (hb_bytes_t bytes) const { - const HBUINT16 *offsets = (const HBUINT16 *) loca_table->dataZ.arrayZ; - *start_offset = 2 * offsets[glyph]; - *end_offset = 2 * offsets[glyph + 1]; + unsigned int start = bytes.length; + unsigned int end = bytes.length; + const CompositeGlyphChain *last = nullptr; + for (auto &item : get_iterator ()) + last = &item; + if (unlikely (!last)) return 0; + + if (last->has_instructions ()) + start = (char *) last - &bytes + last->get_size (); + if (unlikely (start > end)) return 0; + return end - start; } - else + + /* Trimming for composites not implemented. + * If removing hints it falls out of that. 
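For reference, the coordinate encoding that read_points/get_contour_points decode above: each coordinate is a delta from the previous point; a "short" coordinate is one unsigned byte whose sign comes from the SAME bit, a "long" one is a signed 16-bit value, and SAME without SHORT means a delta of zero. A hedged, self-contained restatement (hypothetical helper, not the real API):

#include <cstdint>
#include <vector>

namespace sketch {

bool read_coords (const uint8_t *&p, const uint8_t *end,
                  const std::vector<uint8_t> &flags,
                  uint8_t short_bit, uint8_t same_bit,
                  std::vector<float> &out /* one absolute value per point */)
{
  float v = 0.f;
  out.clear ();
  for (uint8_t flag : flags)
  {
    if (flag & short_bit)
    {
      if (p >= end) return false;
      v += (flag & same_bit) ? float (*p++) : -float (*p++);   // SAME bit = positive
    }
    else if (!(flag & same_bit))
    {
      if (end - p < 2) return false;
      int delta = (p[0] << 8) | p[1];                // big-endian int16
      if (delta >= 0x8000) delta -= 0x10000;
      v += float (delta);
      p += 2;
    }
    out.push_back (v);
  }
  return true;
}

} // namespace sketch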
*/ + const Glyph trim_padding () const { return Glyph (bytes); } + + void drop_hints () { - const HBUINT32 *offsets = (const HBUINT32 *) loca_table->dataZ.arrayZ; + for (const auto &_ : get_iterator ()) + const_cast (_).drop_instructions_flag (); + } + + /* Chop instructions off the end */ + void drop_hints_bytes (hb_bytes_t &dest_start) const + { dest_start = bytes.sub_array (0, bytes.length - instructions_length (bytes)); } + }; + + enum glyph_type_t { EMPTY, SIMPLE, COMPOSITE }; + + public: + composite_iter_t get_composite_iterator () const + { + if (type != COMPOSITE) return composite_iter_t (); + return CompositeGlyph (*header, bytes).get_iterator (); + } - *start_offset = offsets[glyph]; - *end_offset = offsets[glyph + 1]; + const Glyph trim_padding () const + { + switch (type) { + case COMPOSITE: return CompositeGlyph (*header, bytes).trim_padding (); + case SIMPLE: return SimpleGlyph (*header, bytes).trim_padding (); + default: return bytes; } + } - if (*start_offset > *end_offset || *end_offset > glyf_table.get_length ()) - return false; + void drop_hints () + { + switch (type) { + case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints (); return; + case SIMPLE: SimpleGlyph (*header, bytes).drop_hints (); return; + default: return; + } + } - return true; + void drop_hints_bytes (hb_bytes_t &dest_start, hb_bytes_t &dest_end) const + { + switch (type) { + case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints_bytes (dest_start); return; + case SIMPLE: SimpleGlyph (*header, bytes).drop_hints_bytes (dest_start, dest_end); return; + default: return; + } } - bool get_instruction_offsets (unsigned int start_offset, - unsigned int end_offset, - unsigned int *instruction_start /* OUT */, - unsigned int *instruction_end /* OUT */) const + /* Note: Recursively calls itself. + * all_points includes phantom points + */ + bool get_points (hb_font_t *font, const accelerator_t &glyf_accelerator, + contour_point_vector_t &all_points /* OUT */, + bool phantom_only = false, + unsigned int depth = 0) const { - if (end_offset - start_offset < GlyphHeader::static_size) + if (unlikely (depth > HB_MAX_NESTING_LEVEL)) return false; + contour_point_vector_t points; + + switch (type) { + case COMPOSITE: { - *instruction_start = 0; - *instruction_end = 0; - return true; /* Empty glyph; no instructions. 
*/ + /* pseudo component points for each component in composite glyph */ + unsigned num_points = hb_len (CompositeGlyph (*header, bytes).get_iterator ()); + if (unlikely (!points.resize (num_points))) return false; + for (unsigned i = 0; i < points.length; i++) + points[i].init (); + break; } - const GlyphHeader &glyph_header = StructAtOffset (glyf_table, start_offset); - int16_t num_contours = (int16_t) glyph_header.numberOfContours; - if (num_contours < 0) + case SIMPLE: + if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (points, phantom_only))) + return false; + break; + } + + /* Init phantom points */ + if (unlikely (!points.resize (points.length + PHANTOM_COUNT))) return false; + hb_array_t phantoms = points.sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT); { - CompositeGlyphHeader::Iterator composite_it; - if (unlikely (!CompositeGlyphHeader::get_iterator ( - (const char*) this->glyf_table + start_offset, - end_offset - start_offset, &composite_it))) return false; - const CompositeGlyphHeader *last; - do { - last = composite_it.current; - } while (composite_it.move_to_next ()); - - if ((uint16_t) last->flags & CompositeGlyphHeader::WE_HAVE_INSTRUCTIONS) - *instruction_start = ((char *) last - (char *) glyf_table->dataZ.arrayZ) + last->get_size (); - else - *instruction_start = end_offset; - *instruction_end = end_offset; - if (unlikely (*instruction_start > *instruction_end)) + for (unsigned i = 0; i < PHANTOM_COUNT; ++i) phantoms[i].init (); + int h_delta = (int) header->xMin - glyf_accelerator.hmtx->get_side_bearing (gid); + int v_orig = (int) header->yMax + glyf_accelerator.vmtx->get_side_bearing (gid); + unsigned h_adv = glyf_accelerator.hmtx->get_advance (gid); + unsigned v_adv = glyf_accelerator.vmtx->get_advance (gid); + phantoms[PHANTOM_LEFT].x = h_delta; + phantoms[PHANTOM_RIGHT].x = h_adv + h_delta; + phantoms[PHANTOM_TOP].y = v_orig; + phantoms[PHANTOM_BOTTOM].y = v_orig - (int) v_adv; + } + +#ifndef HB_NO_VAR + if (unlikely (!glyf_accelerator.gvar->apply_deltas_to_points (gid, font, points.as_array ()))) + return false; +#endif + + switch (type) { + case SIMPLE: + all_points.extend (points.as_array ()); + break; + case COMPOSITE: + { + unsigned int comp_index = 0; + for (auto &item : get_composite_iterator ()) { - DEBUG_MSG(SUBSET, nullptr, "Invalid instruction offset, %d is outside [%d, %d]", *instruction_start, start_offset, end_offset); - return false; + contour_point_vector_t comp_points; + if (unlikely (!glyf_accelerator.glyph_for_gid (item.get_glyph_index ()) + .get_points (font, glyf_accelerator, comp_points, + phantom_only, depth + 1) + || comp_points.length < PHANTOM_COUNT)) + return false; + + /* Copy phantom points from component if USE_MY_METRICS flag set */ + if (item.is_use_my_metrics ()) + for (unsigned int i = 0; i < PHANTOM_COUNT; i++) + phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i]; + + /* Apply component transformation & translation */ + item.transform_points (comp_points); + + /* Apply translation from gvar */ + comp_points.translate (points[comp_index]); + + if (item.is_anchored ()) + { + unsigned int p1, p2; + item.get_anchor_points (p1, p2); + if (likely (p1 < all_points.length && p2 < comp_points.length)) + { + contour_point_t delta; + delta.init (all_points[p1].x - comp_points[p2].x, + all_points[p1].y - comp_points[p2].y); + + comp_points.translate (delta); + } + } + + all_points.extend (comp_points.sub_array (0, comp_points.length - PHANTOM_COUNT)); + + comp_index++; } + + all_points.extend (phantoms); + 
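The phantom-point seeding above is easy to miss among the accessor calls, so here is a condensed restatement with plain integers instead of the hmtx/vmtx accelerators (types and function name are illustrative only):

struct PhantomPoints { float left_x, right_x, top_y, bottom_y; };

PhantomPoints seed_phantoms (int x_min, int y_max,
                             int h_side_bearing, unsigned h_advance,
                             int v_side_bearing, unsigned v_advance)
{
  // Horizontal phantoms: the left one lands at xMin - lsb,
  // the right one sits one advance width further along.
  int h_delta = x_min - h_side_bearing;
  // Vertical origin sits the top side bearing above yMax.
  int v_orig  = y_max + v_side_bearing;
  return PhantomPoints {
    float (h_delta),
    float (h_advance) + float (h_delta),
    float (v_orig),
    float (v_orig) - float (v_advance),
  };
}

gvar deltas are then applied to these four points along with the real contour points, which is what makes the variable advance/side-bearing code later in the accelerator work.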
} break; + default: + all_points.extend (phantoms); } - else + + if (depth == 0) /* Apply at top level */ { - unsigned int instruction_length_offset = start_offset + GlyphHeader::static_size + 2 * num_contours; - if (unlikely (instruction_length_offset + 2 > end_offset)) + /* Undocumented rasterizer behavior: + * Shift points horizontally by the updated left side bearing + */ + contour_point_t delta; + delta.init (-phantoms[PHANTOM_LEFT].x, 0.f); + if (delta.x) all_points.translate (delta); + } + + return true; + } + + bool get_extents (hb_font_t *font, const accelerator_t &glyf_accelerator, + hb_glyph_extents_t *extents) const + { + if (type == EMPTY) return true; /* Empty glyph; zero extents. */ + return header->get_extents (font, glyf_accelerator, gid, extents); + } + + hb_bytes_t get_bytes () const { return bytes; } + + Glyph (hb_bytes_t bytes_ = hb_bytes_t (), + hb_codepoint_t gid_ = (hb_codepoint_t) -1) : bytes (bytes_), gid (gid_), + header (bytes.as ()) + { + int num_contours = header->numberOfContours; + if (unlikely (num_contours == 0)) type = EMPTY; + else if (num_contours > 0) type = SIMPLE; + else type = COMPOSITE; /* negative numbers */ + } + + protected: + hb_bytes_t bytes; + hb_codepoint_t gid; + const GlyphHeader *header; + unsigned type; + }; + + struct accelerator_t + { + void init (hb_face_t *face_) + { + short_offset = false; + num_glyphs = 0; + loca_table = nullptr; + glyf_table = nullptr; +#ifndef HB_NO_VAR + gvar = nullptr; +#endif + hmtx = nullptr; + vmtx = nullptr; + face = face_; + const OT::head &head = *face->table.head; + if (head.indexToLocFormat > 1 || head.glyphDataFormat > 0) + /* Unknown format. Leave num_glyphs=0, that takes care of disabling us. */ + return; + short_offset = 0 == head.indexToLocFormat; + + loca_table = hb_sanitize_context_t ().reference_table (face); + glyf_table = hb_sanitize_context_t ().reference_table (face); +#ifndef HB_NO_VAR + gvar = face->table.gvar; +#endif + hmtx = face->table.hmtx; + vmtx = face->table.vmtx; + + num_glyphs = hb_max (1u, loca_table.get_length () / (short_offset ? 
2 : 4)) - 1; + num_glyphs = hb_min (num_glyphs, face->get_num_glyphs ()); + } + + void fini () + { + loca_table.destroy (); + glyf_table.destroy (); + } + + protected: + template + bool get_points (hb_font_t *font, hb_codepoint_t gid, T consumer) const + { + if (gid >= num_glyphs) return false; + + /* Making this alloc free is not that easy + https://github.com/harfbuzz/harfbuzz/issues/2095 + mostly because of gvar handling in VF fonts, + perhaps a separate path for non-VF fonts can be considered */ + contour_point_vector_t all_points; + + bool phantom_only = !consumer.is_consuming_contour_points (); + if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, phantom_only))) + return false; + + if (consumer.is_consuming_contour_points ()) + { + for (unsigned point_index = 0; point_index + 4 < all_points.length; ++point_index) + consumer.consume_point (all_points[point_index]); + consumer.points_end (); + } + + /* Where to write phantoms, nullptr if not requested */ + contour_point_t *phantoms = consumer.get_phantoms_sink (); + if (phantoms) + for (unsigned i = 0; i < PHANTOM_COUNT; ++i) + phantoms[i] = all_points[all_points.length - PHANTOM_COUNT + i]; + + return true; + } + +#ifndef HB_NO_VAR + struct points_aggregator_t + { + hb_font_t *font; + hb_glyph_extents_t *extents; + contour_point_t *phantoms; + + struct contour_bounds_t + { + contour_bounds_t () { min_x = min_y = FLT_MAX; max_x = max_y = -FLT_MAX; } + + void add (const contour_point_t &p) { - DEBUG_MSG(SUBSET, nullptr, "Glyph size is too short, missing field instructionLength."); - return false; + min_x = hb_min (min_x, p.x); + min_y = hb_min (min_y, p.y); + max_x = hb_max (max_x, p.x); + max_y = hb_max (max_y, p.y); } - const HBUINT16 &instruction_length = StructAtOffset (glyf_table, instruction_length_offset); - unsigned int start = instruction_length_offset + 2; - unsigned int end = start + (uint16_t) instruction_length; - if (unlikely (end > end_offset)) // Out of bounds of the current glyph + bool empty () const { return (min_x >= max_x) || (min_y >= max_y); } + + void get_extents (hb_font_t *font, hb_glyph_extents_t *extents) { - DEBUG_MSG(SUBSET, nullptr, "The instructions array overruns the glyph's boundaries."); - return false; + if (unlikely (empty ())) + { + extents->width = 0; + extents->x_bearing = 0; + extents->height = 0; + extents->y_bearing = 0; + return; + } + extents->x_bearing = font->em_scalef_x (min_x); + extents->width = font->em_scalef_x (max_x - min_x); + extents->y_bearing = font->em_scalef_y (max_y); + extents->height = font->em_scalef_y (min_y - max_y); } - *instruction_start = start; - *instruction_end = end; + protected: + float min_x, min_y, max_x, max_y; + } bounds; + + points_aggregator_t (hb_font_t *font_, hb_glyph_extents_t *extents_, contour_point_t *phantoms_) + { + font = font_; + extents = extents_; + phantoms = phantoms_; + if (extents) bounds = contour_bounds_t (); } - return true; + + void consume_point (const contour_point_t &point) { bounds.add (point); } + void points_end () { bounds.get_extents (font, extents); } + + bool is_consuming_contour_points () { return extents; } + contour_point_t *get_phantoms_sink () { return phantoms; } + }; + + public: + unsigned + get_advance_var (hb_font_t *font, hb_codepoint_t gid, bool is_vertical) const + { + if (unlikely (gid >= num_glyphs)) return 0; + + bool success = false; + + contour_point_t phantoms[PHANTOM_COUNT]; + if (likely (font->num_coords == gvar->get_axis_count ())) + success = get_points (font, gid, points_aggregator_t 
(font, nullptr, phantoms)); + + if (unlikely (!success)) + return is_vertical ? vmtx->get_advance (gid) : hmtx->get_advance (gid); + + float result = is_vertical + ? phantoms[PHANTOM_TOP].y - phantoms[PHANTOM_BOTTOM].y + : phantoms[PHANTOM_RIGHT].x - phantoms[PHANTOM_LEFT].x; + return hb_clamp (roundf (result), 0.f, (float) UINT_MAX / 2); } - bool get_extents (hb_codepoint_t glyph, hb_glyph_extents_t *extents) const + int get_side_bearing_var (hb_font_t *font, hb_codepoint_t gid, bool is_vertical) const { + if (unlikely (gid >= num_glyphs)) return 0; + + hb_glyph_extents_t extents; + + contour_point_t phantoms[PHANTOM_COUNT]; + if (unlikely (!get_points (font, gid, points_aggregator_t (font, &extents, phantoms)))) + return is_vertical ? vmtx->get_side_bearing (gid) : hmtx->get_side_bearing (gid); + + return is_vertical + ? ceilf (phantoms[PHANTOM_TOP].y) - extents.y_bearing + : floorf (phantoms[PHANTOM_LEFT].x); + } +#endif + + public: + bool get_extents (hb_font_t *font, hb_codepoint_t gid, hb_glyph_extents_t *extents) const + { + if (unlikely (gid >= num_glyphs)) return false; + +#ifndef HB_NO_VAR + if (font->num_coords && font->num_coords == gvar->get_axis_count ()) + return get_points (font, gid, points_aggregator_t (font, extents, nullptr)); +#endif + return glyph_for_gid (gid).get_extents (font, *this, extents); + } + + const Glyph + glyph_for_gid (hb_codepoint_t gid, bool needs_padding_removal = false) const + { + if (unlikely (gid >= num_glyphs)) return Glyph (); + unsigned int start_offset, end_offset; - if (!get_offsets (glyph, &start_offset, &end_offset)) - return false; - if (end_offset - start_offset < GlyphHeader::static_size) - return true; /* Empty glyph; zero extents. */ + if (short_offset) + { + const HBUINT16 *offsets = (const HBUINT16 *) loca_table->dataZ.arrayZ; + start_offset = 2 * offsets[gid]; + end_offset = 2 * offsets[gid + 1]; + } + else + { + const HBUINT32 *offsets = (const HBUINT32 *) loca_table->dataZ.arrayZ; + start_offset = offsets[gid]; + end_offset = offsets[gid + 1]; + } + + if (unlikely (start_offset > end_offset || end_offset > glyf_table.get_length ())) + return Glyph (); - const GlyphHeader &glyph_header = StructAtOffset (glyf_table, start_offset); + Glyph glyph (hb_bytes_t ((const char *) this->glyf_table + start_offset, + end_offset - start_offset), gid); + return needs_padding_removal ? 
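glyph_for_gid above reads the glyph's byte range out of 'loca'. A minimal sketch of that lookup, assuming the entries have already been byte-swapped to host order (the real code reads big-endian HBUINT16/HBUINT32 values); the helper name is hypothetical:

#include <cstdint>
#include <utility>

// Short format stores offset/2 in 16 bits; long format stores the offset itself.
std::pair<uint32_t, uint32_t>
loca_glyph_range (bool short_offset, const void *loca, uint32_t gid)
{
  if (short_offset)
  {
    const uint16_t *v = static_cast<const uint16_t *> (loca);
    return { uint32_t (v[gid]) * 2u, uint32_t (v[gid + 1]) * 2u };
  }
  const uint32_t *v = static_cast<const uint32_t *> (loca);
  return { v[gid], v[gid + 1] };
}

The glyph occupies [first, second) in 'glyf'; an empty range means an empty glyph, and a range past the table length is rejected, as in the bounds check above.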
glyph.trim_padding () : glyph; + } - extents->x_bearing = MIN (glyph_header.xMin, glyph_header.xMax); - extents->y_bearing = MAX (glyph_header.yMin, glyph_header.yMax); - extents->width = MAX (glyph_header.xMin, glyph_header.xMax) - extents->x_bearing; - extents->height = MIN (glyph_header.yMin, glyph_header.yMax) - extents->y_bearing; + void + add_gid_and_children (hb_codepoint_t gid, hb_set_t *gids_to_retain, + unsigned int depth = 0) const + { + if (unlikely (depth++ > HB_MAX_NESTING_LEVEL)) return; + /* Check if is already visited */ + if (gids_to_retain->has (gid)) return; - return true; + gids_to_retain->add (gid); + + for (auto &item : glyph_for_gid (gid).get_composite_iterator ()) + add_gid_and_children (item.get_glyph_index (), gids_to_retain, depth); } +#ifdef HB_EXPERIMENTAL_API + struct path_builder_t + { + hb_font_t *font; + draw_helper_t *draw_helper; + + struct optional_point_t + { + optional_point_t () { has_data = false; } + optional_point_t (float x_, float y_) { x = x_; y = y_; has_data = true; } + + bool has_data; + float x; + float y; + + optional_point_t lerp (optional_point_t p, float t) + { return optional_point_t (x + t * (p.x - x), y + t * (p.y - y)); } + } first_oncurve, first_offcurve, last_offcurve; + + path_builder_t (hb_font_t *font_, draw_helper_t &draw_helper_) + { + font = font_; + draw_helper = &draw_helper_; + first_oncurve = first_offcurve = last_offcurve = optional_point_t (); + } + + /* based on https://github.com/RazrFalcon/ttf-parser/blob/4f32821/src/glyf.rs#L287 + See also: + * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM01/Chap1.html + * https://stackoverflow.com/a/20772557 */ + void consume_point (const contour_point_t &point) + { + /* Skip empty contours */ + if (unlikely (point.is_end_point && !first_oncurve.has_data && !first_offcurve.has_data)) + return; + + bool is_on_curve = point.flag & Glyph::FLAG_ON_CURVE; + optional_point_t p (point.x, point.y); + if (!first_oncurve.has_data) + { + if (is_on_curve) + { + first_oncurve = p; + draw_helper->move_to (font->em_scalef_x (p.x), font->em_scalef_y (p.y)); + } + else + { + if (first_offcurve.has_data) + { + optional_point_t mid = first_offcurve.lerp (p, .5f); + first_oncurve = mid; + last_offcurve = p; + draw_helper->move_to (font->em_scalef_x (mid.x), font->em_scalef_y (mid.y)); + } + else + first_offcurve = p; + } + } + else + { + if (last_offcurve.has_data) + { + if (is_on_curve) + { + draw_helper->quadratic_to (font->em_scalef_x (last_offcurve.x), font->em_scalef_y (last_offcurve.y), + font->em_scalef_x (p.x), font->em_scalef_y (p.y)); + last_offcurve = optional_point_t (); + } + else + { + optional_point_t mid = last_offcurve.lerp (p, .5f); + draw_helper->quadratic_to (font->em_scalef_x (last_offcurve.x), font->em_scalef_y (last_offcurve.y), + font->em_scalef_x (mid.x), font->em_scalef_y (mid.y)); + last_offcurve = p; + } + } + else + { + if (is_on_curve) + draw_helper->line_to (font->em_scalef_x (p.x), font->em_scalef_y (p.y)); + else + last_offcurve = p; + } + } + + if (point.is_end_point) + { + if (first_offcurve.has_data && last_offcurve.has_data) + { + optional_point_t mid = last_offcurve.lerp (first_offcurve, .5f); + draw_helper->quadratic_to (font->em_scalef_x (last_offcurve.x), font->em_scalef_y (last_offcurve.y), + font->em_scalef_x (mid.x), font->em_scalef_y (mid.y)); + last_offcurve = optional_point_t (); + /* now check the rest */ + } + + if (first_offcurve.has_data && first_oncurve.has_data) + draw_helper->quadratic_to (font->em_scalef_x (first_offcurve.x), 
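The curve handling in consume_point above follows the standard TrueType rule (see the ttf-parser reference it cites): between two consecutive off-curve points there is an implied on-curve point at their midpoint. A small sketch of that expansion step, under the assumption that the contour is processed as a flat point list (illustrative helper, not the HarfBuzz draw API):

#include <vector>

struct Pt { float x, y; bool on_curve; };

// Inserts the implied on-curve midpoints so the contour strictly alternates
// on/off points before quadratic segments are emitted.
std::vector<Pt> insert_implied_oncurve_points (const std::vector<Pt> &contour)
{
  std::vector<Pt> out;
  for (const Pt &cur : contour)
  {
    if (!out.empty () && !out.back ().on_curve && !cur.on_curve)
      out.push_back ({ (out.back ().x + cur.x) * 0.5f,
                       (out.back ().y + cur.y) * 0.5f, true });
    out.push_back (cur);
  }
  // The wrap-around pair (last point, first point) needs the same treatment
  // when both are off-curve; the path builder above handles that at is_end_point.
  return out;
}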
font->em_scalef_y (first_offcurve.y), + font->em_scalef_x (first_oncurve.x), font->em_scalef_y (first_oncurve.y)); + else if (last_offcurve.has_data && first_oncurve.has_data) + draw_helper->quadratic_to (font->em_scalef_x (last_offcurve.x), font->em_scalef_y (last_offcurve.y), + font->em_scalef_x (first_oncurve.x), font->em_scalef_y (first_oncurve.y)); + else if (first_oncurve.has_data) + draw_helper->line_to (font->em_scalef_x (first_oncurve.x), font->em_scalef_y (first_oncurve.y)); + + /* Getting ready for the next contour */ + first_oncurve = first_offcurve = last_offcurve = optional_point_t (); + draw_helper->end_path (); + } + } + void points_end () {} + + bool is_consuming_contour_points () { return true; } + contour_point_t *get_phantoms_sink () { return nullptr; } + }; + + bool + get_path (hb_font_t *font, hb_codepoint_t gid, draw_helper_t &draw_helper) const + { return get_points (font, gid, path_builder_t (font, draw_helper)); } +#endif + +#ifndef HB_NO_VAR + const gvar_accelerator_t *gvar; +#endif + const hmtx_accelerator_t *hmtx; + const vmtx_accelerator_t *vmtx; + private: bool short_offset; unsigned int num_glyphs; hb_blob_ptr_t loca_table; hb_blob_ptr_t glyf_table; + hb_face_t *face; + }; + + struct SubsetGlyph + { + hb_codepoint_t new_gid; + hb_codepoint_t old_gid; + Glyph source_glyph; + hb_bytes_t dest_start; /* region of source_glyph to copy first */ + hb_bytes_t dest_end; /* region of source_glyph to copy second */ + + bool serialize (hb_serialize_context_t *c, + const hb_subset_plan_t *plan) const + { + TRACE_SERIALIZE (this); + + hb_bytes_t dest_glyph = dest_start.copy (c); + dest_glyph = hb_bytes_t (&dest_glyph, dest_glyph.length + dest_end.copy (c).length); + unsigned int pad_length = padding (); + DEBUG_MSG (SUBSET, nullptr, "serialize %d byte glyph, width %d pad %d", dest_glyph.length, dest_glyph.length + pad_length, pad_length); + + HBUINT8 pad; + pad = 0; + while (pad_length > 0) + { + c->embed (pad); + pad_length--; + } + + if (unlikely (!dest_glyph.length)) return_trace (true); + + /* update components gids */ + for (auto &_ : Glyph (dest_glyph).get_composite_iterator ()) + { + hb_codepoint_t new_gid; + if (plan->new_gid_for_old_gid (_.get_glyph_index (), &new_gid)) + const_cast (_).set_glyph_index (new_gid); + } + + if (plan->drop_hints) Glyph (dest_glyph).drop_hints (); + + return_trace (true); + } + + void drop_hints_bytes () + { source_glyph.drop_hints_bytes (dest_start, dest_end); } + + unsigned int length () const { return dest_start.length + dest_end.length; } + /* pad to 2 to ensure 2-byte loca will be ok */ + unsigned int padding () const { return length () % 2; } + unsigned int padded_size () const { return length () + padding (); } }; protected: - UnsizedArrayOf dataZ; /* Glyphs data. */ + UnsizedArrayOf + dataZ; /* Glyphs data. */ public: - DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always - * check the size externally, allow Null() object of it by - * defining it MIN() instead. */ + DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always + * check the size externally, allow Null() object of it by + * defining it _MIN instead. 
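The "pad to 2 to ensure 2-byte loca will be ok" comment above refers to the short 'loca' format, which stores offset/2 in a 16-bit field, so every serialized glyph record must end on an even offset. A tiny sketch of that invariant (hypothetical helper, not the serializer API):

#include <cstdint>
#include <vector>

void append_glyph_padded (std::vector<uint8_t> &glyf, const uint8_t *data, size_t len)
{
  glyf.insert (glyf.end (), data, data + len);
  if (len % 2) glyf.push_back (0);   // one zero byte of padding, as padding() computes
  // The next glyph's short-loca entry would be glyf.size () / 2, which only
  // round-trips when glyf.size () is even.
}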
*/ }; struct glyf_accelerator_t : glyf::accelerator_t {}; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-hdmx-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-hdmx-table.hh index d27d098b69f9..201ffc50be7b 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-hdmx-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-hdmx-table.hh @@ -41,68 +41,31 @@ namespace OT { struct DeviceRecord { - struct SubsetView - { - const DeviceRecord *source_device_record; - unsigned int sizeDeviceRecord; - hb_subset_plan_t *subset_plan; - - void init (const DeviceRecord *source_device_record, - unsigned int sizeDeviceRecord, - hb_subset_plan_t *subset_plan) - { - this->source_device_record = source_device_record; - this->sizeDeviceRecord = sizeDeviceRecord; - this->subset_plan = subset_plan; - } - - unsigned int len () const - { return this->subset_plan->glyphs.length; } - - const HBUINT8* operator [] (unsigned int i) const - { - if (unlikely (i >= len ())) return nullptr; - hb_codepoint_t gid = this->subset_plan->glyphs [i]; - - if (gid >= sizeDeviceRecord - DeviceRecord::min_size) - return nullptr; - return &(this->source_device_record->widthsZ[gid]); - } - }; - - static unsigned int get_size (unsigned int count) + static unsigned int get_size (unsigned count) { return hb_ceil_to_4 (min_size + count * HBUINT8::static_size); } - bool serialize (hb_serialize_context_t *c, const SubsetView &subset_view) + template + bool serialize (hb_serialize_context_t *c, unsigned pixelSize, Iterator it) { TRACE_SERIALIZE (this); - unsigned int size = get_size (subset_view.len ()); - if (unlikely (!c->allocate_size (size))) - { - DEBUG_MSG(SUBSET, nullptr, "Couldn't allocate enough space for DeviceRecord: %d.", - size); - return_trace (false); - } - - this->pixelSize.set (subset_view.source_device_record->pixelSize); - this->maxWidth.set (subset_view.source_device_record->maxWidth); - - for (unsigned int i = 0; i < subset_view.len (); i++) - { - const HBUINT8 *width = subset_view[i]; - if (!width) - { - DEBUG_MSG(SUBSET, nullptr, "HDMX width for new gid %d is missing.", i); - return_trace (false); - } - widthsZ[i].set (*width); - } + unsigned length = it.len (); + + if (unlikely (!c->extend (*this, length))) return_trace (false); + + this->pixelSize = pixelSize; + this->maxWidth = + + it + | hb_reduce (hb_max, 0u); + + + it + | hb_sink (widthsZ.as_array (length)); return_trace (true); } - bool sanitize (hb_sanitize_context_t *c, unsigned int sizeDeviceRecord) const + bool sanitize (hb_sanitize_context_t *c, unsigned sizeDeviceRecord) const { TRACE_SANITIZE (this); return_trace (likely (c->check_struct (this) && @@ -132,62 +95,60 @@ struct hdmx return StructAtOffset (&this->firstDeviceRecord, i * sizeDeviceRecord); } - bool serialize (hb_serialize_context_t *c, const hdmx *source_hdmx, hb_subset_plan_t *plan) + template + bool serialize (hb_serialize_context_t *c, unsigned version, Iterator it) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min ((*this)))) return_trace (false); - this->version.set (source_hdmx->version); - this->numRecords.set (source_hdmx->numRecords); - this->sizeDeviceRecord.set (DeviceRecord::get_size (plan->glyphs.length)); + this->version = version; + this->numRecords = it.len (); + this->sizeDeviceRecord = DeviceRecord::get_size (it ? 
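DeviceRecord::get_size above rounds each 'hdmx' record up to 32-bit alignment: a record is a small fixed header (pixelSize, maxWidth) followed by one width byte per glyph, padded to a multiple of four. A compile-time sketch of that arithmetic (names are illustrative):

constexpr unsigned ceil_to_4 (unsigned v) { return (v + 3) & ~3u; }

constexpr unsigned device_record_size (unsigned header_size, unsigned num_glyphs)
{ return ceil_to_4 (header_size + num_glyphs); }

static_assert (device_record_size (2, 5) == 8, "2-byte header + 5 widths pads to 8");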
(*it).second.len () : 0); - for (unsigned int i = 0; i < source_hdmx->numRecords; i++) - { - DeviceRecord::SubsetView subset_view; - subset_view.init (&(*source_hdmx)[i], source_hdmx->sizeDeviceRecord, plan); + for (const hb_item_type& _ : +it) + c->start_embed ()->serialize (c, _.first, _.second); - if (!c->start_embed ()->serialize (c, subset_view)) - return_trace (false); - } - - return_trace (true); + return_trace (c->successful); } - static size_t get_subsetted_size (const hdmx *source_hdmx, hb_subset_plan_t *plan) + + bool subset (hb_subset_context_t *c) const { - return min_size + source_hdmx->numRecords * DeviceRecord::get_size (plan->glyphs.length); + TRACE_SUBSET (this); + + hdmx *hdmx_prime = c->serializer->start_embed (); + if (unlikely (!hdmx_prime)) return_trace (false); + + auto it = + + hb_range ((unsigned) numRecords) + | hb_map ([c, this] (unsigned _) + { + const DeviceRecord *device_record = + &StructAtOffset (&firstDeviceRecord, + _ * sizeDeviceRecord); + auto row = + + hb_range (c->plan->num_output_glyphs ()) + | hb_map (c->plan->reverse_glyph_map) + | hb_map ([this, c, device_record] (hb_codepoint_t _) + { + if (c->plan->is_empty_glyph (_)) + return Null (HBUINT8); + return device_record->widthsZ.as_array (get_num_glyphs ()) [_]; + }) + ; + return hb_pair ((unsigned) device_record->pixelSize, +row); + }) + ; + + hdmx_prime->serialize (c->serializer, version, it); + return_trace (true); } - bool subset (hb_subset_plan_t *plan) const + unsigned get_num_glyphs () const { - size_t dest_size = get_subsetted_size (this, plan); - hdmx *dest = (hdmx *) malloc (dest_size); - if (unlikely (!dest)) - { - DEBUG_MSG(SUBSET, nullptr, "Unable to alloc %lu for hdmx subset output.", (unsigned long) dest_size); - return false; - } - - hb_serialize_context_t c (dest, dest_size); - hdmx *hdmx_prime = c.start_serialize (); - if (!hdmx_prime || !hdmx_prime->serialize (&c, this, plan)) - { - free (dest); - DEBUG_MSG(SUBSET, nullptr, "Failed to serialize write new hdmx."); - return false; - } - c.end_serialize (); - - hb_blob_t *hdmx_prime_blob = hb_blob_create ((const char *) dest, - dest_size, - HB_MEMORY_MODE_READONLY, - dest, - free); - bool result = plan->add_table (HB_OT_TAG_hdmx, hdmx_prime_blob); - hb_blob_destroy (hdmx_prime_blob); - - return result; + return sizeDeviceRecord - DeviceRecord::min_size; } bool sanitize (hb_sanitize_context_t *c) const @@ -200,10 +161,12 @@ struct hdmx } protected: - HBUINT16 version; /* Table version number (0) */ - HBUINT16 numRecords; /* Number of device records. */ - HBUINT32 sizeDeviceRecord; /* Size of a device record, 32-bit aligned. */ - DeviceRecord firstDeviceRecord; /* Array of device records. */ + HBUINT16 version; /* Table version number (0) */ + HBUINT16 numRecords; /* Number of device records. */ + HBUINT32 sizeDeviceRecord; + /* Size of a device record, 32-bit aligned. */ + DeviceRecord firstDeviceRecord; + /* Array of device records. */ public: DEFINE_SIZE_MIN (8); }; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-head-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-head-table.hh index d7448d2dfcee..3f4af706bc88 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-head-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-head-table.hh @@ -54,6 +54,18 @@ struct head return 16 <= upem && upem <= 16384 ? 
upem : 1000; } + bool serialize (hb_serialize_context_t *c) const + { + TRACE_SERIALIZE (this); + return_trace ((bool) c->embed (this)); + } + + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + return_trace (serialize (c->serializer)); + } + enum mac_style_flag_t { BOLD = 1u<<0, ITALIC = 1u<<1, diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-hhea-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-hhea-table.hh index 66879a085a00..37ef8744572d 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-hhea-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-hhea-table.hh @@ -45,6 +45,8 @@ namespace OT { template struct _hea { + bool has_data () const { return version.major; } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -52,35 +54,38 @@ struct _hea } public: - FixedVersion<>version; /* 0x00010000u for version 1.0. */ - FWORD ascender; /* Typographic ascent. */ - FWORD descender; /* Typographic descent. */ - FWORD lineGap; /* Typographic line gap. */ - UFWORD advanceMax; /* Maximum advance width/height value in - * metrics table. */ - FWORD minLeadingBearing; /* Minimum left/top sidebearing value in - * metrics table. */ - FWORD minTrailingBearing; /* Minimum right/bottom sidebearing value; - * calculated as Min(aw - lsb - - * (xMax - xMin)) for horizontal. */ - FWORD maxExtent; /* horizontal: Max(lsb + (xMax - xMin)), - * vertical: minLeadingBearing+(yMax-yMin). */ - HBINT16 caretSlopeRise; /* Used to calculate the slope of the - * cursor (rise/run); 1 for vertical caret, - * 0 for horizontal.*/ - HBINT16 caretSlopeRun; /* 0 for vertical caret, 1 for horizontal. */ - HBINT16 caretOffset; /* The amount by which a slanted - * highlight on a glyph needs - * to be shifted to produce the - * best appearance. Set to 0 for - * non-slanted fonts. */ - HBINT16 reserved1; /* Set to 0. */ - HBINT16 reserved2; /* Set to 0. */ - HBINT16 reserved3; /* Set to 0. */ - HBINT16 reserved4; /* Set to 0. */ - HBINT16 metricDataFormat; /* 0 for current format. */ - HBUINT16 numberOfLongMetrics; /* Number of LongMetric entries in metric - * table. */ + FixedVersion<>version; /* 0x00010000u for version 1.0. */ + FWORD ascender; /* Typographic ascent. */ + FWORD descender; /* Typographic descent. */ + FWORD lineGap; /* Typographic line gap. */ + UFWORD advanceMax; /* Maximum advance width/height value in + * metrics table. */ + FWORD minLeadingBearing; + /* Minimum left/top sidebearing value in + * metrics table. */ + FWORD minTrailingBearing; + /* Minimum right/bottom sidebearing value; + * calculated as Min(aw - lsb - + * (xMax - xMin)) for horizontal. */ + FWORD maxExtent; /* horizontal: Max(lsb + (xMax - xMin)), + * vertical: minLeadingBearing+(yMax-yMin). */ + HBINT16 caretSlopeRise; /* Used to calculate the slope of the + * cursor (rise/run); 1 for vertical caret, + * 0 for horizontal.*/ + HBINT16 caretSlopeRun; /* 0 for vertical caret, 1 for horizontal. */ + HBINT16 caretOffset; /* The amount by which a slanted + * highlight on a glyph needs + * to be shifted to produce the + * best appearance. Set to 0 for + * non-slanted fonts. */ + HBINT16 reserved1; /* Set to 0. */ + HBINT16 reserved2; /* Set to 0. */ + HBINT16 reserved3; /* Set to 0. */ + HBINT16 reserved4; /* Set to 0. */ + HBINT16 metricDataFormat;/* 0 for current format. */ + HBUINT16 numberOfLongMetrics; + /* Number of LongMetric entries in metric + * table. 
*/ public: DEFINE_SIZE_STATIC (36); }; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-hmtx-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-hmtx-table.hh index dfb0f78d6433..0a2973d8eb85 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-hmtx-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-hmtx-table.hh @@ -29,8 +29,8 @@ #include "hb-open-type.hh" #include "hb-ot-hhea-table.hh" -#include "hb-ot-os2-table.hh" #include "hb-ot-var-hvar-table.hh" +#include "hb-ot-metrics.hh" /* * hmtx -- Horizontal Metrics @@ -42,6 +42,13 @@ #define HB_OT_TAG_vmtx HB_TAG('v','m','t','x') +HB_INTERNAL int +_glyf_get_side_bearing_var (hb_font_t *font, hb_codepoint_t glyph, bool is_vertical); + +HB_INTERNAL unsigned +_glyf_get_advance_var (hb_font_t *font, hb_codepoint_t glyph, bool is_vertical); + + namespace OT { @@ -53,6 +60,7 @@ struct LongMetric DEFINE_SIZE_STATIC (4); }; + template struct hmtxvmtx { @@ -66,7 +74,7 @@ struct hmtxvmtx bool subset_update_header (hb_subset_plan_t *plan, - unsigned int num_hmetrics) const + unsigned int num_hmetrics) const { hb_blob_t *src_blob = hb_sanitize_context_t ().reference_table (plan->source, H::tableTag); hb_blob_t *dest_blob = hb_blob_copy_writable_or_fail (src_blob); @@ -78,7 +86,7 @@ struct hmtxvmtx unsigned int length; H *table = (H *) hb_blob_get_data (dest_blob, &length); - table->numberOfLongMetrics.set (num_hmetrics); + table->numberOfLongMetrics = num_hmetrics; bool result = plan->add_table (H::tableTag, dest_blob); hb_blob_destroy (dest_blob); @@ -86,100 +94,66 @@ struct hmtxvmtx return result; } - bool subset (hb_subset_plan_t *plan) const + template + void serialize (hb_serialize_context_t *c, + Iterator it, + unsigned num_advances) { - typename T::accelerator_t _mtx; - _mtx.init (plan->source); - - /* All the trailing glyphs with the same advance can use one LongMetric - * and just keep LSB */ - hb_vector_t &gids = plan->glyphs; - unsigned int num_advances = gids.length; - unsigned int last_advance = _mtx.get_advance (gids[num_advances - 1]); - while (num_advances > 1 && - last_advance == _mtx.get_advance (gids[num_advances - 2])) - { - num_advances--; - } - - /* alloc the new table */ - size_t dest_sz = num_advances * 4 - + (gids.length - num_advances) * 2; - void *dest = (void *) malloc (dest_sz); - if (unlikely (!dest)) + unsigned idx = 0; + for (auto _ : it) { - return false; - } - DEBUG_MSG(SUBSET, nullptr, "%c%c%c%c in src has %d advances, %d lsbs", HB_UNTAG(T::tableTag), _mtx.num_advances, _mtx.num_metrics - _mtx.num_advances); - DEBUG_MSG(SUBSET, nullptr, "%c%c%c%c in dest has %d advances, %d lsbs, %u bytes", HB_UNTAG(T::tableTag), num_advances, gids.length - num_advances, (unsigned int) dest_sz); - - const char *source_table = hb_blob_get_data (_mtx.table.get_blob (), nullptr); - // Copy everything over - LongMetric * old_metrics = (LongMetric *) source_table; - FWORD *lsbs = (FWORD *) (old_metrics + _mtx.num_advances); - char * dest_pos = (char *) dest; - - bool failed = false; - for (unsigned int i = 0; i < gids.length; i++) - { - /* the last metric or the one for gids[i] */ - LongMetric *src_metric = old_metrics + MIN ((hb_codepoint_t) _mtx.num_advances - 1, gids[i]); - if (gids[i] < _mtx.num_advances) + if (idx < num_advances) { - /* src is a LongMetric */ - if (i < num_advances) - { - /* dest is a LongMetric, copy it */ - *((LongMetric *) dest_pos) = *src_metric; - } - else - { - /* dest just sb */ - *((FWORD *) dest_pos) = src_metric->sb; - } + LongMetric lm; + lm.advance = _.first; + lm.sb = 
_.second; + if (unlikely (!c->embed (&lm))) return; } else { - if (gids[i] >= _mtx.num_metrics) - { - DEBUG_MSG(SUBSET, nullptr, "gid %d is >= number of source metrics %d", - gids[i], _mtx.num_metrics); - failed = true; - break; - } - FWORD src_sb = *(lsbs + gids[i] - _mtx.num_advances); - if (i < num_advances) - { - /* dest needs a full LongMetric */ - LongMetric *metric = (LongMetric *)dest_pos; - metric->advance = src_metric->advance; - metric->sb = src_sb; - } - else - { - /* dest just needs an sb */ - *((FWORD *) dest_pos) = src_sb; - } + FWORD *sb = c->allocate_size (FWORD::static_size); + if (unlikely (!sb)) return; + *sb = _.second; } - dest_pos += (i < num_advances ? 4 : 2); + idx++; } + } + + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + + T *table_prime = c->serializer->start_embed (); + if (unlikely (!table_prime)) return_trace (false); + + accelerator_t _mtx; + _mtx.init (c->plan->source); + unsigned num_advances = _mtx.num_advances_for_subset (c->plan); + + auto it = + + hb_range (c->plan->num_output_glyphs ()) + | hb_map ([c, &_mtx] (unsigned _) + { + hb_codepoint_t old_gid; + if (!c->plan->old_gid_for_new_gid (_, &old_gid)) + return hb_pair (0u, 0); + return hb_pair (_mtx.get_advance (old_gid), _mtx.get_side_bearing (old_gid)); + }) + ; + + table_prime->serialize (c->serializer, it, num_advances); + _mtx.fini (); + if (unlikely (c->serializer->ran_out_of_room || c->serializer->in_error ())) + return_trace (false); + // Amend header num hmetrics - if (failed || unlikely (!subset_update_header (plan, num_advances))) - { - free (dest); - return false; - } + if (unlikely (!subset_update_header (c->plan, num_advances))) + return_trace (false); - hb_blob_t *result = hb_blob_create ((const char *)dest, - dest_sz, - HB_MEMORY_MODE_READONLY, - dest, - free); - bool success = plan->add_table (T::tableTag, result); - hb_blob_destroy (result); - return success; + return_trace (true); } struct accelerator_t @@ -187,34 +161,13 @@ struct hmtxvmtx friend struct hmtxvmtx; void init (hb_face_t *face, - unsigned int default_advance_ = 0) + unsigned int default_advance_ = 0) { default_advance = default_advance_ ? default_advance_ : hb_face_get_upem (face); - bool got_font_extents = false; - if (T::os2Tag != HB_TAG_NONE && face->table.OS2->is_typo_metrics ()) - { - ascender = abs (face->table.OS2->sTypoAscender); - descender = -abs (face->table.OS2->sTypoDescender); - line_gap = face->table.OS2->sTypoLineGap; - got_font_extents = (ascender | descender) != 0; - } + num_advances = T::is_horizontal ? face->table.hhea->numberOfLongMetrics : face->table.vhea->numberOfLongMetrics; - hb_blob_t *_hea_blob = hb_sanitize_context_t().reference_table (face); - const H *_hea_table = _hea_blob->as (); - num_advances = _hea_table->numberOfLongMetrics; - if (!got_font_extents) - { - ascender = abs (_hea_table->ascender); - descender = -abs (_hea_table->descender); - line_gap = _hea_table->lineGap; - got_font_extents = (ascender | descender) != 0; - } - hb_blob_destroy (_hea_blob); - - has_font_extents = got_font_extents; - - table = hb_sanitize_context_t().reference_table (face, T::tableTag); + table = hb_sanitize_context_t ().reference_table (face, T::tableTag); /* Cap num_metrics() and num_advances() based on table length. 
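The serialize/subset pair above writes the classic hmtx/vmtx layout: the first numberOfLongMetrics glyphs get full (advance, side bearing) pairs, and every later glyph stores only a side bearing while reusing the last advance. A sketch of that layout and its accessors with stand-in types (the real table is big-endian and unsized; this assumes at least one LongMetric, which the format requires):

#include <cstdint>
#include <vector>

struct LongMetricS { uint16_t advance; int16_t sb; };

struct MetricsS
{
  std::vector<LongMetricS> long_metrics;   // numberOfLongMetrics entries
  std::vector<int16_t>     trailing_sb;    // numGlyphs - numberOfLongMetrics entries

  uint16_t advance (unsigned gid) const
  {
    unsigned i = gid < long_metrics.size () ? gid : unsigned (long_metrics.size ()) - 1;
    return long_metrics[i].advance;        // last LongMetric applies to all later glyphs
  }
  int16_t side_bearing (unsigned gid) const
  {
    return gid < long_metrics.size () ? long_metrics[gid].sb
                                      : trailing_sb[gid - long_metrics.size ()];
  }
};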
*/ unsigned int len = table.get_length (); @@ -231,7 +184,7 @@ struct hmtxvmtx table = hb_blob_get_empty (); } - var_table = hb_sanitize_context_t().reference_table (face, T::variationsTag); + var_table = hb_sanitize_context_t ().reference_table (face, T::variationsTag); } void fini () @@ -240,8 +193,7 @@ struct hmtxvmtx var_table.destroy (); } - /* TODO Add variations version. */ - unsigned int get_side_bearing (hb_codepoint_t glyph) const + int get_side_bearing (hb_codepoint_t glyph) const { if (glyph < num_advances) return table->longMetricZ[glyph].sb; @@ -253,6 +205,23 @@ struct hmtxvmtx return bearings[glyph - num_advances]; } + int get_side_bearing (hb_font_t *font, hb_codepoint_t glyph) const + { + int side_bearing = get_side_bearing (glyph); + +#ifndef HB_NO_VAR + if (unlikely (glyph >= num_metrics) || !font->num_coords) + return side_bearing; + + if (var_table.get_length ()) + return side_bearing + var_table->get_side_bearing_var (glyph, font->coords, font->num_coords); // TODO Optimize?! + + return _glyf_get_side_bearing_var (font, glyph, T::tableTag == HB_OT_TAG_vmtx); +#else + return side_bearing; +#endif + } + unsigned int get_advance (hb_codepoint_t glyph) const { if (unlikely (glyph >= num_metrics)) @@ -266,25 +235,52 @@ struct hmtxvmtx return default_advance; } - return table->longMetricZ[MIN (glyph, (uint32_t) num_advances - 1)].advance; + return table->longMetricZ[hb_min (glyph, (uint32_t) num_advances - 1)].advance; } unsigned int get_advance (hb_codepoint_t glyph, hb_font_t *font) const { unsigned int advance = get_advance (glyph); - if (likely (glyph < num_metrics)) + +#ifndef HB_NO_VAR + if (unlikely (glyph >= num_metrics) || !font->num_coords) + return advance; + + if (var_table.get_length ()) + return advance + roundf (var_table->get_advance_var (glyph, font)); // TODO Optimize?! + + return _glyf_get_advance_var (font, glyph, T::tableTag == HB_OT_TAG_vmtx); +#else + return advance; +#endif + } + + unsigned int num_advances_for_subset (const hb_subset_plan_t *plan) const + { + unsigned int num_advances = plan->num_output_glyphs (); + unsigned int last_advance = _advance_for_new_gid (plan, + num_advances - 1); + while (num_advances > 1 && + last_advance == _advance_for_new_gid (plan, + num_advances - 2)) { - advance += (font->num_coords ? var_table->get_advance_var (glyph, font->coords, font->num_coords) : 0); // TODO Optimize?! + num_advances--; } - return advance; + + return num_advances; } - public: - bool has_font_extents; - int ascender; - int descender; - int line_gap; + private: + unsigned int _advance_for_new_gid (const hb_subset_plan_t *plan, + hb_codepoint_t new_gid) const + { + hb_codepoint_t old_gid; + if (!plan->old_gid_for_new_gid (new_gid, &old_gid)) + return 0; + + return get_advance (old_gid); + } protected: unsigned int num_metrics; @@ -297,27 +293,29 @@ struct hmtxvmtx }; protected: - UnsizedArrayOflongMetricZ;/* Paired advance width and leading - * bearing values for each glyph. The - * value numOfHMetrics comes from - * the 'hhea' table. If the font is - * monospaced, only one entry need - * be in the array, but that entry is - * required. The last entry applies to - * all subsequent glyphs. */ -/*UnsizedArrayOf leadingBearingX;*//* Here the advance is assumed - * to be the same as the advance - * for the last entry above. The - * number of entries in this array is - * derived from numGlyphs (from 'maxp' - * table) minus numberOfLongMetrics. - * This generally is used with a run - * of monospaced glyphs (e.g., Kanji - * fonts or Courier fonts). 
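num_advances_for_subset above implements the space optimization called out in the old code's comment: all trailing glyphs that share the same advance can be collapsed onto one LongMetric, keeping only their side bearings. A minimal sketch of that collapse:

#include <vector>

unsigned num_long_metrics (const std::vector<unsigned> &advances)
{
  unsigned n = unsigned (advances.size ());
  while (n > 1 && advances[n - 1] == advances[n - 2])
    n--;
  return n;
}

// e.g. advances {500, 600, 600, 600} need only 2 LongMetrics; the second entry's
// advance (600) then applies to every subsequent glyph.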
Only one - * run is allowed and it must be at - * the end. This allows a monospaced - * font to vary the side bearing - * values for each glyph. */ + UnsizedArrayOf + longMetricZ; /* Paired advance width and leading + * bearing values for each glyph. The + * value numOfHMetrics comes from + * the 'hhea' table. If the font is + * monospaced, only one entry need + * be in the array, but that entry is + * required. The last entry applies to + * all subsequent glyphs. */ +/*UnsizedArrayOf leadingBearingX;*/ + /* Here the advance is assumed + * to be the same as the advance + * for the last entry above. The + * number of entries in this array is + * derived from numGlyphs (from 'maxp' + * table) minus numberOfLongMetrics. + * This generally is used with a run + * of monospaced glyphs (e.g., Kanji + * fonts or Courier fonts). Only one + * run is allowed and it must be at + * the end. This allows a monospaced + * font to vary the side bearing + * values for each glyph. */ public: DEFINE_SIZE_ARRAY (0, longMetricZ); }; @@ -325,12 +323,12 @@ struct hmtxvmtx struct hmtx : hmtxvmtx { static constexpr hb_tag_t tableTag = HB_OT_TAG_hmtx; static constexpr hb_tag_t variationsTag = HB_OT_TAG_HVAR; - static constexpr hb_tag_t os2Tag = HB_OT_TAG_OS2; + static constexpr bool is_horizontal = true; }; struct vmtx : hmtxvmtx { static constexpr hb_tag_t tableTag = HB_OT_TAG_vmtx; static constexpr hb_tag_t variationsTag = HB_OT_TAG_VVAR; - static constexpr hb_tag_t os2Tag = HB_TAG_NONE; + static constexpr bool is_horizontal = false; }; struct hmtx_accelerator_t : hmtx::accelerator_t {}; diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-kern-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-kern-table.hh index 9d870ecfc8a2..37ea1276ea6b 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-kern-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-kern-table.hh @@ -47,9 +47,9 @@ struct KernSubTableFormat3 int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const { hb_array_t kernValue = kernValueZ.as_array (kernValueCount); - hb_array_t leftClass = StructAfter > (kernValue).as_array (glyphCount); - hb_array_t rightClass = StructAfter > (leftClass).as_array (glyphCount); - hb_array_t kernIndex = StructAfter > (rightClass).as_array (leftClassCount * rightClassCount); + hb_array_t leftClass = StructAfter> (kernValue).as_array (glyphCount); + hb_array_t rightClass = StructAfter> (leftClass).as_array (glyphCount); + hb_array_t kernIndex = StructAfter> (rightClass).as_array (leftClassCount * rightClassCount); unsigned int leftC = leftClass[left]; unsigned int rightC = rightClass[right]; @@ -86,21 +86,26 @@ struct KernSubTableFormat3 } protected: - KernSubTableHeader header; - HBUINT16 glyphCount; /* The number of glyphs in this font. */ - HBUINT8 kernValueCount; /* The number of kerning values. */ - HBUINT8 leftClassCount; /* The number of left-hand classes. */ - HBUINT8 rightClassCount;/* The number of right-hand classes. */ - HBUINT8 flags; /* Set to zero (reserved for future use). */ - UnsizedArrayOf kernValueZ; /* The kerning values. - * Length kernValueCount. */ + KernSubTableHeader + header; + HBUINT16 glyphCount; /* The number of glyphs in this font. */ + HBUINT8 kernValueCount; /* The number of kerning values. */ + HBUINT8 leftClassCount; /* The number of left-hand classes. */ + HBUINT8 rightClassCount;/* The number of right-hand classes. */ + HBUINT8 flags; /* Set to zero (reserved for future use). */ + UnsizedArrayOf + kernValueZ; /* The kerning values. 
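For the format 3 kern lookup above: each glyph maps to a left class and a right class, the class pair indexes a small matrix of value indices, and that index selects the actual kerning value. A self-contained sketch with stand-in containers (bounds checks on the class values, present in the real code, are reduced to one guard here):

#include <cstdint>
#include <vector>

struct KernFormat3S
{
  std::vector<int16_t> kern_values;   // kernValueCount entries
  std::vector<uint8_t> left_class;    // one entry per glyph
  std::vector<uint8_t> right_class;   // one entry per glyph
  std::vector<uint8_t> kern_index;    // leftClassCount * rightClassCount entries
  unsigned right_class_count;

  int get_kerning (unsigned left_glyph, unsigned right_glyph) const
  {
    unsigned l = left_class[left_glyph];
    unsigned r = right_class[right_glyph];
    unsigned i = kern_index[l * right_class_count + r];
    return i < kern_values.size () ? kern_values[i] : 0;
  }
};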
+ * Length kernValueCount. */ #if 0 - UnsizedArrayOfleftClass; /* The left-hand classes. - * Length glyphCount. */ - UnsizedArrayOfrightClass; /* The right-hand classes. - * Length glyphCount. */ - UnsizedArrayOfkernIndex; /* The indices into the kernValue array. - * Length leftClassCount * rightClassCount */ + UnsizedArrayOf + leftClass; /* The left-hand classes. + * Length glyphCount. */ + UnsizedArrayOf + rightClass; /* The right-hand classes. + * Length glyphCount. */ + UnsizedArrayOfkernIndex; + /* The indices into the kernValue array. + * Length leftClassCount * rightClassCount */ #endif public: DEFINE_SIZE_ARRAY (KernSubTableHeader::static_size + 6, kernValueZ); @@ -121,16 +126,20 @@ struct KernSubTable } } - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { unsigned int subtable_type = get_type (); TRACE_DISPATCH (this, subtable_type); switch (subtable_type) { case 0: return_trace (c->dispatch (u.format0)); - case 1: return_trace (u.header.apple ? c->dispatch (u.format1) : c->default_return_value ()); +#ifndef HB_NO_AAT_SHAPE + case 1: return_trace (u.header.apple ? c->dispatch (u.format1, hb_forward (ds)...) : c->default_return_value ()); +#endif case 2: return_trace (c->dispatch (u.format2)); - case 3: return_trace (u.header.apple ? c->dispatch (u.format3) : c->default_return_value ()); +#ifndef HB_NO_AAT_SHAPE + case 3: return_trace (u.header.apple ? c->dispatch (u.format3, hb_forward (ds)...) : c->default_return_value ()); +#endif default: return_trace (c->default_return_value ()); } } @@ -163,8 +172,8 @@ struct KernOTSubTableHeader static constexpr bool apple = false; typedef AAT::ObsoleteTypes Types; - unsigned int tuple_count () const { return 0; } - bool is_horizontal () const { return (coverage & Horizontal); } + unsigned tuple_count () const { return 0; } + bool is_horizontal () const { return (coverage & Horizontal); } enum Coverage { @@ -218,8 +227,8 @@ struct KernAATSubTableHeader static constexpr bool apple = true; typedef AAT::ObsoleteTypes Types; - unsigned int tuple_count () const { return 0; } - bool is_horizontal () const { return !(coverage & Vertical); } + unsigned tuple_count () const { return 0; } + bool is_horizontal () const { return !(coverage & Vertical); } enum Coverage { @@ -242,8 +251,8 @@ struct KernAATSubTableHeader HBUINT8 coverage; /* Coverage bits. */ HBUINT8 format; /* Subtable format. */ HBUINT16 tupleIndex; /* The tuple index (used for variations fonts). - * This value specifies which tuple this subtable covers. - * Note: We don't implement. */ + * This value specifies which tuple this subtable covers. + * Note: We don't implement. 
*/ public: DEFINE_SIZE_STATIC (8); }; @@ -271,14 +280,16 @@ struct kern { static constexpr hb_tag_t tableTag = HB_OT_TAG_kern; - bool has_data () const { return u.version32; } - unsigned int get_type () const { return u.major; } + bool has_data () const { return u.version32; } + unsigned get_type () const { return u.major; } bool has_state_machine () const { switch (get_type ()) { case 0: return u.ot.has_state_machine (); +#ifndef HB_NO_AAT_SHAPE case 1: return u.aat.has_state_machine (); +#endif default:return false; } } @@ -287,7 +298,9 @@ struct kern { switch (get_type ()) { case 0: return u.ot.has_cross_stream (); +#ifndef HB_NO_AAT_SHAPE case 1: return u.aat.has_cross_stream (); +#endif default:return false; } } @@ -296,7 +309,9 @@ struct kern { switch (get_type ()) { case 0: return u.ot.get_h_kerning (left, right); +#ifndef HB_NO_AAT_SHAPE case 1: return u.aat.get_h_kerning (left, right); +#endif default:return 0; } } @@ -304,14 +319,16 @@ struct kern bool apply (AAT::hb_aat_apply_context_t *c) const { return dispatch (c); } - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { unsigned int subtable_type = get_type (); TRACE_DISPATCH (this, subtable_type); switch (subtable_type) { - case 0: return_trace (c->dispatch (u.ot)); - case 1: return_trace (c->dispatch (u.aat)); + case 0: return_trace (c->dispatch (u.ot, hb_forward (ds)...)); +#ifndef HB_NO_AAT_SHAPE + case 1: return_trace (c->dispatch (u.aat, hb_forward (ds)...)); +#endif default: return_trace (c->default_return_value ()); } } @@ -328,7 +345,9 @@ struct kern HBUINT32 version32; HBUINT16 major; KernOT ot; +#ifndef HB_NO_AAT_SHAPE KernAAT aat; +#endif } u; public: DEFINE_SIZE_UNION (4, version32); diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-base-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-base-table.hh index fe30e278795a..f8cdb0469105 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-base-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-base-table.hh @@ -1,7 +1,7 @@ /* - * Copyright © 2016 Elie Roux + * Copyright © 2016 Elie Roux * Copyright © 2018 Google, Inc. - * Copyright © 2018 Ebrahim Byagowi + * Copyright © 2018-2019 Ebrahim Byagowi * * This is part of HarfBuzz, a text shaping library. 
* @@ -32,9 +32,6 @@ #include "hb-open-type.hh" #include "hb-ot-layout-common.hh" -/* To be removed */ -typedef hb_tag_t hb_ot_layout_baseline_t; - namespace OT { /* @@ -76,7 +73,7 @@ struct BaseCoordFormat2 protected: HBUINT16 format; /* Format identifier--format = 2 */ FWORD coordinate; /* X or Y value, in design units */ - GlyphID referenceGlyph; /* Glyph ID of control glyph */ + HBGlyphID referenceGlyph; /* Glyph ID of control glyph */ HBUINT16 coordPoint; /* Index of contour point on the * reference glyph */ public: @@ -116,9 +113,11 @@ struct BaseCoordFormat3 struct BaseCoord { - hb_position_t get_coord (hb_font_t *font, + bool has_data () const { return u.format; } + + hb_position_t get_coord (hb_font_t *font, const VariationStore &var_store, - hb_direction_t direction) const + hb_direction_t direction) const { switch (u.format) { case 1: return u.format1.get_coord (); @@ -142,10 +141,10 @@ struct BaseCoord protected: union { - HBUINT16 format; - BaseCoordFormat1 format1; - BaseCoordFormat2 format2; - BaseCoordFormat3 format3; + HBUINT16 format; + BaseCoordFormat1 format1; + BaseCoordFormat2 format2; + BaseCoordFormat3 format3; } u; public: DEFINE_SIZE_UNION (2, format); @@ -153,14 +152,9 @@ struct BaseCoord struct FeatMinMaxRecord { - static int cmp (const void *key_, const void *entry_) - { - hb_tag_t key = * (hb_tag_t *) key_; - const FeatMinMaxRecord &entry = * (const FeatMinMaxRecord *) entry_; - return key < (unsigned int) entry.tag ? -1 : - key > (unsigned int) entry.tag ? 1 : - 0; - } + int cmp (hb_tag_t key) const { return tag.cmp (key); } + + bool has_data () const { return tag; } void get_min_max (const BaseCoord **min, const BaseCoord **max) const { @@ -195,17 +189,12 @@ struct FeatMinMaxRecord struct MinMax { void get_min_max (hb_tag_t feature_tag, - const BaseCoord **min, - const BaseCoord **max) const + const BaseCoord **min, + const BaseCoord **max) const { - /* TODO Replace hb_bsearch() with .bsearch(). */ - const FeatMinMaxRecord *minMaxCoord = (const FeatMinMaxRecord *) - hb_bsearch (&feature_tag, featMinMaxRecords.arrayZ, - featMinMaxRecords.len, - FeatMinMaxRecord::static_size, - FeatMinMaxRecord::cmp); - if (minMaxCoord) - minMaxCoord->get_min_max (min, max); + const FeatMinMaxRecord &minMaxCoord = featMinMaxRecords.bsearch (feature_tag); + if (minMaxCoord.has_data ()) + minMaxCoord.get_min_max (min, max); else { if (likely (min)) *min = &(this+minCoord); @@ -271,17 +260,11 @@ struct BaseValues struct BaseLangSysRecord { - static int cmp (const void *key_, const void *entry_) - { - hb_tag_t key = * (hb_tag_t *) key_; - const BaseLangSysRecord &entry = * (const BaseLangSysRecord *) entry_; - return key < (unsigned int) entry.baseLangSysTag ? -1 : - key > (unsigned int) entry.baseLangSysTag ? 1 : - 0; - } + int cmp (hb_tag_t key) const { return baseLangSysTag.cmp (key); } + + bool has_data () const { return baseLangSysTag; } - const MinMax &get_min_max () const - { return this+minMax; } + const MinMax &get_min_max () const { return this+minMax; } bool sanitize (hb_sanitize_context_t *c, const void *base) const { @@ -303,19 +286,14 @@ struct BaseScript { const MinMax &get_min_max (hb_tag_t language_tag) const { - /* TODO Replace hb_bsearch() with .bsearch(). */ - const BaseLangSysRecord* record = (const BaseLangSysRecord *) - hb_bsearch (&language_tag, baseLangSysRecords.arrayZ, - baseLangSysRecords.len, - BaseLangSysRecord::static_size, - BaseLangSysRecord::cmp); - return record ? 
record->get_min_max () : this+defaultMinMax; + const BaseLangSysRecord& record = baseLangSysRecords.bsearch (language_tag); + return record.has_data () ? record.get_min_max () : this+defaultMinMax; } const BaseCoord &get_base_coord (int baseline_tag_index) const { return (this+baseValues).get_base_coord (baseline_tag_index); } - bool is_empty () const { return !baseValues; } + bool has_data () const { return baseValues; } bool sanitize (hb_sanitize_context_t *c) const { @@ -345,14 +323,9 @@ struct BaseScript struct BaseScriptList; struct BaseScriptRecord { - static int cmp (const void *key_, const void *entry_) - { - hb_tag_t key = * (hb_tag_t *) key_; - const BaseScriptRecord &entry = * (const BaseScriptRecord *) entry_; - return key < (unsigned int) entry.baseScriptTag ? -1 : - key > (unsigned int) entry.baseScriptTag ? 1 : - 0; - } + int cmp (hb_tag_t key) const { return baseScriptTag.cmp (key); } + + bool has_data () const { return baseScriptTag; } const BaseScript &get_base_script (const BaseScriptList *list) const { return list+baseScript; } @@ -376,22 +349,11 @@ struct BaseScriptRecord struct BaseScriptList { - const BaseScriptRecord *find_record (hb_tag_t script) const - { - /* TODO Replace hb_bsearch() with .bsearch(). */ - return (const BaseScriptRecord *) hb_bsearch (&script, baseScriptRecords.arrayZ, - baseScriptRecords.len, - BaseScriptRecord::static_size, - BaseScriptRecord::cmp); - } - - /* TODO: Or client should handle fallback? */ const BaseScript &get_base_script (hb_tag_t script) const { - const BaseScriptRecord *record = find_record (script); - if (!record) record = find_record ((hb_script_t) HB_TAG ('D','F','L','T')); - - return record ? record->get_base_script (this) : Null (BaseScript); + const BaseScriptRecord *record = &baseScriptRecords.bsearch (script); + if (!record->has_data ()) record = &baseScriptRecords.bsearch (HB_TAG ('D','F','L','T')); + return record->has_data () ? record->get_base_script (this) : Null (BaseScript); } bool sanitize (hb_sanitize_context_t *c) const @@ -411,15 +373,20 @@ struct BaseScriptList struct Axis { - bool get_baseline (hb_ot_layout_baseline_t baseline, - hb_tag_t script_tag, - hb_tag_t language_tag, - const BaseCoord **coord) const + bool get_baseline (hb_tag_t baseline_tag, + hb_tag_t script_tag, + hb_tag_t language_tag, + const BaseCoord **coord) const { const BaseScript &base_script = (this+baseScriptList).get_base_script (script_tag); - if (base_script.is_empty ()) return false; + if (!base_script.has_data ()) return false; - if (likely (coord)) *coord = &base_script.get_base_coord ((this+baseTagList).bsearch (baseline)); + if (likely (coord)) + { + unsigned int tag_index = 0; + (this+baseTagList).bfind (baseline_tag, &tag_index); + *coord = &base_script.get_base_coord (tag_index); + } return true; } @@ -431,7 +398,7 @@ struct Axis const BaseCoord **max_coord) const { const BaseScript &base_script = (this+baseScriptList).get_base_script (script_tag); - if (base_script.is_empty ()) return false; + if (!base_script.has_data ()) return false; base_script.get_min_max (language_tag).get_min_max (feature_tag, min_coord, max_coord); @@ -447,7 +414,7 @@ struct Axis } protected: - OffsetTo > + OffsetTo> baseTagList; /* Offset to BaseTagList table, from beginning * of Axis table (may be NULL) * Array of 4-byte baseline identification tags — must @@ -472,20 +439,21 @@ struct BASE const VariationStore &get_var_store () const { return version.to_int () < 0x00010001u ? 
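BaseScriptList::get_base_script above binary-searches the sorted script records and, when the requested script has no record, retries with the 'DFLT' tag before giving up. A sketch of that fallback with std::lower_bound over hypothetical record types:

#include <algorithm>
#include <cstdint>
#include <vector>

using Tag = uint32_t;
constexpr Tag make_tag (char a, char b, char c, char d)
{ return (Tag (uint8_t (a)) << 24) | (Tag (uint8_t (b)) << 16)
       | (Tag (uint8_t (c)) << 8)  |  Tag (uint8_t (d)); }

struct ScriptRecordS { Tag tag; unsigned script_index; };

// records must be sorted by tag, as the table's bsearch requires.
const ScriptRecordS *find_script (const std::vector<ScriptRecordS> &records, Tag script)
{
  auto it = std::lower_bound (records.begin (), records.end (), script,
                              [] (const ScriptRecordS &r, Tag t) { return r.tag < t; });
  if (it != records.end () && it->tag == script) return &*it;
  if (script != make_tag ('D','F','L','T'))
    return find_script (records, make_tag ('D','F','L','T'));   // one-shot fallback
  return nullptr;
}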
Null (VariationStore) : this+varStore; } - bool get_baseline (hb_font_t *font, - hb_ot_layout_baseline_t baseline, - hb_direction_t direction, - hb_tag_t script_tag, - hb_tag_t language_tag, - hb_position_t *base) const + bool get_baseline (hb_font_t *font, + hb_tag_t baseline_tag, + hb_direction_t direction, + hb_tag_t script_tag, + hb_tag_t language_tag, + hb_position_t *base) const { - const BaseCoord *base_coord; - if (!get_axis (direction).get_baseline (baseline, script_tag, language_tag, &base_coord)) + const BaseCoord *base_coord = nullptr; + if (unlikely (!get_axis (direction).get_baseline (baseline_tag, script_tag, language_tag, &base_coord) || + !base_coord || !base_coord->has_data ())) return false; - if (likely (base && base_coord)) *base = base_coord->get_coord (font, - get_var_store (), - direction); + if (likely (base)) + *base = base_coord->get_coord (font, get_var_store (), direction); + return true; } diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-common.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-common.hh index 7618057ed1c3..56773e68e366 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-common.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-common.hh @@ -33,6 +33,7 @@ #include "hb-ot-layout.hh" #include "hb-open-type.hh" #include "hb-set.hh" +#include "hb-bimap.hh" #ifndef HB_MAX_NESTING_LEVEL @@ -59,6 +60,18 @@ #define HB_MAX_LANGSYS 2000 #endif +#ifndef HB_MAX_FEATURES +#define HB_MAX_FEATURES 750 +#endif + +#ifndef HB_MAX_FEATURE_INDICES +#define HB_MAX_FEATURE_INDICES 1500 +#endif + +#ifndef HB_MAX_LOOKUP_INDICES +#define HB_MAX_LOOKUP_INDICES 20000 +#endif + namespace OT { @@ -66,6 +79,213 @@ namespace OT { #define NOT_COVERED ((unsigned int) -1) +template +static inline void Coverage_serialize (hb_serialize_context_t *c, + Iterator it); + +template +static inline void ClassDef_serialize (hb_serialize_context_t *c, + Iterator it); + +static void ClassDef_remap_and_serialize (hb_serialize_context_t *c, + const hb_set_t &glyphset, + const hb_map_t &gid_klass_map, + hb_sorted_vector_t &glyphs, + const hb_set_t &klasses, + hb_map_t *klass_map /*INOUT*/); + +struct hb_subset_layout_context_t : + hb_dispatch_context_t +{ + const char *get_name () { return "SUBSET_LAYOUT"; } + static return_t default_return_value () { return hb_empty_t (); } + + bool visitScript () + { + return script_count++ < HB_MAX_SCRIPTS; + } + + bool visitLangSys () + { + return langsys_count++ < HB_MAX_LANGSYS; + } + + bool visitFeatureIndex (int count) + { + feature_index_count += count; + return feature_index_count < HB_MAX_FEATURE_INDICES; + } + + bool visitLookupIndex() + { + lookup_index_count++; + return lookup_index_count < HB_MAX_LOOKUP_INDICES; + } + + hb_subset_context_t *subset_context; + const hb_tag_t table_tag; + const hb_map_t *lookup_index_map; + const hb_map_t *feature_index_map; + + hb_subset_layout_context_t (hb_subset_context_t *c_, + hb_tag_t tag_, + hb_map_t *lookup_map_, + hb_map_t *feature_map_) : + subset_context (c_), + table_tag (tag_), + lookup_index_map (lookup_map_), + feature_index_map (feature_map_), + script_count (0), + langsys_count (0), + feature_index_count (0), + lookup_index_count (0) + {} + + private: + unsigned script_count; + unsigned langsys_count; + unsigned feature_index_count; + unsigned lookup_index_count; +}; + +struct hb_collect_variation_indices_context_t : + hb_dispatch_context_t +{ + template + return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return 
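The new hb_subset_layout_context_t mainly carries visit budgets: each visit*() call counts a script, langsys, feature index or lookup index, and subsetting stops descending once a cap is exceeded, so a malformed font cannot blow up the subsetter. A toy model of the same idea; the caps mirror the HB_MAX_* macros in the hunk, and HB_MAX_SCRIPTS is assumed to be the pre-existing 500:

struct toy_layout_budget_t
{
  static constexpr unsigned MAX_SCRIPTS         = 500;    // assumed value of HB_MAX_SCRIPTS
  static constexpr unsigned MAX_LANGSYS         = 2000;
  static constexpr unsigned MAX_FEATURE_INDICES = 1500;
  static constexpr unsigned MAX_LOOKUP_INDICES  = 20000;

  bool visit_script ()       { return script_count++ < MAX_SCRIPTS; }
  bool visit_langsys ()      { return langsys_count++ < MAX_LANGSYS; }
  bool visit_lookup_index () { return lookup_index_count++ < MAX_LOOKUP_INDICES; }
  bool visit_feature_index (unsigned count)
  {
    feature_index_count += count;
    return feature_index_count < MAX_FEATURE_INDICES;
  }

 private:
  unsigned script_count = 0;
  unsigned langsys_count = 0;
  unsigned feature_index_count = 0;
  unsigned lookup_index_count = 0;
};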
hb_empty_t (); } + static return_t default_return_value () { return hb_empty_t (); } + + hb_set_t *layout_variation_indices; + const hb_set_t *glyph_set; + const hb_map_t *gpos_lookups; + + hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_, + const hb_set_t *glyph_set_, + const hb_map_t *gpos_lookups_) : + layout_variation_indices (layout_variation_indices_), + glyph_set (glyph_set_), + gpos_lookups (gpos_lookups_) {} +}; + +template +struct subset_offset_array_t +{ + subset_offset_array_t (hb_subset_context_t *subset_context_, + OutputArray& out_, + const void *base_) : subset_context (subset_context_), + out (out_), base (base_) {} + + template + bool operator () (T&& offset) + { + auto *o = out.serialize_append (subset_context->serializer); + if (unlikely (!o)) return false; + auto snap = subset_context->serializer->snapshot (); + bool ret = o->serialize_subset (subset_context, offset, base); + if (!ret) + { + out.pop (); + subset_context->serializer->revert (snap); + } + return ret; + } + + private: + hb_subset_context_t *subset_context; + OutputArray &out; + const void *base; +}; + + +template +struct subset_offset_array_arg_t +{ + subset_offset_array_arg_t (hb_subset_context_t *subset_context_, + OutputArray& out_, + const void *base_, + Arg &&arg_) : subset_context (subset_context_), out (out_), + base (base_), arg (arg_) {} + + template + bool operator () (T&& offset) + { + auto *o = out.serialize_append (subset_context->serializer); + if (unlikely (!o)) return false; + auto snap = subset_context->serializer->snapshot (); + bool ret = o->serialize_subset (subset_context, offset, base, arg); + if (!ret) + { + out.pop (); + subset_context->serializer->revert (snap); + } + return ret; + } + + private: + hb_subset_context_t *subset_context; + OutputArray &out; + const void *base; + Arg &&arg; +}; + +/* + * Helper to subset an array of offsets. Subsets the thing pointed to by each offset + * and discards the offset in the array if the subset operation results in an empty + * thing. + */ +struct +{ + template + subset_offset_array_t + operator () (hb_subset_context_t *subset_context, OutputArray& out, + const void *base) const + { return subset_offset_array_t (subset_context, out, base); } + + /* Variant with one extra argument passed to serialize_subset */ + template + subset_offset_array_arg_t + operator () (hb_subset_context_t *subset_context, OutputArray& out, + const void *base, Arg &&arg) const + { return subset_offset_array_arg_t (subset_context, out, base, arg); } +} +HB_FUNCOBJ (subset_offset_array); + +template +struct subset_record_array_t +{ + subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_, + const void *base_) : subset_layout_context (c_), + out (out_), base (base_) {} + + template + void + operator () (T&& record) + { + auto snap = subset_layout_context->subset_context->serializer->snapshot (); + bool ret = record.subset (subset_layout_context, base); + if (!ret) subset_layout_context->subset_context->serializer->revert (snap); + else out->len++; + } + + private: + hb_subset_layout_context_t *subset_layout_context; + OutputArray *out; + const void *base; +}; + +/* + * Helper to subset a RecordList/record array. Subsets each Record in the array and + * discards the record if the subset operation returns false. 
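subset_offset_array and subset_record_array both rely on the serializer's snapshot/revert pair: append an output slot, try to subset the referenced object, and roll back both the slot and any bytes written if the result came out empty. A toy byte-buffer version of that idiom (not hb_serialize_context_t):

#include <cstddef>
#include <vector>

struct toy_serializer_t
{
  std::vector<unsigned char> bytes;
  std::size_t snapshot () const { return bytes.size (); }
  void        revert (std::size_t snap) { bytes.resize (snap); }
};

template <typename OutVec, typename SubsetFn>
bool subset_into_slot (toy_serializer_t &s, OutVec &out, SubsetFn &&subset_one)
{
  out.emplace_back ();                     // like serialize_append(): reserve a slot
  std::size_t snap = s.snapshot ();
  bool ok = subset_one (s, out.back ());   // may write bytes into the serializer
  if (!ok)
  {
    out.pop_back ();                       // drop the empty offset/record
    s.revert (snap);                       // and discard the bytes it produced
  }
  return ok;
}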
+ */ +struct +{ + template + subset_record_array_t + operator () (hb_subset_layout_context_t *c, OutputArray* out, + const void *base) const + { return subset_record_array_t (c, out, base); } +} +HB_FUNCOBJ (subset_record_array); /* * @@ -88,6 +308,15 @@ struct Record { int cmp (hb_tag_t a) const { return tag.cmp (a); } + bool subset (hb_subset_layout_context_t *c, const void *base) const + { + TRACE_SUBSET (this); + auto *out = c->subset_context->serializer->embed (this); + if (unlikely (!out)) return_trace (false); + bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag); + return_trace (ret); + } + bool sanitize (hb_sanitize_context_t *c, const void *base) const { TRACE_SANITIZE (this); @@ -104,7 +333,7 @@ struct Record }; template -struct RecordArrayOf : SortedArrayOf > +struct RecordArrayOf : SortedArrayOf> { const OffsetTo& get_offset (unsigned int i) const { return (*this)[i].offset; } @@ -116,11 +345,12 @@ struct RecordArrayOf : SortedArrayOf > unsigned int *record_count /* IN/OUT */, hb_tag_t *record_tags /* OUT */) const { - if (record_count) { - const Record *arr = this->sub_array (start_offset, record_count); - unsigned int count = *record_count; - for (unsigned int i = 0; i < count; i++) - record_tags[i] = arr[i].tag; + if (record_count) + { + + this->sub_array (start_offset, record_count) + | hb_map (&Record::tag) + | hb_sink (hb_array (record_tags, *record_count)) + ; } return this->len; } @@ -136,14 +366,16 @@ struct RecordListOf : RecordArrayOf const Type& operator [] (unsigned int i) const { return this+this->get_offset (i); } - bool subset (hb_subset_context_t *c) const + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l) const { TRACE_SUBSET (this); - struct RecordListOf *out = c->serializer->embed (*this); - if (unlikely (!out)) return_trace (false); - unsigned int count = this->len; - for (unsigned int i = 0; i < count; i++) - out->get_offset (i).serialize_subset (c, (*this)[i], out); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + + + this->iter () + | hb_apply (subset_record_array (l, out, this)) + ; return_trace (true); } @@ -154,11 +386,31 @@ struct RecordListOf : RecordArrayOf } }; +struct Feature; + +struct RecordListOfFeature : RecordListOf +{ + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + unsigned count = this->len; + + hb_zip (*this, hb_range (count)) + | hb_filter (l->feature_index_map, hb_second) + | hb_map (hb_first) + | hb_apply (subset_record_array (l, out, this)) + ; + return_trace (true); + } +}; struct RangeRecord { int cmp (hb_codepoint_t g) const - { return g < start ? -1 : g <= end ? 0 : +1; } + { return g < first ? -1 : g <= last ? 
0 : +1; } bool sanitize (hb_sanitize_context_t *c) const { @@ -167,14 +419,14 @@ struct RangeRecord } bool intersects (const hb_set_t *glyphs) const - { return glyphs->intersects (start, end); } + { return glyphs->intersects (first, last); } template - bool add_coverage (set_t *glyphs) const - { return glyphs->add_range (start, end); } + bool collect_coverage (set_t *glyphs) const + { return glyphs->add_range (first, last); } - GlyphID start; /* First GlyphID in the range */ - GlyphID end; /* Last GlyphID in the range */ + HBGlyphID first; /* First GlyphID in the range */ + HBGlyphID last; /* Last GlyphID in the range */ HBUINT16 value; /* Value */ public: DEFINE_SIZE_STATIC (6); @@ -184,15 +436,38 @@ DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord); struct IndexArray : ArrayOf { + bool intersects (const hb_map_t *indexes) const + { return hb_any (*this, indexes); } + + template + void serialize (hb_serialize_context_t *c, + hb_subset_layout_context_t *l, + Iterator it) + { + if (!it) return; + if (unlikely (!c->extend_min ((*this)))) return; + + for (const auto _ : it) + { + if (!l->visitLookupIndex()) break; + + Index i; + i = _; + c->copy (i); + this->len++; + } + } + unsigned int get_indexes (unsigned int start_offset, unsigned int *_count /* IN/OUT */, unsigned int *_indexes /* OUT */) const { - if (_count) { - const HBUINT16 *arr = this->sub_array (start_offset, _count); - unsigned int count = *_count; - for (unsigned int i = 0; i < count; i++) - _indexes[i] = arr[i]; + if (_count) + { + + this->sub_array (start_offset, _count) + | hb_sink (hb_array (_indexes, *_count)) + ; } return this->len; } @@ -204,11 +479,6 @@ struct IndexArray : ArrayOf }; -struct Script; -struct LangSys; -struct Feature; - - struct LangSys { unsigned int get_feature_count () const @@ -227,13 +497,49 @@ struct LangSys { if (reqFeatureIndex == 0xFFFFu) return Index::NOT_FOUND_INDEX; - return reqFeatureIndex;; + return reqFeatureIndex; } - bool subset (hb_subset_context_t *c) const + LangSys* copy (hb_serialize_context_t *c) const + { + TRACE_SERIALIZE (this); + return_trace (c->embed (*this)); + } + + bool operator == (const LangSys& o) const + { + if (featureIndex.len != o.featureIndex.len || + reqFeatureIndex != o.reqFeatureIndex) + return false; + + for (const auto _ : + hb_zip (featureIndex, o.featureIndex)) + if (_.first != _.second) return false; + + return true; + } + + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l, + const Tag *tag = nullptr) const { TRACE_SUBSET (this); - return_trace (c->serializer->embed (*this)); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? 
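LangSys::subset, whose body starts here and continues just below, remaps the required feature through the plan's feature_index_map (falling back to 0xFFFF when it was dropped) and filters-then-remaps the feature index list. A plain-STL sketch of that remapping, with a toy type standing in for LangSys:

#include <cstdint>
#include <unordered_map>
#include <vector>

struct toy_langsys_t
{
  uint16_t req_feature_index = 0xFFFFu;
  std::vector<uint16_t> feature_indices;
};

inline toy_langsys_t
remap_langsys (const toy_langsys_t &in,
               const std::unordered_map<uint16_t, uint16_t> &feature_index_map)
{
  toy_langsys_t out;
  auto req = feature_index_map.find (in.req_feature_index);
  out.req_feature_index = req != feature_index_map.end () ? req->second : 0xFFFFu;

  for (uint16_t idx : in.feature_indices)
  {
    auto it = feature_index_map.find (idx);
    if (it == feature_index_map.end ()) continue;   // feature was dropped: filter it out
    out.feature_indices.push_back (it->second);     // otherwise remap to its new index
  }
  return out;
}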
l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu; + + if (!l->visitFeatureIndex (featureIndex.len)) + return_trace (false); + + auto it = + + hb_iter (featureIndex) + | hb_filter (l->feature_index_map) + | hb_map (l->feature_index_map) + ; + + bool ret = bool (it); + out->featureIndex.serialize (c->serializer, l, it); + return_trace (ret); } bool sanitize (hb_sanitize_context_t *c, @@ -275,16 +581,46 @@ struct Script bool has_default_lang_sys () const { return defaultLangSys != 0; } const LangSys& get_default_lang_sys () const { return this+defaultLangSys; } - bool subset (hb_subset_context_t *c) const + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l, + const Tag *tag) const { TRACE_SUBSET (this); - struct Script *out = c->serializer->embed (*this); - if (unlikely (!out)) return_trace (false); - out->defaultLangSys.serialize_subset (c, this+defaultLangSys, out); - unsigned int count = langSys.len; - for (unsigned int i = 0; i < count; i++) - out->langSys.arrayZ[i].offset.serialize_subset (c, this+langSys[i].offset, out); - return_trace (true); + if (!l->visitScript ()) return_trace (false); + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + bool defaultLang = false; + if (has_default_lang_sys ()) + { + c->serializer->push (); + const LangSys& ls = this+defaultLangSys; + bool ret = ls.subset (c, l); + if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T')) + { + c->serializer->pop_discard (); + out->defaultLangSys = 0; + } + else + { + c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ()); + defaultLang = true; + } + } + + + langSys.iter () + | hb_filter ([=] (const Record& record) {return l->visitLangSys (); }) + | hb_filter ([&] (const Record& record) + { + const LangSys& d = this+defaultLangSys; + const LangSys& l = this+record.offset; + return !(l == d); + }) + | hb_apply (subset_record_array (l, &(out->langSys), this)) + ; + + return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB); } bool sanitize (hb_sanitize_context_t *c, @@ -381,6 +717,12 @@ struct FeatureParamsSize return_trace (true); } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + return_trace ((bool) c->serializer->embed (*this)); + } + HBUINT16 designSize; /* Represents the design size in 720/inch * units (decipoints). The design size entry * must be non-zero. When there is a design @@ -431,6 +773,12 @@ struct FeatureParamsStylisticSet return_trace (c->check_struct (this)); } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + return_trace ((bool) c->serializer->embed (*this)); + } + HBUINT16 version; /* (set to 0): This corresponds to a “minor” * version number. 
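Script::subset above also drops any language system whose content is identical to the default language system, since resolving through the default yields the same features. A standalone sketch of that deduplication, again with a toy langsys type:

#include <cstdint>
#include <vector>

struct toy_langsys_t
{
  uint16_t req_feature_index = 0xFFFFu;
  std::vector<uint16_t> feature_indices;

  bool operator == (const toy_langsys_t &o) const
  {
    return req_feature_index == o.req_feature_index
        && feature_indices == o.feature_indices;
  }
};

inline std::vector<toy_langsys_t>
drop_langsys_equal_to_default (const std::vector<toy_langsys_t> &lang_systems,
                               const toy_langsys_t &default_langsys)
{
  std::vector<toy_langsys_t> kept;
  for (const toy_langsys_t &ls : lang_systems)
    if (!(ls == default_langsys))          // mirrors the hb_filter (!(l == d)) step
      kept.push_back (ls);
  return kept;
}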
Additional data may be * added to the end of this Feature Parameters @@ -457,6 +805,27 @@ struct FeatureParamsStylisticSet /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */ struct FeatureParamsCharacterVariants { + unsigned + get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const + { + if (char_count) + { + + characters.sub_array (start_offset, char_count) + | hb_sink (hb_array (chars, *char_count)) + ; + } + return characters.len; + } + + unsigned get_size () const + { return min_size + characters.len * HBUINT24::static_size; } + + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + return_trace ((bool) c->serializer->embed (*this)); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -500,6 +869,9 @@ struct FeatureParams { bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const { +#ifdef HB_NO_LAYOUT_FEATURE_PARAMS + return true; +#endif TRACE_SANITIZE (this); if (tag == HB_TAG ('s','i','z','e')) return_trace (u.size.sanitize (c)); @@ -510,26 +882,39 @@ struct FeatureParams return_trace (true); } + bool subset (hb_subset_context_t *c, const Tag* tag) const + { + TRACE_SUBSET (this); + if (!tag) return_trace (false); + if (*tag == HB_TAG ('s','i','z','e')) + return_trace (u.size.subset (c)); + if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */ + return_trace (u.stylisticSet.subset (c)); + if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */ + return_trace (u.characterVariants.subset (c)); + return_trace (false); + } + +#ifndef HB_NO_LAYOUT_FEATURE_PARAMS const FeatureParamsSize& get_size_params (hb_tag_t tag) const { if (tag == HB_TAG ('s','i','z','e')) return u.size; return Null (FeatureParamsSize); } - const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const { if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */ return u.stylisticSet; return Null (FeatureParamsStylisticSet); } - const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const { if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */ return u.characterVariants; return Null (FeatureParamsCharacterVariants); } +#endif private: union { @@ -538,7 +923,7 @@ struct FeatureParams FeatureParamsCharacterVariants characterVariants; } u; public: - DEFINE_SIZE_STATIC (17); + DEFINE_SIZE_MIN (0); }; struct Feature @@ -557,13 +942,28 @@ struct Feature const FeatureParams &get_feature_params () const { return this+featureParams; } - bool subset (hb_subset_context_t *c) const + bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const + { return lookupIndex.intersects (lookup_indexes); } + + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l, + const Tag *tag = nullptr) const { TRACE_SUBSET (this); - struct Feature *out = c->serializer->embed (*this); - if (unlikely (!out)) return_trace (false); - out->featureParams.set (0); /* TODO(subset) FeatureParams. 
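FeatureParams::subset dispatches purely on the feature tag: 'size' has its own parameter block, while the stylistic-set and character-variant blocks are recognized by masking the tag down to its 'ss' or 'cv' prefix. A self-contained illustration; make_tag is a local stand-in for HB_TAG:

#include <cstdint>

constexpr uint32_t make_tag (char a, char b, char c, char d)
{
  return (uint32_t (uint8_t (a)) << 24) | (uint32_t (uint8_t (b)) << 16)
       | (uint32_t (uint8_t (c)) << 8)  |  uint32_t (uint8_t (d));
}

enum class feature_params_kind { none, size, stylistic_set, character_variant };

inline feature_params_kind classify_feature_params (uint32_t tag)
{
  if (tag == make_tag ('s','i','z','e'))
    return feature_params_kind::size;
  if ((tag & 0xFFFF0000u) == make_tag ('s','s','\0','\0'))   // ss01..ss20
    return feature_params_kind::stylistic_set;
  if ((tag & 0xFFFF0000u) == make_tag ('c','v','\0','\0'))   // cv01..cv99
    return feature_params_kind::character_variant;
  return feature_params_kind::none;
}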
*/ - return_trace (true); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + bool subset_featureParams = out->featureParams.serialize_subset (c, featureParams, this, tag); + + auto it = + + hb_iter (lookupIndex) + | hb_filter (l->lookup_index_map) + | hb_map (l->lookup_index_map) + ; + + out->lookupIndex.serialize (c->serializer, l, it); + return_trace (bool (it) || subset_featureParams + || (tag && *tag == HB_TAG ('p', 'r', 'e', 'f'))); } bool sanitize (hb_sanitize_context_t *c, @@ -584,25 +984,25 @@ struct Feature * Adobe tools, only the 'size' feature had FeatureParams defined. */ - OffsetTo orig_offset = featureParams; + if (likely (featureParams.is_null ())) + return_trace (true); + + unsigned int orig_offset = featureParams; if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))) return_trace (false); - if (likely (orig_offset.is_null ())) - return_trace (true); - if (featureParams == 0 && closure && closure->tag == HB_TAG ('s','i','z','e') && closure->list_base && closure->list_base < this) { - unsigned int new_offset_int = (unsigned int) orig_offset - + unsigned int new_offset_int = orig_offset - (((char *) this) - ((char *) closure->list_base)); OffsetTo new_offset; - /* Check that it did not overflow. */ - new_offset.set (new_offset_int); + /* Check that it would not overflow. */ + new_offset = new_offset_int; if (new_offset == new_offset_int && - c->try_set (&featureParams, new_offset) && + c->try_set (&featureParams, new_offset_int) && !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)) return_trace (false); } @@ -648,16 +1048,19 @@ struct Lookup { unsigned int get_subtable_count () const { return subTable.len; } - template - const TSubTable& get_subtable (unsigned int i) const - { return this+CastR > (subTable)[i]; } - template const OffsetArrayOf& get_subtables () const - { return CastR > (subTable); } + { return reinterpret_cast &> (subTable); } template OffsetArrayOf& get_subtables () - { return CastR > (subTable); } + { return reinterpret_cast &> (subTable); } + + template + const TSubTable& get_subtable (unsigned int i) const + { return this+get_subtables ()[i]; } + template + TSubTable& get_subtable (unsigned int i) + { return this+get_subtables ()[i]; } unsigned int get_size () const { @@ -683,14 +1086,14 @@ struct Lookup return flag; } - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... 
ds) const { unsigned int lookup_type = get_type (); TRACE_DISPATCH (this, lookup_type); unsigned int count = get_subtable_count (); for (unsigned int i = 0; i < count; i++) { - typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type); + typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type, hb_forward (ds)...); if (c->stop_sublookup_iteration (r)) return_trace (r); } @@ -704,95 +1107,73 @@ struct Lookup { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - lookupType.set (lookup_type); - lookupFlag.set (lookup_props & 0xFFFFu); + lookupType = lookup_type; + lookupFlag = lookup_props & 0xFFFFu; if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false); if (lookupFlag & LookupFlag::UseMarkFilteringSet) { if (unlikely (!c->extend (*this))) return_trace (false); HBUINT16 &markFilteringSet = StructAfter (subTable); - markFilteringSet.set (lookup_props >> 16); + markFilteringSet = lookup_props >> 16; } return_trace (true); } - /* Older compilers need this to NOT be locally defined in a function. */ - template - struct SubTableSubsetWrapper - { - SubTableSubsetWrapper (const TSubTable &subtable_, - unsigned int lookup_type_) : - subtable (subtable_), - lookup_type (lookup_type_) {} - - bool subset (hb_subset_context_t *c) const - { return subtable.dispatch (c, lookup_type); } - - private: - const TSubTable &subtable; - unsigned int lookup_type; - }; - template bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - struct Lookup *out = c->serializer->embed (*this); - if (unlikely (!out)) return_trace (false); - - /* Subset the actual subtables. */ - /* TODO Drop empty ones, either by calling intersects() beforehand, - * or just dropping null offsets after. */ - const OffsetArrayOf& subtables = get_subtables (); - OffsetArrayOf& out_subtables = out->get_subtables (); - unsigned int count = subTable.len; - for (unsigned int i = 0; i < count; i++) - { - SubTableSubsetWrapper wrapper (this+subtables[i], get_type ()); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + out->lookupType = lookupType; + out->lookupFlag = lookupFlag; - out_subtables[i].serialize_subset (c, wrapper, out); - } + const hb_set_t *glyphset = c->plan->glyphset (); + unsigned int lookup_type = get_type (); + + hb_iter (get_subtables ()) + | hb_filter ([this, glyphset, lookup_type] (const OffsetTo &_) { return (this+_).intersects (glyphset, lookup_type); }) + | hb_apply (subset_offset_array (c, out->get_subtables (), this, lookup_type)) + ; return_trace (true); } - /* Older compilers need this to NOT be locally defined in a function. 
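Lookup::dispatch is now variadic: extra arguments are perfectly forwarded to every subtable until the context asks to stop, which is what lets subset() push the lookup type down through the same dispatch path. A toy template with stand-in context and subtable types:

#include <utility>
#include <vector>

template <typename SubTable>
struct toy_lookup_t
{
  std::vector<SubTable> subtables;

  template <typename Context, typename... Ts>
  typename Context::return_t dispatch (Context *c, Ts&&... ds) const
  {
    for (const SubTable &st : subtables)
    {
      typename Context::return_t r = st.dispatch (c, std::forward<Ts> (ds)...);
      if (c->stop_sublookup_iteration (r))
        return r;
    }
    return c->default_return_value ();
  }
};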
*/ - template - struct SubTableSanitizeWrapper : TSubTable - { - bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) const - { return this->dispatch (c, lookup_type); } - }; - template bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false); + + unsigned subtables = get_subtable_count (); + if (unlikely (!c->visit_subtables (subtables))) return_trace (false); + if (lookupFlag & LookupFlag::UseMarkFilteringSet) { const HBUINT16 &markFilteringSet = StructAfter (subTable); if (!markFilteringSet.sanitize (c)) return_trace (false); } - if (unlikely (!CastR > > (subTable) - .sanitize (c, this, get_type ()))) + if (unlikely (!get_subtables ().sanitize (c, this, get_type ()))) return_trace (false); - if (unlikely (get_type () == TSubTable::Extension)) + if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ())) { /* The spec says all subtables of an Extension lookup should * have the same type, which shall not be the Extension type * itself (but we already checked for that). - * This is specially important if one has a reverse type! */ + * This is specially important if one has a reverse type! + * + * We only do this if sanitizer edit_count is zero. Otherwise, + * some of the subtables might have become insane after they + * were sanity-checked by the edits of subsequent subtables. + * https://bugs.chromium.org/p/chromium/issues/detail?id=960331 + */ unsigned int type = get_subtable (0).u.extension.get_type (); - unsigned int count = get_subtable_count (); - for (unsigned int i = 1; i < count; i++) + for (unsigned int i = 1; i < subtables; i++) if (get_subtable (i).u.extension.get_type () != type) return_trace (false); } return_trace (true); - return_trace (true); } private: @@ -800,7 +1181,7 @@ struct Lookup HBUINT16 lookupFlag; /* Lookup qualifiers */ ArrayOf subTable; /* Array of SubTables */ -/*HBUINT16 markFilteringSetX[VAR];*//* Index (base 0) into GDEF mark glyph sets +/*HBUINT16 markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets * structure. This field is only present if bit * UseMarkFilteringSet of lookup flags is set. 
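The reworked Lookup::sanitize keeps the rule that every subtable of an Extension lookup must wrap the same lookup type, but skips the check once the sanitizer has already edited data, because earlier edits can leave later subtables stale (the linked Chromium bug). A standalone restatement of that rule:

#include <cstddef>
#include <vector>

struct ext_subtable_t { unsigned wrapped_type; };

inline bool extension_types_consistent (const std::vector<ext_subtable_t> &subtables,
                                        unsigned edit_count)
{
  if (edit_count != 0 || subtables.empty ())
    return true;                            // after edits, later subtables may be stale
  unsigned type = subtables[0].wrapped_type;
  for (std::size_t i = 1; i < subtables.size (); i++)
    if (subtables[i].wrapped_type != type)
      return false;
  return true;
}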
*/ public: @@ -809,6 +1190,32 @@ struct Lookup typedef OffsetListOf LookupList; +template +struct LookupOffsetList : OffsetListOf +{ + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + unsigned count = this->len; + + hb_zip (*this, hb_range (count)) + | hb_filter (l->lookup_index_map, hb_second) + | hb_map (hb_first) + | hb_apply (subset_offset_array (c, *out, this)) + ; + return_trace (true); + } + + bool sanitize (hb_sanitize_context_t *c) const + { + TRACE_SANITIZE (this); + return_trace (OffsetListOf::sanitize (c, this)); + } +}; + /* * Coverage Table @@ -826,8 +1233,9 @@ struct CoverageFormat1 return i; } - bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs) + template + bool serialize (hb_serialize_context_t *c, Iterator glyphs) { TRACE_SERIALIZE (this); return_trace (glyphArray.serialize (c, glyphs)); @@ -852,20 +1260,20 @@ struct CoverageFormat1 { return glyphs->has (glyphArray[index]); } template - bool add_coverage (set_t *glyphs) const - { - return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len); - } + bool collect_coverage (set_t *glyphs) const + { return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len); } public: /* Older compilers need this to be public. */ - struct Iter { + struct iter_t + { void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; } void fini () {} - bool more () { return i < c->glyphArray.len; } + bool more () const { return i < c->glyphArray.len; } void next () { i++; } - hb_codepoint_t get_glyph () { return c->glyphArray[i]; } - unsigned int get_coverage () { return i; } + hb_codepoint_t get_glyph () const { return c->glyphArray[i]; } + bool operator != (const iter_t& o) const + { return i != o.i || c != o.c; } private: const struct CoverageFormat1 *c; @@ -875,7 +1283,7 @@ struct CoverageFormat1 protected: HBUINT16 coverageFormat; /* Format identifier--format = 1 */ - SortedArrayOf + SortedArrayOf glyphArray; /* Array of GlyphIDs--in numerical order */ public: DEFINE_SIZE_ARRAY (4, glyphArray); @@ -889,43 +1297,53 @@ struct CoverageFormat2 unsigned int get_coverage (hb_codepoint_t glyph_id) const { const RangeRecord &range = rangeRecord.bsearch (glyph_id); - return likely (range.start <= range.end) ? - (unsigned int) range.value + (glyph_id - range.start) : - NOT_COVERED; + return likely (range.first <= range.last) + ? (unsigned int) range.value + (glyph_id - range.first) + : NOT_COVERED; } - bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs) + template + bool serialize (hb_serialize_context_t *c, Iterator glyphs) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - if (unlikely (!glyphs.length)) + if (unlikely (!glyphs)) { - rangeRecord.len.set (0); + rangeRecord.len = 0; return_trace (true); } - unsigned int num_ranges = 1; - for (unsigned int i = 1; i < glyphs.length; i++) - if (glyphs[i - 1] + 1 != glyphs[i]) + /* TODO(iter) Write more efficiently? 
*/ + + unsigned num_ranges = 0; + hb_codepoint_t last = (hb_codepoint_t) -2; + for (auto g: glyphs) + { + if (last + 1 != g) num_ranges++; - rangeRecord.len.set (num_ranges); - if (unlikely (!c->extend (rangeRecord))) return_trace (false); + last = g; + } + + if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false); - unsigned int range = 0; - rangeRecord[range].start = glyphs[0]; - rangeRecord[range].value.set (0); - for (unsigned int i = 1; i < glyphs.length; i++) + unsigned count = 0; + unsigned range = (unsigned) -1; + last = (hb_codepoint_t) -2; + for (auto g: glyphs) { - if (glyphs[i - 1] + 1 != glyphs[i]) + if (last + 1 != g) { range++; - rangeRecord[range].start = glyphs[i]; - rangeRecord[range].value.set (i); + rangeRecord[range].first = g; + rangeRecord[range].value = count; } - rangeRecord[range].end = glyphs[i]; + rangeRecord[range].last = g; + last = g; + count++; } + return_trace (true); } @@ -951,7 +1369,7 @@ struct CoverageFormat2 for (i = 0; i < count; i++) { const RangeRecord &range = rangeRecord[i]; if (range.value <= index && - index < (unsigned int) range.value + (range.end - range.start) && + index < (unsigned int) range.value + (range.last - range.first) && range.intersects (glyphs)) return true; else if (index < range.value) @@ -961,57 +1379,61 @@ struct CoverageFormat2 } template - bool add_coverage (set_t *glyphs) const + bool collect_coverage (set_t *glyphs) const { unsigned int count = rangeRecord.len; for (unsigned int i = 0; i < count; i++) - if (unlikely (!rangeRecord[i].add_coverage (glyphs))) + if (unlikely (!rangeRecord[i].collect_coverage (glyphs))) return false; return true; } public: /* Older compilers need this to be public. */ - struct Iter + struct iter_t { void init (const CoverageFormat2 &c_) { c = &c_; coverage = 0; i = 0; - j = c->rangeRecord.len ? c->rangeRecord[0].start : 0; - if (unlikely (c->rangeRecord[0].start > c->rangeRecord[0].end)) + j = c->rangeRecord.len ? c->rangeRecord[0].first : 0; + if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last)) { /* Broken table. Skip. */ i = c->rangeRecord.len; } } void fini () {} - bool more () { return i < c->rangeRecord.len; } + bool more () const { return i < c->rangeRecord.len; } void next () { - if (j >= c->rangeRecord[i].end) + if (j >= c->rangeRecord[i].last) { i++; if (more ()) { - hb_codepoint_t old = j; - j = c->rangeRecord[i].start; - if (unlikely (j <= old)) + unsigned int old = coverage; + j = c->rangeRecord[i].first; + coverage = c->rangeRecord[i].value; + if (unlikely (coverage != old + 1)) { - /* Broken table. Skip. Important to avoid DoS. */ + /* Broken table. Skip. Important to avoid DoS. + * Also, our callers depend on coverage being + * consecutive and monotonically increasing, + * ie. iota(). */ i = c->rangeRecord.len; return; } - coverage = c->rangeRecord[i].value; } return; } coverage++; j++; } - hb_codepoint_t get_glyph () { return j; } - unsigned int get_coverage () { return coverage; } + hb_codepoint_t get_glyph () const { return j; } + bool operator != (const iter_t& o) const + { return i != o.i || j != o.j || c != o.c; } private: const struct CoverageFormat2 *c; @@ -1032,6 +1454,15 @@ struct CoverageFormat2 struct Coverage { + /* Has interface. */ + static constexpr unsigned SENTINEL = NOT_COVERED; + typedef unsigned int value_t; + value_t operator [] (hb_codepoint_t k) const { return get (k); } + bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; } + /* Predicate. 
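Coverage (and ClassDef further down) gain a small "has interface": operator[] returns the looked-up value or a sentinel, has() compares against that sentinel, and operator() makes the table usable directly as a predicate inside hb_filter pipelines. A minimal stand-in with an ordinary map doing the lookup:

#include <cstdint>
#include <unordered_map>

struct coverage_like_t
{
  static constexpr unsigned SENTINEL = (unsigned) -1;   // plays the role of NOT_COVERED

  std::unordered_map<uint16_t, unsigned> index_of;      // gid -> coverage index

  unsigned operator [] (uint16_t gid) const
  {
    auto it = index_of.find (gid);
    return it == index_of.end () ? SENTINEL : it->second;
  }
  bool has (uint16_t gid) const { return (*this)[gid] != SENTINEL; }
  bool operator () (uint16_t gid) const { return has (gid); }    // predicate form
};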
*/ + bool operator () (hb_codepoint_t k) const { return has (k); } + + unsigned int get (hb_codepoint_t k) const { return get_coverage (k); } unsigned int get_coverage (hb_codepoint_t glyph_id) const { switch (u.format) { @@ -1041,17 +1472,24 @@ struct Coverage } } - bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs) + template + bool serialize (hb_serialize_context_t *c, Iterator glyphs) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - unsigned int num_ranges = 1; - for (unsigned int i = 1; i < glyphs.length; i++) - if (glyphs[i - 1] + 1 != glyphs[i]) + unsigned count = 0; + unsigned num_ranges = 0; + hb_codepoint_t last = (hb_codepoint_t) -2; + for (auto g: glyphs) + { + if (last + 1 != g) num_ranges++; - u.format.set (glyphs.length * 2 < num_ranges * 3 ? 1 : 2); + last = g; + count++; + } + u.format = count <= num_ranges * 3 ? 1 : 2; switch (u.format) { @@ -1061,6 +1499,23 @@ struct Coverage } } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto it = + + iter () + | hb_filter (glyphset) + | hb_map_retains_sorting (glyph_map) + ; + + bool ret = bool (it); + Coverage_serialize (c->serializer, it); + return_trace (ret); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -1095,19 +1550,20 @@ struct Coverage /* Might return false if array looks unsorted. * Used for faster rejection of corrupt data. */ template - bool add_coverage (set_t *glyphs) const + bool collect_coverage (set_t *glyphs) const { switch (u.format) { - case 1: return u.format1.add_coverage (glyphs); - case 2: return u.format2.add_coverage (glyphs); + case 1: return u.format1.collect_coverage (glyphs); + case 2: return u.format2.collect_coverage (glyphs); default:return false; } } - struct Iter + struct iter_t : hb_iter_with_fallback_t { - Iter (const Coverage &c_) + static constexpr bool is_sorted_iterator = true; + iter_t (const Coverage &c_ = Null (Coverage)) { memset (this, 0, sizeof (*this)); format = c_.u.format; @@ -1118,7 +1574,7 @@ struct Coverage default: return; } } - bool more () + bool __more__ () const { switch (format) { @@ -1127,7 +1583,7 @@ struct Coverage default:return false; } } - void next () + void __next__ () { switch (format) { @@ -1136,7 +1592,10 @@ struct Coverage default: break; } } - hb_codepoint_t get_glyph () + typedef hb_codepoint_t __item_t__; + __item_t__ __item__ () const { return get_glyph (); } + + hb_codepoint_t get_glyph () const { switch (format) { @@ -1145,23 +1604,25 @@ struct Coverage default:return 0; } } - unsigned int get_coverage () + bool operator != (const iter_t& o) const { + if (format != o.format) return true; switch (format) { - case 1: return u.format1.get_coverage (); - case 2: return u.format2.get_coverage (); - default:return -1; + case 1: return u.format1 != o.u.format1; + case 2: return u.format2 != o.u.format2; + default:return false; } } private: unsigned int format; union { - CoverageFormat2::Iter format2; /* Put this one first since it's larger; helps shut up compiler. */ - CoverageFormat1::Iter format1; + CoverageFormat2::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. 
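Both CoverageFormat2::serialize above and Coverage::serialize here walk the sorted glyph iterator counting consecutive runs: a range breaks whenever the next glyph is not the previous one plus one, and format 1 (2 bytes per glyph) is preferred over format 2 (6 bytes per range) when the glyph count is at most three times the range count. A standalone version of the range building and that size heuristic, over simplified types:

#include <cstdint>
#include <vector>

struct glyph_range_t
{
  uint16_t first;
  uint16_t last;
  uint16_t value;   // coverage index of `first`
};

// Builds the ranges CoverageFormat2 would store, from glyphs sorted ascending.
inline std::vector<glyph_range_t>
build_coverage_ranges (const std::vector<uint16_t> &sorted_glyphs)
{
  std::vector<glyph_range_t> ranges;
  unsigned count = 0;
  for (uint16_t g : sorted_glyphs)
  {
    if (ranges.empty () || uint16_t (ranges.back ().last + 1) != g)
      ranges.push_back ({g, g, uint16_t (count)});   // start a new range
    else
      ranges.back ().last = g;                       // extend the current one
    count++;
  }
  return ranges;
}

// Mirrors the count <= num_ranges * 3 choice in Coverage::serialize.
inline unsigned choose_coverage_format (const std::vector<uint16_t> &sorted_glyphs)
{
  unsigned num_ranges = build_coverage_ranges (sorted_glyphs).size ();
  return sorted_glyphs.size () <= num_ranges * 3 ? 1 : 2;
}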
*/ + CoverageFormat1::iter_t format1; } u; }; + iter_t iter () const { return iter_t (*this); } protected: union { @@ -1173,15 +1634,56 @@ struct Coverage DEFINE_SIZE_UNION (2, format); }; +template +static inline void +Coverage_serialize (hb_serialize_context_t *c, + Iterator it) +{ c->start_embed ()->serialize (c, it); } + +static void ClassDef_remap_and_serialize (hb_serialize_context_t *c, + const hb_set_t &glyphset, + const hb_map_t &gid_klass_map, + hb_sorted_vector_t &glyphs, + const hb_set_t &klasses, + hb_map_t *klass_map /*INOUT*/) +{ + if (!klass_map) + { + ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter () + | hb_map (gid_klass_map))); + return; + } + + /* any glyph not assigned a class value falls into Class zero (0), + * if any glyph assigned to class 0, remapping must start with 0->0*/ + if (glyphset.get_population () > gid_klass_map.get_population ()) + klass_map->set (0, 0); + + unsigned idx = klass_map->has (0) ? 1 : 0; + for (const unsigned k: klasses.iter ()) + { + if (klass_map->has (k)) continue; + klass_map->set (k, idx); + idx++; + } + + auto it = + + glyphs.iter () + | hb_map_retains_sorting ([&] (const HBGlyphID& gid) -> hb_pair_t + { + unsigned new_klass = klass_map->get (gid_klass_map[gid]); + return hb_pair ((hb_codepoint_t)gid, new_klass); + }) + ; + + c->propagate_error (glyphs, klasses); + ClassDef_serialize (c, it); +} /* * Class Definition Table */ -static inline void ClassDef_serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t klasses); - struct ClassDefFormat1 { friend struct ClassDef; @@ -1192,54 +1694,64 @@ struct ClassDefFormat1 return classValue[(unsigned int) (glyph_id - startGlyph)]; } + template bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t klasses) + Iterator it) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - if (unlikely (!glyphs.length)) + if (unlikely (!it)) { - startGlyph.set (0); - classValue.len.set (0); + startGlyph = 0; + classValue.len = 0; return_trace (true); } - hb_codepoint_t glyph_min = glyphs[0]; - hb_codepoint_t glyph_max = glyphs[glyphs.length - 1]; - - startGlyph.set (glyph_min); - classValue.len.set (glyph_max - glyph_min + 1); - if (unlikely (!c->extend (classValue))) return_trace (false); - - for (unsigned int i = 0; i < glyphs.length; i++) - classValue[glyphs[i] - glyph_min] = klasses[i]; + hb_codepoint_t glyph_min = (*it).first; + hb_codepoint_t glyph_max = + it + | hb_map (hb_first) + | hb_reduce (hb_max, 0u); + unsigned glyph_count = glyph_max - glyph_min + 1; + startGlyph = glyph_min; + if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false); + for (const hb_pair_t gid_klass_pair : + it) + { + unsigned idx = gid_klass_pair.first - glyph_min; + classValue[idx] = gid_klass_pair.second; + } return_trace (true); } - bool subset (hb_subset_context_t *c) const + bool subset (hb_subset_context_t *c, + hb_map_t *klass_map = nullptr /*OUT*/) const { TRACE_SUBSET (this); - const hb_set_t &glyphset = *c->plan->glyphset; + const hb_set_t &glyphset = *c->plan->_glyphset_gsub; const hb_map_t &glyph_map = *c->plan->glyph_map; - hb_vector_t glyphs; - hb_vector_t klasses; + + hb_sorted_vector_t glyphs; + hb_set_t orig_klasses; + hb_map_t gid_org_klass_map; hb_codepoint_t start = startGlyph; hb_codepoint_t end = start + classValue.len; - for (hb_codepoint_t g = start; g < end; g++) + for (const hb_codepoint_t gid : + hb_range (start, end) + | hb_filter (glyphset)) { - unsigned int value = classValue[g - start]; - 
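ClassDef_remap_and_serialize renumbers the classes that survive subsetting into a dense range, with the constraint that class 0 keeps mapping to 0 whenever some retained glyph still falls into class 0 (glyphs without an entry default to class 0). A plain-STL sketch of the remapping step:

#include <map>
#include <set>

// used_classes: the original class values still referenced after subsetting.
inline std::map<unsigned, unsigned>
remap_classes (const std::set<unsigned> &used_classes, bool some_glyph_has_class_0)
{
  std::map<unsigned, unsigned> klass_map;
  if (some_glyph_has_class_0)
    klass_map[0] = 0;                      // class 0 must stay class 0
  unsigned idx = klass_map.count (0) ? 1 : 0;
  for (unsigned k : used_classes)          // std::set iterates in ascending order
  {
    if (klass_map.count (k)) continue;
    klass_map[k] = idx++;
  }
  return klass_map;
}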
if (!value) continue; - if (!glyphset.has (g)) continue; - glyphs.push()->set (glyph_map[g]); - klasses.push()->set (value); + unsigned klass = classValue[gid - start]; + if (!klass) continue; + + glyphs.push (glyph_map[gid]); + gid_org_klass_map.set (glyph_map[gid], klass); + orig_klasses.add (klass); } - c->serializer->propagate_error (glyphs, klasses); - ClassDef_serialize (c->serializer, glyphs, klasses); - return_trace (glyphs.length); + + ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map, + glyphs, orig_klasses, klass_map); + return_trace ((bool) glyphs); } bool sanitize (hb_sanitize_context_t *c) const @@ -1249,7 +1761,7 @@ struct ClassDefFormat1 } template - bool add_coverage (set_t *glyphs) const + bool collect_coverage (set_t *glyphs) const { unsigned int start = 0; unsigned int count = classValue.len; @@ -1272,7 +1784,7 @@ struct ClassDefFormat1 } template - bool add_class (set_t *glyphs, unsigned int klass) const + bool collect_class (set_t *glyphs, unsigned int klass) const { unsigned int count = classValue.len; for (unsigned int i = 0; i < count; i++) @@ -1311,7 +1823,7 @@ struct ClassDefFormat1 protected: HBUINT16 classFormat; /* Format identifier--format = 1 */ - GlyphID startGlyph; /* First GlyphID of the classValueArray */ + HBGlyphID startGlyph; /* First GlyphID of the classValueArray */ ArrayOf classValue; /* Array of Class Values--one per GlyphID */ public: @@ -1328,69 +1840,90 @@ struct ClassDefFormat2 return rangeRecord.bsearch (glyph_id).value; } + template bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t klasses) + Iterator it) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - if (unlikely (!glyphs.length)) + if (unlikely (!it)) { - rangeRecord.len.set (0); + rangeRecord.len = 0; return_trace (true); } - unsigned int num_ranges = 1; - for (unsigned int i = 1; i < glyphs.length; i++) - if (glyphs[i - 1] + 1 != glyphs[i] || - klasses[i - 1] != klasses[i]) - num_ranges++; - rangeRecord.len.set (num_ranges); - if (unlikely (!c->extend (rangeRecord))) return_trace (false); + unsigned num_ranges = 1; + hb_codepoint_t prev_gid = (*it).first; + unsigned prev_klass = (*it).second; + + RangeRecord range_rec; + range_rec.first = prev_gid; + range_rec.last = prev_gid; + range_rec.value = prev_klass; - unsigned int range = 0; - rangeRecord[range].start = glyphs[0]; - rangeRecord[range].value.set (klasses[0]); - for (unsigned int i = 1; i < glyphs.length; i++) + RangeRecord *record = c->copy (range_rec); + if (unlikely (!record)) return_trace (false); + + for (const auto gid_klass_pair : + (++it)) { - if (glyphs[i - 1] + 1 != glyphs[i] || - klasses[i - 1] != klasses[i]) + hb_codepoint_t cur_gid = gid_klass_pair.first; + unsigned cur_klass = gid_klass_pair.second; + + if (cur_gid != prev_gid + 1 || + cur_klass != prev_klass) { - range++; - rangeRecord[range].start = glyphs[i]; - rangeRecord[range].value = klasses[i]; + if (unlikely (!record)) break; + record->last = prev_gid; + num_ranges++; + + range_rec.first = cur_gid; + range_rec.last = cur_gid; + range_rec.value = cur_klass; + + record = c->copy (range_rec); } - rangeRecord[range].end = glyphs[i]; + + prev_klass = cur_klass; + prev_gid = cur_gid; } + + if (likely (record)) record->last = prev_gid; + rangeRecord.len = num_ranges; return_trace (true); } - bool subset (hb_subset_context_t *c) const + bool subset (hb_subset_context_t *c, + hb_map_t *klass_map = nullptr /*OUT*/) const { TRACE_SUBSET (this); - const hb_set_t &glyphset = 
*c->plan->glyphset; + const hb_set_t &glyphset = *c->plan->_glyphset_gsub; const hb_map_t &glyph_map = *c->plan->glyph_map; - hb_vector_t glyphs; - hb_vector_t klasses; - unsigned int count = rangeRecord.len; - for (unsigned int i = 0; i < count; i++) + hb_sorted_vector_t glyphs; + hb_set_t orig_klasses; + hb_map_t gid_org_klass_map; + + unsigned count = rangeRecord.len; + for (unsigned i = 0; i < count; i++) { - unsigned int value = rangeRecord[i].value; - if (!value) continue; - hb_codepoint_t start = rangeRecord[i].start; - hb_codepoint_t end = rangeRecord[i].end + 1; + unsigned klass = rangeRecord[i].value; + if (!klass) continue; + hb_codepoint_t start = rangeRecord[i].first; + hb_codepoint_t end = rangeRecord[i].last + 1; for (hb_codepoint_t g = start; g < end; g++) { if (!glyphset.has (g)) continue; - glyphs.push ()->set (glyph_map[g]); - klasses.push ()->set (value); + glyphs.push (glyph_map[g]); + gid_org_klass_map.set (glyph_map[g], klass); + orig_klasses.add (klass); } } - c->serializer->propagate_error (glyphs, klasses); - ClassDef_serialize (c->serializer, glyphs, klasses); - return_trace (glyphs.length); + + ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map, + glyphs, orig_klasses, klass_map); + return_trace ((bool) glyphs); } bool sanitize (hb_sanitize_context_t *c) const @@ -1400,24 +1933,24 @@ struct ClassDefFormat2 } template - bool add_coverage (set_t *glyphs) const + bool collect_coverage (set_t *glyphs) const { unsigned int count = rangeRecord.len; for (unsigned int i = 0; i < count; i++) if (rangeRecord[i].value) - if (unlikely (!rangeRecord[i].add_coverage (glyphs))) + if (unlikely (!rangeRecord[i].collect_coverage (glyphs))) return false; return true; } template - bool add_class (set_t *glyphs, unsigned int klass) const + bool collect_class (set_t *glyphs, unsigned int klass) const { unsigned int count = rangeRecord.len; for (unsigned int i = 0; i < count; i++) { if (rangeRecord[i].value == klass) - if (unlikely (!rangeRecord[i].add_coverage (glyphs))) + if (unlikely (!rangeRecord[i].collect_coverage (glyphs))) return false; } return true; @@ -1443,9 +1976,9 @@ struct ClassDefFormat2 { if (!hb_set_next (glyphs, &g)) break; - if (g < rangeRecord[i].start) + if (g < rangeRecord[i].first) return true; - g = rangeRecord[i].end; + g = rangeRecord[i].last; } if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g)) return true; @@ -1468,6 +2001,15 @@ struct ClassDefFormat2 struct ClassDef { + /* Has interface. */ + static constexpr unsigned SENTINEL = 0; + typedef unsigned int value_t; + value_t operator [] (hb_codepoint_t k) const { return get (k); } + bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; } + /* Projection. 
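ClassDefFormat2::serialize now consumes an iterator of (glyph, class) pairs and starts a new RangeRecord whenever either the glyph sequence breaks or the class value changes, the latter being the extra condition compared with Coverage ranges. A standalone equivalent over a sorted pair vector:

#include <cstdint>
#include <utility>
#include <vector>

struct class_range_t { uint16_t first, last, value; };

inline std::vector<class_range_t>
build_class_ranges (const std::vector<std::pair<uint16_t, uint16_t>> &gid_klass_pairs)
{
  std::vector<class_range_t> ranges;
  for (auto [gid, klass] : gid_klass_pairs)        // pairs sorted by glyph ID
  {
    if (!ranges.empty ()
        && uint16_t (ranges.back ().last + 1) == gid
        && ranges.back ().value == klass)
      ranges.back ().last = gid;                   // extend the current range
    else
      ranges.push_back ({gid, gid, klass});        // glyph gap or class change: new range
  }
  return ranges;
}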
*/ + hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); } + + unsigned int get (hb_codepoint_t k) const { return get_class (k); } unsigned int get_class (hb_codepoint_t glyph_id) const { switch (u.format) { @@ -1477,44 +2019,58 @@ struct ClassDef } } - bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t klasses) + template + bool serialize (hb_serialize_context_t *c, Iterator it) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); - unsigned int format = 2; - if (glyphs.length) + unsigned format = 2; + if (likely (it)) { - hb_codepoint_t glyph_min = glyphs[0]; - hb_codepoint_t glyph_max = glyphs[glyphs.length - 1]; + hb_codepoint_t glyph_min = (*it).first; + hb_codepoint_t glyph_max = + it + | hb_map (hb_first) + | hb_reduce (hb_max, 0u); + + unsigned num_ranges = 1; + hb_codepoint_t prev_gid = glyph_min; + unsigned prev_klass = (*it).second; - unsigned int num_ranges = 1; - for (unsigned int i = 1; i < glyphs.length; i++) - if (glyphs[i - 1] + 1 != glyphs[i] || - klasses[i - 1] != klasses[i]) + for (const auto gid_klass_pair : it) + { + hb_codepoint_t cur_gid = gid_klass_pair.first; + unsigned cur_klass = gid_klass_pair.second; + if (cur_gid == glyph_min || !cur_klass) continue; + if (cur_gid != prev_gid + 1 || + cur_klass != prev_klass) num_ranges++; - if (1 + (glyph_max - glyph_min + 1) < num_ranges * 3) + prev_gid = cur_gid; + prev_klass = cur_klass; + } + + if (1 + (glyph_max - glyph_min + 1) <= num_ranges * 3) format = 1; } - u.format.set (format); + u.format = format; switch (u.format) { - case 1: return_trace (u.format1.serialize (c, glyphs, klasses)); - case 2: return_trace (u.format2.serialize (c, glyphs, klasses)); + case 1: return_trace (u.format1.serialize (c, it)); + case 2: return_trace (u.format2.serialize (c, it)); default:return_trace (false); } } - bool subset (hb_subset_context_t *c) const + bool subset (hb_subset_context_t *c, + hb_map_t *klass_map = nullptr /*OUT*/) const { TRACE_SUBSET (this); switch (u.format) { - case 1: return_trace (u.format1.subset (c)); - case 2: return_trace (u.format2.subset (c)); + case 1: return_trace (u.format1.subset (c, klass_map)); + case 2: return_trace (u.format2.subset (c, klass_map)); default:return_trace (false); } } @@ -1533,11 +2089,11 @@ struct ClassDef /* Might return false if array looks unsorted. * Used for faster rejection of corrupt data. */ template - bool add_coverage (set_t *glyphs) const + bool collect_coverage (set_t *glyphs) const { switch (u.format) { - case 1: return u.format1.add_coverage (glyphs); - case 2: return u.format2.add_coverage (glyphs); + case 1: return u.format1.collect_coverage (glyphs); + case 2: return u.format2.collect_coverage (glyphs); default:return false; } } @@ -1545,11 +2101,11 @@ struct ClassDef /* Might return false if array looks unsorted. * Used for faster rejection of corrupt data. 
*/ template - bool add_class (set_t *glyphs, unsigned int klass) const + bool collect_class (set_t *glyphs, unsigned int klass) const { switch (u.format) { - case 1: return u.format1.add_class (glyphs, klass); - case 2: return u.format2.add_class (glyphs, klass); + case 1: return u.format1.collect_class (glyphs, klass); + case 2: return u.format2.collect_class (glyphs, klass); default:return false; } } @@ -1581,10 +2137,10 @@ struct ClassDef DEFINE_SIZE_UNION (2, format); }; +template static inline void ClassDef_serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t klasses) -{ c->start_embed ()->serialize (c, glyphs, klasses); } + Iterator it) +{ c->start_embed ()->serialize (c, it); } /* @@ -1635,7 +2191,7 @@ struct VarRegionAxis struct VarRegionList { float evaluate (unsigned int region_index, - const int *coords, unsigned int coord_len) const + const int *coords, unsigned int coord_len) const { if (unlikely (region_index >= regionCount)) return 0.; @@ -1662,6 +2218,26 @@ struct VarRegionList axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount)); } + bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t ®ion_map) + { + TRACE_SERIALIZE (this); + VarRegionList *out = c->allocate_min (); + if (unlikely (!out)) return_trace (false); + axisCount = src->axisCount; + regionCount = region_map.get_population (); + if (unlikely (!c->allocate_size (get_size () - min_size))) return_trace (false); + unsigned int region_count = src->get_region_count (); + for (unsigned int r = 0; r < regionCount; r++) + { + unsigned int backward = region_map.backward (r); + if (backward >= region_count) return_trace (false); + memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount); + } + + return_trace (true); + } + + unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; } unsigned int get_region_count () const { return regionCount; } protected: @@ -1685,8 +2261,8 @@ struct VarData { return itemCount * get_row_size (); } float get_delta (unsigned int inner, - const int *coords, unsigned int coord_count, - const VarRegionList ®ions) const + const int *coords, unsigned int coord_count, + const VarRegionList ®ions) const { if (unlikely (inner >= itemCount)) return 0.; @@ -1694,7 +2270,7 @@ struct VarData unsigned int count = regionIndices.len; unsigned int scount = shortCount; - const HBUINT8 *bytes = &StructAfter (regionIndices); + const HBUINT8 *bytes = get_delta_bytes (); const HBUINT8 *row = bytes + inner * (scount + count); float delta = 0.; @@ -1716,16 +2292,16 @@ struct VarData return delta; } - void get_scalars (int *coords, unsigned int coord_count, + void get_scalars (const int *coords, unsigned int coord_count, const VarRegionList ®ions, float *scalars /*OUT */, unsigned int num_scalars) const { - assert (num_scalars == regionIndices.len); - for (unsigned int i = 0; i < num_scalars; i++) - { - scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count); - } + unsigned count = hb_min (num_scalars, regionIndices.len); + for (unsigned int i = 0; i < count; i++) + scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count); + for (unsigned int i = count; i < num_scalars; i++) + scalars[i] = 0.f; } bool sanitize (hb_sanitize_context_t *c) const @@ -1734,11 +2310,117 @@ struct VarData return_trace (c->check_struct (this) && regionIndices.sanitize (c) && shortCount <= regionIndices.len && - c->check_range 
(&StructAfter (regionIndices), + c->check_range (get_delta_bytes (), itemCount, get_row_size ())); } + bool serialize (hb_serialize_context_t *c, + const VarData *src, + const hb_inc_bimap_t &inner_map, + const hb_bimap_t ®ion_map) + { + TRACE_SERIALIZE (this); + if (unlikely (!c->extend_min (*this))) return_trace (false); + itemCount = inner_map.get_next_value (); + + /* Optimize short count */ + unsigned short ri_count = src->regionIndices.len; + enum delta_size_t { kZero=0, kByte, kShort }; + hb_vector_t delta_sz; + hb_vector_t ri_map; /* maps old index to new index */ + delta_sz.resize (ri_count); + ri_map.resize (ri_count); + unsigned int new_short_count = 0; + unsigned int r; + for (r = 0; r < ri_count; r++) + { + delta_sz[r] = kZero; + for (unsigned int i = 0; i < inner_map.get_next_value (); i++) + { + unsigned int old = inner_map.backward (i); + int16_t delta = src->get_item_delta (old, r); + if (delta < -128 || 127 < delta) + { + delta_sz[r] = kShort; + new_short_count++; + break; + } + else if (delta != 0) + delta_sz[r] = kByte; + } + } + unsigned int short_index = 0; + unsigned int byte_index = new_short_count; + unsigned int new_ri_count = 0; + for (r = 0; r < ri_count; r++) + if (delta_sz[r]) + { + ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++; + new_ri_count++; + } + + shortCount = new_short_count; + regionIndices.len = new_ri_count; + + unsigned int size = regionIndices.get_size () - HBUINT16::static_size/*regionIndices.len*/ + (get_row_size () * itemCount); + if (unlikely (!c->allocate_size (size))) + return_trace (false); + + for (r = 0; r < ri_count; r++) + if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]]; + + for (unsigned int i = 0; i < itemCount; i++) + { + unsigned int old = inner_map.backward (i); + for (unsigned int r = 0; r < ri_count; r++) + if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r)); + } + + return_trace (true); + } + + void collect_region_refs (hb_inc_bimap_t ®ion_map, const hb_inc_bimap_t &inner_map) const + { + for (unsigned int r = 0; r < regionIndices.len; r++) + { + unsigned int region = regionIndices[r]; + if (region_map.has (region)) continue; + for (unsigned int i = 0; i < inner_map.get_next_value (); i++) + if (get_item_delta (inner_map.backward (i), r) != 0) + { + region_map.add (region); + break; + } + } + } + + protected: + const HBUINT8 *get_delta_bytes () const + { return &StructAfter (regionIndices); } + + HBUINT8 *get_delta_bytes () + { return &StructAfter (regionIndices); } + + int16_t get_item_delta (unsigned int item, unsigned int region) const + { + if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0; + const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size (); + if (region < shortCount) + return ((const HBINT16 *)p)[region]; + else + return (p + HBINT16::static_size * shortCount)[region - shortCount]; + } + + void set_item_delta (unsigned int item, unsigned int region, int16_t delta) + { + HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size (); + if (region < shortCount) + ((HBINT16 *)p)[region] = delta; + else + (p + HBINT16::static_size * shortCount)[region - shortCount] = delta; + } + protected: HBUINT16 itemCount; HBUINT16 shortCount; @@ -1753,8 +2435,12 @@ struct VariationStore float get_delta (unsigned int outer, unsigned int inner, const int *coords, unsigned int coord_count) const { +#ifdef HB_NO_VAR + return 0.f; +#endif + if (unlikely (outer >= dataSets.len)) - return 0.; + return 0.f; return 
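VarData::serialize rebuilds each delta row for the subset: every region column is scanned over the retained rows and classified as all-zero (dropped), byte-sized, or short-sized, which determines both the new shortCount and where the column lands in the reordered row. A simplified model of that classification over a plain delta matrix rather than the packed VarData rows:

#include <cstddef>
#include <cstdint>
#include <vector>

enum delta_size_t { kZero = 0, kByte, kShort };

// deltas[row][region]: the retained rows of one VarData, already remapped.
inline std::vector<delta_size_t>
classify_delta_columns (const std::vector<std::vector<int16_t>> &deltas)
{
  if (deltas.empty ()) return {};
  std::vector<delta_size_t> sizes (deltas[0].size (), kZero);
  for (const auto &row : deltas)
    for (std::size_t r = 0; r < row.size (); r++)
    {
      int16_t d = row[r];
      if (d < -128 || d > 127)
        sizes[r] = kShort;                         // needs a 16-bit slot
      else if (d != 0 && sizes[r] == kZero)
        sizes[r] = kByte;                          // nonzero but fits in a byte
    }
  return sizes;                                    // kZero columns can be dropped entirely
}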
(this+dataSets[outer]).get_delta (inner, coords, coord_count, @@ -1771,6 +2457,10 @@ struct VariationStore bool sanitize (hb_sanitize_context_t *c) const { +#ifdef HB_NO_VAR + return true; +#endif + TRACE_SANITIZE (this); return_trace (c->check_struct (this) && format == 1 && @@ -1778,18 +2468,98 @@ struct VariationStore dataSets.sanitize (c, this)); } + bool serialize (hb_serialize_context_t *c, + const VariationStore *src, + const hb_array_t &inner_maps) + { + TRACE_SERIALIZE (this); + unsigned int set_count = 0; + for (unsigned int i = 0; i < inner_maps.length; i++) + if (inner_maps[i].get_population () > 0) set_count++; + + unsigned int size = min_size + HBUINT32::static_size * set_count; + if (unlikely (!c->allocate_size (size))) return_trace (false); + format = 1; + + hb_inc_bimap_t region_map; + for (unsigned int i = 0; i < inner_maps.length; i++) + (src+src->dataSets[i]).collect_region_refs (region_map, inner_maps[i]); + region_map.sort (); + + if (unlikely (!regions.serialize (c, this) + .serialize (c, &(src+src->regions), region_map))) return_trace (false); + + /* TODO: The following code could be simplified when + * OffsetListOf::subset () can take a custom param to be passed to VarData::serialize () + */ + dataSets.len = set_count; + unsigned int set_index = 0; + for (unsigned int i = 0; i < inner_maps.length; i++) + { + if (inner_maps[i].get_population () == 0) continue; + if (unlikely (!dataSets[set_index++].serialize (c, this) + .serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map))) + return_trace (false); + } + + return_trace (true); + } + + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + + VariationStore *varstore_prime = c->serializer->start_embed (); + if (unlikely (!varstore_prime)) return_trace (false); + + const hb_set_t *variation_indices = c->plan->layout_variation_indices; + if (variation_indices->is_empty ()) return_trace (false); + + hb_vector_t inner_maps; + inner_maps.resize ((unsigned) dataSets.len); + for (unsigned i = 0; i < inner_maps.length; i++) + inner_maps[i].init (); + + for (unsigned idx : c->plan->layout_variation_indices->iter ()) + { + uint16_t major = idx >> 16; + uint16_t minor = idx & 0xFFFF; + + if (major >= inner_maps.length) + { + for (unsigned i = 0; i < inner_maps.length; i++) + inner_maps[i].fini (); + return_trace (false); + } + inner_maps[major].add (minor); + } + varstore_prime->serialize (c->serializer, this, inner_maps.as_array ()); + + for (unsigned i = 0; i < inner_maps.length; i++) + inner_maps[i].fini (); + return_trace (bool (varstore_prime->dataSets)); + } + unsigned int get_region_index_count (unsigned int ivs) const { return (this+dataSets[ivs]).get_region_index_count (); } void get_scalars (unsigned int ivs, - int *coords, unsigned int coord_count, + const int *coords, unsigned int coord_count, float *scalars /*OUT*/, unsigned int num_scalars) const { +#ifdef HB_NO_VAR + for (unsigned i = 0; i < num_scalars; i++) + scalars[i] = 0.f; + return; +#endif + (this+dataSets[ivs]).get_scalars (coords, coord_count, this+regions, &scalars[0], num_scalars); } + unsigned int get_sub_table_count () const { return dataSets.len; } + protected: HBUINT16 format; LOffsetTo regions; @@ -1806,6 +2576,14 @@ struct ConditionFormat1 { friend struct Condition; + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->embed (this); + if (unlikely (!out)) return_trace (false); + return_trace (true); + } + private: bool evaluate (const int *coords, unsigned int coord_len) 
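VariationStore::subset first groups the retained layout variation indices by VarData: the high 16 bits of each index pick the outer (major) data set and the low 16 bits the inner (minor) row, and an out-of-range major aborts the subset. A sketch with standard containers standing in for hb_inc_bimap_t:

#include <cstdint>
#include <set>
#include <vector>

inline std::vector<std::set<uint16_t>>
build_inner_maps (const std::set<uint32_t> &variation_indices, unsigned var_data_count)
{
  std::vector<std::set<uint16_t>> inner_maps (var_data_count);
  for (uint32_t idx : variation_indices)
  {
    uint16_t major = uint16_t (idx >> 16);       // which VarData
    uint16_t minor = uint16_t (idx & 0xFFFFu);   // which row inside it
    if (major >= var_data_count)
      return {};                                 // the patch bails out in this case
    inner_maps[major].insert (minor);
  }
  return inner_maps;
}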
const { @@ -1838,6 +2616,17 @@ struct Condition } } + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const + { + TRACE_DISPATCH (this, u.format); + if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); + switch (u.format) { + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); + default:return_trace (c->default_return_value ()); + } + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -1868,6 +2657,18 @@ struct ConditionSet return true; } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + + conditions.iter () + | hb_apply (subset_offset_array (c, out->conditions, this)) + ; + return_trace (true); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -1884,6 +2685,30 @@ struct FeatureTableSubstitutionRecord { friend struct FeatureTableSubstitution; + void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const + { + return (base+feature).add_lookup_indexes_to (lookup_indexes); + } + + void closure_features (const void *base, + const hb_map_t *lookup_indexes, + hb_set_t *feature_indexes /* OUT */) const + { + if ((base+feature).intersects_lookup_indexes (lookup_indexes)) + feature_indexes->add (featureIndex); + } + + bool subset (hb_subset_layout_context_t *c, const void *base) const + { + TRACE_SUBSET (this); + auto *out = c->subset_context->serializer->embed (this); + if (unlikely (!out)) return_trace (false); + + out->featureIndex = c->feature_index_map->get (featureIndex); + bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c); + return_trace (ret); + } + bool sanitize (hb_sanitize_context_t *c, const void *base) const { TRACE_SANITIZE (this); @@ -1911,6 +2736,39 @@ struct FeatureTableSubstitution return nullptr; } + void collect_lookups (const hb_set_t *feature_indexes, + hb_set_t *lookup_indexes /* OUT */) const + { + + hb_iter (substitutions) + | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex) + | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r) + { r.collect_lookups (this, lookup_indexes); }) + ; + } + + void closure_features (const hb_map_t *lookup_indexes, + hb_set_t *feature_indexes /* OUT */) const + { + for (const FeatureTableSubstitutionRecord& record : substitutions) + record.closure_features (this, lookup_indexes, feature_indexes); + } + + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + out->version.major = version.major; + out->version.minor = version.minor; + + + substitutions.iter () + | hb_apply (subset_record_array (l, &(out->substitutions), this)) + ; + return_trace (true); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -1931,6 +2789,32 @@ struct FeatureVariationRecord { friend struct FeatureVariations; + void collect_lookups (const void *base, + const hb_set_t *feature_indexes, + hb_set_t *lookup_indexes /* OUT */) const + { + return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes); + } + + void closure_features (const void *base, + const hb_map_t *lookup_indexes, + hb_set_t *feature_indexes /* OUT */) const + { + 
(base+substitutions).closure_features (lookup_indexes, feature_indexes); + } + + bool subset (hb_subset_layout_context_t *c, const void *base) const + { + TRACE_SUBSET (this); + auto *out = c->subset_context->serializer->embed (this); + if (unlikely (!out)) return_trace (false); + + out->conditions.serialize_subset (c->subset_context, conditions, base); + out->substitutions.serialize_subset (c->subset_context, substitutions, base, c); + + return_trace (true); + } + bool sanitize (hb_sanitize_context_t *c, const void *base) const { TRACE_SANITIZE (this); @@ -1952,7 +2836,7 @@ struct FeatureVariations static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu; bool find_index (const int *coords, unsigned int coord_len, - unsigned int *index) const + unsigned int *index) const { unsigned int count = varRecords.len; for (unsigned int i = 0; i < count; i++) @@ -1975,10 +2859,40 @@ struct FeatureVariations return (this+record.substitutions).find_substitute (feature_index); } - bool subset (hb_subset_context_t *c) const + FeatureVariations* copy (hb_serialize_context_t *c) const + { + TRACE_SERIALIZE (this); + return_trace (c->embed (*this)); + } + + void collect_lookups (const hb_set_t *feature_indexes, + hb_set_t *lookup_indexes /* OUT */) const + { + for (const FeatureVariationRecord& r : varRecords) + r.collect_lookups (this, feature_indexes, lookup_indexes); + } + + void closure_features (const hb_map_t *lookup_indexes, + hb_set_t *feature_indexes /* OUT */) const + { + for (const FeatureVariationRecord& record : varRecords) + record.closure_features (this, lookup_indexes, feature_indexes); + } + + bool subset (hb_subset_context_t *c, + hb_subset_layout_context_t *l) const { TRACE_SUBSET (this); - return_trace (c->serializer->embed (*this)); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false); + + out->version.major = version.major; + out->version.minor = version.minor; + + + varRecords.iter () + | hb_apply (subset_record_array (l, &(out->varRecords), this)) + ; + return_trace (bool (out->varRecords)); } bool sanitize (hb_sanitize_context_t *c) const @@ -2014,6 +2928,8 @@ struct HintingDevice hb_position_t get_y_delta (hb_font_t *font) const { return get_delta (font->y_ppem, font->y_scale); } + public: + unsigned int get_size () const { unsigned int f = deltaFormat; @@ -2027,6 +2943,12 @@ struct HintingDevice return_trace (c->check_struct (this) && c->check_range (this, this->get_size ())); } + HintingDevice* copy (hb_serialize_context_t *c) const + { + TRACE_SERIALIZE (this); + return_trace (c->embed (this)); + } + private: int get_delta (unsigned int ppem, int scale) const @@ -2088,6 +3010,32 @@ struct VariationDevice hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const { return font->em_scalef_y (get_delta (font, store)); } + VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const + { + TRACE_SERIALIZE (this); + auto snap = c->snapshot (); + auto *out = c->embed (this); + if (unlikely (!out)) return_trace (nullptr); + if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out); + + unsigned org_idx = (outerIndex << 16) + innerIndex; + if (!layout_variation_idx_map->has (org_idx)) + { + c->revert (snap); + return_trace (nullptr); + } + unsigned new_idx = layout_variation_idx_map->get (org_idx); + out->outerIndex = new_idx >> 16; + out->innerIndex = new_idx & 0xFFFF; + return_trace (out); + } + + void 
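// Editor's note: illustrative sketch, not part of the patch. VariationDevice::copy ()
// above looks the packed (outerIndex, innerIndex) pair up in the subset plan's
// remapping; when the index was not retained it reverts the serializer snapshot and
// returns nullptr so the whole record is dropped. A standalone "remap or drop" model
// with hypothetical names and std containers:
#include <cstdint>
#include <optional>
#include <unordered_map>

struct VarIdx { uint16_t outer, inner; };

static std::optional<VarIdx>
remap_or_drop (VarIdx v, const std::unordered_map<uint32_t, uint32_t> &idx_map)
{
  uint32_t org = (uint32_t (v.outer) << 16) + v.inner;
  auto it = idx_map.find (org);
  if (it == idx_map.end ()) return std::nullopt;              // caller reverts and drops
  return VarIdx { uint16_t (it->second >> 16), uint16_t (it->second & 0xFFFFu) };
}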
record_variation_index (hb_set_t *layout_variation_indices) const + { + unsigned var_idx = (outerIndex << 16) + innerIndex; + layout_variation_indices->add (var_idx); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -2126,10 +3074,14 @@ struct Device { switch (u.b.format) { +#ifndef HB_NO_HINTING case 1: case 2: case 3: return u.hinting.get_x_delta (font); +#endif +#ifndef HB_NO_VAR case 0x8000: return u.variation.get_x_delta (font, store); +#endif default: return 0; } @@ -2139,9 +3091,13 @@ struct Device switch (u.b.format) { case 1: case 2: case 3: +#ifndef HB_NO_HINTING return u.hinting.get_y_delta (font); +#endif +#ifndef HB_NO_VAR case 0x8000: return u.variation.get_y_delta (font, store); +#endif default: return 0; } @@ -2152,20 +3108,64 @@ struct Device TRACE_SANITIZE (this); if (!u.b.format.sanitize (c)) return_trace (false); switch (u.b.format) { +#ifndef HB_NO_HINTING case 1: case 2: case 3: return_trace (u.hinting.sanitize (c)); +#endif +#ifndef HB_NO_VAR case 0x8000: return_trace (u.variation.sanitize (c)); +#endif default: return_trace (true); } } + Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const + { + TRACE_SERIALIZE (this); + switch (u.b.format) { +#ifndef HB_NO_HINTING + case 1: + case 2: + case 3: + return_trace (reinterpret_cast (u.hinting.copy (c))); +#endif +#ifndef HB_NO_VAR + case 0x8000: + return_trace (reinterpret_cast (u.variation.copy (c, layout_variation_idx_map))); +#endif + default: + return_trace (nullptr); + } + } + + void collect_variation_indices (hb_set_t *layout_variation_indices) const + { + switch (u.b.format) { +#ifndef HB_NO_HINTING + case 1: + case 2: + case 3: + return; +#endif +#ifndef HB_NO_VAR + case 0x8000: + u.variation.record_variation_index (layout_variation_indices); + return; +#endif + default: + return; + } + } + protected: union { DeviceHeader b; HintingDevice hinting; +#ifndef HB_NO_VAR VariationDevice variation; +#endif } u; public: DEFINE_SIZE_UNION (6, b); diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gdef-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gdef-table.hh index 533c95a41e6c..201a6c980fca 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gdef-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gdef-table.hh @@ -41,8 +41,18 @@ namespace OT { * Attachment List Table */ -typedef ArrayOf AttachPoint; /* Array of contour point indices--in - * increasing numerical order */ +/* Array of contour point indices--in increasing numerical order */ +struct AttachPoint : ArrayOf +{ + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out)) return_trace (false); + + return_trace (out->serialize (c->serializer, + iter ())); + } +}; struct AttachList { @@ -63,15 +73,36 @@ struct AttachList if (point_count) { - hb_array_t array = points.sub_array (start_offset, point_count); - unsigned int count = array.length; - for (unsigned int i = 0; i < count; i++) - point_array[i] = array[i]; + + points.sub_array (start_offset, point_count) + | hb_sink (hb_array (point_array, *point_count)) + ; } return points.len; } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + + 
hb_sorted_vector_t new_coverage; + + hb_zip (this+coverage, attachPoint) + | hb_filter (glyphset, hb_first) + | hb_filter (subset_offset_array (c, out->attachPoint, this), hb_second) + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + out->coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ()); + return_trace (bool (new_coverage)); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -96,6 +127,13 @@ struct AttachList struct CaretValueFormat1 { friend struct CaretValue; + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->embed (this); + if (unlikely (!out)) return_trace (false); + return_trace (true); + } private: hb_position_t get_caret_value (hb_font_t *font, hb_direction_t direction) const @@ -119,6 +157,13 @@ struct CaretValueFormat1 struct CaretValueFormat2 { friend struct CaretValue; + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->embed (this); + if (unlikely (!out)) return_trace (false); + return_trace (true); + } private: hb_position_t get_caret_value (hb_font_t *font, hb_direction_t direction, hb_codepoint_t glyph_id) const @@ -153,6 +198,19 @@ struct CaretValueFormat3 font->em_scale_y (coordinate) + (this+deviceTable).get_y_delta (font, var_store); } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->embed (this); + if (unlikely (!out)) return_trace (false); + + return_trace (out->deviceTable.serialize_copy (c->serializer, deviceTable, this, c->serializer->to_bias (out), + hb_serialize_context_t::Head, c->plan->layout_variation_idx_map)); + } + + void collect_variation_indices (hb_set_t *layout_variation_indices) const + { (this+deviceTable).collect_variation_indices (layout_variation_indices); } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -173,9 +231,9 @@ struct CaretValueFormat3 struct CaretValue { hb_position_t get_caret_value (hb_font_t *font, - hb_direction_t direction, - hb_codepoint_t glyph_id, - const VariationStore &var_store) const + hb_direction_t direction, + hb_codepoint_t glyph_id, + const VariationStore &var_store) const { switch (u.format) { case 1: return u.format1.get_caret_value (font, direction); @@ -185,6 +243,32 @@ struct CaretValue } } + template + typename context_t::return_t dispatch (context_t *c, Ts&&... 
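// Editor's note: illustrative sketch, not part of the patch. The subset () bodies in
// this file (AttachList above, LigCaretList below, and the GPOS subtables later in the
// diff) share one shape: walk the (coverage glyph, per-glyph data) pairs, keep only
// glyphs in the retained set, remap them through glyph_map, and emit a new coverage
// array in the same order. The hb_zip/hb_filter/hb_sink pipeline is roughly equivalent
// to this plain loop, with std containers standing in for the HarfBuzz types:
#include <cstddef>
#include <cstdint>
#include <unordered_map>
#include <unordered_set>
#include <vector>

struct CoverageSubset
{
  std::vector<uint32_t> new_coverage;   // remapped glyph ids, in coverage order
  std::vector<size_t>   kept_rows;      // rows of per-glyph data to re-serialize
};

static CoverageSubset
subset_coverage (const std::vector<uint32_t> &coverage,                    // old glyph ids
                 const std::unordered_set<uint32_t> &glyphset,             // retained glyphs
                 const std::unordered_map<uint32_t, uint32_t> &glyph_map)  // old -> new id
{
  CoverageSubset out;
  for (size_t i = 0; i < coverage.size (); i++)
  {
    uint32_t g = coverage[i];
    if (!glyphset.count (g)) continue;              // hb_filter (glyphset, hb_first)
    out.kept_rows.push_back (i);
    out.new_coverage.push_back (glyph_map.at (g));  // hb_map (glyph_map)
  }
  return out;
}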
ds) const + { + TRACE_DISPATCH (this, u.format); + if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); + switch (u.format) { + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); + case 2: return_trace (c->dispatch (u.format2, hb_forward (ds)...)); + case 3: return_trace (c->dispatch (u.format3, hb_forward (ds)...)); + default:return_trace (c->default_return_value ()); + } + } + + void collect_variation_indices (hb_set_t *layout_variation_indices) const + { + switch (u.format) { + case 1: + case 2: + return; + case 3: + u.format3.collect_variation_indices (layout_variation_indices); + return; + default: return; + } + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -210,25 +294,45 @@ struct CaretValue struct LigGlyph { - unsigned int get_lig_carets (hb_font_t *font, - hb_direction_t direction, - hb_codepoint_t glyph_id, - const VariationStore &var_store, - unsigned int start_offset, - unsigned int *caret_count /* IN/OUT */, - hb_position_t *caret_array /* OUT */) const + unsigned get_lig_carets (hb_font_t *font, + hb_direction_t direction, + hb_codepoint_t glyph_id, + const VariationStore &var_store, + unsigned start_offset, + unsigned *caret_count /* IN/OUT */, + hb_position_t *caret_array /* OUT */) const { if (caret_count) { - hb_array_t > array = carets.sub_array (start_offset, caret_count); - unsigned int count = array.length; - for (unsigned int i = 0; i < count; i++) - caret_array[i] = (this+array[i]).get_caret_value (font, direction, glyph_id, var_store); + + carets.sub_array (start_offset, caret_count) + | hb_map (hb_add (this)) + | hb_map ([&] (const CaretValue &value) { return value.get_caret_value (font, direction, glyph_id, var_store); }) + | hb_sink (hb_array (caret_array, *caret_count)) + ; } return carets.len; } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + + + hb_iter (carets) + | hb_apply (subset_offset_array (c, out->carets, this)) + ; + + return_trace (bool (out->carets)); + } + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + for (const OffsetTo& offset : carets.iter ()) + (this+offset).collect_variation_indices (c->layout_variation_indices); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -265,6 +369,38 @@ struct LigCaretList return lig_glyph.get_lig_carets (font, direction, glyph_id, var_store, start_offset, caret_count, caret_array); } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + + hb_sorted_vector_t new_coverage; + + hb_zip (this+coverage, ligGlyph) + | hb_filter (glyphset, hb_first) + | hb_filter (subset_offset_array (c, out->ligGlyph, this), hb_second) + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + out->coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ()); + return_trace (bool (new_coverage)); + } + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + + hb_zip (this+coverage, ligGlyph) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + | hb_map (hb_add (this)) + | hb_apply ([c] (const 
LigGlyph& _) { _.collect_variation_indices (c); }) + ; + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -288,6 +424,34 @@ struct MarkGlyphSetsFormat1 bool covers (unsigned int set_index, hb_codepoint_t glyph_id) const { return (this+coverage[set_index]).get_coverage (glyph_id) != NOT_COVERED; } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + + bool ret = true; + for (const LOffsetTo& offset : coverage.iter ()) + { + auto *o = out->coverage.serialize_append (c->serializer); + if (unlikely (!o)) + { + ret = false; + break; + } + + //not using o->serialize_subset (c, offset, this, out) here because + //OTS doesn't allow null offset. + //See issue: https://github.com/khaledhosny/ots/issues/172 + c->serializer->push (); + c->dispatch (this+offset); + c->serializer->add_link (*o, c->serializer->pop_pack ()); + } + + return_trace (ret && out->coverage.len); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -296,7 +460,7 @@ struct MarkGlyphSetsFormat1 protected: HBUINT16 format; /* Format identifier--format = 1 */ - ArrayOf > + ArrayOf> coverage; /* Array of long offsets to mark set * coverage tables */ public: @@ -313,6 +477,15 @@ struct MarkGlyphSets } } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + switch (u.format) { + case 1: return_trace (u.format1.subset (c)); + default:return_trace (false); + } + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -356,7 +529,7 @@ struct GDEF unsigned int get_glyph_class (hb_codepoint_t glyph) const { return (this+glyphClassDef).get_class (glyph); } void get_glyphs_in_class (unsigned int klass, hb_set_t *glyphs) const - { (this+glyphClassDef).add_class (glyphs, klass); } + { (this+glyphClassDef).collect_class (glyphs, klass); } bool has_mark_attachment_types () const { return markAttachClassDef != 0; } unsigned int get_mark_attachment_type (hb_codepoint_t glyph) const @@ -386,7 +559,7 @@ struct GDEF bool has_var_store () const { return version.to_int () >= 0x00010003u && varStore != 0; } const VariationStore &get_var_store () const - { return version.to_int () >= 0x00010003u ? this+varStore : Null(VariationStore); } + { return version.to_int () >= 0x00010003u ? this+varStore : Null (VariationStore); } /* glyph_props is a 16-bit integer where the lower 8-bit have bits representing * glyph class and other bits, and high 8-bit the mark attachment type (if any). @@ -409,15 +582,15 @@ struct GDEF } } - HB_INTERNAL bool is_blacklisted (hb_blob_t *blob, + HB_INTERNAL bool is_blocklisted (hb_blob_t *blob, hb_face_t *face) const; struct accelerator_t { void init (hb_face_t *face) { - this->table = hb_sanitize_context_t().reference_table (face); - if (unlikely (this->table->is_blacklisted (this->table.get_blob (), face))) + this->table = hb_sanitize_context_t ().reference_table (face); + if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face))) { hb_blob_destroy (this->table.get_blob ()); this->table = hb_blob_get_empty (); @@ -436,24 +609,66 @@ struct GDEF (version.to_int () >= 0x00010003u ? 
varStore.static_size : 0); } + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { (this+ligCaretList).collect_variation_indices (c); } + + void remap_layout_variation_indices (const hb_set_t *layout_variation_indices, + hb_map_t *layout_variation_idx_map /* OUT */) const + { + if (version.to_int () < 0x00010003u || !varStore) return; + if (layout_variation_indices->is_empty ()) return; + + unsigned new_major = 0, new_minor = 0; + unsigned last_major = (layout_variation_indices->get_min ()) >> 16; + for (unsigned idx : layout_variation_indices->iter ()) + { + uint16_t major = idx >> 16; + if (major >= (this+varStore).get_sub_table_count ()) break; + if (major != last_major) + { + new_minor = 0; + ++new_major; + } + + unsigned new_idx = (new_major << 16) + new_minor; + layout_variation_idx_map->set (idx, new_idx); + ++new_minor; + last_major = major; + } + } + bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - struct GDEF *out = c->serializer->embed (*this); + auto *out = c->serializer->embed (*this); if (unlikely (!out)) return_trace (false); - out->glyphClassDef.serialize_subset (c, this+glyphClassDef, out); - out->attachList.set (0);//TODO(subset) serialize_subset (c, this+attachList, out); - out->ligCaretList.set (0);//TODO(subset) serialize_subset (c, this+ligCaretList, out); - out->markAttachClassDef.serialize_subset (c, this+markAttachClassDef, out); + bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this); + bool subset_attachlist = out->attachList.serialize_subset (c, attachList, this); + bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this); + bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this); + bool subset_markglyphsetsdef = true; if (version.to_int () >= 0x00010002u) - out->markGlyphSetsDef.set (0);// TODO(subset) serialize_subset (c, this+markGlyphSetsDef, out); + { + subset_markglyphsetsdef = out->markGlyphSetsDef.serialize_subset (c, markGlyphSetsDef, this); + if (!subset_markglyphsetsdef && + version.to_int () == 0x00010002u) + out->version.minor = 0; + } + bool subset_varstore = true; if (version.to_int () >= 0x00010003u) - out->varStore.set (0);// TODO(subset) serialize_subset (c, this+varStore, out); + { + subset_varstore = out->varStore.serialize_subset (c, varStore, this); + if (!subset_varstore && version.to_int () == 0x00010003u) + out->version.minor = 2; + } - return_trace (true); + return_trace (subset_glyphclassdef || subset_attachlist || + subset_ligcaretlist || subset_markattachclassdef || + (out->version.to_int () >= 0x00010002u && subset_markglyphsetsdef) || + (out->version.to_int () >= 0x00010003u && subset_varstore)); } bool sanitize (hb_sanitize_context_t *c) const diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gpos-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gpos-table.hh index 2a5165005811..eddae150e5f6 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gpos-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gpos-table.hh @@ -34,6 +34,11 @@ namespace OT { +struct MarkArray; +static void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage, + const MarkArray &mark_array, + const hb_set_t &glyphset, + hb_map_t* klass_mapping /* INOUT */); /* buffer **position** var allocations */ #define attach_chain() var.i16[0] /* glyph to which this attaches to, relative to current glyphs; negative for going back, 
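// Editor's note: illustrative sketch, not part of the patch.
// GDEF::remap_layout_variation_indices () above renumbers the retained variation
// indices so minors restart from zero within each major and the majors themselves
// become consecutive; GDEF/GPOS device tables are then rewritten through this map.
// A standalone model with std containers:
#include <cstdint>
#include <map>
#include <set>

static std::map<uint32_t, uint32_t>
densify_variation_indices (const std::set<uint32_t> &kept)   // sorted packed indices
{
  std::map<uint32_t, uint32_t> remap;                        // old index -> new index
  if (kept.empty ()) return remap;

  uint32_t new_major = 0, new_minor = 0;
  uint16_t last_major = uint16_t (*kept.begin () >> 16);
  for (uint32_t idx : kept)
  {
    uint16_t major = uint16_t (idx >> 16);
    if (major != last_major) { new_minor = 0; ++new_major; }
    remap[idx] = (new_major << 16) + new_minor;
    ++new_minor;
    last_major = major;
  }
  return remap;
}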
positive for forward. */ @@ -74,14 +79,14 @@ struct ValueFormat : HBUINT16 /* All fields are options. Only those available advance the value pointer. */ #if 0 - HBINT16 xPlacement; /* Horizontal adjustment for + HBINT16 xPlacement; /* Horizontal adjustment for * placement--in design units */ - HBINT16 yPlacement; /* Vertical adjustment for + HBINT16 yPlacement; /* Vertical adjustment for * placement--in design units */ - HBINT16 xAdvance; /* Horizontal adjustment for + HBINT16 xAdvance; /* Horizontal adjustment for * advance--in design units (only used * for horizontal writing) */ - HBINT16 yAdvance; /* Vertical adjustment for advance--in + HBINT16 yAdvance; /* Vertical adjustment for advance--in * design units (only used for vertical * writing) */ OffsetTo xPlaDevice; /* Offset to Device table for @@ -101,10 +106,10 @@ struct ValueFormat : HBUINT16 unsigned int get_len () const { return hb_popcount ((unsigned int) *this); } unsigned int get_size () const { return get_len () * Value::static_size; } - bool apply_value (hb_ot_apply_context_t *c, - const void *base, - const Value *values, - hb_glyph_position_t &glyph_pos) const + bool apply_value (hb_ot_apply_context_t *c, + const void *base, + const Value *values, + hb_glyph_position_t &glyph_pos) const { bool ret = false; unsigned int format = *this; @@ -155,6 +160,60 @@ struct ValueFormat : HBUINT16 return ret; } + void serialize_copy (hb_serialize_context_t *c, const void *base, + const Value *values, const hb_map_t *layout_variation_idx_map) const + { + unsigned int format = *this; + if (!format) return; + + if (format & xPlacement) c->copy (*values++); + if (format & yPlacement) c->copy (*values++); + if (format & xAdvance) c->copy (*values++); + if (format & yAdvance) c->copy (*values++); + + if (format & xPlaDevice) copy_device (c, base, values++, layout_variation_idx_map); + if (format & yPlaDevice) copy_device (c, base, values++, layout_variation_idx_map); + if (format & xAdvDevice) copy_device (c, base, values++, layout_variation_idx_map); + if (format & yAdvDevice) copy_device (c, base, values++, layout_variation_idx_map); + } + + void collect_variation_indices (hb_collect_variation_indices_context_t *c, + const void *base, + const hb_array_t& values) const + { + unsigned format = *this; + unsigned i = 0; + if (format & xPlacement) i++; + if (format & yPlacement) i++; + if (format & xAdvance) i++; + if (format & yAdvance) i++; + if (format & xPlaDevice) + { + (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices); + i++; + } + + if (format & ValueFormat::yPlaDevice) + { + (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices); + i++; + } + + if (format & ValueFormat::xAdvDevice) + { + + (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices); + i++; + } + + if (format & ValueFormat::yAdvDevice) + { + + (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices); + i++; + } + } + private: bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const { @@ -173,18 +232,42 @@ struct ValueFormat : HBUINT16 return true; } - static OffsetTo& get_device (Value* value) - { return *CastP > (value); } - static const OffsetTo& get_device (const Value* value, bool *worked=nullptr) + static inline OffsetTo& get_device (Value* value) + { + return *static_cast *> (value); + } + static inline const OffsetTo& get_device (const Value* value, bool *worked=nullptr) { if (worked) 
*worked |= bool (*value); - return *CastP > (value); + return *static_cast *> (value); + } + + bool copy_device (hb_serialize_context_t *c, const void *base, + const Value *src_value, const hb_map_t *layout_variation_idx_map) const + { + Value *dst_value = c->copy (*src_value); + + if (!dst_value) return false; + if (*dst_value == 0) return true; + + *dst_value = 0; + c->push (); + if ((base + get_device (src_value)).copy (c, layout_variation_idx_map)) + { + c->add_link (*dst_value, c->pop_pack ()); + return true; + } + else + { + c->pop_discard (); + return false; + } } - static const HBINT16& get_short (const Value* value, bool *worked=nullptr) + static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr) { if (worked) *worked |= bool (*value); - return *CastP (value); + return *reinterpret_cast (value); } public: @@ -236,6 +319,13 @@ struct ValueFormat : HBUINT16 } }; +template +static void SinglePos_serialize (hb_serialize_context_t *c, + const void *src, + Iterator it, + ValueFormat valFormat, + const hb_map_t *layout_variation_idx_map); + struct AnchorFormat1 { @@ -253,6 +343,12 @@ struct AnchorFormat1 return_trace (c->check_struct (this)); } + AnchorFormat1* copy (hb_serialize_context_t *c) const + { + TRACE_SERIALIZE (this); + return_trace (c->embed (this)); + } + protected: HBUINT16 format; /* Format identifier--format = 1 */ FWORD xCoordinate; /* Horizontal value--in design units */ @@ -267,6 +363,13 @@ struct AnchorFormat2 float *x, float *y) const { hb_font_t *font = c->font; + +#ifdef HB_NO_HINTING + *x = font->em_fscale_x (xCoordinate); + *y = font->em_fscale_y (yCoordinate); + return; +#endif + unsigned int x_ppem = font->x_ppem; unsigned int y_ppem = font->y_ppem; hb_position_t cx = 0, cy = 0; @@ -284,6 +387,12 @@ struct AnchorFormat2 return_trace (c->check_struct (this)); } + AnchorFormat2* copy (hb_serialize_context_t *c) const + { + TRACE_SERIALIZE (this); + return_trace (c->embed (this)); + } + protected: HBUINT16 format; /* Format identifier--format = 2 */ FWORD xCoordinate; /* Horizontal value--in design units */ @@ -314,6 +423,26 @@ struct AnchorFormat3 return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this)); } + AnchorFormat3* copy (hb_serialize_context_t *c, + const hb_map_t *layout_variation_idx_map) const + { + TRACE_SERIALIZE (this); + if (!layout_variation_idx_map) return_trace (nullptr); + + auto *out = c->embed (this); + if (unlikely (!out)) return_trace (nullptr); + + out->xDeviceTable.serialize_copy (c, xDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map); + out->yDeviceTable.serialize_copy (c, yDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map); + return_trace (out); + } + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + (this+xDeviceTable).collect_variation_indices (c->layout_variation_indices); + (this+yDeviceTable).collect_variation_indices (c->layout_variation_indices); + } + protected: HBUINT16 format; /* Format identifier--format = 3 */ FWORD xCoordinate; /* Horizontal value--in design units */ @@ -356,6 +485,29 @@ struct Anchor } } + Anchor* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const + { + TRACE_SERIALIZE (this); + switch (u.format) { + case 1: return_trace (reinterpret_cast (u.format1.copy (c))); + case 2: return_trace (reinterpret_cast (u.format2.copy (c))); + case 3: return_trace (reinterpret_cast (u.format3.copy (c, layout_variation_idx_map))); 
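// Editor's note: illustrative sketch, not part of the patch. ValueFormat (earlier in
// this file) is a 16-bit flag word: each set bit contributes one 16-bit slot, stored
// in flag order, so get_len () is a popcount and readers such as serialize_copy () and
// collect_variation_indices () simply consume one slot per set flag. Standalone model
// using the real OpenType flag values:
#include <bitset>
#include <cstdint>
#include <vector>

enum : uint16_t { xPlacement = 0x0001, yPlacement = 0x0002,
                  xAdvance   = 0x0004, yAdvance   = 0x0008 };

static unsigned value_record_len (uint16_t format)
{ return (unsigned) std::bitset<16> (format).count (); }   // slots per record

static int16_t read_x_advance (uint16_t format, const std::vector<int16_t> &values)
{
  unsigned i = 0;
  if (format & xPlacement) i++;       // skip slots for lower-numbered flags
  if (format & yPlacement) i++;
  return (format & xAdvance) ? values[i] : 0;
}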
+ default:return_trace (nullptr); + } + } + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + switch (u.format) { + case 1: case 2: + return; + case 3: + u.format3.collect_variation_indices (c); + return; + default: return; + } + } + protected: union { HBUINT16 format; /* Format identifier */ @@ -374,11 +526,46 @@ struct AnchorMatrix unsigned int cols, bool *found) const { *found = false; - if (unlikely (row >= rows || col >= cols)) return Null(Anchor); + if (unlikely (row >= rows || col >= cols)) return Null (Anchor); *found = !matrixZ[row * cols + col].is_null (); return this+matrixZ[row * cols + col]; } + template + void collect_variation_indices (hb_collect_variation_indices_context_t *c, + Iterator index_iter) const + { + for (unsigned i : index_iter) + (this+matrixZ[i]).collect_variation_indices (c); + } + + template + bool serialize (hb_serialize_context_t *c, + unsigned num_rows, + AnchorMatrix const *offset_matrix, + const hb_map_t *layout_variation_idx_map, + Iterator index_iter) + { + TRACE_SERIALIZE (this); + if (!index_iter) return_trace (false); + if (unlikely (!c->extend_min ((*this)))) return_trace (false); + + this->rows = num_rows; + for (const unsigned i : index_iter) + { + auto *offset = c->embed (offset_matrix->matrixZ[i]); + if (!offset) return_trace (false); + offset->serialize_copy (c, offset_matrix->matrixZ[i], + offset_matrix, c->to_bias (this), + hb_serialize_context_t::Head, + layout_variation_idx_map); + } + + return_trace (true); + } + bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const { TRACE_SANITIZE (this); @@ -392,8 +579,7 @@ struct AnchorMatrix } HBUINT16 rows; /* Number of rows */ - protected: - UnsizedArrayOf > + UnsizedArrayOf> matrixZ; /* Matrix of offsets to Anchor tables-- * from beginning of AnchorMatrix table */ public: @@ -405,12 +591,34 @@ struct MarkRecord { friend struct MarkArray; + unsigned get_class () const { return (unsigned) klass; } bool sanitize (hb_sanitize_context_t *c, const void *base) const { TRACE_SANITIZE (this); return_trace (c->check_struct (this) && markAnchor.sanitize (c, base)); } + MarkRecord *copy (hb_serialize_context_t *c, + const void *src_base, + unsigned dst_bias, + const hb_map_t *klass_mapping, + const hb_map_t *layout_variation_idx_map) const + { + TRACE_SERIALIZE (this); + auto *out = c->embed (this); + if (unlikely (!out)) return_trace (nullptr); + + out->klass = klass_mapping->get (klass); + out->markAnchor.serialize_copy (c, markAnchor, src_base, dst_bias, hb_serialize_context_t::Head, layout_variation_idx_map); + return_trace (out); + } + + void collect_variation_indices (hb_collect_variation_indices_context_t *c, + const void *src_base) const + { + (src_base+markAnchor).collect_variation_indices (c); + } + protected: HBUINT16 klass; /* Class defined for this mark */ OffsetTo @@ -446,8 +654,8 @@ struct MarkArray : ArrayOf /* Array of MarkRecords--in Coverage ord glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y); hb_glyph_position_t &o = buffer->cur_pos(); - o.x_offset = round (base_x - mark_x); - o.y_offset = round (base_y - mark_y); + o.x_offset = roundf (base_x - mark_x); + o.y_offset = roundf (base_y - mark_y); o.attach_type() = ATTACH_TYPE_MARK; o.attach_chain() = (int) glyph_pos - (int) buffer->idx; buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT; @@ -456,6 +664,21 @@ struct MarkArray : ArrayOf /* Array of MarkRecords--in Coverage ord return_trace (true); } + template + bool serialize 
(hb_serialize_context_t *c, + const hb_map_t *klass_mapping, + const hb_map_t *layout_variation_idx_map, + const void *base, + Iterator it) + { + TRACE_SERIALIZE (this); + if (unlikely (!c->extend_min (*this))) return_trace (false); + if (unlikely (!c->check_assign (len, it.len ()))) return_trace (false); + c->copy_all (it, base, c->to_bias (this), klass_mapping, layout_variation_idx_map); + return_trace (true); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -471,8 +694,22 @@ struct SinglePosFormat1 bool intersects (const hb_set_t *glyphs) const { return (this+coverage).intersects (glyphs); } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + if (!valueFormat.has_device ()) return; + + auto it = + + hb_iter (this+coverage) + | hb_filter (c->glyph_set) + ; + + if (!it) return; + valueFormat.collect_variation_indices (c, this, values.as_array (valueFormat.get_len ())); + } + void collect_glyphs (hb_collect_glyphs_context_t *c) const - { if (unlikely (!(this+coverage).add_coverage (c->input))) return; } + { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; } const Coverage &get_coverage () const { return this+coverage; } @@ -489,11 +726,48 @@ struct SinglePosFormat1 return_trace (true); } + template + void serialize (hb_serialize_context_t *c, + const void *src, + Iterator it, + ValueFormat valFormat, + const hb_map_t *layout_variation_idx_map) + { + auto out = c->extend_min (*this); + if (unlikely (!out)) return; + if (unlikely (!c->check_assign (valueFormat, valFormat))) return; + + + it + | hb_map (hb_second) + | hb_apply ([&] (hb_array_t _) + { valFormat.serialize_copy (c, src, &_, layout_variation_idx_map); }) + ; + + auto glyphs = + + it + | hb_map_retains_sorting (hb_first) + ; + + coverage.serialize (c, this).serialize (c, glyphs); + } + bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto it = + + hb_iter (this+coverage) + | hb_filter (glyphset) + | hb_map_retains_sorting (glyph_map) + | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ()))) + ; + + bool ret = bool (it); + SinglePos_serialize (c->serializer, this, it, valueFormat, c->plan->layout_variation_idx_map); + return_trace (ret); } bool sanitize (hb_sanitize_context_t *c) const @@ -523,8 +797,29 @@ struct SinglePosFormat2 bool intersects (const hb_set_t *glyphs) const { return (this+coverage).intersects (glyphs); } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + if (!valueFormat.has_device ()) return; + + auto it = + + hb_zip (this+coverage, hb_range ((unsigned) valueCount)) + | hb_filter (c->glyph_set, hb_first) + ; + + if (!it) return; + + unsigned sub_length = valueFormat.get_len (); + const hb_array_t values_array = values.as_array (valueCount * sub_length); + + for (unsigned i : + it + | hb_map (hb_second)) + valueFormat.collect_variation_indices (c, this, values_array.sub_array (i * sub_length, sub_length)); + + } + void collect_glyphs (hb_collect_glyphs_context_t *c) const - { if (unlikely (!(this+coverage).add_coverage (c->input))) return; } + { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; } const Coverage &get_coverage () const { return this+coverage; } @@ -545,11 
+840,56 @@ struct SinglePosFormat2 return_trace (true); } + template + void serialize (hb_serialize_context_t *c, + const void *src, + Iterator it, + ValueFormat valFormat, + const hb_map_t *layout_variation_idx_map) + { + auto out = c->extend_min (*this); + if (unlikely (!out)) return; + if (unlikely (!c->check_assign (valueFormat, valFormat))) return; + if (unlikely (!c->check_assign (valueCount, it.len ()))) return; + + + it + | hb_map (hb_second) + | hb_apply ([&] (hb_array_t _) + { valFormat.serialize_copy (c, src, &_, layout_variation_idx_map); }) + ; + + auto glyphs = + + it + | hb_map_retains_sorting (hb_first) + ; + + coverage.serialize (c, this).serialize (c, glyphs); + } + bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + unsigned sub_length = valueFormat.get_len (); + auto values_array = values.as_array (valueCount * sub_length); + + auto it = + + hb_zip (this+coverage, hb_range ((unsigned) valueCount)) + | hb_filter (glyphset, hb_first) + | hb_map_retains_sorting ([&] (const hb_pair_t& _) + { + return hb_pair (glyph_map[_.first], + values_array.sub_array (_.second * sub_length, + sub_length)); + }) + ; + + bool ret = bool (it); + SinglePos_serialize (c->serializer, this, it, valueFormat, c->plan->layout_variation_idx_map); + return_trace (ret); } bool sanitize (hb_sanitize_context_t *c) const @@ -576,14 +916,52 @@ struct SinglePosFormat2 struct SinglePos { - template - typename context_t::return_t dispatch (context_t *c) const + template + unsigned get_format (Iterator glyph_val_iter_pairs) + { + hb_array_t first_val_iter = hb_second (*glyph_val_iter_pairs); + + for (const auto iter : glyph_val_iter_pairs) + for (const auto _ : hb_zip (iter.second, first_val_iter)) + if (_.first != _.second) + return 2; + + return 1; + } + + + template + void serialize (hb_serialize_context_t *c, + const void *src, + Iterator glyph_val_iter_pairs, + ValueFormat valFormat, + const hb_map_t *layout_variation_idx_map) + { + if (unlikely (!c->extend_min (u.format))) return; + unsigned format = 2; + + if (glyph_val_iter_pairs) format = get_format (glyph_val_iter_pairs); + + u.format = format; + switch (u.format) { + case 1: u.format1.serialize (c, src, glyph_val_iter_pairs, valFormat, layout_variation_idx_map); + return; + case 2: u.format2.serialize (c, src, glyph_val_iter_pairs, valFormat, layout_variation_idx_map); + return; + default:return; + } + } + + template + typename context_t::return_t dispatch (context_t *c, Ts&&... 
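// Editor's note: illustrative sketch, not part of the patch. SinglePos::get_format ()
// above picks the output encoding during subsetting: if every retained glyph carries
// an identical value record the lookup can be written as format 1 (one shared record),
// otherwise as format 2 (one record per glyph). Standalone model of that decision:
#include <cstdint>
#include <vector>

static unsigned
choose_single_pos_format (const std::vector<std::vector<int16_t>> &per_glyph_values)
{
  if (per_glyph_values.empty ()) return 2;          // mirrors the default above
  for (const auto &rec : per_glyph_values)
    if (rec != per_glyph_values.front ()) return 2; // any difference forces format 2
  return 1;                                         // all identical: shared record
}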
ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); - case 2: return_trace (c->dispatch (u.format2)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); + case 2: return_trace (c->dispatch (u.format2, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -596,13 +974,64 @@ struct SinglePos } u; }; +template +static void +SinglePos_serialize (hb_serialize_context_t *c, + const void *src, + Iterator it, + ValueFormat valFormat, + const hb_map_t *layout_variation_idx_map) +{ c->start_embed ()->serialize (c, src, it, valFormat, layout_variation_idx_map); } + struct PairValueRecord { friend struct PairSet; + int cmp (hb_codepoint_t k) const + { return secondGlyph.cmp (k); } + + struct serialize_closure_t + { + const void *base; + const ValueFormat *valueFormats; + unsigned len1; /* valueFormats[0].get_len() */ + const hb_map_t *glyph_map; + const hb_map_t *layout_variation_idx_map; + }; + + bool serialize (hb_serialize_context_t *c, + serialize_closure_t *closure) const + { + TRACE_SERIALIZE (this); + auto *out = c->start_embed (*this); + if (unlikely (!c->extend_min (out))) return_trace (false); + + out->secondGlyph = (*closure->glyph_map)[secondGlyph]; + + closure->valueFormats[0].serialize_copy (c, closure->base, &values[0], closure->layout_variation_idx_map); + closure->valueFormats[1].serialize_copy (c, closure->base, &values[closure->len1], closure->layout_variation_idx_map); + + return_trace (true); + } + + void collect_variation_indices (hb_collect_variation_indices_context_t *c, + const ValueFormat *valueFormats, + const void *base) const + { + unsigned record1_len = valueFormats[0].get_len (); + unsigned record2_len = valueFormats[1].get_len (); + const hb_array_t values_array = values.as_array (record1_len + record2_len); + + if (valueFormats[0].has_device ()) + valueFormats[0].collect_variation_indices (c, base, values_array.sub_array (0, record1_len)); + + if (valueFormats[1].has_device ()) + valueFormats[1].collect_variation_indices (c, base, values_array.sub_array (record1_len, record2_len)); + } + protected: - GlyphID secondGlyph; /* GlyphID of second glyph in the + HBGlyphID secondGlyph; /* GlyphID of second glyph in the * pair--first glyph is listed in the * Coverage table */ ValueRecord values; /* Positioning data for the first glyph @@ -616,7 +1045,7 @@ struct PairSet friend struct PairPosFormat1; bool intersects (const hb_set_t *glyphs, - const ValueFormat *valueFormats) const + const ValueFormat *valueFormats) const { unsigned int len1 = valueFormats[0].get_len (); unsigned int len2 = valueFormats[1].get_len (); @@ -634,7 +1063,7 @@ struct PairSet } void collect_glyphs (hb_collect_glyphs_context_t *c, - const ValueFormat *valueFormats) const + const ValueFormat *valueFormats) const { unsigned int len1 = valueFormats[0].get_len (); unsigned int len2 = valueFormats[1].get_len (); @@ -644,9 +1073,27 @@ struct PairSet c->input->add_array (&record->secondGlyph, len, record_size); } + void collect_variation_indices (hb_collect_variation_indices_context_t *c, + const ValueFormat *valueFormats) const + { + unsigned len1 = valueFormats[0].get_len (); + unsigned len2 = valueFormats[1].get_len (); + unsigned record_size = HBUINT16::static_size * (1 + len1 + len2); + + const PairValueRecord *record = &firstPairValueRecord; + unsigned count = len; + for (unsigned i = 0; i < count; 
i++) + { + if (c->glyph_set->has (record->secondGlyph)) + { record->collect_variation_indices (c, valueFormats, this); } + + record = &StructAtOffset (record, record_size); + } + } + bool apply (hb_ot_apply_context_t *c, - const ValueFormat *valueFormats, - unsigned int pos) const + const ValueFormat *valueFormats, + unsigned int pos) const { TRACE_APPLY (this); hb_buffer_t *buffer = c->buffer; @@ -654,41 +1101,66 @@ struct PairSet unsigned int len2 = valueFormats[1].get_len (); unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2); - unsigned int count = len; + const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint, + &firstPairValueRecord, + len, + record_size); + if (record) + { + /* Note the intentional use of "|" instead of short-circuit "||". */ + if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) | + valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos])) + buffer->unsafe_to_break (buffer->idx, pos + 1); + if (len2) + pos++; + buffer->idx = pos; + return_trace (true); + } + return_trace (false); + } - /* Hand-coded bsearch. */ - if (unlikely (!count)) - return_trace (false); - hb_codepoint_t x = buffer->info[pos].codepoint; - int min = 0, max = (int) count - 1; - while (min <= max) + bool subset (hb_subset_context_t *c, + const ValueFormat valueFormats[2]) const + { + TRACE_SUBSET (this); + auto snap = c->serializer->snapshot (); + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->len = 0; + + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + unsigned len1 = valueFormats[0].get_len (); + unsigned len2 = valueFormats[1].get_len (); + unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2); + + PairValueRecord::serialize_closure_t closure = { - int mid = ((unsigned int) min + (unsigned int) max) / 2; - const PairValueRecord *record = &StructAtOffset (&firstPairValueRecord, record_size * mid); - hb_codepoint_t mid_x = record->secondGlyph; - if (x < mid_x) - max = mid - 1; - else if (x > mid_x) - min = mid + 1; - else - { - /* Note the intentional use of "|" instead of short-circuit "||". 
*/ - if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) | - valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos])) - buffer->unsafe_to_break (buffer->idx, pos + 1); - if (len2) - pos++; - buffer->idx = pos; - return_trace (true); - } + this, + valueFormats, + len1, + &glyph_map, + c->plan->layout_variation_idx_map + }; + + const PairValueRecord *record = &firstPairValueRecord; + unsigned count = len, num = 0; + for (unsigned i = 0; i < count; i++) + { + if (glyphset.has (record->secondGlyph) + && record->serialize (c->serializer, &closure)) num++; + record = &StructAtOffset (record, record_size); } - return_trace (false); + out->len = num; + if (!num) c->serializer->revert (snap); + return_trace (num); } struct sanitize_closure_t { - const void *base; const ValueFormat *valueFormats; unsigned int len1; /* valueFormats[0].get_len() */ unsigned int stride; /* 1 + len1 + len2 */ @@ -705,8 +1177,8 @@ struct PairSet unsigned int count = len; const PairValueRecord *record = &firstPairValueRecord; - return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride) && - closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride)); + return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) && + closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride)); } protected: @@ -722,21 +1194,37 @@ struct PairPosFormat1 { bool intersects (const hb_set_t *glyphs) const { - unsigned int count = pairSet.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. 
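// Editor's note: illustrative sketch, not part of the patch. PairSet::apply () above
// replaces the hand-rolled binary search (deleted here) with hb_bsearch () over
// PairValueRecords whose size is only known at run time (1 + len1 + len2 16-bit
// words). A standalone search over fixed-stride records keyed by a leading
// big-endian uint16, as OpenType stores glyph ids:
#include <cstddef>
#include <cstdint>

static const uint8_t *
bsearch_stride (uint16_t key, const uint8_t *records, size_t count, size_t stride)
{
  size_t lo = 0, hi = count;
  while (lo < hi)
  {
    size_t mid = (lo + hi) / 2;
    const uint8_t *rec = records + mid * stride;
    uint16_t rec_key = uint16_t ((rec[0] << 8) | rec[1]);   // big-endian first field
    if      (key < rec_key) hi = mid;
    else if (key > rec_key) lo = mid + 1;
    else return rec;
  }
  return nullptr;                                           // key not present
}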
https://github.com/harfbuzz/harfbuzz/issues/363 */ - if (glyphs->has (iter.get_glyph ()) && - (this+pairSet[iter.get_coverage ()]).intersects (glyphs, valueFormat)) - return true; - } - return false; + return + + hb_zip (this+coverage, pairSet) + | hb_filter (*glyphs, hb_first) + | hb_map (hb_second) + | hb_map ([glyphs, this] (const OffsetTo &_) + { return (this+_).intersects (glyphs, valueFormat); }) + | hb_any + ; + } + + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + if ((!valueFormat[0].has_device ()) && (!valueFormat[1].has_device ())) return; + + auto it = + + hb_zip (this+coverage, pairSet) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + ; + + if (!it) return; + + it + | hb_map (hb_add (this)) + | hb_apply ([&] (const PairSet& _) { _.collect_variation_indices (c, valueFormat); }) + ; } void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+coverage).add_coverage (c->input))) return; + if (unlikely (!(this+coverage).collect_coverage (c->input))) return; unsigned int count = pairSet.len; for (unsigned int i = 0; i < count; i++) (this+pairSet[i]).collect_glyphs (c, valueFormat); @@ -761,8 +1249,43 @@ struct PairPosFormat1 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + out->valueFormat[0] = valueFormat[0]; + out->valueFormat[1] = valueFormat[1]; + + hb_sorted_vector_t new_coverage; + + + hb_zip (this+coverage, pairSet) + | hb_filter (glyphset, hb_first) + | hb_filter ([this, c, out] (const OffsetTo& _) + { + auto *o = out->pairSet.serialize_append (c->serializer); + if (unlikely (!o)) return false; + auto snap = c->serializer->snapshot (); + bool ret = o->serialize_subset (c, _, this, valueFormat); + if (!ret) + { + out->pairSet.pop (); + c->serializer->revert (snap); + } + return ret; + }, + hb_second) + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + + out->coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ()); + + return_trace (bool (new_coverage)); } bool sanitize (hb_sanitize_context_t *c) const @@ -775,7 +1298,6 @@ struct PairPosFormat1 unsigned int len2 = valueFormat[1].get_len (); PairSet::sanitize_closure_t closure = { - this, valueFormat, len1, 1 + len1 + len2 @@ -810,10 +1332,43 @@ struct PairPosFormat2 (this+classDef2).intersects (glyphs); } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + if ((!valueFormat1.has_device ()) && (!valueFormat2.has_device ())) return; + + hb_set_t class1_set, class2_set; + for (const unsigned cp : c->glyph_set->iter ()) + { + unsigned klass1 = (this+classDef1).get (cp); + unsigned klass2 = (this+classDef2).get (cp); + class1_set.add (klass1); + class2_set.add (klass2); + } + + if (class1_set.is_empty () || class2_set.is_empty ()) return; + + unsigned len1 = valueFormat1.get_len (); + unsigned len2 = valueFormat2.get_len (); + const hb_array_t values_array = values.as_array ((unsigned)class1Count * (unsigned) class2Count * (len1 + len2)); + for (const unsigned class1_idx : class1_set.iter ()) + { + for (const unsigned 
class2_idx : class2_set.iter ()) + { + unsigned start_offset = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2); + if (valueFormat1.has_device ()) + valueFormat1.collect_variation_indices (c, this, values_array.sub_array (start_offset, len1)); + + if (valueFormat2.has_device ()) + valueFormat2.collect_variation_indices (c, this, values_array.sub_array (start_offset+len1, len2)); + } + } + } + void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+coverage).add_coverage (c->input))) return; - if (unlikely (!(this+classDef2).add_coverage (c->input))) return; + if (unlikely (!(this+coverage).collect_coverage (c->input))) return; + if (unlikely (!(this+classDef2).collect_coverage (c->input))) return; } const Coverage &get_coverage () const { return this+coverage; } @@ -853,8 +1408,50 @@ struct PairPosFormat2 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + out->valueFormat1 = valueFormat1; + out->valueFormat2 = valueFormat2; + + hb_map_t klass1_map; + out->classDef1.serialize_subset (c, classDef1, this, &klass1_map); + out->class1Count = klass1_map.get_population (); + + hb_map_t klass2_map; + out->classDef2.serialize_subset (c, classDef2, this, &klass2_map); + out->class2Count = klass2_map.get_population (); + + unsigned len1 = valueFormat1.get_len (); + unsigned len2 = valueFormat2.get_len (); + + + hb_range ((unsigned) class1Count) + | hb_filter (klass1_map) + | hb_apply ([&] (const unsigned class1_idx) + { + + hb_range ((unsigned) class2Count) + | hb_filter (klass2_map) + | hb_apply ([&] (const unsigned class2_idx) + { + unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2); + valueFormat1.serialize_copy (c->serializer, this, &values[idx], c->plan->layout_variation_idx_map); + valueFormat2.serialize_copy (c->serializer, this, &values[idx + len1], c->plan->layout_variation_idx_map); + }) + ; + }) + ; + + const hb_set_t &glyphset = *c->plan->_glyphset_gsub; + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto it = + + hb_iter (this+coverage) + | hb_filter (glyphset) + | hb_map_retains_sorting (glyph_map) + ; + + out->coverage.serialize (c->serializer, out).serialize (c->serializer, it); + return_trace (out->class1Count && out->class2Count && bool (it)); } bool sanitize (hb_sanitize_context_t *c) const @@ -909,14 +1506,14 @@ struct PairPosFormat2 struct PairPos { - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... 
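// Editor's note: illustrative sketch, not part of the patch. PairPosFormat2 above
// stores one value record per (class1, class2) pair in a flattened matrix: the record
// for (c1, c2) begins at (c1 * class2Count + c2) * (len1 + len2), with the first len1
// slots for the first glyph and the next len2 for the second. A standalone index
// helper capturing that layout:
#include <cstddef>

struct PairValueMatrix
{
  unsigned class2_count;   // number of second-glyph classes
  unsigned len1, len2;     // value slots per record for glyph 1 / glyph 2

  size_t first_value_start (unsigned class1, unsigned class2) const
  { return (size_t (class1) * class2_count + class2) * (len1 + len2); }

  size_t second_value_start (unsigned class1, unsigned class2) const
  { return first_value_start (class1, class2) + len1; }
};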
ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); - case 2: return_trace (c->dispatch (u.format2)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); + case 2: return_trace (c->dispatch (u.format2, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -940,6 +1537,27 @@ struct EntryExitRecord return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base)); } + void collect_variation_indices (hb_collect_variation_indices_context_t *c, + const void *src_base) const + { + (src_base+entryAnchor).collect_variation_indices (c); + (src_base+exitAnchor).collect_variation_indices (c); + } + + EntryExitRecord* copy (hb_serialize_context_t *c, + const void *src_base, + const void *dst_base, + const hb_map_t *layout_variation_idx_map) const + { + TRACE_SERIALIZE (this); + auto *out = c->embed (this); + if (unlikely (!out)) return_trace (nullptr); + + out->entryAnchor.serialize_copy (c, entryAnchor, src_base, c->to_bias (dst_base), hb_serialize_context_t::Head, layout_variation_idx_map); + out->exitAnchor.serialize_copy (c, exitAnchor, src_base, c->to_bias (dst_base), hb_serialize_context_t::Head, layout_variation_idx_map); + return_trace (out); + } + protected: OffsetTo entryAnchor; /* Offset to EntryAnchor table--from @@ -961,8 +1579,19 @@ struct CursivePosFormat1 bool intersects (const hb_set_t *glyphs) const { return (this+coverage).intersects (glyphs); } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + + hb_zip (this+coverage, entryExitRecord) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + | hb_apply ([&] (const EntryExitRecord& record) { record.collect_variation_indices (c, this); }) + ; + } + void collect_glyphs (hb_collect_glyphs_context_t *c) const - { if (unlikely (!(this+coverage).add_coverage (c->input))) return; } + { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; } const Coverage &get_coverage () const { return this+coverage; } @@ -995,32 +1624,32 @@ struct CursivePosFormat1 /* Main-direction adjustment */ switch (c->direction) { case HB_DIRECTION_LTR: - pos[i].x_advance = round (exit_x) + pos[i].x_offset; + pos[i].x_advance = roundf (exit_x) + pos[i].x_offset; - d = round (entry_x) + pos[j].x_offset; + d = roundf (entry_x) + pos[j].x_offset; pos[j].x_advance -= d; pos[j].x_offset -= d; break; case HB_DIRECTION_RTL: - d = round (exit_x) + pos[i].x_offset; + d = roundf (exit_x) + pos[i].x_offset; pos[i].x_advance -= d; pos[i].x_offset -= d; - pos[j].x_advance = round (entry_x) + pos[j].x_offset; + pos[j].x_advance = roundf (entry_x) + pos[j].x_offset; break; case HB_DIRECTION_TTB: - pos[i].y_advance = round (exit_y) + pos[i].y_offset; + pos[i].y_advance = roundf (exit_y) + pos[i].y_offset; - d = round (entry_y) + pos[j].y_offset; + d = roundf (entry_y) + pos[j].y_offset; pos[j].y_advance -= d; pos[j].y_offset -= d; break; case HB_DIRECTION_BTT: - d = round (exit_y) + pos[i].y_offset; + d = roundf (exit_y) + pos[i].y_offset; pos[i].y_advance -= d; pos[i].y_offset -= d; - pos[j].y_advance = round (entry_y); + pos[j].y_advance = roundf (entry_y); break; case HB_DIRECTION_INVALID: default: @@ -1063,15 +1692,58 @@ struct CursivePosFormat1 else pos[child].x_offset = x_offset; + /* If parent was attached to child, break them free. 
+ * https://github.com/harfbuzz/harfbuzz/issues/2469 + */ + if (unlikely (pos[parent].attach_chain() == -pos[child].attach_chain())) + pos[parent].attach_chain() = 0; + buffer->idx++; return_trace (true); } + template + void serialize (hb_serialize_context_t *c, + Iterator it, + const void *src_base, + const hb_map_t *layout_variation_idx_map) + { + if (unlikely (!c->extend_min ((*this)))) return; + this->format = 1; + this->entryExitRecord.len = it.len (); + + for (const EntryExitRecord& entry_record : + it + | hb_map (hb_second)) + c->copy (entry_record, src_base, this, layout_variation_idx_map); + + auto glyphs = + + it + | hb_map_retains_sorting (hb_first) + ; + + coverage.serialize (c, this).serialize (c, glyphs); + } + bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!out)) return_trace (false); + + auto it = + + hb_zip (this+coverage, entryExitRecord) + | hb_filter (glyphset, hb_first) + | hb_map_retains_sorting ([&] (hb_pair_t p) -> hb_pair_t + { return hb_pair (glyph_map[p.first], p.second);}) + ; + + bool ret = bool (it); + out->serialize (c->serializer, it, this, c->plan->layout_variation_idx_map); + return_trace (ret); } bool sanitize (hb_sanitize_context_t *c) const @@ -1094,13 +1766,13 @@ struct CursivePosFormat1 struct CursivePos { - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -1118,16 +1790,73 @@ typedef AnchorMatrix BaseArray; /* base-major-- * mark-minor-- * ordered by class--zero-based. 
*/ +static void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage, + const MarkArray &mark_array, + const hb_set_t &glyphset, + hb_map_t* klass_mapping /* INOUT */) +{ + hb_set_t orig_classes; + + + hb_zip (mark_coverage, mark_array) + | hb_filter (glyphset, hb_first) + | hb_map (hb_second) + | hb_map (&MarkRecord::get_class) + | hb_sink (orig_classes) + ; + + unsigned idx = 0; + for (auto klass : orig_classes.iter ()) + { + if (klass_mapping->has (klass)) continue; + klass_mapping->set (klass, idx); + idx++; + } +} + struct MarkBasePosFormat1 { bool intersects (const hb_set_t *glyphs) const - { return (this+markCoverage).intersects (glyphs) && - (this+baseCoverage).intersects (glyphs); } + { + return (this+markCoverage).intersects (glyphs) && + (this+baseCoverage).intersects (glyphs); + } + + void closure_lookups (hb_closure_lookups_context_t *c) const {} + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + + hb_zip (this+markCoverage, this+markArray) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); }) + ; + + hb_map_t klass_mapping; + Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping); + + unsigned basecount = (this+baseArray).rows; + auto base_iter = + + hb_zip (this+baseCoverage, hb_range (basecount)) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + ; + + hb_sorted_vector_t base_indexes; + for (const unsigned row : base_iter) + { + + hb_range ((unsigned) classCount) + | hb_filter (klass_mapping) + | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) + | hb_sink (base_indexes) + ; + } + (this+baseArray).collect_variation_indices (c, base_indexes.iter ()); + } void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+markCoverage).add_coverage (c->input))) return; - if (unlikely (!(this+baseCoverage).add_coverage (c->input))) return; + if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return; + if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return; } const Coverage &get_coverage () const { return this+markCoverage; } @@ -1175,8 +1904,70 @@ struct MarkBasePosFormat1 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + + hb_map_t klass_mapping; + Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping); + + if (!klass_mapping.get_population ()) return_trace (false); + out->classCount = klass_mapping.get_population (); + + auto mark_iter = + + hb_zip (this+markCoverage, this+markArray) + | hb_filter (glyphset, hb_first) + ; + + hb_sorted_vector_t new_coverage; + + mark_iter + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + + if (!out->markCoverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ())) + return_trace (false); + + out->markArray.serialize (c->serializer, out) + .serialize (c->serializer, &klass_mapping, c->plan->layout_variation_idx_map, &(this+markArray), + mark_iter + | hb_map (hb_second)); + + unsigned basecount = (this+baseArray).rows; + auto base_iter 
= + + hb_zip (this+baseCoverage, hb_range (basecount)) + | hb_filter (glyphset, hb_first) + ; + + new_coverage.reset (); + + base_iter + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + + if (!out->baseCoverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ())) + return_trace (false); + + hb_sorted_vector_t base_indexes; + for (const unsigned row : + base_iter + | hb_map (hb_second)) + { + + hb_range ((unsigned) classCount) + | hb_filter (klass_mapping) + | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) + | hb_sink (base_indexes) + ; + } + out->baseArray.serialize (c->serializer, out) + .serialize (c->serializer, base_iter.len (), &(this+baseArray), c->plan->layout_variation_idx_map, base_indexes.iter ()); + + return_trace (true); } bool sanitize (hb_sanitize_context_t *c) const @@ -1210,13 +2001,13 @@ struct MarkBasePosFormat1 struct MarkBasePos { - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -1242,13 +2033,53 @@ typedef OffsetListOf LigatureArray; struct MarkLigPosFormat1 { bool intersects (const hb_set_t *glyphs) const - { return (this+markCoverage).intersects (glyphs) && - (this+ligatureCoverage).intersects (glyphs); } + { + return (this+markCoverage).intersects (glyphs) && + (this+ligatureCoverage).intersects (glyphs); + } + + void closure_lookups (hb_closure_lookups_context_t *c) const {} + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + + hb_zip (this+markCoverage, this+markArray) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); }) + ; + + hb_map_t klass_mapping; + Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping); + + unsigned ligcount = (this+ligatureArray).len; + auto lig_iter = + + hb_zip (this+ligatureCoverage, hb_range (ligcount)) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + ; + + const LigatureArray& lig_array = this+ligatureArray; + for (const unsigned i : lig_iter) + { + hb_sorted_vector_t lig_indexes; + unsigned row_count = lig_array[i].rows; + for (unsigned row : + hb_range (row_count)) + { + + hb_range ((unsigned) classCount) + | hb_filter (klass_mapping) + | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) + | hb_sink (lig_indexes) + ; + } + + lig_array[i].collect_variation_indices (c, lig_indexes.iter ()); + } + } void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+markCoverage).add_coverage (c->input))) return; - if (unlikely (!(this+ligatureCoverage).add_coverage (c->input))) return; + if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return; + if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return; } const Coverage &get_coverage () const { return this+markCoverage; } @@ -1289,7 +2120,7 @@ struct MarkLigPosFormat1 unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur()); unsigned int mark_comp = 
_hb_glyph_info_get_lig_comp (&buffer->cur()); if (lig_id && lig_id == mark_id && mark_comp > 0) - comp_index = MIN (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1; + comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1; else comp_index = comp_count - 1; @@ -1335,13 +2166,13 @@ struct MarkLigPosFormat1 struct MarkLigPos { - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -1362,13 +2193,47 @@ typedef AnchorMatrix Mark2Array; /* mark2-major-- struct MarkMarkPosFormat1 { bool intersects (const hb_set_t *glyphs) const - { return (this+mark1Coverage).intersects (glyphs) && - (this+mark2Coverage).intersects (glyphs); } + { + return (this+mark1Coverage).intersects (glyphs) && + (this+mark2Coverage).intersects (glyphs); + } + + void closure_lookups (hb_closure_lookups_context_t *c) const {} + + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + + hb_zip (this+mark1Coverage, this+mark1Array) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); }) + ; + + hb_map_t klass_mapping; + Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping); + + unsigned mark2_count = (this+mark2Array).rows; + auto mark2_iter = + + hb_zip (this+mark2Coverage, hb_range (mark2_count)) + | hb_filter (c->glyph_set, hb_first) + | hb_map (hb_second) + ; + + hb_sorted_vector_t mark2_indexes; + for (const unsigned row : mark2_iter) + { + + hb_range ((unsigned) classCount) + | hb_filter (klass_mapping) + | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) + | hb_sink (mark2_indexes) + ; + } + (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ()); + } void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+mark1Coverage).add_coverage (c->input))) return; - if (unlikely (!(this+mark2Coverage).add_coverage (c->input))) return; + if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return; + if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return; } const Coverage &get_coverage () const { return this+mark1Coverage; } @@ -1395,12 +2260,15 @@ struct MarkMarkPosFormat1 unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur()); unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]); - if (likely (id1 == id2)) { + if (likely (id1 == id2)) + { if (id1 == 0) /* Marks belonging to the same base. */ goto good; else if (comp1 == comp2) /* Marks belonging to the same ligature component. */ goto good; - } else { + } + else + { /* If ligature ids don't match, it may be the case that one of the marks * itself is a ligature. In which case match. 
*/ if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2)) @@ -1420,8 +2288,70 @@ struct MarkMarkPosFormat1 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + + hb_map_t klass_mapping; + Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping); + + if (!klass_mapping.get_population ()) return_trace (false); + out->classCount = klass_mapping.get_population (); + + auto mark1_iter = + + hb_zip (this+mark1Coverage, this+mark1Array) + | hb_filter (glyphset, hb_first) + ; + + hb_sorted_vector_t new_coverage; + + mark1_iter + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + + if (!out->mark1Coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ())) + return_trace (false); + + out->mark1Array.serialize (c->serializer, out) + .serialize (c->serializer, &klass_mapping, c->plan->layout_variation_idx_map, &(this+mark1Array), + mark1_iter + | hb_map (hb_second)); + + unsigned mark2count = (this+mark2Array).rows; + auto mark2_iter = + + hb_zip (this+mark2Coverage, hb_range (mark2count)) + | hb_filter (glyphset, hb_first) + ; + + new_coverage.reset (); + + mark2_iter + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + + if (!out->mark2Coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ())) + return_trace (false); + + hb_sorted_vector_t mark2_indexes; + for (const unsigned row : + mark2_iter + | hb_map (hb_second)) + { + + hb_range ((unsigned) classCount) + | hb_filter (klass_mapping) + | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) + | hb_sink (mark2_indexes) + ; + } + out->mark2Array.serialize (c->serializer, out) + .serialize (c->serializer, mark2_iter.len (), &(this+mark2Array), c->plan->layout_variation_idx_map, mark2_indexes.iter ()); + + return_trace (true); } bool sanitize (hb_sanitize_context_t *c) const @@ -1457,13 +2387,13 @@ struct MarkMarkPosFormat1 struct MarkMarkPos { - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -1509,24 +2439,30 @@ struct PosLookupSubTable Extension = 9 }; - template - typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const + template + typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... 
ds) const
   {
     TRACE_DISPATCH (this, lookup_type);
     switch (lookup_type) {
-    case Single:                return_trace (u.single.dispatch (c));
-    case Pair:                  return_trace (u.pair.dispatch (c));
-    case Cursive:               return_trace (u.cursive.dispatch (c));
-    case MarkBase:              return_trace (u.markBase.dispatch (c));
-    case MarkLig:               return_trace (u.markLig.dispatch (c));
-    case MarkMark:              return_trace (u.markMark.dispatch (c));
-    case Context:               return_trace (u.context.dispatch (c));
-    case ChainContext:          return_trace (u.chainContext.dispatch (c));
-    case Extension:             return_trace (u.extension.dispatch (c));
+    case Single:                return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
+    case Pair:                  return_trace (u.pair.dispatch (c, hb_forward<Ts> (ds)...));
+    case Cursive:               return_trace (u.cursive.dispatch (c, hb_forward<Ts> (ds)...));
+    case MarkBase:              return_trace (u.markBase.dispatch (c, hb_forward<Ts> (ds)...));
+    case MarkLig:               return_trace (u.markLig.dispatch (c, hb_forward<Ts> (ds)...));
+    case MarkMark:              return_trace (u.markMark.dispatch (c, hb_forward<Ts> (ds)...));
+    case Context:               return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
+    case ChainContext:          return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
+    case Extension:             return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
     default:                    return_trace (c->default_return_value ());
     }
   }
 
+  bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
+  {
+    hb_intersects_context_t c (glyphs);
+    return dispatch (&c, lookup_type);
+  }
+
   protected:
   union {
   SinglePos             single;
@@ -1571,21 +2507,40 @@ struct PosLookup : Lookup
   hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
   { return dispatch (c); }
 
+  hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
+  {
+    if (c->is_lookup_visited (this_index))
+      return hb_closure_lookups_context_t::default_return_value ();
+
+    c->set_lookup_visited (this_index);
+    if (!intersects (c->glyphs))
+    {
+      c->set_lookup_inactive (this_index);
+      return hb_closure_lookups_context_t::default_return_value ();
+    }
+    c->set_recurse_func (dispatch_closure_lookups_recurse_func);
+
+    hb_closure_lookups_context_t::return_t ret = dispatch (c);
+    return ret;
+  }
+
   template <typename set_t>
-  void add_coverage (set_t *glyphs) const
+  void collect_coverage (set_t *glyphs) const
   {
-    hb_add_coverage_context_t<set_t> c (glyphs);
+    hb_collect_coverage_context_t<set_t> c (glyphs);
     dispatch (&c);
   }
 
-  static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
+  static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
 
   template <typename context_t>
   static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
 
-  template <typename context_t>
-  typename context_t::return_t dispatch (context_t *c) const
-  { return Lookup::dispatch (c); }
+  HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index);
+
+  template <typename context_t, typename ...Ts>
+  typename context_t::return_t dispatch (context_t *c, Ts&&...
ds) const + { return Lookup::dispatch (c, hb_forward (ds)...); } bool subset (hb_subset_context_t *c) const { return Lookup::subset (c); } @@ -1604,21 +2559,39 @@ struct GPOS : GSUBGPOS static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS; const PosLookup& get_lookup (unsigned int i) const - { return CastR (GSUBGPOS::get_lookup (i)); } + { return static_cast (GSUBGPOS::get_lookup (i)); } static inline void position_start (hb_font_t *font, hb_buffer_t *buffer); static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer); static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer); bool subset (hb_subset_context_t *c) const - { return GSUBGPOS::subset (c); } + { + hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_features); + return GSUBGPOS::subset (&l); + } bool sanitize (hb_sanitize_context_t *c) const { return GSUBGPOS::sanitize (c); } - HB_INTERNAL bool is_blacklisted (hb_blob_t *blob, + HB_INTERNAL bool is_blocklisted (hb_blob_t *blob, hb_face_t *face) const; + void collect_variation_indices (hb_collect_variation_indices_context_t *c) const + { + for (unsigned i = 0; i < GSUBGPOS::get_lookup_count (); i++) + { + if (!c->gpos_lookups->has (i)) continue; + const PosLookup &l = get_lookup (i); + l.dispatch (c); + } + } + + void closure_lookups (hb_face_t *face, + const hb_set_t *glyphs, + hb_set_t *lookup_indexes /* IN/OUT */) const + { GSUBGPOS::closure_lookups (face, glyphs, lookup_indexes); } + typedef GSUBGPOS::accelerator_t accelerator_t; }; @@ -1732,14 +2705,21 @@ struct GPOS_accelerator_t : GPOS::accelerator_t {}; /* Out-of-class implementation for methods recursing */ +#ifndef HB_NO_OT_LAYOUT template -/*static*/ inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index) +/*static*/ typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index) { const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index); return l.dispatch (c); } -/*static*/ inline bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index) +/*static*/ inline hb_closure_lookups_context_t::return_t PosLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index) +{ + const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (this_index); + return l.closure_lookups (c, this_index); +} + +/*static*/ bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index) { const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index); unsigned int saved_lookup_props = c->lookup_props; @@ -1751,6 +2731,7 @@ template c->set_lookup_props (saved_lookup_props); return ret; } +#endif } /* namespace OT */ diff --git a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gsub-table.hh b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gsub-table.hh index 288c07b552e4..de49c4e20845 100644 --- a/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gsub-table.hh +++ b/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gsub-table.hh @@ -34,10 +34,12 @@ namespace OT { +typedef hb_pair_t hb_codepoint_pair_t; + +template +static void SingleSubst_serialize (hb_serialize_context_t *c, + Iterator it); -static inline void SingleSubst_serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t substitutes); struct SingleSubstFormat1 { @@ -46,35 +48,30 @@ struct SingleSubstFormat1 void 
closure (hb_closure_context_t *c) const { - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - /* TODO Switch to range-based API to work around malicious fonts. - * https://github.com/harfbuzz/harfbuzz/issues/363 */ - hb_codepoint_t glyph_id = iter.get_glyph (); - if (c->glyphs->has (glyph_id)) - c->out->add ((glyph_id + deltaGlyphID) & 0xFFFFu); - } + unsigned d = deltaGlyphID; + + hb_iter (this+coverage) + | hb_filter (*c->glyphs) + | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; }) + | hb_sink (c->output) + ; } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+coverage).add_coverage (c->input))) return; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - /* TODO Switch to range-based API to work around malicious fonts. - * https://github.com/harfbuzz/harfbuzz/issues/363 */ - hb_codepoint_t glyph_id = iter.get_glyph (); - c->output->add ((glyph_id + deltaGlyphID) & 0xFFFFu); - } + if (unlikely (!(this+coverage).collect_coverage (c->input))) return; + unsigned d = deltaGlyphID; + + hb_iter (this+coverage) + | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; }) + | hb_sink (c->output) + ; } const Coverage &get_coverage () const { return this+coverage; } bool would_apply (hb_would_apply_context_t *c) const - { - TRACE_WOULD_APPLY (this); - return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED); - } + { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; } bool apply (hb_ot_apply_context_t *c) const { @@ -91,34 +88,41 @@ struct SingleSubstFormat1 return_trace (true); } + template bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - int delta) + Iterator glyphs, + unsigned delta) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false); - deltaGlyphID.set (delta); /* TODO(serialize) overflow? 
*/ + c->check_assign (deltaGlyphID, delta); return_trace (true); } bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - const hb_set_t &glyphset = *c->plan->glyphset; + const hb_set_t &glyphset = *c->plan->glyphset_gsub (); const hb_map_t &glyph_map = *c->plan->glyph_map; - hb_vector_t from; - hb_vector_t to; + hb_codepoint_t delta = deltaGlyphID; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (!glyphset.has (iter.get_glyph ())) continue; - from.push ()->set (glyph_map[iter.get_glyph ()]); - to.push ()->set (glyph_map[(iter.get_glyph () + delta) & 0xFFFF]); - } - c->serializer->propagate_error (from, to); - SingleSubst_serialize (c->serializer, from, to); - return_trace (from.length); + + auto it = + + hb_iter (this+coverage) + | hb_filter (glyphset) + | hb_map_retains_sorting ([&] (hb_codepoint_t g) { + return hb_codepoint_pair_t (g, + (g + delta) & 0xFFFF); }) + | hb_filter (glyphset, hb_second) + | hb_map_retains_sorting ([&] (hb_codepoint_pair_t p) -> hb_codepoint_pair_t + { return hb_pair (glyph_map[p.first], glyph_map[p.second]); }) + ; + + bool ret = bool (it); + SingleSubst_serialize (c->serializer, it); + return_trace (ret); } bool sanitize (hb_sanitize_context_t *c) const @@ -132,8 +136,8 @@ struct SingleSubstFormat1 OffsetTo coverage; /* Offset to Coverage table--from * beginning of Substitution table */ - HBINT16 deltaGlyphID; /* Add to original GlyphID to get - * substitute GlyphID */ + HBUINT16 deltaGlyphID; /* Add to original GlyphID to get + * substitute GlyphID, modulo 0x10000 */ public: DEFINE_SIZE_STATIC (6); }; @@ -145,35 +149,28 @@ struct SingleSubstFormat2 void closure (hb_closure_context_t *c) const { - unsigned int count = substitute.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */ - if (c->glyphs->has (iter.get_glyph ())) - c->out->add (substitute[iter.get_coverage ()]); - } + + hb_zip (this+coverage, substitute) + | hb_filter (*c->glyphs, hb_first) + | hb_map (hb_second) + | hb_sink (c->output) + ; } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+coverage).add_coverage (c->input))) return; - unsigned int count = substitute.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. 
https://github.com/harfbuzz/harfbuzz/issues/363 */ - c->output->add (substitute[iter.get_coverage ()]); - } + if (unlikely (!(this+coverage).collect_coverage (c->input))) return; + + hb_zip (this+coverage, substitute) + | hb_map (hb_second) + | hb_sink (c->output) + ; } const Coverage &get_coverage () const { return this+coverage; } bool would_apply (hb_would_apply_context_t *c) const - { - TRACE_WOULD_APPLY (this); - return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED); - } + { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; } bool apply (hb_ot_apply_context_t *c) const { @@ -188,11 +185,21 @@ struct SingleSubstFormat2 return_trace (true); } + template bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t substitutes) + Iterator it) { TRACE_SERIALIZE (this); + auto substitutes = + + it + | hb_map (hb_second) + ; + auto glyphs = + + it + | hb_map_retains_sorting (hb_first) + ; if (unlikely (!c->extend_min (*this))) return_trace (false); if (unlikely (!substitute.serialize (c, substitutes))) return_trace (false); if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false); @@ -202,19 +209,20 @@ struct SingleSubstFormat2 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - const hb_set_t &glyphset = *c->plan->glyphset; + const hb_set_t &glyphset = *c->plan->glyphset_gsub (); const hb_map_t &glyph_map = *c->plan->glyph_map; - hb_vector_t from; - hb_vector_t to; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (!glyphset.has (iter.get_glyph ())) continue; - from.push ()->set (glyph_map[iter.get_glyph ()]); - to.push ()->set (glyph_map[substitute[iter.get_coverage ()]]); - } - c->serializer->propagate_error (from, to); - SingleSubst_serialize (c->serializer, from, to); - return_trace (from.length); + + auto it = + + hb_zip (this+coverage, substitute) + | hb_filter (glyphset, hb_first) + | hb_filter (glyphset, hb_second) + | hb_map_retains_sorting ([&] (hb_pair_t p) -> hb_codepoint_pair_t + { return hb_pair (glyph_map[p.first], glyph_map[p.second]); }) + ; + + bool ret = bool (it); + SingleSubst_serialize (c->serializer, it); + return_trace (ret); } bool sanitize (hb_sanitize_context_t *c) const @@ -228,7 +236,7 @@ struct SingleSubstFormat2 OffsetTo coverage; /* Offset to Coverage table--from * beginning of Substitution table */ - ArrayOf + ArrayOf substitute; /* Array of substitute * GlyphIDs--ordered by Coverage Index */ public: @@ -237,41 +245,44 @@ struct SingleSubstFormat2 struct SingleSubst { + + template bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t substitutes) + Iterator glyphs) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (u.format))) return_trace (false); - unsigned int format = 2; - int delta = 0; - if (glyphs.length) + unsigned format = 2; + unsigned delta = 0; + if (glyphs) { format = 1; - /* TODO(serialize) check for wrap-around */ - delta = substitutes[0] - glyphs[0]; - for (unsigned int i = 1; i < glyphs.length; i++) - if (delta != (int) (substitutes[i] - glyphs[i])) { - format = 2; - break; - } + auto get_delta = [=] (hb_codepoint_pair_t _) + { return (unsigned) (_.second - _.first) & 0xFFFF; }; + delta = get_delta (*glyphs); + if (!hb_all (++(+glyphs), delta, get_delta)) format = 2; } - u.format.set (format); + u.format = format; switch (u.format) { - case 1: return_trace (u.format1.serialize (c, glyphs, delta)); - case 2: return_trace (u.format2.serialize (c, glyphs, 
substitutes)); + case 1: return_trace (u.format1.serialize (c, + + glyphs + | hb_map_retains_sorting (hb_first), + delta)); + case 2: return_trace (u.format2.serialize (c, glyphs)); default:return_trace (false); } } - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); - case 2: return_trace (c->dispatch (u.format2)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); + case 2: return_trace (c->dispatch (u.format2, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -284,20 +295,19 @@ struct SingleSubst } u; }; -static inline void +template +static void SingleSubst_serialize (hb_serialize_context_t *c, - hb_array_t glyphs, - hb_array_t substitutes) -{ c->start_embed ()->serialize (c, glyphs, substitutes); } + Iterator it) +{ c->start_embed ()->serialize (c, it); } struct Sequence { + bool intersects (const hb_set_t *glyphs) const + { return hb_all (substitute, glyphs); } + void closure (hb_closure_context_t *c) const - { - unsigned int count = substitute.len; - for (unsigned int i = 0; i < count; i++) - c->out->add (substitute[i]); - } + { c->output->add_array (substitute.arrayZ, substitute.len); } void collect_glyphs (hb_collect_glyphs_context_t *c) const { c->output->add_array (substitute.arrayZ, substitute.len); } @@ -334,11 +344,30 @@ struct Sequence return_trace (true); } + template bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs) + Iterator subst) { TRACE_SERIALIZE (this); - return_trace (substitute.serialize (c, glyphs)); + return_trace (substitute.serialize (c, subst)); + } + + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + if (!intersects (&glyphset)) return_trace (false); + + auto it = + + hb_iter (substitute) + | hb_map (glyph_map) + ; + + auto *out = c->serializer->start_embed (*this); + return_trace (out->serialize (c->serializer, it)); } bool sanitize (hb_sanitize_context_t *c) const @@ -348,7 +377,7 @@ struct Sequence } protected: - ArrayOf + ArrayOf substitute; /* String of GlyphIDs to substitute */ public: DEFINE_SIZE_ARRAY (2, substitute); @@ -361,31 +390,30 @@ struct MultipleSubstFormat1 void closure (hb_closure_context_t *c) const { - unsigned int count = sequence.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. 
https://github.com/harfbuzz/harfbuzz/issues/363 */ - if (c->glyphs->has (iter.get_glyph ())) - (this+sequence[iter.get_coverage ()]).closure (c); - } + + hb_zip (this+coverage, sequence) + | hb_filter (*c->glyphs, hb_first) + | hb_map (hb_second) + | hb_map (hb_add (this)) + | hb_apply ([c] (const Sequence &_) { _.closure (c); }) + ; } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+coverage).add_coverage (c->input))) return; - unsigned int count = sequence.len; - for (unsigned int i = 0; i < count; i++) - (this+sequence[i]).collect_glyphs (c); + if (unlikely (!(this+coverage).collect_coverage (c->input))) return; + + hb_zip (this+coverage, sequence) + | hb_map (hb_second) + | hb_map (hb_add (this)) + | hb_apply ([c] (const Sequence &_) { _.collect_glyphs (c); }) + ; } const Coverage &get_coverage () const { return this+coverage; } bool would_apply (hb_would_apply_context_t *c) const - { - TRACE_WOULD_APPLY (this); - return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED); - } + { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; } bool apply (hb_ot_apply_context_t *c) const { @@ -398,9 +426,9 @@ struct MultipleSubstFormat1 } bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, + hb_sorted_array_t glyphs, hb_array_t substitute_len_list, - hb_array_t substitute_glyphs_list) + hb_array_t substitute_glyphs_list) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); @@ -419,8 +447,24 @@ struct MultipleSubstFormat1 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + + hb_sorted_vector_t new_coverage; + + hb_zip (this+coverage, sequence) + | hb_filter (glyphset, hb_first) + | hb_filter (subset_offset_array (c, out->sequence, this), hb_second) + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + out->coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ()); + return_trace (bool (new_coverage)); } bool sanitize (hb_sanitize_context_t *c) const @@ -444,27 +488,27 @@ struct MultipleSubstFormat1 struct MultipleSubst { bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, + hb_sorted_array_t glyphs, hb_array_t substitute_len_list, - hb_array_t substitute_glyphs_list) + hb_array_t substitute_glyphs_list) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (u.format))) return_trace (false); unsigned int format = 1; - u.format.set (format); + u.format = format; switch (u.format) { case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, substitute_glyphs_list)); default:return_trace (false); } } - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... 
ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -478,12 +522,11 @@ struct MultipleSubst struct AlternateSet { + bool intersects (const hb_set_t *glyphs) const + { return hb_any (alternates, glyphs); } + void closure (hb_closure_context_t *c) const - { - unsigned int count = alternates.len; - for (unsigned int i = 0; i < count; i++) - c->out->add (alternates[i]); - } + { c->output->add_array (alternates.arrayZ, alternates.len); } void collect_glyphs (hb_collect_glyphs_context_t *c) const { c->output->add_array (alternates.arrayZ, alternates.len); } @@ -502,7 +545,7 @@ struct AlternateSet unsigned int shift = hb_ctz (lookup_mask); unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift); - /* If alt_index is MAX, randomize feature if it is the rand feature. */ + /* If alt_index is MAX_VALUE, randomize feature if it is the rand feature. */ if (alt_index == HB_OT_MAP_MAX_VALUE && c->random) alt_index = c->random_number () % count + 1; @@ -513,11 +556,44 @@ struct AlternateSet return_trace (true); } + unsigned + get_alternates (unsigned start_offset, + unsigned *alternate_count /* IN/OUT. May be NULL. */, + hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const + { + if (alternates.len && alternate_count) + { + + alternates.sub_array (start_offset, alternate_count) + | hb_sink (hb_array (alternate_glyphs, *alternate_count)) + ; + } + return alternates.len; + } + + template bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs) + Iterator alts) { TRACE_SERIALIZE (this); - return_trace (alternates.serialize (c, glyphs)); + return_trace (alternates.serialize (c, alts)); + } + + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto it = + + hb_iter (alternates) + | hb_filter (glyphset) + | hb_map (glyph_map) + ; + + auto *out = c->serializer->start_embed (*this); + return_trace (out->serialize (c->serializer, it) && + out->alternates); } bool sanitize (hb_sanitize_context_t *c) const @@ -527,7 +603,7 @@ struct AlternateSet } protected: - ArrayOf + ArrayOf alternates; /* Array of alternate GlyphIDs--in * arbitrary order */ public: @@ -541,35 +617,38 @@ struct AlternateSubstFormat1 void closure (hb_closure_context_t *c) const { - unsigned int count = alternateSet.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. 
https://github.com/harfbuzz/harfbuzz/issues/363 */ - if (c->glyphs->has (iter.get_glyph ())) - (this+alternateSet[iter.get_coverage ()]).closure (c); - } + + hb_zip (this+coverage, alternateSet) + | hb_filter (c->glyphs, hb_first) + | hb_map (hb_second) + | hb_map (hb_add (this)) + | hb_apply ([c] (const AlternateSet &_) { _.closure (c); }) + ; } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+coverage).add_coverage (c->input))) return; - unsigned int count = alternateSet.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */ - (this+alternateSet[iter.get_coverage ()]).collect_glyphs (c); - } + if (unlikely (!(this+coverage).collect_coverage (c->input))) return; + + hb_zip (this+coverage, alternateSet) + | hb_map (hb_second) + | hb_map (hb_add (this)) + | hb_apply ([c] (const AlternateSet &_) { _.collect_glyphs (c); }) + ; } const Coverage &get_coverage () const { return this+coverage; } bool would_apply (hb_would_apply_context_t *c) const - { - TRACE_WOULD_APPLY (this); - return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED); - } + { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; } + + unsigned + get_glyph_alternates (hb_codepoint_t gid, + unsigned start_offset, + unsigned *alternate_count /* IN/OUT. May be NULL. */, + hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const + { return (this+alternateSet[(this+coverage).get_coverage (gid)]) + .get_alternates (start_offset, alternate_count, alternate_glyphs); } bool apply (hb_ot_apply_context_t *c) const { @@ -582,9 +661,9 @@ struct AlternateSubstFormat1 } bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, + hb_sorted_array_t glyphs, hb_array_t alternate_len_list, - hb_array_t alternate_glyphs_list) + hb_array_t alternate_glyphs_list) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); @@ -603,8 +682,24 @@ struct AlternateSubstFormat1 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + + hb_sorted_vector_t new_coverage; + + hb_zip (this+coverage, alternateSet) + | hb_filter (glyphset, hb_first) + | hb_filter (subset_offset_array (c, out->alternateSet, this), hb_second) + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + out->coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ()); + return_trace (bool (new_coverage)); } bool sanitize (hb_sanitize_context_t *c) const @@ -628,27 +723,27 @@ struct AlternateSubstFormat1 struct AlternateSubst { bool serialize (hb_serialize_context_t *c, - hb_array_t glyphs, + hb_sorted_array_t glyphs, hb_array_t alternate_len_list, - hb_array_t alternate_glyphs_list) + hb_array_t alternate_glyphs_list) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (u.format))) return_trace (false); unsigned int format = 1; - u.format.set (format); + u.format = format; switch (u.format) { case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, 
alternate_glyphs_list)); default:return_trace (false); } } - template - typename context_t::return_t dispatch (context_t *c) const + template + typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const { TRACE_DISPATCH (this, u.format); if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); switch (u.format) { - case 1: return_trace (c->dispatch (u.format1)); + case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...)); default:return_trace (c->default_return_value ()); } } @@ -664,40 +759,30 @@ struct AlternateSubst struct Ligature { bool intersects (const hb_set_t *glyphs) const - { - unsigned int count = component.lenP1; - for (unsigned int i = 1; i < count; i++) - if (!glyphs->has (component[i])) - return false; - return true; - } + { return hb_all (component, glyphs); } void closure (hb_closure_context_t *c) const { - unsigned int count = component.lenP1; - for (unsigned int i = 1; i < count; i++) - if (!c->glyphs->has (component[i])) - return; - c->out->add (ligGlyph); + if (!intersects (c->glyphs)) return; + c->output->add (ligGlyph); } void collect_glyphs (hb_collect_glyphs_context_t *c) const { - c->input->add_array (component.arrayZ, component.lenP1 ? component.lenP1 - 1 : 0); + c->input->add_array (component.arrayZ, component.get_length ()); c->output->add (ligGlyph); } bool would_apply (hb_would_apply_context_t *c) const { - TRACE_WOULD_APPLY (this); if (c->len != component.lenP1) - return_trace (false); + return false; for (unsigned int i = 1; i < c->len; i++) if (likely (c->glyphs[i] != component[i])) - return_trace (false); + return false; - return_trace (true); + return true; } bool apply (hb_ot_apply_context_t *c) const @@ -739,9 +824,11 @@ struct Ligature return_trace (true); } + template bool serialize (hb_serialize_context_t *c, - GlyphID ligature, - hb_array_t components /* Starting from second */) + hb_codepoint_t ligature, + Iterator components /* Starting from second */) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); @@ -750,6 +837,25 @@ struct Ligature return_trace (true); } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + if (!intersects (&glyphset) || !glyphset.has (ligGlyph)) return_trace (false); + + auto it = + + hb_iter (component) + | hb_map (glyph_map) + ; + + auto *out = c->serializer->start_embed (*this); + return_trace (out->serialize (c->serializer, + glyph_map[ligGlyph], + it)); + } + public: bool sanitize (hb_sanitize_context_t *c) const { @@ -758,8 +864,8 @@ struct Ligature } protected: - GlyphID ligGlyph; /* GlyphID of ligature to substitute */ - HeadlessArrayOf + HBGlyphID ligGlyph; /* GlyphID of ligature to substitute */ + HeadlessArrayOf component; /* Array of component GlyphIDs--start * with the second component--ordered * in writing direction */ @@ -771,38 +877,38 @@ struct LigatureSet { bool intersects (const hb_set_t *glyphs) const { - unsigned int num_ligs = ligature.len; - for (unsigned int i = 0; i < num_ligs; i++) - if ((this+ligature[i]).intersects (glyphs)) - return true; - return false; + return + + hb_iter (ligature) + | hb_map (hb_add (this)) + | hb_map ([glyphs] (const Ligature &_) { return _.intersects (glyphs); }) + | hb_any + ; } void closure (hb_closure_context_t *c) const { - unsigned int num_ligs = ligature.len; - for (unsigned int i = 0; i < num_ligs; i++) - (this+ligature[i]).closure (c); + + 
hb_iter (ligature) + | hb_map (hb_add (this)) + | hb_apply ([c] (const Ligature &_) { _.closure (c); }) + ; } void collect_glyphs (hb_collect_glyphs_context_t *c) const { - unsigned int num_ligs = ligature.len; - for (unsigned int i = 0; i < num_ligs; i++) - (this+ligature[i]).collect_glyphs (c); + + hb_iter (ligature) + | hb_map (hb_add (this)) + | hb_apply ([c] (const Ligature &_) { _.collect_glyphs (c); }) + ; } bool would_apply (hb_would_apply_context_t *c) const { - TRACE_WOULD_APPLY (this); - unsigned int num_ligs = ligature.len; - for (unsigned int i = 0; i < num_ligs; i++) - { - const Ligature &lig = this+ligature[i]; - if (lig.would_apply (c)) - return_trace (true); - } - return_trace (false); + return + + hb_iter (ligature) + | hb_map (hb_add (this)) + | hb_map ([c] (const Ligature &_) { return _.would_apply (c); }) + | hb_any + ; } bool apply (hb_ot_apply_context_t *c) const @@ -819,16 +925,16 @@ struct LigatureSet } bool serialize (hb_serialize_context_t *c, - hb_array_t ligatures, + hb_array_t ligatures, hb_array_t component_count_list, - hb_array_t &component_list /* Starting from second for each ligature */) + hb_array_t &component_list /* Starting from second for each ligature */) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); if (unlikely (!ligature.serialize (c, ligatures.length))) return_trace (false); for (unsigned int i = 0; i < ligatures.length; i++) { - unsigned int component_count = MAX (component_count_list[i] - 1, 0); + unsigned int component_count = (unsigned) hb_max ((int) component_count_list[i] - 1, 0); if (unlikely (!ligature[i].serialize (c, this) .serialize (c, ligatures[i], @@ -839,6 +945,19 @@ struct LigatureSet return_trace (true); } + bool subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + + + hb_iter (ligature) + | hb_filter (subset_offset_array (c, out->ligature, this)) + | hb_drain + ; + return_trace (bool (out->ligature)); + } + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -857,59 +976,55 @@ struct LigatureSubstFormat1 { bool intersects (const hb_set_t *glyphs) const { - unsigned int count = ligatureSet.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */ - if (glyphs->has (iter.get_glyph ()) && - (this+ligatureSet[iter.get_coverage ()]).intersects (glyphs)) - return true; - } - return false; + return + + hb_zip (this+coverage, ligatureSet) + | hb_filter (*glyphs, hb_first) + | hb_map (hb_second) + | hb_map ([this, glyphs] (const OffsetTo &_) + { return (this+_).intersects (glyphs); }) + | hb_any + ; } void closure (hb_closure_context_t *c) const { - unsigned int count = ligatureSet.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. 
https://github.com/harfbuzz/harfbuzz/issues/363 */ - if (c->glyphs->has (iter.get_glyph ())) - (this+ligatureSet[iter.get_coverage ()]).closure (c); - } + + hb_zip (this+coverage, ligatureSet) + | hb_filter (*c->glyphs, hb_first) + | hb_map (hb_second) + | hb_map (hb_add (this)) + | hb_apply ([c] (const LigatureSet &_) { _.closure (c); }) + ; } + void closure_lookups (hb_closure_lookups_context_t *c) const {} + void collect_glyphs (hb_collect_glyphs_context_t *c) const { - if (unlikely (!(this+coverage).add_coverage (c->input))) return; - unsigned int count = ligatureSet.len; - for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ()) - { - if (unlikely (iter.get_coverage () >= count)) - break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */ - (this+ligatureSet[iter.get_coverage ()]).collect_glyphs (c); - } + if (unlikely (!(this+coverage).collect_coverage (c->input))) return; + + + hb_zip (this+coverage, ligatureSet) + | hb_map (hb_second) + | hb_map (hb_add (this)) + | hb_apply ([c] (const LigatureSet &_) { _.collect_glyphs (c); }) + ; } const Coverage &get_coverage () const { return this+coverage; } bool would_apply (hb_would_apply_context_t *c) const { - TRACE_WOULD_APPLY (this); unsigned int index = (this+coverage).get_coverage (c->glyphs[0]); - if (likely (index == NOT_COVERED)) return_trace (false); + if (likely (index == NOT_COVERED)) return false; const LigatureSet &lig_set = this+ligatureSet[index]; - return_trace (lig_set.would_apply (c)); + return lig_set.would_apply (c); } bool apply (hb_ot_apply_context_t *c) const { TRACE_APPLY (this); - unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); + unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint); if (likely (index == NOT_COVERED)) return_trace (false); const LigatureSet &lig_set = this+ligatureSet[index]; @@ -917,11 +1032,11 @@ struct LigatureSubstFormat1 } bool serialize (hb_serialize_context_t *c, - hb_array_t first_glyphs, + hb_sorted_array_t first_glyphs, hb_array_t ligature_per_first_glyph_count_list, - hb_array_t ligatures_list, + hb_array_t ligatures_list, hb_array_t component_count_list, - hb_array_t component_list /* Starting from second for each ligature */) + hb_array_t component_list /* Starting from second for each ligature */) { TRACE_SERIALIZE (this); if (unlikely (!c->extend_min (*this))) return_trace (false); @@ -943,8 +1058,24 @@ struct LigatureSubstFormat1 bool subset (hb_subset_context_t *c) const { TRACE_SUBSET (this); - // TODO(subset) - return_trace (false); + const hb_set_t &glyphset = *c->plan->glyphset (); + const hb_map_t &glyph_map = *c->plan->glyph_map; + + auto *out = c->serializer->start_embed (*this); + if (unlikely (!c->serializer->extend_min (out))) return_trace (false); + out->format = format; + + hb_sorted_vector_t new_coverage; + + hb_zip (this+coverage, ligatureSet) + | hb_filter (glyphset, hb_first) + | hb_filter (subset_offset_array (c, out->ligatureSet, this), hb_second) + | hb_map (hb_first) + | hb_map (glyph_map) + | hb_sink (new_coverage) + ; + out->coverage.serialize (c->serializer, out) + .serialize (c->serializer, new_coverage.iter ()); + return_trace (bool (new_coverage)); } bool sanitize (hb_sanitize_context_t *c) const @@ -968,16 +1099,16 @@ struct LigatureSubstFormat1 struct LigatureSubst { bool serialize (hb_serialize_context_t *c, - hb_array_t first_glyphs, + hb_sorted_array_t first_glyphs, hb_array_t ligature_per_first_glyph_count_list, - hb_array_t ligatures_list, + 
                  hb_array_t ligatures_list,
                  hb_array_t component_count_list,
-                 hb_array_t component_list /* Starting from second for each ligature */)
+                 hb_array_t component_list /* Starting from second for each ligature */)
   {
     TRACE_SERIALIZE (this);
     if (unlikely (!c->extend_min (u.format))) return_trace (false);
     unsigned int format = 1;
-    u.format.set (format);
+    u.format = format;
     switch (u.format) {
     case 1: return_trace (u.format1.serialize (c,
                                                first_glyphs,
@@ -989,13 +1120,13 @@ struct LigatureSubst
     }
   }

-  template
-  typename context_t::return_t dispatch (context_t *c) const
+  template
+  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
   {
     TRACE_DISPATCH (this, u.format);
     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
     switch (u.format) {
-    case 1: return_trace (c->dispatch (u.format1));
+    case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...));
     default:return_trace (c->default_return_value ());
     }
   }
@@ -1015,7 +1146,6 @@ struct ChainContextSubst : ChainContext {};

 struct ExtensionSubst : Extension
 {
   typedef struct SubstLookupSubTable SubTable;
-  bool is_reverse () const;
 };
@@ -1027,7 +1157,7 @@ struct ReverseChainSingleSubstFormat1
     if (!(this+coverage).intersects (glyphs))
       return false;

-    const OffsetArrayOf &lookahead = StructAfter > (backtrack);
+    const OffsetArrayOf &lookahead = StructAfter> (backtrack);

     unsigned int count;
@@ -1046,47 +1176,36 @@ struct ReverseChainSingleSubstFormat1
   void closure (hb_closure_context_t *c) const
   {
-    const OffsetArrayOf &lookahead = StructAfter > (backtrack);
+    if (!intersects (c->glyphs)) return;

-    unsigned int count;
-
-    count = backtrack.len;
-    for (unsigned int i = 0; i < count; i++)
-      if (!(this+backtrack[i]).intersects (c->glyphs))
-        return;
-
-    count = lookahead.len;
-    for (unsigned int i = 0; i < count; i++)
-      if (!(this+lookahead[i]).intersects (c->glyphs))
-        return;
+    const OffsetArrayOf &lookahead = StructAfter> (backtrack);
+    const ArrayOf &substitute = StructAfter> (lookahead);

-    const ArrayOf &substitute = StructAfter > (lookahead);
-    count = substitute.len;
-    for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
-    {
-      if (unlikely (iter.get_coverage () >= count))
-        break; /* Work around malicious fonts.
-                  https://github.com/harfbuzz/harfbuzz/issues/363 */
-      if (c->glyphs->has (iter.get_glyph ()))
-        c->out->add (substitute[iter.get_coverage ()]);
-    }
+    + hb_zip (this+coverage, substitute)
+    | hb_filter (*c->glyphs, hb_first)
+    | hb_map (hb_second)
+    | hb_sink (c->output)
+    ;
   }

+  void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
   void collect_glyphs (hb_collect_glyphs_context_t *c) const
   {
-    if (unlikely (!(this+coverage).add_coverage (c->input))) return;
+    if (unlikely (!(this+coverage).collect_coverage (c->input))) return;

     unsigned int count;

     count = backtrack.len;
     for (unsigned int i = 0; i < count; i++)
-      if (unlikely (!(this+backtrack[i]).add_coverage (c->before))) return;
+      if (unlikely (!(this+backtrack[i]).collect_coverage (c->before))) return;

-    const OffsetArrayOf &lookahead = StructAfter > (backtrack);
+    const OffsetArrayOf &lookahead = StructAfter> (backtrack);
     count = lookahead.len;
     for (unsigned int i = 0; i < count; i++)
-      if (unlikely (!(this+lookahead[i]).add_coverage (c->after))) return;
+      if (unlikely (!(this+lookahead[i]).collect_coverage (c->after))) return;

-    const ArrayOf &substitute = StructAfter > (lookahead);
+    const ArrayOf &substitute = StructAfter> (lookahead);
     count = substitute.len;
     c->output->add_array (substitute.arrayZ, substitute.len);
   }
@@ -1094,10 +1213,7 @@ struct ReverseChainSingleSubstFormat1
   const Coverage &get_coverage () const { return this+coverage; }

   bool would_apply (hb_would_apply_context_t *c) const
-  {
-    TRACE_WOULD_APPLY (this);
-    return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
-  }
+  { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

   bool apply (hb_ot_apply_context_t *c) const
   {
@@ -1105,13 +1221,15 @@ struct ReverseChainSingleSubstFormat1
     if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL)) return_trace (false); /* No chaining to this type */

-    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
+    unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
     if (likely (index == NOT_COVERED)) return_trace (false);

-    const OffsetArrayOf &lookahead = StructAfter > (backtrack);
-    const ArrayOf &substitute = StructAfter > (lookahead);
+    const OffsetArrayOf &lookahead = StructAfter> (backtrack);
+    const ArrayOf &substitute = StructAfter> (lookahead);
+
+    if (unlikely (index >= substitute.len)) return_trace (false);

-    unsigned int start_index = 0, end_index = 0;
+    unsigned int start_index = 0, end_index = 0;
     if (match_backtrack (c,
                          backtrack.len, (HBUINT16 *) backtrack.arrayZ,
                          match_coverage, this,
@@ -1144,10 +1262,10 @@ struct ReverseChainSingleSubstFormat1
     TRACE_SANITIZE (this);
     if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
       return_trace (false);
-    const OffsetArrayOf &lookahead = StructAfter > (backtrack);
+    const OffsetArrayOf &lookahead = StructAfter> (backtrack);
     if (!lookahead.sanitize (c, this))
       return_trace (false);
-    const ArrayOf &substitute = StructAfter > (lookahead);
+    const ArrayOf &substitute = StructAfter> (lookahead);
     return_trace (substitute.sanitize (c));
   }
@@ -1164,7 +1282,7 @@ struct ReverseChainSingleSubstFormat1
                 lookaheadX;             /* Array of coverage tables
                                          * in lookahead sequence, in glyph
                                          * sequence order */
-  ArrayOf
+  ArrayOf
                 substituteX;            /* Array of substitute
                                          * GlyphIDs--ordered by Coverage Index */
   public:
@@ -1173,13 +1291,13 @@ struct ReverseChainSingleSubst
 {
-  template
-  typename context_t::return_t dispatch (context_t *c) const
+  template
+  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
   {
     TRACE_DISPATCH (this, u.format);
     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
     switch (u.format) {
-    case 1: return_trace (c->dispatch (u.format1));
+    case 1: return_trace (c->dispatch (u.format1, hb_forward (ds)...));
     default:return_trace (c->default_return_value ());
     }
   }
@@ -1213,23 +1331,29 @@ struct SubstLookupSubTable
     ReverseChainSingle = 8
   };

-  template
-  typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
+  template
+  typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
   {
     TRACE_DISPATCH (this, lookup_type);
     switch (lookup_type) {
-    case Single: return_trace (u.single.dispatch (c));
-    case Multiple: return_trace (u.multiple.dispatch (c));
-    case Alternate: return_trace (u.alternate.dispatch (c));
-    case Ligature: return_trace (u.ligature.dispatch (c));
-    case Context: return_trace (u.context.dispatch (c));
-    case ChainContext: return_trace (u.chainContext.dispatch (c));
-    case Extension: return_trace (u.extension.dispatch (c));
-    case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c));
+    case Single: return_trace (u.single.dispatch (c, hb_forward (ds)...));
+    case Multiple: return_trace (u.multiple.dispatch (c, hb_forward (ds)...));
+    case Alternate: return_trace (u.alternate.dispatch (c, hb_forward (ds)...));
+    case Ligature: return_trace (u.ligature.dispatch (c, hb_forward (ds)...));
+    case Context: return_trace (u.context.dispatch (c, hb_forward (ds)...));
+    case ChainContext: return_trace (u.chainContext.dispatch (c, hb_forward (ds)...));
+    case Extension: return_trace (u.extension.dispatch (c, hb_forward (ds)...));
+    case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c, hb_forward (ds)...));
     default: return_trace (c->default_return_value ());
     }
   }

+  bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
+  {
+    hb_intersects_context_t c (glyphs);
+    return dispatch (&c, lookup_type);
+  }
+
   protected:
   union {
   SingleSubst single;
@@ -1253,14 +1377,14 @@ struct SubstLookup : Lookup
   const SubTable& get_subtable (unsigned int i) const
   { return Lookup::get_subtable (i); }

-  static bool lookup_type_is_reverse (unsigned int lookup_type)
+  static inline bool lookup_type_is_reverse (unsigned int lookup_type)
   { return lookup_type == SubTable::ReverseChainSingle; }

   bool is_reverse () const
   {
     unsigned int type = get_type ();
     if (unlikely (type == SubTable::Extension))
-      return CastR (get_subtable(0)).is_reverse ();
+      return reinterpret_cast (get_subtable (0)).is_reverse ();
     return lookup_type_is_reverse (type);
   }
@@ -1290,6 +1414,24 @@ struct SubstLookup : Lookup
     return ret;
   }

+  hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
+  {
+    if (c->is_lookup_visited (this_index))
+      return hb_closure_lookups_context_t::default_return_value ();
+
+    c->set_lookup_visited (this_index);
+    if (!intersects (c->glyphs))
+    {
+      c->set_lookup_inactive (this_index);
+      return hb_closure_lookups_context_t::default_return_value ();
+    }
+
+    c->set_recurse_func (dispatch_closure_lookups_recurse_func);
+
+    hb_closure_lookups_context_t::return_t ret = dispatch (c);
+    return ret;
+  }
+
   hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
   {
     c->set_recurse_func (dispatch_recurse_func);
@@ -1297,90 +1439,93 @@ struct SubstLookup : Lookup
   }

   template
-  void add_coverage (set_t *glyphs) const
+  void collect_coverage (set_t *glyphs) const
   {
-    hb_add_coverage_context_t c (glyphs);
+    hb_collect_coverage_context_t c (glyphs);
     dispatch (&c);
   }

   bool would_apply (hb_would_apply_context_t *c,
                     const hb_ot_layout_lookup_accelerator_t *accel) const
   {
-    TRACE_WOULD_APPLY (this);
-    if (unlikely (!c->len)) return_trace (false);
-    if (!accel->may_have (c->glyphs[0])) return_trace (false);
-    return_trace (dispatch (c));
+    if (unlikely (!c->len)) return false;
+    if (!accel->may_have (c->glyphs[0])) return false;
+    return dispatch (c);
   }

-  static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
+  static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);

   SubTable& serialize_subtable (hb_serialize_context_t *c,
-                                unsigned int i)
+                                unsigned int i)
   { return get_subtables ()[i].serialize (c, this); }

   bool serialize_single (hb_serialize_context_t *c,
                          uint32_t lookup_props,
-                         hb_array_t glyphs,
-                         hb_array_t substitutes)
+                         hb_sorted_array_t glyphs,
+                         hb_array_t substitutes)
   {
     TRACE_SERIALIZE (this);
     if (unlikely (!Lookup::serialize (c, SubTable::Single, lookup_props, 1))) return_trace (false);
-    return_trace (serialize_subtable (c, 0).u.single.serialize (c, glyphs, substitutes));
+    return_trace (serialize_subtable (c, 0).u.single.
+                  serialize (c, hb_zip (glyphs, substitutes)));
   }

   bool serialize_multiple (hb_serialize_context_t *c,
                            uint32_t lookup_props,
-                           hb_array_t glyphs,
+                           hb_sorted_array_t glyphs,
                            hb_array_t substitute_len_list,
-                           hb_array_t substitute_glyphs_list)
+                           hb_array_t substitute_glyphs_list)
   {
     TRACE_SERIALIZE (this);
     if (unlikely (!Lookup::serialize (c, SubTable::Multiple, lookup_props, 1))) return_trace (false);
-    return_trace (serialize_subtable (c, 0).u.multiple.serialize (c,
-                                                                   glyphs,
-                                                                   substitute_len_list,
-                                                                   substitute_glyphs_list));
+    return_trace (serialize_subtable (c, 0).u.multiple.
+                  serialize (c,
+                             glyphs,
+                             substitute_len_list,
+                             substitute_glyphs_list));
   }

   bool serialize_alternate (hb_serialize_context_t *c,
                             uint32_t lookup_props,
-                            hb_array_t glyphs,
+                            hb_sorted_array_t glyphs,
                             hb_array_t alternate_len_list,
-                            hb_array_t alternate_glyphs_list)
+                            hb_array_t alternate_glyphs_list)
   {
     TRACE_SERIALIZE (this);
     if (unlikely (!Lookup::serialize (c, SubTable::Alternate, lookup_props, 1))) return_trace (false);
-    return_trace (serialize_subtable (c, 0).u.alternate.serialize (c,
-                                                                    glyphs,
-                                                                    alternate_len_list,
-                                                                    alternate_glyphs_list));
+    return_trace (serialize_subtable (c, 0).u.alternate.
+                  serialize (c,
+                             glyphs,
+                             alternate_len_list,
+                             alternate_glyphs_list));
   }

   bool serialize_ligature (hb_serialize_context_t *c,
                            uint32_t lookup_props,
-                           hb_array_t first_glyphs,
+                           hb_sorted_array_t first_glyphs,
                            hb_array_t ligature_per_first_glyph_count_list,
-                           hb_array_t ligatures_list,
+                           hb_array_t ligatures_list,
                            hb_array_t component_count_list,
-                           hb_array_t component_list /* Starting from second for each ligature */)
+                           hb_array_t component_list /* Starting from second for each ligature */)
   {
     TRACE_SERIALIZE (this);
     if (unlikely (!Lookup::serialize (c, SubTable::Ligature, lookup_props, 1))) return_trace (false);
-    return_trace (serialize_subtable (c, 0).u.ligature.serialize (c,
-                                                                   first_glyphs,
-                                                                   ligature_per_first_glyph_count_list,
-                                                                   ligatures_list,
-                                                                   component_count_list,
-                                                                   component_list));
+    return_trace (serialize_subtable (c, 0).u.ligature.
+                  serialize (c,
+                             first_glyphs,
+                             ligature_per_first_glyph_count_list,
+                             ligatures_list,
+                             component_count_list,
+                             component_list));
   }

   template
-  static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
+  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

-  static hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
+  static inline hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
   {
     if (!c->should_visit_lookup (lookup_index))
-      return HB_VOID;
+      return hb_empty_t ();

     hb_closure_context_t::return_t ret = dispatch_recurse_func (c, lookup_index);
@@ -1392,9 +1537,11 @@ struct SubstLookup : Lookup
     return ret;
   }

-  template
-  typename context_t::return_t dispatch (context_t *c) const
-  { return Lookup::dispatch (c); }
+  HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned lookup_index);
+
+  template
+  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+  { return Lookup::dispatch (c, hb_forward (ds)...); }

   bool subset (hb_subset_context_t *c) const
   { return Lookup::subset (c); }
@@ -1413,17 +1560,25 @@ struct GSUB : GSUBGPOS
   static constexpr hb_tag_t tableTag = HB_OT_TAG_GSUB;

   const SubstLookup& get_lookup (unsigned int i) const
-  { return CastR (GSUBGPOS::get_lookup (i)); }
+  { return static_cast