/*
 * Copyright © 2007,2008,2009  Red Hat, Inc.
 * Copyright © 2010,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_COMMON_HH
#define HB_OT_LAYOUT_COMMON_HH

#include "hb.hh"
#include "hb-ot-layout.hh"
#include "hb-open-type.hh"
#include "hb-set.hh"
#include "hb-bimap.hh"


#ifndef HB_MAX_NESTING_LEVEL
#define HB_MAX_NESTING_LEVEL 6
#endif

#ifndef HB_MAX_CONTEXT_LENGTH
#define HB_MAX_CONTEXT_LENGTH 64
#endif

#ifndef HB_CLOSURE_MAX_STAGES
/*
 * The maximum number of times a lookup can be applied during shaping.
 * Used to limit the number of iterations of the closure algorithm.
 * This must be larger than the number of times add_pause() is
 * called in a collect_features call of any shaper.
 */
#define HB_CLOSURE_MAX_STAGES 32
#endif

#ifndef HB_MAX_SCRIPTS
#define HB_MAX_SCRIPTS 500
#endif

#ifndef HB_MAX_LANGSYS
#define HB_MAX_LANGSYS 2000
#endif

#ifndef HB_MAX_FEATURES
#define HB_MAX_FEATURES 750
#endif

#ifndef HB_MAX_FEATURE_INDICES
#define HB_MAX_FEATURE_INDICES 1500
#endif

#ifndef HB_MAX_LOOKUP_VISIT_COUNT
#define HB_MAX_LOOKUP_VISIT_COUNT 35000
#endif


namespace OT {

#define NOT_COVERED ((unsigned int) -1)


template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
                                       Iterator it);

template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
                                       Iterator it);

static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
                                          const hb_map_t &gid_klass_map,
                                          hb_sorted_vector_t<HBGlyphID> &glyphs,
                                          const hb_set_t &klasses,
                                          bool use_class_zero,
                                          hb_map_t *klass_map /*INOUT*/);


struct hb_prune_langsys_context_t
{
  hb_prune_langsys_context_t (const void *table_,
                              hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
                              const hb_map_t *duplicate_feature_map_,
                              hb_set_t *new_collected_feature_indexes_)
      : table (table_),
        script_langsys_map (script_langsys_map_),
        duplicate_feature_map (duplicate_feature_map_),
        new_feature_indexes (new_collected_feature_indexes_),
        script_count (0), langsys_count (0) {}

  bool visitedScript (const void *s)
  {
    if (script_count++ > HB_MAX_SCRIPTS)
      return true;

    return visited (s, visited_script);
  }

  bool visitedLangsys (const void *l)
  {
    if (langsys_count++ > HB_MAX_LANGSYS)
      return true;

    return visited (l, visited_langsys);
  }

  private:
  template <typename T>
  bool visited (const T *p, hb_set_t &visited_set)
  {
    hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) p - (uintptr_t) table);
    if (visited_set.in_error () || visited_set.has (delta))
      return true;

    visited_set.add (delta);
    return false;
  }

  public:
  const void *table;
  hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
  const hb_map_t *duplicate_feature_map;
  hb_set_t *new_feature_indexes;

  private:
  hb_set_t visited_script;
  hb_set_t visited_langsys;
  unsigned script_count;
  unsigned langsys_count;
};
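/* Illustrative usage sketch, not part of the original source: visited() keys
 * each Script/LangSys by its byte offset from `table`, so a LangSys that is
 * shared by several scripts is only walked once.  Roughly:
 *
 *   hb_prune_langsys_context_t c (gsub_table, &script_langsys_map,
 *                                 &duplicate_feature_map, &feature_indexes);
 *   if (!c.visitedLangsys (&langsys))    // first visit returns false: process it
 *     ... collect the LangSys' feature indexes ...
 *   c.visitedLangsys (&langsys);         // repeat visit returns true: skip it
 *
 * `gsub_table` and `langsys` are hypothetical names used only to show the
 * intended call pattern.
 */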
struct hb_subset_layout_context_t :
  hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
{
  const char *get_name () { return "SUBSET_LAYOUT"; }
  static return_t default_return_value () { return hb_empty_t (); }

  bool visitScript ()
  { return script_count++ < HB_MAX_SCRIPTS; }

  bool visitLangSys ()
  { return langsys_count++ < HB_MAX_LANGSYS; }

  bool visitFeatureIndex (int count)
  {
    feature_index_count += count;
    return feature_index_count < HB_MAX_FEATURE_INDICES;
  }

  bool visitLookupIndex ()
  {
    lookup_index_count++;
    return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT;
  }

  hb_subset_context_t *subset_context;
  const hb_tag_t table_tag;
  const hb_map_t *lookup_index_map;
  const hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
  const hb_map_t *feature_index_map;
  unsigned cur_script_index;

  hb_subset_layout_context_t (hb_subset_context_t *c_,
                              hb_tag_t tag_,
                              hb_map_t *lookup_map_,
                              hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
                              hb_map_t *feature_index_map_) :
                                subset_context (c_),
                                table_tag (tag_),
                                lookup_index_map (lookup_map_),
                                script_langsys_map (script_langsys_map_),
                                feature_index_map (feature_index_map_),
                                cur_script_index (0xFFFFu),
                                script_count (0),
                                langsys_count (0),
                                feature_index_count (0),
                                lookup_index_count (0) {}

  private:
  unsigned script_count;
  unsigned langsys_count;
  unsigned feature_index_count;
  unsigned lookup_index_count;
};

struct hb_collect_variation_indices_context_t :
  hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_set_t *layout_variation_indices;
  const hb_set_t *glyph_set;
  const hb_map_t *gpos_lookups;

  hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
                                          const hb_set_t *glyph_set_,
                                          const hb_map_t *gpos_lookups_) :
                                        layout_variation_indices (layout_variation_indices_),
                                        glyph_set (glyph_set_),
                                        gpos_lookups (gpos_lookups_) {}
};

template<typename OutputArray>
struct subset_offset_array_t
{
  subset_offset_array_t (hb_subset_context_t *subset_context_,
                         OutputArray& out_,
                         const void *base_) : subset_context (subset_context_),
                                              out (out_), base (base_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto snap = subset_context->serializer->snapshot ();
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    bool ret = o->serialize_subset (subset_context, offset, base);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
};


template<typename OutputArray, typename Arg>
struct subset_offset_array_arg_t
{
  subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
                             OutputArray& out_,
                             const void *base_,
                             Arg &&arg_) : subset_context (subset_context_), out (out_),
                                           base (base_), arg (arg_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto snap = subset_context->serializer->snapshot ();
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    bool ret = o->serialize_subset (subset_context, offset, base, arg);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
  Arg &&arg;
};

/*
 * Helper to subset an array of offsets.  Subsets the thing pointed to by each
 * offset and discards the offset in the array if the subset operation results
 * in an empty thing.
 */
struct
{
  template<typename OutputArray>
  subset_offset_array_t<OutputArray>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
               const void *base) const
  { return subset_offset_array_t<OutputArray> (subset_context, out, base); }

  /* Variant with one extra argument passed to serialize_subset. */
  template<typename OutputArray, typename Arg>
  subset_offset_array_arg_t<OutputArray, Arg>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
               const void *base, Arg &&arg) const
  { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
}
HB_FUNCOBJ (subset_offset_array);
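/* Illustrative usage sketch, not part of the original source: the
 * subset_offset_array functor is normally the sink of an iterator pipeline
 * inside a table's subset() method, roughly:
 *
 *   + hb_iter (coverageOffsets)
 *   | hb_apply (subset_offset_array (c, out->coverageOffsets, this))
 *   ;
 *
 * Each offset's target is subsetted into `out`; when subsetting yields
 * nothing, the serializer snapshot is reverted and the appended offset is
 * popped again.  `coverageOffsets` is a hypothetical member name used only
 * for illustration.
 */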
template<typename OutputArray>
struct subset_record_array_t
{
  subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
                         const void *base_) : subset_layout_context (c_),
                                              out (out_), base (base_) {}

  template <typename T>
  void operator () (T&& record)
  {
    auto snap = subset_layout_context->subset_context->serializer->snapshot ();
    bool ret = record.subset (subset_layout_context, base);
    if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
    else out->len++;
  }

  private:
  hb_subset_layout_context_t *subset_layout_context;
  OutputArray *out;
  const void *base;
};

/*
 * Helper to subset a RecordList/record array.  Subsets each Record in the
 * array and discards the record if the subset operation returns false.
 */
struct
{
  template<typename OutputArray>
  subset_record_array_t<OutputArray>
  operator () (hb_subset_layout_context_t *c, OutputArray* out,
               const void *base) const
  { return subset_record_array_t<OutputArray> (c, out, base); }
}
HB_FUNCOBJ (subset_record_array);


template<typename OutputArray>
struct serialize_math_record_array_t
{
  serialize_math_record_array_t (hb_serialize_context_t *serialize_context_,
                                 OutputArray& out_,
                                 const void *base_) : serialize_context (serialize_context_),
                                                      out (out_), base (base_) {}

  template <typename T>
  bool operator () (T&& record)
  {
    if (!serialize_context->copy (record, base)) return false;
    out.len++;
    return true;
  }

  private:
  hb_serialize_context_t *serialize_context;
  OutputArray &out;
  const void *base;
};

/*
 * Helper to serialize an array of MATH records.
 */
struct
{
  template<typename OutputArray>
  serialize_math_record_array_t<OutputArray>
  operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
               const void *base) const
  { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }
}
HB_FUNCOBJ (serialize_math_record_array);
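/* Illustrative usage sketch, not part of the original source:
 * serialize_math_record_array follows the same pipeline pattern as the subset
 * helpers above, copying each record of a MATH array into the serializer,
 * roughly:
 *
 *   + hb_iter (kernRecords)
 *   | hb_apply (serialize_math_record_array (c->serializer, out->kernRecords, this))
 *   ;
 *
 * Every record that copies successfully bumps out.len.  `kernRecords` is a
 * hypothetical member name used only for illustration.
 */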
/*
 *
 * OpenType Layout Common Table Formats
 *
 */


/*
 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
 */

struct Record_sanitize_closure_t {
  hb_tag_t tag;
  const void *list_base;
};

template <typename Type>
struct Record
{
  int cmp (hb_tag_t a) const { return tag.cmp (a); }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);
    bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    const Record_sanitize_closure_t closure = {tag, base};
    return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
  }

  Tag           tag;            /* 4-byte Tag identifier */
  Offset16To<Type>
                offset;         /* Offset from beginning of object holding
                                 * the Record */
  public:
  DEFINE_SIZE_STATIC (6);
};

template <typename Type>
struct RecordArrayOf : SortedArray16Of<Record<Type>>
{
  const Offset16To<Type>& get_offset (unsigned int i) const
  { return (*this)[i].offset; }
  Offset16To<Type>& get_offset (unsigned int i)
  { return (*this)[i].offset; }
  const Tag& get_tag (unsigned int i) const
  { return (*this)[i].tag; }
  unsigned int get_tags (unsigned int start_offset,
                         unsigned int *record_count /* IN/OUT */,
                         hb_tag_t     *record_tags /* OUT */) const
  {
    if (record_count)
    {
      + this->sub_array (start_offset, record_count)
      | hb_map (&Record<Type>::tag)
      | hb_sink (hb_array (record_tags, *record_count))
      ;
    }
    return this->len;
  }
  bool find_index (hb_tag_t tag, unsigned int *index) const
  { return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX); }
};

template <typename Type>
struct RecordListOf : RecordArrayOf<Type>
{
  const Type& operator [] (unsigned int i) const
  { return this+this->get_offset (i); }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + this->iter ()
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (RecordArrayOf<Type>::sanitize (c, this));
  }
};

struct Feature;

struct RecordListOfFeature : RecordListOf<Feature>
{
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    + hb_zip (*this, hb_range (count))
    | hb_filter (l->feature_index_map, hb_second)
    | hb_map (hb_first)
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }
};

struct Script;
struct RecordListOfScript : RecordListOf<Script>