File: root/firefox-clang/gfx/harfbuzz/src/graph/../hb-ot-layout-common.hh
Warning: line 3777, column 7: Value stored to 'varStore' is never read
1 | /* |
2 | * Copyright © 2007,2008,2009 Red Hat, Inc. |
3 | * Copyright © 2010,2012 Google, Inc. |
4 | * |
5 | * This is part of HarfBuzz, a text shaping library. |
6 | * |
7 | * Permission is hereby granted, without written agreement and without |
8 | * license or royalty fees, to use, copy, modify, and distribute this |
9 | * software and its documentation for any purpose, provided that the |
10 | * above copyright notice and the following two paragraphs appear in |
11 | * all copies of this software. |
12 | * |
13 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
14 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
15 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
16 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
17 | * DAMAGE. |
18 | * |
19 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
20 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
21 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
22 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
23 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
24 | * |
25 | * Red Hat Author(s): Behdad Esfahbod |
26 | * Google Author(s): Behdad Esfahbod |
27 | */ |
28 | |
29 | #ifndef HB_OT_LAYOUT_COMMON_HH |
30 | #define HB_OT_LAYOUT_COMMON_HH |
31 | |
32 | #include "hb.hh" |
33 | #include "hb-ot-layout.hh" |
34 | #include "hb-open-type.hh" |
35 | #include "hb-set.hh" |
36 | #include "hb-bimap.hh" |
37 | #include "hb-cache.hh" |
38 | |
39 | #include "OT/Layout/Common/Coverage.hh" |
40 | #include "OT/Layout/types.hh" |
41 | |
42 | // TODO(garretrieger): cleanup these after migration. |
43 | using OT::Layout::Common::Coverage; |
44 | using OT::Layout::Common::RangeRecord; |
45 | using OT::Layout::SmallTypes; |
46 | using OT::Layout::MediumTypes; |
47 | |
48 | |
49 | namespace OT { |
50 | |
51 | template<typename Iterator> |
52 | static inline bool ClassDef_serialize (hb_serialize_context_t *c, |
53 | Iterator it); |
54 | |
55 | static bool ClassDef_remap_and_serialize ( |
56 | hb_serialize_context_t *c, |
57 | const hb_set_t &klasses, |
58 | bool use_class_zero, |
59 | hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */ |
60 | hb_map_t *klass_map /*IN/OUT*/); |
61 | |
62 | struct hb_collect_feature_substitutes_with_var_context_t |
63 | { |
64 | const hb_map_t *axes_index_tag_map; |
65 | const hb_hashmap_t<hb_tag_t, Triple> *axes_location; |
66 | hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map; |
67 | hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map; |
68 | hb_set_t& catch_all_record_feature_idxes; |
69 | |
70 | // not stored in subset_plan |
71 | hb_set_t *feature_indices; |
72 | bool apply; |
73 | bool variation_applied; |
74 | bool universal; |
75 | unsigned cur_record_idx; |
76 | hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> *conditionset_map; |
77 | }; |
78 | |
79 | struct hb_prune_langsys_context_t |
80 | { |
81 | hb_prune_langsys_context_t (const void *table_, |
82 | hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_, |
83 | const hb_map_t *duplicate_feature_map_, |
84 | hb_set_t *new_collected_feature_indexes_) |
85 | :table (table_), |
86 | script_langsys_map (script_langsys_map_), |
87 | duplicate_feature_map (duplicate_feature_map_), |
88 | new_feature_indexes (new_collected_feature_indexes_), |
89 | script_count (0),langsys_feature_count (0) {} |
90 | |
91 | bool visitScript () |
92 | { return script_count++ < HB_MAX_SCRIPTS; } |
93 | |
94 | bool visitLangsys (unsigned feature_count) |
95 | { |
96 | langsys_feature_count += feature_count; |
97 | return langsys_feature_count < HB_MAX_LANGSYS_FEATURE_COUNT; |
98 | } |
99 | |
100 | public: |
101 | const void *table; |
102 | hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map; |
103 | const hb_map_t *duplicate_feature_map; |
104 | hb_set_t *new_feature_indexes; |
105 | |
106 | private: |
107 | unsigned script_count; |
108 | unsigned langsys_feature_count; |
109 | }; |
110 | |
111 | struct hb_subset_layout_context_t : |
112 | hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET> |
113 | { |
114 | const char *get_name () { return "SUBSET_LAYOUT"; } |
115 | static return_t default_return_value () { return hb_empty_t (); } |
116 | |
117 | bool visitScript () |
118 | { |
119 | return script_count++ < HB_MAX_SCRIPTS; |
120 | } |
121 | |
122 | bool visitLangSys () |
123 | { |
124 | return langsys_count++ < HB_MAX_LANGSYS; |
125 | } |
126 | |
127 | bool visitFeatureIndex (int count) |
128 | { |
129 | feature_index_count += count; |
130 | return feature_index_count < HB_MAX_FEATURE_INDICES; |
131 | } |
132 | |
133 | bool visitLookupIndex() |
134 | { |
135 | lookup_index_count++; |
136 | return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT; |
137 | } |
138 | |
139 | hb_subset_context_t *subset_context; |
140 | const hb_tag_t table_tag; |
141 | const hb_map_t *lookup_index_map; |
142 | const hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map; |
143 | const hb_map_t *feature_index_map; |
144 | const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map; |
145 | hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map; |
146 | const hb_set_t *catch_all_record_feature_idxes; |
147 | const hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>> *feature_idx_tag_map; |
148 | |
149 | unsigned cur_script_index; |
150 | unsigned cur_feature_var_record_idx; |
151 | |
152 | hb_subset_layout_context_t (hb_subset_context_t *c_, |
153 | hb_tag_t tag_) : |
154 | subset_context (c_), |
155 | table_tag (tag_), |
156 | cur_script_index (0xFFFFu), |
157 | cur_feature_var_record_idx (0u), |
158 | script_count (0), |
159 | langsys_count (0), |
160 | feature_index_count (0), |
161 | lookup_index_count (0) |
162 | { |
163 | if (tag_ == HB_OT_TAG_GSUB) |
164 | { |
165 | lookup_index_map = &c_->plan->gsub_lookups; |
166 | script_langsys_map = &c_->plan->gsub_langsys; |
167 | feature_index_map = &c_->plan->gsub_features; |
168 | feature_substitutes_map = &c_->plan->gsub_feature_substitutes_map; |
169 | feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gsub_feature_record_cond_idx_map; |
170 | catch_all_record_feature_idxes = &c_->plan->gsub_old_features; |
171 | feature_idx_tag_map = &c_->plan->gsub_old_feature_idx_tag_map; |
172 | } |
173 | else |
174 | { |
175 | lookup_index_map = &c_->plan->gpos_lookups; |
176 | script_langsys_map = &c_->plan->gpos_langsys; |
177 | feature_index_map = &c_->plan->gpos_features; |
178 | feature_substitutes_map = &c_->plan->gpos_feature_substitutes_map; |
179 | feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gpos_feature_record_cond_idx_map; |
180 | catch_all_record_feature_idxes = &c_->plan->gpos_old_features; |
181 | feature_idx_tag_map = &c_->plan->gpos_old_feature_idx_tag_map; |
182 | } |
183 | } |
184 | |
185 | private: |
186 | unsigned script_count; |
187 | unsigned langsys_count; |
188 | unsigned feature_index_count; |
189 | unsigned lookup_index_count; |
190 | }; |
191 | |
192 | struct ItemVariationStore; |
193 | struct hb_collect_variation_indices_context_t : |
194 | hb_dispatch_context_t<hb_collect_variation_indices_context_t> |
195 | { |
196 | template <typename T> |
197 | return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); } |
198 | static return_t default_return_value () { return hb_empty_t (); } |
199 | |
200 | hb_set_t *layout_variation_indices; |
201 | const hb_set_t *glyph_set; |
202 | const hb_map_t *gpos_lookups; |
203 | |
204 | hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_, |
205 | const hb_set_t *glyph_set_, |
206 | const hb_map_t *gpos_lookups_) : |
207 | layout_variation_indices (layout_variation_indices_), |
208 | glyph_set (glyph_set_), |
209 | gpos_lookups (gpos_lookups_) {} |
210 | }; |
211 | |
212 | template<typename OutputArray> |
213 | struct subset_offset_array_t |
214 | { |
215 | subset_offset_array_t (hb_subset_context_t *subset_context_, |
216 | OutputArray& out_, |
217 | const void *base_) : subset_context (subset_context_), |
218 | out (out_), base (base_) {} |
219 | |
220 | template <typename T> |
221 | bool operator () (T&& offset) |
222 | { |
223 | auto snap = subset_context->serializer->snapshot (); |
224 | auto *o = out.serialize_append (subset_context->serializer); |
225 | if (unlikely (!o)) return false; |
226 | bool ret = o->serialize_subset (subset_context, offset, base); |
227 | if (!ret) |
228 | { |
229 | out.pop (); |
230 | subset_context->serializer->revert (snap); |
231 | } |
232 | return ret; |
233 | } |
234 | |
235 | private: |
236 | hb_subset_context_t *subset_context; |
237 | OutputArray &out; |
238 | const void *base; |
239 | }; |
240 | |
241 | |
242 | template<typename OutputArray, typename Arg> |
243 | struct subset_offset_array_arg_t |
244 | { |
245 | subset_offset_array_arg_t (hb_subset_context_t *subset_context_, |
246 | OutputArray& out_, |
247 | const void *base_, |
248 | Arg &&arg_) : subset_context (subset_context_), out (out_), |
249 | base (base_), arg (arg_) {} |
250 | |
251 | template <typename T> |
252 | bool operator () (T&& offset) |
253 | { |
254 | auto snap = subset_context->serializer->snapshot (); |
255 | auto *o = out.serialize_append (subset_context->serializer); |
256 | if (unlikely (!o)) return false; |
257 | bool ret = o->serialize_subset (subset_context, offset, base, arg); |
258 | if (!ret) |
259 | { |
260 | out.pop (); |
261 | subset_context->serializer->revert (snap); |
262 | } |
263 | return ret; |
264 | } |
265 | |
266 | private: |
267 | hb_subset_context_t *subset_context; |
268 | OutputArray &out; |
269 | const void *base; |
270 | Arg &&arg; |
271 | }; |
272 | |
273 | /* |
274 | * Helper to subset an array of offsets. Subsets the thing pointed to by each offset |
275 | * and discards the offset in the array if the subset operation results in an empty |
276 | * thing. |
277 | */ |
278 | struct |
279 | { |
280 | template<typename OutputArray> |
281 | subset_offset_array_t<OutputArray> |
282 | operator () (hb_subset_context_t *subset_context, OutputArray& out, |
283 | const void *base) const |
284 | { return subset_offset_array_t<OutputArray> (subset_context, out, base); } |
285 | |
286 | /* Variant with one extra argument passed to serialize_subset */ |
287 | template<typename OutputArray, typename Arg> |
288 | subset_offset_array_arg_t<OutputArray, Arg> |
289 | operator () (hb_subset_context_t *subset_context, OutputArray& out, |
290 | const void *base, Arg &&arg) const |
291 | { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); } |
292 | } |
293 | HB_FUNCOBJ (subset_offset_array); |
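/* Illustrative usage sketch (not part of the original source): a subtable's
 * subset() would typically drive subset_offset_array through hb_apply over an
 * offset array, mirroring the subset_record_array pipelines further below.
 * `subTables` here is a hypothetical Array16OfOffset16To<> member:
 *
 *   + hb_iter (subTables)
 *   | hb_apply (subset_offset_array (c, out->subTables, this))
 *   ;
 *
 * Offsets whose targets subset to nothing are reverted and popped, so they are
 * dropped from out->subTables. */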
294 | |
295 | template<typename OutputArray> |
296 | struct subset_record_array_t |
297 | { |
298 | subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_, |
299 | const void *base_) : subset_layout_context (c_), |
300 | out (out_), base (base_) {} |
301 | |
302 | template <typename T> |
303 | void |
304 | operator () (T&& record) |
305 | { |
306 | auto snap = subset_layout_context->subset_context->serializer->snapshot (); |
307 | bool ret = record.subset (subset_layout_context, base); |
308 | if (!ret) subset_layout_context->subset_context->serializer->revert (snap); |
309 | else out->len++; |
310 | } |
311 | |
312 | private: |
313 | hb_subset_layout_context_t *subset_layout_context; |
314 | OutputArray *out; |
315 | const void *base; |
316 | }; |
317 | |
318 | template<typename OutputArray, typename Arg> |
319 | struct subset_record_array_arg_t |
320 | { |
321 | subset_record_array_arg_t (hb_subset_layout_context_t *c_, OutputArray* out_, |
322 | const void *base_, |
323 | Arg &&arg_) : subset_layout_context (c_), |
324 | out (out_), base (base_), arg (arg_) {} |
325 | |
326 | template <typename T> |
327 | void |
328 | operator () (T&& record) |
329 | { |
330 | auto snap = subset_layout_context->subset_context->serializer->snapshot (); |
331 | bool ret = record.subset (subset_layout_context, base, arg); |
332 | if (!ret) subset_layout_context->subset_context->serializer->revert (snap); |
333 | else out->len++; |
334 | } |
335 | |
336 | private: |
337 | hb_subset_layout_context_t *subset_layout_context; |
338 | OutputArray *out; |
339 | const void *base; |
340 | Arg &&arg; |
341 | }; |
342 | |
343 | /* |
344 | * Helper to subset a RecordList/record array. Subsets each Record in the array and |
345 | * discards the record if the subset operation returns false. |
346 | */ |
347 | struct |
348 | { |
349 | template<typename OutputArray> |
350 | subset_record_array_t<OutputArray> |
351 | operator () (hb_subset_layout_context_t *c, OutputArray* out, |
352 | const void *base) const |
353 | { return subset_record_array_t<OutputArray> (c, out, base); } |
354 | |
355 | /* Variant with one extra argument passed to subset */ |
356 | template<typename OutputArray, typename Arg> |
357 | subset_record_array_arg_t<OutputArray, Arg> |
358 | operator () (hb_subset_layout_context_t *c, OutputArray* out, |
359 | const void *base, Arg &&arg) const |
360 | { return subset_record_array_arg_t<OutputArray, Arg> (c, out, base, arg); } |
361 | } |
362 | HB_FUNCOBJ (subset_record_array); |
363 | |
364 | |
365 | template<typename OutputArray> |
366 | struct serialize_math_record_array_t |
367 | { |
368 | serialize_math_record_array_t (hb_serialize_context_t *serialize_context_, |
369 | OutputArray& out_, |
370 | const void *base_) : serialize_context (serialize_context_), |
371 | out (out_), base (base_) {} |
372 | |
373 | template <typename T> |
374 | bool operator () (T&& record) |
375 | { |
376 | if (!serialize_context->copy (record, base)) return false; |
377 | out.len++; |
378 | return true; |
379 | } |
380 | |
381 | private: |
382 | hb_serialize_context_t *serialize_context; |
383 | OutputArray &out; |
384 | const void *base; |
385 | }; |
386 | |
387 | /* |
388 | * Helper to serialize an array of MATH records. |
389 | */ |
390 | struct |
391 | { |
392 | template<typename OutputArray> |
393 | serialize_math_record_array_t<OutputArray> |
394 | operator () (hb_serialize_context_t *serialize_context, OutputArray& out, |
395 | const void *base) const |
396 | { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); } |
397 | |
398 | } |
399 | HB_FUNCOBJ (serialize_math_record_array); |
400 | |
401 | /* |
402 | * |
403 | * OpenType Layout Common Table Formats |
404 | * |
405 | */ |
406 | |
407 | |
408 | /* |
409 | * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList |
410 | */ |
411 | |
412 | struct IndexArray : Array16Of<Index> |
413 | { |
414 | bool intersects (const hb_map_t *indexes) const |
415 | { return hb_any (*this, indexes); } |
416 | |
417 | template <typename Iterator, |
418 | hb_requires (hb_is_iterator (Iterator))> |
419 | void serialize (hb_serialize_context_t *c, |
420 | hb_subset_layout_context_t *l, |
421 | Iterator it) |
422 | { |
423 | if (!it) return; |
424 | if (unlikely (!c->extend_min ((*this)))) return; |
425 | |
426 | for (const auto _ : it) |
427 | { |
428 | if (!l->visitLookupIndex()) break; |
429 | |
430 | Index i; |
431 | i = _; |
432 | c->copy (i); |
433 | this->len++; |
434 | } |
435 | } |
436 | |
437 | unsigned int get_indexes (unsigned int start_offset, |
438 | unsigned int *_count /* IN/OUT */, |
439 | unsigned int *_indexes /* OUT */) const |
440 | { |
441 | if (_count) |
442 | { |
443 | + this->as_array ().sub_array (start_offset, _count) |
444 | | hb_sink (hb_array (_indexes, *_count)) |
445 | ; |
446 | } |
447 | return this->len; |
448 | } |
449 | |
450 | void add_indexes_to (hb_set_t* output /* OUT */) const |
451 | { |
452 | output->add_array (as_array ()); |
453 | } |
454 | }; |
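/* Illustrative caller pattern for get_indexes() (a sketch, not from the
 * original source): passing a null count asks only for the total; a second
 * call then fills a caller-provided buffer.  `arr` stands for any IndexArray:
 *
 *   unsigned total = arr.get_indexes (0, nullptr, nullptr);
 *   hb_vector_t<unsigned> buf;
 *   if (buf.resize (total))
 *   {
 *     unsigned count = total;
 *     arr.get_indexes (0, &count, buf.arrayZ);
 *   }
 */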
455 | |
456 | |
457 | /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */ |
458 | struct FeatureParamsSize |
459 | { |
460 | bool sanitize (hb_sanitize_context_t *c) const |
461 | { |
462 | TRACE_SANITIZE (this); |
463 | if (unlikely (!c->check_struct (this))) return_trace (false); |
464 | hb_barrier (); |
465 | |
466 | /* This subtable has some "history", if you will. Some earlier versions of |
467 | * Adobe tools calculated the offset of the FeatureParams subtable from the |
468 | * beginning of the FeatureList table! Now, that is dealt with in the |
469 | * Feature implementation. But we still need to be able to tell junk from |
470 | * real data. Note: We don't check that the nameID actually exists. |
471 | * |
472 | * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk : |
473 | * |
474 | * Yes, it is correct that a new version of the AFDKO (version 2.0) will be |
475 | * coming out soon, and that the makeotf program will build a font with a |
476 | * 'size' feature that is correct by the specification. |
477 | * |
478 | * The specification for this feature tag is in the "OpenType Layout Tag |
479 | * Registry". You can see a copy of this at: |
480 | * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size |
481 | * |
482 | * Here is one set of rules to determine if the 'size' feature is built |
483 | * correctly, or as by the older versions of MakeOTF. You may be able to do |
484 | * better. |
485 | * |
486 | * Assume that the offset to the size feature is according to specification, |
487 | * and make the following value checks. If it fails, assume the size |
488 | * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it. |
489 | * If this fails, reject the 'size' feature. The older makeOTF's calculated the |
490 | * offset from the beginning of the FeatureList table, rather than from the |
491 | * beginning of the 'size' Feature table. |
492 | * |
493 | * If "design size" == 0: |
494 | * fails check |
495 | * |
496 | * Else if ("subfamily identifier" == 0 and |
497 | * "range start" == 0 and |
498 | * "range end" == 0 and |
499 | * "range start" == 0 and |
500 | * "menu name ID" == 0) |
501 | * passes check: this is the format used when there is a design size |
502 | * specified, but there is no recommended size range. |
503 | * |
504 | * Else if ("design size" < "range start" or |
505 | * "design size" > "range end" or |
506 | * "range end" <= "range start" or |
507 | * "menu name ID" < 256 or |
508 | * "menu name ID" > 32767 or |
509 | * menu name ID is not a name ID which is actually in the name table) |
510 | * fails test |
511 | * Else |
512 | * passes test. |
513 | */ |
514 | |
515 | if (!designSize) |
516 | return_trace (false); |
517 | else if (subfamilyID == 0 && |
518 | subfamilyNameID == 0 && |
519 | rangeStart == 0 && |
520 | rangeEnd == 0) |
521 | return_trace (true); |
522 | else if (designSize < rangeStart || |
523 | designSize > rangeEnd || |
524 | subfamilyNameID < 256 || |
525 | subfamilyNameID > 32767) |
526 | return_trace (false); |
527 | else |
528 | return_trace (true); |
529 | } |
530 | |
531 | void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const |
532 | { nameids_to_retain->add (subfamilyNameID); } |
533 | |
534 | bool subset (hb_subset_context_t *c) const |
535 | { |
536 | TRACE_SUBSET (this); |
537 | return_trace ((bool) c->serializer->embed (*this)); |
538 | } |
539 | |
540 | HBUINT16 designSize; /* Represents the design size in 720/inch |
541 | * units (decipoints). The design size entry |
542 | * must be non-zero. When there is a design |
543 | * size but no recommended size range, the |
544 | * rest of the array will consist of zeros. */ |
545 | HBUINT16 subfamilyID; /* Has no independent meaning, but serves |
546 | * as an identifier that associates fonts |
547 | * in a subfamily. All fonts which share a |
548 | * Preferred or Font Family name and which |
549 | * differ only by size range shall have the |
550 | * same subfamily value, and no fonts which |
551 | * differ in weight or style shall have the |
552 | * same subfamily value. If this value is |
553 | * zero, the remaining fields in the array |
554 | * will be ignored. */ |
555 | NameID subfamilyNameID;/* If the preceding value is non-zero, this |
556 | * value must be set in the range 256 - 32767 |
557 | * (inclusive). It records the value of a |
558 | * field in the name table, which must |
559 | * contain English-language strings encoded |
560 | * in Windows Unicode and Macintosh Roman, |
561 | * and may contain additional strings |
562 | * localized to other scripts and languages. |
563 | * Each of these strings is the name an |
564 | * application should use, in combination |
565 | * with the family name, to represent the |
566 | * subfamily in a menu. Applications will |
567 | * choose the appropriate version based on |
568 | * their selection criteria. */ |
569 | HBUINT16 rangeStart; /* Small end of the recommended usage range |
570 | * (exclusive), stored in 720/inch units |
571 | * (decipoints). */ |
572 | HBUINT16 rangeEnd; /* Large end of the recommended usage range |
573 | * (inclusive), stored in 720/inch units |
574 | * (decipoints). */ |
575 | public: |
576 | DEFINE_SIZE_STATIC (10); |
577 | }; |
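/* Worked example of the decipoint units and the checks above (illustrative
 * values, not from any particular font): designSize = 100 means 10 pt, since
 * values are stored in 720/inch units.  designSize = 100 with subfamilyID,
 * subfamilyNameID, rangeStart and rangeEnd all zero passes as "design size
 * only".  designSize = 100 with rangeStart = 80, rangeEnd = 120 and
 * subfamilyNameID in 256..32767 also passes (80 <= 100 <= 120); swapping the
 * two range bounds would fail the designSize > rangeEnd check. */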
578 | |
579 | /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */ |
580 | struct FeatureParamsStylisticSet |
581 | { |
582 | bool sanitize (hb_sanitize_context_t *c) const |
583 | { |
584 | TRACE_SANITIZE (this); |
585 | /* Right now minorVersion is at zero. Which means, any table supports |
586 | * the uiNameID field. */ |
587 | return_trace (c->check_struct (this)); |
588 | } |
589 | |
590 | void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const |
591 | { nameids_to_retain->add (uiNameID); } |
592 | |
593 | bool subset (hb_subset_context_t *c) const |
594 | { |
595 | TRACE_SUBSET (this); |
596 | return_trace ((bool) c->serializer->embed (*this)); |
597 | } |
598 | |
599 | HBUINT16 version; /* (set to 0): This corresponds to a “minor” |
600 | * version number. Additional data may be |
601 | * added to the end of this Feature Parameters |
602 | * table in the future. */ |
603 | |
604 | NameID uiNameID; /* The 'name' table name ID that specifies a |
605 | * string (or strings, for multiple languages) |
606 | * for a user-interface label for this |
607 | * feature. The values of uiLabelNameId and |
608 | * sampleTextNameId are expected to be in the |
609 | * font-specific name ID range (256-32767), |
610 | * though that is not a requirement in this |
611 | * Feature Parameters specification. The |
612 | * user-interface label for the feature can |
613 | * be provided in multiple languages. An |
614 | * English string should be included as a |
615 | * fallback. The string should be kept to a |
616 | * minimal length to fit comfortably with |
617 | * different application interfaces. */ |
618 | public: |
619 | DEFINE_SIZE_STATIC (4); |
620 | }; |
621 | |
622 | /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */ |
623 | struct FeatureParamsCharacterVariants |
624 | { |
625 | unsigned |
626 | get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const |
627 | { |
628 | if (char_count) |
629 | { |
630 | + characters.as_array ().sub_array (start_offset, char_count) |
631 | | hb_sink (hb_array (chars, *char_count)) |
632 | ; |
633 | } |
634 | return characters.len; |
635 | } |
636 | |
637 | unsigned get_size () const |
638 | { return min_size + characters.len * HBUINT24::static_size; } |
639 | |
640 | void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const |
641 | { |
642 | if (featUILableNameID) nameids_to_retain->add (featUILableNameID); |
643 | if (featUITooltipTextNameID) nameids_to_retain->add (featUITooltipTextNameID); |
644 | if (sampleTextNameID) nameids_to_retain->add (sampleTextNameID); |
645 | |
646 | if (!firstParamUILabelNameID || !numNamedParameters || numNamedParameters >= 0x7FFF) |
647 | return; |
648 | |
649 | unsigned last_name_id = (unsigned) firstParamUILabelNameID + (unsigned) numNamedParameters - 1; |
650 | nameids_to_retain->add_range (firstParamUILabelNameID, last_name_id); |
651 | } |
652 | |
653 | bool subset (hb_subset_context_t *c) const |
654 | { |
655 | TRACE_SUBSET (this); |
656 | return_trace ((bool) c->serializer->embed (*this)); |
657 | } |
658 | |
659 | bool sanitize (hb_sanitize_context_t *c) const |
660 | { |
661 | TRACE_SANITIZE (this); |
662 | return_trace (c->check_struct (this) && |
663 | characters.sanitize (c)); |
664 | } |
665 | |
666 | HBUINT16 format; /* Format number is set to 0. */ |
667 | NameID featUILableNameID; /* The ‘name’ table name ID that |
668 | * specifies a string (or strings, |
669 | * for multiple languages) for a |
670 | * user-interface label for this |
671 | * feature. (May be NULL.) */ |
672 | NameID featUITooltipTextNameID;/* The ‘name’ table name ID that |
673 | * specifies a string (or strings, |
674 | * for multiple languages) that an |
675 | * application can use for tooltip |
676 | * text for this feature. (May be |
677 | * nullptr.) */ |
678 | NameID sampleTextNameID; /* The ‘name’ table name ID that |
679 | * specifies sample text that |
680 | * illustrates the effect of this |
681 | * feature. (May be NULL.) */ |
682 | HBUINT16 numNamedParameters; /* Number of named parameters. (May |
683 | * be zero.) */ |
684 | NameID firstParamUILabelNameID;/* The first ‘name’ table name ID |
685 | * used to specify strings for |
686 | * user-interface labels for the |
687 | * feature parameters. (Must be zero |
688 | * if numParameters is zero.) */ |
689 | Array16Of<HBUINT24> |
690 | characters; /* Array of the Unicode Scalar Value |
691 | * of the characters for which this |
692 | * feature provides glyph variants. |
693 | * (May be zero.) */ |
694 | public: |
695 | DEFINE_SIZE_ARRAY (14, characters); |
696 | }; |
697 | |
698 | struct FeatureParams |
699 | { |
700 | bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const |
701 | { |
702 | #ifdef HB_NO_LAYOUT_FEATURE_PARAMS |
703 | return true; |
704 | #endif |
705 | TRACE_SANITIZE (this); |
706 | if (tag == HB_TAG ('s','i','z','e')) |
707 | return_trace (u.size.sanitize (c)); |
708 | if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */ |
709 | return_trace (u.stylisticSet.sanitize (c)); |
710 | if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */ |
711 | return_trace (u.characterVariants.sanitize (c)); |
712 | return_trace (true); |
713 | } |
714 | |
715 | void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const |
716 | { |
717 | #ifdef HB_NO_LAYOUT_FEATURE_PARAMS |
718 | return; |
719 | #endif |
720 | if (tag == HB_TAG ('s','i','z','e')) |
721 | return (u.size.collect_name_ids (nameids_to_retain)); |
722 | if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */ |
723 | return (u.stylisticSet.collect_name_ids (nameids_to_retain)); |
724 | if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */ |
725 | return (u.characterVariants.collect_name_ids (nameids_to_retain)); |
726 | } |
727 | |
728 | bool subset (hb_subset_context_t *c, const Tag* tag) const |
729 | { |
730 | TRACE_SUBSET (this); |
731 | if (!tag) return_trace (false); |
732 | if (*tag == HB_TAG ('s','i','z','e')) |
733 | return_trace (u.size.subset (c)); |
734 | if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */ |
735 | return_trace (u.stylisticSet.subset (c)); |
736 | if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */ |
737 | return_trace (u.characterVariants.subset (c)); |
738 | return_trace (false); |
739 | } |
740 | |
741 | #ifndef HB_NO_LAYOUT_FEATURE_PARAMS |
742 | const FeatureParamsSize& get_size_params (hb_tag_t tag) const |
743 | { |
744 | if (tag == HB_TAG ('s','i','z','e')) |
745 | return u.size; |
746 | return Null (FeatureParamsSize); |
747 | } |
748 | const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const |
749 | { |
750 | if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */ |
751 | return u.stylisticSet; |
752 | return Null (FeatureParamsStylisticSet); |
753 | } |
754 | const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const |
755 | { |
756 | if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */ |
757 | return u.characterVariants; |
758 | return Null (FeatureParamsCharacterVariants); |
759 | } |
759 | } |
760 | #endif |
761 | |
762 | private: |
763 | union { |
764 | FeatureParamsSize size; |
765 | FeatureParamsStylisticSet stylisticSet; |
766 | FeatureParamsCharacterVariants characterVariants; |
767 | } u; |
768 | public: |
769 | DEFINE_SIZE_MIN (0); |
770 | }; |
771 | |
772 | struct Record_sanitize_closure_t { |
773 | hb_tag_t tag; |
774 | const void *list_base; |
775 | }; |
776 | |
777 | struct Feature |
778 | { |
779 | unsigned int get_lookup_count () const |
780 | { return lookupIndex.len; } |
781 | hb_tag_t get_lookup_index (unsigned int i) const |
782 | { return lookupIndex[i]; } |
783 | unsigned int get_lookup_indexes (unsigned int start_index, |
784 | unsigned int *lookup_count /* IN/OUT */, |
785 | unsigned int *lookup_tags /* OUT */) const |
786 | { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); } |
787 | void add_lookup_indexes_to (hb_set_t *lookup_indexes) const |
788 | { lookupIndex.add_indexes_to (lookup_indexes); } |
789 | |
790 | const FeatureParams &get_feature_params () const |
791 | { return this+featureParams; } |
792 | |
793 | bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const |
794 | { return lookupIndex.intersects (lookup_indexes); } |
795 | |
796 | void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const |
797 | { |
798 | if (featureParams) |
799 | get_feature_params ().collect_name_ids (tag, nameids_to_retain); |
800 | } |
801 | |
802 | bool subset (hb_subset_context_t *c, |
803 | hb_subset_layout_context_t *l, |
804 | const Tag *tag = nullptr) const |
805 | { |
806 | TRACE_SUBSET (this); |
807 | auto *out = c->serializer->start_embed (*this); |
808 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
809 | |
810 | out->featureParams.serialize_subset (c, featureParams, this, tag); |
811 | |
812 | auto it = |
813 | + hb_iter (lookupIndex) |
814 | | hb_filter (l->lookup_index_map) |
815 | | hb_map (l->lookup_index_map) |
816 | ; |
817 | |
818 | out->lookupIndex.serialize (c->serializer, l, it); |
819 | // The decision to keep or drop this feature is already made before we get here |
820 | // so always retain it. |
821 | return_trace (true); |
822 | } |
823 | |
824 | bool sanitize (hb_sanitize_context_t *c, |
825 | const Record_sanitize_closure_t *closure = nullptr) const |
826 | { |
827 | TRACE_SANITIZE (this); |
828 | if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c)))) |
829 | return_trace (false); |
830 | hb_barrier (); |
831 | |
832 | /* Some earlier versions of Adobe tools calculated the offset of the |
833 | * FeatureParams subtable from the beginning of the FeatureList table! |
834 | * |
835 | * If sanitizing "failed" for the FeatureParams subtable, try it with the |
836 | * alternative location. We would know sanitize "failed" if old value |
837 | * of the offset was non-zero, but it's zeroed now. |
838 | * |
839 | * Only do this for the 'size' feature, since at the time of the faulty |
840 | * Adobe tools, only the 'size' feature had FeatureParams defined. |
841 | */ |
842 | |
843 | if (likely (featureParams.is_null ())) |
844 | return_trace (true); |
845 | |
846 | unsigned int orig_offset = featureParams; |
847 | if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))) |
848 | return_trace (false); |
849 | hb_barrier (); |
850 | |
851 | if (featureParams == 0 && closure && |
852 | closure->tag == HB_TAG ('s','i','z','e') && |
853 | closure->list_base && closure->list_base < this) |
854 | { |
855 | unsigned int new_offset_int = orig_offset - |
856 | (((char *) this) - ((char *) closure->list_base)); |
857 | |
858 | Offset16To<FeatureParams> new_offset; |
859 | /* Check that it would not overflow. */ |
860 | new_offset = new_offset_int; |
861 | if (new_offset == new_offset_int && |
862 | c->try_set (&featureParams, new_offset_int) && |
863 | !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)) |
864 | return_trace (false); |
865 | } |
866 | |
867 | return_trace (true); |
868 | } |
869 | |
870 | Offset16To<FeatureParams> |
871 | featureParams; /* Offset to Feature Parameters table (if one |
872 | * has been defined for the feature), relative |
873 | * to the beginning of the Feature Table; = Null |
874 | * if not required */ |
875 | IndexArray lookupIndex; /* Array of LookupList indices */ |
876 | public: |
877 | DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex); |
878 | }; |
879 | |
880 | template <typename Type> |
881 | struct Record |
882 | { |
883 | int cmp (hb_tag_t a) const { return tag.cmp (a); } |
884 | |
885 | bool subset (hb_subset_layout_context_t *c, const void *base, const void *f_sub = nullptr) const |
886 | { |
887 | TRACE_SUBSET (this); |
888 | auto *out = c->subset_context->serializer->embed (this); |
889 | if (unlikely (!out)) return_trace (false); |
890 | |
891 | if (!f_sub) |
892 | return_trace (out->offset.serialize_subset (c->subset_context, offset, base, c, &tag)); |
893 | |
894 | const Feature& f = *reinterpret_cast<const Feature *> (f_sub); |
895 | auto *s = c->subset_context->serializer; |
896 | s->push (); |
897 | |
898 | out->offset = 0; |
899 | bool ret = f.subset (c->subset_context, c, &tag); |
900 | if (ret) |
901 | s->add_link (out->offset, s->pop_pack ()); |
902 | else |
903 | s->pop_discard (); |
904 | |
905 | return_trace (ret); |
906 | } |
907 | |
908 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
909 | { |
910 | TRACE_SANITIZE (this); |
911 | const Record_sanitize_closure_t closure = {tag, base}; |
912 | return_trace (c->check_struct (this) && |
913 | offset.sanitize (c, base, &closure)); |
914 | } |
915 | |
916 | Tag tag; /* 4-byte Tag identifier */ |
917 | Offset16To<Type> |
918 | offset; /* Offset from beginning of object holding |
919 | * the Record */ |
920 | public: |
921 | DEFINE_SIZE_STATIC (6); |
922 | }; |
923 | |
924 | template <typename Type> |
925 | struct RecordArrayOf : SortedArray16Of<Record<Type>> |
926 | { |
927 | const Offset16To<Type>& get_offset (unsigned int i) const |
928 | { return (*this)[i].offset; } |
929 | Offset16To<Type>& get_offset (unsigned int i) |
930 | { return (*this)[i].offset; } |
931 | const Tag& get_tag (unsigned int i) const |
932 | { return (*this)[i].tag; } |
933 | unsigned int get_tags (unsigned int start_offset, |
934 | unsigned int *record_count /* IN/OUT */, |
935 | hb_tag_t *record_tags /* OUT */) const |
936 | { |
937 | if (record_count) |
938 | { |
939 | + this->as_array ().sub_array (start_offset, record_count) |
940 | | hb_map (&Record<Type>::tag) |
941 | | hb_sink (hb_array (record_tags, *record_count)) |
942 | ; |
943 | } |
944 | return this->len; |
945 | } |
946 | bool find_index (hb_tag_t tag, unsigned int *index) const |
947 | { |
948 | return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX); |
949 | } |
950 | }; |
951 | |
952 | template <typename Type> |
953 | struct RecordListOf : RecordArrayOf<Type> |
954 | { |
955 | const Type& operator [] (unsigned int i) const |
956 | { return this+this->get_offset (i); } |
957 | |
958 | bool subset (hb_subset_context_t *c, |
959 | hb_subset_layout_context_t *l) const |
960 | { |
961 | TRACE_SUBSET (this); |
962 | auto *out = c->serializer->start_embed (*this); |
963 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
964 | |
965 | + this->iter () |
966 | | hb_apply (subset_record_array (l, out, this)) |
967 | ; |
968 | return_trace (true); |
969 | } |
970 | |
971 | bool sanitize (hb_sanitize_context_t *c) const |
972 | { |
973 | TRACE_SANITIZE (this); |
974 | return_trace (RecordArrayOf<Type>::sanitize (c, this)); |
975 | } |
976 | }; |
977 | |
978 | struct RecordListOfFeature : RecordListOf<Feature> |
979 | { |
980 | bool subset (hb_subset_context_t *c, |
981 | hb_subset_layout_context_t *l) const |
982 | { |
983 | TRACE_SUBSET (this); |
984 | auto *out = c->serializer->start_embed (*this); |
985 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
986 | |
987 | + hb_enumerate (*this) |
988 | | hb_filter (l->feature_index_map, hb_first) |
989 | | hb_apply ([l, out, this] (const hb_pair_t<unsigned, const Record<Feature>&>& _) |
990 | { |
991 | const Feature *f_sub = nullptr; |
992 | const Feature **f = nullptr; |
993 | if (l->feature_substitutes_map->has (_.first, &f)) |
994 | f_sub = *f; |
995 | |
996 | subset_record_array (l, out, this, f_sub) (_.second); |
997 | }) |
998 | ; |
999 | |
1000 | return_trace (true); |
1001 | } |
1002 | }; |
1003 | |
1004 | typedef RecordListOf<Feature> FeatureList; |
1005 | |
1006 | |
1007 | struct LangSys |
1008 | { |
1009 | unsigned int get_feature_count () const |
1010 | { return featureIndex.len; } |
1011 | hb_tag_t get_feature_index (unsigned int i) const |
1012 | { return featureIndex[i]; } |
1013 | unsigned int get_feature_indexes (unsigned int start_offset, |
1014 | unsigned int *feature_count /* IN/OUT */, |
1015 | unsigned int *feature_indexes /* OUT */) const |
1016 | { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); } |
1017 | void add_feature_indexes_to (hb_set_t *feature_indexes) const |
1018 | { featureIndex.add_indexes_to (feature_indexes); } |
1019 | |
1020 | bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; } |
1021 | unsigned int get_required_feature_index () const |
1022 | { |
1023 | if (reqFeatureIndex == 0xFFFFu) |
1024 | return Index::NOT_FOUND_INDEX; |
1025 | return reqFeatureIndex; |
1026 | } |
1027 | |
1028 | LangSys* copy (hb_serialize_context_t *c) const |
1029 | { |
1030 | TRACE_SERIALIZE (this); |
1031 | return_trace (c->embed (*this)); |
1032 | } |
1033 | |
1034 | bool compare (const LangSys& o, const hb_map_t *feature_index_map) const |
1035 | { |
1036 | if (reqFeatureIndex != o.reqFeatureIndex) |
1037 | return false; |
1038 | |
1039 | auto iter = |
1040 | + hb_iter (featureIndex) |
1041 | | hb_filter (feature_index_map) |
1042 | | hb_map (feature_index_map) |
1043 | ; |
1044 | |
1045 | auto o_iter = |
1046 | + hb_iter (o.featureIndex) |
1047 | | hb_filter (feature_index_map) |
1048 | | hb_map (feature_index_map) |
1049 | ; |
1050 | |
1051 | for (; iter && o_iter; iter++, o_iter++) |
1052 | { |
1053 | unsigned a = *iter; |
1054 | unsigned b = *o_iter; |
1055 | if (a != b) return false; |
1056 | } |
1057 | |
1058 | if (iter || o_iter) return false; |
1059 | |
1060 | return true; |
1061 | } |
1062 | |
1063 | void collect_features (hb_prune_langsys_context_t *c) const |
1064 | { |
1065 | if (!has_required_feature () && !get_feature_count ()) return; |
1066 | if (has_required_feature () && |
1067 | c->duplicate_feature_map->has (reqFeatureIndex)) |
1068 | c->new_feature_indexes->add (get_required_feature_index ()); |
1069 | |
1070 | + hb_iter (featureIndex) |
1071 | | hb_filter (c->duplicate_feature_map) |
1072 | | hb_sink (c->new_feature_indexes) |
1073 | ; |
1074 | } |
1075 | |
1076 | bool subset (hb_subset_context_t *c, |
1077 | hb_subset_layout_context_t *l, |
1078 | const Tag *tag = nullptr) const |
1079 | { |
1080 | TRACE_SUBSET (this); |
1081 | auto *out = c->serializer->start_embed (*this); |
1082 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
1083 | |
1084 | const uint32_t *v; |
1085 | out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu; |
1086 | |
1087 | if (!l->visitFeatureIndex (featureIndex.len)) |
1088 | return_trace (false); |
1089 | |
1090 | auto it = |
1091 | + hb_iter (featureIndex) |
1092 | | hb_filter (l->feature_index_map) |
1093 | | hb_map (l->feature_index_map) |
1094 | ; |
1095 | |
1096 | bool ret = bool (it); |
1097 | out->featureIndex.serialize (c->serializer, l, it); |
1098 | return_trace (ret); |
1099 | } |
1100 | |
1101 | bool sanitize (hb_sanitize_context_t *c, |
1102 | const Record_sanitize_closure_t * = nullptr) const |
1103 | { |
1104 | TRACE_SANITIZE (this); |
1105 | return_trace (c->check_struct (this) && featureIndex.sanitize (c)); |
1106 | } |
1107 | |
1108 | Offset16 lookupOrderZ; /* = Null (reserved for an offset to a |
1109 | * reordering table) */ |
1110 | HBUINT16 reqFeatureIndex;/* Index of a feature required for this |
1111 | * language system--if no required features |
1112 | * = 0xFFFFu */ |
1113 | IndexArray featureIndex; /* Array of indices into the FeatureList */ |
1114 | public: |
1115 | DEFINE_SIZE_ARRAY_SIZED (6, featureIndex); |
1116 | }; |
1117 | DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys); |
1118 | |
1119 | struct Script |
1120 | { |
1121 | unsigned int get_lang_sys_count () const |
1122 | { return langSys.len; } |
1123 | const Tag& get_lang_sys_tag (unsigned int i) const |
1124 | { return langSys.get_tag (i); } |
1125 | unsigned int get_lang_sys_tags (unsigned int start_offset, |
1126 | unsigned int *lang_sys_count /* IN/OUT */, |
1127 | hb_tag_t *lang_sys_tags /* OUT */) const |
1128 | { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); } |
1129 | const LangSys& get_lang_sys (unsigned int i) const |
1130 | { |
1131 | if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys (); |
1132 | return this+langSys[i].offset; |
1133 | } |
1134 | bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const |
1135 | { return langSys.find_index (tag, index); } |
1136 | |
1137 | bool has_default_lang_sys () const { return defaultLangSys != 0; } |
1138 | const LangSys& get_default_lang_sys () const { return this+defaultLangSys; } |
1139 | |
1140 | void prune_langsys (hb_prune_langsys_context_t *c, |
1141 | unsigned script_index) const |
1142 | { |
1143 | if (!has_default_lang_sys () && !get_lang_sys_count ()) return; |
1144 | if (!c->visitScript ()) return; |
1145 | |
1146 | if (!c->script_langsys_map->has (script_index)) |
1147 | { |
1148 | if (unlikely (!c->script_langsys_map->set (script_index, hb::unique_ptr<hb_set_t> {hb_set_create ()}))) |
1149 | return; |
1150 | } |
1151 | |
1152 | if (has_default_lang_sys ()) |
1153 | { |
1154 | //only collect features from non-redundant langsys |
1155 | const LangSys& d = get_default_lang_sys (); |
1156 | if (c->visitLangsys (d.get_feature_count ())) { |
1157 | d.collect_features (c); |
1158 | } |
1159 | |
1160 | for (auto _ : + hb_enumerate (langSys)) |
1161 | { |
1162 | const LangSys& l = this+_.second.offset; |
1163 | if (!c->visitLangsys (l.get_feature_count ())) continue; |
1164 | if (l.compare (d, c->duplicate_feature_map)) continue; |
1165 | |
1166 | l.collect_features (c); |
1167 | c->script_langsys_map->get (script_index)->add (_.first); |
1168 | } |
1169 | } |
1170 | else |
1171 | { |
1172 | for (auto _ : + hb_enumerate (langSys)) |
1173 | { |
1174 | const LangSys& l = this+_.second.offset; |
1175 | if (!c->visitLangsys (l.get_feature_count ())) continue; |
1176 | l.collect_features (c); |
1177 | c->script_langsys_map->get (script_index)->add (_.first); |
1178 | } |
1179 | } |
1180 | } |
1181 | |
1182 | bool subset (hb_subset_context_t *c, |
1183 | hb_subset_layout_context_t *l, |
1184 | const Tag *tag) const |
1185 | { |
1186 | TRACE_SUBSET (this); |
1187 | if (!l->visitScript ()) return_trace (false); |
1188 | if (tag && !c->plan->layout_scripts.has (*tag)) |
1189 | return false; |
1190 | |
1191 | auto *out = c->serializer->start_embed (*this); |
1192 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
1193 | |
1194 | bool defaultLang = false; |
1195 | if (has_default_lang_sys ()) |
1196 | { |
1197 | c->serializer->push (); |
1198 | const LangSys& ls = this+defaultLangSys; |
1199 | bool ret = ls.subset (c, l); |
1200 | if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T')) |
1201 | { |
1202 | c->serializer->pop_discard (); |
1203 | out->defaultLangSys = 0; |
1204 | } |
1205 | else |
1206 | { |
1207 | c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ()); |
1208 | defaultLang = true; |
1209 | } |
1210 | } |
1211 | |
1212 | const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index); |
1213 | if (active_langsys) |
1214 | { |
1215 | + hb_enumerate (langSys) |
1216 | | hb_filter (active_langsys, hb_first) |
1217 | | hb_map (hb_second) |
1218 | | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); }) |
1219 | | hb_apply (subset_record_array (l, &(out->langSys), this)) |
1220 | ; |
1221 | } |
1222 | |
1223 | return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB); |
1224 | } |
1225 | |
1226 | bool sanitize (hb_sanitize_context_t *c, |
1227 | const Record_sanitize_closure_t * = nullptr) const |
1228 | { |
1229 | TRACE_SANITIZE (this); |
1230 | return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this)); |
1231 | } |
1232 | |
1233 | protected: |
1234 | Offset16To<LangSys> |
1235 | defaultLangSys; /* Offset to DefaultLangSys table--from |
1236 | * beginning of Script table--may be Null */ |
1237 | RecordArrayOf<LangSys> |
1238 | langSys; /* Array of LangSysRecords--listed |
1239 | * alphabetically by LangSysTag */ |
1240 | public: |
1241 | DEFINE_SIZE_ARRAY_SIZED (4, langSys); |
1242 | }; |
1243 | |
1244 | struct RecordListOfScript : RecordListOf<Script> |
1245 | { |
1246 | bool subset (hb_subset_context_t *c, |
1247 | hb_subset_layout_context_t *l) const |
1248 | { |
1249 | TRACE_SUBSET (this); |
1250 | auto *out = c->serializer->start_embed (*this); |
1251 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
1252 | |
1253 | for (auto _ : + hb_enumerate (*this)) |
1254 | { |
1255 | auto snap = c->serializer->snapshot (); |
1256 | l->cur_script_index = _.first; |
1257 | bool ret = _.second.subset (l, this); |
1258 | if (!ret) c->serializer->revert (snap); |
1259 | else out->len++; |
1260 | } |
1261 | |
1262 | return_trace (true); |
1263 | } |
1264 | }; |
1265 | |
1266 | typedef RecordListOfScript ScriptList; |
1267 | |
1268 | |
1269 | |
1270 | struct LookupFlag : HBUINT16 |
1271 | { |
1272 | enum Flags { |
1273 | RightToLeft = 0x0001u, |
1274 | IgnoreBaseGlyphs = 0x0002u, |
1275 | IgnoreLigatures = 0x0004u, |
1276 | IgnoreMarks = 0x0008u, |
1277 | IgnoreFlags = 0x000Eu, |
1278 | UseMarkFilteringSet = 0x0010u, |
1279 | Reserved = 0x00E0u, |
1280 | MarkAttachmentType = 0xFF00u |
1281 | }; |
1282 | public: |
1283 | DEFINE_SIZE_STATIC (2);
1284 | }; |
1285 | |
1286 | } /* namespace OT */ |
1287 | /* This has to be outside the namespace. */ |
1288 | HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
1289 | namespace OT { |
1290 | |
1291 | struct Lookup |
1292 | { |
1293 | unsigned int get_subtable_count () const { return subTable.len; } |
1294 | |
1295 | template <typename TSubTable> |
1296 | const Array16OfOffset16To<TSubTable>& get_subtables () const |
1297 | { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); } |
1298 | template <typename TSubTable> |
1299 | Array16OfOffset16To<TSubTable>& get_subtables () |
1300 | { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); } |
1301 | |
1302 | template <typename TSubTable> |
1303 | const TSubTable& get_subtable (unsigned int i) const |
1304 | { return this+get_subtables<TSubTable> ()[i]; } |
1305 | template <typename TSubTable> |
1306 | TSubTable& get_subtable (unsigned int i) |
1307 | { return this+get_subtables<TSubTable> ()[i]; } |
1308 | |
1309 | unsigned int get_size () const |
1310 | { |
1311 | const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable); |
1312 | if (lookupFlag & LookupFlag::UseMarkFilteringSet) |
1313 | return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this; |
1314 | return (const char *) &markFilteringSet - (const char *) this; |
1315 | } |
1316 | |
1317 | unsigned int get_type () const { return lookupType; } |
1318 | |
1319 | /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and |
1320 | * higher 16-bit is mark-filtering-set if the lookup uses one. |
1321 | * Not to be confused with glyph_props which is very similar. */ |
1322 | uint32_t get_props () const |
1323 | { |
1324 | unsigned int flag = lookupFlag; |
1325 | if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
1326 | { |
1327 | const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable); |
1328 | flag += (markFilteringSet << 16); |
1329 | } |
1330 | return flag; |
1331 | } |
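/* Editor's note — illustrative sketch only, not part of the upstream source.
 * It shows how a packed lookup_props value from get_props () above decomposes
 * back into the 16-bit LookupFlag and the mark-filtering-set index; the helper
 * struct and function names are hypothetical. */
#if 0
struct lookup_props_parts_t { uint16_t flags; uint16_t mark_filtering_set; };
static inline lookup_props_parts_t
split_lookup_props (uint32_t lookup_props)
{
  lookup_props_parts_t parts;
  parts.flags = lookup_props & 0xFFFFu;          /* lower 16 bits: LookupFlag */
  parts.mark_filtering_set = lookup_props >> 16; /* upper 16 bits: GDEF mark-set index,
                                                  * meaningful only when UseMarkFilteringSet is set */
  return parts;
}
#endif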
1332 | |
1333 | template <typename TSubTable, typename context_t, typename ...Ts> |
1334 | typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const |
1335 | { |
1336 | unsigned int lookup_type = get_type (); |
1337 | TRACE_DISPATCH (this, lookup_type);
1338 | unsigned int count = get_subtable_count (); |
1339 | for (unsigned int i = 0; i < count; i++) { |
1340 | typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, std::forward<Ts> (ds)...); |
1341 | if (c->stop_sublookup_iteration (r)) |
1342 | return_trace (r);
1343 | } |
1344 | return_trace (c->default_return_value ());
1345 | } |
1346 | |
1347 | bool serialize (hb_serialize_context_t *c, |
1348 | unsigned int lookup_type, |
1349 | uint32_t lookup_props, |
1350 | unsigned int num_subtables) |
1351 | { |
1352 | TRACE_SERIALIZE (this);
1353 | if (unlikely (!c->extend_min (this))) return_trace (false);
1354 | lookupType = lookup_type; |
1355 | lookupFlag = lookup_props & 0xFFFFu; |
1356 | if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
1357 | if (lookupFlag & LookupFlag::UseMarkFilteringSet) |
1358 | { |
1359 | if (unlikely (!c->extend (this))) return_trace (false);
1360 | HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable); |
1361 | markFilteringSet = lookup_props >> 16; |
1362 | } |
1363 | return_trace (true);
1364 | } |
1365 | |
1366 | template <typename TSubTable> |
1367 | bool subset (hb_subset_context_t *c) const |
1368 | { |
1369 | TRACE_SUBSET (this);
1370 | auto *out = c->serializer->start_embed (*this);
1371 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1372 | out->lookupType = lookupType; |
1373 | out->lookupFlag = lookupFlag; |
1374 | |
1375 | const hb_set_t *glyphset = c->plan->glyphset_gsub (); |
1376 | unsigned int lookup_type = get_type (); |
1377 | + hb_iter (get_subtables <TSubTable> ()) |
1378 | | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); }) |
1379 | | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type)) |
1380 | ; |
1381 | |
1382 | if (lookupFlag & LookupFlag::UseMarkFilteringSet) |
1383 | { |
1384 | const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable); |
1385 | hb_codepoint_t *idx; |
1386 | if (!c->plan->used_mark_sets_map.has (markFilteringSet, &idx)) |
1387 | { |
1388 | unsigned new_flag = lookupFlag; |
1389 | new_flag &= ~LookupFlag::UseMarkFilteringSet; |
1390 | out->lookupFlag = new_flag; |
1391 | } |
1392 | else |
1393 | { |
1394 | if (unlikely (!c->serializer->extend (out))) return_trace (false);
1395 | HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable); |
1396 | outMarkFilteringSet = *idx; |
1397 | } |
1398 | } |
1399 | |
1400 | // Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup |
1401 | // indices being consistent with those computed during planning. So if an empty lookup is |
1402 | // discarded during the subset phase it will invalidate all subsequent lookup indices. |
1403 | // Generally we shouldn't end up with an empty lookup as we pre-prune them during the planning |
1404 | // phase, but it can happen in rare cases, such as when a subtable is considered
1405 | // degenerate during closure (see: https://github.com/harfbuzz/harfbuzz/issues/3853).
1406 | return_trace (true);
1407 | } |
1408 | |
1409 | template <typename TSubTable> |
1410 | bool sanitize (hb_sanitize_context_t *c) const |
1411 | { |
1412 | TRACE_SANITIZE (this);
1413 | if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
1414 | hb_barrier (); |
1415 | |
1416 | unsigned subtables = get_subtable_count (); |
1417 | if (unlikely (!c->visit_subtables (subtables))) return_trace (false);
1418 | |
1419 | if (lookupFlag & LookupFlag::UseMarkFilteringSet) |
1420 | { |
1421 | const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable); |
1422 | if (!markFilteringSet.sanitize (c)) return_trace (false);
1423 | } |
1424 | |
1425 | if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
1426 | return_trace (false);
1427 | |
1428 | if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
1429 | { |
1430 | hb_barrier (); |
1431 | |
1432 | /* The spec says all subtables of an Extension lookup should |
1433 | * have the same type, which shall not be the Extension type |
1434 | * itself (but we already checked for that). |
1435 | * This is especially important if one has a reverse type!
1436 | * |
1437 | * We only do this if sanitizer edit_count is zero. Otherwise, |
1438 | * some of the subtables might have become insane after they |
1439 | * were sanity-checked by the edits of subsequent subtables. |
1440 | * https://bugs.chromium.org/p/chromium/issues/detail?id=960331 |
1441 | */ |
1442 | unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type (); |
1443 | for (unsigned int i = 1; i < subtables; i++) |
1444 | if (get_subtable<TSubTable> (i).u.extension.get_type () != type) |
1445 | return_trace (false);
1446 | } |
1447 | return_trace (true);
1448 | } |
1449 | |
1450 | protected: |
1451 | HBUINT16 lookupType; /* Different enumerations for GSUB and GPOS */ |
1452 | HBUINT16 lookupFlag; /* Lookup qualifiers */ |
1453 | Array16Of<Offset16> |
1454 | subTable; /* Array of SubTables */ |
1455 | /*HBUINT16 markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets |
1456 | * structure. This field is only present if bit |
1457 | * UseMarkFilteringSet of lookup flags is set. */ |
1458 | public: |
1459 | DEFINE_SIZE_ARRAY (6, subTable);
1460 | }; |
1461 | |
1462 | template <typename Types> |
1463 | using LookupList = List16OfOffsetTo<Lookup, typename Types::HBUINT>; |
1464 | |
1465 | template <typename TLookup, typename OffsetType> |
1466 | struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType> |
1467 | { |
1468 | bool subset (hb_subset_context_t *c, |
1469 | hb_subset_layout_context_t *l) const |
1470 | { |
1471 | TRACE_SUBSET (this);
1472 | auto *out = c->serializer->start_embed (this);
1473 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1474 | |
1475 | + hb_enumerate (*this) |
1476 | | hb_filter (l->lookup_index_map, hb_first) |
1477 | | hb_map (hb_second) |
1478 | | hb_apply (subset_offset_array (c, *out, this)) |
1479 | ; |
1480 | return_trace (true);
1481 | } |
1482 | |
1483 | bool sanitize (hb_sanitize_context_t *c) const |
1484 | { |
1485 | TRACE_SANITIZE (this);
1486 | return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
1487 | } |
1488 | }; |
1489 | |
1490 | |
1491 | /* |
1492 | * Coverage Table |
1493 | */ |
1494 | |
1495 | |
1496 | static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c, |
1497 | const hb_set_t &klasses, |
1498 | bool use_class_zero, |
1499 | hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */ |
1500 | hb_map_t *klass_map /*IN/OUT*/) |
1501 | { |
1502 | if (!klass_map) |
1503 | return ClassDef_serialize (c, glyph_and_klass.iter ()); |
1504 | |
1505 | /* Any glyph not assigned a class value falls into class zero (0);
1506 | * if any glyph is assigned to class 0, the remapping must start with 0->0. */
1507 | if (!use_class_zero) |
1508 | klass_map->set (0, 0); |
1509 | |
1510 | unsigned idx = klass_map->has (0) ? 1 : 0; |
1511 | for (const unsigned k: klasses) |
1512 | { |
1513 | if (klass_map->has (k)) continue; |
1514 | klass_map->set (k, idx); |
1515 | idx++; |
1516 | } |
1517 | |
1518 | |
1519 | for (unsigned i = 0; i < glyph_and_klass.length; i++) |
1520 | { |
1521 | hb_codepoint_t klass = glyph_and_klass[i].second; |
1522 | glyph_and_klass[i].second = klass_map->get (klass); |
1523 | } |
1524 | |
1525 | c->propagate_error (glyph_and_klass, klasses); |
1526 | return ClassDef_serialize (c, glyph_and_klass.iter ()); |
1527 | } |
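/* Editor's note — illustrative example, not upstream code. The remapping loop above
 * renumbers the retained classes densely while pinning class 0 when class-zero
 * glyphs must be kept: with klasses = {2, 5, 7} and use_class_zero == false,
 * klass_map becomes {0->0, 2->1, 5->2, 7->3}; with use_class_zero == true it
 * becomes {2->0, 5->1, 7->2}. */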
1528 | |
1529 | /* |
1530 | * Class Definition Table |
1531 | */ |
1532 | |
1533 | template <typename Types> |
1534 | struct ClassDefFormat1_3 |
1535 | { |
1536 | friend struct ClassDef; |
1537 | |
1538 | private: |
1539 | unsigned int get_class (hb_codepoint_t glyph_id) const |
1540 | { |
1541 | return classValue[(unsigned int) (glyph_id - startGlyph)]; |
1542 | } |
1543 | |
1544 | unsigned get_population () const |
1545 | { |
1546 | return classValue.len; |
1547 | } |
1548 | |
1549 | template<typename Iterator, |
1550 | hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1551 | bool serialize (hb_serialize_context_t *c, |
1552 | Iterator it) |
1553 | { |
1554 | TRACE_SERIALIZE (this);
1555 | if (unlikely (!c->extend_min (this))) return_trace (false);
1556 | |
1557 | if (unlikely (!it))
1558 | { |
1559 | classFormat = 1; |
1560 | startGlyph = 0; |
1561 | classValue.len = 0; |
1562 | return_trace (true);
1563 | } |
1564 | |
1565 | hb_codepoint_t glyph_min = (*it).first; |
1566 | hb_codepoint_t glyph_max = + it |
1567 | | hb_map (hb_first) |
1568 | | hb_reduce (hb_max, 0u); |
1569 | unsigned glyph_count = glyph_max - glyph_min + 1; |
1570 | |
1571 | startGlyph = glyph_min; |
1572 | if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
1573 | for (const hb_pair_t<hb_codepoint_t, uint32_t> gid_klass_pair : + it) |
1574 | { |
1575 | unsigned idx = gid_klass_pair.first - glyph_min; |
1576 | classValue[idx] = gid_klass_pair.second; |
1577 | } |
1578 | return_trace (true);
1579 | } |
1580 | |
1581 | bool subset (hb_subset_context_t *c, |
1582 | hb_map_t *klass_map = nullptr /*OUT*/, |
1583 | bool keep_empty_table = true, |
1584 | bool use_class_zero = true, |
1585 | const Coverage* glyph_filter = nullptr) const |
1586 | { |
1587 | TRACE_SUBSET (this);
1588 | const hb_map_t &glyph_map = c->plan->glyph_map_gsub; |
1589 | |
1590 | hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass; |
1591 | hb_set_t orig_klasses; |
1592 | |
1593 | hb_codepoint_t start = startGlyph; |
1594 | hb_codepoint_t end = start + classValue.len; |
1595 | |
1596 | for (const hb_codepoint_t gid : + hb_range (start, end)) |
1597 | { |
1598 | hb_codepoint_t new_gid = glyph_map[gid]; |
1599 | if (new_gid == HB_MAP_VALUE_INVALID) continue;
1600 | if (glyph_filter && !glyph_filter->has(gid)) continue; |
1601 | |
1602 | unsigned klass = classValue[gid - start]; |
1603 | if (!klass) continue; |
1604 | |
1605 | glyph_and_klass.push (hb_pair (new_gid, klass)); |
1606 | orig_klasses.add (klass); |
1607 | } |
1608 | |
1609 | if (use_class_zero) |
1610 | { |
1611 | unsigned glyph_count = glyph_filter |
1612 | ? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter)) |
1613 | : glyph_map.get_population (); |
1614 | use_class_zero = glyph_count <= glyph_and_klass.length; |
1615 | } |
1616 | if (!ClassDef_remap_and_serialize (c->serializer, |
1617 | orig_klasses, |
1618 | use_class_zero, |
1619 | glyph_and_klass, |
1620 | klass_map)) |
1621 | return_trace (false);
1622 | return_trace (keep_empty_table || (bool) glyph_and_klass);
1623 | } |
1624 | |
1625 | bool sanitize (hb_sanitize_context_t *c) const |
1626 | { |
1627 | TRACE_SANITIZE (this);
1628 | return_trace (c->check_struct (this) && classValue.sanitize (c));
1629 | } |
1630 | |
1631 | unsigned cost () const { return 1; } |
1632 | |
1633 | template <typename set_t> |
1634 | bool collect_coverage (set_t *glyphs) const |
1635 | { |
1636 | unsigned int start = 0; |
1637 | unsigned int count = classValue.len; |
1638 | for (unsigned int i = 0; i < count; i++) |
1639 | { |
1640 | if (classValue[i]) |
1641 | continue; |
1642 | |
1643 | if (start != i) |
1644 | if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
1645 | return false; |
1646 | |
1647 | start = i + 1; |
1648 | } |
1649 | if (start != count) |
1650 | if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
1651 | return false; |
1652 | |
1653 | return true; |
1654 | } |
1655 | |
1656 | template <typename set_t> |
1657 | bool collect_class (set_t *glyphs, unsigned klass) const |
1658 | { |
1659 | unsigned int count = classValue.len; |
1660 | for (unsigned int i = 0; i < count; i++) |
1661 | if (classValue[i] == klass) glyphs->add (startGlyph + i); |
1662 | return true; |
1663 | } |
1664 | |
1665 | bool intersects (const hb_set_t *glyphs) const |
1666 | { |
1667 | hb_codepoint_t start = startGlyph; |
1668 | hb_codepoint_t end = startGlyph + classValue.len; |
1669 | for (hb_codepoint_t iter = startGlyph - 1; |
1670 | glyphs->next (&iter) && iter < end;) |
1671 | if (classValue[iter - start]) return true; |
1672 | return false; |
1673 | } |
1674 | bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const |
1675 | { |
1676 | unsigned int count = classValue.len; |
1677 | if (klass == 0) |
1678 | { |
1679 | /* Match if there's any glyph that is not listed! */ |
1680 | hb_codepoint_t g = HB_SET_VALUE_INVALID;
1681 | if (!glyphs->next (&g)) return false; |
1682 | if (g < startGlyph) return true; |
1683 | g = startGlyph + count - 1; |
1684 | if (glyphs->next (&g)) return true; |
1685 | /* Fall through. */ |
1686 | } |
1687 | /* TODO Speed up, using set overlap first? */ |
1688 | /* TODO(iter) Rewrite as dagger. */ |
1689 | const HBUINT16 *arr = classValue.arrayZ; |
1690 | for (unsigned int i = 0; i < count; i++) |
1691 | if (arr[i] == klass && glyphs->has (startGlyph + i)) |
1692 | return true; |
1693 | return false; |
1694 | } |
1695 | |
1696 | void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const |
1697 | { |
1698 | unsigned count = classValue.len; |
1699 | if (klass == 0) |
1700 | { |
1701 | unsigned start_glyph = startGlyph; |
1702 | for (uint32_t g = HB_SET_VALUE_INVALID;
1703 | glyphs->next (&g) && g < start_glyph;) |
1704 | intersect_glyphs->add (g); |
1705 | |
1706 | for (uint32_t g = startGlyph + count - 1; |
1707 | glyphs-> next (&g);) |
1708 | intersect_glyphs->add (g); |
1709 | |
1710 | return; |
1711 | } |
1712 | |
1713 | for (unsigned i = 0; i < count; i++) |
1714 | if (classValue[i] == klass && glyphs->has (startGlyph + i)) |
1715 | intersect_glyphs->add (startGlyph + i); |
1716 | |
1717 | #if 0 |
1718 | /* The following implementation is faster asymptotically, but slower |
1719 | * in practice. */ |
1720 | unsigned start_glyph = startGlyph; |
1721 | unsigned end_glyph = start_glyph + count; |
1722 | for (unsigned g = startGlyph - 1; |
1723 | glyphs->next (&g) && g < end_glyph;) |
1724 | if (classValue.arrayZ[g - start_glyph] == klass) |
1725 | intersect_glyphs->add (g); |
1726 | #endif |
1727 | } |
1728 | |
1729 | void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const |
1730 | { |
1731 | if (glyphs->is_empty ()) return; |
1732 | hb_codepoint_t end_glyph = startGlyph + classValue.len - 1; |
1733 | if (glyphs->get_min () < startGlyph || |
1734 | glyphs->get_max () > end_glyph) |
1735 | intersect_classes->add (0); |
1736 | |
1737 | for (const auto& _ : + hb_enumerate (classValue)) |
1738 | { |
1739 | hb_codepoint_t g = startGlyph + _.first; |
1740 | if (glyphs->has (g)) |
1741 | intersect_classes->add (_.second); |
1742 | } |
1743 | } |
1744 | |
1745 | protected: |
1746 | HBUINT16 classFormat; /* Format identifier--format = 1 */ |
1747 | typename Types::HBGlyphID |
1748 | startGlyph; /* First GlyphID of the classValueArray */ |
1749 | typename Types::template ArrayOf<HBUINT16> |
1750 | classValue; /* Array of Class Values--one per GlyphID */ |
1751 | public: |
1752 | DEFINE_SIZE_ARRAY (2 + 2 * Types::size, classValue);
1753 | }; |
1754 | |
1755 | template <typename Types> |
1756 | struct ClassDefFormat2_4 |
1757 | { |
1758 | friend struct ClassDef; |
1759 | |
1760 | private: |
1761 | unsigned int get_class (hb_codepoint_t glyph_id) const |
1762 | { |
1763 | return rangeRecord.bsearch (glyph_id).value; |
1764 | } |
1765 | |
1766 | unsigned get_population () const |
1767 | { |
1768 | typename Types::large_int ret = 0; |
1769 | for (const auto &r : rangeRecord) |
1770 | ret += r.get_population (); |
1771 | return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
1772 | } |
1773 | |
1774 | template<typename Iterator, |
1775 | hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1776 | bool serialize (hb_serialize_context_t *c, |
1777 | Iterator it) |
1778 | { |
1779 | TRACE_SERIALIZE (this);
1780 | if (unlikely (!c->extend_min (this))) return_trace (false);
1781 | |
1782 | if (unlikely (!it))
1783 | { |
1784 | classFormat = 2; |
1785 | rangeRecord.len = 0; |
1786 | return_trace (true);
1787 | } |
1788 | |
1789 | unsigned unsorted = false; |
1790 | unsigned num_ranges = 1; |
1791 | hb_codepoint_t prev_gid = (*it).first; |
1792 | unsigned prev_klass = (*it).second; |
1793 | |
1794 | RangeRecord<Types> range_rec; |
1795 | range_rec.first = prev_gid; |
1796 | range_rec.last = prev_gid; |
1797 | range_rec.value = prev_klass; |
1798 | |
1799 | auto *record = c->copy (range_rec); |
1800 | if (unlikely (!record)) return_trace (false);
1801 | |
1802 | for (const auto gid_klass_pair : + (++it)) |
1803 | { |
1804 | hb_codepoint_t cur_gid = gid_klass_pair.first; |
1805 | unsigned cur_klass = gid_klass_pair.second; |
1806 | |
1807 | if (cur_gid != prev_gid + 1 || |
1808 | cur_klass != prev_klass) |
1809 | { |
1810 | |
1811 | if (unlikely (cur_gid < prev_gid))
1812 | unsorted = true; |
1813 | |
1814 | if (unlikely (!record)) break;
1815 | record->last = prev_gid; |
1816 | num_ranges++; |
1817 | |
1818 | range_rec.first = cur_gid; |
1819 | range_rec.last = cur_gid; |
1820 | range_rec.value = cur_klass; |
1821 | |
1822 | record = c->copy (range_rec); |
1823 | } |
1824 | |
1825 | prev_klass = cur_klass; |
1826 | prev_gid = cur_gid; |
1827 | } |
1828 | |
1829 | if (unlikely (c->in_error ())) return_trace (false);
1830 | |
1831 | if (likely (record)) record->last = prev_gid;
1832 | rangeRecord.len = num_ranges; |
1833 | |
1834 | if (unlikely (unsorted))
1835 | rangeRecord.as_array ().qsort (RangeRecord<Types>::cmp_range); |
1836 | |
1837 | return_trace (true);
1838 | } |
1839 | |
1840 | bool subset (hb_subset_context_t *c, |
1841 | hb_map_t *klass_map = nullptr /*OUT*/, |
1842 | bool keep_empty_table = true, |
1843 | bool use_class_zero = true, |
1844 | const Coverage* glyph_filter = nullptr) const |
1845 | { |
1846 | TRACE_SUBSET (this);
1847 | const hb_map_t &glyph_map = c->plan->glyph_map_gsub; |
1848 | const hb_set_t &glyph_set = *c->plan->glyphset_gsub (); |
1849 | |
1850 | hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass; |
1851 | hb_set_t orig_klasses; |
1852 | |
1853 | if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len) |
1854 | < get_population ()) |
1855 | { |
1856 | for (hb_codepoint_t g : glyph_set) |
1857 | { |
1858 | unsigned klass = get_class (g); |
1859 | if (!klass) continue; |
1860 | hb_codepoint_t new_gid = glyph_map[g]; |
1861 | if (new_gid == HB_MAP_VALUE_INVALID) continue;
1862 | if (glyph_filter && !glyph_filter->has (g)) continue; |
1863 | glyph_and_klass.push (hb_pair (new_gid, klass)); |
1864 | orig_klasses.add (klass); |
1865 | } |
1866 | } |
1867 | else |
1868 | { |
1869 | unsigned num_source_glyphs = c->plan->source->get_num_glyphs (); |
1870 | for (auto &range : rangeRecord) |
1871 | { |
1872 | unsigned klass = range.value; |
1873 | if (!klass) continue; |
1874 | hb_codepoint_t start = range.first; |
1875 | hb_codepoint_t end = hb_min (range.last + 1, num_source_glyphs); |
1876 | for (hb_codepoint_t g = start; g < end; g++) |
1877 | { |
1878 | hb_codepoint_t new_gid = glyph_map[g]; |
1879 | if (new_gid == HB_MAP_VALUE_INVALID) continue;
1880 | if (glyph_filter && !glyph_filter->has (g)) continue; |
1881 | |
1882 | glyph_and_klass.push (hb_pair (new_gid, klass)); |
1883 | orig_klasses.add (klass); |
1884 | } |
1885 | } |
1886 | } |
1887 | |
1888 | const hb_set_t& glyphset = *c->plan->glyphset_gsub (); |
1889 | unsigned glyph_count = glyph_filter |
1890 | ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter)) |
1891 | : glyph_map.get_population (); |
1892 | use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length; |
1893 | if (!ClassDef_remap_and_serialize (c->serializer, |
1894 | orig_klasses, |
1895 | use_class_zero, |
1896 | glyph_and_klass, |
1897 | klass_map)) |
1898 | return_trace (false);
1899 | return_trace (keep_empty_table || (bool) glyph_and_klass);
1900 | } |
1901 | |
1902 | bool sanitize (hb_sanitize_context_t *c) const |
1903 | { |
1904 | TRACE_SANITIZE (this);
1905 | return_trace (rangeRecord.sanitize (c));
1906 | } |
1907 | |
1908 | unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ } |
1909 | |
1910 | template <typename set_t> |
1911 | bool collect_coverage (set_t *glyphs) const |
1912 | { |
1913 | for (auto &range : rangeRecord) |
1914 | if (range.value) |
1915 | if (unlikely (!range.collect_coverage (glyphs)))
1916 | return false; |
1917 | return true; |
1918 | } |
1919 | |
1920 | template <typename set_t> |
1921 | bool collect_class (set_t *glyphs, unsigned int klass) const |
1922 | { |
1923 | for (auto &range : rangeRecord) |
1924 | { |
1925 | if (range.value == klass) |
1926 | if (unlikely (!range.collect_coverage (glyphs)))
1927 | return false; |
1928 | } |
1929 | return true; |
1930 | } |
1931 | |
1932 | bool intersects (const hb_set_t *glyphs) const |
1933 | { |
1934 | if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len)) |
1935 | { |
1936 | for (auto g : *glyphs) |
1937 | if (get_class (g)) |
1938 | return true; |
1939 | return false; |
1940 | } |
1941 | |
1942 | return hb_any (+ hb_iter (rangeRecord) |
1943 | | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs) && range.value; })); |
1944 | } |
1945 | bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const |
1946 | { |
1947 | if (klass == 0) |
1948 | { |
1949 | /* Match if there's any glyph that is not listed! */ |
1950 | hb_codepoint_t g = HB_SET_VALUE_INVALID;
1951 | hb_codepoint_t last = HB_SET_VALUE_INVALID;
1952 | auto it = hb_iter (rangeRecord); |
1953 | for (auto &range : it) |
1954 | { |
1955 | if (it->first == last + 1) |
1956 | { |
1957 | it++; |
1958 | continue; |
1959 | } |
1960 | |
1961 | if (!glyphs->next (&g)) |
1962 | break; |
1963 | if (g < range.first) |
1964 | return true; |
1965 | g = range.last; |
1966 | last = g; |
1967 | } |
1968 | if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
1969 | return true; |
1970 | /* Fall through. */ |
1971 | } |
1972 | for (const auto &range : rangeRecord) |
1973 | if (range.value == klass && range.intersects (*glyphs)) |
1974 | return true; |
1975 | return false; |
1976 | } |
1977 | |
1978 | void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const |
1979 | { |
1980 | if (klass == 0) |
1981 | { |
1982 | hb_codepoint_t g = HB_SET_VALUE_INVALID;
1983 | for (auto &range : rangeRecord) |
1984 | { |
1985 | if (!glyphs->next (&g)) |
1986 | goto done; |
1987 | while (g < range.first) |
1988 | { |
1989 | intersect_glyphs->add (g); |
1990 | if (!glyphs->next (&g)) |
1991 | goto done; |
1992 | } |
1993 | g = range.last; |
1994 | } |
1995 | while (glyphs->next (&g)) |
1996 | intersect_glyphs->add (g); |
1997 | done: |
1998 | |
1999 | return; |
2000 | } |
2001 | |
2002 | unsigned count = rangeRecord.len; |
2003 | if (count > glyphs->get_population () * hb_bit_storage (count)) |
2004 | { |
2005 | for (auto g : *glyphs) |
2006 | { |
2007 | unsigned i; |
2008 | if (rangeRecord.as_array ().bfind (g, &i) && |
2009 | rangeRecord.arrayZ[i].value == klass) |
2010 | intersect_glyphs->add (g); |
2011 | } |
2012 | return; |
2013 | } |
2014 | |
2015 | for (auto &range : rangeRecord) |
2016 | { |
2017 | if (range.value != klass) continue; |
2018 | |
2019 | unsigned end = range.last + 1; |
2020 | for (hb_codepoint_t g = range.first - 1; |
2021 | glyphs->next (&g) && g < end;) |
2022 | intersect_glyphs->add (g); |
2023 | } |
2024 | } |
2025 | |
2026 | void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const |
2027 | { |
2028 | if (glyphs->is_empty ()) return; |
2029 | |
2030 | hb_codepoint_t g = HB_SET_VALUE_INVALID;
2031 | for (auto &range : rangeRecord) |
2032 | { |
2033 | if (!glyphs->next (&g)) |
2034 | break; |
2035 | if (g < range.first) |
2036 | { |
2037 | intersect_classes->add (0); |
2038 | break; |
2039 | } |
2040 | g = range.last; |
2041 | } |
2042 | if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
2043 | intersect_classes->add (0); |
2044 | |
2045 | for (const auto& range : rangeRecord) |
2046 | if (range.intersects (*glyphs)) |
2047 | intersect_classes->add (range.value); |
2048 | } |
2049 | |
2050 | protected: |
2051 | HBUINT16 classFormat; /* Format identifier--format = 2 */ |
2052 | typename Types::template SortedArrayOf<RangeRecord<Types>> |
2053 | rangeRecord; /* Array of glyph ranges--ordered by |
2054 | * Start GlyphID */ |
2055 | public: |
2056 | DEFINE_SIZE_ARRAY (2 + Types::size, rangeRecord);
2057 | }; |
2058 | |
2059 | struct ClassDef |
2060 | { |
2061 | /* Has interface. */ |
2062 | unsigned operator [] (hb_codepoint_t k) const { return get (k); } |
2063 | bool has (hb_codepoint_t k) const { return (*this)[k]; } |
2064 | /* Projection. */ |
2065 | hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); } |
2066 | |
2067 | unsigned int get (hb_codepoint_t k) const { return get_class (k); } |
2068 | unsigned int get_class (hb_codepoint_t glyph_id) const |
2069 | { |
2070 | switch (u.format) { |
2071 | case 1: hb_barrier (); return u.format1.get_class (glyph_id); |
2072 | case 2: hb_barrier (); return u.format2.get_class (glyph_id); |
2073 | #ifndef HB_NO_BEYOND_64K |
2074 | case 3: hb_barrier (); return u.format3.get_class (glyph_id); |
2075 | case 4: hb_barrier (); return u.format4.get_class (glyph_id); |
2076 | #endif |
2077 | default:return 0; |
2078 | } |
2079 | } |
2080 | unsigned int get_class (hb_codepoint_t glyph_id, |
2081 | hb_ot_lookup_cache_t *cache) const |
2082 | { |
2083 | unsigned klass; |
2084 | if (cache && cache->get (glyph_id, &klass)) return klass; |
2085 | klass = get_class (glyph_id); |
2086 | if (cache) cache->set (glyph_id, klass); |
2087 | return klass; |
2088 | } |
2089 | |
2090 | unsigned get_population () const |
2091 | { |
2092 | switch (u.format) { |
2093 | case 1: hb_barrier (); return u.format1.get_population (); |
2094 | case 2: hb_barrier (); return u.format2.get_population (); |
2095 | #ifndef HB_NO_BEYOND_64K |
2096 | case 3: hb_barrier (); return u.format3.get_population (); |
2097 | case 4: hb_barrier (); return u.format4.get_population (); |
2098 | #endif |
2099 | default:return NOT_COVERED;
2100 | } |
2101 | } |
2102 | |
2103 | template<typename Iterator, |
2104 | hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
2105 | bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero) |
2106 | { |
2107 | TRACE_SERIALIZE (this);
2108 | if (unlikely (!c->extend_min (this))) return_trace (false);
2109 | |
2110 | auto it = + it_with_class_zero | hb_filter (hb_second); |
2111 | |
2112 | unsigned format = 2; |
2113 | hb_codepoint_t glyph_max = 0; |
2114 | if (likely (it))
2115 | { |
2116 | hb_codepoint_t glyph_min = (*it).first; |
2117 | glyph_max = glyph_min; |
2118 | |
2119 | unsigned num_glyphs = 0; |
2120 | unsigned num_ranges = 1; |
2121 | hb_codepoint_t prev_gid = glyph_min; |
2122 | unsigned prev_klass = (*it).second; |
2123 | |
2124 | for (const auto gid_klass_pair : it) |
2125 | { |
2126 | hb_codepoint_t cur_gid = gid_klass_pair.first; |
2127 | unsigned cur_klass = gid_klass_pair.second; |
2128 | num_glyphs++; |
2129 | if (cur_gid == glyph_min) continue; |
2130 | if (cur_gid > glyph_max) glyph_max = cur_gid; |
2131 | if (cur_gid != prev_gid + 1 || |
2132 | cur_klass != prev_klass) |
2133 | num_ranges++; |
2134 | |
2135 | prev_gid = cur_gid; |
2136 | prev_klass = cur_klass; |
2137 | } |
2138 | |
2139 | if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3) |
2140 | format = 1; |
2141 | } |
2142 | |
2143 | #ifndef HB_NO_BEYOND_64K |
2144 | if (glyph_max > 0xFFFFu) |
2145 | u.format += 2; |
2146 | if (unlikely (glyph_max > 0xFFFFFFu))
2147 | #else |
2148 | if (unlikely (glyph_max > 0xFFFFu))
2149 | #endif |
2150 | { |
2151 | c->check_success (false, HB_SERIALIZE_ERROR_INT_OVERFLOW); |
2152 | return_trace (false);
2153 | } |
2154 | |
2155 | u.format = format; |
2156 | |
2157 | switch (u.format) |
2158 | { |
2159 | case 1: hb_barrier (); return_trace (u.format1.serialize (c, it));
2160 | case 2: hb_barrier (); return_trace (u.format2.serialize (c, it));
2161 | #ifndef HB_NO_BEYOND_64K
2162 | case 3: hb_barrier (); return_trace (u.format3.serialize (c, it));
2163 | case 4: hb_barrier (); return_trace (u.format4.serialize (c, it));
2164 | #endif
2165 | default:return_trace (false);
2166 | } |
2167 | } |
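/* Editor's note — illustrative arithmetic, not upstream code. The heuristic above
 * compares the two encodings' rough sizes in 16-bit units: format 1 needs about
 * 1 (startGlyph) + (glyph_max - glyph_min + 1) class values, format 2 about 3 per
 * range. E.g. 10 consecutive glyph IDs split over 4 ranges gives 1 + 10 = 11 <= 12
 * = 4 * 3, so format 1 is chosen; with only 2 ranges, 11 > 6 and format 2 wins. */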
2168 | |
2169 | bool subset (hb_subset_context_t *c, |
2170 | hb_map_t *klass_map = nullptr /*OUT*/, |
2171 | bool keep_empty_table = true, |
2172 | bool use_class_zero = true, |
2173 | const Coverage* glyph_filter = nullptr) const |
2174 | { |
2175 | TRACE_SUBSET (this);
2176 | switch (u.format) { |
2177 | case 1: hb_barrier (); return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2178 | case 2: hb_barrier (); return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2179 | #ifndef HB_NO_BEYOND_64K
2180 | case 3: hb_barrier (); return_trace (u.format3.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2181 | case 4: hb_barrier (); return_trace (u.format4.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2182 | #endif
2183 | default:return_trace (false);
2184 | } |
2185 | } |
2186 | |
2187 | bool sanitize (hb_sanitize_context_t *c) const |
2188 | { |
2189 | TRACE_SANITIZE (this);
2190 | if (!u.format.sanitize (c)) return_trace (false);
2191 | hb_barrier (); |
2192 | switch (u.format) { |
2193 | case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
2194 | case 2: hb_barrier (); return_trace (u.format2.sanitize (c));
2195 | #ifndef HB_NO_BEYOND_64K
2196 | case 3: hb_barrier (); return_trace (u.format3.sanitize (c));
2197 | case 4: hb_barrier (); return_trace (u.format4.sanitize (c));
2198 | #endif
2199 | default:return_trace (true);
2200 | } |
2201 | } |
2202 | |
2203 | unsigned cost () const |
2204 | { |
2205 | switch (u.format) { |
2206 | case 1: hb_barrier (); return u.format1.cost (); |
2207 | case 2: hb_barrier (); return u.format2.cost (); |
2208 | #ifndef HB_NO_BEYOND_64K |
2209 | case 3: hb_barrier (); return u.format3.cost (); |
2210 | case 4: hb_barrier (); return u.format4.cost (); |
2211 | #endif |
2212 | default:return 0u; |
2213 | } |
2214 | } |
2215 | |
2216 | /* Might return false if array looks unsorted. |
2217 | * Used for faster rejection of corrupt data. */ |
2218 | template <typename set_t> |
2219 | bool collect_coverage (set_t *glyphs) const |
2220 | { |
2221 | switch (u.format) { |
2222 | case 1: hb_barrier (); return u.format1.collect_coverage (glyphs); |
2223 | case 2: hb_barrier (); return u.format2.collect_coverage (glyphs); |
2224 | #ifndef HB_NO_BEYOND_64K |
2225 | case 3: hb_barrier (); return u.format3.collect_coverage (glyphs); |
2226 | case 4: hb_barrier (); return u.format4.collect_coverage (glyphs); |
2227 | #endif |
2228 | default:return false; |
2229 | } |
2230 | } |
2231 | |
2232 | /* Might return false if array looks unsorted. |
2233 | * Used for faster rejection of corrupt data. */ |
2234 | template <typename set_t> |
2235 | bool collect_class (set_t *glyphs, unsigned int klass) const |
2236 | { |
2237 | switch (u.format) { |
2238 | case 1: hb_barrier (); return u.format1.collect_class (glyphs, klass); |
2239 | case 2: hb_barrier (); return u.format2.collect_class (glyphs, klass); |
2240 | #ifndef HB_NO_BEYOND_64K |
2241 | case 3: hb_barrier (); return u.format3.collect_class (glyphs, klass); |
2242 | case 4: hb_barrier (); return u.format4.collect_class (glyphs, klass); |
2243 | #endif |
2244 | default:return false; |
2245 | } |
2246 | } |
2247 | |
2248 | bool intersects (const hb_set_t *glyphs) const |
2249 | { |
2250 | switch (u.format) { |
2251 | case 1: hb_barrier (); return u.format1.intersects (glyphs); |
2252 | case 2: hb_barrier (); return u.format2.intersects (glyphs); |
2253 | #ifndef HB_NO_BEYOND_64K |
2254 | case 3: hb_barrier (); return u.format3.intersects (glyphs); |
2255 | case 4: hb_barrier (); return u.format4.intersects (glyphs); |
2256 | #endif |
2257 | default:return false; |
2258 | } |
2259 | } |
2260 | bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const |
2261 | { |
2262 | switch (u.format) { |
2263 | case 1: hb_barrier (); return u.format1.intersects_class (glyphs, klass); |
2264 | case 2: hb_barrier (); return u.format2.intersects_class (glyphs, klass); |
2265 | #ifndef HB_NO_BEYOND_64K |
2266 | case 3: hb_barrier (); return u.format3.intersects_class (glyphs, klass); |
2267 | case 4: hb_barrier (); return u.format4.intersects_class (glyphs, klass); |
2268 | #endif |
2269 | default:return false; |
2270 | } |
2271 | } |
2272 | |
2273 | void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const |
2274 | { |
2275 | switch (u.format) { |
2276 | case 1: hb_barrier (); return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2277 | case 2: hb_barrier (); return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2278 | #ifndef HB_NO_BEYOND_64K |
2279 | case 3: hb_barrier (); return u.format3.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2280 | case 4: hb_barrier (); return u.format4.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2281 | #endif |
2282 | default:return; |
2283 | } |
2284 | } |
2285 | |
2286 | void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const |
2287 | { |
2288 | switch (u.format) { |
2289 | case 1: hb_barrier (); return u.format1.intersected_classes (glyphs, intersect_classes); |
2290 | case 2: hb_barrier (); return u.format2.intersected_classes (glyphs, intersect_classes); |
2291 | #ifndef HB_NO_BEYOND_64K |
2292 | case 3: hb_barrier (); return u.format3.intersected_classes (glyphs, intersect_classes); |
2293 | case 4: hb_barrier (); return u.format4.intersected_classes (glyphs, intersect_classes); |
2294 | #endif |
2295 | default:return; |
2296 | } |
2297 | } |
2298 | |
2299 | |
2300 | protected: |
2301 | union { |
2302 | HBUINT16 format; /* Format identifier */ |
2303 | ClassDefFormat1_3<SmallTypes> format1; |
2304 | ClassDefFormat2_4<SmallTypes> format2; |
2305 | #ifndef HB_NO_BEYOND_64K |
2306 | ClassDefFormat1_3<MediumTypes>format3; |
2307 | ClassDefFormat2_4<MediumTypes>format4; |
2308 | #endif |
2309 | } u; |
2310 | public: |
2311 | DEFINE_SIZE_UNION (2, format);
2312 | }; |
2313 | |
2314 | template<typename Iterator> |
2315 | static inline bool ClassDef_serialize (hb_serialize_context_t *c, |
2316 | Iterator it) |
2317 | { return (c->start_embed<ClassDef> ()->serialize (c, it)); } |
2318 | |
2319 | |
2320 | /* |
2321 | * Item Variation Store |
2322 | */ |
2323 | |
2324 | /* ported from fonttools (class _Encoding) */ |
2325 | struct delta_row_encoding_t |
2326 | { |
2327 | /* Each byte represents a region; its value is one of 0/1/2/4, the number of
2328 | * bytes needed to store that region's deltas. */
2329 | hb_vector_t<uint8_t> chars; |
2330 | unsigned width = 0; |
2331 | hb_vector_t<uint8_t> columns; |
2332 | unsigned overhead = 0; |
2333 | hb_vector_t<const hb_vector_t<int>*> items; |
2334 | |
2335 | delta_row_encoding_t () = default; |
2336 | delta_row_encoding_t (hb_vector_t<uint8_t>&& chars_, |
2337 | const hb_vector_t<int>* row = nullptr) : |
2338 | delta_row_encoding_t () |
2339 | |
2340 | { |
2341 | chars = std::move (chars_); |
2342 | width = get_width (); |
2343 | columns = get_columns (); |
2344 | overhead = get_chars_overhead (columns); |
2345 | if (row) items.push (row); |
2346 | } |
2347 | |
2348 | bool is_empty () const |
2349 | { return !items; } |
2350 | |
2351 | static hb_vector_t<uint8_t> get_row_chars (const hb_vector_t<int>& row) |
2352 | { |
2353 | hb_vector_t<uint8_t> ret; |
2354 | if (!ret.alloc (row.length)) return ret; |
2355 | |
2356 | bool long_words = false; |
2357 | |
2358 | /* 0/1/2 byte encoding */ |
2359 | for (int i = row.length - 1; i >= 0; i--) |
2360 | { |
2361 | int v = row.arrayZ[i]; |
2362 | if (v == 0) |
2363 | ret.push (0); |
2364 | else if (v > 32767 || v < -32768) |
2365 | { |
2366 | long_words = true; |
2367 | break; |
2368 | } |
2369 | else if (v > 127 || v < -128) |
2370 | ret.push (2); |
2371 | else |
2372 | ret.push (1); |
2373 | } |
2374 | |
2375 | if (!long_words) |
2376 | return ret; |
2377 | |
2378 | /* redo, 0/2/4 bytes encoding */ |
2379 | ret.reset (); |
2380 | for (int i = row.length - 1; i >= 0; i--) |
2381 | { |
2382 | int v = row.arrayZ[i]; |
2383 | if (v == 0) |
2384 | ret.push (0); |
2385 | else if (v > 32767 || v < -32768) |
2386 | ret.push (4); |
2387 | else |
2388 | ret.push (2); |
2389 | } |
2390 | return ret; |
2391 | } |
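/* Editor's note — illustrative example, not upstream code. Each delta maps to a
 * byte width: 0 -> 0 bytes, values in [-128, 127] -> 1, other values within the
 * int16 range -> 2; if any delta falls outside the int16 range the whole row is
 * re-encoded with the 0/2/4 scheme. E.g. the deltas {0, 5, 300} need 0, 1 and 2
 * bytes respectively, while {0, 5, 300, -40000} is re-encoded as 0, 2, 2 and 4. */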
2392 | |
2393 | inline unsigned get_width () |
2394 | { |
2395 | unsigned ret = + hb_iter (chars) |
2396 | | hb_reduce (hb_add, 0u) |
2397 | ; |
2398 | return ret; |
2399 | } |
2400 | |
2401 | hb_vector_t<uint8_t> get_columns () |
2402 | { |
2403 | hb_vector_t<uint8_t> cols; |
2404 | cols.alloc (chars.length); |
2405 | for (auto v : chars) |
2406 | { |
2407 | uint8_t flag = v ? 1 : 0; |
2408 | cols.push (flag); |
2409 | } |
2410 | return cols; |
2411 | } |
2412 | |
2413 | static inline unsigned get_chars_overhead (const hb_vector_t<uint8_t>& cols) |
2414 | { |
2415 | unsigned c = 4 + 6; // 4 bytes for LOffset, 6 bytes for VarData header |
2416 | unsigned cols_bit_count = 0; |
2417 | for (auto v : cols) |
2418 | if (v) cols_bit_count++; |
2419 | return c + cols_bit_count * 2; |
2420 | } |
2421 | |
2422 | unsigned get_gain () const |
2423 | { |
2424 | int count = items.length; |
2425 | return hb_max (0, (int) overhead - count); |
2426 | } |
2427 | |
2428 | int gain_from_merging (const delta_row_encoding_t& other_encoding) const |
2429 | { |
2430 | int combined_width = 0; |
2431 | for (unsigned i = 0; i < chars.length; i++) |
2432 | combined_width += hb_max (chars.arrayZ[i], other_encoding.chars.arrayZ[i]); |
2433 | |
2434 | hb_vector_t<uint8_t> combined_columns; |
2435 | combined_columns.alloc (columns.length); |
2436 | for (unsigned i = 0; i < columns.length; i++) |
2437 | combined_columns.push (columns.arrayZ[i] | other_encoding.columns.arrayZ[i]); |
2438 | |
2439 | int combined_overhead = get_chars_overhead (combined_columns); |
2440 | int combined_gain = (int) overhead + (int) other_encoding.overhead - combined_overhead |
2441 | - (combined_width - (int) width) * items.length |
2442 | - (combined_width - (int) other_encoding.width) * other_encoding.items.length; |
2443 | |
2444 | return combined_gain; |
2445 | } |
2446 | |
2447 | static int cmp (const void *pa, const void *pb) |
2448 | { |
2449 | const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa; |
2450 | const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb; |
2451 | |
2452 | int gain_a = a->get_gain (); |
2453 | int gain_b = b->get_gain (); |
2454 | |
2455 | if (gain_a != gain_b) |
2456 | return gain_a - gain_b; |
2457 | |
2458 | return (b->chars).as_array ().cmp ((a->chars).as_array ()); |
2459 | } |
2460 | |
2461 | static int cmp_width (const void *pa, const void *pb) |
2462 | { |
2463 | const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa; |
2464 | const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb; |
2465 | |
2466 | if (a->width != b->width) |
2467 | return (int) a->width - (int) b->width; |
2468 | |
2469 | return (b->chars).as_array ().cmp ((a->chars).as_array ()); |
2470 | } |
2471 | |
2472 | bool add_row (const hb_vector_t<int>* row) |
2473 | { return items.push (row); } |
2474 | }; |
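/* Editor's note — illustrative arithmetic, not upstream code. Per the constants in
 * get_chars_overhead () above, an encoding with 3 used columns costs 4 + 6 + 3*2 =
 * 16 bytes of overhead. gain_from_merging () then trades two such overheads for one
 * combined overhead, minus the cost of widening every existing row in each encoding
 * to the combined width. */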
2475 | |
2476 | struct VarRegionAxis |
2477 | { |
2478 | float evaluate (int coord) const |
2479 | { |
2480 | int peak = peakCoord.to_int (); |
2481 | if (peak == 0 || coord == peak) |
2482 | return 1.f; |
2483 | else if (coord == 0) // Faster |
2484 | return 0.f; |
2485 | |
2486 | int start = startCoord.to_int (), end = endCoord.to_int (); |
2487 | |
2488 | /* TODO Move these to sanitize(). */ |
2489 | if (unlikely (start > peak || peak > end))
2490 | return 1.f; |
2491 | if (unlikely (start < 0 && end > 0 && peak != 0))
2492 | return 1.f; |
2493 | |
2494 | if (coord <= start || end <= coord) |
2495 | return 0.f; |
2496 | |
2497 | /* Interpolate */ |
2498 | if (coord < peak) |
2499 | return float (coord - start) / (peak - start); |
2500 | else |
2501 | return float (end - coord) / (end - peak); |
2502 | } |
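/* Editor's note — illustrative sketch only, not part of the upstream source.
 * evaluate () above is the usual triangular ("tent") region scalar; a plain-float
 * rendering of the same math (ignoring the F2DOT14 fixed-point type and the
 * sanity guards) might look like: */
#if 0
static float tent (float coord, float start, float peak, float end)
{
  if (peak == 0.f || coord == peak) return 1.f;
  if (coord <= start || end <= coord) return 0.f;
  return coord < peak ? (coord - start) / (peak - start)
                      : (end - coord) / (end - peak);
}
/* e.g. tent (0.25f, 0.f, 0.5f, 1.f) == 0.5f.  Note the guard above: when start and
 * end straddle zero while the peak is non-zero, the member function returns a
 * constant 1.f instead of interpolating. */
#endif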
2503 | |
2504 | bool sanitize (hb_sanitize_context_t *c) const |
2505 | { |
2506 | TRACE_SANITIZE (this);
2507 | return_trace (c->check_struct (this));
2508 | } |
2509 | |
2510 | bool serialize (hb_serialize_context_t *c) const |
2511 | { |
2512 | TRACE_SERIALIZE (this);
2513 | return_trace (c->embed (this));
2514 | } |
2515 | |
2516 | public: |
2517 | F2DOT14 startCoord; |
2518 | F2DOT14 peakCoord; |
2519 | F2DOT14 endCoord; |
2520 | public: |
2521 | DEFINE_SIZE_STATIC (6);
2522 | }; |
2523 | struct SparseVarRegionAxis |
2524 | { |
2525 | float evaluate (const int *coords, unsigned int coord_len) const |
2526 | { |
2527 | unsigned i = axisIndex; |
2528 | int coord = i < coord_len ? coords[i] : 0; |
2529 | return axis.evaluate (coord); |
2530 | } |
2531 | |
2532 | bool sanitize (hb_sanitize_context_t *c) const |
2533 | { |
2534 | TRACE_SANITIZE (this);
2535 | return_trace (c->check_struct (this));
2536 | } |
2537 | |
2538 | bool serialize (hb_serialize_context_t *c) const |
2539 | { |
2540 | TRACE_SERIALIZE (this);
2541 | return_trace (c->embed (this));
2542 | } |
2543 | |
2544 | public: |
2545 | HBUINT16 axisIndex; |
2546 | VarRegionAxis axis; |
2547 | public: |
2548 | DEFINE_SIZE_STATIC (8);
2549 | }; |
2550 | |
2551 | #define REGION_CACHE_ITEM_CACHE_INVALID INT_MIN
2552 | #define REGION_CACHE_ITEM_MULTIPLIER (float (1 << ((sizeof (int) * 8) - 2)))
2553 | #define REGION_CACHE_ITEM_DIVISOR (1.f / float (1 << ((sizeof (int) * 8) - 2)))
2554 | |
2555 | struct VarRegionList |
2556 | { |
2557 | using cache_t = hb_atomic_t<int>; |
2558 | |
2559 | float evaluate (unsigned int region_index, |
2560 | const int *coords, unsigned int coord_len, |
2561 | cache_t *cache = nullptr) const |
2562 | { |
2563 | if (unlikely (region_index >= regionCount))
2564 | return 0.; |
2565 | |
2566 | cache_t *cached_value = nullptr; |
2567 | if (cache) |
2568 | { |
2569 | cached_value = &(cache[region_index]); |
2570 | if (*cached_value != REGION_CACHE_ITEM_CACHE_INVALID) |
2571 | return *cached_value * REGION_CACHE_ITEM_DIVISOR;
2572 | } |
2573 | |
2574 | const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount); |
2575 | |
2576 | float v = 1.; |
2577 | unsigned int count = axisCount; |
2578 | for (unsigned int i = 0; i < count; i++) |
2579 | { |
2580 | int coord = i < coord_len ? coords[i] : 0; |
2581 | float factor = axes[i].evaluate (coord); |
2582 | if (factor == 0.f) |
2583 | { |
2584 | if (cache) |
2585 | *cached_value = 0.; |
2586 | return 0.; |
2587 | } |
2588 | v *= factor; |
2589 | } |
2590 | |
2591 | if (cache) |
2592 | *cached_value = v * REGION_CACHE_ITEM_MULTIPLIER;
2593 | return v; |
2594 | } |
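/* Editor's note — illustrative arithmetic, not upstream code. The per-region scalar
 * is the product of the per-axis factors. A cached entry stores that float scaled by
 * 2^30 (REGION_CACHE_ITEM_MULTIPLIER, assuming a 32-bit int) in an atomic int, with
 * INT_MIN reserved as the "not yet computed" sentinel; e.g. a scalar of 0.25 is
 * cached as 0.25 * 2^30 = 268435456 and scaled back on read via
 * REGION_CACHE_ITEM_DIVISOR. */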
2595 | |
2596 | bool sanitize (hb_sanitize_context_t *c) const |
2597 | { |
2598 | TRACE_SANITIZE (this);
2599 | return_trace (c->check_struct (this) &&
2600 | hb_barrier () &&
2601 | axesZ.sanitize (c, axisCount * regionCount));
2602 | } |
2603 | |
2604 | bool serialize (hb_serialize_context_t *c, |
2605 | const hb_vector_t<hb_tag_t>& axis_tags, |
2606 | const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& regions) |
2607 | { |
2608 | TRACE_SERIALIZE (this);
2609 | unsigned axis_count = axis_tags.length; |
2610 | unsigned region_count = regions.length; |
2611 | if (!axis_count || !region_count) return_trace (false);
2612 | if (unlikely (hb_unsigned_mul_overflows (axis_count * region_count,
2613 | VarRegionAxis::static_size))) return_trace (false);
2614 | if (unlikely (!c->extend_min (this))) return_trace (false);
2615 | axisCount = axis_count; |
2616 | regionCount = region_count; |
2617 | |
2618 | for (unsigned r = 0; r < region_count; r++) |
2619 | { |
2620 | const auto& region = regions[r]; |
2621 | for (unsigned i = 0; i < axis_count; i++) |
2622 | { |
2623 | hb_tag_t tag = axis_tags.arrayZ[i]; |
2624 | VarRegionAxis var_region_rec; |
2625 | Triple *coords; |
2626 | if (region->has (tag, &coords)) |
2627 | { |
2628 | var_region_rec.startCoord.set_float (coords->minimum); |
2629 | var_region_rec.peakCoord.set_float (coords->middle); |
2630 | var_region_rec.endCoord.set_float (coords->maximum); |
2631 | } |
2632 | else |
2633 | { |
2634 | var_region_rec.startCoord.set_int (0); |
2635 | var_region_rec.peakCoord.set_int (0); |
2636 | var_region_rec.endCoord.set_int (0); |
2637 | } |
2638 | if (!var_region_rec.serialize (c)) |
2639 | return_trace (false); |
2640 | } |
2641 | } |
2642 | return_trace (true); |
2643 | } |
2644 | |
2645 | bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t ®ion_map) |
2646 | { |
2647 | TRACE_SERIALIZE (this); |
2648 | if (unlikely (!c->extend_min (this))) return_trace (false); |
2649 | axisCount = src->axisCount; |
2650 | regionCount = region_map.get_population (); |
2651 | if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount, |
2652 | VarRegionAxis::static_size))) return_trace (false); |
2653 | if (unlikely (!c->extend (this))) return_trace (false); |
2654 | unsigned int region_count = src->regionCount; |
2655 | for (unsigned int r = 0; r < regionCount; r++) |
2656 | { |
2657 | unsigned int backward = region_map.backward (r); |
2658 | if (backward >= region_count) return_trace (false); |
2659 | hb_memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount); |
2660 | } |
2661 | |
2662 | return_trace (true); |
2663 | } |
2664 | |
2665 | bool get_var_region (unsigned region_index, |
2666 | const hb_map_t& axes_old_index_tag_map, |
2667 | hb_hashmap_t<hb_tag_t, Triple>& axis_tuples /* OUT */) const |
2668 | { |
2669 | if (region_index >= regionCount) return false; |
2670 | const VarRegionAxis* axis_region = axesZ.arrayZ + (region_index * axisCount); |
2671 | for (unsigned i = 0; i < axisCount; i++) |
2672 | { |
2673 | hb_tag_t *axis_tag; |
2674 | if (!axes_old_index_tag_map.has (i, &axis_tag)) |
2675 | return false; |
2676 | |
2677 | float min_val = axis_region->startCoord.to_float (); |
2678 | float def_val = axis_region->peakCoord.to_float (); |
2679 | float max_val = axis_region->endCoord.to_float (); |
2680 | |
2681 | if (def_val != 0.f) |
2682 | axis_tuples.set (*axis_tag, Triple ((double) min_val, (double) def_val, (double) max_val)); |
2683 | axis_region++; |
2684 | } |
2685 | return !axis_tuples.in_error (); |
2686 | } |
2687 | |
2688 | bool get_var_regions (const hb_map_t& axes_old_index_tag_map, |
2689 | hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions /* OUT */) const |
2690 | { |
2691 | if (!regions.alloc (regionCount)) |
2692 | return false; |
2693 | |
2694 | for (unsigned i = 0; i < regionCount; i++) |
2695 | { |
2696 | hb_hashmap_t<hb_tag_t, Triple> axis_tuples; |
2697 | if (!get_var_region (i, axes_old_index_tag_map, axis_tuples)) |
2698 | return false; |
2699 | regions.push (std::move (axis_tuples)); |
2700 | } |
2701 | return !regions.in_error (); |
2702 | } |
2703 | |
2704 | unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; } |
2705 | |
2706 | public: |
2707 | HBUINT16 axisCount; |
2708 | HBUINT15 regionCount; |
2709 | protected: |
2710 | UnsizedArrayOf<VarRegionAxis> |
2711 | axesZ; |
2712 | public: |
2713 | DEFINE_SIZE_ARRAY (4, axesZ); |
2714 | }; |
2715 | |
2716 | struct SparseVariationRegion : Array16Of<SparseVarRegionAxis> |
2717 | { |
2718 | float evaluate (const int *coords, unsigned int coord_len) const |
2719 | { |
2720 | float v = 1.f; |
2721 | unsigned int count = len; |
2722 | for (unsigned int i = 0; i < count; i++) |
2723 | { |
2724 | float factor = arrayZ[i].evaluate (coords, coord_len); |
2725 | if (factor == 0.f) |
2726 | return 0.; |
2727 | v *= factor; |
2728 | } |
2729 | return v; |
2730 | } |
2731 | }; |
2732 | |
2733 | struct SparseVarRegionList |
2734 | { |
2735 | using cache_t = hb_atomic_t<int>; |
2736 | |
2737 | float evaluate (unsigned int region_index, |
2738 | const int *coords, unsigned int coord_len, |
2739 | cache_t *cache = nullptr) const |
2740 | { |
2741 | if (unlikely (region_index >= regions.len)) |
2742 | return 0.; |
2743 | |
2744 | cache_t *cached_value = nullptr; |
2745 | if (cache) |
2746 | { |
2747 | cached_value = &(cache[region_index]); |
2748 | if (*cached_value != REGION_CACHE_ITEM_CACHE_INVALID) |
2749 | return *cached_value * REGION_CACHE_ITEM_DIVISOR; |
2750 | } |
2751 | |
2752 | const SparseVariationRegion ®ion = this+regions[region_index]; |
2753 | |
2754 | float v = region.evaluate (coords, coord_len); |
2755 | |
2756 | if (cache) |
2757 | *cached_value = v * REGION_CACHE_ITEM_MULTIPLIER; |
2758 | return v; |
2759 | } |
2760 | |
2761 | bool sanitize (hb_sanitize_context_t *c) const |
2762 | { |
2763 | TRACE_SANITIZE (this); |
2764 | return_trace (regions.sanitize (c, this)); |
2765 | } |
2766 | |
2767 | public: |
2768 | Array16Of<Offset32To<SparseVariationRegion>> |
2769 | regions; |
2770 | public: |
2771 | DEFINE_SIZE_ARRAY (2, regions); |
2772 | }; |
2773 | |
2774 | |
2775 | struct VarData |
2776 | { |
2777 | unsigned int get_item_count () const |
2778 | { return itemCount; } |
2779 | |
2780 | unsigned int get_region_index_count () const |
2781 | { return regionIndices.len; } |
2782 | |
2783 | unsigned get_region_index (unsigned i) const |
2784 | { return i >= regionIndices.len ? -1 : regionIndices[i]; } |
2785 | |
2786 | unsigned int get_row_size () const |
2787 | { return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); } |
2788 | |
2789 | unsigned int get_size () const |
2790 | { return min_size |
2791 | - regionIndices.min_size + regionIndices.get_size () |
2792 | + itemCount * get_row_size (); |
2793 | } |
2794 | |
2795 | float get_delta (unsigned int inner, |
2796 | const int *coords, unsigned int coord_count, |
2797 | const VarRegionList ®ions, |
2798 | VarRegionList::cache_t *cache = nullptr) const |
2799 | { |
2800 | if (unlikely (inner >= itemCount)) |
2801 | return 0.; |
2802 | |
2803 | unsigned int count = regionIndices.len; |
2804 | bool is_long = longWords (); |
2805 | unsigned word_count = wordCount (); |
2806 | unsigned int scount = is_long ? count : word_count; |
2807 | unsigned int lcount = is_long ? word_count : 0; |
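| /* Per row, deltas for the first wordCount() regions are stored at double width: |
|  * HBINT32 then HBINT16 when LONG_WORDS is set, otherwise HBINT16 then HBINT8. */ |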
2808 | |
2809 | const HBUINT8 *bytes = get_delta_bytes (); |
2810 | const HBUINT8 *row = bytes + inner * get_row_size (); |
2811 | |
2812 | float delta = 0.; |
2813 | unsigned int i = 0; |
2814 | |
2815 | const HBINT32 *lcursor = reinterpret_cast<const HBINT32 *> (row); |
2816 | for (; i < lcount; i++) |
2817 | { |
2818 | float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache); |
2819 | delta += scalar * *lcursor++; |
2820 | } |
2821 | const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (lcursor); |
2822 | for (; i < scount; i++) |
2823 | { |
2824 | float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache); |
2825 | delta += scalar * *scursor++; |
2826 | } |
2827 | const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor); |
2828 | for (; i < count; i++) |
2829 | { |
2830 | float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache); |
2831 | delta += scalar * *bcursor++; |
2832 | } |
2833 | |
2834 | return delta; |
2835 | } |
2836 | |
2837 | void get_region_scalars (const int *coords, unsigned int coord_count, |
2838 | const VarRegionList ®ions, |
2839 | float *scalars /*OUT */, |
2840 | unsigned int num_scalars) const |
2841 | { |
2842 | unsigned count = hb_min (num_scalars, regionIndices.len); |
2843 | for (unsigned int i = 0; i < count; i++) |
2844 | scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count); |
2845 | for (unsigned int i = count; i < num_scalars; i++) |
2846 | scalars[i] = 0.f; |
2847 | } |
2848 | |
2849 | bool sanitize (hb_sanitize_context_t *c) const |
2850 | { |
2851 | TRACE_SANITIZE (this); |
2852 | return_trace (c->check_struct (this) && |
2853 | regionIndices.sanitize (c) && |
2854 | hb_barrier () && |
2855 | wordCount () <= regionIndices.len && |
2856 | c->check_range (get_delta_bytes (), |
2857 | itemCount, |
2858 | get_row_size ())); |
2859 | } |
2860 | |
2861 | bool serialize (hb_serialize_context_t *c, |
2862 | bool has_long, |
2863 | const hb_vector_t<const hb_vector_t<int>*>& rows) |
2864 | { |
2865 | TRACE_SERIALIZE (this); |
2866 | unsigned row_count = rows.length; |
2867 | if (!row_count) { |
2868 | // Nothing to serialize, will be empty. |
2869 | return false; |
2870 | } |
2871 | |
2872 | if (unlikely (!c->extend_min (this))) return_trace (false); |
2873 | itemCount = row_count; |
2874 | |
2875 | int min_threshold = has_long ? -65536 : -128; |
2876 | int max_threshold = has_long ? +65535 : +127; |
2877 | enum delta_size_t { kZero=0, kNonWord, kWord }; |
2878 | hb_vector_t<delta_size_t> delta_sz; |
2879 | unsigned num_regions = rows[0]->length; |
2880 | if (!delta_sz.resize (num_regions)) |
2881 | return_trace (false); |
2882 | |
2883 | unsigned word_count = 0; |
2884 | for (unsigned r = 0; r < num_regions; r++) |
2885 | { |
2886 | for (unsigned i = 0; i < row_count; i++) |
2887 | { |
2888 | int delta = rows[i]->arrayZ[r]; |
2889 | if (delta < min_threshold || delta > max_threshold) |
2890 | { |
2891 | delta_sz[r] = kWord; |
2892 | word_count++; |
2893 | break; |
2894 | } |
2895 | else if (delta != 0) |
2896 | { |
2897 | delta_sz[r] = kNonWord; |
2898 | } |
2899 | } |
2900 | } |
2901 | |
2902 | /* reorder regions: words and then non-words*/ |
2903 | unsigned word_index = 0; |
2904 | unsigned non_word_index = word_count; |
2905 | hb_map_t ri_map; |
2906 | for (unsigned r = 0; r < num_regions; r++) |
2907 | { |
2908 | if (!delta_sz[r]) continue; |
2909 | unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++; |
2910 | if (!ri_map.set (new_r, r)) |
2911 | return_trace (false); |
2912 | } |
2913 | |
2914 | wordSizeCount = word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0); |
2915 | |
2916 | unsigned ri_count = ri_map.get_population (); |
2917 | regionIndices.len = ri_count; |
2918 | if (unlikely (!c->extend (this))) return_trace (false); |
2919 | |
2920 | for (unsigned r = 0; r < ri_count; r++) |
2921 | { |
2922 | hb_codepoint_t *idx; |
2923 | if (!ri_map.has (r, &idx)) |
2924 | return_trace (false); |
2925 | regionIndices[r] = *idx; |
2926 | } |
2927 | |
2928 | HBUINT8 *delta_bytes = get_delta_bytes (); |
2929 | unsigned row_size = get_row_size (); |
2930 | for (unsigned int i = 0; i < row_count; i++) |
2931 | { |
2932 | for (unsigned int r = 0; r < ri_count; r++) |
2933 | { |
2934 | int delta = rows[i]->arrayZ[ri_map[r]]; |
2935 | set_item_delta_fast (i, r, delta, delta_bytes, row_size); |
2936 | } |
2937 | } |
2938 | return_trace (true); |
2939 | } |
2940 | |
2941 | bool serialize (hb_serialize_context_t *c, |
2942 | const VarData *src, |
2943 | const hb_inc_bimap_t &inner_map, |
2944 | const hb_inc_bimap_t ®ion_map) |
2945 | { |
2946 | TRACE_SERIALIZE (this); |
2947 | if (unlikely (!c->extend_min (this))) return_trace (false); |
2948 | itemCount = inner_map.get_next_value (); |
2949 | |
2950 | /* Optimize word count */ |
2951 | unsigned ri_count = src->regionIndices.len; |
2952 | enum delta_size_t { kZero=0, kNonWord, kWord }; |
2953 | hb_vector_t<delta_size_t> delta_sz; |
2954 | hb_vector_t<unsigned int> ri_map; /* maps new index to old index */ |
2955 | delta_sz.resize (ri_count); |
2956 | ri_map.resize (ri_count); |
2957 | unsigned int new_word_count = 0; |
2958 | unsigned int r; |
2959 | |
2960 | const HBUINT8 *src_delta_bytes = src->get_delta_bytes (); |
2961 | unsigned src_row_size = src->get_row_size (); |
2962 | unsigned src_word_count = src->wordCount (); |
2963 | bool src_long_words = src->longWords (); |
2964 | |
2965 | bool has_long = false; |
2966 | if (src_long_words) |
2967 | { |
2968 | for (r = 0; r < src_word_count; r++) |
2969 | { |
2970 | for (unsigned old_gid : inner_map.keys()) |
2971 | { |
2972 | int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size); |
2973 | if (delta < -65536 || 65535 < delta) |
2974 | { |
2975 | has_long = true; |
2976 | break; |
2977 | } |
2978 | } |
2979 | } |
2980 | } |
2981 | |
2982 | signed min_threshold = has_long ? -65536 : -128; |
2983 | signed max_threshold = has_long ? +65535 : +127; |
2984 | for (r = 0; r < ri_count; r++) |
2985 | { |
2986 | bool short_circuit = src_long_words == has_long && src_word_count <= r; |
2987 | |
2988 | delta_sz[r] = kZero; |
2989 | for (unsigned old_gid : inner_map.keys()) |
2990 | { |
2991 | int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size); |
2992 | if (delta < min_threshold || max_threshold < delta) |
2993 | { |
2994 | delta_sz[r] = kWord; |
2995 | new_word_count++; |
2996 | break; |
2997 | } |
2998 | else if (delta != 0) |
2999 | { |
3000 | delta_sz[r] = kNonWord; |
3001 | if (short_circuit) |
3002 | break; |
3003 | } |
3004 | } |
3005 | } |
3006 | |
3007 | unsigned int word_index = 0; |
3008 | unsigned int non_word_index = new_word_count; |
3009 | unsigned int new_ri_count = 0; |
3010 | for (r = 0; r < ri_count; r++) |
3011 | if (delta_sz[r]) |
3012 | { |
3013 | unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++; |
3014 | ri_map[new_r] = r; |
3015 | new_ri_count++; |
3016 | } |
3017 | |
3018 | wordSizeCount = new_word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0); |
3019 | |
3020 | regionIndices.len = new_ri_count; |
3021 | |
3022 | if (unlikely (!c->extend (this))) return_trace (false); |
3023 | |
3024 | for (r = 0; r < new_ri_count; r++) |
3025 | regionIndices[r] = region_map[src->regionIndices[ri_map[r]]]; |
3026 | |
3027 | HBUINT8 *delta_bytes = get_delta_bytes (); |
3028 | unsigned row_size = get_row_size (); |
3029 | unsigned count = itemCount; |
3030 | for (unsigned int i = 0; i < count; i++) |
3031 | { |
3032 | unsigned int old = inner_map.backward (i); |
3033 | for (unsigned int r = 0; r < new_ri_count; r++) |
3034 | set_item_delta_fast (i, r, |
3035 | src->get_item_delta_fast (old, ri_map[r], |
3036 | src_delta_bytes, src_row_size), |
3037 | delta_bytes, row_size); |
3038 | } |
3039 | |
3040 | return_trace (true); |
3041 | } |
3042 | |
3043 | void collect_region_refs (hb_set_t ®ion_indices, const hb_inc_bimap_t &inner_map) const |
3044 | { |
3045 | const HBUINT8 *delta_bytes = get_delta_bytes (); |
3046 | unsigned row_size = get_row_size (); |
3047 | |
3048 | for (unsigned int r = 0; r < regionIndices.len; r++) |
3049 | { |
3050 | unsigned int region = regionIndices.arrayZ[r]; |
3051 | if (region_indices.has (region)) continue; |
3052 | for (hb_codepoint_t old_gid : inner_map.keys()) |
3053 | if (get_item_delta_fast (old_gid, r, delta_bytes, row_size) != 0) |
3054 | { |
3055 | region_indices.add (region); |
3056 | break; |
3057 | } |
3058 | } |
3059 | } |
3060 | |
3061 | public: |
3062 | const HBUINT8 *get_delta_bytes () const |
3063 | { return &StructAfter<HBUINT8> (regionIndices); } |
3064 | |
3065 | protected: |
3066 | HBUINT8 *get_delta_bytes () |
3067 | { return &StructAfter<HBUINT8> (regionIndices); } |
3068 | |
3069 | public: |
3070 | int32_t get_item_delta_fast (unsigned int item, unsigned int region, |
3071 | const HBUINT8 *delta_bytes, unsigned row_size) const |
3072 | { |
3073 | if (unlikely (item >= itemCount || region >= regionIndices.len)) return 0; |
3074 | |
3075 | const HBINT8 *p = (const HBINT8 *) delta_bytes + item * row_size; |
3076 | unsigned word_count = wordCount (); |
3077 | bool is_long = longWords (); |
3078 | if (is_long) |
3079 | { |
3080 | if (region < word_count) |
3081 | return ((const HBINT32 *) p)[region]; |
3082 | else |
3083 | return ((const HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count]; |
3084 | } |
3085 | else |
3086 | { |
3087 | if (region < word_count) |
3088 | return ((const HBINT16 *) p)[region]; |
3089 | else |
3090 | return (p + HBINT16::static_size * word_count)[region - word_count]; |
3091 | } |
3092 | } |
3093 | int32_t get_item_delta (unsigned int item, unsigned int region) const |
3094 | { |
3095 | return get_item_delta_fast (item, region, |
3096 | get_delta_bytes (), |
3097 | get_row_size ()); |
3098 | } |
3099 | |
3100 | protected: |
3101 | void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta, |
3102 | HBUINT8 *delta_bytes, unsigned row_size) |
3103 | { |
3104 | HBINT8 *p = (HBINT8 *) delta_bytes + item * row_size; |
3105 | unsigned word_count = wordCount (); |
3106 | bool is_long = longWords (); |
3107 | if (is_long) |
3108 | { |
3109 | if (region < word_count) |
3110 | ((HBINT32 *) p)[region] = delta; |
3111 | else |
3112 | ((HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count] = delta; |
3113 | } |
3114 | else |
3115 | { |
3116 | if (region < word_count) |
3117 | ((HBINT16 *) p)[region] = delta; |
3118 | else |
3119 | (p + HBINT16::static_size * word_count)[region - word_count] = delta; |
3120 | } |
3121 | } |
3122 | void set_item_delta (unsigned int item, unsigned int region, int32_t delta) |
3123 | { |
3124 | set_item_delta_fast (item, region, delta, |
3125 | get_delta_bytes (), |
3126 | get_row_size ()); |
3127 | } |
3128 | |
3129 | bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; } |
3130 | unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; } |
3131 | |
3132 | protected: |
3133 | HBUINT16 itemCount; |
3134 | HBUINT16 wordSizeCount; |
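| /* High bit: LONG_WORDS flag; low 15 bits: number of word-sized deltas per row. */ |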
3135 | Array16Of<HBUINT16> regionIndices; |
3136 | /*UnsizedArrayOf<HBUINT8>bytesX;*/ |
3137 | public: |
3138 | DEFINE_SIZE_ARRAY (6, regionIndices); |
3139 | }; |
3140 | |
3141 | struct MultiVarData |
3142 | { |
3143 | unsigned int get_size () const |
3144 | { return min_size |
3145 | - regionIndices.min_size + regionIndices.get_size () |
3146 | + StructAfter<CFF2Index> (regionIndices).get_size (); |
3147 | } |
3148 | |
3149 | void get_delta (unsigned int inner, |
3150 | const int *coords, unsigned int coord_count, |
3151 | const SparseVarRegionList ®ions, |
3152 | hb_array_t<float> out, |
3153 | SparseVarRegionList::cache_t *cache = nullptr) const |
3154 | { |
3155 | auto &deltaSets = StructAfter<decltype (deltaSetsX)> (regionIndices); |
3156 | |
3157 | auto values_iter = deltaSets.fetcher (inner); |
3158 | unsigned regionCount = regionIndices.len; |
3159 | for (unsigned regionIndex = 0; regionIndex < regionCount; regionIndex++) |
3160 | { |
3161 | float scalar = regions.evaluate (regionIndices.arrayZ[regionIndex], |
3162 | coords, coord_count, |
3163 | cache); |
3164 | values_iter.add_to (out, scalar); |
3165 | } |
3166 | } |
3167 | |
3168 | bool sanitize (hb_sanitize_context_t *c) const |
3169 | { |
3170 | TRACE_SANITIZE (this); |
3171 | return_trace (format.sanitize (c) && |
3172 | hb_barrier () && |
3173 | format == 1 && |
3174 | regionIndices.sanitize (c) && |
3175 | hb_barrier () && |
3176 | StructAfter<decltype (deltaSetsX)> (regionIndices).sanitize (c)); |
3177 | } |
3178 | |
3179 | protected: |
3180 | HBUINT8 format; // 1 |
3181 | Array16Of<HBUINT16> regionIndices; |
3182 | TupleList deltaSetsX; |
3183 | public: |
3184 | DEFINE_SIZE_MIN (8); |
3185 | }; |
3186 | |
3187 | struct ItemVariationStore |
3188 | { |
3189 | friend struct item_variations_t; |
3190 | using cache_t = VarRegionList::cache_t; |
3191 | |
3192 | cache_t *create_cache () const |
3193 | { |
3194 | #ifdef HB_NO_VAR |
3195 | return nullptr; |
3196 | #endif |
3197 | unsigned count = (this+regions).regionCount; |
3198 | if (!count) return nullptr; |
3199 | |
3200 | cache_t *cache = (cache_t *) hb_malloc (sizeof (float) * count); |
3201 | if (unlikely (!cache)) return nullptr; |
3202 | |
3203 | for (unsigned i = 0; i < count; i++) |
3204 | cache[i] = REGION_CACHE_ITEM_CACHE_INVALID; |
3205 | |
3206 | return cache; |
3207 | } |
3208 | |
3209 | static void destroy_cache (cache_t *cache) { hb_free (cache); } |
3210 | |
3211 | private: |
3212 | float get_delta (unsigned int outer, unsigned int inner, |
3213 | const int *coords, unsigned int coord_count, |
3214 | VarRegionList::cache_t *cache = nullptr) const |
3215 | { |
3216 | #ifdef HB_NO_VAR |
3217 | return 0.f; |
3218 | #endif |
3219 | |
3220 | if (unlikely (outer >= dataSets.len)) |
3221 | return 0.f; |
3222 | |
3223 | return (this+dataSets[outer]).get_delta (inner, |
3224 | coords, coord_count, |
3225 | this+regions, |
3226 | cache); |
3227 | } |
3228 | |
3229 | public: |
3230 | float get_delta (unsigned int index, |
3231 | const int *coords, unsigned int coord_count, |
3232 | VarRegionList::cache_t *cache = nullptr) const |
3233 | { |
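| /* A variation index packs the outer (VarData) index in the high 16 bits and the |
|  * inner (row) index in the low 16 bits. */ |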
3234 | unsigned int outer = index >> 16; |
3235 | unsigned int inner = index & 0xFFFF; |
3236 | return get_delta (outer, inner, coords, coord_count, cache); |
3237 | } |
3238 | float get_delta (unsigned int index, |
3239 | hb_array_t<const int> coords, |
3240 | VarRegionList::cache_t *cache = nullptr) const |
3241 | { |
3242 | return get_delta (index, |
3243 | coords.arrayZ, coords.length, |
3244 | cache); |
3245 | } |
3246 | |
3247 | bool sanitize (hb_sanitize_context_t *c) const |
3248 | { |
3249 | #ifdef HB_NO_VAR |
3250 | return true; |
3251 | #endif |
3252 | |
3253 | TRACE_SANITIZE (this); |
3254 | return_trace (c->check_struct (this) && |
3255 | hb_barrier () && |
3256 | format == 1 && |
3257 | regions.sanitize (c, this) && |
3258 | dataSets.sanitize (c, this)); |
3259 | } |
3260 | |
3261 | bool serialize (hb_serialize_context_t *c, |
3262 | bool has_long, |
3263 | const hb_vector_t<hb_tag_t>& axis_tags, |
3264 | const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& region_list, |
3265 | const hb_vector_t<delta_row_encoding_t>& vardata_encodings) |
3266 | { |
3267 | TRACE_SERIALIZE (this); |
3268 | #ifdef HB_NO_VAR |
3269 | return_trace (false); |
3270 | #endif |
3271 | if (unlikely (!c->extend_min (this))) return_trace (false); |
3272 | |
3273 | format = 1; |
3274 | if (!regions.serialize_serialize (c, axis_tags, region_list)) |
3275 | return_trace (false); |
3276 | |
3277 | unsigned num_var_data = vardata_encodings.length; |
3278 | if (!num_var_data) return_trace (false); |
3279 | if (unlikely (!c->check_assign (dataSets.len, num_var_data, |
3280 | HB_SERIALIZE_ERROR_INT_OVERFLOW))) |
3281 | return_trace (false); |
3282 | |
3283 | if (unlikely (!c->extend (dataSets))) return_trace (false); |
3284 | for (unsigned i = 0; i < num_var_data; i++) |
3285 | if (!dataSets[i].serialize_serialize (c, has_long, vardata_encodings[i].items)) |
3286 | return_trace (false); |
3287 | |
3288 | return_trace (true); |
3289 | } |
3290 | |
3291 | bool serialize (hb_serialize_context_t *c, |
3292 | const ItemVariationStore *src, |
3293 | const hb_array_t <const hb_inc_bimap_t> &inner_maps) |
3294 | { |
3295 | TRACE_SERIALIZE (this); |
3296 | #ifdef HB_NO_VAR |
3297 | return_trace (false); |
3298 | #endif |
3299 | |
3300 | if (unlikely (!c->extend_min (this))) return_trace (false); |
3301 | |
3302 | unsigned int set_count = 0; |
3303 | for (unsigned int i = 0; i < inner_maps.length; i++) |
3304 | if (inner_maps[i].get_population ()) |
3305 | set_count++; |
3306 | |
3307 | format = 1; |
3308 | |
3309 | const auto &src_regions = src+src->regions; |
3310 | |
3311 | hb_set_t region_indices; |
3312 | for (unsigned int i = 0; i < inner_maps.length; i++) |
3313 | (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]); |
3314 | |
3315 | if (region_indices.in_error ()) |
3316 | return_trace (false); |
3317 | |
3318 | region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID); |
3319 | |
3320 | /* TODO use constructor when our data-structures support that. */ |
3321 | hb_inc_bimap_t region_map; |
3322 | + hb_iter (region_indices) |
3323 | | hb_apply ([®ion_map] (unsigned _) { region_map.add(_); }) |
3324 | ; |
3325 | if (region_map.in_error()) |
3326 | return_trace (false); |
3327 | |
3328 | if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map))) |
3329 | return_trace (false); |
3330 | |
3331 | dataSets.len = set_count; |
3332 | if (unlikely (!c->extend (dataSets))) return_trace (false); |
3333 | |
3334 | /* TODO: The following code could be simplified when |
3335 | * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */ |
3336 | unsigned int set_index = 0; |
3337 | for (unsigned int i = 0; i < inner_maps.length; i++) |
3338 | { |
3339 | if (!inner_maps[i].get_population ()) continue; |
3340 | if (unlikely (!dataSets[set_index++] |
3341 | .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map))) |
3342 | return_trace (false); |
3343 | } |
3344 | |
3345 | return_trace (true); |
3346 | } |
3347 | |
3348 | ItemVariationStore *copy (hb_serialize_context_t *c) const |
3349 | { |
3350 | TRACE_SERIALIZE (this); |
3351 | auto *out = c->start_embed (this); |
3352 | if (unlikely (!out)) return_trace (nullptr); |
3353 | |
3354 | hb_vector_t <hb_inc_bimap_t> inner_maps; |
3355 | unsigned count = dataSets.len; |
3356 | for (unsigned i = 0; i < count; i++) |
3357 | { |
3358 | hb_inc_bimap_t *map = inner_maps.push (); |
3359 | if (!c->propagate_error(inner_maps)) |
3360 | return_trace(nullptr); |
3361 | auto &data = this+dataSets[i]; |
3362 | |
3363 | unsigned itemCount = data.get_item_count (); |
3364 | for (unsigned j = 0; j < itemCount; j++) |
3365 | map->add (j); |
3366 | } |
3367 | |
3368 | if (unlikely (!out->serialize (c, this, inner_maps))) return_trace (nullptr); |
3369 | |
3370 | return_trace (out); |
3371 | } |
3372 | |
3373 | bool subset (hb_subset_context_t *c, const hb_array_t<const hb_inc_bimap_t> &inner_maps) const |
3374 | { |
3375 | TRACE_SUBSET (this); |
3376 | #ifdef HB_NO_VAR |
3377 | return_trace (false); |
3378 | #endif |
3379 | |
3380 | ItemVariationStore *varstore_prime = c->serializer->start_embed<ItemVariationStore> (); |
3381 | if (unlikely (!varstore_prime)) return_trace (false); |
3382 | |
3383 | varstore_prime->serialize (c->serializer, this, inner_maps); |
3384 | |
3385 | return_trace ( |
3386 | !c->serializer->in_error() |
3387 | && varstore_prime->dataSets); |
3388 | } |
3389 | |
3390 | unsigned int get_region_index_count (unsigned int major) const |
3391 | { |
3392 | #ifdef HB_NO_VAR |
3393 | return 0; |
3394 | #endif |
3395 | return (this+dataSets[major]).get_region_index_count (); |
3396 | } |
3397 | |
3398 | void get_region_scalars (unsigned int major, |
3399 | const int *coords, unsigned int coord_count, |
3400 | float *scalars /*OUT*/, |
3401 | unsigned int num_scalars) const |
3402 | { |
3403 | #ifdef HB_NO_VAR |
3404 | for (unsigned i = 0; i < num_scalars; i++) |
3405 | scalars[i] = 0.f; |
3406 | return; |
3407 | #endif |
3408 | |
3409 | (this+dataSets[major]).get_region_scalars (coords, coord_count, |
3410 | this+regions, |
3411 | &scalars[0], num_scalars); |
3412 | } |
3413 | |
3414 | unsigned int get_sub_table_count () const |
3415 | { |
3416 | #ifdef HB_NO_VAR |
3417 | return 0; |
3418 | #endif |
3419 | return dataSets.len; |
3420 | } |
3421 | |
3422 | const VarData& get_sub_table (unsigned i) const |
3423 | { |
3424 | #ifdef HB_NO_VAR |
3425 | return Null (VarData); |
3426 | #endif |
3427 | return this+dataSets[i]; |
3428 | } |
3429 | |
3430 | const VarRegionList& get_region_list () const |
3431 | { |
3432 | #ifdef HB_NO_VAR |
3433 | return Null (VarRegionList); |
3434 | #endif |
3435 | return this+regions; |
3436 | } |
3437 | |
3438 | protected: |
3439 | HBUINT16 format; |
3440 | Offset32To<VarRegionList> regions; |
3441 | Array16OfOffset32To<VarData> dataSets; |
3442 | public: |
3443 | DEFINE_SIZE_ARRAY_SIZED (8, dataSets); |
3444 | }; |
3445 | |
3446 | struct MultiItemVariationStore |
3447 | { |
3448 | using cache_t = SparseVarRegionList::cache_t; |
3449 | |
3450 | cache_t *create_cache (hb_array_t<cache_t> static_cache = hb_array_t<cache_t> ()) const |
3451 | { |
3452 | #ifdef HB_NO_VAR |
3453 | return nullptr; |
3454 | #endif |
3455 | auto &r = this+regions; |
3456 | unsigned count = r.regions.len; |
3457 | |
3458 | cache_t *cache; |
3459 | if (count <= static_cache.length) |
3460 | cache = static_cache.arrayZ; |
3461 | else |
3462 | { |
3463 | cache = (cache_t *) hb_malloc (sizeof (float) * count); |
3464 | if (unlikely (!cache)) return nullptr; |
3465 | } |
3466 | |
3467 | for (unsigned i = 0; i < count; i++) |
3468 | cache[i] = REGION_CACHE_ITEM_CACHE_INVALID; |
3469 | |
3470 | return cache; |
3471 | } |
3472 | |
3473 | static void destroy_cache (cache_t *cache, |
3474 | hb_array_t<cache_t> static_cache = hb_array_t<cache_t> ()) |
3475 | { |
3476 | if (cache != static_cache.arrayZ) |
3477 | hb_free (cache); |
3478 | } |
3479 | |
3480 | private: |
3481 | void get_delta (unsigned int outer, unsigned int inner, |
3482 | const int *coords, unsigned int coord_count, |
3483 | hb_array_t<float> out, |
3484 | VarRegionList::cache_t *cache = nullptr) const |
3485 | { |
3486 | #ifdef HB_NO_VAR |
3487 | return; |
3488 | #endif |
3489 | |
3490 | if (unlikely (outer >= dataSets.len)) |
3491 | return; |
3492 | |
3493 | return (this+dataSets[outer]).get_delta (inner, |
3494 | coords, coord_count, |
3495 | this+regions, |
3496 | out, |
3497 | cache); |
3498 | } |
3499 | |
3500 | public: |
3501 | void get_delta (unsigned int index, |
3502 | const int *coords, unsigned int coord_count, |
3503 | hb_array_t<float> out, |
3504 | VarRegionList::cache_t *cache = nullptr) const |
3505 | { |
3506 | unsigned int outer = index >> 16; |
3507 | unsigned int inner = index & 0xFFFF; |
3508 | get_delta (outer, inner, coords, coord_count, out, cache); |
3509 | } |
3510 | void get_delta (unsigned int index, |
3511 | hb_array_t<const int> coords, |
3512 | hb_array_t<float> out, |
3513 | VarRegionList::cache_t *cache = nullptr) const |
3514 | { |
3515 | return get_delta (index, |
3516 | coords.arrayZ, coords.length, |
3517 | out, |
3518 | cache); |
3519 | } |
3520 | |
3521 | bool sanitize (hb_sanitize_context_t *c) const |
3522 | { |
3523 | #ifdef HB_NO_VAR |
3524 | return true; |
3525 | #endif |
3526 | |
3527 | TRACE_SANITIZE (this); |
3528 | return_trace (c->check_struct (this) && |
3529 | hb_barrier () && |
3530 | format == 1 && |
3531 | regions.sanitize (c, this) && |
3532 | dataSets.sanitize (c, this)); |
3533 | } |
3534 | |
3535 | protected: |
3536 | HBUINT16 format; // 1 |
3537 | Offset32To<SparseVarRegionList> regions; |
3538 | Array16OfOffset32To<MultiVarData> dataSets; |
3539 | public: |
3540 | DEFINE_SIZE_ARRAY_SIZED (8, dataSets); |
3541 | }; |
3542 | |
3543 | #undef REGION_CACHE_ITEM_CACHE_INVALID |
3544 | |
3545 | template <typename MapCountT> |
3546 | struct DeltaSetIndexMapFormat01 |
3547 | { |
3548 | friend struct DeltaSetIndexMap; |
3549 | |
3550 | unsigned get_size () const |
3551 | { return min_size + mapCount * get_width (); } |
3552 | |
3553 | private: |
3554 | DeltaSetIndexMapFormat01* copy (hb_serialize_context_t *c) const |
3555 | { |
3556 | TRACE_SERIALIZE (this); |
3557 | return_trace (c->embed (this)); |
3558 | } |
3559 | |
3560 | template <typename T> |
3561 | bool serialize (hb_serialize_context_t *c, const T &plan) |
3562 | { |
3563 | unsigned int width = plan.get_width (); |
3564 | unsigned int inner_bit_count = plan.get_inner_bit_count (); |
3565 | const hb_array_t<const uint32_t> output_map = plan.get_output_map (); |
3566 | |
3567 | TRACE_SERIALIZE (this); |
3568 | if (unlikely (output_map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0)))) |
3569 | return_trace (false); |
3570 | if (unlikely (!c->extend_min (this))) return_trace (false); |
3571 | |
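| /* entryFormat packs the entry layout: low 4 bits hold innerIndexBitCount - 1, |
|  * bits 4-5 hold mapEntrySize (in bytes) - 1, per the OpenType DeltaSetIndexMap format. */ |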
3572 | entryFormat = ((width-1)<<4)|(inner_bit_count-1); |
3573 | mapCount = output_map.length; |
3574 | HBUINT8 *p = c->allocate_size<HBUINT8> (width * output_map.length); |
3575 | if (unlikely (!p)) return_trace (false); |
3576 | for (unsigned int i = 0; i < output_map.length; i++) |
3577 | { |
3578 | unsigned int v = output_map.arrayZ[i]; |
3579 | if (v) |
3580 | { |
3581 | unsigned int outer = v >> 16; |
3582 | unsigned int inner = v & 0xFFFF; |
3583 | unsigned int u = (outer << inner_bit_count) | inner; |
3584 | for (unsigned int w = width; w > 0;) |
3585 | { |
3586 | p[--w] = u; |
3587 | u >>= 8; |
3588 | } |
3589 | } |
3590 | p += width; |
3591 | } |
3592 | return_trace (true); |
3593 | } |
3594 | |
3595 | uint32_t map (unsigned int v) const /* Returns 16.16 outer.inner. */ |
3596 | { |
3597 | /* If count is zero, pass value unchanged. This takes |
3598 | * care of direct mapping for advance map. */ |
3599 | if (!mapCount) |
3600 | return v; |
3601 | |
3602 | if (v >= mapCount) |
3603 | v = mapCount - 1; |
3604 | |
3605 | unsigned int u = 0; |
3606 | { /* Fetch it. */ |
3607 | unsigned int w = get_width (); |
3608 | const HBUINT8 *p = mapDataZ.arrayZ + w * v; |
3609 | for (; w; w--) |
3610 | u = (u << 8) + *p++; |
3611 | } |
3612 | |
3613 | { /* Repack it. */ |
3614 | unsigned int n = get_inner_bit_count (); |
3615 | unsigned int outer = u >> n; |
3616 | unsigned int inner = u & ((1 << n) - 1); |
3617 | u = (outer<<16) | inner; |
3618 | } |
3619 | |
3620 | return u; |
3621 | } |
3622 | |
3623 | unsigned get_map_count () const { return mapCount; } |
3624 | unsigned get_width () const { return ((entryFormat >> 4) & 3) + 1; } |
3625 | unsigned get_inner_bit_count () const { return (entryFormat & 0xF) + 1; } |
3626 | |
3627 | |
3628 | bool sanitize (hb_sanitize_context_t *c) const |
3629 | { |
3630 | TRACE_SANITIZE (this); |
3631 | return_trace (c->check_struct (this) && |
3632 | hb_barrier () && |
3633 | c->check_range (mapDataZ.arrayZ, |
3634 | mapCount, |
3635 | get_width ())); |
3636 | } |
3637 | |
3638 | protected: |
3639 | HBUINT8 format; /* Format identifier--format = 0 */ |
3640 | HBUINT8 entryFormat; /* A packed field that describes the compressed |
3641 | * representation of delta-set indices. */ |
3642 | MapCountT mapCount; /* The number of mapping entries. */ |
3643 | UnsizedArrayOf<HBUINT8> |
3644 | mapDataZ; /* The delta-set index mapping data. */ |
3645 | |
3646 | public: |
3647 | DEFINE_SIZE_ARRAY (2+MapCountT::static_size, mapDataZ); |
3648 | }; |
3649 | |
3650 | struct DeltaSetIndexMap |
3651 | { |
3652 | template <typename T> |
3653 | bool serialize (hb_serialize_context_t *c, const T &plan) |
3654 | { |
3655 | TRACE_SERIALIZE (this); |
3656 | unsigned length = plan.get_output_map ().length; |
3657 | u.format = length <= 0xFFFF ? 0 : 1; |
3658 | switch (u.format) { |
3659 | case 0: hb_barrier (); return_trace (u.format0.serialize (c, plan)); |
3660 | case 1: hb_barrier (); return_trace (u.format1.serialize (c, plan)); |
3661 | default:return_trace (false); |
3662 | } |
3663 | } |
3664 | |
3665 | uint32_t map (unsigned v) const |
3666 | { |
3667 | switch (u.format) { |
3668 | case 0: hb_barrier (); return (u.format0.map (v)); |
3669 | case 1: hb_barrier (); return (u.format1.map (v)); |
3670 | default:return v; |
3671 | } |
3672 | } |
3673 | |
3674 | unsigned get_map_count () const |
3675 | { |
3676 | switch (u.format) { |
3677 | case 0: hb_barrier (); return u.format0.get_map_count (); |
3678 | case 1: hb_barrier (); return u.format1.get_map_count (); |
3679 | default:return 0; |
3680 | } |
3681 | } |
3682 | |
3683 | unsigned get_width () const |
3684 | { |
3685 | switch (u.format) { |
3686 | case 0: hb_barrier (); return u.format0.get_width (); |
3687 | case 1: hb_barrier (); return u.format1.get_width (); |
3688 | default:return 0; |
3689 | } |
3690 | } |
3691 | |
3692 | unsigned get_inner_bit_count () const |
3693 | { |
3694 | switch (u.format) { |
3695 | case 0: hb_barrier (); return u.format0.get_inner_bit_count (); |
3696 | case 1: hb_barrier (); return u.format1.get_inner_bit_count (); |
3697 | default:return 0; |
3698 | } |
3699 | } |
3700 | |
3701 | bool sanitize (hb_sanitize_context_t *c) const |
3702 | { |
3703 | TRACE_SANITIZE (this); |
3704 | if (!u.format.sanitize (c)) return_trace (false); |
3705 | hb_barrier (); |
3706 | switch (u.format) { |
3707 | case 0: hb_barrier (); return_trace (u.format0.sanitize (c)); |
3708 | case 1: hb_barrier (); return_trace (u.format1.sanitize (c)); |
3709 | default:return_trace (true); |
3710 | } |
3711 | } |
3712 | |
3713 | DeltaSetIndexMap* copy (hb_serialize_context_t *c) const |
3714 | { |
3715 | TRACE_SERIALIZE (this); |
3716 | switch (u.format) { |
3717 | case 0: hb_barrier (); return_trace (reinterpret_cast<DeltaSetIndexMap *> (u.format0.copy (c))); |
3718 | case 1: hb_barrier (); return_trace (reinterpret_cast<DeltaSetIndexMap *> (u.format1.copy (c))); |
3719 | default:return_trace (nullptr); |
3720 | } |
3721 | } |
3722 | |
3723 | protected: |
3724 | union { |
3725 | HBUINT8 format; /* Format identifier */ |
3726 | DeltaSetIndexMapFormat01<HBUINT16> format0; |
3727 | DeltaSetIndexMapFormat01<HBUINT32> format1; |
3728 | } u; |
3729 | public: |
3730 | DEFINE_SIZE_UNION (1, format); |
3731 | }; |
3732 | |
3733 | |
3734 | struct ItemVarStoreInstancer |
3735 | { |
3736 | ItemVarStoreInstancer (const ItemVariationStore *varStore_, |
3737 | const DeltaSetIndexMap *varIdxMap, |
3738 | hb_array_t<const int> coords, |
3739 | VarRegionList::cache_t *cache = nullptr) : |
3740 | varStore (varStore_), varIdxMap (varIdxMap), coords (coords), cache (cache) |
3741 | { |
3742 | if (!varStore) |
3743 | varStore = &Null(ItemVariationStore); |
3744 | } |
3745 | |
3746 | operator bool () const { return varStore && bool (coords); } |
3747 | |
3748 | float operator[] (uint32_t varIdx) const |
3749 | { return (*this) (varIdx); } |
3750 | |
3751 | float operator() (uint32_t varIdx, unsigned short offset = 0) const |
3752 | { |
3753 | if (!coords || varIdx == VarIdx::NO_VARIATION) |
3754 | return 0.f; |
3755 | |
3756 | varIdx += offset; |
3757 | if (varIdxMap) |
3758 | varIdx = varIdxMap->map (varIdx); |
3759 | return varStore->get_delta (varIdx, coords, cache); |
3760 | } |
3761 | |
3762 | const ItemVariationStore *varStore; |
3763 | const DeltaSetIndexMap *varIdxMap; |
3764 | hb_array_t<const int> coords; |
3765 | VarRegionList::cache_t *cache; |
3766 | }; |
3767 | |
3768 | struct MultiItemVarStoreInstancer |
3769 | { |
3770 | MultiItemVarStoreInstancer (const MultiItemVariationStore *varStore, |
3771 | const DeltaSetIndexMap *varIdxMap, |
3772 | hb_array_t<const int> coords, |
3773 | SparseVarRegionList::cache_t *cache = nullptr) : |
3774 | varStore (varStore), varIdxMap (varIdxMap), coords (coords), cache (cache) |
3775 | { |
3776 | if (!varStore) |
3777 | varStore = &Null(MultiItemVariationStore); |
Value stored to 'varStore' is never read | |
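| /* Note on the analyzer finding above: the constructor parameter is also named 'varStore', |
|  * so this assignment writes to the parameter rather than the member, and the stored value |
|  * is never read; the sibling ItemVarStoreInstancer avoids this by naming its parameter |
|  * 'varStore_'. */ |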
3778 | } |
3779 | |
3780 | operator bool () const { return varStore && bool (coords); } |
3781 | |
3782 | float operator[] (uint32_t varIdx) const |
3783 | { |
3784 | float v = 0; |
3785 | (*this) (hb_array (&v, 1), varIdx); |
3786 | return v; |
3787 | } |
3788 | |
3789 | void operator() (hb_array_t<float> out, uint32_t varIdx, unsigned short offset = 0) const |
3790 | { |
3791 | if (coords && varIdx != VarIdx::NO_VARIATION) |
3792 | { |
3793 | varIdx += offset; |
3794 | if (varIdxMap) |
3795 | varIdx = varIdxMap->map (varIdx); |
3796 | varStore->get_delta (varIdx, coords, out, cache); |
3797 | } |
3798 | else |
3799 | for (unsigned i = 0; i < out.length; i++) |
3800 | out.arrayZ[i] = 0.f; |
3801 | } |
3802 | |
3803 | const MultiItemVariationStore *varStore; |
3804 | const DeltaSetIndexMap *varIdxMap; |
3805 | hb_array_t<const int> coords; |
3806 | SparseVarRegionList::cache_t *cache; |
3807 | }; |
3808 | |
3809 | |
3810 | /* |
3811 | * Feature Variations |
3812 | */ |
3813 | enum Cond_with_Var_flag_t |
3814 | { |
3815 | KEEP_COND_WITH_VAR = 0, |
3816 | KEEP_RECORD_WITH_VAR = 1, |
3817 | DROP_COND_WITH_VAR = 2, |
3818 | DROP_RECORD_WITH_VAR = 3, |
3819 | }; |
3820 | |
3821 | struct Condition; |
3822 | |
3823 | template <typename Instancer> |
3824 | static bool |
3825 | _hb_recurse_condition_evaluate (const struct Condition &condition, |
3826 | const int *coords, |
3827 | unsigned int coord_len, |
3828 | Instancer *instancer); |
3829 | |
3830 | struct ConditionAxisRange |
3831 | { |
3832 | friend struct Condition; |
3833 | |
3834 | bool subset (hb_subset_context_t *c) const |
3835 | { |
3836 | TRACE_SUBSET (this); |
3837 | auto *out = c->serializer->embed (this); |
3838 | if (unlikely (!out)) return_trace (false); |
3839 | |
3840 | const hb_map_t *index_map = &c->plan->axes_index_map; |
3841 | if (index_map->is_empty ()) return_trace (true); |
3842 | |
3843 | const hb_map_t& axes_old_index_tag_map = c->plan->axes_old_index_tag_map; |
3844 | hb_codepoint_t *axis_tag; |
3845 | if (!axes_old_index_tag_map.has (axisIndex, &axis_tag) || |
3846 | !index_map->has (axisIndex)) |
3847 | return_trace (false); |
3848 | |
3849 | const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location = c->plan->axes_location; |
3850 | Triple axis_limit{-1.0, 0.0, 1.0}; |
3851 | Triple *normalized_limit; |
3852 | if (normalized_axes_location.has (*axis_tag, &normalized_limit)) |
3853 | axis_limit = *normalized_limit; |
3854 | |
3855 | const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances = c->plan->axes_triple_distances; |
3856 | TripleDistances axis_triple_distances{1.0, 1.0}; |
3857 | TripleDistances *triple_dists; |
3858 | if (axes_triple_distances.has (*axis_tag, &triple_dists)) |
3859 | axis_triple_distances = *triple_dists; |
3860 | |
3861 | float normalized_min = renormalizeValue ((double) filterRangeMinValue.to_float (), axis_limit, axis_triple_distances, false); |
3862 | float normalized_max = renormalizeValue ((double) filterRangeMaxValue.to_float (), axis_limit, axis_triple_distances, false); |
3863 | out->filterRangeMinValue.set_float (normalized_min); |
3864 | out->filterRangeMaxValue.set_float (normalized_max); |
3865 | |
3866 | return_trace (c->serializer->check_assign (out->axisIndex, index_map->get (axisIndex), |
3867 | HB_SERIALIZE_ERROR_INT_OVERFLOW)); |
3868 | } |
3869 | |
3870 | private: |
3871 | Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c, |
3872 | hb_map_t *condition_map /* OUT */) const |
3873 | { |
3874 | //invalid axis index, drop the entire record |
3875 | if (!c->axes_index_tag_map->has (axisIndex)) |
3876 | return DROP_RECORD_WITH_VAR; |
3877 | |
3878 | hb_tag_t axis_tag = c->axes_index_tag_map->get (axisIndex); |
3879 | |
3880 | Triple axis_range (-1.0, 0.0, 1.0); |
3881 | Triple *axis_limit; |
3882 | bool axis_set_by_user = false; |
3883 | if (c->axes_location->has (axis_tag, &axis_limit)) |
3884 | { |
3885 | axis_range = *axis_limit; |
3886 | axis_set_by_user = true; |
3887 | } |
3888 | |
3889 | float axis_min_val = axis_range.minimum; |
3890 | float axis_default_val = axis_range.middle; |
3891 | float axis_max_val = axis_range.maximum; |
3892 | |
3893 | float filter_min_val = filterRangeMinValue.to_float (); |
3894 | float filter_max_val = filterRangeMaxValue.to_float (); |
3895 | |
3896 | if (axis_default_val < filter_min_val || |
3897 | axis_default_val > filter_max_val) |
3898 | c->apply = false; |
3899 | |
3900 | //condition not met, drop the entire record |
3901 | if (axis_min_val > filter_max_val || axis_max_val < filter_min_val || |
3902 | filter_min_val > filter_max_val) |
3903 | return DROP_RECORD_WITH_VAR; |
3904 | |
3905 | //condition met and axis pinned, drop the condition |
3906 | if (axis_set_by_user && axis_range.is_point ()) |
3907 | return DROP_COND_WITH_VAR; |
3908 | |
3909 | if (filter_max_val != axis_max_val || filter_min_val != axis_min_val) |
3910 | { |
3911 | // add axisIndex->value into the hashmap so we can check if the record is |
3912 | // unique with variations |
3913 | uint16_t int_filter_max_val = (uint16_t) filterRangeMaxValue.to_int (); |
3914 | uint16_t int_filter_min_val = (uint16_t) filterRangeMinValue.to_int (); |
3915 | hb_codepoint_t val = (int_filter_max_val << 16) + int_filter_min_val; |
3916 | |
3917 | condition_map->set (axisIndex, val); |
3918 | return KEEP_COND_WITH_VAR; |
3919 | } |
3920 | return KEEP_RECORD_WITH_VAR; |
3921 | } |
3922 | |
3923 | template <typename Instancer> |
3924 | bool evaluate (const int *coords, unsigned int coord_len, |
3925 | Instancer *instancer HB_UNUSED) const
3926 | { |
3927 | int coord = axisIndex < coord_len ? coords[axisIndex] : 0; |
3928 | return filterRangeMinValue.to_int () <= coord && coord <= filterRangeMaxValue.to_int (); |
3929 | } |
3930 | |
3931 | bool sanitize (hb_sanitize_context_t *c) const |
3932 | { |
3933 | TRACE_SANITIZE (this);
3934 | return_trace (c->check_struct (this));
3935 | } |
3936 | |
3937 | protected: |
3938 | HBUINT16 format; /* Format identifier--format = 1 */ |
3939 | HBUINT16 axisIndex; |
3940 | F2DOT14 filterRangeMinValue; |
3941 | F2DOT14 filterRangeMaxValue; |
3942 | public: |
3943 | DEFINE_SIZE_STATIC (8);
3944 | }; |
3945 | |
3946 | struct ConditionValue |
3947 | { |
3948 | friend struct Condition; |
3949 | |
3950 | bool subset (hb_subset_context_t *c) const |
3951 | { |
3952 | TRACE_SUBSET (this);
3953 | // TODO(subset)
3954 | return_trace (false);
3955 | } |
3956 | |
3957 | private: |
3958 | template <typename Instancer> |
3959 | bool evaluate (const int *coords, unsigned int coord_len, |
3960 | Instancer *instancer) const |
3961 | { |
3962 | signed value = defaultValue; |
3963 | value += (*instancer)[varIdx]; |
3964 | return value > 0; |
3965 | } |
3966 | |
3967 | bool subset (hb_subset_context_t *c, |
3968 | hb_subset_layout_context_t *l, |
3969 | bool insert_catch_all) const |
3970 | { |
3971 | TRACE_SUBSET (this);
3972 | // TODO(subset)
3973 | return_trace (false);
3974 | } |
3975 | |
3976 | bool sanitize (hb_sanitize_context_t *c) const |
3977 | { |
3978 | TRACE_SANITIZE (this);
3979 | return_trace (c->check_struct (this));
3980 | } |
3981 | |
3982 | protected: |
3983 | HBUINT16 format; /* Format identifier--format = 2 */ |
3984 | HBINT16 defaultValue; /* Value at default instance. */ |
3985 | VarIdx varIdx; /* Variation index */ |
3986 | public: |
3987 | DEFINE_SIZE_STATIC (8);
3988 | }; |
3989 | |
3990 | struct ConditionAnd |
3991 | { |
3992 | friend struct Condition; |
3993 | |
3994 | bool subset (hb_subset_context_t *c) const |
3995 | { |
3996 | TRACE_SUBSET (this);
3997 | // TODO(subset)
3998 | return_trace (false);
3999 | } |
4000 | |
4001 | private: |
4002 | template <typename Instancer> |
4003 | bool evaluate (const int *coords, unsigned int coord_len, |
4004 | Instancer *instancer) const |
4005 | { |
4006 | unsigned int count = conditions.len; |
4007 | for (unsigned int i = 0; i < count; i++) |
4008 | if (!_hb_recurse_condition_evaluate (this+conditions.arrayZ[i], |
4009 | coords, coord_len, |
4010 | instancer)) |
4011 | return false; |
4012 | return true; |
4013 | } |
4014 | |
4015 | bool subset (hb_subset_context_t *c, |
4016 | hb_subset_layout_context_t *l, |
4017 | bool insert_catch_all) const |
4018 | { |
4019 | TRACE_SUBSET (this);
4020 | // TODO(subset)
4021 | return_trace (false);
4022 | } |
4023 | |
4024 | bool sanitize (hb_sanitize_context_t *c) const |
4025 | { |
4026 | TRACE_SANITIZE (this);
4027 | return_trace (conditions.sanitize (c, this));
4028 | } |
4029 | |
4030 | protected: |
4031 | HBUINT16 format; /* Format identifier--format = 3 */ |
4032 | Array8OfOffset24To<struct Condition> conditions; |
4033 | public: |
4034 | DEFINE_SIZE_ARRAY (3, conditions);
4035 | }; |
4036 | |
4037 | struct ConditionOr |
4038 | { |
4039 | friend struct Condition; |
4040 | |
4041 | bool subset (hb_subset_context_t *c) const |
4042 | { |
4043 | TRACE_SUBSET (this);
4044 | // TODO(subset)
4045 | return_trace (false);
4046 | } |
4047 | |
4048 | private: |
4049 | template <typename Instancer> |
4050 | bool evaluate (const int *coords, unsigned int coord_len, |
4051 | Instancer *instancer) const |
4052 | { |
4053 | unsigned int count = conditions.len; |
4054 | for (unsigned int i = 0; i < count; i++) |
4055 | if (_hb_recurse_condition_evaluate (this+conditions.arrayZ[i], |
4056 | coords, coord_len, |
4057 | instancer)) |
4058 | return true; |
4059 | return false; |
4060 | } |
4061 | |
4062 | bool subset (hb_subset_context_t *c, |
4063 | hb_subset_layout_context_t *l, |
4064 | bool insert_catch_all) const |
4065 | { |
4066 | TRACE_SUBSET (this);
4067 | // TODO(subset)
4068 | return_trace (false);
4069 | } |
4070 | |
4071 | bool sanitize (hb_sanitize_context_t *c) const |
4072 | { |
4073 | TRACE_SANITIZE (this);
4074 | return_trace (conditions.sanitize (c, this));
4075 | } |
4076 | |
4077 | protected: |
4078 | HBUINT16 format; /* Format identifier--format = 4 */ |
4079 | Array8OfOffset24To<struct Condition> conditions; |
4080 | public: |
4081 | DEFINE_SIZE_ARRAY (3, conditions);
4082 | }; |
4083 | |
4084 | struct ConditionNegate |
4085 | { |
4086 | friend struct Condition; |
4087 | |
4088 | bool subset (hb_subset_context_t *c) const |
4089 | { |
4090 | TRACE_SUBSET (this);
4091 | // TODO(subset)
4092 | return_trace (false);
4093 | } |
4094 | |
4095 | private: |
4096 | template <typename Instancer> |
4097 | bool evaluate (const int *coords, unsigned int coord_len, |
4098 | Instancer *instancer) const |
4099 | { |
4100 | return !_hb_recurse_condition_evaluate (this+condition, |
4101 | coords, coord_len, |
4102 | instancer); |
4103 | } |
4104 | |
4105 | bool subset (hb_subset_context_t *c, |
4106 | hb_subset_layout_context_t *l, |
4107 | bool insert_catch_all) const |
4108 | { |
4109 | TRACE_SUBSET (this);
4110 | // TODO(subset)
4111 | return_trace (false);
4112 | } |
4113 | |
4114 | bool sanitize (hb_sanitize_context_t *c) const |
4115 | { |
4116 | TRACE_SANITIZE (this);
4117 | return_trace (condition.sanitize (c, this));
4118 | } |
4119 | |
4120 | protected: |
4121 | HBUINT16 format; /* Format identifier--format = 5 */ |
4122 | Offset24To<struct Condition> condition; |
4123 | public: |
4124 | DEFINE_SIZE_STATIC (5);
4125 | }; |
4126 | |
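     | /* Format-switched wrapper: format 1 = axis range, 2 = variation-indexed value,
     |  * 3 = AND of sub-conditions, 4 = OR of sub-conditions, 5 = negation. */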
4127 | struct Condition |
4128 | { |
4129 | template <typename Instancer> |
4130 | bool evaluate (const int *coords, unsigned int coord_len, |
4131 | Instancer *instancer) const |
4132 | { |
4133 | switch (u.format) { |
4134 | case 1: hb_barrier (); return u.format1.evaluate (coords, coord_len, instancer); |
4135 | case 2: hb_barrier (); return u.format2.evaluate (coords, coord_len, instancer); |
4136 | case 3: hb_barrier (); return u.format3.evaluate (coords, coord_len, instancer); |
4137 | case 4: hb_barrier (); return u.format4.evaluate (coords, coord_len, instancer); |
4138 | case 5: hb_barrier (); return u.format5.evaluate (coords, coord_len, instancer); |
4139 | default:return false; |
4140 | } |
4141 | } |
4142 | |
4143 | Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c, |
4144 | hb_map_t *condition_map /* OUT */) const |
4145 | { |
4146 | switch (u.format) { |
4147 | case 1: hb_barrier (); return u.format1.keep_with_variations (c, condition_map); |
4148 | // TODO(subset) |
4149 | default: c->apply = false; return KEEP_COND_WITH_VAR; |
4150 | } |
4151 | } |
4152 | |
4153 | template <typename context_t, typename ...Ts> |
4154 | typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const |
4155 | { |
4156 | if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
4157 | TRACE_DISPATCH (this, u.format);
4158 | switch (u.format) {
4159 | case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
4160 | case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
4161 | case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
4162 | case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
4163 | case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
4164 | default:return_trace (c->default_return_value ());
4165 | } |
4166 | } |
4167 | |
4168 | bool sanitize (hb_sanitize_context_t *c) const |
4169 | { |
4170 | TRACE_SANITIZE (this);
4171 | if (!u.format.sanitize (c)) return_trace (false);
4172 | hb_barrier ();
4173 | switch (u.format) {
4174 | case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
4175 | case 2: hb_barrier (); return_trace (u.format2.sanitize (c));
4176 | case 3: hb_barrier (); return_trace (u.format3.sanitize (c));
4177 | case 4: hb_barrier (); return_trace (u.format4.sanitize (c));
4178 | case 5: hb_barrier (); return_trace (u.format5.sanitize (c));
4179 | default:return_trace (true);
4180 | } |
4181 | } |
4182 | |
4183 | protected: |
4184 | union { |
4185 | HBUINT16 format; /* Format identifier */ |
4186 | ConditionAxisRange format1; |
4187 | ConditionValue format2; |
4188 | ConditionAnd format3; |
4189 | ConditionOr format4; |
4190 | ConditionNegate format5; |
4191 | } u; |
4192 | public: |
4193 | DEFINE_SIZE_UNION (2, format);
4194 | }; |
4195 | |
4196 | template <typename Instancer> |
4197 | bool |
4198 | _hb_recurse_condition_evaluate (const struct Condition &condition, |
4199 | const int *coords, |
4200 | unsigned int coord_len, |
4201 | Instancer *instancer) |
4202 | { |
4203 | return condition.evaluate (coords, coord_len, instancer); |
4204 | } |
4205 | |
4206 | struct ConditionList |
4207 | { |
4208 | const Condition& operator[] (unsigned i) const |
4209 | { return this+conditions[i]; } |
4210 | |
4211 | bool sanitize (hb_sanitize_context_t *c) const |
4212 | { |
4213 | TRACE_SANITIZE (this);
4214 | return_trace (conditions.sanitize (c, this));
4215 | } |
4216 | |
4217 | protected: |
4218 | Array32OfOffset32To<Condition> conditions; |
4219 | public: |
4220 | DEFINE_SIZE_ARRAY (4, conditions);
4221 | }; |
4222 | |
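     | /* A ConditionSet matches only if every referenced Condition evaluates to true
     |  * (logical AND); an empty set therefore always matches. */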
4223 | struct ConditionSet |
4224 | { |
4225 | bool evaluate (const int *coords, unsigned int coord_len, |
4226 | ItemVarStoreInstancer *instancer) const |
4227 | { |
4228 | unsigned int count = conditions.len; |
4229 | for (unsigned int i = 0; i < count; i++) |
4230 | if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len, instancer)) |
4231 | return false; |
4232 | return true; |
4233 | } |
4234 | |
4235 | void keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const |
4236 | { |
4237 | hb_map_t *condition_map = hb_map_create (); |
4238 | if (unlikely (!condition_map)) return;
4239 | hb::shared_ptr<hb_map_t> p {condition_map};
4240 |
4241 | hb_set_t *cond_set = hb_set_create ();
4242 | if (unlikely (!cond_set)) return;
4243 | hb::shared_ptr<hb_set_t> s {cond_set}; |
4244 | |
4245 | c->apply = true; |
4246 | bool should_keep = false; |
4247 | unsigned num_kept_cond = 0, cond_idx = 0; |
4248 | for (const auto& offset : conditions) |
4249 | { |
4250 | Cond_with_Var_flag_t ret = (this+offset).keep_with_variations (c, condition_map); |
4251 | // condition is not met or condition out of range, drop the entire record |
4252 | if (ret == DROP_RECORD_WITH_VAR) |
4253 | return; |
4254 | |
4255 | if (ret == KEEP_COND_WITH_VAR) |
4256 | { |
4257 | should_keep = true; |
4258 | cond_set->add (cond_idx); |
4259 | num_kept_cond++; |
4260 | } |
4261 | |
4262 | if (ret == KEEP_RECORD_WITH_VAR) |
4263 | should_keep = true; |
4264 | |
4265 | cond_idx++; |
4266 | } |
4267 | |
4268 | if (!should_keep) return; |
4269 | |
4270 | //check if condition_set is unique with variations |
4271 | if (c->conditionset_map->has (p)) |
4272 | //duplicate found, drop the entire record |
4273 | return; |
4274 | |
4275 | c->conditionset_map->set (p, 1); |
4276 | c->record_cond_idx_map->set (c->cur_record_idx, s); |
4277 | if (should_keep && num_kept_cond == 0) |
4278 | c->universal = true; |
4279 | } |
4280 | |
4281 | bool subset (hb_subset_context_t *c, |
4282 | hb_subset_layout_context_t *l, |
4283 | bool insert_catch_all) const |
4284 | { |
4285 | TRACE_SUBSET (this);
4286 | auto *out = c->serializer->start_embed (this);
4287 | if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
4288 |
4289 | if (insert_catch_all) return_trace (true);
4290 | |
4291 | hb_set_t *retained_cond_set = nullptr; |
4292 | if (l->feature_record_cond_idx_map != nullptr) |
4293 | retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx); |
4294 | |
4295 | unsigned int count = conditions.len; |
4296 | for (unsigned int i = 0; i < count; i++) |
4297 | { |
4298 | if (retained_cond_set != nullptr && !retained_cond_set->has (i)) |
4299 | continue; |
4300 | subset_offset_array (c, out->conditions, this) (conditions[i]); |
4301 | } |
4302 | |
4303 | return_trace (bool (out->conditions));
4304 | } |
4305 | |
4306 | bool sanitize (hb_sanitize_context_t *c) const |
4307 | { |
4308 | TRACE_SANITIZE (this);
4309 | return_trace (conditions.sanitize (c, this));
4310 | } |
4311 | |
4312 | protected: |
4313 | Array16OfOffset32To<Condition> conditions; |
4314 | public: |
4315 | DEFINE_SIZE_ARRAY (2, conditions);
4316 | }; |
4317 | |
4318 | struct FeatureTableSubstitutionRecord |
4319 | { |
4320 | friend struct FeatureTableSubstitution; |
4321 | |
4322 | void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const |
4323 | { |
4324 | return (base+feature).add_lookup_indexes_to (lookup_indexes); |
4325 | } |
4326 | |
4327 | void closure_features (const void *base, |
4328 | const hb_map_t *lookup_indexes, |
4329 | hb_set_t *feature_indexes /* OUT */) const |
4330 | { |
4331 | if ((base+feature).intersects_lookup_indexes (lookup_indexes)) |
4332 | feature_indexes->add (featureIndex); |
4333 | } |
4334 | |
4335 | void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, |
4336 | hb_set_t& catch_all_record_feature_idxes, |
4337 | const hb_set_t *feature_indices, |
4338 | const void *base) const |
4339 | { |
4340 | if (feature_indices->has (featureIndex)) |
4341 | { |
4342 | feature_substitutes_map->set (featureIndex, &(base+feature)); |
4343 | catch_all_record_feature_idxes.add (featureIndex); |
4344 | } |
4345 | } |
4346 | |
4347 | bool serialize (hb_subset_layout_context_t *c, |
4348 | unsigned feature_index, |
4349 | const Feature *f, const Tag *tag) |
4350 | { |
4351 | TRACE_SERIALIZE (this);
4352 | hb_serialize_context_t *s = c->subset_context->serializer;
4353 | if (unlikely (!s->extend_min (this))) return_trace (false);
4354 | |
4355 | uint32_t *new_feature_idx; |
4356 | if (!c->feature_index_map->has (feature_index, &new_feature_idx)) |
4357 | return_trace (false);
4358 |
4359 | if (!s->check_assign (featureIndex, *new_feature_idx, HB_SERIALIZE_ERROR_INT_OVERFLOW))
4360 | return_trace (false);
4361 | |
4362 | s->push (); |
4363 | bool ret = f->subset (c->subset_context, c, tag); |
4364 | if (ret) s->add_link (feature, s->pop_pack ()); |
4365 | else s->pop_discard (); |
4366 | |
4367 | return_trace (ret);
4368 | } |
4369 | |
4370 | bool subset (hb_subset_layout_context_t *c, const void *base) const |
4371 | { |
4372 | TRACE_SUBSET (this);
4373 | uint32_t *new_feature_index;
4374 | if (!c->feature_index_map->has (featureIndex, &new_feature_index))
4375 | return_trace (false);
4376 |
4377 | auto *out = c->subset_context->serializer->embed (this);
4378 | if (unlikely (!out)) return_trace (false);
4379 |
4380 | out->featureIndex = *new_feature_index;
4381 | return_trace (out->feature.serialize_subset (c->subset_context, feature, base, c));
4382 | } |
4383 | |
4384 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
4385 | { |
4386 | TRACE_SANITIZE (this);
4387 | return_trace (c->check_struct (this) && feature.sanitize (c, base));
4388 | } |
4389 | |
4390 | protected: |
4391 | HBUINT16 featureIndex; |
4392 | Offset32To<Feature> feature; |
4393 | public: |
4394 | DEFINE_SIZE_STATIC (6);
4395 | }; |
4396 | |
4397 | struct FeatureTableSubstitution |
4398 | { |
4399 | const Feature *find_substitute (unsigned int feature_index) const |
4400 | { |
4401 | unsigned int count = substitutions.len; |
4402 | for (unsigned int i = 0; i < count; i++) |
4403 | { |
4404 | const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i]; |
4405 | if (record.featureIndex == feature_index) |
4406 | return &(this+record.feature); |
4407 | } |
4408 | return nullptr; |
4409 | } |
4410 | |
4411 | void collect_lookups (const hb_set_t *feature_indexes, |
4412 | hb_set_t *lookup_indexes /* OUT */) const |
4413 | { |
4414 | + hb_iter (substitutions) |
4415 | | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex) |
4416 | | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r) |
4417 | { r.collect_lookups (this, lookup_indexes); }) |
4418 | ; |
4419 | } |
4420 | |
4421 | void closure_features (const hb_map_t *lookup_indexes, |
4422 | hb_set_t *feature_indexes /* OUT */) const |
4423 | { |
4424 | for (const FeatureTableSubstitutionRecord& record : substitutions) |
4425 | record.closure_features (this, lookup_indexes, feature_indexes); |
4426 | } |
4427 | |
4428 | bool intersects_features (const hb_map_t *feature_index_map) const |
4429 | { |
4430 | for (const FeatureTableSubstitutionRecord& record : substitutions) |
4431 | { |
4432 | if (feature_index_map->has (record.featureIndex)) return true; |
4433 | } |
4434 | return false; |
4435 | } |
4436 | |
4437 | void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const |
4438 | { |
4439 | for (const FeatureTableSubstitutionRecord& record : substitutions) |
4440 | record.collect_feature_substitutes_with_variations (c->feature_substitutes_map, |
4441 | c->catch_all_record_feature_idxes, |
4442 | c->feature_indices, this); |
4443 | } |
4444 | |
4445 | bool subset (hb_subset_context_t *c, |
4446 | hb_subset_layout_context_t *l, |
4447 | bool insert_catch_all) const |
4448 | { |
4449 | TRACE_SUBSET (this);
4450 | auto *out = c->serializer->start_embed (*this);
4451 | if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
4452 | |
4453 | out->version.major = version.major; |
4454 | out->version.minor = version.minor; |
4455 | |
4456 | if (insert_catch_all) |
4457 | { |
4458 | for (unsigned feature_index : *(l->catch_all_record_feature_idxes)) |
4459 | { |
4460 | hb_pair_t<const void*, const void*> *p; |
4461 | if (!l->feature_idx_tag_map->has (feature_index, &p)) |
4462 | return_trace (false);
4463 | auto *o = out->substitutions.serialize_append (c->serializer); |
4464 | if (!o->serialize (l, feature_index, |
4465 | reinterpret_cast<const Feature*> (p->first), |
4466 | reinterpret_cast<const Tag*> (p->second))) |
4467 | return_trace (false);
4468 | }
4469 | return_trace (true);
4470 | } |
4471 | |
4472 | + substitutions.iter () |
4473 | | hb_apply (subset_record_array (l, &(out->substitutions), this)) |
4474 | ; |
4475 | |
4476 | return_trace (bool (out->substitutions));
4477 | } |
4478 | |
4479 | bool sanitize (hb_sanitize_context_t *c) const |
4480 | { |
4481 | TRACE_SANITIZE (this);
4482 | return_trace (version.sanitize (c) &&
4483 |               hb_barrier () &&
4484 |               likely (version.major == 1) &&
4485 |               substitutions.sanitize (c, this));
4486 | } |
4487 | |
4488 | protected: |
4489 | FixedVersion<> version; /* Version--0x00010000u */ |
4490 | Array16Of<FeatureTableSubstitutionRecord> |
4491 | substitutions; |
4492 | public: |
4493 | DEFINE_SIZE_ARRAY (6, substitutions);
4494 | }; |
4495 | |
4496 | struct FeatureVariationRecord |
4497 | { |
4498 | friend struct FeatureVariations; |
4499 | |
4500 | void collect_lookups (const void *base, |
4501 | const hb_set_t *feature_indexes, |
4502 | hb_set_t *lookup_indexes /* OUT */) const |
4503 | { |
4504 | return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes); |
4505 | } |
4506 | |
4507 | void closure_features (const void *base, |
4508 | const hb_map_t *lookup_indexes, |
4509 | hb_set_t *feature_indexes /* OUT */) const |
4510 | { |
4511 | (base+substitutions).closure_features (lookup_indexes, feature_indexes); |
4512 | } |
4513 | |
4514 | bool intersects_features (const void *base, const hb_map_t *feature_index_map) const |
4515 | { |
4516 | return (base+substitutions).intersects_features (feature_index_map); |
4517 | } |
4518 | |
4519 | void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c, |
4520 | const void *base) const |
4521 | { |
4522 | (base+conditions).keep_with_variations (c); |
4523 | if (c->apply && !c->variation_applied) |
4524 | { |
4525 | (base+substitutions).collect_feature_substitutes_with_variations (c); |
4526 | c->variation_applied = true; // set variations only once |
4527 | } |
4528 | } |
4529 | |
4530 | bool subset (hb_subset_layout_context_t *c, const void *base, |
4531 | bool insert_catch_all = false) const |
4532 | { |
4533 | TRACE_SUBSET (this);
4534 | auto *out = c->subset_context->serializer->embed (this);
4535 | if (unlikely (!out)) return_trace (false);
4536 | |
4537 | out->conditions.serialize_subset (c->subset_context, conditions, base, c, insert_catch_all); |
4538 | out->substitutions.serialize_subset (c->subset_context, substitutions, base, c, insert_catch_all); |
4539 | |
4540 | return_trace (true);
4541 | } |
4542 | |
4543 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
4544 | { |
4545 | TRACE_SANITIZE (this);
4546 | return_trace (conditions.sanitize (c, base) &&
4547 |               substitutions.sanitize (c, base));
4548 | } |
4549 | |
4550 | protected: |
4551 | Offset32To<ConditionSet> |
4552 | conditions; |
4553 | Offset32To<FeatureTableSubstitution> |
4554 | substitutions; |
4555 | public: |
4556 | DEFINE_SIZE_STATIC (8);
4557 | }; |
4558 | |
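     | /* find_index() scans varRecords in order and reports the first record whose
     |  * ConditionSet matches the given normalized coordinates. */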
4559 | struct FeatureVariations |
4560 | { |
4561 | static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu; |
4562 | |
4563 | bool find_index (const int *coords, unsigned int coord_len, |
4564 | unsigned int *index, |
4565 | ItemVarStoreInstancer *instancer) const |
4566 | { |
4567 | unsigned int count = varRecords.len; |
4568 | for (unsigned int i = 0; i < count; i++) |
4569 | { |
4570 | const FeatureVariationRecord &record = varRecords.arrayZ[i]; |
4571 | if ((this+record.conditions).evaluate (coords, coord_len, instancer)) |
4572 | { |
4573 | *index = i; |
4574 | return true; |
4575 | } |
4576 | } |
4577 | *index = NOT_FOUND_INDEX; |
4578 | return false; |
4579 | } |
4580 | |
4581 | const Feature *find_substitute (unsigned int variations_index, |
4582 | unsigned int feature_index) const |
4583 | { |
4584 | const FeatureVariationRecord &record = varRecords[variations_index]; |
4585 | return (this+record.substitutions).find_substitute (feature_index); |
4586 | } |
4587 | |
4588 | void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const |
4589 | { |
4590 | unsigned int count = varRecords.len; |
4591 | for (unsigned int i = 0; i < count; i++) |
4592 | { |
4593 | c->cur_record_idx = i; |
4594 | varRecords[i].collect_feature_substitutes_with_variations (c, this); |
4595 | if (c->universal) |
4596 | break; |
4597 | } |
4598 | if (c->universal || c->record_cond_idx_map->is_empty ()) |
4599 | c->catch_all_record_feature_idxes.reset (); |
4600 | } |
4601 | |
4602 | FeatureVariations* copy (hb_serialize_context_t *c) const |
4603 | { |
4604 | TRACE_SERIALIZE (this);
4605 | return_trace (c->embed (*this));
4606 | } |
4607 | |
4608 | void collect_lookups (const hb_set_t *feature_indexes, |
4609 | const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, |
4610 | hb_set_t *lookup_indexes /* OUT */) const |
4611 | { |
4612 | unsigned count = varRecords.len; |
4613 | for (unsigned int i = 0; i < count; i++) |
4614 | { |
4615 | if (feature_record_cond_idx_map && |
4616 | !feature_record_cond_idx_map->has (i)) |
4617 | continue; |
4618 | varRecords[i].collect_lookups (this, feature_indexes, lookup_indexes); |
4619 | } |
4620 | } |
4621 | |
4622 | void closure_features (const hb_map_t *lookup_indexes, |
4623 | const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, |
4624 | hb_set_t *feature_indexes /* OUT */) const |
4625 | { |
4626 | unsigned int count = varRecords.len; |
4627 | for (unsigned int i = 0; i < count; i++) |
4628 | { |
4629 | if (feature_record_cond_idx_map != nullptr && |
4630 | !feature_record_cond_idx_map->has (i)) |
4631 | continue; |
4632 | varRecords[i].closure_features (this, lookup_indexes, feature_indexes); |
4633 | } |
4634 | } |
4635 | |
4636 | bool subset (hb_subset_context_t *c, |
4637 | hb_subset_layout_context_t *l) const |
4638 | { |
4639 | TRACE_SUBSET (this);
4640 | auto *out = c->serializer->start_embed (*this);
4641 | if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
4642 | |
4643 | out->version.major = version.major; |
4644 | out->version.minor = version.minor; |
4645 | |
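     | // Keep records only up to the last one whose substitutions still reference a
     | // retained feature; later records could not substitute any retained feature.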
4646 | int keep_up_to = -1; |
4647 | for (int i = varRecords.len - 1; i >= 0; i--) { |
4648 | if (varRecords[i].intersects_features (this, l->feature_index_map)) { |
4649 | keep_up_to = i; |
4650 | break; |
4651 | } |
4652 | } |
4653 | |
4654 | unsigned count = (unsigned) (keep_up_to + 1); |
4655 | for (unsigned i = 0; i < count; i++) |
4656 | { |
4657 | if (l->feature_record_cond_idx_map != nullptr && |
4658 | !l->feature_record_cond_idx_map->has (i)) |
4659 | continue; |
4660 | |
4661 | l->cur_feature_var_record_idx = i; |
4662 | subset_record_array (l, &(out->varRecords), this) (varRecords[i]); |
4663 | } |
4664 | |
4665 | if (out->varRecords.len && !l->catch_all_record_feature_idxes->is_empty ()) |
4666 | { |
4667 | bool insert_catch_all_record = true; |
4668 | subset_record_array (l, &(out->varRecords), this, insert_catch_all_record) (varRecords[0]); |
4669 | } |
4670 | |
4671 | return_trace (bool (out->varRecords));
4672 | } |
4673 | |
4674 | bool sanitize (hb_sanitize_context_t *c) const |
4675 | { |
4676 | TRACE_SANITIZE (this);
4677 | return_trace (version.sanitize (c) &&
4678 |               hb_barrier () &&
4679 |               likely (version.major == 1) &&
4680 |               varRecords.sanitize (c, this));
4681 | } |
4682 | |
4683 | protected: |
4684 | FixedVersion<> version; /* Version--0x00010000u */ |
4685 | Array32Of<FeatureVariationRecord> |
4686 | varRecords; |
4687 | public: |
4688 | DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
4689 | }; |
4690 | |
4691 | |
4692 | /* |
4693 | * Device Tables |
4694 | */ |
4695 | |
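     | /* Device table formats 1-3: per-ppem pixel adjustments for hinted rendering,
     |  * packed 2, 4, or 8 bits per size step between startSize and endSize. */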
4696 | struct HintingDevice |
4697 | { |
4698 | friend struct Device; |
4699 | |
4700 | private: |
4701 | |
4702 | hb_position_t get_x_delta (hb_font_t *font) const |
4703 | { return get_delta (font->x_ppem, font->x_scale); } |
4704 | |
4705 | hb_position_t get_y_delta (hb_font_t *font) const |
4706 | { return get_delta (font->y_ppem, font->y_scale); } |
4707 | |
4708 | public: |
4709 | |
4710 | unsigned int get_size () const |
4711 | { |
4712 | unsigned int f = deltaFormat; |
4713 | if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
4714 | return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f))); |
4715 | } |
4716 | |
4717 | bool sanitize (hb_sanitize_context_t *c) const |
4718 | { |
4719 | TRACE_SANITIZE (this);
4720 | return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
4721 | } |
4722 | |
4723 | HintingDevice* copy (hb_serialize_context_t *c) const |
4724 | { |
4725 | TRACE_SERIALIZE (this);
4726 | return_trace (c->embed<HintingDevice> (this));
4727 | } |
4728 | |
4729 | private: |
4730 | |
4731 | int get_delta (unsigned int ppem, int scale) const |
4732 | { |
4733 | if (!ppem) return 0; |
4734 | |
4735 | int pixels = get_delta_pixels (ppem); |
4736 | |
4737 | if (!pixels) return 0; |
4738 | |
4739 | return (int) (pixels * (int64_t) scale / ppem); |
4740 | } |
4741 | int get_delta_pixels (unsigned int ppem_size) const |
4742 | { |
4743 | unsigned int f = deltaFormat; |
4744 | if (unlikely (f < 1 || f > 3))
4745 | return 0; |
4746 | |
4747 | if (ppem_size < startSize || ppem_size > endSize) |
4748 | return 0; |
4749 | |
4750 | unsigned int s = ppem_size - startSize; |
4751 | |
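     | // With format f, each HBUINT16 word packs (16 >> f) deltas of (1 << f) bits each:
     | // select the word, shift the target field down, mask it, then sign-extend below.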
4752 | unsigned int byte = deltaValueZ[s >> (4 - f)]; |
4753 | unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f))); |
4754 | unsigned int mask = (0xFFFFu >> (16 - (1 << f))); |
4755 | |
4756 | int delta = bits & mask; |
4757 | |
4758 | if ((unsigned int) delta >= ((mask + 1) >> 1)) |
4759 | delta -= mask + 1; |
4760 | |
4761 | return delta; |
4762 | } |
4763 | |
4764 | protected: |
4765 | HBUINT16 startSize; /* Smallest size to correct--in ppem */ |
4766 | HBUINT16 endSize; /* Largest size to correct--in ppem */ |
4767 | HBUINT16 deltaFormat; /* Format of DeltaValue array data: 1, 2, or 3 |
4768 | * 1 Signed 2-bit value, 8 values per uint16 |
4769 | * 2 Signed 4-bit value, 4 values per uint16 |
4770 | * 3 Signed 8-bit value, 2 values per uint16 |
4771 | */ |
4772 | UnsizedArrayOf<HBUINT16> |
4773 | deltaValueZ; /* Array of compressed data */ |
4774 | public: |
4775 | DEFINE_SIZE_ARRAY (6, deltaValueZ);
4776 | }; |
4777 | |
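     | /* Device table format 0x8000: stores only a variation index; the actual delta is
     |  * read from the ItemVariationStore at the font's normalized coordinates. */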
4778 | struct VariationDevice |
4779 | { |
4780 | friend struct Device; |
4781 | |
4782 | private: |
4783 | |
4784 | hb_position_t get_x_delta (hb_font_t *font, |
4785 | const ItemVariationStore &store, |
4786 | ItemVariationStore::cache_t *store_cache = nullptr) const |
4787 | { return !font->num_coords ? 0 : font->em_scalef_x (get_delta (font, store, store_cache)); } |
4788 | |
4789 | hb_position_t get_y_delta (hb_font_t *font, |
4790 | const ItemVariationStore &store, |
4791 | ItemVariationStore::cache_t *store_cache = nullptr) const |
4792 | { return !font->num_coords ? 0 : font->em_scalef_y (get_delta (font, store, store_cache)); } |
4793 | |
4794 | VariationDevice* copy (hb_serialize_context_t *c, |
4795 | const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const |
4796 | { |
4797 | TRACE_SERIALIZE (this);
4798 | if (!layout_variation_idx_delta_map) return_trace (nullptr);
4799 | |
4800 | hb_pair_t<unsigned, int> *v; |
4801 | if (!layout_variation_idx_delta_map->has (varIdx, &v)) |
4802 | return_trace (nullptr);
4803 |
4804 | c->start_zerocopy (this->static_size);
4805 | auto *out = c->embed (this);
4806 | if (unlikely (!out)) return_trace (nullptr);
4807 |
4808 | if (!c->check_assign (out->varIdx, hb_first (*v), HB_SERIALIZE_ERROR_INT_OVERFLOW))
4809 | return_trace (nullptr);
4810 | return_trace (out);
4811 | } |
4812 | |
4813 | void collect_variation_index (hb_collect_variation_indices_context_t *c) const |
4814 | { c->layout_variation_indices->add (varIdx); } |
4815 | |
4816 | bool sanitize (hb_sanitize_context_t *c) const |
4817 | { |
4818 | TRACE_SANITIZE (this);
4819 | return_trace (c->check_struct (this));
4820 | } |
4821 | |
4822 | private: |
4823 | |
4824 | float get_delta (hb_font_t *font, |
4825 | const ItemVariationStore &store, |
4826 | ItemVariationStore::cache_t *store_cache = nullptr) const |
4827 | { |
4828 | return store.get_delta (varIdx, font->coords, font->num_coords, (ItemVariationStore::cache_t *) store_cache); |
4829 | } |
4830 | |
4831 | protected: |
4832 | VarIdx varIdx; /* Variation index */ |
4833 | HBUINT16 deltaFormat; /* Format identifier for this table: 0x8000 */
4834 | public: |
4835 | DEFINE_SIZE_STATIC (6);
4836 | }; |
4837 | |
4838 | struct DeviceHeader |
4839 | { |
4840 | protected: |
4841 | HBUINT16 reserved1; |
4842 | HBUINT16 reserved2; |
4843 | public: |
4844 | HBUINT16 format; /* Format identifier */ |
4845 | public: |
4846 | DEFINE_SIZE_STATIC (6);
4847 | }; |
4848 | |
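     | /* Dispatches on the shared format word: 1-3 select the hinting device,
     |  * 0x8000 the variation device; unknown formats yield a zero delta. */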
4849 | struct Device |
4850 | { |
4851 | hb_position_t get_x_delta (hb_font_t *font, |
4852 | const ItemVariationStore &store=Null (ItemVariationStore),
4853 | ItemVariationStore::cache_t *store_cache = nullptr) const |
4854 | { |
4855 | switch (u.b.format) |
4856 | { |
4857 | #ifndef HB_NO_HINTING |
4858 | case 1: case 2: case 3: |
4859 | return u.hinting.get_x_delta (font); |
4860 | #endif |
4861 | #ifndef HB_NO_VAR |
4862 | case 0x8000: |
4863 | return u.variation.get_x_delta (font, store, store_cache); |
4864 | #endif |
4865 | default: |
4866 | return 0; |
4867 | } |
4868 | } |
4869 | hb_position_t get_y_delta (hb_font_t *font, |
4870 | const ItemVariationStore &store=Null (ItemVariationStore),
4871 | ItemVariationStore::cache_t *store_cache = nullptr) const |
4872 | { |
4873 | switch (u.b.format) |
4874 | { |
4875 | case 1: case 2: case 3: |
4876 | #ifndef HB_NO_HINTING |
4877 | return u.hinting.get_y_delta (font); |
4878 | #endif |
4879 | #ifndef HB_NO_VAR |
4880 | case 0x8000: |
4881 | return u.variation.get_y_delta (font, store, store_cache); |
4882 | #endif |
4883 | default: |
4884 | return 0; |
4885 | } |
4886 | } |
4887 | |
4888 | bool sanitize (hb_sanitize_context_t *c) const |
4889 | { |
4890 | TRACE_SANITIZE (this);
4891 | if (!u.b.format.sanitize (c)) return_trace (false);
4892 | switch (u.b.format) {
4893 | #ifndef HB_NO_HINTING
4894 | case 1: case 2: case 3:
4895 | return_trace (u.hinting.sanitize (c));
4896 | #endif
4897 | #ifndef HB_NO_VAR
4898 | case 0x8000:
4899 | return_trace (u.variation.sanitize (c));
4900 | #endif
4901 | default:
4902 | return_trace (true);
4903 | } |
4904 | } |
4905 | |
4906 | Device* copy (hb_serialize_context_t *c, |
4907 | const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map=nullptr) const |
4908 | { |
4909 | TRACE_SERIALIZE (this);
4910 | switch (u.b.format) {
4911 | #ifndef HB_NO_HINTING
4912 | case 1:
4913 | case 2:
4914 | case 3:
4915 | return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
4916 | #endif
4917 | #ifndef HB_NO_VAR
4918 | case 0x8000:
4919 | return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_delta_map)));
4920 | #endif
4921 | default:
4922 | return_trace (nullptr);
4923 | } |
4924 | } |
4925 | |
4926 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const |
4927 | { |
4928 | switch (u.b.format) { |
4929 | #ifndef HB_NO_HINTING |
4930 | case 1: |
4931 | case 2: |
4932 | case 3: |
4933 | return; |
4934 | #endif |
4935 | #ifndef HB_NO_VAR |
4936 | case 0x8000: |
4937 | u.variation.collect_variation_index (c); |
4938 | return; |
4939 | #endif |
4940 | default: |
4941 | return; |
4942 | } |
4943 | } |
4944 | |
4945 | unsigned get_variation_index () const |
4946 | { |
4947 | switch (u.b.format) { |
4948 | #ifndef HB_NO_VAR |
4949 | case 0x8000: |
4950 | return u.variation.varIdx; |
4951 | #endif |
4952 | default: |
4953 | return HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
4954 | } |
4955 | } |
4956 | |
4957 | protected: |
4958 | union { |
4959 | DeviceHeader b; |
4960 | HintingDevice hinting; |
4961 | #ifndef HB_NO_VAR |
4962 | VariationDevice variation; |
4963 | #endif |
4964 | } u; |
4965 | public: |
4966 | DEFINE_SIZE_UNION (6, b);
4967 | }; |
4968 | |
4969 | |
4970 | } /* namespace OT */ |
4971 | |
4972 | |
4973 | #endif /* HB_OT_LAYOUT_COMMON_HH */ |