File: | root/firefox-clang/gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubstFormat1.hh |
Warning: | line 102, column 5 Value stored to 'count' is never read |
(Viewer chrome: press '?' to see the keyboard-shortcut list.)
1 | #ifndef OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBSTFORMAT1_HH |
2 | #define OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBSTFORMAT1_HH |
3 | |
4 | #include "Common.hh" |
5 | |
6 | namespace OT { |
7 | namespace Layout { |
8 | namespace GSUB_impl { |
9 | |
10 | struct ReverseChainSingleSubstFormat1 |
11 | { |
12 | protected: |
13 | HBUINT16 format; /* Format identifier--format = 1 */ |
14 | Offset16To<Coverage> |
15 | coverage; /* Offset to Coverage table--from |
16 | * beginning of table */ |
17 | Array16OfOffset16To<Coverage> |
18 | backtrack; /* Array of coverage tables |
19 | * in backtracking sequence, in glyph |
20 | * sequence order */ |
21 | Array16OfOffset16To<Coverage> |
22 | lookaheadX; /* Array of coverage tables |
23 | * in lookahead sequence, in glyph |
24 | * sequence order */ |
25 | Array16Of<HBGlyphID16> |
26 | substituteX; /* Array of substitute |
27 | * GlyphIDs--ordered by Coverage Index */ |
28 | public: |
29 | DEFINE_SIZE_MIN (10)void _instance_assertion_on_line_29 () const { static_assert ( (sizeof (*this) >= (10)), ""); } static constexpr unsigned null_size = (10); static constexpr unsigned min_size = (10); |
30 | |
31 | bool sanitize (hb_sanitize_context_t *c) const |
32 | { |
33 | TRACE_SANITIZE (this)hb_no_trace_t<bool> trace; |
34 | if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this))) |
35 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 35); |
36 | hb_barrier (); |
37 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack); |
38 | if (!lookahead.sanitize (c, this)) |
39 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 39); |
40 | hb_barrier (); |
41 | const auto &substitute = StructAfter<decltype (substituteX)> (lookahead); |
42 | return_trace (substitute.sanitize (c))return trace.ret (substitute.sanitize (c), __PRETTY_FUNCTION__ , 42); |
43 | } |
44 | |
45 | bool intersects (const hb_set_t *glyphs) const |
46 | { |
47 | if (!(this+coverage).intersects (glyphs)) |
48 | return false; |
49 | |
50 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack); |
51 | |
52 | unsigned int count; |
53 | |
54 | count = backtrack.len; |
55 | for (unsigned int i = 0; i < count; i++) |
56 | if (!(this+backtrack[i]).intersects (glyphs)) |
57 | return false; |
58 | |
59 | count = lookahead.len; |
60 | for (unsigned int i = 0; i < count; i++) |
61 | if (!(this+lookahead[i]).intersects (glyphs)) |
62 | return false; |
63 | |
64 | return true; |
65 | } |
66 | |
67 | bool may_have_non_1to1 () const |
68 | { return false; } |
69 | |
70 | void closure (hb_closure_context_t *c) const |
71 | { |
72 | if (!intersects (c->glyphs)) return; |
73 | |
74 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack); |
75 | const auto &substitute = StructAfter<decltype (substituteX)> (lookahead); |
76 | |
77 | + hb_zip (this+coverage, substitute) |
78 | | hb_filter (c->parent_active_glyphs (), hb_first) |
79 | | hb_map (hb_second) |
80 | | hb_sink (c->output) |
81 | ; |
82 | } |
83 | |
84 | void closure_lookups (hb_closure_lookups_context_t *c) const {} |
85 | |
86 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
87 | { |
88 | if (unlikely (!(this+coverage).collect_coverage (c->input))__builtin_expect (bool(!(this+coverage).collect_coverage (c-> input)), 0)) return; |
89 | |
90 | unsigned int count; |
91 | |
92 | count = backtrack.len; |
93 | for (unsigned int i = 0; i < count; i++) |
94 | if (unlikely (!(this+backtrack[i]).collect_coverage (c->before))__builtin_expect (bool(!(this+backtrack[i]).collect_coverage ( c->before)), 0)) return; |
95 | |
96 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack); |
97 | count = lookahead.len; |
98 | for (unsigned int i = 0; i < count; i++) |
99 | if (unlikely (!(this+lookahead[i]).collect_coverage (c->after))__builtin_expect (bool(!(this+lookahead[i]).collect_coverage ( c->after)), 0)) return; |
100 | |
101 | const auto &substitute = StructAfter<decltype (substituteX)> (lookahead); |
102 | count = substitute.len; |
Value stored to 'count' is never read | |
103 | c->output->add_array (substitute.arrayZ, substitute.len); |
104 | } |
105 | |
106 | const Coverage &get_coverage () const { return this+coverage; } |
107 | |
108 | bool would_apply (hb_would_apply_context_t *c) const |
109 | { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED((unsigned int) -1); } |
110 | |
111 | bool apply (hb_ot_apply_context_t *c) const |
112 | { |
113 | TRACE_APPLY (this)hb_no_trace_t<bool> trace; |
114 | unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint); |
115 | if (index == NOT_COVERED((unsigned int) -1)) return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 115); |
116 | |
117 | if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL)__builtin_expect (bool(c->nesting_level_left != 64), 0)) |
118 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 118); /* No chaining to this type */ |
119 | |
120 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack); |
121 | const auto &substitute = StructAfter<decltype (substituteX)> (lookahead); |
122 | |
123 | if (unlikely (index >= substitute.len)__builtin_expect (bool(index >= substitute.len), 0)) return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 123); |
124 | |
125 | unsigned int start_index = 0, end_index = 0; |
126 | if (match_backtrack (c, |
127 | backtrack.len, (HBUINT16 *) backtrack.arrayZ, |
128 | match_coverage, this, |
129 | &start_index) && |
130 | match_lookahead (c, |
131 | lookahead.len, (HBUINT16 *) lookahead.arrayZ, |
132 | match_coverage, this, |
133 | c->buffer->idx + 1, &end_index)) |
134 | { |
135 | c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index); |
136 | |
137 | if (HB_BUFFER_MESSAGE_MORE(0 +1) && c->buffer->messaging ()) |
138 | { |
139 | c->buffer->message (c->font, |
140 | "replacing glyph at %u (reverse chaining substitution)", |
141 | c->buffer->idx); |
142 | } |
143 | |
144 | c->replace_glyph_inplace (substitute[index]); |
145 | |
146 | if (HB_BUFFER_MESSAGE_MORE(0 +1) && c->buffer->messaging ()) |
147 | { |
148 | c->buffer->message (c->font, |
149 | "replaced glyph at %u (reverse chaining substitution)", |
150 | c->buffer->idx); |
151 | } |
152 | |
153 | /* Note: We DON'T decrease buffer->idx. The main loop does it |
154 | * for us. This is useful for preventing surprises if someone |
155 | * calls us through a Context lookup. */ |
156 | return_trace (true)return trace.ret (true, __PRETTY_FUNCTION__, 156); |
157 | } |
158 | else |
159 | { |
160 | c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index); |
161 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 161); |
162 | } |
163 | } |
164 | |
165 | template<typename Iterator, |
166 | hb_requires (hb_is_iterator (Iterator))typename hb_enable_if<((hb_is_iterator_of<Iterator, typename Iterator::item_t>::value))>::type* = nullptr> |
167 | bool serialize_coverage_offset_array (hb_subset_context_t *c, Iterator it) const |
168 | { |
169 | TRACE_SERIALIZE (this)hb_no_trace_t<bool> trace; |
170 | auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> (); |
171 | |
172 | if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size))__builtin_expect (bool(!c->serializer->allocate_size< HBUINT16> (HBUINT16::static_size)), 0)) |
173 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 173); |
174 | |
175 | for (auto& offset : it) { |
176 | auto *o = out->serialize_append (c->serializer); |
177 | if (unlikely (!o)__builtin_expect (bool(!o), 0) || !o->serialize_subset (c, offset, this)) |
178 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 178); |
179 | } |
180 | |
181 | return_trace (true)return trace.ret (true, __PRETTY_FUNCTION__, 181); |
182 | } |
183 | |
184 | template<typename Iterator, typename BacktrackIterator, typename LookaheadIterator, |
185 | hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_pair_t))typename hb_enable_if<(((hb_is_source_of<Iterator, hb_codepoint_pair_t >::value && Iterator::is_sorted_iterator)))>::type * = nullptr, |
186 | hb_requires (hb_is_iterator (BacktrackIterator))typename hb_enable_if<((hb_is_iterator_of<BacktrackIterator , typename BacktrackIterator::item_t>::value))>::type* = nullptr, |
187 | hb_requires (hb_is_iterator (LookaheadIterator))typename hb_enable_if<((hb_is_iterator_of<LookaheadIterator , typename LookaheadIterator::item_t>::value))>::type* = nullptr> |
188 | bool serialize (hb_subset_context_t *c, |
189 | Iterator coverage_subst_iter, |
190 | BacktrackIterator backtrack_iter, |
191 | LookaheadIterator lookahead_iter) const |
192 | { |
193 | TRACE_SERIALIZE (this)hb_no_trace_t<bool> trace; |
194 | |
195 | auto *out = c->serializer->start_embed (this); |
196 | if (unlikely (!c->serializer->embed (this->format))__builtin_expect (bool(!c->serializer->embed (this-> format)), 0)) return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 196); |
197 | if (unlikely (!c->serializer->embed (this->coverage))__builtin_expect (bool(!c->serializer->embed (this-> coverage)), 0)) return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 197); |
198 | |
199 | if (!serialize_coverage_offset_array (c, backtrack_iter)) return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 199); |
200 | if (!serialize_coverage_offset_array (c, lookahead_iter)) return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 200); |
201 | |
202 | auto *substitute_out = c->serializer->start_embed<Array16Of<HBGlyphID16>> (); |
203 | auto substitutes = |
204 | + coverage_subst_iter |
205 | | hb_map (hb_second) |
206 | ; |
207 | |
208 | auto glyphs = |
209 | + coverage_subst_iter |
210 | | hb_map_retains_sorting (hb_first) |
211 | ; |
212 | if (unlikely (! c->serializer->check_success (substitute_out->serialize (c->serializer, substitutes)))__builtin_expect (bool(! c->serializer->check_success ( substitute_out->serialize (c->serializer, substitutes)) ), 0)) |
213 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 213); |
214 | |
215 | if (unlikely (!out->coverage.serialize_serialize (c->serializer, glyphs))__builtin_expect (bool(!out->coverage.serialize_serialize ( c->serializer, glyphs)), 0)) |
216 | return_trace (false)return trace.ret (false, __PRETTY_FUNCTION__, 216); |
217 | return_trace (true)return trace.ret (true, __PRETTY_FUNCTION__, 217); |
218 | } |
219 | |
220 | bool subset (hb_subset_context_t *c) const |
221 | { |
222 | TRACE_SUBSET (this)hb_no_trace_t<bool> trace; |
223 | const hb_set_t &glyphset = *c->plan->glyphset_gsub (); |
224 | const hb_map_t &glyph_map = *c->plan->glyph_map; |
225 | |
226 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack); |
227 | const auto &substitute = StructAfter<decltype (substituteX)> (lookahead); |
228 | |
229 | auto it = |
230 | + hb_zip (this+coverage, substitute) |
231 | | hb_filter (glyphset, hb_first) |
232 | | hb_filter (glyphset, hb_second) |
233 | | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const HBGlyphID16 &> p) -> hb_codepoint_pair_t |
234 | { return hb_pair (glyph_map[p.first], glyph_map[p.second]); }) |
235 | ; |
236 | |
237 | return_trace (bool (it) && serialize (c, it, backtrack.iter (), lookahead.iter ()))return trace.ret (bool (it) && serialize (c, it, backtrack .iter (), lookahead.iter ()), __PRETTY_FUNCTION__, 237); |
238 | } |
239 | }; |
240 | |
241 | } |
242 | } |
243 | } |
244 | |
245 | #endif /* HB_OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBSTFORMAT1_HH */ |