XROAD
flatbuffers_common_reader.h
1 #ifndef FLATBUFFERS_COMMON_READER_H
2 #define FLATBUFFERS_COMMON_READER_H
3 
4 /* Generated by flatcc 0.5.3-pre FlatBuffers schema compiler for C by dvide.com */
5 
6 /* Common FlatBuffers read functionality for C. */
7 
8 #include "flatcc/flatcc_prologue.h"
9 #include "flatcc/flatcc_flatbuffers.h"
10 
11 
12 #define __flatbuffers_read_scalar_at_byteoffset(N, p, o) N ## _read_from_pe((uint8_t *)(p) + (o))
13 #define __flatbuffers_read_scalar(N, p) N ## _read_from_pe(p)
14 #define __flatbuffers_read_vt(ID, offset, t)\
15 flatbuffers_voffset_t offset = 0;\
16 { flatbuffers_voffset_t id__tmp, *vt__tmp;\
17  assert(t != 0 && "null pointer table access");\
18  id__tmp = ID;\
19  vt__tmp = (flatbuffers_voffset_t *)((uint8_t *)(t) -\
20  __flatbuffers_soffset_read_from_pe(t));\
21  if (__flatbuffers_voffset_read_from_pe(vt__tmp) >= sizeof(vt__tmp[0]) * (id__tmp + 3)) {\
22  offset = __flatbuffers_voffset_read_from_pe(vt__tmp + id__tmp + 2);\
23  }\
24 }
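/* A table pointer stores a signed soffset back to its vtable (vt = t - soffset).
   In the vtable, entry 0 is the vtable size in bytes, entry 1 the table size, and
   entry 2 + id the field's byte offset inside the table, 0 meaning absent; hence
   the sizeof(vt__tmp[0]) * (id + 3) bound check and the vt__tmp + id__tmp + 2 read. */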
25 #define __flatbuffers_field_present(ID, t) { __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; }
26 #define __flatbuffers_scalar_field(T, ID, t)\
27 {\
28  __flatbuffers_read_vt(ID, offset__tmp, t)\
29  if (offset__tmp) {\
30  return (const T *)((uint8_t *)(t) + offset__tmp);\
31  }\
32  return 0;\
33 }
34 #define __flatbuffers_define_scalar_field(ID, N, NK, TK, T, V)\
35 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
36 { __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
37  return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
38 }\
39 static inline const T *N ## _ ## NK ## _get_ptr(N ## _table_t t__tmp)\
40 __flatbuffers_scalar_field(T, ID, t__tmp)\
41 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
42 __flatbuffers_field_present(ID, t__tmp)\
43 __flatbuffers_define_scan_by_scalar_field(N, NK, T)
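/* Usage sketch (illustrative only; `MyGame_Monster` and `hp` are hypothetical
   flatcc-generated names). An invocation such as

       __flatbuffers_define_scalar_field(2, MyGame_Monster, hp, flatbuffers_int16, int16_t, 100)

   defines MyGame_Monster_hp_get(t) returning the stored value or the default 100,
   MyGame_Monster_hp_get_ptr(t) returning a pointer into the buffer or 0,
   MyGame_Monster_hp_is_present(t), and the _vec_scan_by_hp helpers. */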
44 #define __flatbuffers_struct_field(T, ID, t, r)\
45 {\
46  __flatbuffers_read_vt(ID, offset__tmp, t)\
47  if (offset__tmp) {\
48  return (T)((uint8_t *)(t) + offset__tmp);\
49  }\
50  assert(!(r) && "required field missing");\
51  return 0;\
52 }
53 #define __flatbuffers_offset_field(T, ID, t, r, adjust)\
54 {\
55  flatbuffers_uoffset_t *elem__tmp;\
56  __flatbuffers_read_vt(ID, offset__tmp, t)\
57  if (offset__tmp) {\
58  elem__tmp = (flatbuffers_uoffset_t *)((uint8_t *)(t) + offset__tmp);\
59  /* Add sizeof so C api can have raw access past header field. */\
60  return (T)((uint8_t *)(elem__tmp) + adjust +\
61  __flatbuffers_uoffset_read_from_pe(elem__tmp));\
62  }\
63  assert(!(r) && "required field missing");\
64  return 0;\
65 }
66 #define __flatbuffers_vector_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, sizeof(flatbuffers_uoffset_t))
67 #define __flatbuffers_table_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, 0)
68 #define __flatbuffers_define_struct_field(ID, N, NK, T, r)\
69 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
70 __flatbuffers_struct_field(T, ID, t__tmp, r)\
71 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
72 __flatbuffers_field_present(ID, t__tmp)
73 #define __flatbuffers_define_vector_field(ID, N, NK, T, r)\
74 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
75 __flatbuffers_vector_field(T, ID, t__tmp, r)\
76 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
77 __flatbuffers_field_present(ID, t__tmp)
78 #define __flatbuffers_define_table_field(ID, N, NK, T, r)\
79 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
80 __flatbuffers_table_field(T, ID, t__tmp, r)\
81 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
82 __flatbuffers_field_present(ID, t__tmp)
83 #define __flatbuffers_define_string_field(ID, N, NK, r)\
84 static inline flatbuffers_string_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
85 __flatbuffers_vector_field(flatbuffers_string_t, ID, t__tmp, r)\
86 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
87 __flatbuffers_field_present(ID, t__tmp)\
88 __flatbuffers_define_scan_by_string_field(N, NK)
89 #define __flatbuffers_vec_len(vec)\
90 { return (vec) ? (size_t)__flatbuffers_uoffset_read_from_pe((flatbuffers_uoffset_t *)vec - 1) : 0; }
91 #define __flatbuffers_string_len(s) __flatbuffers_vec_len(s)
92 static inline size_t flatbuffers_vec_len(const void *vec)
93 __flatbuffers_vec_len(vec)
94 #define __flatbuffers_scalar_vec_at(N, vec, i)\
95 { assert(flatbuffers_vec_len(vec) > (i) && "index out of range");\
96  return __flatbuffers_read_scalar(N, &(vec)[i]); }
97 #define __flatbuffers_struct_vec_at(vec, i)\
98 { assert(flatbuffers_vec_len(vec) > (i) && "index out of range"); return (vec) + (i); }
99 /* `adjust` skips past the header for string vectors. */
100 #define __flatbuffers_offset_vec_at(T, vec, i, adjust)\
101 { const flatbuffers_uoffset_t *elem__tmp = (vec) + (i);\
102  assert(flatbuffers_vec_len(vec) > (i) && "index out of range");\
103  return (T)((uint8_t *)(elem__tmp) + (size_t)__flatbuffers_uoffset_read_from_pe(elem__tmp) + (adjust)); }
104 #define __flatbuffers_define_scalar_vec_len(N)\
105 static inline size_t N ## _vec_len(N ##_vec_t vec__tmp)\
106 { return flatbuffers_vec_len(vec__tmp); }
107 #define __flatbuffers_define_scalar_vec_at(N, T) \
108 static inline T N ## _vec_at(N ## _vec_t vec__tmp, size_t i__tmp)\
109 __flatbuffers_scalar_vec_at(N, vec__tmp, i__tmp)
110 typedef const char *flatbuffers_string_t;
111 static inline size_t flatbuffers_string_len(flatbuffers_string_t s)
112 __flatbuffers_string_len(s)
113 typedef const flatbuffers_uoffset_t *flatbuffers_string_vec_t;
114 typedef flatbuffers_uoffset_t *flatbuffers_string_mutable_vec_t;
115 static inline size_t flatbuffers_string_vec_len(flatbuffers_string_vec_t vec)
116 __flatbuffers_vec_len(vec)
117 static inline flatbuffers_string_t flatbuffers_string_vec_at(flatbuffers_string_vec_t vec, size_t i)
118 __flatbuffers_offset_vec_at(flatbuffers_string_t, vec, i, sizeof(vec[0]))
119 typedef const void *flatbuffers_generic_t;
120 static inline flatbuffers_string_t flatbuffers_string_cast_from_generic(const flatbuffers_generic_t p)
121 { return p ? ((const char *)p) + __flatbuffers_uoffset__size() : 0; }
122 typedef const flatbuffers_uoffset_t *flatbuffers_generic_vec_t;
123 typedef flatbuffers_uoffset_t *flatbuffers_generic_table_mutable_vec_t;
124 static inline size_t flatbuffers_generic_vec_len(flatbuffers_generic_vec_t vec)
125 __flatbuffers_vec_len(vec)
126 static inline flatbuffers_generic_t flatbuffers_generic_vec_at(flatbuffers_generic_vec_t vec, size_t i)
127 __flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, 0)
128 static inline flatbuffers_generic_t flatbuffers_generic_vec_at_as_string(flatbuffers_generic_vec_t vec, size_t i)
129 __flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, sizeof(vec[0]))
130 typedef struct flatbuffers_union {
131  flatbuffers_union_type_t type;
132  flatbuffers_generic_t value;
133 } flatbuffers_union_t;
134 typedef struct flatbuffers_union_vec {
135  const flatbuffers_union_type_t *type;
136  const flatbuffers_uoffset_t *value;
137 } flatbuffers_union_vec_t;
138 #define __flatbuffers_union_type_field(ID, t)\
139 {\
140  __flatbuffers_read_vt(ID, offset__tmp, t)\
141  return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(__flatbuffers_utype, t, offset__tmp) : 0;\
142 }
143 static inline flatbuffers_string_t flatbuffers_string_cast_from_union(const flatbuffers_union_t u__tmp)\
144 { return flatbuffers_string_cast_from_generic(u__tmp.value); }
145 #define __flatbuffers_define_union_field(NS, ID, N, NK, T, r)\
146 static inline T ## _union_type_t N ## _ ## NK ## _type_get(N ## _table_t t__tmp)\
147 __## NS ## union_type_field(((ID) - 1), t__tmp)\
148 static inline NS ## generic_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
149 __## NS ## table_field(NS ## generic_t, ID, t__tmp, r)\
150 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
151 __## NS ## field_present(ID, t__tmp)\
152 static inline T ## _union_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
153 { T ## _union_t u__tmp = { 0, 0 }; u__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
154  if (u__tmp.type == 0) { return u__tmp; } u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
155 static inline NS ## string_t N ## _ ## NK ## _as_string(N ## _table_t t__tmp)\
156 { return NS ## string_cast_from_generic(N ## _ ## NK ## _get(t__tmp)); }\
157 
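/* Usage sketch (hypothetical names; assumes a schema union field `equipped` on a
   table `Monster`, for which flatcc expands __flatbuffers_define_union_field):
   read the tag first, then the value:

       if (Monster_equipped_type_get(t) == Any_Weapon)
           weapon = (Weapon_table_t)Monster_equipped_get(t);

   Monster_equipped_union(t) returns the { type, value } pair in one call and
   yields a null value when the type is NONE (0). */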
158 #define __flatbuffers_define_union_vector_ops(NS, T)\
159 static inline size_t T ## _union_vec_len(T ## _union_vec_t uv__tmp)\
160 { return NS ## vec_len(uv__tmp.type); }\
161 static inline T ## _union_t T ## _union_vec_at(T ## _union_vec_t uv__tmp, size_t i__tmp)\
162 { T ## _union_t u__tmp = { 0, 0 }; size_t n__tmp = NS ## vec_len(uv__tmp.type);\
163  assert(n__tmp > (i__tmp) && "index out of range"); u__tmp.type = uv__tmp.type[i__tmp];\
164  /* Unknown type is treated as NONE for schema evolution. */\
165  if (u__tmp.type == 0) return u__tmp;\
166  u__tmp.value = NS ## generic_vec_at(uv__tmp.value, i__tmp); return u__tmp; }\
167 static inline NS ## string_t T ## _union_vec_at_as_string(T ## _union_vec_t uv__tmp, size_t i__tmp)\
168 { return (NS ## string_t) NS ## generic_vec_at_as_string(uv__tmp.value, i__tmp); }\
169 
170 #define __flatbuffers_define_union_vector(NS, T)\
171 typedef NS ## union_vec_t T ## _union_vec_t;\
172 __## NS ## define_union_vector_ops(NS, T)
173 #define __flatbuffers_define_union(NS, T)\
174 typedef NS ## union_t T ## _union_t;\
175 __## NS ## define_union_vector(NS, T)
176 #define __flatbuffers_define_union_vector_field(NS, ID, N, NK, T, r)\
177 __## NS ## define_vector_field(ID - 1, N, NK ## _type, T ## _vec_t, r)\
178 __## NS ## define_vector_field(ID, N, NK, flatbuffers_generic_vec_t, r)\
179 static inline T ## _union_vec_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
180 { T ## _union_vec_t uv__tmp; uv__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
181  uv__tmp.value = N ## _ ## NK(t__tmp);\
182  assert(NS ## vec_len(uv__tmp.type) == NS ## vec_len(uv__tmp.value)\
183  && "union vector type length mismatch"); return uv__tmp; }
184 #include <string.h>
185 static size_t flatbuffers_not_found = (size_t)-1;
186 static size_t flatbuffers_end = (size_t)-1;
187 #define __flatbuffers_identity(n) (n)
188 #define __flatbuffers_min(a, b) ((a) < (b) ? (a) : (b))
189 /* Subtraction doesn't work for unsigned types. */
190 #define __flatbuffers_scalar_cmp(x, y, n) ((x) < (y) ? -1 : (x) > (y))
191 static inline int __flatbuffers_string_n_cmp(flatbuffers_string_t v, const char *s, size_t n)
192 { size_t nv = flatbuffers_string_len(v); int x = strncmp(v, s, nv < n ? nv : n);
193  return x != 0 ? x : nv < n ? -1 : nv > n; }
194 /* `n` arg unused, but needed by string find macro expansion. */
195 static inline int __flatbuffers_string_cmp(flatbuffers_string_t v, const char *s, size_t n) { (void)n; return strcmp(v, s); }
196 /* A = identity if searching scalar vectors rather than key fields. */
197 /* Returns lowest matching index or not_found. */
198 #define __flatbuffers_find_by_field(A, V, E, L, K, Kn, T, D)\
199 { T v__tmp; size_t a__tmp = 0, b__tmp, m__tmp; if (!(b__tmp = L(V))) { return flatbuffers_not_found; }\
200  --b__tmp;\
201  while (a__tmp < b__tmp) {\
202  m__tmp = a__tmp + ((b__tmp - a__tmp) >> 1);\
203  v__tmp = A(E(V, m__tmp));\
204  if ((D(v__tmp, (K), (Kn))) < 0) {\
205  a__tmp = m__tmp + 1;\
206  } else {\
207  b__tmp = m__tmp;\
208  }\
209  }\
210  if (a__tmp == b__tmp) {\
211  v__tmp = A(E(V, a__tmp));\
212  if (D(v__tmp, (K), (Kn)) == 0) {\
213  return a__tmp;\
214  }\
215  }\
216  return flatbuffers_not_found;\
217 }
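/* The macro above is a lower-bound binary search: it narrows [a, b] until one
   candidate is left and then confirms equality, so it only works on vectors
   sorted by the searched key. */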
218 #define __flatbuffers_find_by_scalar_field(A, V, E, L, K, T)\
219 __flatbuffers_find_by_field(A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
220 #define __flatbuffers_find_by_string_field(A, V, E, L, K)\
221 __flatbuffers_find_by_field(A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
222 #define __flatbuffers_find_by_string_n_field(A, V, E, L, K, Kn)\
223 __flatbuffers_find_by_field(A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
224 #define __flatbuffers_define_find_by_scalar_field(N, NK, TK)\
225 static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, TK key__tmp)\
226 __flatbuffers_find_by_scalar_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, TK)
227 #define __flatbuffers_define_scalar_find(N, T)\
228 static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, T key__tmp)\
229 __flatbuffers_find_by_scalar_field(__flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
230 #define __flatbuffers_define_find_by_string_field(N, NK) \
231 /* Note: find only works on vectors sorted by this field. */\
232 static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
233 __flatbuffers_find_by_string_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
234 static inline size_t N ## _vec_find_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, int n__tmp)\
235 __flatbuffers_find_by_string_n_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
236 #define __flatbuffers_define_default_find_by_scalar_field(N, NK, TK)\
237 static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, TK key__tmp)\
238 { return N ## _vec_find_by_ ## NK(vec__tmp, key__tmp); }
239 #define __flatbuffers_define_default_find_by_string_field(N, NK) \
240 static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, const char *s__tmp)\
241 { return N ## _vec_find_by_ ## NK(vec__tmp, s__tmp); }\
242 static inline size_t N ## _vec_find_n(N ## _vec_t vec__tmp, const char *s__tmp, int n__tmp)\
243 { return N ## _vec_find_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }
244 /* A = identity if searching scalar vectors rather than key fields. */
245 /* Returns lowest matching index or not_found. */
246 #define __flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
247 { T v__tmp; size_t i__tmp;\
248  for (i__tmp = b; i__tmp < e; ++i__tmp) {\
249  v__tmp = A(E(V, i__tmp));\
250  if (D(v__tmp, (K), (Kn)) == 0) {\
251  return i__tmp;\
252  }\
253  }\
254  return flatbuffers_not_found;\
255 }
256 #define __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
257 { T v__tmp; size_t i__tmp = e;\
258  while (i__tmp-- > b) {\
259  v__tmp = A(E(V, i__tmp));\
260  if (D(v__tmp, (K), (Kn)) == 0) {\
261  return i__tmp;\
262  }\
263  }\
264  return flatbuffers_not_found;\
265 }
266 #define __flatbuffers_scan_by_scalar_field(b, e, A, V, E, L, K, T)\
267 __flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
268 #define __flatbuffers_scan_by_string_field(b, e, A, V, E, L, K)\
269 __flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
270 #define __flatbuffers_scan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
271 __flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
272 #define __flatbuffers_rscan_by_scalar_field(b, e, A, V, E, L, K, T)\
273 __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
274 #define __flatbuffers_rscan_by_string_field(b, e, A, V, E, L, K)\
275 __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
276 #define __flatbuffers_rscan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
277 __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
278 #define __flatbuffers_define_scan_by_scalar_field(N, NK, T)\
279 static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
280 __flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
281 static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
282 __flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
283 static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
284 __flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
285 static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
286 __flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
287 #define __flatbuffers_define_scalar_scan(N, T)\
288 static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, T key__tmp)\
289 __flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
290 static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
291 __flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
292 static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, T key__tmp)\
293 __flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
294 static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
295 __flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
296 #define __flatbuffers_define_scan_by_string_field(N, NK) \
297 static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
298 __flatbuffers_scan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
299 static inline size_t N ## _vec_scan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, int n__tmp)\
300 __flatbuffers_scan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
301 static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
302 __flatbuffers_scan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
303 static inline size_t N ## _vec_scan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, int n__tmp)\
304 __flatbuffers_scan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
305 static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
306 __flatbuffers_rscan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
307 static inline size_t N ## _vec_rscan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, int n__tmp)\
308 __flatbuffers_rscan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
309 static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
310 __flatbuffers_rscan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
311 static inline size_t N ## _vec_rscan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, int n__tmp)\
312 __flatbuffers_rscan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
313 #define __flatbuffers_define_default_scan_by_scalar_field(N, NK, TK)\
314 static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, TK key__tmp)\
315 { return N ## _vec_scan_by_ ## NK(vec__tmp, key__tmp); }\
316 static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
317 { return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }\
318 static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, TK key__tmp)\
319 { return N ## _vec_rscan_by_ ## NK(vec__tmp, key__tmp); }\
320 static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
321 { return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }
322 #define __flatbuffers_define_default_scan_by_string_field(N, NK) \
323 static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, const char *s__tmp)\
324 { return N ## _vec_scan_by_ ## NK(vec__tmp, s__tmp); }\
325 static inline size_t N ## _vec_scan_n(N ## _vec_t vec__tmp, const char *s__tmp, int n__tmp)\
326 { return N ## _vec_scan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
327 static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
328 { return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
329 static inline size_t N ## _vec_scan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, int n__tmp)\
330 { return N ## _vec_scan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }\
331 static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, const char *s__tmp)\
332 { return N ## _vec_rscan_by_ ## NK(vec__tmp, s__tmp); }\
333 static inline size_t N ## _vec_rscan_n(N ## _vec_t vec__tmp, const char *s__tmp, int n__tmp)\
334 { return N ## _vec_rscan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
335 static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
336 { return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
337 static inline size_t N ## _vec_rscan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, int n__tmp)\
338 { return N ## _vec_rscan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }
339 #define __flatbuffers_heap_sort(N, X, A, E, L, TK, TE, D, S)\
340 static inline void __ ## N ## X ## __heap_sift_down(\
341  N ## _mutable_vec_t vec__tmp, size_t start__tmp, size_t end__tmp)\
342 { size_t child__tmp, root__tmp; TK v1__tmp, v2__tmp, vroot__tmp;\
343  root__tmp = start__tmp;\
344  while ((root__tmp << 1) <= end__tmp) {\
345  child__tmp = root__tmp << 1;\
346  if (child__tmp < end__tmp) {\
347  v1__tmp = A(E(vec__tmp, child__tmp));\
348  v2__tmp = A(E(vec__tmp, child__tmp + 1));\
349  if (D(v1__tmp, v2__tmp) < 0) {\
350  child__tmp++;\
351  }\
352  }\
353  vroot__tmp = A(E(vec__tmp, root__tmp));\
354  v1__tmp = A(E(vec__tmp, child__tmp));\
355  if (D(vroot__tmp, v1__tmp) < 0) {\
356  S(vec__tmp, root__tmp, child__tmp, TE);\
357  root__tmp = child__tmp;\
358  } else {\
359  return;\
360  }\
361  }\
362 }\
363 static inline void __ ## N ## X ## __heap_sort(N ## _mutable_vec_t vec__tmp)\
364 { size_t start__tmp, end__tmp, size__tmp;\
365  size__tmp = L(vec__tmp); if (size__tmp == 0) return; end__tmp = size__tmp - 1; start__tmp = size__tmp >> 1;\
366  do { __ ## N ## X ## __heap_sift_down(vec__tmp, start__tmp, end__tmp); } while (start__tmp--);\
367  while (end__tmp > 0) { \
368  S(vec__tmp, 0, end__tmp, TE);\
369  __ ## N ## X ## __heap_sift_down(vec__tmp, 0, --end__tmp); } }
370 #define __flatbuffers_define_sort_by_field(N, NK, TK, TE, D, S)\
371  __flatbuffers_heap_sort(N, _sort_by_ ## NK, N ## _ ## NK ## _get, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
372 static inline void N ## _vec_sort_by_ ## NK(N ## _mutable_vec_t vec__tmp)\
373 { __ ## N ## _sort_by_ ## NK ## __heap_sort(vec__tmp); }
374 #define __flatbuffers_define_sort(N, TK, TE, D, S)\
375 __flatbuffers_heap_sort(N, , __flatbuffers_identity, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
376 static inline void N ## _vec_sort(N ## _mutable_vec_t vec__tmp) { __ ## N ## __heap_sort(vec__tmp); }
377 #define __flatbuffers_scalar_diff(x, y) ((x) < (y) ? -1 : (x) > (y))
378 #define __flatbuffers_string_diff(x, y) __flatbuffers_string_n_cmp((x), (const char *)(y), flatbuffers_string_len(y))
379 #define __flatbuffers_value_swap(vec, a, b, TE) { TE x__tmp = vec[b]; vec[b] = vec[a]; vec[a] = x__tmp; }
380 #define __flatbuffers_uoffset_swap(vec, a, b, TE)\
381 { TE ta__tmp, tb__tmp, d__tmp;\
382  d__tmp = (TE)((a - b) * sizeof(vec[0]));\
383  ta__tmp = __flatbuffers_uoffset_read_from_pe(vec + b) - d__tmp;\
384  tb__tmp = __flatbuffers_uoffset_read_from_pe(vec + a) + d__tmp;\
385  __flatbuffers_uoffset_write_to_pe(vec + a, ta__tmp);\
386  __flatbuffers_uoffset_write_to_pe(vec + b, tb__tmp); }
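/* String and table vector elements are uoffsets relative to their own storage
   location, so swapping two slots must also re-bias each offset by the distance
   (a - b) * sizeof(vec[0]) between the slots; a plain value swap would leave both
   offsets pointing at the wrong targets. */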
387 #define __flatbuffers_scalar_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
388 #define __flatbuffers_string_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
389 #define __flatbuffers_struct_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
390 #define __flatbuffers_table_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
391 #define __flatbuffers_define_struct_sort_by_scalar_field(N, NK, TK, TE)\
392  __flatbuffers_define_sort_by_field(N, NK, TK, TE, __flatbuffers_scalar_diff, __flatbuffers_struct_swap)
393 #define __flatbuffers_define_table_sort_by_scalar_field(N, NK, TK)\
394  __flatbuffers_define_sort_by_field(N, NK, TK, flatbuffers_uoffset_t, __flatbuffers_scalar_diff, __flatbuffers_table_swap)
395 #define __flatbuffers_define_table_sort_by_string_field(N, NK)\
396  __flatbuffers_define_sort_by_field(N, NK, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_table_swap)
397 #define __flatbuffers_define_scalar_sort(N, T) __flatbuffers_define_sort(N, T, T, __flatbuffers_scalar_diff, __flatbuffers_scalar_swap)
398 #define __flatbuffers_define_string_sort() __flatbuffers_define_sort(flatbuffers_string, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_string_swap)
399 #define __flatbuffers_define_scalar_vector(N, T)\
400 typedef const T *N ## _vec_t;\
401 typedef T *N ## _mutable_vec_t;\
402 __flatbuffers_define_scalar_vec_len(N)\
403 __flatbuffers_define_scalar_vec_at(N, T)\
404 __flatbuffers_define_scalar_find(N, T)\
405 __flatbuffers_define_scalar_scan(N, T)\
406 __flatbuffers_define_scalar_sort(N, T)
407 
408 #define __flatbuffers_define_integer_type(N, T, W)\
409 __flatcc_define_integer_accessors(N, T, W, flatbuffers_endian)\
410 __flatbuffers_define_scalar_vector(N, T)
411 __flatbuffers_define_scalar_vector(flatbuffers_bool, flatbuffers_bool_t)
412 __flatbuffers_define_scalar_vector(flatbuffers_uint8, uint8_t)
413 __flatbuffers_define_scalar_vector(flatbuffers_int8, int8_t)
414 __flatbuffers_define_scalar_vector(flatbuffers_uint16, uint16_t)
415 __flatbuffers_define_scalar_vector(flatbuffers_int16, int16_t)
416 __flatbuffers_define_scalar_vector(flatbuffers_uint32, uint32_t)
417 __flatbuffers_define_scalar_vector(flatbuffers_int32, int32_t)
418 __flatbuffers_define_scalar_vector(flatbuffers_uint64, uint64_t)
419 __flatbuffers_define_scalar_vector(flatbuffers_int64, int64_t)
420 __flatbuffers_define_scalar_vector(flatbuffers_float, float)
421 __flatbuffers_define_scalar_vector(flatbuffers_double, double)
422 __flatbuffers_define_scalar_vector(flatbuffers_union_type, flatbuffers_union_type_t)
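/* Usage sketch (illustrative only): the instantiations above provide typed vector
   accessors, e.g. for a flatbuffers_uint8_vec_t v taken from a generated [ubyte]
   field accessor:

       size_t n = flatbuffers_uint8_vec_len(v);
       uint8_t x = flatbuffers_uint8_vec_at(v, 0);   (asserts on range)
       size_t i = flatbuffers_uint8_vec_scan(v, 42);  (linear search)

   flatbuffers_uint8_vec_find performs a binary search and therefore requires a
   sorted vector; misses return flatbuffers_not_found. */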
423 static inline size_t flatbuffers_string_vec_find(flatbuffers_string_vec_t vec, const char *s)
424 __flatbuffers_find_by_string_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
425 static inline size_t flatbuffers_string_vec_find_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
426 __flatbuffers_find_by_string_n_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
427 static inline size_t flatbuffers_string_vec_scan(flatbuffers_string_vec_t vec, const char *s)
428 __flatbuffers_scan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
429 static inline size_t flatbuffers_string_vec_scan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
430 __flatbuffers_scan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
431 static inline size_t flatbuffers_string_vec_scan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
432 __flatbuffers_scan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
433 static inline size_t flatbuffers_string_vec_scan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
434 __flatbuffers_scan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
435 static inline size_t flatbuffers_string_vec_rscan(flatbuffers_string_vec_t vec, const char *s)
436 __flatbuffers_rscan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
437 static inline size_t flatbuffers_string_vec_rscan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
438 __flatbuffers_rscan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
439 static inline size_t flatbuffers_string_vec_rscan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
440 __flatbuffers_rscan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
441 static inline size_t flatbuffers_string_vec_rscan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
442 __flatbuffers_rscan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
443 __flatbuffers_define_string_sort()
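/* Usage sketch (illustrative only) for the string vector helpers defined above:

       flatbuffers_string_t s = flatbuffers_string_vec_at(sv, i);
       size_t len = flatbuffers_string_len(s);               (no strlen needed)
       size_t hit = flatbuffers_string_vec_scan(sv, "key");  (linear search)

   flatbuffers_string_vec_find binary-searches a sorted vector; both return
   flatbuffers_not_found on a miss. */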
444 #define __flatbuffers_define_struct_scalar_field(N, NK, TK, T)\
445 static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp)\
446 { return t__tmp ? __flatbuffers_read_scalar(TK, &(t__tmp->NK)) : 0; }\
447 static inline const T *N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
448 { return t__tmp ? &(t__tmp->NK) : 0; }\
449 __flatbuffers_define_scan_by_scalar_field(N, NK, T)
450 #define __flatbuffers_define_struct_struct_field(N, NK, T)\
451 static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp) { return t__tmp ? &(t__tmp->NK) : 0; }
452 /* If fid is null, the function returns true without testing as buffer is not expected to have any id. */
453 static inline int flatbuffers_has_identifier(const void *buffer, const char *fid)
454 { flatbuffers_thash_t id, id2 = 0; if (fid == 0) { return 1; };
455  id2 = flatbuffers_type_hash_from_string(fid);
456  id = __flatbuffers_thash_read_from_pe(((flatbuffers_uoffset_t *)buffer) + 1);
457  return id2 == 0 || id == id2; }
458 static inline int flatbuffers_has_type_hash(const void *buffer, flatbuffers_thash_t thash)
459 { return thash == 0 || (__flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1) == thash); }
460 
461 static inline flatbuffers_thash_t flatbuffers_get_type_hash(const void *buffer)
462 { return __flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1); }
463 
464 #define flatbuffers_verify_endian() flatbuffers_has_identifier("\x00\x00\x00\x00" "1234", "1234")
465 static inline void *flatbuffers_read_size_prefix(void *b, size_t *size_out)
466 { if (size_out) { *size_out = (size_t)__flatbuffers_uoffset_read_from_pe(b); }
467  return (uint8_t *)b + sizeof(flatbuffers_uoffset_t); }
468 /* Null file identifier accepts anything, otherwise fid should be 4 characters. */
469 #define __flatbuffers_read_root(T, K, buffer, fid)\
470  ((!buffer || !flatbuffers_has_identifier(buffer, fid)) ? 0 :\
471  ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
472  __flatbuffers_uoffset_read_from_pe(buffer))))
473 #define __flatbuffers_read_typed_root(T, K, buffer, thash)\
474  ((!buffer || !flatbuffers_has_type_hash(buffer, thash)) ? 0 :\
475  ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
476  __flatbuffers_uoffset_read_from_pe(buffer))))
477 #define __flatbuffers_nested_buffer_as_root(C, N, T, K)\
478 static inline T ## _ ## K ## t C ## _ ## N ## _as_root_with_identifier(C ## _ ## table_t t__tmp, const char *fid__tmp)\
479 { const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }\
480 static inline T ## _ ## K ## t C ## _ ## N ## _as_typed_root(C ## _ ## table_t t__tmp)\
481 { const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, C ## _ ## type_identifier); }\
482 static inline T ## _ ## K ## t C ## _ ## N ## _as_root(C ## _ ## table_t t__tmp)\
483 { const char *fid__tmp = T ## _identifier;\
484  const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }
485 #define __flatbuffers_buffer_as_root(N, K)\
486 static inline N ## _ ## K ## t N ## _as_root_with_identifier(const void *buffer__tmp, const char *fid__tmp)\
487 { return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
488 static inline N ## _ ## K ## t N ## _as_root_with_type_hash(const void *buffer__tmp, flatbuffers_thash_t thash__tmp)\
489 { return __flatbuffers_read_typed_root(N, K, buffer__tmp, thash__tmp); }\
490 static inline N ## _ ## K ## t N ## _as_root(const void *buffer__tmp)\
491 { const char *fid__tmp = N ## _identifier;\
492  return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
493 static inline N ## _ ## K ## t N ## _as_typed_root(const void *buffer__tmp)\
494 { return __flatbuffers_read_typed_root(N, K, buffer__tmp, N ## _type_hash); }
495 #define __flatbuffers_struct_as_root(N) __flatbuffers_buffer_as_root(N, struct_)
496 #define __flatbuffers_table_as_root(N) __flatbuffers_buffer_as_root(N, table_)
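/* Usage sketch (hypothetical generated names for a root table `MyGame_Monster`;
   flatcc emits the readers below via __flatbuffers_table_as_root(N)):

       size_t size;
       void *buf = flatbuffers_read_size_prefix(raw, &size);
       MyGame_Monster_table_t root = MyGame_Monster_as_root(buf);

   _as_root verifies the optional 4-character file identifier and returns 0 on a
   mismatch or null buffer; the size prefix step is only needed for size-prefixed
   buffers. */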
497 
498 #include "flatcc/flatcc_epilogue.h"
499 #endif /* FLATBUFFERS_COMMON_READER_H */