damus

Nostr iOS client
git clone git://jb55.com/damus
Log | Files | Refs | README | LICENSE

flatbuffers_common_reader.h (37997B)


      1 #ifndef FLATBUFFERS_COMMON_READER_H
      2 #define FLATBUFFERS_COMMON_READER_H
      3 
      4 /* Generated by flatcc 0.6.1 FlatBuffers schema compiler for C by dvide.com */
      5 
      6 /* Common FlatBuffers read functionality for C. */
      7 
      8 #include "flatcc_prologue.h"
      9 #include "flatcc_flatbuffers.h"
     10 
     11 
        /* Read a scalar of type family N located `o` bytes past `p`, converting
         * from protocol endianness via the N ## _read_from_pe accessor. */
     12 #define __flatbuffers_read_scalar_at_byteoffset(N, p, o) N ## _read_from_pe((uint8_t *)(p) + (o))
     13 #define __flatbuffers_read_scalar(N, p) N ## _read_from_pe(p)
        /* Declare `offset` and set it to field ID's byte offset within table `t`,
         * or leave it 0 when the field is absent.  The vtable is found at `t`
         * minus the soffset stored at `t`; the entry is read only when the
         * vtable's own size word covers slot ID (schema-evolution bounds check). */
     14 #define __flatbuffers_read_vt(ID, offset, t)\
     15 flatbuffers_voffset_t offset = 0;\
     16 {   flatbuffers_voffset_t id__tmp, *vt__tmp;\
     17     FLATCC_ASSERT(t != 0 && "null pointer table access");\
     18     id__tmp = ID;\
     19     vt__tmp = (flatbuffers_voffset_t *)((uint8_t *)(t) -\
     20         __flatbuffers_soffset_read_from_pe(t));\
     21     if (__flatbuffers_voffset_read_from_pe(vt__tmp) >= sizeof(vt__tmp[0]) * (id__tmp + 3u)) {\
     22         offset = __flatbuffers_voffset_read_from_pe(vt__tmp + id__tmp + 2);\
     23     }\
     24 }
        /* Function body: non-zero iff field ID has a non-zero vtable entry
         * in table `t` (i.e. the field is stored). */
     25 #define __flatbuffers_field_present(ID, t) { __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; }
        /* Function body: pointer to the inline scalar field of type T inside
         * table `t`, or 0 when the field is absent. */
     26 #define __flatbuffers_scalar_field(T, ID, t)\
     27 {\
     28     __flatbuffers_read_vt(ID, offset__tmp, t)\
     29     if (offset__tmp) {\
     30         return (const T *)((uint8_t *)(t) + offset__tmp);\
     31     }\
     32     return 0;\
     33 }
        /* Generate the scalar-field accessors for table N, field NK:
         * `_get` and the bare-name alias return the value or default V when
         * the field is absent; `_get_ptr` returns a pointer or 0;
         * `_is_present` tests the vtable entry; scan-by-field helpers are
         * also emitted.  TK selects the endian-aware reader. */
     34 #define __flatbuffers_define_scalar_field(ID, N, NK, TK, T, V)\
     35 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
     36 { __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
     37   return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
     38 }\
     39 static inline T N ## _ ## NK(N ## _table_t t__tmp)\
     40 { __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
     41   return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
     42 }\
     43 static inline const T *N ## _ ## NK ## _get_ptr(N ## _table_t t__tmp)\
     44 __flatbuffers_scalar_field(T, ID, t__tmp)\
     45 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
     46 __flatbuffers_field_present(ID, t__tmp)\
     47 __flatbuffers_define_scan_by_scalar_field(N, NK, T)
        /* As above plus an `_option` accessor returning { is_null, value }
         * for scalars declared optional in the schema. */
     48 #define __flatbuffers_define_scalar_optional_field(ID, N, NK, TK, T, V)\
     49 __flatbuffers_define_scalar_field(ID, N, NK, TK, T, V)\
     50 static inline TK ## _option_t N ## _ ## NK ## _option(N ## _table_t t__tmp)\
     51 { TK ## _option_t ret; __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
     52   ret.is_null = offset__tmp == 0; ret.value = offset__tmp ?\
     53   __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
     54   return ret; }
        /* Structs are stored inline in the table: return a pointer to the
         * struct, or 0 when absent.  Asserts when a required field (r) is
         * missing. */
     55 #define __flatbuffers_struct_field(T, ID, t, r)\
     56 {\
     57     __flatbuffers_read_vt(ID, offset__tmp, t)\
     58     if (offset__tmp) {\
     59         return (T)((uint8_t *)(t) + offset__tmp);\
     60     }\
     61     FLATCC_ASSERT(!(r) && "required field missing");\
     62     return 0;\
     63 }
        /* Vectors, tables and strings are stored by relative uoffset: follow
         * the uoffset stored in the field slot to reach the target.  `adjust`
         * optionally skips the uoffset-sized length header (used for
         * vectors/strings, see below). */
     64 #define __flatbuffers_offset_field(T, ID, t, r, adjust)\
     65 {\
     66     flatbuffers_uoffset_t *elem__tmp;\
     67     __flatbuffers_read_vt(ID, offset__tmp, t)\
     68     if (offset__tmp) {\
     69         elem__tmp = (flatbuffers_uoffset_t *)((uint8_t *)(t) + offset__tmp);\
     70         /* Add sizeof so C api can have raw access past header field. */\
     71         return (T)((uint8_t *)(elem__tmp) + adjust +\
     72               __flatbuffers_uoffset_read_from_pe(elem__tmp));\
     73     }\
     74     FLATCC_ASSERT(!(r) && "required field missing");\
     75     return 0;\
     76 }
     77 #define __flatbuffers_vector_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, sizeof(flatbuffers_uoffset_t))
     78 #define __flatbuffers_table_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, 0)
        /* Generators emitting the typed `_get` / bare-name / `_is_present`
         * accessor triple for struct, vector, table and string fields of
         * table N, field NK; string fields also get scan-by helpers. */
     79 #define __flatbuffers_define_struct_field(ID, N, NK, T, r)\
     80 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
     81 __flatbuffers_struct_field(T, ID, t__tmp, r)\
     82 static inline T N ## _ ## NK(N ## _table_t t__tmp)\
     83 __flatbuffers_struct_field(T, ID, t__tmp, r)\
     84 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
     85 __flatbuffers_field_present(ID, t__tmp)
     86 #define __flatbuffers_define_vector_field(ID, N, NK, T, r)\
     87 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
     88 __flatbuffers_vector_field(T, ID, t__tmp, r)\
     89 static inline T N ## _ ## NK(N ## _table_t t__tmp)\
     90 __flatbuffers_vector_field(T, ID, t__tmp, r)\
     91 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
     92 __flatbuffers_field_present(ID, t__tmp)
     93 #define __flatbuffers_define_table_field(ID, N, NK, T, r)\
     94 static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
     95 __flatbuffers_table_field(T, ID, t__tmp, r)\
     96 static inline T N ## _ ## NK(N ## _table_t t__tmp)\
     97 __flatbuffers_table_field(T, ID, t__tmp, r)\
     98 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
     99 __flatbuffers_field_present(ID, t__tmp)
    100 #define __flatbuffers_define_string_field(ID, N, NK, r)\
    101 static inline flatbuffers_string_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
    102 __flatbuffers_vector_field(flatbuffers_string_t, ID, t__tmp, r)\
    103 static inline flatbuffers_string_t N ## _ ## NK(N ## _table_t t__tmp)\
    104 __flatbuffers_vector_field(flatbuffers_string_t, ID, t__tmp, r)\
    105 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
    106 __flatbuffers_field_present(ID, t__tmp)\
    107 __flatbuffers_define_scan_by_string_field(N, NK)
        /* A vector's element count is the uoffset word stored immediately
         * before its first element; 0 for a null vector. */
    108 #define __flatbuffers_vec_len(vec)\
    109 { return (vec) ? (size_t)__flatbuffers_uoffset_read_from_pe((flatbuffers_uoffset_t *)vec - 1) : 0; }
    110 #define __flatbuffers_string_len(s) __flatbuffers_vec_len(s)
    111 static inline size_t flatbuffers_vec_len(const void *vec)
    112 __flatbuffers_vec_len(vec)
        /* Bounds-asserted element access for scalar and struct vectors. */
    113 #define __flatbuffers_scalar_vec_at(N, vec, i)\
    114 { FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range");\
    115   return __flatbuffers_read_scalar(N, &(vec)[i]); }
    116 #define __flatbuffers_struct_vec_at(vec, i)\
    117 { FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range"); return (vec) + (i); }
    118 /* `adjust` skips past the header for string vectors. */
        /* Offset vectors hold relative uoffsets: element access follows the
         * stored offset from the element's own position. */
    119 #define __flatbuffers_offset_vec_at(T, vec, i, adjust)\
    120 { const flatbuffers_uoffset_t *elem__tmp = (vec) + (i);\
    121   FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range");\
    122   return (T)((uint8_t *)(elem__tmp) + (size_t)__flatbuffers_uoffset_read_from_pe(elem__tmp) + (adjust)); }
    123 #define __flatbuffers_define_scalar_vec_len(N)\
    124 static inline size_t N ## _vec_len(N ##_vec_t vec__tmp)\
    125 { return flatbuffers_vec_len(vec__tmp); }
    126 #define __flatbuffers_define_scalar_vec_at(N, T) \
    127 static inline T N ## _vec_at(N ## _vec_t vec__tmp, size_t i__tmp)\
    128 __flatbuffers_scalar_vec_at(N, vec__tmp, i__tmp)
        /* Strings are char vectors (compared with strcmp below, so assumed
         * NUL terminated); `generic` values are the untyped targets of union
         * fields. */
    129 typedef const char *flatbuffers_string_t;
    130 static inline size_t flatbuffers_string_len(flatbuffers_string_t s)
    131 __flatbuffers_string_len(s)
    132 typedef const flatbuffers_uoffset_t *flatbuffers_string_vec_t;
    133 typedef flatbuffers_uoffset_t *flatbuffers_string_mutable_vec_t;
    134 static inline size_t flatbuffers_string_vec_len(flatbuffers_string_vec_t vec)
    135 __flatbuffers_vec_len(vec)
    136 static inline flatbuffers_string_t flatbuffers_string_vec_at(flatbuffers_string_vec_t vec, size_t i)
    137 __flatbuffers_offset_vec_at(flatbuffers_string_t, vec, i, sizeof(vec[0]))
    138 typedef const void *flatbuffers_generic_t;
    139 typedef void *flatbuffers_mutable_generic_t;
        /* A generic union value points at the length header; a string view of
         * it must skip one uoffset to reach the characters. */
    140 static inline flatbuffers_string_t flatbuffers_string_cast_from_generic(const flatbuffers_generic_t p)
    141 { return p ? ((const char *)p) + __flatbuffers_uoffset__size() : 0; }
    142 typedef const flatbuffers_uoffset_t *flatbuffers_generic_vec_t;
    143 typedef flatbuffers_uoffset_t *flatbuffers_generic_table_mutable_vec_t;
    144 static inline size_t flatbuffers_generic_vec_len(flatbuffers_generic_vec_t vec)
    145 __flatbuffers_vec_len(vec)
    146 static inline flatbuffers_generic_t flatbuffers_generic_vec_at(flatbuffers_generic_vec_t vec, size_t i)
    147 __flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, 0)
    148 static inline flatbuffers_generic_t flatbuffers_generic_vec_at_as_string(flatbuffers_generic_vec_t vec, size_t i)
    149 __flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, sizeof(vec[0]))
        /* A union value is a (type tag, untyped pointer) pair; a union vector
         * is a pair of parallel vectors holding the type tags and the value
         * offsets.  The mutable casts below only strip const. */
    150 typedef struct flatbuffers_union {
    151     flatbuffers_union_type_t type;
    152     flatbuffers_generic_t value;
    153 } flatbuffers_union_t;
    154 typedef struct flatbuffers_union_vec {
    155     const flatbuffers_union_type_t *type;
    156     const flatbuffers_uoffset_t *value;
    157 } flatbuffers_union_vec_t;
    158 typedef struct flatbuffers_mutable_union {
    159     flatbuffers_union_type_t type;
    160     flatbuffers_mutable_generic_t value;
    161 } flatbuffers_mutable_union_t;
    162 typedef struct flatbuffers_mutable_union_vec {
    163     flatbuffers_union_type_t *type;
    164     flatbuffers_uoffset_t *value;
    165 } flatbuffers_mutable_union_vec_t;
    166 static inline flatbuffers_mutable_union_t flatbuffers_mutable_union_cast(flatbuffers_union_t u__tmp)\
    167 { flatbuffers_mutable_union_t mu = { u__tmp.type, (flatbuffers_mutable_generic_t)u__tmp.value };\
    168   return mu; }
    169 static inline flatbuffers_mutable_union_vec_t flatbuffers_mutable_union_vec_cast(flatbuffers_union_vec_t uv__tmp)\
    170 { flatbuffers_mutable_union_vec_t muv =\
    171   { (flatbuffers_union_type_t *)uv__tmp.type, (flatbuffers_uoffset_t *)uv__tmp.value }; return muv; }
        /* Function body: read the union's type tag field (a utype scalar);
         * 0 means NONE / field absent. */
    172 #define __flatbuffers_union_type_field(ID, t)\
    173 {\
    174     __flatbuffers_read_vt(ID, offset__tmp, t)\
    175     return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(__flatbuffers_utype, t, offset__tmp) : 0;\
    176 }
    177 static inline flatbuffers_string_t flatbuffers_string_cast_from_union(const flatbuffers_union_t u__tmp)\
    178 { return flatbuffers_string_cast_from_generic(u__tmp.value); }
        /* Generate union field accessors: the type tag occupies the field slot
         * preceding the value (ID - 1), the value occupies slot ID.  `_union`
         * bundles both into a T ## _union_t (value left 0 when type is NONE);
         * `_as_string` reinterprets the value as a string. */
    179 #define __flatbuffers_define_union_field(NS, ID, N, NK, T, r)\
    180 static inline T ## _union_type_t N ## _ ## NK ## _type_get(N ## _table_t t__tmp)\
    181 __## NS ## union_type_field(((ID) - 1), t__tmp)\
    182 static inline NS ## generic_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
    183 __## NS ## table_field(NS ## generic_t, ID, t__tmp, r)\
    184 static inline T ## _union_type_t N ## _ ## NK ## _type(N ## _table_t t__tmp)\
    185 __## NS ## union_type_field(((ID) - 1), t__tmp)\
    186 static inline NS ## generic_t N ## _ ## NK(N ## _table_t t__tmp)\
    187 __## NS ## table_field(NS ## generic_t, ID, t__tmp, r)\
    188 static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
    189 __## NS ## field_present(ID, t__tmp)\
    190 static inline T ## _union_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
    191 { T ## _union_t u__tmp = { 0, 0 }; u__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
    192   if (u__tmp.type == 0) return u__tmp; u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
    193 static inline NS ## string_t N ## _ ## NK ## _as_string(N ## _table_t t__tmp)\
    194 { return NS ## string_cast_from_generic(N ## _ ## NK ## _get(t__tmp)); }\
    195 
        /* Union vector operations: length comes from the type vector; `at`
         * returns a (type, value) pair, with the value only resolved for a
         * non-NONE tag. */
    196 #define __flatbuffers_define_union_vector_ops(NS, T)\
    197 static inline size_t T ## _union_vec_len(T ## _union_vec_t uv__tmp)\
    198 { return NS ## vec_len(uv__tmp.type); }\
    199 static inline T ## _union_t T ## _union_vec_at(T ## _union_vec_t uv__tmp, size_t i__tmp)\
    200 { T ## _union_t u__tmp = { 0, 0 }; size_t n__tmp = NS ## vec_len(uv__tmp.type);\
    201   FLATCC_ASSERT(n__tmp > (i__tmp) && "index out of range"); u__tmp.type = uv__tmp.type[i__tmp];\
    202   /* Unknown type is treated as NONE for schema evolution. */\
    203   if (u__tmp.type == 0) return u__tmp;\
    204   u__tmp.value = NS ## generic_vec_at(uv__tmp.value, i__tmp); return u__tmp; }\
    205 static inline NS ## string_t T ## _union_vec_at_as_string(T ## _union_vec_t uv__tmp, size_t i__tmp)\
    206 { return (NS ## string_t) NS ## generic_vec_at_as_string(uv__tmp.value, i__tmp); }\
    207 
        /* Instantiate the union / union-vector typedefs and ops for a concrete
         * union type T in namespace NS. */
    208 #define __flatbuffers_define_union_vector(NS, T)\
    209 typedef NS ## union_vec_t T ## _union_vec_t;\
    210 typedef NS ## mutable_union_vec_t T ## _mutable_union_vec_t;\
    211 static inline T ## _mutable_union_vec_t T ## _mutable_union_vec_cast(T ## _union_vec_t u__tmp)\
    212 { return NS ## mutable_union_vec_cast(u__tmp); }\
    213 __## NS ## define_union_vector_ops(NS, T)
    214 #define __flatbuffers_define_union(NS, T)\
    215 typedef NS ## union_t T ## _union_t;\
    216 typedef NS ## mutable_union_t T ## _mutable_union_t;\
    217 static inline T ## _mutable_union_t T ## _mutable_union_cast(T ## _union_t u__tmp)\
    218 { return NS ## mutable_union_cast(u__tmp); }\
    219 __## NS ## define_union_vector(NS, T)
        /* Union vector field: the type vector occupies slot ID - 1, the value
         * vector slot ID; `_union` pairs them and asserts both have the same
         * length. */
    220 #define __flatbuffers_define_union_vector_field(NS, ID, N, NK, T, r)\
    221 __## NS ## define_vector_field(ID - 1, N, NK ## _type, T ## _vec_t, r)\
    222 __## NS ## define_vector_field(ID, N, NK, flatbuffers_generic_vec_t, r)\
    223 static inline T ## _union_vec_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
    224 { T ## _union_vec_t uv__tmp; uv__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
    225   uv__tmp.value = N ## _ ## NK(t__tmp);\
    226   FLATCC_ASSERT(NS ## vec_len(uv__tmp.type) == NS ## vec_len(uv__tmp.value)\
    227   && "union vector type length mismatch"); return uv__tmp; }
    228 #include <string.h>
        /* Sentinel returned by the find/scan operations below when no element
         * matches. */
    229 static const size_t flatbuffers_not_found = (size_t)-1;
    230 static const size_t flatbuffers_end = (size_t)-1;
    231 #define __flatbuffers_identity(n) (n)
    232 #define __flatbuffers_min(a, b) ((a) < (b) ? (a) : (b))
    233 /* Subtraction doesn't work for unsigned types. */
        /* Three-way compare yielding -1, 0 or 1 without subtracting. */
    234 #define __flatbuffers_scalar_cmp(x, y, n) ((x) < (y) ? -1 : (x) > (y))
        /* Compare a flatbuffers string against an n-byte bounded C string;
         * when the common prefix matches, the shorter string orders first. */
    235 static inline int __flatbuffers_string_n_cmp(flatbuffers_string_t v, const char *s, size_t n)
    236 { size_t nv = flatbuffers_string_len(v); int x = strncmp(v, s, nv < n ? nv : n);
    237   return x != 0 ? x : nv < n ? -1 : nv > n; }
    238 /* `n` arg unused, but needed by string find macro expansion. */
    239 static inline int __flatbuffers_string_cmp(flatbuffers_string_t v, const char *s, size_t n) { (void)n; return strcmp(v, s); }
    240 /* A = identity if searching scalar vectors rather than key fields. */
    241 /* Returns lowest matching index or not_found. */
        /* Lower-bound binary search over vector V, which must be sorted by the
         * key this find targets.  A extracts the key from an element, E
         * indexes, L yields the length, D is the three-way comparator, K/Kn
         * the key (and bound for string keys). */
    242 #define __flatbuffers_find_by_field(A, V, E, L, K, Kn, T, D)\
    243 { T v__tmp; size_t a__tmp = 0, b__tmp, m__tmp; if (!(b__tmp = L(V))) { return flatbuffers_not_found; }\
    244   --b__tmp;\
    245   while (a__tmp < b__tmp) {\
    246     m__tmp = a__tmp + ((b__tmp - a__tmp) >> 1);\
    247     v__tmp = A(E(V, m__tmp));\
    248     if ((D(v__tmp, (K), (Kn))) < 0) {\
    249       a__tmp = m__tmp + 1;\
    250     } else {\
    251       b__tmp = m__tmp;\
    252     }\
    253   }\
    254   if (a__tmp == b__tmp) {\
    255     v__tmp = A(E(V, a__tmp));\
    256     if (D(v__tmp, (K), (Kn)) == 0) {\
    257        return a__tmp;\
    258     }\
    259   }\
    260   return flatbuffers_not_found;\
    261 }
    262 #define __flatbuffers_find_by_scalar_field(A, V, E, L, K, T)\
    263 __flatbuffers_find_by_field(A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
    264 #define __flatbuffers_find_by_string_field(A, V, E, L, K)\
    265 __flatbuffers_find_by_field(A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
    266 #define __flatbuffers_find_by_string_n_field(A, V, E, L, K, Kn)\
    267 __flatbuffers_find_by_field(A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
        /* Generators for the typed `_vec_find[_by_...]` functions. */
    268 #define __flatbuffers_define_find_by_scalar_field(N, NK, TK)\
    269 static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, TK key__tmp)\
    270 __flatbuffers_find_by_scalar_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, TK)
    271 #define __flatbuffers_define_scalar_find(N, T)\
    272 static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, T key__tmp)\
    273 __flatbuffers_find_by_scalar_field(__flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
    274 #define __flatbuffers_define_find_by_string_field(N, NK) \
    275 /* Note: find only works on vectors sorted by this field. */\
    276 static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
    277 __flatbuffers_find_by_string_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
    278 static inline size_t N ## _vec_find_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
    279 __flatbuffers_find_by_string_n_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
        /* Forward unqualified `_vec_find` to the find on the default key
         * field NK. */
    280 #define __flatbuffers_define_default_find_by_scalar_field(N, NK, TK)\
    281 static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, TK key__tmp)\
    282 { return N ## _vec_find_by_ ## NK(vec__tmp, key__tmp); }
    283 #define __flatbuffers_define_default_find_by_string_field(N, NK) \
    284 static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, const char *s__tmp)\
    285 { return N ## _vec_find_by_ ## NK(vec__tmp, s__tmp); }\
    286 static inline size_t N ## _vec_find_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
    287 { return N ## _vec_find_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }
    288 /* A = identity if searching scalar vectors rather than key fields. */
    289 /* Returns lowest matching index or not_found. */
        /* Forward linear scan of V over [b, e): no sort order required. */
    290 #define __flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
    291 { T v__tmp; size_t i__tmp;\
    292   for (i__tmp = b; i__tmp < e; ++i__tmp) {\
    293     v__tmp = A(E(V, i__tmp));\
    294     if (D(v__tmp, (K), (Kn)) == 0) {\
    295        return i__tmp;\
    296     }\
    297   }\
    298   return flatbuffers_not_found;\
    299 }
        /* Reverse scan over [b, e): returns the highest matching index, or
         * not_found. */
    300 #define __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
    301 { T v__tmp; size_t i__tmp = e;\
    302   while (i__tmp-- > b) {\
    303     v__tmp = A(E(V, i__tmp));\
    304     if (D(v__tmp, (K), (Kn)) == 0) {\
    305        return i__tmp;\
    306     }\
    307   }\
    308   return flatbuffers_not_found;\
    309 }
    310 #define __flatbuffers_scan_by_scalar_field(b, e, A, V, E, L, K, T)\
    311 __flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
    312 #define __flatbuffers_scan_by_string_field(b, e, A, V, E, L, K)\
    313 __flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
    314 #define __flatbuffers_scan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
    315 __flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
    316 #define __flatbuffers_rscan_by_scalar_field(b, e, A, V, E, L, K, T)\
    317 __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
    318 #define __flatbuffers_rscan_by_string_field(b, e, A, V, E, L, K)\
    319 __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
    320 #define __flatbuffers_rscan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
    321 __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
        /* Generate `_vec_scan[_ex]` / `_vec_rscan[_ex]` over a scalar key
         * field; the `_ex` variants clamp `end` to the vector length. */
    322 #define __flatbuffers_define_scan_by_scalar_field(N, NK, T)\
    323 static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
    324 __flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
    325 static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
    326 __flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
    327 static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
    328 __flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
    329 static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
    330 __flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
        /* Same, but scanning the vector's own scalar elements (identity key). */
    331 #define __flatbuffers_define_scalar_scan(N, T)\
    332 static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, T key__tmp)\
    333 __flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
    334 static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
    335 __flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
    336 static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, T key__tmp)\
    337 __flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
    338 static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
    339 __flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
        /* Generate the full scan/rscan family over a string key field,
         * including `_n` length-bounded and `_ex` range-bounded variants. */
    340 #define __flatbuffers_define_scan_by_string_field(N, NK) \
    341 static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
    342 __flatbuffers_scan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
    343 static inline size_t N ## _vec_scan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
    344 __flatbuffers_scan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
    345 static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
    346 __flatbuffers_scan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
    347 static inline size_t N ## _vec_scan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
    348 __flatbuffers_scan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
    349 static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
    350 __flatbuffers_rscan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
    351 static inline size_t N ## _vec_rscan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
    352 __flatbuffers_rscan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
    353 static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
    354 __flatbuffers_rscan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
    355 static inline size_t N ## _vec_rscan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
    356 __flatbuffers_rscan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
        /* Forward the unqualified `_vec_scan` family to the scans on the
         * default key field NK. */
    357 #define __flatbuffers_define_default_scan_by_scalar_field(N, NK, TK)\
    358 static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, TK key__tmp)\
    359 { return N ## _vec_scan_by_ ## NK(vec__tmp, key__tmp); }\
    360 static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
    361 { return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }\
    362 static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, TK key__tmp)\
    363 { return N ## _vec_rscan_by_ ## NK(vec__tmp, key__tmp); }\
    364 static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
    365 { return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }
    366 #define __flatbuffers_define_default_scan_by_string_field(N, NK) \
    367 static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, const char *s__tmp)\
    368 { return N ## _vec_scan_by_ ## NK(vec__tmp, s__tmp); }\
    369 static inline size_t N ## _vec_scan_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
    370 { return N ## _vec_scan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
    371 static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
    372 { return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
    373 static inline size_t N ## _vec_scan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
    374 { return N ## _vec_scan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }\
    375 static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, const char *s__tmp)\
    376 { return N ## _vec_rscan_by_ ## NK(vec__tmp, s__tmp); }\
    377 static inline size_t N ## _vec_rscan_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
    378 { return N ## _vec_rscan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
    379 static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
    380 { return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
    381 static inline size_t N ## _vec_rscan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
    382 { return N ## _vec_rscan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }
        /* In-place heap sort over a mutable vector.  X names the generated
         * helper pair; A extracts the sort key from an element, D is the
         * three-way comparator, S swaps elements (TE is the element type
         * passed to S).  Children of node i are taken at 2i and 2i + 1. */
    383 #define __flatbuffers_heap_sort(N, X, A, E, L, TK, TE, D, S)\
    384 static inline void __ ## N ## X ## __heap_sift_down(\
    385         N ## _mutable_vec_t vec__tmp, size_t start__tmp, size_t end__tmp)\
    386 { size_t child__tmp, root__tmp; TK v1__tmp, v2__tmp, vroot__tmp;\
    387   root__tmp = start__tmp;\
    388   while ((root__tmp << 1) <= end__tmp) {\
    389     child__tmp = root__tmp << 1;\
    390     if (child__tmp < end__tmp) {\
    391       v1__tmp = A(E(vec__tmp, child__tmp));\
    392       v2__tmp = A(E(vec__tmp, child__tmp + 1));\
    393       if (D(v1__tmp, v2__tmp) < 0) {\
    394         child__tmp++;\
    395       }\
    396     }\
    397     vroot__tmp = A(E(vec__tmp, root__tmp));\
    398     v1__tmp = A(E(vec__tmp, child__tmp));\
    399     if (D(vroot__tmp, v1__tmp) < 0) {\
    400       S(vec__tmp, root__tmp, child__tmp, TE);\
    401       root__tmp = child__tmp;\
    402     } else {\
    403       return;\
    404     }\
    405   }\
    406 }\
        /* Heapify from the middle down, then repeatedly swap the max to the
         * shrinking tail and restore the heap. */
    407 static inline void __ ## N ## X ## __heap_sort(N ## _mutable_vec_t vec__tmp)\
    408 { size_t start__tmp, end__tmp, size__tmp;\
    409   size__tmp = L(vec__tmp); if (size__tmp == 0) return; end__tmp = size__tmp - 1; start__tmp = size__tmp >> 1;\
    410   do { __ ## N ## X ## __heap_sift_down(vec__tmp, start__tmp, end__tmp); } while (start__tmp--);\
    411   while (end__tmp > 0) { \
    412     S(vec__tmp, 0, end__tmp, TE);\
    413     __ ## N ## X ## __heap_sift_down(vec__tmp, 0, --end__tmp); } }
    414 #define __flatbuffers_define_sort_by_field(N, NK, TK, TE, D, S)\
    415   __flatbuffers_heap_sort(N, _sort_by_ ## NK, N ## _ ## NK ## _get, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
    416 static inline void N ## _vec_sort_by_ ## NK(N ## _mutable_vec_t vec__tmp)\
    417 { __ ## N ## _sort_by_ ## NK ## __heap_sort(vec__tmp); }
    418 #define __flatbuffers_define_sort(N, TK, TE, D, S)\
    419 __flatbuffers_heap_sort(N, , __flatbuffers_identity, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
    420 static inline void N ## _vec_sort(N ## _mutable_vec_t vec__tmp) { __ ## N ## __heap_sort(vec__tmp); }
    421 #define __flatbuffers_scalar_diff(x, y) ((x) < (y) ? -1 : (x) > (y))
    422 #define __flatbuffers_string_diff(x, y) __flatbuffers_string_n_cmp((x), (const char *)(y), flatbuffers_string_len(y))
        /* Element swap primitives for the sorts.  Swapping two relative
         * uoffset elements must compensate each stored offset by the
         * positional distance (a - b) * sizeof(elem) so both offsets still
         * target the same objects after the move. */
    423 #define __flatbuffers_value_swap(vec, a, b, TE) { TE x__tmp = vec[b]; vec[b] = vec[a]; vec[a] = x__tmp; }
    424 #define __flatbuffers_uoffset_swap(vec, a, b, TE)\
    425 { TE ta__tmp, tb__tmp, d__tmp;\
    426   d__tmp = (TE)((a - b) * sizeof(vec[0]));\
    427   ta__tmp =  __flatbuffers_uoffset_read_from_pe(vec + b) - d__tmp;\
    428   tb__tmp =  __flatbuffers_uoffset_read_from_pe(vec + a) + d__tmp;\
    429   __flatbuffers_uoffset_write_to_pe(vec + a, ta__tmp);\
    430   __flatbuffers_uoffset_write_to_pe(vec + b, tb__tmp); }
    431 #define __flatbuffers_scalar_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
    432 #define __flatbuffers_string_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
    433 #define __flatbuffers_struct_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
    434 #define __flatbuffers_table_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
        /* Tie key extraction, comparator and swap together for the supported
         * sort flavours. */
    435 #define __flatbuffers_define_struct_sort_by_scalar_field(N, NK, TK, TE)\
    436   __flatbuffers_define_sort_by_field(N, NK, TK, TE, __flatbuffers_scalar_diff, __flatbuffers_struct_swap)
    437 #define __flatbuffers_define_table_sort_by_scalar_field(N, NK, TK)\
    438   __flatbuffers_define_sort_by_field(N, NK, TK, flatbuffers_uoffset_t, __flatbuffers_scalar_diff, __flatbuffers_table_swap)
    439 #define __flatbuffers_define_table_sort_by_string_field(N, NK)\
    440   __flatbuffers_define_sort_by_field(N, NK, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_table_swap)
    441 #define __flatbuffers_define_scalar_sort(N, T) __flatbuffers_define_sort(N, T, T, __flatbuffers_scalar_diff, __flatbuffers_scalar_swap)
    442 #define __flatbuffers_define_string_sort() __flatbuffers_define_sort(flatbuffers_string, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_string_swap)
        /* Helpers used by generated `_sort` functions: sort a table's vector
         * field, recurse into a table/union member, or into every element of
         * a table/union vector field.  The const accessors are cast to their
         * mutable counterparts because sorting mutates the buffer in place. */
    443 #define __flatbuffers_sort_vector_field(N, NK, T, t)\
    444 { T ## _mutable_vec_t v__tmp = (T ## _mutable_vec_t) N ## _ ## NK ## _get(t);\
    445   if (v__tmp) T ## _vec_sort(v__tmp); }
    446 #define __flatbuffers_sort_table_field(N, NK, T, t)\
    447 { T ## _sort((T ## _mutable_table_t)N ## _ ## NK ## _get(t)); }
    448 #define __flatbuffers_sort_union_field(N, NK, T, t)\
    449 { T ## _sort(T ## _mutable_union_cast(N ## _ ## NK ## _union(t))); }
    450 #define __flatbuffers_sort_table_vector_field_elements(N, NK, T, t)\
    451 { T ## _vec_t v__tmp = N ## _ ## NK ## _get(t); size_t i__tmp, n__tmp;\
    452   n__tmp = T ## _vec_len(v__tmp); for (i__tmp = 0; i__tmp < n__tmp; ++i__tmp) {\
    453   T ## _sort((T ## _mutable_table_t)T ## _vec_at(v__tmp, i__tmp)); }}
    454 #define __flatbuffers_sort_union_vector_field_elements(N, NK, T, t)\
    455 { T ## _union_vec_t v__tmp = N ## _ ## NK ## _union(t); size_t i__tmp, n__tmp;\
    456   n__tmp = T ## _union_vec_len(v__tmp); for (i__tmp = 0; i__tmp < n__tmp; ++i__tmp) {\
    457   T ## _sort(T ## _mutable_union_cast(T ## _union_vec_at(v__tmp, i__tmp))); }}
/* Generate the read-only vector API for scalar element type T under the
 * name prefix N: the _vec_t/_mutable_vec_t typedefs plus len/at/find/
 * scan/sort accessors (all defined by macros earlier in this file). */
#define __flatbuffers_define_scalar_vector(N, T)\
typedef const T *N ## _vec_t;\
typedef T *N ## _mutable_vec_t;\
__flatbuffers_define_scalar_vec_len(N)\
__flatbuffers_define_scalar_vec_at(N, T)\
__flatbuffers_define_scalar_find(N, T)\
__flatbuffers_define_scalar_scan(N, T)\
__flatbuffers_define_scalar_sort(N, T)

/* Generate endian-aware accessors of bit width W for an integer type,
 * together with its full scalar vector API. */
#define __flatbuffers_define_integer_type(N, T, W)\
__flatcc_define_integer_accessors(N, T, W, flatbuffers_endian)\
__flatbuffers_define_scalar_vector(N, T)
/* Instantiate the vector API for every built-in scalar type, plus the
 * union type-tag vector used when reading union vectors. */
__flatbuffers_define_scalar_vector(flatbuffers_bool, flatbuffers_bool_t)
__flatbuffers_define_scalar_vector(flatbuffers_char, char)
__flatbuffers_define_scalar_vector(flatbuffers_uint8, uint8_t)
__flatbuffers_define_scalar_vector(flatbuffers_int8, int8_t)
__flatbuffers_define_scalar_vector(flatbuffers_uint16, uint16_t)
__flatbuffers_define_scalar_vector(flatbuffers_int16, int16_t)
__flatbuffers_define_scalar_vector(flatbuffers_uint32, uint32_t)
__flatbuffers_define_scalar_vector(flatbuffers_int32, int32_t)
__flatbuffers_define_scalar_vector(flatbuffers_uint64, uint64_t)
__flatbuffers_define_scalar_vector(flatbuffers_int64, int64_t)
__flatbuffers_define_scalar_vector(flatbuffers_float, float)
__flatbuffers_define_scalar_vector(flatbuffers_double, double)
__flatbuffers_define_scalar_vector(flatbuffers_union_type, flatbuffers_union_type_t)
/* Search helpers for string vectors; each function body is a macro
 * invocation whose definition appears earlier in this file. By flatcc
 * convention: find searches a key-sorted vector, scan searches linearly
 * from the front and rscan from the back; the _n variants compare only
 * the first n bytes of s, and the _ex variants restrict the search to
 * indices [begin, end) clamped to the vector length.
 * NOTE(review): the miss/return semantics are defined by the body macros
 * (not visible in this chunk) -- presumably an out-of-band "not found"
 * index; confirm against the macro definitions above. */
static inline size_t flatbuffers_string_vec_find(flatbuffers_string_vec_t vec, const char *s)
__flatbuffers_find_by_string_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_find_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
__flatbuffers_find_by_string_n_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_scan(flatbuffers_string_vec_t vec, const char *s)
__flatbuffers_scan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_scan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
__flatbuffers_scan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_scan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
__flatbuffers_scan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_scan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
__flatbuffers_scan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_rscan(flatbuffers_string_vec_t vec, const char *s)
__flatbuffers_rscan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_rscan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
__flatbuffers_rscan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_rscan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
__flatbuffers_rscan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_rscan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
__flatbuffers_rscan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
/* Instantiate the in-place sort for string vectors declared above. */
__flatbuffers_define_string_sort()
/* Accessors for a fixed-length scalar array field NK of struct N:
 * _get(t, i) returns element i converted from protocol endianness via
 * the TK reader, or 0 when t is null or i is out of range; _get_ptr
 * returns the raw (unconverted) array; _get_len yields the compile-time
 * length L. The bare N_NK(t, i) form is an alias of _get. */
#define __flatbuffers_define_struct_scalar_fixed_array_field(N, NK, TK, T, L)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp, size_t i__tmp)\
{ if (!t__tmp || i__tmp >= L) return 0;\
  return __flatbuffers_read_scalar(TK, &(t__tmp->NK[i__tmp])); }\
static inline const T *N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
{ return t__tmp ? t__tmp->NK : 0; }\
static inline size_t N ## _ ## NK ## _get_len(void) { return L; }\
static inline T N ## _ ## NK (N ## _struct_t t__tmp, size_t i__tmp)\
{ return N ## _ ## NK ## _get(t__tmp, i__tmp); }
/* Accessors for a fixed-length struct array field: here T is a struct
 * pointer type, so _get returns a pointer to element i, or 0 when t is
 * null or i is out of range. _get_ptr returns the first element and
 * _get_len the compile-time length L. */
#define __flatbuffers_define_struct_struct_fixed_array_field(N, NK, T, L)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp, size_t i__tmp)\
{ if (!t__tmp || i__tmp >= L) return 0; return t__tmp->NK + i__tmp; }static inline T N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
{ return t__tmp ? t__tmp->NK : 0; }\
static inline size_t N ## _ ## NK ## _get_len(void) { return L; }\
static inline T N ## _ ## NK(N ## _struct_t t__tmp, size_t i__tmp)\
{ if (!t__tmp || i__tmp >= L) return 0; return t__tmp->NK + i__tmp; }
/* Accessors for a scalar field NK of struct N: _get (and the bare N_NK
 * alias) read the value via the endian-aware TK reader, returning 0 for
 * a null struct pointer; _get_ptr exposes the stored value in place
 * (still in protocol endianness). Also instantiates the scan-by-field
 * helpers for this field. */
#define __flatbuffers_define_struct_scalar_field(N, NK, TK, T)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp)\
{ return t__tmp ? __flatbuffers_read_scalar(TK, &(t__tmp->NK)) : 0; }\
static inline const T *N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
{ return t__tmp ? &(t__tmp->NK) : 0; }\
static inline T N ## _ ## NK (N ## _struct_t t__tmp)\
{ return t__tmp ? __flatbuffers_read_scalar(TK, &(t__tmp->NK)) : 0; }\
__flatbuffers_define_scan_by_scalar_field(N, NK, T)
/* Accessor for a nested struct field: returns a pointer to the embedded
 * struct NK, or 0 when the outer struct pointer is null. */
#define __flatbuffers_define_struct_struct_field(N, NK, T)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp) { return t__tmp ? &(t__tmp->NK) : 0; }\
static inline T N ## _ ## NK (N ## _struct_t t__tmp) { return t__tmp ? &(t__tmp->NK) : 0; }
    531 /* If fid is null, the function returns true without testing as buffer is not expected to have any id. */
    532 static inline int flatbuffers_has_identifier(const void *buffer, const char *fid)
    533 { flatbuffers_thash_t id, id2 = 0; if (fid == 0) { return 1; };
    534   id2 = flatbuffers_type_hash_from_string(fid);
    535   id = __flatbuffers_thash_read_from_pe(((flatbuffers_uoffset_t *)buffer) + 1);
    536   return id2 == 0 || id == id2; }
    537 static inline int flatbuffers_has_type_hash(const void *buffer, flatbuffers_thash_t thash)
    538 { return thash == 0 || (__flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1) == thash); }
    539 
    540 static inline flatbuffers_thash_t flatbuffers_get_type_hash(const void *buffer)
    541 { return __flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1); }
    542 
    543 #define flatbuffers_verify_endian() flatbuffers_has_identifier("\x00\x00\x00\x00" "1234", "1234")
    544 static inline void *flatbuffers_read_size_prefix(void *b, size_t *size_out)
    545 { if (size_out) { *size_out = (size_t)__flatbuffers_uoffset_read_from_pe(b); }
    546   return (uint8_t *)b + sizeof(flatbuffers_uoffset_t); }
/* Null file identifier accepts anything, otherwise fid should be 4 characters. */
/* Resolve the root object of a buffer: after validating the file
 * identifier (or, in the typed variant, the type hash), follow the
 * leading uoffset to the root and cast it to T's kind K, where K is
 * either `table_` or `struct_`. Evaluates to 0 on null buffer or
 * identifier mismatch. */
#define __flatbuffers_read_root(T, K, buffer, fid)\
  ((!buffer || !flatbuffers_has_identifier(buffer, fid)) ? 0 :\
  ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
    __flatbuffers_uoffset_read_from_pe(buffer))))
#define __flatbuffers_read_typed_root(T, K, buffer, thash)\
  ((!buffer || !flatbuffers_has_type_hash(buffer, thash)) ? 0 :\
  ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
    __flatbuffers_uoffset_read_from_pe(buffer))))
/* Expose a nested-buffer field N of table C as a root object of type T
 * (kind K): generates _as_root_with_identifier (explicit fid),
 * _as_typed_root (container's type identifier) and _as_root (T's file
 * identifier). Each reads the raw bytes via the C_N field accessor and
 * delegates validation to __flatbuffers_read_root. */
#define __flatbuffers_nested_buffer_as_root(C, N, T, K)\
static inline T ## _ ## K ## t C ## _ ## N ## _as_root_with_identifier(C ## _ ## table_t t__tmp, const char *fid__tmp)\
{ const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }\
static inline T ## _ ## K ## t C ## _ ## N ## _as_typed_root(C ## _ ## table_t t__tmp)\
{ const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, C ## _ ## type_identifier); }\
static inline T ## _ ## K ## t C ## _ ## N ## _as_root(C ## _ ## table_t t__tmp)\
{ const char *fid__tmp = T ## _file_identifier;\
  const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }
/* Generate the N_as_root family for a raw buffer: validate by explicit
 * 4-char file identifier, by explicit type hash, by N's own file
 * identifier, or by N's precomputed type hash, then return the typed
 * root pointer (0 on mismatch or null buffer). */
#define __flatbuffers_buffer_as_root(N, K)\
static inline N ## _ ## K ## t N ## _as_root_with_identifier(const void *buffer__tmp, const char *fid__tmp)\
{ return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
static inline N ## _ ## K ## t N ## _as_root_with_type_hash(const void *buffer__tmp, flatbuffers_thash_t thash__tmp)\
{ return __flatbuffers_read_typed_root(N, K, buffer__tmp, thash__tmp); }\
static inline N ## _ ## K ## t N ## _as_root(const void *buffer__tmp)\
{ const char *fid__tmp = N ## _file_identifier;\
  return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
static inline N ## _ ## K ## t N ## _as_typed_root(const void *buffer__tmp)\
{ return __flatbuffers_read_typed_root(N, K, buffer__tmp, N ## _type_hash); }
/* Kind-specific entry points used by generated schema code. */
#define __flatbuffers_struct_as_root(N) __flatbuffers_buffer_as_root(N, struct_)
#define __flatbuffers_table_as_root(N) __flatbuffers_buffer_as_root(N, table_)
    576 
    577 #include "flatcc_epilogue.h"
#endif /* FLATBUFFERS_COMMON_READER_H */