/* Generated by ./src/xlat/gen.sh from ./src/xlat/perf_hw_cache_id.in; do not edit. */

#include "gcc_compat.h"
#include "static_assert.h"

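/*
 * For each PERF_COUNT_HW_CACHE_* constant, check that the value provided
 * by the system headers matches the expected one, or define it as a
 * fallback if the headers lack it; the XLAT_PREV_VAL assertions verify
 * that the values appear in strictly ascending order, as required for a
 * #sorted xlat.
 */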
#if defined(PERF_COUNT_HW_CACHE_L1D) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_L1D) && HAVE_DECL_PERF_COUNT_HW_CACHE_L1D)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_HW_CACHE_L1D) == (0), "PERF_COUNT_HW_CACHE_L1D != 0");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_HW_CACHE_L1D 0
#endif
#if defined(PERF_COUNT_HW_CACHE_L1D) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_L1D) && HAVE_DECL_PERF_COUNT_HW_CACHE_L1D)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_HW_CACHE_L1D)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: PERF_COUNT_HW_CACHE_L1D"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_HW_CACHE_L1D)
#endif
#if defined(PERF_COUNT_HW_CACHE_L1I) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_L1I) && HAVE_DECL_PERF_COUNT_HW_CACHE_L1I)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_HW_CACHE_L1I) == (1), "PERF_COUNT_HW_CACHE_L1I != 1");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_HW_CACHE_L1I 1
#endif
#if defined(PERF_COUNT_HW_CACHE_L1I) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_L1I) && HAVE_DECL_PERF_COUNT_HW_CACHE_L1I)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_HW_CACHE_L1I)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: PERF_COUNT_HW_CACHE_L1I"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_HW_CACHE_L1I)
#endif
#if defined(PERF_COUNT_HW_CACHE_LL) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_LL) && HAVE_DECL_PERF_COUNT_HW_CACHE_LL)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_HW_CACHE_LL) == (2), "PERF_COUNT_HW_CACHE_LL != 2");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_HW_CACHE_LL 2
#endif
#if defined(PERF_COUNT_HW_CACHE_LL) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_LL) && HAVE_DECL_PERF_COUNT_HW_CACHE_LL)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_HW_CACHE_LL)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: PERF_COUNT_HW_CACHE_LL"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_HW_CACHE_LL)
#endif
#if defined(PERF_COUNT_HW_CACHE_DTLB) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_DTLB) && HAVE_DECL_PERF_COUNT_HW_CACHE_DTLB)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_HW_CACHE_DTLB) == (3), "PERF_COUNT_HW_CACHE_DTLB != 3");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_HW_CACHE_DTLB 3
#endif
#if defined(PERF_COUNT_HW_CACHE_DTLB) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_DTLB) && HAVE_DECL_PERF_COUNT_HW_CACHE_DTLB)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_HW_CACHE_DTLB)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: PERF_COUNT_HW_CACHE_DTLB"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_HW_CACHE_DTLB)
#endif
#if defined(PERF_COUNT_HW_CACHE_ITLB) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_ITLB) && HAVE_DECL_PERF_COUNT_HW_CACHE_ITLB)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_HW_CACHE_ITLB) == (4), "PERF_COUNT_HW_CACHE_ITLB != 4");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_HW_CACHE_ITLB 4
#endif
#if defined(PERF_COUNT_HW_CACHE_ITLB) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_ITLB) && HAVE_DECL_PERF_COUNT_HW_CACHE_ITLB)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_HW_CACHE_ITLB)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: PERF_COUNT_HW_CACHE_ITLB"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_HW_CACHE_ITLB)
#endif
#if defined(PERF_COUNT_HW_CACHE_BPU) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_BPU) && HAVE_DECL_PERF_COUNT_HW_CACHE_BPU)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_HW_CACHE_BPU) == (5), "PERF_COUNT_HW_CACHE_BPU != 5");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_HW_CACHE_BPU 5
#endif
#if defined(PERF_COUNT_HW_CACHE_BPU) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_BPU) && HAVE_DECL_PERF_COUNT_HW_CACHE_BPU)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_HW_CACHE_BPU)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: PERF_COUNT_HW_CACHE_BPU"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_HW_CACHE_BPU)
#endif
#if defined(PERF_COUNT_HW_CACHE_NODE) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_NODE) && HAVE_DECL_PERF_COUNT_HW_CACHE_NODE)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_HW_CACHE_NODE) == (6), "PERF_COUNT_HW_CACHE_NODE != 6");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_HW_CACHE_NODE 6
#endif
#if defined(PERF_COUNT_HW_CACHE_NODE) || (defined(HAVE_DECL_PERF_COUNT_HW_CACHE_NODE) && HAVE_DECL_PERF_COUNT_HW_CACHE_NODE)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_HW_CACHE_NODE)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: PERF_COUNT_HW_CACHE_NODE"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_HW_CACHE_NODE)
#endif
#undef XLAT_PREV_VAL

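/*
 * With XLAT_MACROS_ONLY defined, only the fallback macro definitions above
 * are provided and the table itself is omitted.  Referencing this table in
 * mpers mode (IN_MPERS) is reported as an error.
 */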
#ifndef XLAT_MACROS_ONLY

# ifdef IN_MPERS

#  error static const struct xlat perf_hw_cache_id in mpers mode

# else

DIAG_PUSH_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE
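/*
 * Value/name pairs: XLAT() stores each constant along with its stringified
 * name; the accompanying XLAT_VAL_n and XLAT_STR_n macros are used below
 * to accumulate the combined value mask and the total string size.
 */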
static const struct xlat_data perf_hw_cache_id_xdata[] = {
 XLAT(PERF_COUNT_HW_CACHE_L1D),
 #define XLAT_VAL_0 ((unsigned) (PERF_COUNT_HW_CACHE_L1D))
 #define XLAT_STR_0 STRINGIFY(PERF_COUNT_HW_CACHE_L1D)
 XLAT(PERF_COUNT_HW_CACHE_L1I),
 #define XLAT_VAL_1 ((unsigned) (PERF_COUNT_HW_CACHE_L1I))
 #define XLAT_STR_1 STRINGIFY(PERF_COUNT_HW_CACHE_L1I)
 XLAT(PERF_COUNT_HW_CACHE_LL),
 #define XLAT_VAL_2 ((unsigned) (PERF_COUNT_HW_CACHE_LL))
 #define XLAT_STR_2 STRINGIFY(PERF_COUNT_HW_CACHE_LL)
 XLAT(PERF_COUNT_HW_CACHE_DTLB),
 #define XLAT_VAL_3 ((unsigned) (PERF_COUNT_HW_CACHE_DTLB))
 #define XLAT_STR_3 STRINGIFY(PERF_COUNT_HW_CACHE_DTLB)
 XLAT(PERF_COUNT_HW_CACHE_ITLB),
 #define XLAT_VAL_4 ((unsigned) (PERF_COUNT_HW_CACHE_ITLB))
 #define XLAT_STR_4 STRINGIFY(PERF_COUNT_HW_CACHE_ITLB)
 XLAT(PERF_COUNT_HW_CACHE_BPU),
 #define XLAT_VAL_5 ((unsigned) (PERF_COUNT_HW_CACHE_BPU))
 #define XLAT_STR_5 STRINGIFY(PERF_COUNT_HW_CACHE_BPU)
 XLAT(PERF_COUNT_HW_CACHE_NODE),
 #define XLAT_VAL_6 ((unsigned) (PERF_COUNT_HW_CACHE_NODE))
 #define XLAT_STR_6 STRINGIFY(PERF_COUNT_HW_CACHE_NODE)
};
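/*
 * The xlat object itself: an XT_SORTED table referencing the data above,
 * with .flags_mask holding the bitwise OR of all values and .flags_strsz
 * the combined size of all name strings.
 */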
static
const struct xlat perf_hw_cache_id[1] = { {
 .data = perf_hw_cache_id_xdata,
 .size = ARRAY_SIZE(perf_hw_cache_id_xdata),
 .type = XT_SORTED,
 .flags_mask = 0
#  ifdef XLAT_VAL_0
  | XLAT_VAL_0
#  endif
#  ifdef XLAT_VAL_1
  | XLAT_VAL_1
#  endif
#  ifdef XLAT_VAL_2
  | XLAT_VAL_2
#  endif
#  ifdef XLAT_VAL_3
  | XLAT_VAL_3
#  endif
#  ifdef XLAT_VAL_4
  | XLAT_VAL_4
#  endif
#  ifdef XLAT_VAL_5
  | XLAT_VAL_5
#  endif
#  ifdef XLAT_VAL_6
  | XLAT_VAL_6
#  endif
  ,
 .flags_strsz = 0
#  ifdef XLAT_STR_0
  + sizeof(XLAT_STR_0)
#  endif
#  ifdef XLAT_STR_1
  + sizeof(XLAT_STR_1)
#  endif
#  ifdef XLAT_STR_2
  + sizeof(XLAT_STR_2)
#  endif
#  ifdef XLAT_STR_3
  + sizeof(XLAT_STR_3)
#  endif
#  ifdef XLAT_STR_4
  + sizeof(XLAT_STR_4)
#  endif
#  ifdef XLAT_STR_5
  + sizeof(XLAT_STR_5)
#  endif
#  ifdef XLAT_STR_6
  + sizeof(XLAT_STR_6)
#  endif
  ,
} };
DIAG_POP_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE

#  undef XLAT_STR_0
#  undef XLAT_VAL_0
#  undef XLAT_STR_1
#  undef XLAT_VAL_1
#  undef XLAT_STR_2
#  undef XLAT_VAL_2
#  undef XLAT_STR_3
#  undef XLAT_VAL_3
#  undef XLAT_STR_4
#  undef XLAT_VAL_4
#  undef XLAT_STR_5
#  undef XLAT_VAL_5
#  undef XLAT_STR_6
#  undef XLAT_VAL_6
# endif /* !IN_MPERS */

#endif /* !XLAT_MACROS_ONLY */