/* Generated by ./src/xlat/gen.sh from ./src/xlat/kd_key_slock_keys.in; do not edit. */

#include "gcc_compat.h"
#include "static_assert.h"

#if defined(K_SHIFT_SLOCK) || (defined(HAVE_DECL_K_SHIFT_SLOCK) && HAVE_DECL_K_SHIFT_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_SHIFT_SLOCK) == (K(KT_SLOCK,KG_SHIFT)), "K_SHIFT_SLOCK != K(KT_SLOCK,KG_SHIFT)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_SHIFT_SLOCK K(KT_SLOCK,KG_SHIFT)
#endif
#if defined(K_SHIFT_SLOCK) || (defined(HAVE_DECL_K_SHIFT_SLOCK) && HAVE_DECL_K_SHIFT_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_SHIFT_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_SHIFT_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_SHIFT_SLOCK)
#endif
#if defined(K_ALTGR_SLOCK) || (defined(HAVE_DECL_K_ALTGR_SLOCK) && HAVE_DECL_K_ALTGR_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_ALTGR_SLOCK) == (K(KT_SLOCK,KG_ALTGR)), "K_ALTGR_SLOCK != K(KT_SLOCK,KG_ALTGR)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_ALTGR_SLOCK K(KT_SLOCK,KG_ALTGR)
#endif
#if defined(K_ALTGR_SLOCK) || (defined(HAVE_DECL_K_ALTGR_SLOCK) && HAVE_DECL_K_ALTGR_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_ALTGR_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_ALTGR_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_ALTGR_SLOCK)
#endif
#if defined(K_CTRL_SLOCK) || (defined(HAVE_DECL_K_CTRL_SLOCK) && HAVE_DECL_K_CTRL_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_CTRL_SLOCK) == (K(KT_SLOCK,KG_CTRL)), "K_CTRL_SLOCK != K(KT_SLOCK,KG_CTRL)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_CTRL_SLOCK K(KT_SLOCK,KG_CTRL)
#endif
#if defined(K_CTRL_SLOCK) || (defined(HAVE_DECL_K_CTRL_SLOCK) && HAVE_DECL_K_CTRL_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_CTRL_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_CTRL_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_CTRL_SLOCK)
#endif
#if defined(K_ALT_SLOCK) || (defined(HAVE_DECL_K_ALT_SLOCK) && HAVE_DECL_K_ALT_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_ALT_SLOCK) == (K(KT_SLOCK,KG_ALT)), "K_ALT_SLOCK != K(KT_SLOCK,KG_ALT)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_ALT_SLOCK K(KT_SLOCK,KG_ALT)
#endif
#if defined(K_ALT_SLOCK) || (defined(HAVE_DECL_K_ALT_SLOCK) && HAVE_DECL_K_ALT_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_ALT_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_ALT_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_ALT_SLOCK)
#endif
#if defined(K_SHIFTL_SLOCK) || (defined(HAVE_DECL_K_SHIFTL_SLOCK) && HAVE_DECL_K_SHIFTL_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_SHIFTL_SLOCK) == (K(KT_SLOCK,KG_SHIFTL)), "K_SHIFTL_SLOCK != K(KT_SLOCK,KG_SHIFTL)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_SHIFTL_SLOCK K(KT_SLOCK,KG_SHIFTL)
#endif
#if defined(K_SHIFTL_SLOCK) || (defined(HAVE_DECL_K_SHIFTL_SLOCK) && HAVE_DECL_K_SHIFTL_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_SHIFTL_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_SHIFTL_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_SHIFTL_SLOCK)
#endif
#if defined(K_SHIFTR_SLOCK) || (defined(HAVE_DECL_K_SHIFTR_SLOCK) && HAVE_DECL_K_SHIFTR_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_SHIFTR_SLOCK) == (K(KT_SLOCK,KG_SHIFTR)), "K_SHIFTR_SLOCK != K(KT_SLOCK,KG_SHIFTR)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_SHIFTR_SLOCK K(KT_SLOCK,KG_SHIFTR)
#endif
#if defined(K_SHIFTR_SLOCK) || (defined(HAVE_DECL_K_SHIFTR_SLOCK) && HAVE_DECL_K_SHIFTR_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_SHIFTR_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_SHIFTR_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_SHIFTR_SLOCK)
#endif
#if defined(K_CTRLL_SLOCK) || (defined(HAVE_DECL_K_CTRLL_SLOCK) && HAVE_DECL_K_CTRLL_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_CTRLL_SLOCK) == (K(KT_SLOCK,KG_CTRLL)), "K_CTRLL_SLOCK != K(KT_SLOCK,KG_CTRLL)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_CTRLL_SLOCK K(KT_SLOCK,KG_CTRLL)
#endif
#if defined(K_CTRLL_SLOCK) || (defined(HAVE_DECL_K_CTRLL_SLOCK) && HAVE_DECL_K_CTRLL_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_CTRLL_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_CTRLL_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_CTRLL_SLOCK)
#endif
#if defined(K_CTRLR_SLOCK) || (defined(HAVE_DECL_K_CTRLR_SLOCK) && HAVE_DECL_K_CTRLR_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_CTRLR_SLOCK) == (K(KT_SLOCK,KG_CTRLR)), "K_CTRLR_SLOCK != K(KT_SLOCK,KG_CTRLR)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_CTRLR_SLOCK K(KT_SLOCK,KG_CTRLR)
#endif
#if defined(K_CTRLR_SLOCK) || (defined(HAVE_DECL_K_CTRLR_SLOCK) && HAVE_DECL_K_CTRLR_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_CTRLR_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_CTRLR_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_CTRLR_SLOCK)
#endif
#if defined(K_CAPSSHIFT_SLOCK) || (defined(HAVE_DECL_K_CAPSSHIFT_SLOCK) && HAVE_DECL_K_CAPSSHIFT_SLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((K_CAPSSHIFT_SLOCK) == (K(KT_SLOCK,KG_CAPSSHIFT)), "K_CAPSSHIFT_SLOCK != K(KT_SLOCK,KG_CAPSSHIFT)");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define K_CAPSSHIFT_SLOCK K(KT_SLOCK,KG_CAPSSHIFT)
#endif
#if defined(K_CAPSSHIFT_SLOCK) || (defined(HAVE_DECL_K_CAPSSHIFT_SLOCK) && HAVE_DECL_K_CAPSSHIFT_SLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (K_CAPSSHIFT_SLOCK)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: K_CAPSSHIFT_SLOCK"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (K_CAPSSHIFT_SLOCK)
#endif
#undef XLAT_PREV_VAL

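/*
 * The xlat table itself, built from the constants validated above.  It is
 * skipped when only the macro definitions are requested (XLAT_MACROS_ONLY)
 * and must not be instantiated in mpers mode.
 */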
#ifndef XLAT_MACROS_ONLY

# ifdef IN_MPERS

#  error static const struct xlat kd_key_slock_keys in mpers mode

# else

DIAG_PUSH_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE
static const struct xlat_data kd_key_slock_keys_xdata[] = {
 XLAT(K_SHIFT_SLOCK),
 #define XLAT_VAL_0 ((unsigned) (K_SHIFT_SLOCK))
 #define XLAT_STR_0 STRINGIFY(K_SHIFT_SLOCK)
 XLAT(K_ALTGR_SLOCK),
 #define XLAT_VAL_1 ((unsigned) (K_ALTGR_SLOCK))
 #define XLAT_STR_1 STRINGIFY(K_ALTGR_SLOCK)
 XLAT(K_CTRL_SLOCK),
 #define XLAT_VAL_2 ((unsigned) (K_CTRL_SLOCK))
 #define XLAT_STR_2 STRINGIFY(K_CTRL_SLOCK)
 XLAT(K_ALT_SLOCK),
 #define XLAT_VAL_3 ((unsigned) (K_ALT_SLOCK))
 #define XLAT_STR_3 STRINGIFY(K_ALT_SLOCK)
 XLAT(K_SHIFTL_SLOCK),
 #define XLAT_VAL_4 ((unsigned) (K_SHIFTL_SLOCK))
 #define XLAT_STR_4 STRINGIFY(K_SHIFTL_SLOCK)
 XLAT(K_SHIFTR_SLOCK),
 #define XLAT_VAL_5 ((unsigned) (K_SHIFTR_SLOCK))
 #define XLAT_STR_5 STRINGIFY(K_SHIFTR_SLOCK)
 XLAT(K_CTRLL_SLOCK),
 #define XLAT_VAL_6 ((unsigned) (K_CTRLL_SLOCK))
 #define XLAT_STR_6 STRINGIFY(K_CTRLL_SLOCK)
 XLAT(K_CTRLR_SLOCK),
 #define XLAT_VAL_7 ((unsigned) (K_CTRLR_SLOCK))
 #define XLAT_STR_7 STRINGIFY(K_CTRLR_SLOCK)
 XLAT(K_CAPSSHIFT_SLOCK),
 #define XLAT_VAL_8 ((unsigned) (K_CAPSSHIFT_SLOCK))
 #define XLAT_STR_8 STRINGIFY(K_CAPSSHIFT_SLOCK)
};
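/*
 * Wrap the data in a struct xlat: flags_mask is the bitwise OR of all known
 * values and flags_strsz the combined size of their string representations.
 */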
static
const struct xlat kd_key_slock_keys[1] = { {
 .data = kd_key_slock_keys_xdata,
 .size = ARRAY_SIZE(kd_key_slock_keys_xdata),
 .type = XT_SORTED,
 .flags_mask = 0
#  ifdef XLAT_VAL_0
  | XLAT_VAL_0
#  endif
#  ifdef XLAT_VAL_1
  | XLAT_VAL_1
#  endif
#  ifdef XLAT_VAL_2
  | XLAT_VAL_2
#  endif
#  ifdef XLAT_VAL_3
  | XLAT_VAL_3
#  endif
#  ifdef XLAT_VAL_4
  | XLAT_VAL_4
#  endif
#  ifdef XLAT_VAL_5
  | XLAT_VAL_5
#  endif
#  ifdef XLAT_VAL_6
  | XLAT_VAL_6
#  endif
#  ifdef XLAT_VAL_7
  | XLAT_VAL_7
#  endif
#  ifdef XLAT_VAL_8
  | XLAT_VAL_8
#  endif
  ,
 .flags_strsz = 0
#  ifdef XLAT_STR_0
  + sizeof(XLAT_STR_0)
#  endif
#  ifdef XLAT_STR_1
  + sizeof(XLAT_STR_1)
#  endif
#  ifdef XLAT_STR_2
  + sizeof(XLAT_STR_2)
#  endif
#  ifdef XLAT_STR_3
  + sizeof(XLAT_STR_3)
#  endif
#  ifdef XLAT_STR_4
  + sizeof(XLAT_STR_4)
#  endif
#  ifdef XLAT_STR_5
  + sizeof(XLAT_STR_5)
#  endif
#  ifdef XLAT_STR_6
  + sizeof(XLAT_STR_6)
#  endif
#  ifdef XLAT_STR_7
  + sizeof(XLAT_STR_7)
#  endif
#  ifdef XLAT_STR_8
  + sizeof(XLAT_STR_8)
#  endif
  ,
} };
DIAG_POP_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE

#  undef XLAT_STR_0
#  undef XLAT_VAL_0
#  undef XLAT_STR_1
#  undef XLAT_VAL_1
#  undef XLAT_STR_2
#  undef XLAT_VAL_2
#  undef XLAT_STR_3
#  undef XLAT_VAL_3
#  undef XLAT_STR_4
#  undef XLAT_VAL_4
#  undef XLAT_STR_5
#  undef XLAT_VAL_5
#  undef XLAT_STR_6
#  undef XLAT_VAL_6
#  undef XLAT_STR_7
#  undef XLAT_VAL_7
#  undef XLAT_STR_8
#  undef XLAT_VAL_8
# endif /* !IN_MPERS */

#endif /* !XLAT_MACROS_ONLY */