/* Generated by ./src/xlat/gen.sh from ./src/xlat/sysctl_vm.in; do not edit. */

#include "gcc_compat.h"
#include "static_assert.h"

/*
 * Compile-time validation of the VM sysctl constants used by the
 * sysctl_vm xlat table below.  For every constant the generator emits
 * the same two-part pattern:
 *   1. If the system headers provide the constant (or configure detected
 *      a declaration via the corresponding HAVE_DECL_* macro),
 *      static_assert that it still has the value this file was generated
 *      against; otherwise supply the fallback definition so the table
 *      entry can always be emitted.
 *   2. Verify the values are strictly increasing -- the table is a
 *      "#sorted" xlat -- by comparing against the previous value, which
 *      is tracked in the XLAT_PREV_VAL macro.
 * The tautological-compare diagnostics are suppressed around the value
 * asserts because both sides are compile-time constants.
 */
#if defined(VM_UNUSED1) || (defined(HAVE_DECL_VM_UNUSED1) && HAVE_DECL_VM_UNUSED1)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED1) == (1), "VM_UNUSED1 != 1");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED1 1
#endif
#if defined(VM_UNUSED1) || (defined(HAVE_DECL_VM_UNUSED1) && HAVE_DECL_VM_UNUSED1)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED1)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED1"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED1)
#endif
#if defined(VM_UNUSED2) || (defined(HAVE_DECL_VM_UNUSED2) && HAVE_DECL_VM_UNUSED2)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED2) == (2), "VM_UNUSED2 != 2");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED2 2
#endif
#if defined(VM_UNUSED2) || (defined(HAVE_DECL_VM_UNUSED2) && HAVE_DECL_VM_UNUSED2)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED2)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED2"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED2)
#endif
#if defined(VM_UNUSED3) || (defined(HAVE_DECL_VM_UNUSED3) && HAVE_DECL_VM_UNUSED3)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED3) == (3), "VM_UNUSED3 != 3");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED3 3
#endif
#if defined(VM_UNUSED3) || (defined(HAVE_DECL_VM_UNUSED3) && HAVE_DECL_VM_UNUSED3)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED3)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED3"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED3)
#endif
#if defined(VM_UNUSED4) || (defined(HAVE_DECL_VM_UNUSED4) && HAVE_DECL_VM_UNUSED4)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED4) == (4), "VM_UNUSED4 != 4");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED4 4
#endif
#if defined(VM_UNUSED4) || (defined(HAVE_DECL_VM_UNUSED4) && HAVE_DECL_VM_UNUSED4)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED4)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED4"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED4)
#endif
#if defined(VM_OVERCOMMIT_MEMORY) || (defined(HAVE_DECL_VM_OVERCOMMIT_MEMORY) && HAVE_DECL_VM_OVERCOMMIT_MEMORY)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_OVERCOMMIT_MEMORY) == (5), "VM_OVERCOMMIT_MEMORY != 5");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_OVERCOMMIT_MEMORY 5
#endif
#if defined(VM_OVERCOMMIT_MEMORY) || (defined(HAVE_DECL_VM_OVERCOMMIT_MEMORY) && HAVE_DECL_VM_OVERCOMMIT_MEMORY)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_OVERCOMMIT_MEMORY)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_OVERCOMMIT_MEMORY"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_OVERCOMMIT_MEMORY)
#endif
/* NOTE(review): there is no VM_UNUSED6 entry; value 6 is VM_UNUSED5 and
 * value 7 is VM_UNUSED7 -- presumably mirroring the kernel's sysctl
 * enum; confirm against linux/sysctl.h rather than "fixing" the gap. */
#if defined(VM_UNUSED5) || (defined(HAVE_DECL_VM_UNUSED5) && HAVE_DECL_VM_UNUSED5)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED5) == (6), "VM_UNUSED5 != 6");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED5 6
#endif
#if defined(VM_UNUSED5) || (defined(HAVE_DECL_VM_UNUSED5) && HAVE_DECL_VM_UNUSED5)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED5)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED5"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED5)
#endif
#if defined(VM_UNUSED7) || (defined(HAVE_DECL_VM_UNUSED7) && HAVE_DECL_VM_UNUSED7)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED7) == (7), "VM_UNUSED7 != 7");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED7 7
#endif
#if defined(VM_UNUSED7) || (defined(HAVE_DECL_VM_UNUSED7) && HAVE_DECL_VM_UNUSED7)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED7)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED7"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED7)
#endif
#if defined(VM_UNUSED8) || (defined(HAVE_DECL_VM_UNUSED8) && HAVE_DECL_VM_UNUSED8)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED8) == (8), "VM_UNUSED8 != 8");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED8 8
#endif
#if defined(VM_UNUSED8) || (defined(HAVE_DECL_VM_UNUSED8) && HAVE_DECL_VM_UNUSED8)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED8)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED8"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED8)
#endif
#if defined(VM_UNUSED9) || (defined(HAVE_DECL_VM_UNUSED9) && HAVE_DECL_VM_UNUSED9)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_UNUSED9) == (9), "VM_UNUSED9 != 9");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_UNUSED9 9
#endif
#if defined(VM_UNUSED9) || (defined(HAVE_DECL_VM_UNUSED9) && HAVE_DECL_VM_UNUSED9)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_UNUSED9)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_UNUSED9"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_UNUSED9)
#endif
#if defined(VM_PAGE_CLUSTER) || (defined(HAVE_DECL_VM_PAGE_CLUSTER) && HAVE_DECL_VM_PAGE_CLUSTER)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_PAGE_CLUSTER) == (10), "VM_PAGE_CLUSTER != 10");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_PAGE_CLUSTER 10
#endif
#if defined(VM_PAGE_CLUSTER) || (defined(HAVE_DECL_VM_PAGE_CLUSTER) && HAVE_DECL_VM_PAGE_CLUSTER)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_PAGE_CLUSTER)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_PAGE_CLUSTER"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_PAGE_CLUSTER)
#endif
#if defined(VM_DIRTY_BACKGROUND) || (defined(HAVE_DECL_VM_DIRTY_BACKGROUND) && HAVE_DECL_VM_DIRTY_BACKGROUND)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_DIRTY_BACKGROUND) == (11), "VM_DIRTY_BACKGROUND != 11");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_DIRTY_BACKGROUND 11
#endif
#if defined(VM_DIRTY_BACKGROUND) || (defined(HAVE_DECL_VM_DIRTY_BACKGROUND) && HAVE_DECL_VM_DIRTY_BACKGROUND)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_DIRTY_BACKGROUND)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_DIRTY_BACKGROUND"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_DIRTY_BACKGROUND)
#endif
#if defined(VM_DIRTY_RATIO) || (defined(HAVE_DECL_VM_DIRTY_RATIO) && HAVE_DECL_VM_DIRTY_RATIO)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_DIRTY_RATIO) == (12), "VM_DIRTY_RATIO != 12");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_DIRTY_RATIO 12
#endif
#if defined(VM_DIRTY_RATIO) || (defined(HAVE_DECL_VM_DIRTY_RATIO) && HAVE_DECL_VM_DIRTY_RATIO)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_DIRTY_RATIO)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_DIRTY_RATIO"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_DIRTY_RATIO)
#endif
#if defined(VM_DIRTY_WB_CS) || (defined(HAVE_DECL_VM_DIRTY_WB_CS) && HAVE_DECL_VM_DIRTY_WB_CS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_DIRTY_WB_CS) == (13), "VM_DIRTY_WB_CS != 13");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_DIRTY_WB_CS 13
#endif
#if defined(VM_DIRTY_WB_CS) || (defined(HAVE_DECL_VM_DIRTY_WB_CS) && HAVE_DECL_VM_DIRTY_WB_CS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_DIRTY_WB_CS)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_DIRTY_WB_CS"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_DIRTY_WB_CS)
#endif
#if defined(VM_DIRTY_EXPIRE_CS) || (defined(HAVE_DECL_VM_DIRTY_EXPIRE_CS) && HAVE_DECL_VM_DIRTY_EXPIRE_CS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_DIRTY_EXPIRE_CS) == (14), "VM_DIRTY_EXPIRE_CS != 14");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_DIRTY_EXPIRE_CS 14
#endif
#if defined(VM_DIRTY_EXPIRE_CS) || (defined(HAVE_DECL_VM_DIRTY_EXPIRE_CS) && HAVE_DECL_VM_DIRTY_EXPIRE_CS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_DIRTY_EXPIRE_CS)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_DIRTY_EXPIRE_CS"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_DIRTY_EXPIRE_CS)
#endif
#if defined(VM_NR_PDFLUSH_THREADS) || (defined(HAVE_DECL_VM_NR_PDFLUSH_THREADS) && HAVE_DECL_VM_NR_PDFLUSH_THREADS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_NR_PDFLUSH_THREADS) == (15), "VM_NR_PDFLUSH_THREADS != 15");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_NR_PDFLUSH_THREADS 15
#endif
#if defined(VM_NR_PDFLUSH_THREADS) || (defined(HAVE_DECL_VM_NR_PDFLUSH_THREADS) && HAVE_DECL_VM_NR_PDFLUSH_THREADS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_NR_PDFLUSH_THREADS)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_NR_PDFLUSH_THREADS"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_NR_PDFLUSH_THREADS)
#endif
#if defined(VM_OVERCOMMIT_RATIO) || (defined(HAVE_DECL_VM_OVERCOMMIT_RATIO) && HAVE_DECL_VM_OVERCOMMIT_RATIO)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_OVERCOMMIT_RATIO) == (16), "VM_OVERCOMMIT_RATIO != 16");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_OVERCOMMIT_RATIO 16
#endif
#if defined(VM_OVERCOMMIT_RATIO) || (defined(HAVE_DECL_VM_OVERCOMMIT_RATIO) && HAVE_DECL_VM_OVERCOMMIT_RATIO)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_OVERCOMMIT_RATIO)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_OVERCOMMIT_RATIO"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_OVERCOMMIT_RATIO)
#endif
#if defined(VM_PAGEBUF) || (defined(HAVE_DECL_VM_PAGEBUF) && HAVE_DECL_VM_PAGEBUF)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_PAGEBUF) == (17), "VM_PAGEBUF != 17");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_PAGEBUF 17
#endif
#if defined(VM_PAGEBUF) || (defined(HAVE_DECL_VM_PAGEBUF) && HAVE_DECL_VM_PAGEBUF)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_PAGEBUF)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_PAGEBUF"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_PAGEBUF)
#endif
#if defined(VM_HUGETLB_PAGES) || (defined(HAVE_DECL_VM_HUGETLB_PAGES) && HAVE_DECL_VM_HUGETLB_PAGES)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_HUGETLB_PAGES) == (18), "VM_HUGETLB_PAGES != 18");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_HUGETLB_PAGES 18
#endif
#if defined(VM_HUGETLB_PAGES) || (defined(HAVE_DECL_VM_HUGETLB_PAGES) && HAVE_DECL_VM_HUGETLB_PAGES)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_HUGETLB_PAGES)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_HUGETLB_PAGES"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_HUGETLB_PAGES)
#endif
#if defined(VM_SWAPPINESS) || (defined(HAVE_DECL_VM_SWAPPINESS) && HAVE_DECL_VM_SWAPPINESS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_SWAPPINESS) == (19), "VM_SWAPPINESS != 19");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_SWAPPINESS 19
#endif
#if defined(VM_SWAPPINESS) || (defined(HAVE_DECL_VM_SWAPPINESS) && HAVE_DECL_VM_SWAPPINESS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_SWAPPINESS)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_SWAPPINESS"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_SWAPPINESS)
#endif
#if defined(VM_LOWMEM_RESERVE_RATIO) || (defined(HAVE_DECL_VM_LOWMEM_RESERVE_RATIO) && HAVE_DECL_VM_LOWMEM_RESERVE_RATIO)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_LOWMEM_RESERVE_RATIO) == (20), "VM_LOWMEM_RESERVE_RATIO != 20");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_LOWMEM_RESERVE_RATIO 20
#endif
#if defined(VM_LOWMEM_RESERVE_RATIO) || (defined(HAVE_DECL_VM_LOWMEM_RESERVE_RATIO) && HAVE_DECL_VM_LOWMEM_RESERVE_RATIO)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_LOWMEM_RESERVE_RATIO)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_LOWMEM_RESERVE_RATIO"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_LOWMEM_RESERVE_RATIO)
#endif
#if defined(VM_MIN_FREE_KBYTES) || (defined(HAVE_DECL_VM_MIN_FREE_KBYTES) && HAVE_DECL_VM_MIN_FREE_KBYTES)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_MIN_FREE_KBYTES) == (21), "VM_MIN_FREE_KBYTES != 21");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_MIN_FREE_KBYTES 21
#endif
#if defined(VM_MIN_FREE_KBYTES) || (defined(HAVE_DECL_VM_MIN_FREE_KBYTES) && HAVE_DECL_VM_MIN_FREE_KBYTES)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_MIN_FREE_KBYTES)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_MIN_FREE_KBYTES"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_MIN_FREE_KBYTES)
#endif
#if defined(VM_MAX_MAP_COUNT) || (defined(HAVE_DECL_VM_MAX_MAP_COUNT) && HAVE_DECL_VM_MAX_MAP_COUNT)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_MAX_MAP_COUNT) == (22), "VM_MAX_MAP_COUNT != 22");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_MAX_MAP_COUNT 22
#endif
#if defined(VM_MAX_MAP_COUNT) || (defined(HAVE_DECL_VM_MAX_MAP_COUNT) && HAVE_DECL_VM_MAX_MAP_COUNT)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_MAX_MAP_COUNT)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_MAX_MAP_COUNT"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_MAX_MAP_COUNT)
#endif
#if defined(VM_LAPTOP_MODE) || (defined(HAVE_DECL_VM_LAPTOP_MODE) && HAVE_DECL_VM_LAPTOP_MODE)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_LAPTOP_MODE) == (23), "VM_LAPTOP_MODE != 23");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_LAPTOP_MODE 23
#endif
#if defined(VM_LAPTOP_MODE) || (defined(HAVE_DECL_VM_LAPTOP_MODE) && HAVE_DECL_VM_LAPTOP_MODE)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_LAPTOP_MODE)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_LAPTOP_MODE"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_LAPTOP_MODE)
#endif
#if defined(VM_BLOCK_DUMP) || (defined(HAVE_DECL_VM_BLOCK_DUMP) && HAVE_DECL_VM_BLOCK_DUMP)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_BLOCK_DUMP) == (24), "VM_BLOCK_DUMP != 24");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_BLOCK_DUMP 24
#endif
#if defined(VM_BLOCK_DUMP) || (defined(HAVE_DECL_VM_BLOCK_DUMP) && HAVE_DECL_VM_BLOCK_DUMP)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_BLOCK_DUMP)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_BLOCK_DUMP"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_BLOCK_DUMP)
#endif
#if defined(VM_HUGETLB_GROUP) || (defined(HAVE_DECL_VM_HUGETLB_GROUP) && HAVE_DECL_VM_HUGETLB_GROUP)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_HUGETLB_GROUP) == (25), "VM_HUGETLB_GROUP != 25");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_HUGETLB_GROUP 25
#endif
#if defined(VM_HUGETLB_GROUP) || (defined(HAVE_DECL_VM_HUGETLB_GROUP) && HAVE_DECL_VM_HUGETLB_GROUP)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_HUGETLB_GROUP)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_HUGETLB_GROUP"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_HUGETLB_GROUP)
#endif
#if defined(VM_VFS_CACHE_PRESSURE) || (defined(HAVE_DECL_VM_VFS_CACHE_PRESSURE) && HAVE_DECL_VM_VFS_CACHE_PRESSURE)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_VFS_CACHE_PRESSURE) == (26), "VM_VFS_CACHE_PRESSURE != 26");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_VFS_CACHE_PRESSURE 26
#endif
#if defined(VM_VFS_CACHE_PRESSURE) || (defined(HAVE_DECL_VM_VFS_CACHE_PRESSURE) && HAVE_DECL_VM_VFS_CACHE_PRESSURE)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_VFS_CACHE_PRESSURE)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_VFS_CACHE_PRESSURE"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_VFS_CACHE_PRESSURE)
#endif
#if defined(VM_LEGACY_VA_LAYOUT) || (defined(HAVE_DECL_VM_LEGACY_VA_LAYOUT) && HAVE_DECL_VM_LEGACY_VA_LAYOUT)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_LEGACY_VA_LAYOUT) == (27), "VM_LEGACY_VA_LAYOUT != 27");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_LEGACY_VA_LAYOUT 27
#endif
#if defined(VM_LEGACY_VA_LAYOUT) || (defined(HAVE_DECL_VM_LEGACY_VA_LAYOUT) && HAVE_DECL_VM_LEGACY_VA_LAYOUT)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_LEGACY_VA_LAYOUT)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_LEGACY_VA_LAYOUT"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_LEGACY_VA_LAYOUT)
#endif
#if defined(VM_SWAP_TOKEN_TIMEOUT) || (defined(HAVE_DECL_VM_SWAP_TOKEN_TIMEOUT) && HAVE_DECL_VM_SWAP_TOKEN_TIMEOUT)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_SWAP_TOKEN_TIMEOUT) == (28), "VM_SWAP_TOKEN_TIMEOUT != 28");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_SWAP_TOKEN_TIMEOUT 28
#endif
#if defined(VM_SWAP_TOKEN_TIMEOUT) || (defined(HAVE_DECL_VM_SWAP_TOKEN_TIMEOUT) && HAVE_DECL_VM_SWAP_TOKEN_TIMEOUT)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_SWAP_TOKEN_TIMEOUT)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_SWAP_TOKEN_TIMEOUT"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_SWAP_TOKEN_TIMEOUT)
#endif
#if defined(VM_DROP_PAGECACHE) || (defined(HAVE_DECL_VM_DROP_PAGECACHE) && HAVE_DECL_VM_DROP_PAGECACHE)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_DROP_PAGECACHE) == (29), "VM_DROP_PAGECACHE != 29");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_DROP_PAGECACHE 29
#endif
#if defined(VM_DROP_PAGECACHE) || (defined(HAVE_DECL_VM_DROP_PAGECACHE) && HAVE_DECL_VM_DROP_PAGECACHE)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_DROP_PAGECACHE)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_DROP_PAGECACHE"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_DROP_PAGECACHE)
#endif
#if defined(VM_PERCPU_PAGELIST_FRACTION) || (defined(HAVE_DECL_VM_PERCPU_PAGELIST_FRACTION) && HAVE_DECL_VM_PERCPU_PAGELIST_FRACTION)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_PERCPU_PAGELIST_FRACTION) == (30), "VM_PERCPU_PAGELIST_FRACTION != 30");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_PERCPU_PAGELIST_FRACTION 30
#endif
#if defined(VM_PERCPU_PAGELIST_FRACTION) || (defined(HAVE_DECL_VM_PERCPU_PAGELIST_FRACTION) && HAVE_DECL_VM_PERCPU_PAGELIST_FRACTION)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_PERCPU_PAGELIST_FRACTION)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_PERCPU_PAGELIST_FRACTION"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_PERCPU_PAGELIST_FRACTION)
#endif
#if defined(VM_ZONE_RECLAIM_MODE) || (defined(HAVE_DECL_VM_ZONE_RECLAIM_MODE) && HAVE_DECL_VM_ZONE_RECLAIM_MODE)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_ZONE_RECLAIM_MODE) == (31), "VM_ZONE_RECLAIM_MODE != 31");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_ZONE_RECLAIM_MODE 31
#endif
#if defined(VM_ZONE_RECLAIM_MODE) || (defined(HAVE_DECL_VM_ZONE_RECLAIM_MODE) && HAVE_DECL_VM_ZONE_RECLAIM_MODE)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_ZONE_RECLAIM_MODE)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_ZONE_RECLAIM_MODE"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_ZONE_RECLAIM_MODE)
#endif
#if defined(VM_MIN_UNMAPPED) || (defined(HAVE_DECL_VM_MIN_UNMAPPED) && HAVE_DECL_VM_MIN_UNMAPPED)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_MIN_UNMAPPED) == (32), "VM_MIN_UNMAPPED != 32");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_MIN_UNMAPPED 32
#endif
#if defined(VM_MIN_UNMAPPED) || (defined(HAVE_DECL_VM_MIN_UNMAPPED) && HAVE_DECL_VM_MIN_UNMAPPED)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_MIN_UNMAPPED)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_MIN_UNMAPPED"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_MIN_UNMAPPED)
#endif
#if defined(VM_PANIC_ON_OOM) || (defined(HAVE_DECL_VM_PANIC_ON_OOM) && HAVE_DECL_VM_PANIC_ON_OOM)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_PANIC_ON_OOM) == (33), "VM_PANIC_ON_OOM != 33");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_PANIC_ON_OOM 33
#endif
#if defined(VM_PANIC_ON_OOM) || (defined(HAVE_DECL_VM_PANIC_ON_OOM) && HAVE_DECL_VM_PANIC_ON_OOM)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_PANIC_ON_OOM)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_PANIC_ON_OOM"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_PANIC_ON_OOM)
#endif
#if defined(VM_VDSO_ENABLED) || (defined(HAVE_DECL_VM_VDSO_ENABLED) && HAVE_DECL_VM_VDSO_ENABLED)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_VDSO_ENABLED) == (34), "VM_VDSO_ENABLED != 34");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_VDSO_ENABLED 34
#endif
#if defined(VM_VDSO_ENABLED) || (defined(HAVE_DECL_VM_VDSO_ENABLED) && HAVE_DECL_VM_VDSO_ENABLED)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_VDSO_ENABLED)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_VDSO_ENABLED"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_VDSO_ENABLED)
#endif
#if defined(VM_MIN_SLAB) || (defined(HAVE_DECL_VM_MIN_SLAB) && HAVE_DECL_VM_MIN_SLAB)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_MIN_SLAB) == (35), "VM_MIN_SLAB != 35");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_MIN_SLAB 35
#endif
#if defined(VM_MIN_SLAB) || (defined(HAVE_DECL_VM_MIN_SLAB) && HAVE_DECL_VM_MIN_SLAB)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_MIN_SLAB)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_MIN_SLAB"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_MIN_SLAB)
#endif
/* NOTE(review): the jump from 35 to 1111 is part of the generated data;
 * the VM_CMM_* sysctls are presumably the arch-specific CMM entries --
 * confirm against linux/sysctl.h before touching these values. */
#if defined(VM_CMM_PAGES) || (defined(HAVE_DECL_VM_CMM_PAGES) && HAVE_DECL_VM_CMM_PAGES)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_CMM_PAGES) == (1111), "VM_CMM_PAGES != 1111");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_CMM_PAGES 1111
#endif
#if defined(VM_CMM_PAGES) || (defined(HAVE_DECL_VM_CMM_PAGES) && HAVE_DECL_VM_CMM_PAGES)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_CMM_PAGES)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_CMM_PAGES"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_CMM_PAGES)
#endif
#if defined(VM_CMM_TIMED_PAGES) || (defined(HAVE_DECL_VM_CMM_TIMED_PAGES) && HAVE_DECL_VM_CMM_TIMED_PAGES)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_CMM_TIMED_PAGES) == (1112), "VM_CMM_TIMED_PAGES != 1112");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_CMM_TIMED_PAGES 1112
#endif
#if defined(VM_CMM_TIMED_PAGES) || (defined(HAVE_DECL_VM_CMM_TIMED_PAGES) && HAVE_DECL_VM_CMM_TIMED_PAGES)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_CMM_TIMED_PAGES)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_CMM_TIMED_PAGES"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_CMM_TIMED_PAGES)
#endif
#if defined(VM_CMM_TIMEOUT) || (defined(HAVE_DECL_VM_CMM_TIMEOUT) && HAVE_DECL_VM_CMM_TIMEOUT)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((VM_CMM_TIMEOUT) == (1113), "VM_CMM_TIMEOUT != 1113");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define VM_CMM_TIMEOUT 1113
#endif
#if defined(VM_CMM_TIMEOUT) || (defined(HAVE_DECL_VM_CMM_TIMEOUT) && HAVE_DECL_VM_CMM_TIMEOUT)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (VM_CMM_TIMEOUT)
      > (unsigned long long) (XLAT_PREV_VAL),
      "Incorrect order in #sorted xlat: VM_CMM_TIMEOUT"
      " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (VM_CMM_TIMEOUT)
#endif
/* All order checks done; drop the tracking macro. */
#undef XLAT_PREV_VAL

#ifndef XLAT_MACROS_ONLY

# ifdef IN_MPERS

#  error static const struct xlat sysctl_vm in mpers mode

# else

DIAG_PUSH_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE
/*
 * The name/value pairs backing the sysctl_vm xlat.  Each XLAT(x) entry
 * presumably expands to a { value, "name" }-style xlat_data initializer
 * (the macro is defined elsewhere -- verify in xlat headers).  The
 * XLAT_VAL_n / XLAT_STR_n macros defined alongside each entry are
 * consumed below this block when computing the xlat's flags_mask and
 * flags_strsz fields.  The irregular blank lines are generator output
 * and carry no meaning.
 */
static const struct xlat_data sysctl_vm_xdata[] = {


 XLAT(VM_UNUSED1),
 #define XLAT_VAL_0 ((unsigned) (VM_UNUSED1))
 #define XLAT_STR_0 STRINGIFY(VM_UNUSED1)





 XLAT(VM_UNUSED2),
 #define XLAT_VAL_1 ((unsigned) (VM_UNUSED2))
 #define XLAT_STR_1 STRINGIFY(VM_UNUSED2)



 XLAT(VM_UNUSED3),
 #define XLAT_VAL_2 ((unsigned) (VM_UNUSED3))
 #define XLAT_STR_2 STRINGIFY(VM_UNUSED3)




 XLAT(VM_UNUSED4),
 #define XLAT_VAL_3 ((unsigned) (VM_UNUSED4))
 #define XLAT_STR_3 STRINGIFY(VM_UNUSED4)

 XLAT(VM_OVERCOMMIT_MEMORY),
 #define XLAT_VAL_4 ((unsigned) (VM_OVERCOMMIT_MEMORY))
 #define XLAT_STR_4 STRINGIFY(VM_OVERCOMMIT_MEMORY)



 XLAT(VM_UNUSED5),
 #define XLAT_VAL_5 ((unsigned) (VM_UNUSED5))
 #define XLAT_STR_5 STRINGIFY(VM_UNUSED5)



 XLAT(VM_UNUSED7),
 #define XLAT_VAL_6 ((unsigned) (VM_UNUSED7))
 #define XLAT_STR_6 STRINGIFY(VM_UNUSED7)


 XLAT(VM_UNUSED8),
 #define XLAT_VAL_7 ((unsigned) (VM_UNUSED8))
 #define XLAT_STR_7 STRINGIFY(VM_UNUSED8)



 XLAT(VM_UNUSED9),
 #define XLAT_VAL_8 ((unsigned) (VM_UNUSED9))
 #define XLAT_STR_8 STRINGIFY(VM_UNUSED9)


 XLAT(VM_PAGE_CLUSTER),
 #define XLAT_VAL_9 ((unsigned) (VM_PAGE_CLUSTER))
 #define XLAT_STR_9 STRINGIFY(VM_PAGE_CLUSTER)

 XLAT(VM_DIRTY_BACKGROUND),
 #define XLAT_VAL_10 ((unsigned) (VM_DIRTY_BACKGROUND))
 #define XLAT_STR_10 STRINGIFY(VM_DIRTY_BACKGROUND)

 XLAT(VM_DIRTY_RATIO),
 #define XLAT_VAL_11 ((unsigned) (VM_DIRTY_RATIO))
 #define XLAT_STR_11 STRINGIFY(VM_DIRTY_RATIO)


 XLAT(VM_DIRTY_WB_CS),
 #define XLAT_VAL_12 ((unsigned) (VM_DIRTY_WB_CS))
 #define XLAT_STR_12 STRINGIFY(VM_DIRTY_WB_CS)

 XLAT(VM_DIRTY_EXPIRE_CS),
 #define XLAT_VAL_13 ((unsigned) (VM_DIRTY_EXPIRE_CS))
 #define XLAT_STR_13 STRINGIFY(VM_DIRTY_EXPIRE_CS)

 XLAT(VM_NR_PDFLUSH_THREADS),
 #define XLAT_VAL_14 ((unsigned) (VM_NR_PDFLUSH_THREADS))
 #define XLAT_STR_14 STRINGIFY(VM_NR_PDFLUSH_THREADS)

 XLAT(VM_OVERCOMMIT_RATIO),
 #define XLAT_VAL_15 ((unsigned) (VM_OVERCOMMIT_RATIO))
 #define XLAT_STR_15 STRINGIFY(VM_OVERCOMMIT_RATIO)

 XLAT(VM_PAGEBUF),
 #define XLAT_VAL_16 ((unsigned) (VM_PAGEBUF))
 #define XLAT_STR_16 STRINGIFY(VM_PAGEBUF)

 XLAT(VM_HUGETLB_PAGES),
 #define XLAT_VAL_17 ((unsigned) (VM_HUGETLB_PAGES))
 #define XLAT_STR_17 STRINGIFY(VM_HUGETLB_PAGES)

 XLAT(VM_SWAPPINESS),
 #define XLAT_VAL_18 ((unsigned) (VM_SWAPPINESS))
 #define XLAT_STR_18 STRINGIFY(VM_SWAPPINESS)


 XLAT(VM_LOWMEM_RESERVE_RATIO),
 #define XLAT_VAL_19 ((unsigned) (VM_LOWMEM_RESERVE_RATIO))
 #define XLAT_STR_19 STRINGIFY(VM_LOWMEM_RESERVE_RATIO)

 XLAT(VM_MIN_FREE_KBYTES),
 #define XLAT_VAL_20 ((unsigned) (VM_MIN_FREE_KBYTES))
 #define XLAT_STR_20 STRINGIFY(VM_MIN_FREE_KBYTES)

 XLAT(VM_MAX_MAP_COUNT),
 #define XLAT_VAL_21 ((unsigned) (VM_MAX_MAP_COUNT))
 #define XLAT_STR_21 STRINGIFY(VM_MAX_MAP_COUNT)

 XLAT(VM_LAPTOP_MODE),
 #define XLAT_VAL_22 ((unsigned) (VM_LAPTOP_MODE))
 #define XLAT_STR_22 STRINGIFY(VM_LAPTOP_MODE)
 XLAT(VM_BLOCK_DUMP),
 #define XLAT_VAL_23 ((unsigned) (VM_BLOCK_DUMP))
 #define XLAT_STR_23 STRINGIFY(VM_BLOCK_DUMP)

 XLAT(VM_HUGETLB_GROUP),
 #define XLAT_VAL_24 ((unsigned) (VM_HUGETLB_GROUP))
 #define XLAT_STR_24 STRINGIFY(VM_HUGETLB_GROUP)

 XLAT(VM_VFS_CACHE_PRESSURE),
 #define XLAT_VAL_25 ((unsigned) (VM_VFS_CACHE_PRESSURE))
 #define XLAT_STR_25 STRINGIFY(VM_VFS_CACHE_PRESSURE)

 XLAT(VM_LEGACY_VA_LAYOUT),
 #define XLAT_VAL_26 ((unsigned) (VM_LEGACY_VA_LAYOUT))
 #define XLAT_STR_26 STRINGIFY(VM_LEGACY_VA_LAYOUT)

 XLAT(VM_SWAP_TOKEN_TIMEOUT),
 #define XLAT_VAL_27 ((unsigned) (VM_SWAP_TOKEN_TIMEOUT))
 #define XLAT_STR_27 STRINGIFY(VM_SWAP_TOKEN_TIMEOUT)

 XLAT(VM_DROP_PAGECACHE),
 #define XLAT_VAL_28 ((unsigned) (VM_DROP_PAGECACHE))
 #define XLAT_STR_28 STRINGIFY(VM_DROP_PAGECACHE)

 XLAT(VM_PERCPU_PAGELIST_FRACTION),
 #define XLAT_VAL_29 ((unsigned) (VM_PERCPU_PAGELIST_FRACTION))
 #define XLAT_STR_29 STRINGIFY(VM_PERCPU_PAGELIST_FRACTION)

 XLAT(VM_ZONE_RECLAIM_MODE),
 #define XLAT_VAL_30 ((unsigned) (VM_ZONE_RECLAIM_MODE))
 #define XLAT_STR_30 STRINGIFY(VM_ZONE_RECLAIM_MODE)


 XLAT(VM_MIN_UNMAPPED),
 #define XLAT_VAL_31 ((unsigned) (VM_MIN_UNMAPPED))
 #define XLAT_STR_31 STRINGIFY(VM_MIN_UNMAPPED)

 XLAT(VM_PANIC_ON_OOM),
 #define XLAT_VAL_32 ((unsigned) (VM_PANIC_ON_OOM))
 #define XLAT_STR_32 STRINGIFY(VM_PANIC_ON_OOM)

 XLAT(VM_VDSO_ENABLED),
 #define XLAT_VAL_33 ((unsigned) (VM_VDSO_ENABLED))
 #define XLAT_STR_33 STRINGIFY(VM_VDSO_ENABLED)

 XLAT(VM_MIN_SLAB),
 #define XLAT_VAL_34 ((unsigned) (VM_MIN_SLAB))
 #define XLAT_STR_34 STRINGIFY(VM_MIN_SLAB)



 XLAT(VM_CMM_PAGES),
 #define XLAT_VAL_35 ((unsigned) (VM_CMM_PAGES))
 #define XLAT_STR_35 STRINGIFY(VM_CMM_PAGES)
 XLAT(VM_CMM_TIMED_PAGES),
 #define XLAT_VAL_36 ((unsigned) (VM_CMM_TIMED_PAGES))
 #define XLAT_STR_36 STRINGIFY(VM_CMM_TIMED_PAGES)
 XLAT(VM_CMM_TIMEOUT),
 #define XLAT_VAL_37 ((unsigned) (VM_CMM_TIMEOUT))
 #define XLAT_STR_37 STRINGIFY(VM_CMM_TIMEOUT)
};
/*
 * Sorted xlat table mapping VM_* sysctl ids to their string names.
 * NOTE(review): this file is generated by ./src/xlat/gen.sh — any change
 * here will be lost on regeneration; edit ./src/xlat/sysctl_vm.in instead.
 */
static
const struct xlat sysctl_vm[1] = { {
 .data = sysctl_vm_xdata,
 .size = ARRAY_SIZE(sysctl_vm_xdata),
 .type = XT_SORTED,
 /*
  * Bitwise OR of every value present in the table.  Each XLAT_VAL_k
  * macro is defined only when the corresponding constant was available
  * (or defaulted) above, so the #ifdef lattice below sums exactly the
  * entries that made it into sysctl_vm_xdata.
  */
 .flags_mask = 0
#  ifdef XLAT_VAL_0
  | XLAT_VAL_0
#  endif
#  ifdef XLAT_VAL_1
  | XLAT_VAL_1
#  endif
#  ifdef XLAT_VAL_2
  | XLAT_VAL_2
#  endif
#  ifdef XLAT_VAL_3
  | XLAT_VAL_3
#  endif
#  ifdef XLAT_VAL_4
  | XLAT_VAL_4
#  endif
#  ifdef XLAT_VAL_5
  | XLAT_VAL_5
#  endif
#  ifdef XLAT_VAL_6
  | XLAT_VAL_6
#  endif
#  ifdef XLAT_VAL_7
  | XLAT_VAL_7
#  endif
#  ifdef XLAT_VAL_8
  | XLAT_VAL_8
#  endif
#  ifdef XLAT_VAL_9
  | XLAT_VAL_9
#  endif
#  ifdef XLAT_VAL_10
  | XLAT_VAL_10
#  endif
#  ifdef XLAT_VAL_11
  | XLAT_VAL_11
#  endif
#  ifdef XLAT_VAL_12
  | XLAT_VAL_12
#  endif
#  ifdef XLAT_VAL_13
  | XLAT_VAL_13
#  endif
#  ifdef XLAT_VAL_14
  | XLAT_VAL_14
#  endif
#  ifdef XLAT_VAL_15
  | XLAT_VAL_15
#  endif
#  ifdef XLAT_VAL_16
  | XLAT_VAL_16
#  endif
#  ifdef XLAT_VAL_17
  | XLAT_VAL_17
#  endif
#  ifdef XLAT_VAL_18
  | XLAT_VAL_18
#  endif
#  ifdef XLAT_VAL_19
  | XLAT_VAL_19
#  endif
#  ifdef XLAT_VAL_20
  | XLAT_VAL_20
#  endif
#  ifdef XLAT_VAL_21
  | XLAT_VAL_21
#  endif
#  ifdef XLAT_VAL_22
  | XLAT_VAL_22
#  endif
#  ifdef XLAT_VAL_23
  | XLAT_VAL_23
#  endif
#  ifdef XLAT_VAL_24
  | XLAT_VAL_24
#  endif
#  ifdef XLAT_VAL_25
  | XLAT_VAL_25
#  endif
#  ifdef XLAT_VAL_26
  | XLAT_VAL_26
#  endif
#  ifdef XLAT_VAL_27
  | XLAT_VAL_27
#  endif
#  ifdef XLAT_VAL_28
  | XLAT_VAL_28
#  endif
#  ifdef XLAT_VAL_29
  | XLAT_VAL_29
#  endif
#  ifdef XLAT_VAL_30
  | XLAT_VAL_30
#  endif
#  ifdef XLAT_VAL_31
  | XLAT_VAL_31
#  endif
#  ifdef XLAT_VAL_32
  | XLAT_VAL_32
#  endif
#  ifdef XLAT_VAL_33
  | XLAT_VAL_33
#  endif
#  ifdef XLAT_VAL_34
  | XLAT_VAL_34
#  endif
#  ifdef XLAT_VAL_35
  | XLAT_VAL_35
#  endif
#  ifdef XLAT_VAL_36
  | XLAT_VAL_36
#  endif
#  ifdef XLAT_VAL_37
  | XLAT_VAL_37
#  endif
  ,
 /*
  * Total size (including NUL terminators — sizeof of a string literal)
  * of all string representations present in the table, mirroring the
  * XLAT_VAL_* selection above via the matching XLAT_STR_k macros.
  */
 .flags_strsz = 0
#  ifdef XLAT_STR_0
  + sizeof(XLAT_STR_0)
#  endif
#  ifdef XLAT_STR_1
  + sizeof(XLAT_STR_1)
#  endif
#  ifdef XLAT_STR_2
  + sizeof(XLAT_STR_2)
#  endif
#  ifdef XLAT_STR_3
  + sizeof(XLAT_STR_3)
#  endif
#  ifdef XLAT_STR_4
  + sizeof(XLAT_STR_4)
#  endif
#  ifdef XLAT_STR_5
  + sizeof(XLAT_STR_5)
#  endif
#  ifdef XLAT_STR_6
  + sizeof(XLAT_STR_6)
#  endif
#  ifdef XLAT_STR_7
  + sizeof(XLAT_STR_7)
#  endif
#  ifdef XLAT_STR_8
  + sizeof(XLAT_STR_8)
#  endif
#  ifdef XLAT_STR_9
  + sizeof(XLAT_STR_9)
#  endif
#  ifdef XLAT_STR_10
  + sizeof(XLAT_STR_10)
#  endif
#  ifdef XLAT_STR_11
  + sizeof(XLAT_STR_11)
#  endif
#  ifdef XLAT_STR_12
  + sizeof(XLAT_STR_12)
#  endif
#  ifdef XLAT_STR_13
  + sizeof(XLAT_STR_13)
#  endif
#  ifdef XLAT_STR_14
  + sizeof(XLAT_STR_14)
#  endif
#  ifdef XLAT_STR_15
  + sizeof(XLAT_STR_15)
#  endif
#  ifdef XLAT_STR_16
  + sizeof(XLAT_STR_16)
#  endif
#  ifdef XLAT_STR_17
  + sizeof(XLAT_STR_17)
#  endif
#  ifdef XLAT_STR_18
  + sizeof(XLAT_STR_18)
#  endif
#  ifdef XLAT_STR_19
  + sizeof(XLAT_STR_19)
#  endif
#  ifdef XLAT_STR_20
  + sizeof(XLAT_STR_20)
#  endif
#  ifdef XLAT_STR_21
  + sizeof(XLAT_STR_21)
#  endif
#  ifdef XLAT_STR_22
  + sizeof(XLAT_STR_22)
#  endif
#  ifdef XLAT_STR_23
  + sizeof(XLAT_STR_23)
#  endif
#  ifdef XLAT_STR_24
  + sizeof(XLAT_STR_24)
#  endif
#  ifdef XLAT_STR_25
  + sizeof(XLAT_STR_25)
#  endif
#  ifdef XLAT_STR_26
  + sizeof(XLAT_STR_26)
#  endif
#  ifdef XLAT_STR_27
  + sizeof(XLAT_STR_27)
#  endif
#  ifdef XLAT_STR_28
  + sizeof(XLAT_STR_28)
#  endif
#  ifdef XLAT_STR_29
  + sizeof(XLAT_STR_29)
#  endif
#  ifdef XLAT_STR_30
  + sizeof(XLAT_STR_30)
#  endif
#  ifdef XLAT_STR_31
  + sizeof(XLAT_STR_31)
#  endif
#  ifdef XLAT_STR_32
  + sizeof(XLAT_STR_32)
#  endif
#  ifdef XLAT_STR_33
  + sizeof(XLAT_STR_33)
#  endif
#  ifdef XLAT_STR_34
  + sizeof(XLAT_STR_34)
#  endif
#  ifdef XLAT_STR_35
  + sizeof(XLAT_STR_35)
#  endif
#  ifdef XLAT_STR_36
  + sizeof(XLAT_STR_36)
#  endif
#  ifdef XLAT_STR_37
  + sizeof(XLAT_STR_37)
#  endif
  ,
} };
DIAG_POP_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE

#  undef XLAT_STR_0
#  undef XLAT_VAL_0
#  undef XLAT_STR_1
#  undef XLAT_VAL_1
#  undef XLAT_STR_2
#  undef XLAT_VAL_2
#  undef XLAT_STR_3
#  undef XLAT_VAL_3
#  undef XLAT_STR_4
#  undef XLAT_VAL_4
#  undef XLAT_STR_5
#  undef XLAT_VAL_5
#  undef XLAT_STR_6
#  undef XLAT_VAL_6
#  undef XLAT_STR_7
#  undef XLAT_VAL_7
#  undef XLAT_STR_8
#  undef XLAT_VAL_8
#  undef XLAT_STR_9
#  undef XLAT_VAL_9
#  undef XLAT_STR_10
#  undef XLAT_VAL_10
#  undef XLAT_STR_11
#  undef XLAT_VAL_11
#  undef XLAT_STR_12
#  undef XLAT_VAL_12
#  undef XLAT_STR_13
#  undef XLAT_VAL_13
#  undef XLAT_STR_14
#  undef XLAT_VAL_14
#  undef XLAT_STR_15
#  undef XLAT_VAL_15
#  undef XLAT_STR_16
#  undef XLAT_VAL_16
#  undef XLAT_STR_17
#  undef XLAT_VAL_17
#  undef XLAT_STR_18
#  undef XLAT_VAL_18
#  undef XLAT_STR_19
#  undef XLAT_VAL_19
#  undef XLAT_STR_20
#  undef XLAT_VAL_20
#  undef XLAT_STR_21
#  undef XLAT_VAL_21
#  undef XLAT_STR_22
#  undef XLAT_VAL_22
#  undef XLAT_STR_23
#  undef XLAT_VAL_23
#  undef XLAT_STR_24
#  undef XLAT_VAL_24
#  undef XLAT_STR_25
#  undef XLAT_VAL_25
#  undef XLAT_STR_26
#  undef XLAT_VAL_26
#  undef XLAT_STR_27
#  undef XLAT_VAL_27
#  undef XLAT_STR_28
#  undef XLAT_VAL_28
#  undef XLAT_STR_29
#  undef XLAT_VAL_29
#  undef XLAT_STR_30
#  undef XLAT_VAL_30
#  undef XLAT_STR_31
#  undef XLAT_VAL_31
#  undef XLAT_STR_32
#  undef XLAT_VAL_32
#  undef XLAT_STR_33
#  undef XLAT_VAL_33
#  undef XLAT_STR_34
#  undef XLAT_VAL_34
#  undef XLAT_STR_35
#  undef XLAT_VAL_35
#  undef XLAT_STR_36
#  undef XLAT_VAL_36
#  undef XLAT_STR_37
#  undef XLAT_VAL_37
# endif /* !IN_MPERS */

#endif /* !XLAT_MACROS_ONLY */