/* Generated by ./src/xlat/gen.sh from ./src/xlat/perf_sw_ids.in; do not edit. */

#include "gcc_compat.h"
#include "static_assert.h"

/*
 * For each PERF_COUNT_SW_* constant below there are two stanzas:
 *
 *  1. If the system headers provide the constant (or configure detected a
 *     declaration via HAVE_DECL_*), statically assert it has the value the
 *     xlat table expects; otherwise define the fallback value ourselves.
 *     The DIAG_PUSH/POP pair silences "tautological compare" warnings that
 *     arise when the assert compares an enum constant against itself.
 *
 *  2. Maintain XLAT_PREV_VAL as a rolling "previous entry" macro and assert
 *     strict ascending order, since this xlat is marked #sorted and the
 *     table below is built in this exact order.
 */

#if defined(PERF_COUNT_SW_CPU_CLOCK) || (defined(HAVE_DECL_PERF_COUNT_SW_CPU_CLOCK) && HAVE_DECL_PERF_COUNT_SW_CPU_CLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_CPU_CLOCK) == (0), "PERF_COUNT_SW_CPU_CLOCK != 0");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_CPU_CLOCK 0
#endif
#if defined(PERF_COUNT_SW_CPU_CLOCK) || (defined(HAVE_DECL_PERF_COUNT_SW_CPU_CLOCK) && HAVE_DECL_PERF_COUNT_SW_CPU_CLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_CPU_CLOCK)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_CPU_CLOCK"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_CPU_CLOCK)
#endif

#if defined(PERF_COUNT_SW_TASK_CLOCK) || (defined(HAVE_DECL_PERF_COUNT_SW_TASK_CLOCK) && HAVE_DECL_PERF_COUNT_SW_TASK_CLOCK)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_TASK_CLOCK) == (1), "PERF_COUNT_SW_TASK_CLOCK != 1");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_TASK_CLOCK 1
#endif
#if defined(PERF_COUNT_SW_TASK_CLOCK) || (defined(HAVE_DECL_PERF_COUNT_SW_TASK_CLOCK) && HAVE_DECL_PERF_COUNT_SW_TASK_CLOCK)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_TASK_CLOCK)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_TASK_CLOCK"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_TASK_CLOCK)
#endif

#if defined(PERF_COUNT_SW_PAGE_FAULTS) || (defined(HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS) && HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_PAGE_FAULTS) == (2), "PERF_COUNT_SW_PAGE_FAULTS != 2");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_PAGE_FAULTS 2
#endif
#if defined(PERF_COUNT_SW_PAGE_FAULTS) || (defined(HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS) && HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_PAGE_FAULTS)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_PAGE_FAULTS"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_PAGE_FAULTS)
#endif

#if defined(PERF_COUNT_SW_CONTEXT_SWITCHES) || (defined(HAVE_DECL_PERF_COUNT_SW_CONTEXT_SWITCHES) && HAVE_DECL_PERF_COUNT_SW_CONTEXT_SWITCHES)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_CONTEXT_SWITCHES) == (3), "PERF_COUNT_SW_CONTEXT_SWITCHES != 3");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_CONTEXT_SWITCHES 3
#endif
#if defined(PERF_COUNT_SW_CONTEXT_SWITCHES) || (defined(HAVE_DECL_PERF_COUNT_SW_CONTEXT_SWITCHES) && HAVE_DECL_PERF_COUNT_SW_CONTEXT_SWITCHES)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_CONTEXT_SWITCHES)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_CONTEXT_SWITCHES"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_CONTEXT_SWITCHES)
#endif

#if defined(PERF_COUNT_SW_CPU_MIGRATIONS) || (defined(HAVE_DECL_PERF_COUNT_SW_CPU_MIGRATIONS) && HAVE_DECL_PERF_COUNT_SW_CPU_MIGRATIONS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_CPU_MIGRATIONS) == (4), "PERF_COUNT_SW_CPU_MIGRATIONS != 4");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_CPU_MIGRATIONS 4
#endif
#if defined(PERF_COUNT_SW_CPU_MIGRATIONS) || (defined(HAVE_DECL_PERF_COUNT_SW_CPU_MIGRATIONS) && HAVE_DECL_PERF_COUNT_SW_CPU_MIGRATIONS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_CPU_MIGRATIONS)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_CPU_MIGRATIONS"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_CPU_MIGRATIONS)
#endif

#if defined(PERF_COUNT_SW_PAGE_FAULTS_MIN) || (defined(HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MIN) && HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MIN)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_PAGE_FAULTS_MIN) == (5), "PERF_COUNT_SW_PAGE_FAULTS_MIN != 5");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_PAGE_FAULTS_MIN 5
#endif
#if defined(PERF_COUNT_SW_PAGE_FAULTS_MIN) || (defined(HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MIN) && HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MIN)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_PAGE_FAULTS_MIN)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_PAGE_FAULTS_MIN"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_PAGE_FAULTS_MIN)
#endif

#if defined(PERF_COUNT_SW_PAGE_FAULTS_MAJ) || (defined(HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MAJ) && HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MAJ)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_PAGE_FAULTS_MAJ) == (6), "PERF_COUNT_SW_PAGE_FAULTS_MAJ != 6");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_PAGE_FAULTS_MAJ 6
#endif
#if defined(PERF_COUNT_SW_PAGE_FAULTS_MAJ) || (defined(HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MAJ) && HAVE_DECL_PERF_COUNT_SW_PAGE_FAULTS_MAJ)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_PAGE_FAULTS_MAJ)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_PAGE_FAULTS_MAJ"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_PAGE_FAULTS_MAJ)
#endif

#if defined(PERF_COUNT_SW_ALIGNMENT_FAULTS) || (defined(HAVE_DECL_PERF_COUNT_SW_ALIGNMENT_FAULTS) && HAVE_DECL_PERF_COUNT_SW_ALIGNMENT_FAULTS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_ALIGNMENT_FAULTS) == (7), "PERF_COUNT_SW_ALIGNMENT_FAULTS != 7");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_ALIGNMENT_FAULTS 7
#endif
#if defined(PERF_COUNT_SW_ALIGNMENT_FAULTS) || (defined(HAVE_DECL_PERF_COUNT_SW_ALIGNMENT_FAULTS) && HAVE_DECL_PERF_COUNT_SW_ALIGNMENT_FAULTS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_ALIGNMENT_FAULTS)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_ALIGNMENT_FAULTS"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_ALIGNMENT_FAULTS)
#endif

#if defined(PERF_COUNT_SW_EMULATION_FAULTS) || (defined(HAVE_DECL_PERF_COUNT_SW_EMULATION_FAULTS) && HAVE_DECL_PERF_COUNT_SW_EMULATION_FAULTS)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_EMULATION_FAULTS) == (8), "PERF_COUNT_SW_EMULATION_FAULTS != 8");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_EMULATION_FAULTS 8
#endif
#if defined(PERF_COUNT_SW_EMULATION_FAULTS) || (defined(HAVE_DECL_PERF_COUNT_SW_EMULATION_FAULTS) && HAVE_DECL_PERF_COUNT_SW_EMULATION_FAULTS)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_EMULATION_FAULTS)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_EMULATION_FAULTS"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_EMULATION_FAULTS)
#endif

#if defined(PERF_COUNT_SW_DUMMY) || (defined(HAVE_DECL_PERF_COUNT_SW_DUMMY) && HAVE_DECL_PERF_COUNT_SW_DUMMY)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_DUMMY) == (9), "PERF_COUNT_SW_DUMMY != 9");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_DUMMY 9
#endif
#if defined(PERF_COUNT_SW_DUMMY) || (defined(HAVE_DECL_PERF_COUNT_SW_DUMMY) && HAVE_DECL_PERF_COUNT_SW_DUMMY)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_DUMMY)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_DUMMY"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_DUMMY)
#endif

#if defined(PERF_COUNT_SW_BPF_OUTPUT) || (defined(HAVE_DECL_PERF_COUNT_SW_BPF_OUTPUT) && HAVE_DECL_PERF_COUNT_SW_BPF_OUTPUT)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_BPF_OUTPUT) == (10), "PERF_COUNT_SW_BPF_OUTPUT != 10");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_BPF_OUTPUT 10
#endif
#if defined(PERF_COUNT_SW_BPF_OUTPUT) || (defined(HAVE_DECL_PERF_COUNT_SW_BPF_OUTPUT) && HAVE_DECL_PERF_COUNT_SW_BPF_OUTPUT)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_BPF_OUTPUT)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_BPF_OUTPUT"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_BPF_OUTPUT)
#endif

#if defined(PERF_COUNT_SW_CGROUP_SWITCHES) || (defined(HAVE_DECL_PERF_COUNT_SW_CGROUP_SWITCHES) && HAVE_DECL_PERF_COUNT_SW_CGROUP_SWITCHES)
DIAG_PUSH_IGNORE_TAUTOLOGICAL_COMPARE
static_assert((PERF_COUNT_SW_CGROUP_SWITCHES) == (11), "PERF_COUNT_SW_CGROUP_SWITCHES != 11");
DIAG_POP_IGNORE_TAUTOLOGICAL_COMPARE
#else
# define PERF_COUNT_SW_CGROUP_SWITCHES 11
#endif
#if defined(PERF_COUNT_SW_CGROUP_SWITCHES) || (defined(HAVE_DECL_PERF_COUNT_SW_CGROUP_SWITCHES) && HAVE_DECL_PERF_COUNT_SW_CGROUP_SWITCHES)
#if defined XLAT_PREV_VAL
static_assert((unsigned long long) (PERF_COUNT_SW_CGROUP_SWITCHES)
              > (unsigned long long) (XLAT_PREV_VAL),
              "Incorrect order in #sorted xlat: PERF_COUNT_SW_CGROUP_SWITCHES"
              " is not larger than the previous value");
#endif
#undef XLAT_PREV_VAL
#define XLAT_PREV_VAL (PERF_COUNT_SW_CGROUP_SWITCHES)
#endif

/* All entries checked; the rolling previous-value macro is no longer needed. */
#undef XLAT_PREV_VAL

#ifndef XLAT_MACROS_ONLY

# ifdef IN_MPERS

#  error static const struct xlat perf_sw_ids in mpers mode

# else
DIAG_PUSH_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE
/*
 * Value -> name lookup table for the perf software event ids.
 * Each XLAT() entry is followed by XLAT_VAL_n / XLAT_STR_n helper macros
 * that are consumed below to accumulate the union of all values
 * (.flags_mask) and the total size of all name strings (.flags_strsz),
 * then #undef'd again.
 */
static const struct xlat_data perf_sw_ids_xdata[] = {
 XLAT(PERF_COUNT_SW_CPU_CLOCK),
 #define XLAT_VAL_0 ((unsigned) (PERF_COUNT_SW_CPU_CLOCK))
 #define XLAT_STR_0 STRINGIFY(PERF_COUNT_SW_CPU_CLOCK)
 XLAT(PERF_COUNT_SW_TASK_CLOCK),
 #define XLAT_VAL_1 ((unsigned) (PERF_COUNT_SW_TASK_CLOCK))
 #define XLAT_STR_1 STRINGIFY(PERF_COUNT_SW_TASK_CLOCK)
 XLAT(PERF_COUNT_SW_PAGE_FAULTS),
 #define XLAT_VAL_2 ((unsigned) (PERF_COUNT_SW_PAGE_FAULTS))
 #define XLAT_STR_2 STRINGIFY(PERF_COUNT_SW_PAGE_FAULTS)
 XLAT(PERF_COUNT_SW_CONTEXT_SWITCHES),
 #define XLAT_VAL_3 ((unsigned) (PERF_COUNT_SW_CONTEXT_SWITCHES))
 #define XLAT_STR_3 STRINGIFY(PERF_COUNT_SW_CONTEXT_SWITCHES)
 XLAT(PERF_COUNT_SW_CPU_MIGRATIONS),
 #define XLAT_VAL_4 ((unsigned) (PERF_COUNT_SW_CPU_MIGRATIONS))
 #define XLAT_STR_4 STRINGIFY(PERF_COUNT_SW_CPU_MIGRATIONS)
 XLAT(PERF_COUNT_SW_PAGE_FAULTS_MIN),
 #define XLAT_VAL_5 ((unsigned) (PERF_COUNT_SW_PAGE_FAULTS_MIN))
 #define XLAT_STR_5 STRINGIFY(PERF_COUNT_SW_PAGE_FAULTS_MIN)
 XLAT(PERF_COUNT_SW_PAGE_FAULTS_MAJ),
 #define XLAT_VAL_6 ((unsigned) (PERF_COUNT_SW_PAGE_FAULTS_MAJ))
 #define XLAT_STR_6 STRINGIFY(PERF_COUNT_SW_PAGE_FAULTS_MAJ)
 XLAT(PERF_COUNT_SW_ALIGNMENT_FAULTS),
 #define XLAT_VAL_7 ((unsigned) (PERF_COUNT_SW_ALIGNMENT_FAULTS))
 #define XLAT_STR_7 STRINGIFY(PERF_COUNT_SW_ALIGNMENT_FAULTS)
 XLAT(PERF_COUNT_SW_EMULATION_FAULTS),
 #define XLAT_VAL_8 ((unsigned) (PERF_COUNT_SW_EMULATION_FAULTS))
 #define XLAT_STR_8 STRINGIFY(PERF_COUNT_SW_EMULATION_FAULTS)
 XLAT(PERF_COUNT_SW_DUMMY),
 #define XLAT_VAL_9 ((unsigned) (PERF_COUNT_SW_DUMMY))
 #define XLAT_STR_9 STRINGIFY(PERF_COUNT_SW_DUMMY)
 XLAT(PERF_COUNT_SW_BPF_OUTPUT),
 #define XLAT_VAL_10 ((unsigned) (PERF_COUNT_SW_BPF_OUTPUT))
 #define XLAT_STR_10 STRINGIFY(PERF_COUNT_SW_BPF_OUTPUT)
 XLAT(PERF_COUNT_SW_CGROUP_SWITCHES),
 #define XLAT_VAL_11 ((unsigned) (PERF_COUNT_SW_CGROUP_SWITCHES))
 #define XLAT_STR_11 STRINGIFY(PERF_COUNT_SW_CGROUP_SWITCHES)
};
/*
 * The xlat object itself; entries were statically verified above to be in
 * strictly ascending order, matching the XT_SORTED type.  Every XLAT_VAL_n
 * and XLAT_STR_n defined above is guarded by #ifdef so that the arithmetic
 * stays correct even if the generator omits some entries.
 */
static const struct xlat perf_sw_ids[1] = {
 {
  .data = perf_sw_ids_xdata,
  .size = ARRAY_SIZE(perf_sw_ids_xdata),
  .type = XT_SORTED,
  /* Bitwise OR of all entry values. */
  .flags_mask = 0
# ifdef XLAT_VAL_0
  | XLAT_VAL_0
# endif
# ifdef XLAT_VAL_1
  | XLAT_VAL_1
# endif
# ifdef XLAT_VAL_2
  | XLAT_VAL_2
# endif
# ifdef XLAT_VAL_3
  | XLAT_VAL_3
# endif
# ifdef XLAT_VAL_4
  | XLAT_VAL_4
# endif
# ifdef XLAT_VAL_5
  | XLAT_VAL_5
# endif
# ifdef XLAT_VAL_6
  | XLAT_VAL_6
# endif
# ifdef XLAT_VAL_7
  | XLAT_VAL_7
# endif
# ifdef XLAT_VAL_8
  | XLAT_VAL_8
# endif
# ifdef XLAT_VAL_9
  | XLAT_VAL_9
# endif
# ifdef XLAT_VAL_10
  | XLAT_VAL_10
# endif
# ifdef XLAT_VAL_11
  | XLAT_VAL_11
# endif
  ,
  /* Sum of sizeof of all entry name strings (each includes its NUL). */
  .flags_strsz = 0
# ifdef XLAT_STR_0
  + sizeof(XLAT_STR_0)
# endif
# ifdef XLAT_STR_1
  + sizeof(XLAT_STR_1)
# endif
# ifdef XLAT_STR_2
  + sizeof(XLAT_STR_2)
# endif
# ifdef XLAT_STR_3
  + sizeof(XLAT_STR_3)
# endif
# ifdef XLAT_STR_4
  + sizeof(XLAT_STR_4)
# endif
# ifdef XLAT_STR_5
  + sizeof(XLAT_STR_5)
# endif
# ifdef XLAT_STR_6
  + sizeof(XLAT_STR_6)
# endif
# ifdef XLAT_STR_7
  + sizeof(XLAT_STR_7)
# endif
# ifdef XLAT_STR_8
  + sizeof(XLAT_STR_8)
# endif
# ifdef XLAT_STR_9
  + sizeof(XLAT_STR_9)
# endif
# ifdef XLAT_STR_10
  + sizeof(XLAT_STR_10)
# endif
# ifdef XLAT_STR_11
  + sizeof(XLAT_STR_11)
# endif
  ,
 }
};
DIAG_POP_IGNORE_TAUTOLOGICAL_CONSTANT_COMPARE

/* Clean up the per-entry helper macros; they must not leak to other files. */
# undef XLAT_STR_0
# undef XLAT_VAL_0
# undef XLAT_STR_1
# undef XLAT_VAL_1
# undef XLAT_STR_2
# undef XLAT_VAL_2
# undef XLAT_STR_3
# undef XLAT_VAL_3
# undef XLAT_STR_4
# undef XLAT_VAL_4
# undef XLAT_STR_5
# undef XLAT_VAL_5
# undef XLAT_STR_6
# undef XLAT_VAL_6
# undef XLAT_STR_7
# undef XLAT_VAL_7
# undef XLAT_STR_8
# undef XLAT_VAL_8
# undef XLAT_STR_9
# undef XLAT_VAL_9
# undef XLAT_STR_10
# undef XLAT_VAL_10
# undef XLAT_STR_11
# undef XLAT_VAL_11

# endif /* !IN_MPERS */

#endif /* !XLAT_MACROS_ONLY */