--- zzzz-none-000/linux-5.4.213/arch/arm/include/asm/atomic.h	2022-09-15 10:04:56.000000000 +0000
+++ alder-5690pro-762/linux-5.4.213/arch/arm/include/asm/atomic.h	2024-08-14 09:01:27.000000000 +0000
@@ -155,6 +155,68 @@
 }
 #define atomic_fetch_add_unless atomic_fetch_add_unless
 
+/*
+ * Atomically increment v unless the counter has reached the limit.
+ * The effective limit is max_value - 1, so the counter takes values
+ * in [0, max_value - 1].  Returns the new counter value on success,
+ * or -1 if the limit was hit.  An unsigned >= check is used so a
+ * counter already past the limit is refused, not incremented further.
+ */
+static inline int atomic_inc_with_max_return(atomic_t *v, unsigned int max_value)
+{
+	int oldval, newval;
+	unsigned long tmp;
+
+	max_value -= 1;
+	smp_mb();
+
+	__asm__ __volatile__ ("@ atomic_inc_with_max_return\n"
+"1:	ldrex	%0, [%4]\n"
+"	cmp	%0, %5\n"
+"	bhs	2f\n"
+"	add	%1, %0, #1\n"
+"	strex	%2, %1, [%4]\n"
+"	teq	%2, #0\n"
+"	bne	1b\n"
+"2:"
+	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
+	: "r" (&v->counter), "r" (max_value)
+	: "cc");
+
+	if (likely((unsigned int)oldval < max_value)) {
+		smp_mb();
+		return newval;
+	}
+	return -1;
+}
+
+/*
+ * Atomically increment v, wrapping back to 0 once the counter has
+ * reached (or exceeded) max_value - 1.  Returns the new counter value.
+ */
+static inline int atomic_inc_with_wrap_return(atomic_t *v, unsigned int max_value)
+{
+	int oldval, newval;
+	unsigned long tmp;
+
+	max_value -= 1;
+	smp_mb();
+
+	__asm__ __volatile__ ("@ atomic_inc_with_wrap_return\n"
+"1:	ldrex	%0, [%4]\n"
+"	cmp	%0, %5\n"
+"	movhs	%1, #0\n"
+"	addlo	%1, %0, #1\n"
+"	strex	%2, %1, [%4]\n"
+"	teq	%2, #0\n"
+"	bne	1b\n"
+	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
+	: "r" (&v->counter), "r" (max_value)
+	: "cc");
+
+	smp_mb();
+
+	return newval;
+}
 #else /* ARM_ARCH_6 */
 
 #ifdef CONFIG_SMP