--- zzzz-none-000/linux-2.4.17/include/asm-mips64/system.h	2001-07-04 18:50:39.000000000 +0000
+++ sangam-fb-322/linux-2.4.17/include/asm-mips64/system.h	2004-11-24 13:21:49.000000000 +0000
@@ -13,23 +13,30 @@
 
 #include
 #include
+#include
+
 #include
 
 extern __inline__ void
 __sti(void)
 {
 	__asm__ __volatile__(
-		".set\tnoreorder\n\t"
-		".set\tnoat\n\t"
-		"mfc0\t$1,$12\n\t"
-		"ori\t$1,0x1f\n\t"
-		"xori\t$1,0x1e\n\t"
-		"mtc0\t$1,$12\n\t"
-		".set\tat\n\t"
-		".set\treorder"
+		"__sti"
 		: /* no outputs */
 		: /* no inputs */
-		: "$1", "memory");
+		: "memory");
 }
 
 /*
@@ -39,71 +46,93 @@
  * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
  * no nops at all.
  */
+__asm__ (
+	".macro\t__cli\n\t"
+	".set\tpush\n\t"
+	".set\treorder\n\t"
+	".set\tnoat\n\t"
+	"mfc0\t$1,$12\n\t"
+	"ori\t$1,1\n\t"
+	"xori\t$1,1\n\t"
+	".set\tnoreorder\n\t"
+	"mtc0\t$1,$12\n\t"
+	"nop\n\t"
+	"nop\n\t"
+	"nop\n\t"
+	".set\tpop\n\t"
+	".endm");
+
 extern __inline__ void
 __cli(void)
 {
 	__asm__ __volatile__(
-		".set\tnoreorder\n\t"
-		".set\tnoat\n\t"
-		"mfc0\t$1,$12\n\t"
-		"ori\t$1,1\n\t"
-		"xori\t$1,1\n\t"
-		"mtc0\t$1,$12\n\t"
-		"nop\n\t"
-		"nop\n\t"
-		"nop\n\t"
-		".set\tat\n\t"
-		".set\treorder"
+		"__cli"
 		: /* no outputs */
 		: /* no inputs */
-		: "$1", "memory");
+		: "memory");
 }
 
+__asm__ (
+	".macro\t__save_flags flags\n\t"
+	".set\tpush\n\t"
+	".set\treorder\n\t"
+	"mfc0\t\\flags, $12\n\t"
+	".set\tpop\n\t"
+	".endm");
+
 #define __save_flags(x)						\
 __asm__ __volatile__(						\
-	".set\tnoreorder\n\t"					\
-	"mfc0\t%0,$12\n\t"					\
-	".set\treorder"						\
+	"__save_flags %0"					\
 	: "=r" (x))
 
+__asm__ (
+	".macro\t__save_and_cli result\n\t"
+	".set\tpush\n\t"
+	".set\treorder\n\t"
+	".set\tnoat\n\t"
+	"mfc0\t\\result, $12\n\t"
+	"ori\t$1, \\result, 1\n\t"
+	"xori\t$1, 1\n\t"
+	".set\tnoreorder\n\t"
+	"mtc0\t$1, $12\n\t"
+	"nop\n\t"
+	"nop\n\t"
+	"nop\n\t"
+	".set\tpop\n\t"
+	".endm");
+
 #define __save_and_cli(x)					\
 __asm__ __volatile__(						\
-	".set\tnoreorder\n\t"					\
-	".set\tnoat\n\t"					\
-	"mfc0\t%0,$12\n\t"					\
-	"ori\t$1,%0,1\n\t"					\
-	"xori\t$1,1\n\t"					\
-	"mtc0\t$1,$12\n\t"					\
-	"nop\n\t"						\
-	"nop\n\t"						\
-	"nop\n\t"						\
-	".set\tat\n\t"						\
-	".set\treorder"						\
+	"__save_and_cli\t%0"					\
 	: "=r" (x)						\
 	: /* no inputs */					\
-	: "$1", "memory")
+	: "memory")
+
+__asm__(".macro\t__restore_flags flags\n\t"
+	".set\tnoreorder\n\t"
+	".set\tnoat\n\t"
+	"mfc0\t$1, $12\n\t"
+	"andi\t\\flags, 1\n\t"
+	"ori\t$1, 1\n\t"
+	"xori\t$1, 1\n\t"
+	"or\t\\flags, $1\n\t"
+	"mtc0\t\\flags, $12\n\t"
+	"nop\n\t"
+	"nop\n\t"
+	"nop\n\t"
+	".set\tat\n\t"
+	".set\treorder\n\t"
+	".endm");
 
 #define __restore_flags(flags)					\
 do {								\
 	unsigned long __tmp1;					\
 								\
 	__asm__ __volatile__(					\
-		".set\tnoreorder\t\t\t# __restore_flags\n\t"	\
-		".set\tnoat\n\t"				\
-		"mfc0\t$1, $12\n\t"				\
-		"andi\t%0, 1\n\t"				\
-		"ori\t$1, 1\n\t"				\
-		"xori\t$1, 1\n\t"				\
-		"or\t%0, $1\n\t"				\
-		"mtc0\t%0, $12\n\t"				\
-		"nop\n\t"					\
-		"nop\n\t"					\
-		"nop\n\t"					\
-		".set\tat\n\t"					\
-		".set\treorder"					\
+		"__restore_flags\t%0"				\
 		: "=r" (__tmp1)					\
 		: "0" (flags)					\
-		: "$1", "memory");				\
+		: "memory");					\
 } while(0)
 
 #ifdef CONFIG_SMP
@@ -183,7 +212,7 @@
 #define SWITCH_DO_LAZY_FPU					\
 	if (prev->flags & PF_USEDFPU) {				\
 		lazy_fpu_switch(prev, 0);			\
-		set_cp0_status(ST0_CU1, ~ST0_CU1);		\
+		clear_cp0_status(ST0_CU1);			\
 		prev->flags &= ~PF_USEDFPU;			\
 	}
 #else /* CONFIG_SMP */
@@ -201,38 +230,38 @@
 	unsigned long dummy;
 
 	__asm__ __volatile__(
-		".set\tnoreorder\t\t\t# xchg_u32\n\t"
-		".set\tnoat\n\t"
+		".set\tpush\t\t\t\t# xchg_u32\n\t"
+		".set\tnoreorder\n\t"
+		".set\tnomacro\n\t"
 		"ll\t%0, %3\n"
-		"1:\tmove\t$1, %2\n\t"
-		"sc\t$1, %1\n\t"
-		"beqzl\t$1, 1b\n\t"
+		"1:\tmove\t%2, %z4\n\t"
+		"sc\t%2, %1\n\t"
+		"beqzl\t%2, 1b\n\t"
 		" ll\t%0, %3\n\t"
-		".set\tat\n\t"
-		".set\treorder"
-		: "=r" (val), "=o" (*m), "=r" (dummy)
-		: "o" (*m), "2" (val)
+		".set\tpop"
+		: "=&r" (val), "=m" (*m), "=&r" (dummy)
+		: "R" (*m), "Jr" (val)
 		: "memory");
 
 	return val;
 }
 
-extern __inline__ unsigned long xchg_u64(volatile long * m, unsigned long val)
+extern __inline__ unsigned long xchg_u64(volatile int * m, unsigned long val)
 {
 	unsigned long dummy;
 
 	__asm__ __volatile__(
-		".set\tnoreorder\t\t\t# xchg_u64\n\t"
-		".set\tnoat\n\t"
+		".set\tpush\t\t\t\t# xchg_u64\n\t"
+		".set\tnoreorder\n\t"
+		".set\tnomacro\n\t"
 		"lld\t%0, %3\n"
-		"1:\tmove\t$1, %2\n\t"
-		"scd\t$1, %1\n\t"
-		"beqzl\t$1, 1b\n\t"
+		"1:\tmove\t%2, %z4\n\t"
+		"scd\t%2, %1\n\t"
+		"beqzl\t%2, 1b\n\t"
 		" lld\t%0, %3\n\t"
-		".set\tat\n\t"
-		".set\treorder"
-		: "=r" (val), "=o" (*m), "=r" (dummy)
-		: "o" (*m), "2" (val)
+		".set\tpop"
+		: "=&r" (val), "=m" (*m), "=&r" (dummy)
+		: "R" (*m), "Jr" (val)
 		: "memory");
 
 	return val;
@@ -242,8 +271,8 @@
 
 #define tas(ptr) (xchg((ptr),1))
 
-static __inline__ unsigned long
-__xchg(unsigned long x, volatile void * ptr, int size)
+static inline unsigned long __xchg(unsigned long x, volatile void * ptr,
+	int size)
 {
 	switch (size) {
 		case 4: