Name: Fix cpu_local_* for generic and x86 variants
Status: Untested
Signed-off-by: Rusty Russell

cpu_local_inc() and friends must disable preemption on non-atomic
implementations, otherwise they can give wrong results if the task is
preempted (and possibly migrated to another CPU) in the middle of the
read-modify-write.  A sketch of the failure mode follows the diffs.

Index: linux-2.6.10-rc2-bk13-Percpu/include/asm-x86_64/local.h
===================================================================
--- linux-2.6.10-rc2-bk13-Percpu.orig/include/asm-x86_64/local.h	2003-09-22 10:26:46.000000000 +1000
+++ linux-2.6.10-rc2-bk13-Percpu/include/asm-x86_64/local.h	2004-12-02 13:09:06.000000000 +1100
@@ -60,10 +60,14 @@
  */
 #define cpu_local_read(v)	local_read(&__get_cpu_var(v))
 #define cpu_local_set(v, i)	local_set(&__get_cpu_var(v), (i))
-#define cpu_local_inc(v)	local_inc(&__get_cpu_var(v))
-#define cpu_local_dec(v)	local_dec(&__get_cpu_var(v))
-#define cpu_local_add(i, v)	local_add((i), &__get_cpu_var(v))
-#define cpu_local_sub(i, v)	local_sub((i), &__get_cpu_var(v))
+#define cpu_local_inc(v) \
+	do { local_inc(&get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_dec(v) \
+	do { local_dec(&get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_add(i, v) \
+	do { local_add((i), &get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_sub(i, v) \
+	do { local_sub((i), &get_cpu_var(v)); put_cpu_var(v); } while(0)
 
 #define __cpu_local_inc(v)	cpu_local_inc(v)
 #define __cpu_local_dec(v)	cpu_local_dec(v)

Index: linux-2.6.10-rc2-bk13-Percpu/include/asm-generic/local.h
===================================================================
--- linux-2.6.10-rc2-bk13-Percpu.orig/include/asm-generic/local.h	2004-10-19 14:34:17.000000000 +1000
+++ linux-2.6.10-rc2-bk13-Percpu/include/asm-generic/local.h	2004-12-02 13:09:06.000000000 +1100
@@ -102,10 +102,14 @@
  */
 #define cpu_local_read(v)	local_read(&__get_cpu_var(v))
 #define cpu_local_set(v, i)	local_set(&__get_cpu_var(v), (i))
-#define cpu_local_inc(v)	local_inc(&__get_cpu_var(v))
-#define cpu_local_dec(v)	local_dec(&__get_cpu_var(v))
-#define cpu_local_add(i, v)	local_add((i), &__get_cpu_var(v))
-#define cpu_local_sub(i, v)	local_sub((i), &__get_cpu_var(v))
+#define cpu_local_inc(v) \
+	do { local_inc(&get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_dec(v) \
+	do { local_dec(&get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_add(i, v) \
+	do { local_add((i), &get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_sub(i, v) \
+	do { local_sub((i), &get_cpu_var(v)); put_cpu_var(v); } while(0)
 
 /* Non-atomic increments, ie. preemption disabled and won't be touched
  * in interrupt, etc. Some archs can optimize this case well.
Index: linux-2.6.10-rc2-bk13-Percpu/include/asm-i386/local.h
===================================================================
--- linux-2.6.10-rc2-bk13-Percpu.orig/include/asm-i386/local.h	2003-09-22 10:26:12.000000000 +1000
+++ linux-2.6.10-rc2-bk13-Percpu/include/asm-i386/local.h	2004-12-02 13:09:06.000000000 +1100
@@ -57,10 +57,14 @@
  */
 #define cpu_local_read(v)	local_read(&__get_cpu_var(v))
 #define cpu_local_set(v, i)	local_set(&__get_cpu_var(v), (i))
-#define cpu_local_inc(v)	local_inc(&__get_cpu_var(v))
-#define cpu_local_dec(v)	local_dec(&__get_cpu_var(v))
-#define cpu_local_add(i, v)	local_add((i), &__get_cpu_var(v))
-#define cpu_local_sub(i, v)	local_sub((i), &__get_cpu_var(v))
+#define cpu_local_inc(v) \
+	do { local_inc(&get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_dec(v) \
+	do { local_dec(&get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_add(i, v) \
+	do { local_add((i), &get_cpu_var(v)); put_cpu_var(v); } while(0)
+#define cpu_local_sub(i, v) \
+	do { local_sub((i), &get_cpu_var(v)); put_cpu_var(v); } while(0)
 
 #define __cpu_local_inc(v)	cpu_local_inc(v)
 #define __cpu_local_dec(v)	cpu_local_dec(v)
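
For reference, a sketch of the failure mode, assuming the usual
2.6-era percpu.h definitions in which get_cpu_var() does
preempt_disable() before taking the per-CPU address and put_cpu_var()
does the matching preempt_enable().  The variable "hits" and the
function count_hit() are invented for illustration; kernel context is
assumed, this is not a standalone program.

	#include <linux/percpu.h>
	#include <asm/local.h>

	/* Invented example counter, one instance per CPU. */
	static DEFINE_PER_CPU(local_t, hits);

	/*
	 * Where local_inc() is a plain non-atomic read-modify-write,
	 * the old expansion
	 *
	 *	local_inc(&__get_cpu_var(hits));
	 *
	 * reads the counter and stores value + 1 as separate steps with
	 * preemption still enabled.  A task scheduled out between the
	 * two steps overwrites any increment made in that window with
	 * its stale value + 1, and after a migration the store can land
	 * on a different CPU's copy.  The patched macro closes the
	 * window:
	 */
	static void count_hit(void)
	{
		cpu_local_inc(hits);
		/*
		 * ... which now expands to the equivalent of:
		 *
		 *	preempt_disable();
		 *	local_inc(&__get_cpu_var(hits));
		 *	preempt_enable();
		 */
	}

The same window is why the x86 variants need the change too: i386 and
x86_64 local_inc() is a single non-LOCK'd inc, safe only against the
owning CPU, so a migration between computing the per-CPU address and
executing the increment would race with the other CPU's own updates.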