From 24f287e412ae90de8d281543c8b1043b6ed6c019 Mon Sep 17 00:00:00 2001
From: "David S. Miller" <davem@davemloft.net>
Date: Mon, 15 Oct 2007 16:41:44 -0700
Subject: [SPARC64]: Implement atomic backoff.

When the cpu count is high and contention hits an atomic object, the
processors can synchronize such that some cpus continually get knocked
out and cannot complete the atomic update.

So implement an exponential backoff when SMP.

Signed-off-by: David S. Miller <davem@davemloft.net>
---
 arch/sparc64/lib/atomic.S | 38 +++++++++++++++++++++++++++-----------
 arch/sparc64/lib/bitops.S | 30 +++++++++++++++++++++---------
 2 files changed, 48 insertions(+), 20 deletions(-)

diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 9633750167d..70ac4186f62 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -1,10 +1,10 @@
-/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
- * atomic.S: These things are too big to do inline.
+/* atomic.S: These things are too big to do inline.
  *
- * Copyright (C) 1999 David S. Miller (davem@redhat.com)
+ * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text
 
@@ -16,27 +16,31 @@
 	.globl	atomic_add
 	.type	atomic_add,#function
 atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add, .-atomic_add
 
 	.globl	atomic_sub
 	.type	atomic_sub,#function
 atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub, .-atomic_sub
 
 	/* On SMP we need to use memory barriers to ensure
@@ -60,89 +64,101 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 add	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add_ret, .-atomic_add_ret
 
 	.globl	atomic_sub_ret
 	.type	atomic_sub_ret,#function
 atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 sub	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub_ret, .-atomic_sub_ret
 
 	.globl	atomic64_add
 	.type	atomic64_add,#function
 atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add, .-atomic64_add
 
 	.globl	atomic64_sub
 	.type	atomic64_sub,#function
 atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub, .-atomic64_sub
 
 	.globl	atomic64_add_ret
 	.type	atomic64_add_ret,#function
 atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 add	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add_ret, .-atomic64_add_ret
 
 	.globl	atomic64_sub_ret
 	.type	atomic64_sub_ret,#function
 atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 sub	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub_ret, .-atomic64_sub_ret
diff --git a/arch/sparc64/lib/bitops.S b/arch/sparc64/lib/bitops.S
index 892431a8213..6b015a6eefb 100644
--- a/arch/sparc64/lib/bitops.S
+++ b/arch/sparc64/lib/bitops.S
@@ -1,10 +1,10 @@
-/* $Id: bitops.S,v 1.3 2001/11/18 00:12:56 davem Exp $
- * bitops.S: Sparc64 atomic bit operations.
+/* bitops.S: Sparc64 atomic bit operations.
  *
- * Copyright (C) 2000 David S. Miller (davem@redhat.com)
+ * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text
 
@@ -29,6 +29,7 @@
 	.globl	test_and_set_bit
 	.type	test_and_set_bit,#function
 test_and_set_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -40,18 +41,20 @@ test_and_set_bit:	/* %o0=nr, %o1=addr */
 	or	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_set_bit, .-test_and_set_bit
 
 	.globl	test_and_clear_bit
 	.type	test_and_clear_bit,#function
 test_and_clear_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -63,18 +66,20 @@ test_and_clear_bit:	/* %o0=nr, %o1=addr */
 	andn	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_clear_bit, .-test_and_clear_bit
 
 	.globl	test_and_change_bit
 	.type	test_and_change_bit,#function
 test_and_change_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -86,18 +91,20 @@ test_and_change_bit:	/* %o0=nr, %o1=addr */
 	xor	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_change_bit, .-test_and_change_bit
 
 	.globl	set_bit
 	.type	set_bit,#function
 set_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -108,15 +115,17 @@ set_bit:	/* %o0=nr, %o1=addr */
 	or	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	set_bit, .-set_bit
 
 	.globl	clear_bit
 	.type	clear_bit,#function
 clear_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -127,15 +136,17 @@ clear_bit:	/* %o0=nr, %o1=addr */
 	andn	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	clear_bit, .-clear_bit
 
 	.globl	change_bit
 	.type	change_bit,#function
 change_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -146,8 +157,9 @@ change_bit:	/* %o0=nr, %o1=addr */
 	xor	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	change_bit, .-change_bit
-- 
cgit v1.2.3
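
The change in every function above is the same: on a failed cas/casx the
code now branches forward to label 2:, spins via BACKOFF_SPIN, and only
then retries from label 1:, presumably growing the spin interval on each
failure so that contending CPUs fall out of lock-step. Below is a minimal
C sketch of that compare-and-swap-retry-with-exponential-backoff technique,
assuming C11 atomics and GCC inline asm; the function name, the doubling
policy, and the BACKOFF_CAP value are illustrative assumptions, not taken
from the asm/backoff.h this patch relies on.

#include <stdatomic.h>

/* Illustrative cap on the backoff interval; hypothetical value. */
#define BACKOFF_CAP (4 * 1024)

static void atomic_add_backoff(atomic_int *v, int inc)
{
	unsigned int backoff = 1;	/* analogue of BACKOFF_SETUP */
	int old = atomic_load_explicit(v, memory_order_relaxed);

	/* Retry the compare-and-swap until no other CPU raced us;
	 * on failure, old is reloaded with the current value. */
	while (!atomic_compare_exchange_weak_explicit(
			v, &old, old + inc,
			memory_order_relaxed, memory_order_relaxed)) {
		/* Analogue of BACKOFF_SPIN: busy-wait for the current
		 * interval, then double it up to the cap. */
		for (unsigned int i = 0; i < backoff; i++)
			__asm__ __volatile__("" ::: "memory"); /* keep the loop */
		if (backoff < BACKOFF_CAP)
			backoff <<= 1;
	}
}

The point of growing the interval is that after k consecutive CAS failures
a CPU waits on the order of 2^k iterations before retrying, which spreads
the retries of contending CPUs apart instead of letting them collide in
lock-step indefinitely, the failure mode the commit message describes.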