Diffstat (limited to 'include/asm-i386')
-rw-r--r--  include/asm-i386/alternative.h                20
-rw-r--r--  include/asm-i386/mach-default/mach_mpspec.h    4
-rw-r--r--  include/asm-i386/rwlock.h                     28
-rw-r--r--  include/asm-i386/spinlock.h                   17
-rw-r--r--  include/asm-i386/unistd.h                      4
-rw-r--r--  include/asm-i386/unwind.h                      1
6 files changed, 23 insertions, 51 deletions
diff --git a/include/asm-i386/alternative.h b/include/asm-i386/alternative.h
index 96adbabec74..b01a7ec409c 100644
--- a/include/asm-i386/alternative.h
+++ b/include/asm-i386/alternative.h
@@ -88,9 +88,6 @@ static inline void alternatives_smp_switch(int smp) {}
 /*
  * Alternative inline assembly for SMP.
  *
- * alternative_smp() takes two versions (SMP first, UP second) and is
- * for more complex stuff such as spinlocks.
- *
  * The LOCK_PREFIX macro defined here replaces the LOCK and
  * LOCK_PREFIX macros used everywhere in the source tree.
  *
@@ -110,21 +107,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 
 #ifdef CONFIG_SMP
-#define alternative_smp(smpinstr, upinstr, args...) \
-        asm volatile ("661:\n\t" smpinstr "\n662:\n" \
-                      ".section .smp_altinstructions,\"a\"\n" \
-                      "  .align 4\n" \
-                      "  .long 661b\n" /* label */ \
-                      "  .long 663f\n" /* new instruction */ \
-                      "  .byte 0x68\n" /* X86_FEATURE_UP */ \
-                      "  .byte 662b-661b\n" /* sourcelen */ \
-                      "  .byte 664f-663f\n" /* replacementlen */ \
-                      ".previous\n" \
-                      ".section .smp_altinstr_replacement,\"awx\"\n" \
-                      "663:\n\t" upinstr "\n" /* replacement */ \
-                      "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
-                      ".previous" : args)
-
 #define LOCK_PREFIX \
@@ -133,8 +115,6 @@ static inline void alternatives_smp_switch(int smp) {}
                 "661:\n\tlock; "
 
 #else /* ! CONFIG_SMP */
-#define alternative_smp(smpinstr, upinstr, args...) \
-        asm volatile (upinstr : args)
 #define LOCK_PREFIX ""
 
 #endif
diff --git a/include/asm-i386/mach-default/mach_mpspec.h b/include/asm-i386/mach-default/mach_mpspec.h
index 6b5dadcf1d0..51c9a977593 100644
--- a/include/asm-i386/mach-default/mach_mpspec.h
+++ b/include/asm-i386/mach-default/mach_mpspec.h
@@ -3,6 +3,10 @@
 
 #define MAX_IRQ_SOURCES 256
 
+#if CONFIG_BASE_SMALL == 0
+#define MAX_MP_BUSSES 256
+#else
 #define MAX_MP_BUSSES 32
+#endif
 
 #endif /* __ASM_MACH_MPSPEC_H */
diff --git a/include/asm-i386/rwlock.h b/include/asm-i386/rwlock.h
index 96b0bef2ea5..87c069ccba0 100644
--- a/include/asm-i386/rwlock.h
+++ b/include/asm-i386/rwlock.h
@@ -21,23 +21,21 @@
 #define RW_LOCK_BIAS_STR "0x01000000"
 
 #define __build_read_lock_ptr(rw, helper) \
-        alternative_smp("lock; subl $1,(%0)\n\t" \
+        asm volatile(LOCK_PREFIX " subl $1,(%0)\n\t" \
                      "jns 1f\n" \
                      "call " helper "\n\t" \
-                     "1:\n", \
-                     "subl $1,(%0)\n\t", \
-                     :"a" (rw) : "memory")
+                     "1:\n" \
+                     ::"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper) \
-        alternative_smp("lock; subl $1,%0\n\t" \
+        asm volatile(LOCK_PREFIX " subl $1,%0\n\t" \
                      "jns 1f\n" \
                      "pushl %%eax\n\t" \
                      "leal %0,%%eax\n\t" \
                      "call " helper "\n\t" \
                      "popl %%eax\n\t" \
-                     "1:\n", \
-                     "subl $1,%0\n\t", \
-                     "+m" (*(volatile int *)rw) : : "memory")
+                     "1:\n" \
+                     :"+m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper) do { \
                                         if (__builtin_constant_p(rw)) \
@@ -47,23 +45,21 @@
 } while (0)
 
 #define __build_write_lock_ptr(rw, helper) \
-        alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+        asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
                      "jz 1f\n" \
                      "call " helper "\n\t" \
-                     "1:\n", \
-                     "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t", \
-                     :"a" (rw) : "memory")
+                     "1:\n" \
+                     ::"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
-        alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
+        asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
                      "jz 1f\n" \
                      "pushl %%eax\n\t" \
                      "leal %0,%%eax\n\t" \
                      "call " helper "\n\t" \
                      "popl %%eax\n\t" \
-                     "1:\n", \
-                     "subl $" RW_LOCK_BIAS_STR ",%0\n\t", \
-                     "+m" (*(volatile int *)rw) : : "memory")
+                     "1:\n" \
+                     :"+m" (*(volatile int *)rw) : : "memory")
 
 #define __build_write_lock(rw, helper) do { \
                                         if (__builtin_constant_p(rw)) \
diff --git a/include/asm-i386/spinlock.h b/include/asm-i386/spinlock.h
index d816c62a7a1..d1020363c41 100644
--- a/include/asm-i386/spinlock.h
+++ b/include/asm-i386/spinlock.h
@@ -22,7 +22,7 @@
 
 #define __raw_spin_lock_string \
         "\n1:\t" \
-        "lock ; decb %0\n\t" \
+        LOCK_PREFIX " ; decb %0\n\t" \
         "jns 3f\n" \
         "2:\t" \
         "rep;nop\n\t" \
@@ -38,7 +38,7 @@
  */
 #define __raw_spin_lock_string_flags \
         "\n1:\t" \
-        "lock ; decb %0\n\t" \
+        LOCK_PREFIX " ; decb %0\n\t" \
         "jns 5f\n" \
         "2:\t" \
         "testl $0x200, %1\n\t" \
@@ -57,15 +57,9 @@
         "jmp 4b\n" \
         "5:\n\t"
 
-#define __raw_spin_lock_string_up \
-        "\n\tdecb %0"
-
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
-        alternative_smp(
-                __raw_spin_lock_string,
-                __raw_spin_lock_string_up,
-                "+m" (lock->slock) : : "memory");
+        asm(__raw_spin_lock_string : "+m" (lock->slock) : : "memory");
 }
 
 /*
@@ -76,10 +70,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 #ifndef CONFIG_PROVE_LOCKING
 static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
 {
-        alternative_smp(
-                __raw_spin_lock_string_flags,
-                __raw_spin_lock_string_up,
-                "+m" (lock->slock) : "r" (flags) : "memory");
+        asm(__raw_spin_lock_string_flags : "+m" (lock->slock) : "r" (flags) : "memory");
 }
 #endif
 
diff --git a/include/asm-i386/unistd.h b/include/asm-i386/unistd.h
index fc1c8ddae14..d983b74e4d9 100644
--- a/include/asm-i386/unistd.h
+++ b/include/asm-i386/unistd.h
@@ -324,8 +324,6 @@
 #define __NR_vmsplice 316
 #define __NR_move_pages 317
 
-#ifdef __KERNEL__
-
 #define NR_syscalls 318
 
 /*
@@ -425,6 +423,8 @@ __asm__ volatile ("push %%ebp ; push %%ebx ; movl 4(%2),%%ebp ; " \
 __syscall_return(type,__res); \
 }
 
+#ifdef __KERNEL__
+
 #define __ARCH_WANT_IPC_PARSE_VERSION
 #define __ARCH_WANT_OLD_READDIR
 #define __ARCH_WANT_OLD_STAT
diff --git a/include/asm-i386/unwind.h b/include/asm-i386/unwind.h
index 69f0f1df672..4c1a0b96856 100644
--- a/include/asm-i386/unwind.h
+++ b/include/asm-i386/unwind.h
@@ -87,6 +87,7 @@ static inline int arch_unw_user_mode(const struct unwind_frame_info *info)
 #else
 
 #define UNW_PC(frame) ((void)(frame), 0)
+#define UNW_SP(frame) ((void)(frame), 0)
 
 static inline int arch_unw_user_mode(const void *info)
 {