author    Ingo Molnar <mingo@elte.hu>    2008-07-10 18:55:17 +0200
committer Ingo Molnar <mingo@elte.hu>    2008-07-10 18:55:17 +0200
commit    520b9617ab4aea764ddfc5d58cae21c16b3318e1 (patch)
tree      1612249d11d455cfd6a0d691f5564673ae179c5f /include/asm-x86/xor_64.h
parent    f57e91682d141ea50d8c6d42cdc251b6256a3755 (diff)
parent    f87f38ec5a5157aa39f44f6018dc58ea62f8e0e2 (diff)
Merge branch 'x86/core' into x86/generalize-visws
Diffstat (limited to 'include/asm-x86/xor_64.h')
-rw-r--r--  include/asm-x86/xor_64.h | 5
1 file changed, 5 insertions, 0 deletions
diff --git a/include/asm-x86/xor_64.h b/include/asm-x86/xor_64.h
index 24957e39ac8..2d3a18de295 100644
--- a/include/asm-x86/xor_64.h
+++ b/include/asm-x86/xor_64.h
@@ -1,3 +1,6 @@
+#ifndef ASM_X86__XOR_64_H
+#define ASM_X86__XOR_64_H
+
/*
* Optimized RAID-5 checksumming functions for MMX and SSE.
*
@@ -354,3 +357,5 @@ do { \
We may also be able to load into the L1 only depending on how the cpu
deals with a load to a line that is being prefetched. */
#define XOR_SELECT_TEMPLATE(FASTEST) (&xor_block_sse)
+
+#endif /* ASM_X86__XOR_64_H */
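
The patch merely wraps the header in an include guard so it can be included more than once in a translation unit without triggering redefinition errors. Below is a minimal standalone sketch of the same pattern; the file name example_guard.h, the macro EXAMPLE_GUARD_H, and the helper example_xor() are hypothetical names chosen for illustration, not part of the kernel source.

/* example_guard.h - hypothetical header illustrating the guard pattern added above */
#ifndef EXAMPLE_GUARD_H
#define EXAMPLE_GUARD_H

/* Everything between the #ifndef/#endif pair is compiled only on the
 * first inclusion; later inclusions see EXAMPLE_GUARD_H already defined
 * and skip the body entirely. */
static inline int example_xor(int a, int b)
{
	return a ^ b;
}

#endif /* EXAMPLE_GUARD_H */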