author     Paul Mundt <lethal@linux-sh.org>  2007-11-10 20:35:53 +0900
committer  Paul Mundt <lethal@linux-sh.org>  2008-01-28 13:18:44 +0900
commit     959f7d587e236a2d218f527771f156c336409d11 (patch)
tree       9cbc0ec58d072dc58c04e91488a8013518582af4 /include
parent     256b22ca66987c537064dc25b0b267966189b5ba (diff)
sh: Move over the SH-5 head.S and tlb.h.
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
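The net effect on include/asm-sh/tlb.h is a thin dispatch wrapper. A sketch of the resulting header, reconstructed from the hunks below (the unchanged middle of the header is elided):

    #ifndef __ASM_SH_TLB_H
    #define __ASM_SH_TLB_H

    #ifdef CONFIG_SUPERH64
    # include "tlb_64.h"	/* SH-5 TLB definitions, also usable from head.S */
    #endif

    #ifndef __ASSEMBLY__
    /* ... tlb_start_vma()/tlb_flush() glue and <asm-generic/tlb.h>, C only ... */
    #endif /* __ASSEMBLY__ */
    #endif /* __ASM_SH_TLB_H */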
Diffstat (limited to 'include')
-rw-r--r--  include/asm-sh/tlb.h                                              10
-rw-r--r--  include/asm-sh/tlb_64.h (renamed from include/asm-sh64/tlb.h)     45
2 files changed, 20 insertions, 35 deletions
diff --git a/include/asm-sh/tlb.h b/include/asm-sh/tlb.h
index 53d185bcf87..56ad1fb888a 100644
--- a/include/asm-sh/tlb.h
+++ b/include/asm-sh/tlb.h
@@ -1,6 +1,12 @@
#ifndef __ASM_SH_TLB_H
#define __ASM_SH_TLB_H
+#ifdef CONFIG_SUPERH64
+# include "tlb_64.h"
+#endif
+
+#ifndef __ASSEMBLY__
+
#define tlb_start_vma(tlb, vma) \
flush_cache_range(vma, vma->vm_start, vma->vm_end)
@@ -15,4 +21,6 @@
#define tlb_flush(tlb) flush_tlb_mm((tlb)->mm)
#include <asm-generic/tlb.h>
-#endif
+
+#endif /* __ASSEMBLY__ */
+#endif /* __ASM_SH_TLB_H */
diff --git a/include/asm-sh64/tlb.h b/include/asm-sh/tlb_64.h
index 4979408bd88..0308e05fc57 100644
--- a/include/asm-sh64/tlb.h
+++ b/include/asm-sh/tlb_64.h
@@ -1,20 +1,14 @@
/*
- * include/asm-sh64/tlb.h
+ * include/asm-sh/tlb_64.h
*
* Copyright (C) 2003 Paul Mundt
*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
- *
- */
-#ifndef __ASM_SH64_TLB_H
-#define __ASM_SH64_TLB_H
-
-/*
- * Note! These are mostly unused, we just need the xTLB_LAST_VAR_UNRESTRICTED
- * for head.S! Once this limitation is gone, we can clean the rest of this up.
*/
+#ifndef __ASM_SH_TLB_64_H
+#define __ASM_SH_TLB_64_H
/* ITLB defines */
#define ITLB_FIXED 0x00000000 /* First fixed ITLB, see head.S */
@@ -63,30 +57,13 @@ static inline void __flush_tlb_slot(unsigned long long slot)
}
/* arch/sh64/mm/tlb.c */
-extern int sh64_tlb_init(void);
-extern unsigned long long sh64_next_free_dtlb_entry(void);
-extern unsigned long long sh64_get_wired_dtlb_entry(void);
-extern int sh64_put_wired_dtlb_entry(unsigned long long entry);
-
-extern void sh64_setup_tlb_slot(unsigned long long config_addr, unsigned long eaddr, unsigned long asid, unsigned long paddr);
-extern void sh64_teardown_tlb_slot(unsigned long long config_addr);
-
-#define tlb_start_vma(tlb, vma) \
- flush_cache_range(vma, vma->vm_start, vma->vm_end)
-
-#define tlb_end_vma(tlb, vma) \
- flush_tlb_range(vma, vma->vm_start, vma->vm_end)
-
-#define __tlb_remove_tlb_entry(tlb, pte, address) do { } while (0)
-
-/*
- * Flush whole TLBs for MM
- */
-#define tlb_flush(tlb) flush_tlb_mm((tlb)->mm)
-
-#include <asm-generic/tlb.h>
+int sh64_tlb_init(void);
+unsigned long long sh64_next_free_dtlb_entry(void);
+unsigned long long sh64_get_wired_dtlb_entry(void);
+int sh64_put_wired_dtlb_entry(unsigned long long entry);
+void sh64_setup_tlb_slot(unsigned long long config_addr, unsigned long eaddr,
+ unsigned long asid, unsigned long paddr);
+void sh64_teardown_tlb_slot(unsigned long long config_addr);
#endif /* __ASSEMBLY__ */
-
-#endif /* __ASM_SH64_TLB_H */
-
+#endif /* __ASM_SH_TLB_64_H */
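For reference, a minimal usage sketch of the wired-DTLB helpers that remain declared in tlb_64.h. The caller below (pin_one_mapping) is hypothetical, assuming <asm/tlb.h> is included with CONFIG_SUPERH64=y and that the implementations live in arch/sh64/mm/tlb.c as the comment in the header notes:

    /* Hypothetical caller: eaddr/asid/paddr describe a mapping to pin. */
    static int pin_one_mapping(unsigned long eaddr, unsigned long asid,
    			   unsigned long paddr)
    {
    	unsigned long long slot;

    	slot = sh64_get_wired_dtlb_entry();		/* reserve a wired DTLB slot */
    	sh64_setup_tlb_slot(slot, eaddr, asid, paddr);	/* program eaddr -> paddr */

    	/* ... use the pinned translation ... */

    	sh64_teardown_tlb_slot(slot);			/* invalidate the slot again */
    	return sh64_put_wired_dtlb_entry(slot);		/* return it to the free pool */
    }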