ppc/85xx: Move code around to prep for NAND_SPL

If we move some of the functions in tlb.c around we need less
ifdefs.  The first stage loader just needs invalidate_tlb and
init_tlbs.

Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
This commit is contained in:
Kumar Gala 2009-09-11 12:32:01 -05:00
parent 206af3527c
commit b2eec281a8
1 changed file with 23 additions and 23 deletions

View File

@@ -32,6 +32,29 @@
DECLARE_GLOBAL_DATA_PTR;
/*
 * Flash-invalidate one of the two TLB arrays.
 *
 * tlb: which TLB array to invalidate (0 or 1); any other value is
 * silently ignored.  The invalidation is triggered by writing the
 * corresponding flash-invalidate bit to MMUCSR0.
 */
void invalidate_tlb(u8 tlb)
{
	/* MMUCSR0: 0x4 targets TLB0, 0x2 targets TLB1 */
	if (tlb == 0)
		mtspr(MMUCSR0, 0x4);
	else if (tlb == 1)
		mtspr(MMUCSR0, 0x2);
}
/*
 * Program every entry of the board-provided static tlb_table into
 * the MMU, one write_tlb() call per entry.
 */
void init_tlbs(void)
{
	int idx;

	for (idx = 0; idx < num_tlb_entries; idx++)
		write_tlb(tlb_table[idx].mas0, tlb_table[idx].mas1,
			  tlb_table[idx].mas2, tlb_table[idx].mas3,
			  tlb_table[idx].mas7);
}
void set_tlb(u8 tlb, u32 epn, u64 rpn,
u8 perms, u8 wimge,
u8 ts, u8 esel, u8 tsize, u8 iprot)
@@ -77,29 +100,6 @@ void disable_tlb(u8 esel)
#endif
}
/*
 * NOTE(review): this is the pre-move copy of invalidate_tlb being
 * REMOVED by this diff hunk; the function now sits near the top of
 * the file so the NAND_SPL first-stage loader needs fewer #ifdefs
 * (per the commit message).  Body is byte-identical to the added copy.
 */
void invalidate_tlb(u8 tlb)
{
/* write the per-array flash-invalidate bit to MMUCSR0: 0x4 for TLB0 */
if (tlb == 0)
mtspr(MMUCSR0, 0x4);
/* ... and 0x2 for TLB1 */
if (tlb == 1)
mtspr(MMUCSR0, 0x2);
}
/*
 * NOTE(review): pre-move copy of init_tlbs being REMOVED by this diff
 * hunk (code-motion only; body is byte-identical to the copy added at
 * the top of the file).  Loads every tlb_table entry into the MMU via
 * write_tlb().
 */
void init_tlbs(void)
{
int i;
/* one write_tlb() per static table entry, passing its MAS registers */
for (i = 0; i < num_tlb_entries; i++) {
write_tlb(tlb_table[i].mas0,
tlb_table[i].mas1,
tlb_table[i].mas2,
tlb_table[i].mas3,
tlb_table[i].mas7);
}
return ;
}
static void tlbsx (const volatile unsigned *addr)
{
__asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));