Commit 8d823083 authored by Kumar Gala's avatar Kumar Gala Committed by Tom Rix

ppc/85xx: Move code around to prep for NAND_SPL

If we move some of the functions in tlb.c around, we need fewer
#ifdefs.  The first-stage loader needs only invalidate_tlb and
init_tlbs.
Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
parent b855dc47
......@@ -32,6 +32,29 @@
DECLARE_GLOBAL_DATA_PTR;
/*
 * Flash-invalidate all entries of the selected TLB array.
 *
 * tlb: TLB array index (0 or 1); any other value is silently ignored.
 *
 * NOTE(review): the MMUCSR0 bit values (0x4 for TLB0, 0x2 for TLB1) are
 * taken as-is from the original code — confirm against the core's MMU
 * register reference.
 */
void invalidate_tlb(u8 tlb)
{
	switch (tlb) {
	case 0:
		mtspr(MMUCSR0, 0x4);
		break;
	case 1:
		mtspr(MMUCSR0, 0x2);
		break;
	default:
		/* no other TLB arrays — nothing to do */
		break;
	}
}
/*
 * Program every entry of the board's static TLB table into the MMU.
 *
 * Iterates over tlb_table[0 .. num_tlb_entries) and writes each entry's
 * MAS0/1/2/3/7 values via write_tlb().  Both the table and its length are
 * defined by the board/SoC code elsewhere in the project.
 */
void init_tlbs(void)
{
	int idx;

	for (idx = 0; idx < num_tlb_entries; idx++)
		write_tlb(tlb_table[idx].mas0, tlb_table[idx].mas1,
			  tlb_table[idx].mas2, tlb_table[idx].mas3,
			  tlb_table[idx].mas7);
}
void set_tlb(u8 tlb, u32 epn, u64 rpn,
u8 perms, u8 wimge,
u8 ts, u8 esel, u8 tsize, u8 iprot)
......@@ -77,29 +100,6 @@ void disable_tlb(u8 esel)
#endif
}
/*
 * Flash-invalidate every entry in the given TLB array.
 *
 * tlb: which TLB array to invalidate (0 or 1); other values do nothing.
 * The two cases are mutually exclusive, so an else-if chain is equivalent
 * to the original pair of independent ifs.
 */
void invalidate_tlb(u8 tlb)
{
	if (tlb == 0) {
		/* presumably the TLB0 flash-invalidate bit — TODO confirm */
		mtspr(MMUCSR0, 0x4);
	} else if (tlb == 1) {
		/* presumably the TLB1 flash-invalidate bit — TODO confirm */
		mtspr(MMUCSR0, 0x2);
	}
}
/*
 * Load the statically-defined TLB entries into the MMU.
 *
 * Walks the board-provided tlb_table array (num_tlb_entries elements) and
 * hands each entry's MAS register values to write_tlb().
 */
void init_tlbs(void)
{
	int i = 0;

	while (i < num_tlb_entries) {
		write_tlb(tlb_table[i].mas0, tlb_table[i].mas1,
			  tlb_table[i].mas2, tlb_table[i].mas3,
			  tlb_table[i].mas7);
		i++;
	}
}
static void tlbsx (const volatile unsigned *addr)
{
__asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment