[U-Boot-Users] [PATCH 4/8] New board SIMPC8313 support: support for booting from NAND in start.S

Ron Madrid ron_madrid at sbcglobal.net
Fri May 30 20:13:41 CEST 2008


New board SIMPC8313 support: support for booting from
NAND in start.S

Reorganization/optimization of a few functions to fit
into the 4K FCM boot RAM of the MPC8313 for nand_spl
build.

Signed-off-by: Ron Madrid
---
 cpu/mpc83xx/start.S |  310 ++++++++++++++++++++++++++++++++++++---------------
 1 files changed, 220 insertions(+), 90 deletions(-)

diff --git a/cpu/mpc83xx/start.S b/cpu/mpc83xx/start.S
index 309eb30..39bcaa8 100644
--- a/cpu/mpc83xx/start.S
+++ b/cpu/mpc83xx/start.S
@@ -63,6 +63,9 @@
  * Use r14 to access the GOT
  */
 	START_GOT
+#if defined(CONFIG_NAND_SPL)
+	GOT_ENTRY(_GOT_TABLE_)
+#else
 	GOT_ENTRY(_GOT2_TABLE_)
 	GOT_ENTRY(_FIXUP_TABLE_)
 
@@ -74,6 +77,7 @@
 	GOT_ENTRY(__init_end)
 	GOT_ENTRY(_end)
 	GOT_ENTRY(__bss_start)
+#endif /* CONFIG_NAND_SPL */
 	END_GOT
 
 /*
@@ -103,7 +107,56 @@ version_string:
 	.ascii U_BOOT_VERSION
 	.ascii " (", __DATE__, " - ", __TIME__, ")"
 	.ascii " ", CONFIG_IDENT_STRING, "\0"
+	.align 4
+
+/*****************************************************************/
+
+	.globl enable_addr_trans
+enable_addr_trans:
+	/* enable address translation */
+	mfmsr	r5
+	ori	r5, r5, (MSR_IR | MSR_DR)
+	mtmsr	r5
+	isync
+	blr
+
+/* Cache functions.
+ *
+ * Note: requires that all cache bits in
+ * HID0 are in the low half word.
+ */
+	.globl	icache_enable
+icache_enable:
+	mfspr	r3, HID0
+	ori	r3, r3, HID0_ICE
+	li	r4, HID0_ILOCK
+	andc	r3, r3, r4
+	ori	r4, r3, HID0_ICFI
+	isync
+	mtspr	HID0, r4    /* sets enable and invalidate, clears lock */
+	isync
+	mtspr	HID0, r3	/* clears invalidate */
+	blr
+
+	.globl	dcache_enable
+dcache_enable:
+	mfspr	r3, HID0
+	li	r5, HID0_DCFI|HID0_DLOCK
+	andc	r3, r3, r5
+	mtspr	HID0, r3		/* no invalidate, unlock */
+	ori	r3, r3, HID0_DCE
+	ori	r5, r3, HID0_DCFI
+	mtspr	HID0, r5		/* enable + invalidate */
+	mtspr	HID0, r3		/* enable */
+	sync
+	blr
+
+	.globl get_pvr
+get_pvr:
+	mfspr	r3, PVR
+	blr
 
+/*****************************************************************/
 
 #ifndef CONFIG_DEFAULT_IMMR
 #error CONFIG_DEFAULT_IMMR must be defined
@@ -165,7 +218,7 @@ boot_warm: /* time t 5 */
 
 	bl	init_e300_core
 
-#ifndef CFG_RAMBOOT
+#if !defined(CFG_RAMBOOT) && !defined(CONFIG_NAND_U_BOOT)
 
 	/* Inflate flash location so it appears everywhere, calculate */
 	/* the absolute address in final location of the FLASH, jump  */
@@ -181,7 +234,7 @@ in_flash:
 #if 1 /* Remapping flash with LAW0. */
 	bl remap_flash_by_law0
 #endif
-#endif	/* CFG_RAMBOOT */
+#endif	/* !defined(CFG_RAMBOOT) && !defined(CONFIG_NAND_U_BOOT) */
 
 	/* setup the bats */
 	bl	setup_bats
@@ -234,6 +287,7 @@ in_flash:
 	/* run low-level CPU init code (in Flash)*/
 	bl	cpu_init_f
 
+#if !defined(CONFIG_NAND_SPL)
 	/* r3: BOOTFLAG */
 	mr	r3, r21
 	/* run 1st part of board init code (in Flash)*/
@@ -428,6 +482,7 @@ int_return:
 	lwz	r1,GPR1(r1)
 	SYNC
 	rfi
+#endif /* CONFIG_NAND_SPL */
 
 /*
  * This code initialises the E300 processor core
@@ -550,6 +605,7 @@ init_e300_core: /* time t 10 */
 	/*------------------------------*/
 	blr
 
+#if !defined(CONFIG_NAND_SPL)
 	.globl	invalidate_bats
 invalidate_bats:
 	/* invalidate BATs */
@@ -577,6 +633,7 @@ invalidate_bats:
 	isync
 	sync
 	blr
+#endif /* CONFIG_NAND_SPL */
 
 	/* setup_bats - set them up to some initial state */
 	.globl	setup_bats
@@ -584,148 +641,228 @@ setup_bats:
 	addis	r0, r0, 0x0000
 
 	/* IBAT 0 */
+#if ((CFG_IBAT0L == 0) && (CFG_IBAT0U == 0))
+	mtspr	IBAT0L, r0
+	mtspr	IBAT0U, r0
+#else
 	addis	r4, r0, CFG_IBAT0L@h
 	ori	r4, r4, CFG_IBAT0L@l
 	addis	r3, r0, CFG_IBAT0U@h
 	ori	r3, r3, CFG_IBAT0U@l
 	mtspr	IBAT0L, r4
 	mtspr	IBAT0U, r3
+#endif
 	isync
 
 	/* DBAT 0 */
+#if ((CFG_DBAT0L == 0) && (CFG_DBAT0U == 0))
+	mtspr	DBAT0L, r0
+	mtspr	DBAT0U, r0
+#else
 	addis	r4, r0, CFG_DBAT0L@h
 	ori	r4, r4, CFG_DBAT0L@l
 	addis	r3, r0, CFG_DBAT0U@h
 	ori	r3, r3, CFG_DBAT0U@l
 	mtspr	DBAT0L, r4
 	mtspr	DBAT0U, r3
+#endif
 	isync
 
 	/* IBAT 1 */
+#if ((CFG_IBAT1L == 0) && (CFG_IBAT1U == 0))
+	mtspr	IBAT1L, r0
+	mtspr	IBAT1U, r0
+#else
 	addis	r4, r0, CFG_IBAT1L@h
 	ori	r4, r4, CFG_IBAT1L@l
 	addis	r3, r0, CFG_IBAT1U@h
 	ori	r3, r3, CFG_IBAT1U@l
 	mtspr	IBAT1L, r4
 	mtspr	IBAT1U, r3
+#endif
 	isync
 
 	/* DBAT 1 */
+#if ((CFG_DBAT1L == 0) && (CFG_DBAT1U == 0))
+	mtspr	DBAT1L, r0
+	mtspr	DBAT1U, r0
+#else
 	addis	r4, r0, CFG_DBAT1L@h
 	ori	r4, r4, CFG_DBAT1L@l
 	addis	r3, r0, CFG_DBAT1U@h
 	ori	r3, r3, CFG_DBAT1U@l
 	mtspr	DBAT1L, r4
 	mtspr	DBAT1U, r3
+#endif
 	isync
 
 	/* IBAT 2 */
+#if ((CFG_IBAT2L == 0) && (CFG_IBAT2U == 0))
+	mtspr	IBAT2L, r0
+	mtspr	IBAT2U, r0
+#else
 	addis	r4, r0, CFG_IBAT2L@h
 	ori	r4, r4, CFG_IBAT2L@l
 	addis	r3, r0, CFG_IBAT2U@h
 	ori	r3, r3, CFG_IBAT2U@l
 	mtspr	IBAT2L, r4
 	mtspr	IBAT2U, r3
+#endif
 	isync
 
 	/* DBAT 2 */
+#if ((CFG_DBAT2L == 0) && (CFG_DBAT2U == 0))
+	mtspr	DBAT2L, r0
+	mtspr	DBAT2U, r0
+#else
 	addis	r4, r0, CFG_DBAT2L@h
 	ori	r4, r4, CFG_DBAT2L@l
 	addis	r3, r0, CFG_DBAT2U@h
 	ori	r3, r3, CFG_DBAT2U@l
 	mtspr	DBAT2L, r4
 	mtspr	DBAT2U, r3
+#endif
 	isync
 
 	/* IBAT 3 */
+#if ((CFG_IBAT3L == 0) && (CFG_IBAT3U == 0))
+	mtspr	IBAT3L, r0
+	mtspr	IBAT3U, r0
+#else
 	addis	r4, r0, CFG_IBAT3L@h
 	ori	r4, r4, CFG_IBAT3L@l
 	addis	r3, r0, CFG_IBAT3U@h
 	ori	r3, r3, CFG_IBAT3U@l
 	mtspr	IBAT3L, r4
 	mtspr	IBAT3U, r3
+#endif
 	isync
 
 	/* DBAT 3 */
+#if ((CFG_DBAT3L == 0) && (CFG_DBAT3U == 0))
+	mtspr	DBAT3L, r0
+	mtspr	DBAT3U, r0
+#else
 	addis	r4, r0, CFG_DBAT3L@h
 	ori	r4, r4, CFG_DBAT3L@l
 	addis	r3, r0, CFG_DBAT3U@h
 	ori	r3, r3, CFG_DBAT3U@l
 	mtspr	DBAT3L, r4
 	mtspr	DBAT3U, r3
+#endif
 	isync
 
 #if (CFG_HID2 & HID2_HBE)
 	/* IBAT 4 */
+#if ((CFG_IBAT4L == 0) && (CFG_IBAT4U == 0))
+	mtspr	IBAT4L, r0
+	mtspr	IBAT4U, r0
+#else
 	addis   r4, r0, CFG_IBAT4L@h
 	ori     r4, r4, CFG_IBAT4L@l
 	addis   r3, r0, CFG_IBAT4U@h
 	ori     r3, r3, CFG_IBAT4U@l
 	mtspr   IBAT4L, r4
 	mtspr   IBAT4U, r3
+#endif
 	isync
 
 	/* DBAT 4 */
+#if ((CFG_DBAT4L == 0) && (CFG_DBAT4U == 0))
+	mtspr	DBAT4L, r0
+	mtspr	DBAT4U, r0
+#else
 	addis   r4, r0, CFG_DBAT4L@h
 	ori     r4, r4, CFG_DBAT4L@l
 	addis   r3, r0, CFG_DBAT4U@h
 	ori     r3, r3, CFG_DBAT4U@l
 	mtspr   DBAT4L, r4
 	mtspr   DBAT4U, r3
+#endif
 	isync
 
 	/* IBAT 5 */
+#if ((CFG_IBAT5L == 0) && (CFG_IBAT5U == 0))
+	mtspr	IBAT5L, r0
+	mtspr	IBAT5U, r0
+#else
 	addis   r4, r0, CFG_IBAT5L@h
 	ori     r4, r4, CFG_IBAT5L@l
 	addis   r3, r0, CFG_IBAT5U@h
 	ori     r3, r3, CFG_IBAT5U@l
 	mtspr   IBAT5L, r4
 	mtspr   IBAT5U, r3
+#endif
 	isync
 
 	/* DBAT 5 */
+#if ((CFG_DBAT5L == 0) && (CFG_DBAT5U == 0))
+	mtspr	DBAT5L, r0
+	mtspr	DBAT5U, r0
+#else
 	addis   r4, r0, CFG_DBAT5L@h
 	ori     r4, r4, CFG_DBAT5L@l
 	addis   r3, r0, CFG_DBAT5U@h
 	ori     r3, r3, CFG_DBAT5U@l
 	mtspr   DBAT5L, r4
 	mtspr   DBAT5U, r3
+#endif
 	isync
 
 	/* IBAT 6 */
+#if ((CFG_IBAT6L == 0) && (CFG_IBAT6U == 0))
+	mtspr	IBAT6L, r0
+	mtspr	IBAT6U, r0
+#else
 	addis   r4, r0, CFG_IBAT6L@h
 	ori     r4, r4, CFG_IBAT6L@l
 	addis   r3, r0, CFG_IBAT6U@h
 	ori     r3, r3, CFG_IBAT6U@l
 	mtspr   IBAT6L, r4
 	mtspr   IBAT6U, r3
+#endif
 	isync
 
 	/* DBAT 6 */
+#if ((CFG_DBAT6L == 0) && (CFG_DBAT6U == 0))
+	mtspr	DBAT6L, r0
+	mtspr	DBAT6U, r0
+#else
 	addis   r4, r0, CFG_DBAT6L@h
 	ori     r4, r4, CFG_DBAT6L@l
 	addis   r3, r0, CFG_DBAT6U@h
 	ori     r3, r3, CFG_DBAT6U@l
 	mtspr   DBAT6L, r4
 	mtspr   DBAT6U, r3
+#endif
 	isync
 
 	/* IBAT 7 */
+#if ((CFG_IBAT7L == 0) && (CFG_IBAT7U == 0))
+	mtspr	IBAT7L, r0
+	mtspr	IBAT7U, r0
+#else
 	addis   r4, r0, CFG_IBAT7L@h
 	ori     r4, r4, CFG_IBAT7L@l
 	addis   r3, r0, CFG_IBAT7U@h
 	ori     r3, r3, CFG_IBAT7U@l
 	mtspr   IBAT7L, r4
 	mtspr   IBAT7U, r3
+#endif
 	isync
 
 	/* DBAT 7 */
+#if ((CFG_DBAT7L == 0) && (CFG_DBAT7U == 0))
+	mtspr	DBAT7L, r0
+	mtspr	DBAT7U, r0
+#else
 	addis   r4, r0, CFG_DBAT7L@h
 	ori     r4, r4, CFG_DBAT7L@l
 	addis   r3, r0, CFG_DBAT7U@h
 	ori     r3, r3, CFG_DBAT7U@l
 	mtspr   DBAT7L, r4
 	mtspr   DBAT7U, r3
+#endif
 	isync
 #endif
 
@@ -744,15 +881,7 @@ setup_bats:
 
 	blr
 
-	.globl enable_addr_trans
-enable_addr_trans:
-	/* enable address translation */
-	mfmsr	r5
-	ori	r5, r5, (MSR_IR | MSR_DR)
-	mtmsr	r5
-	isync
-	blr
-
+#if !defined(CONFIG_NAND_SPL)
 	.globl disable_addr_trans
 disable_addr_trans:
 	/* disable address translation */
@@ -770,20 +899,6 @@ disable_addr_trans:
  * Note: requires that all cache bits in
  * HID0 are in the low half word.
  */
-	.globl	icache_enable
-icache_enable:
-	mfspr	r3, HID0
-	ori	r3, r3, HID0_ICE
-	lis	r4, 0
-	ori	r4, r4, HID0_ILOCK
-	andc	r3, r3, r4
-	ori	r4, r3, HID0_ICFI
-	isync
-	mtspr	HID0, r4    /* sets enable and invalidate,
clears lock */
-	isync
-	mtspr	HID0, r3	/* clears invalidate */
-	blr
-
 	.globl	icache_disable
 icache_disable:
 	mfspr	r3, HID0
@@ -802,20 +917,9 @@ icache_status:
 	mfspr	r3, HID0
 	rlwinm	r3, r3, (31 - HID0_ICE_SHIFT + 1), 31, 31
 	blr
+#endif /* CONFIG_NAND_SPL */
 
-	.globl	dcache_enable
-dcache_enable:
-	mfspr	r3, HID0
-	li	r5, HID0_DCFI|HID0_DLOCK
-	andc	r3, r3, r5
-	mtspr	HID0, r3		/* no invalidate, unlock */
-	ori	r3, r3, HID0_DCE
-	ori	r5, r3, HID0_DCFI
-	mtspr	HID0, r5		/* enable + invalidate */
-	mtspr	HID0, r3		/* enable */
-	sync
-	blr
-
+#if !defined(CONFIG_NAND_SPL)
 	.globl	dcache_disable
 dcache_disable:
 	mfspr	r3, HID0
@@ -835,11 +939,6 @@ dcache_status:
 	rlwinm	r3, r3, (31 - HID0_DCE_SHIFT + 1), 31, 31
 	blr
 
-	.globl get_pvr
-get_pvr:
-	mfspr	r3, PVR
-	blr
-
 	.globl	ppcDWstore
 ppcDWstore:
 	lfd	1, 0(r4)
@@ -853,6 +952,7 @@ ppcDWload:
 	blr
 

/*-------------------------------------------------------------------*/
+#endif /* CONFIG_NAND_SPL */
 
 /*
  * void relocate_code (addr_sp, gd, addr_moni)
@@ -872,10 +972,17 @@ relocate_code:
 	mr	r10, r5		/* Save copy of Destination Address */
 
 	mr	r3,  r5				/* Destination Address */
+
+#if !defined(CONFIG_NAND_SPL)
 	lis	r4, CFG_MONITOR_BASE@h		/* Source      Address */
 	ori	r4, r4, CFG_MONITOR_BASE@l
 	lwz	r5, GOT(__init_end)
 	sub	r5, r5, r4
+#else
+	lis	r4, CFG_NAND_BASE@h		/* Source      Address */
+	ori	r4, r4, CFG_NAND_BASE@l
+	li	r5, 0x1000			/* 4 kbyte bootloader */
+#endif /* CONFIG_NAND_SPL */
 	li	r6, CFG_CACHELINE_SIZE		/* Cache Line Size */
 
 	/*
@@ -968,8 +1075,72 @@ relocate_code:
 	mtlr	r0
 	blr
 
+#ifdef CFG_INIT_RAM_LOCK
+lock_ram_in_cache:
+	/* Allocate Initial RAM in data cache.
+	 */
+	lis	r3, (CFG_INIT_RAM_ADDR & ~31)@h
+	ori	r3, r3, (CFG_INIT_RAM_ADDR & ~31)@l
+	li	r2, ((CFG_INIT_RAM_END & ~31) + \
+		     (CFG_INIT_RAM_ADDR & 31) + 31) / 32
+	mtctr	r2
+1:
+	dcbz	r0, r3
+	addi	r3, r3, 32
+	bdnz	1b
+
+	/* Lock the data cache */
+	mfspr	r0, HID0
+	ori	r0, r0, 0x1000
+	sync
+	mtspr	HID0, r0
+	sync
+	blr
+
+.globl unlock_ram_in_cache
+unlock_ram_in_cache:
+	/* invalidate the INIT_RAM section */
+	lis	r3, (CFG_INIT_RAM_ADDR & ~31)@h
+	ori	r3, r3, (CFG_INIT_RAM_ADDR & ~31)@l
+	li	r2,512
+	mtctr	r2
+1:	icbi	r0, r3
+	dcbi	r0, r3
+	addi	r3, r3, 32
+	bdnz	1b
+	sync			/* Wait for all icbi to complete on bus	*/
+	isync
+
+	/* Unlock the data cache and invalidate it */
+	mfspr   r3, HID0
+	li	r5, HID0_DLOCK|HID0_DCFI
+	andc	r3, r3, r5		/* no invalidate, unlock */
+	ori	r5, r3, HID0_DCFI	/* invalidate, unlock */
+	mtspr	HID0, r5		/* invalidate, unlock */
+	mtspr	HID0, r3		/* no invalidate, unlock */
+	sync
+	blr
+#endif
+
 in_ram:
 
+#if defined(CONFIG_NAND_SPL)
+	/*
+	 * Adjust got table entries to fix pointers to strings
+	 */
+	li	r0,__got_entries@sectoff@l
+	la	r4,GOT(_GOT_TABLE_)
+	cmpwi	r0,0
+	mtctr	r0
+	addi	r4,r4,-4
+	beq	4f
+3:	lwzu	r3,4(r4)
+	add	r3,r3,r15
+	stw	r3,0(r4)
+	bdnz	3b
+4:
+
+#else
 	/*
 	 * Relocation Function, r14 point to got2+0x8000
 	 *
@@ -1035,8 +1206,11 @@ clear_bss:
 
 	mr	r3, r9		/* Global Data pointer		*/
 	mr	r4, r10		/* Destination Address		*/
+#endif /* CONFIG_NAND_SPL */
+
 	bl	board_init_r
 
+#if !defined(CONFIG_NAND_SPL)
 	/*
 	 * Copy exception vector code to low memory
 	 *
@@ -1120,53 +1294,7 @@ trap_reloc:
 
 	blr
 
-#ifdef CFG_INIT_RAM_LOCK
-lock_ram_in_cache:
-	/* Allocate Initial RAM in data cache.
-	 */
-	lis	r3, (CFG_INIT_RAM_ADDR & ~31)@h
-	ori	r3, r3, (CFG_INIT_RAM_ADDR & ~31)@l
-	li	r2, ((CFG_INIT_RAM_END & ~31) + \
-		     (CFG_INIT_RAM_ADDR & 31) + 31) / 32
-	mtctr	r2
-1:
-	dcbz	r0, r3
-	addi	r3, r3, 32
-	bdnz	1b
-
-	/* Lock the data cache */
-	mfspr	r0, HID0
-	ori	r0, r0, 0x1000
-	sync
-	mtspr	HID0, r0
-	sync
-	blr
-
-.globl unlock_ram_in_cache
-unlock_ram_in_cache:
-	/* invalidate the INIT_RAM section */
-	lis	r3, (CFG_INIT_RAM_ADDR & ~31)@h
-	ori	r3, r3, (CFG_INIT_RAM_ADDR & ~31)@l
-	li	r2,512
-	mtctr	r2
-1:	icbi	r0, r3
-	dcbi	r0, r3
-	addi	r3, r3, 32
-	bdnz	1b
-	sync			/* Wait for all icbi to complete on bus	*/
-	isync
-
-	/* Unlock the data cache and invalidate it */
-	mfspr   r3, HID0
-	li	r5, HID0_DLOCK|HID0_DCFI
-	andc	r3, r3, r5		/* no invalidate, unlock */
-	ori	r5, r3, HID0_DCFI	/* invalidate, unlock */
-	mtspr	HID0, r5		/* invalidate, unlock */
-	mtspr	HID0, r3		/* no invalidate, unlock */
-	sync
-	blr
-#endif
-
+#if !defined(CONFIG_NAND_U_BOOT)
 map_flash_by_law1:
 	/* When booting from ROM (Flash or EPROM), clear the */
 	/* Address Mask in OR0 so ROM appears everywhere     */
@@ -1245,3 +1373,5 @@ remap_flash_by_law0:
 	stw r4, LBLAWBAR1(r3)
 	stw r4, LBLAWAR1(r3) /* Off LBIU LAW1 */
 	blr
+#endif /* CONFIG_NAND_U_BOOT */
+#endif /* CONFIG_NAND_SPL */
-- 
1.5.5.1






More information about the U-Boot mailing list