/*
 * (C) Copyright 2008 - 2013 Tensilica Inc.
 * (C) Copyright 2014 - 2016 Cadence Design Systems Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <config.h>
#include <asm/asmmacro.h>
#include <asm/cacheasm.h>
#include <asm/regs.h>
#include <asm/arch/tie.h>
#include <asm-offsets.h>

/*
 * Offsets into the pt_regs structure.
 * Make sure these always match with the structure defined in ptrace.h!
 */

#define PT_PC		0
#define PT_PS		4
#define PT_DEPC		8
#define PT_EXCCAUSE	12
#define PT_EXCVADDR	16
#define PT_DEBUGCAUSE	20
#define PT_WMASK	24
#define PT_LBEG		28
#define PT_LEND		32
#define PT_LCOUNT	36
#define PT_SAR		40
#define PT_WINDOWBASE	44
#define PT_WINDOWSTART	48
#define PT_SYSCALL	52
#define PT_ICOUNTLEVEL	56
#define PT_RESERVED	60
#define PT_AREG		64
#define PT_SIZE		(64 + 64)

/*
 * Cache attributes are different for full MMU and region protection.
 */

#if XCHAL_HAVE_PTP_MMU
#define CA_WRITEBACK	(0x7)
#else
#define CA_WRITEBACK	(0x4)
#endif

/*
 * Reset vector.
 * Only a trampoline to jump to _start.
 * (Note that we have to mark the section writable as the section contains
 * a relocatable literal.)
 */

	.section .ResetVector.text, "awx"
	.global _ResetVector
_ResetVector:

	j	1f

	.align 4
2:	.long	_start
1:	l32r	a2, 2b
	jx	a2

/*
 * Processor initialization. We still run in ROM space.
 *
 * NOTE: Running in ROM
 *  For Xtensa, we currently don't run code from ROM but unpack the data
 *  immediately to memory. This requires, for example, that DDR has been
 *  set up before running U-Boot. (See also comments inline for ways to
 *  change it.)
 */

	.section .reset.text, "ax"
	.global _start
	.align 4
_start:
	/* Keep a0 = 0 for various initializations */

	movi	a0, 0

	/*
	 * For full MMU cores, put the page table at an unmapped virtual
	 * address. This ensures that accesses outside the static maps
	 * result in miss exceptions rather than random behaviour.
	 */

#if XCHAL_HAVE_PTP_MMU
	wsr	a0, PTEVADDR
#endif

	/* Disable dbreak debug exceptions */

#if XCHAL_HAVE_DEBUG && XCHAL_NUM_DBREAK > 0
	.set	_index, 0
	.rept	XCHAL_NUM_DBREAK
	wsr	a0, DBREAKC + _index
	.set	_index, _index + 1
	.endr
#endif

	/* Reset windowbase and windowstart */

#if XCHAL_HAVE_WINDOWED
	movi	a3, 1
	wsr	a3, windowstart
	wsr	a0, windowbase
	rsync
	movi	a0, 0			# windowbase might have changed
#endif

	/*
	 * Vecbase in the bitstream may differ from the header files;
	 * set or check it.
	 */

#if XCHAL_HAVE_VECBASE
	movi	a3, XCHAL_VECBASE_RESET_VADDR	# VECBASE reset value
	wsr	a3, VECBASE
#endif

#if XCHAL_HAVE_LOOPS
	/* Disable loops */

	wsr	a0, LCOUNT
#endif

	/* Set PS.WOE = 0, PS.EXCM = 0 (for loop), PS.INTLEVEL = EXCM level */

#if XCHAL_HAVE_XEA1
	movi	a2, 1
#else
	movi	a2, XCHAL_EXCM_LEVEL
#endif
	wsr	a2, PS
	rsync

	/* Unlock and invalidate caches */

	___unlock_dcache_all a2, a3
	___invalidate_dcache_all a2, a3
	___unlock_icache_all a2, a3
	___invalidate_icache_all a2, a3

	isync

	/* Unpack data sections */

	movi	a2, __reloc_table_start
	movi	a3, __reloc_table_end

1:	beq	a2, a3, 3f	# no more entries?
	l32i	a4, a2, 0	# start destination (in RAM)
	l32i	a5, a2, 4	# end destination (in RAM)
	l32i	a6, a2, 8	# start source (in ROM)
	addi	a2, a2, 12	# next entry
	beq	a4, a5, 1b	# skip, empty entry
	beq	a4, a6, 1b	# skip, source and destination are the same

	/*
	 * If there's a memory protection option with 512MB TLB regions
	 * and cache attributes in the TLB entries, and caching is not
	 * inhibited, enable the data/instruction caches for the
	 * relocated image.
	 */
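
	/*
	 * A sketch of the scheme used below (the only assumption beyond
	 * the code itself is the CA encoding taken from CA_WRITEBACK
	 * above): with a spanning TLB way, each 512MB region is covered
	 * by one TLB entry whose low four bits hold the cache attribute.
	 * Rounding the destination address (a4) down to its 512MB region
	 * base and adding XCHAL_SPANNING_WAY forms the address/way
	 * argument for the TLB instructions; the entry is then read, its
	 * cache attribute field cleared, and rewritten with CA_WRITEBACK
	 * so that accesses to the region are cached.
	 */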
#if XCHAL_HAVE_SPANNING_WAY && \
	(!defined(CONFIG_SYS_DCACHE_OFF) || \
	 !defined(CONFIG_SYS_ICACHE_OFF))
	srli	a7, a4, 29
	slli	a7, a7, 29	# round destination down to its 512MB region base
	addi	a7, a7, XCHAL_SPANNING_WAY	# select the spanning TLB way
#ifndef CONFIG_SYS_DCACHE_OFF
	rdtlb1	a8, a7		# read the data TLB entry for this region
	srli	a8, a8, 4
	slli	a8, a8, 4	# clear the cache attribute field
	addi	a8, a8, CA_WRITEBACK
	wdtlb	a8, a7		# rewrite it with writeback caching enabled
#endif
#ifndef CONFIG_SYS_ICACHE_OFF
	ritlb1	a8, a7		# same for the instruction TLB entry
	srli	a8, a8, 4
	slli	a8, a8, 4
	addi	a8, a8, CA_WRITEBACK
	witlb	a8, a7
#endif
	isync
#endif

	/* Copy the payload one word at a time */

2:	l32i	a7, a6, 0	# load from source
	addi	a6, a6, 4
	s32i	a7, a4, 0	# store to destination
	addi	a4, a4, 4
	bltu	a4, a5, 2b	# until end of destination reached
	j	1b		# next relocation table entry

3:	/* All code and initialized data segments have been copied */

	/* Setup PS, PS.WOE = 1, PS.EXCM = 0, PS.INTLEVEL = EXCM level. */

#if __XTENSA_CALL0_ABI__
	movi	a2, XCHAL_EXCM_LEVEL
#else
	movi	a2, (1 << PS_WOE_BIT) | XCHAL_EXCM_LEVEL
#endif
	wsr	a2, PS
	rsync
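
/*
 * For reference, a sketch of the PS value built above for the windowed
 * ABI (bit positions per the Xtensa ISA; PS_WOE_BIT is assumed to be
 * defined as 18 in asm/regs.h):
 *
 *	PS.INTLEVEL = XCHAL_EXCM_LEVEL	bits 3..0, masks low-priority interrupts
 *	PS.EXCM     = 0			bit 4, normal (non-exception) mode
 *	PS.WOE      = 1			bit 18, window overflow exceptions enabled
 *
 * The CALL0 ABI branch leaves PS.WOE at 0, as it does not use register
 * windows.
 */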