cache_init.S 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472
  1. /*
 * Cache-handling routines for MIPS CPUs
  3. *
  4. * Copyright (c) 2003 Wolfgang Denk <wd@denx.de>
  5. *
  6. * SPDX-License-Identifier: GPL-2.0+
  7. */
  8. #include <asm-offsets.h>
  9. #include <config.h>
  10. #include <asm/asm.h>
  11. #include <asm/regdef.h>
  12. #include <asm/mipsregs.h>
  13. #include <asm/addrspace.h>
  14. #include <asm/cacheops.h>
  15. #include <asm/cm.h>
  16. #ifndef CONFIG_SYS_MIPS_CACHE_MODE
  17. #define CONFIG_SYS_MIPS_CACHE_MODE CONF_CM_CACHABLE_NONCOHERENT
  18. #endif
  19. #define INDEX_BASE CKSEG0
  20. .macro f_fill64 dst, offset, val
  21. LONG_S \val, (\offset + 0 * LONGSIZE)(\dst)
  22. LONG_S \val, (\offset + 1 * LONGSIZE)(\dst)
  23. LONG_S \val, (\offset + 2 * LONGSIZE)(\dst)
  24. LONG_S \val, (\offset + 3 * LONGSIZE)(\dst)
  25. LONG_S \val, (\offset + 4 * LONGSIZE)(\dst)
  26. LONG_S \val, (\offset + 5 * LONGSIZE)(\dst)
  27. LONG_S \val, (\offset + 6 * LONGSIZE)(\dst)
  28. LONG_S \val, (\offset + 7 * LONGSIZE)(\dst)
  29. #if LONGSIZE == 4
  30. LONG_S \val, (\offset + 8 * LONGSIZE)(\dst)
  31. LONG_S \val, (\offset + 9 * LONGSIZE)(\dst)
  32. LONG_S \val, (\offset + 10 * LONGSIZE)(\dst)
  33. LONG_S \val, (\offset + 11 * LONGSIZE)(\dst)
  34. LONG_S \val, (\offset + 12 * LONGSIZE)(\dst)
  35. LONG_S \val, (\offset + 13 * LONGSIZE)(\dst)
  36. LONG_S \val, (\offset + 14 * LONGSIZE)(\dst)
  37. LONG_S \val, (\offset + 15 * LONGSIZE)(\dst)
  38. #endif
  39. .endm
  40. .macro cache_loop curr, end, line_sz, op
  41. 10: cache \op, 0(\curr)
  42. PTR_ADDU \curr, \curr, \line_sz
  43. bne \curr, \end, 10b
  44. .endm
  45. .macro l1_info sz, line_sz, off
  46. .set push
  47. .set noat
  48. mfc0 $1, CP0_CONFIG, 1
  49. /* detect line size */
  50. srl \line_sz, $1, \off + MIPS_CONF1_DL_SHF - MIPS_CONF1_DA_SHF
  51. andi \line_sz, \line_sz, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHF)
  52. move \sz, zero
  53. beqz \line_sz, 10f
  54. li \sz, 2
  55. sllv \line_sz, \sz, \line_sz
  56. /* detect associativity */
  57. srl \sz, $1, \off + MIPS_CONF1_DA_SHF - MIPS_CONF1_DA_SHF
  58. andi \sz, \sz, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHF)
  59. addiu \sz, \sz, 1
  60. /* sz *= line_sz */
  61. mul \sz, \sz, \line_sz
  62. /* detect log32(sets) */
  63. srl $1, $1, \off + MIPS_CONF1_DS_SHF - MIPS_CONF1_DA_SHF
  64. andi $1, $1, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHF)
  65. addiu $1, $1, 1
  66. andi $1, $1, 0x7
  67. /* sz <<= log32(sets) */
  68. sllv \sz, \sz, $1
  69. /* sz *= 32 */
  70. li $1, 32
  71. mul \sz, \sz, $1
  72. 10:
  73. .set pop
  74. .endm
  75. /*
  76. * mips_cache_reset - low level initialisation of the primary caches
  77. *
  78. * This routine initialises the primary caches to ensure that they have good
  79. * parity. It must be called by the ROM before any cached locations are used
  80. * to prevent the possibility of data with bad parity being written to memory.
  81. *
  82. * To initialise the instruction cache it is essential that a source of data
  83. * with good parity is available. This routine will initialise an area of
  84. * memory starting at location zero to be used as a source of parity.
  85. *
  86. * Note that this function does not follow the standard calling convention &
  87. * may clobber typically callee-saved registers.
  88. *
  89. * RETURNS: N/A
  90. *
  91. */
  92. #define R_RETURN s0
  93. #define R_IC_SIZE s1
  94. #define R_IC_LINE s2
  95. #define R_DC_SIZE s3
  96. #define R_DC_LINE s4
  97. #define R_L2_SIZE s5
  98. #define R_L2_LINE s6
  99. #define R_L2_BYPASSED s7
  100. #define R_L2_L2C t8
  101. LEAF(mips_cache_reset)
  102. move R_RETURN, ra
  103. #ifdef CONFIG_MIPS_L2_CACHE
  104. /*
  105. * For there to be an L2 present, Config2 must be present. If it isn't
  106. * then we proceed knowing there's no L2 cache.
  107. */
  108. move R_L2_SIZE, zero
  109. move R_L2_LINE, zero
  110. move R_L2_BYPASSED, zero
  111. move R_L2_L2C, zero
  112. mfc0 t0, CP0_CONFIG, 1
  113. bgez t0, l2_probe_done
  114. /*
  115. * From MIPSr6 onwards the L2 cache configuration might not be reported
  116. * by Config2. The Config5.L2C bit indicates whether this is the case,
  117. * and if it is then we need knowledge of where else to look. For cores
  118. * from Imagination Technologies this is a CM GCR.
  119. */
  120. # if __mips_isa_rev >= 6
  121. /* Check that Config5 exists */
  122. mfc0 t0, CP0_CONFIG, 2
  123. bgez t0, l2_probe_cop0
  124. mfc0 t0, CP0_CONFIG, 3
  125. bgez t0, l2_probe_cop0
  126. mfc0 t0, CP0_CONFIG, 4
  127. bgez t0, l2_probe_cop0
  128. /* Check Config5.L2C is set */
  129. mfc0 t0, CP0_CONFIG, 5
  130. and R_L2_L2C, t0, MIPS_CONF5_L2C
  131. beqz R_L2_L2C, l2_probe_cop0
  132. /* Config5.L2C is set */
  133. # ifdef CONFIG_MIPS_CM
  134. /* The CM will provide L2 configuration */
  135. PTR_LI t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
  136. lw t1, GCR_L2_CONFIG(t0)
  137. bgez t1, l2_probe_done
  138. ext R_L2_LINE, t1, \
  139. GCR_L2_CONFIG_LINESZ_SHIFT, GCR_L2_CONFIG_LINESZ_BITS
  140. beqz R_L2_LINE, l2_probe_done
  141. li t2, 2
  142. sllv R_L2_LINE, t2, R_L2_LINE
  143. ext t2, t1, GCR_L2_CONFIG_ASSOC_SHIFT, GCR_L2_CONFIG_ASSOC_BITS
  144. addiu t2, t2, 1
  145. mul R_L2_SIZE, R_L2_LINE, t2
  146. ext t2, t1, GCR_L2_CONFIG_SETSZ_SHIFT, GCR_L2_CONFIG_SETSZ_BITS
  147. sllv R_L2_SIZE, R_L2_SIZE, t2
  148. li t2, 64
  149. mul R_L2_SIZE, R_L2_SIZE, t2
  150. /* Bypass the L2 cache so that we can init the L1s early */
  151. or t1, t1, GCR_L2_CONFIG_BYPASS
  152. sw t1, GCR_L2_CONFIG(t0)
  153. sync
  154. li R_L2_BYPASSED, 1
  155. /* Zero the L2 tag registers */
  156. sw zero, GCR_L2_TAG_ADDR(t0)
  157. sw zero, GCR_L2_TAG_ADDR_UPPER(t0)
  158. sw zero, GCR_L2_TAG_STATE(t0)
  159. sw zero, GCR_L2_TAG_STATE_UPPER(t0)
  160. sw zero, GCR_L2_DATA(t0)
  161. sw zero, GCR_L2_DATA_UPPER(t0)
  162. sync
  163. # else
  164. /* We don't know how to retrieve L2 configuration on this system */
  165. # endif
  166. b l2_probe_done
  167. # endif
  168. /*
  169. * For pre-r6 systems, or r6 systems with Config5.L2C==0, probe the L2
  170. * cache configuration from the cop0 Config2 register.
  171. */
  172. l2_probe_cop0:
  173. mfc0 t0, CP0_CONFIG, 2
  174. srl R_L2_LINE, t0, MIPS_CONF2_SL_SHF
  175. andi R_L2_LINE, R_L2_LINE, MIPS_CONF2_SL >> MIPS_CONF2_SL_SHF
  176. beqz R_L2_LINE, l2_probe_done
  177. li t1, 2
  178. sllv R_L2_LINE, t1, R_L2_LINE
  179. srl t1, t0, MIPS_CONF2_SA_SHF
  180. andi t1, t1, MIPS_CONF2_SA >> MIPS_CONF2_SA_SHF
  181. addiu t1, t1, 1
  182. mul R_L2_SIZE, R_L2_LINE, t1
  183. srl t1, t0, MIPS_CONF2_SS_SHF
  184. andi t1, t1, MIPS_CONF2_SS >> MIPS_CONF2_SS_SHF
  185. sllv R_L2_SIZE, R_L2_SIZE, t1
  186. li t1, 64
  187. mul R_L2_SIZE, R_L2_SIZE, t1
  188. /* Attempt to bypass the L2 so that we can init the L1s early */
  189. or t0, t0, MIPS_CONF2_L2B
  190. mtc0 t0, CP0_CONFIG, 2
  191. ehb
  192. mfc0 t0, CP0_CONFIG, 2
  193. and R_L2_BYPASSED, t0, MIPS_CONF2_L2B
  194. /* Zero the L2 tag registers */
  195. mtc0 zero, CP0_TAGLO, 4
  196. ehb
  197. l2_probe_done:
  198. #endif
  199. #ifndef CONFIG_SYS_CACHE_SIZE_AUTO
  200. li R_IC_SIZE, CONFIG_SYS_ICACHE_SIZE
  201. li R_IC_LINE, CONFIG_SYS_ICACHE_LINE_SIZE
  202. #else
  203. l1_info R_IC_SIZE, R_IC_LINE, MIPS_CONF1_IA_SHF
  204. #endif
  205. #ifndef CONFIG_SYS_CACHE_SIZE_AUTO
  206. li R_DC_SIZE, CONFIG_SYS_DCACHE_SIZE
  207. li R_DC_LINE, CONFIG_SYS_DCACHE_LINE_SIZE
  208. #else
  209. l1_info R_DC_SIZE, R_DC_LINE, MIPS_CONF1_DA_SHF
  210. #endif
  211. #ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
  212. /* Determine the largest L1 cache size */
  213. #ifndef CONFIG_SYS_CACHE_SIZE_AUTO
  214. #if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
  215. li v0, CONFIG_SYS_ICACHE_SIZE
  216. #else
  217. li v0, CONFIG_SYS_DCACHE_SIZE
  218. #endif
  219. #else
  220. move v0, R_IC_SIZE
  221. sltu t1, R_IC_SIZE, R_DC_SIZE
  222. movn v0, R_DC_SIZE, t1
  223. #endif
  224. /*
  225. * Now clear that much memory starting from zero.
  226. */
  227. PTR_LI a0, CKSEG1
  228. PTR_ADDU a1, a0, v0
  229. 2: PTR_ADDIU a0, 64
  230. f_fill64 a0, -64, zero
  231. bne a0, a1, 2b
  232. #endif /* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD */
  233. #ifdef CONFIG_MIPS_L2_CACHE
  234. /*
  235. * If the L2 is bypassed, init the L1 first so that we can execute the
  236. * rest of the cache initialisation using the L1 instruction cache.
  237. */
  238. bnez R_L2_BYPASSED, l1_init
  239. l2_init:
  240. PTR_LI t0, INDEX_BASE
  241. PTR_ADDU t1, t0, R_L2_SIZE
  242. 1: cache INDEX_STORE_TAG_SD, 0(t0)
  243. PTR_ADDU t0, t0, R_L2_LINE
  244. bne t0, t1, 1b
  245. /*
  246. * If the L2 was bypassed then we already initialised the L1s before
  247. * the L2, so we are now done.
  248. */
  249. bnez R_L2_BYPASSED, l2_unbypass
  250. #endif
  251. /*
  252. * The TagLo registers used depend upon the CPU implementation, but the
  253. * architecture requires that it is safe for software to write to both
  254. * TagLo selects 0 & 2 covering supported cases.
  255. */
  256. l1_init:
  257. mtc0 zero, CP0_TAGLO
  258. mtc0 zero, CP0_TAGLO, 2
  259. ehb
  260. /*
  261. * The caches are probably in an indeterminate state, so we force good
  262. * parity into them by doing an invalidate for each line. If
  263. * CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD is set then we'll proceed to
  264. * perform a load/fill & a further invalidate for each line, assuming
  265. * that the bottom of RAM (having just been cleared) will generate good
  266. * parity for the cache.
  267. */
  268. /*
  269. * Initialize the I-cache first,
  270. */
  271. blez R_IC_SIZE, 1f
  272. PTR_LI t0, INDEX_BASE
  273. PTR_ADDU t1, t0, R_IC_SIZE
  274. /* clear tag to invalidate */
  275. cache_loop t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
  276. #ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
  277. /* fill once, so data field parity is correct */
  278. PTR_LI t0, INDEX_BASE
  279. cache_loop t0, t1, R_IC_LINE, FILL
  280. /* invalidate again - prudent but not strictly neccessary */
  281. PTR_LI t0, INDEX_BASE
  282. cache_loop t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
  283. #endif
  284. /* Enable use of the I-cache by setting Config.K0 */
  285. sync
  286. mfc0 t0, CP0_CONFIG
  287. li t1, CONFIG_SYS_MIPS_CACHE_MODE
  288. #if __mips_isa_rev >= 2
  289. ins t0, t1, 0, 3
  290. #else
  291. ori t0, t0, CONF_CM_CMASK
  292. xori t0, t0, CONF_CM_CMASK
  293. or t0, t0, t1
  294. #endif
  295. mtc0 t0, CP0_CONFIG
  296. /*
  297. * then initialize D-cache.
  298. */
  299. 1: blez R_DC_SIZE, 3f
  300. PTR_LI t0, INDEX_BASE
  301. PTR_ADDU t1, t0, R_DC_SIZE
  302. /* clear all tags */
  303. cache_loop t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
  304. #ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
  305. /* load from each line (in cached space) */
  306. PTR_LI t0, INDEX_BASE
  307. 2: LONG_L zero, 0(t0)
  308. PTR_ADDU t0, R_DC_LINE
  309. bne t0, t1, 2b
  310. /* clear all tags */
  311. PTR_LI t0, INDEX_BASE
  312. cache_loop t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
  313. #endif
  314. 3:
  315. #ifdef CONFIG_MIPS_L2_CACHE
  316. /* If the L2 isn't bypassed then we're done */
  317. beqz R_L2_BYPASSED, return
  318. /* The L2 is bypassed - go initialise it */
  319. b l2_init
  320. l2_unbypass:
  321. # if __mips_isa_rev >= 6
  322. beqz R_L2_L2C, 1f
  323. li t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
  324. lw t1, GCR_L2_CONFIG(t0)
  325. xor t1, t1, GCR_L2_CONFIG_BYPASS
  326. sw t1, GCR_L2_CONFIG(t0)
  327. sync
  328. ehb
  329. b 2f
  330. # endif
  331. 1: mfc0 t0, CP0_CONFIG, 2
  332. xor t0, t0, MIPS_CONF2_L2B
  333. mtc0 t0, CP0_CONFIG, 2
  334. ehb
  335. 2:
  336. # ifdef CONFIG_MIPS_CM
  337. /* Config3 must exist for a CM to be present */
  338. mfc0 t0, CP0_CONFIG, 1
  339. bgez t0, 2f
  340. mfc0 t0, CP0_CONFIG, 2
  341. bgez t0, 2f
  342. /* Check Config3.CMGCR to determine CM presence */
  343. mfc0 t0, CP0_CONFIG, 3
  344. and t0, t0, MIPS_CONF3_CMGCR
  345. beqz t0, 2f
  346. /* Change Config.K0 to a coherent CCA */
  347. mfc0 t0, CP0_CONFIG
  348. li t1, CONF_CM_CACHABLE_COW
  349. #if __mips_isa_rev >= 2
  350. ins t0, t1, 0, 3
  351. #else
  352. ori t0, t0, CONF_CM_CMASK
  353. xori t0, t0, CONF_CM_CMASK
  354. or t0, t0, t1
  355. #endif
  356. mtc0 t0, CP0_CONFIG
  357. /*
  358. * Join the coherent domain such that the caches of this core are kept
  359. * coherent with those of other cores.
  360. */
  361. PTR_LI t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
  362. lw t1, GCR_REV(t0)
  363. li t2, GCR_REV_CM3
  364. li t3, GCR_Cx_COHERENCE_EN
  365. bge t1, t2, 1f
  366. li t3, GCR_Cx_COHERENCE_DOM_EN
  367. 1: sw t3, GCR_Cx_COHERENCE(t0)
  368. ehb
  369. 2:
  370. # endif
  371. #endif
  372. return:
  373. /* Ensure all cache operations complete before returning */
  374. sync
  375. jr ra
  376. END(mips_cache_reset)
  377. /*
  378. * dcache_status - get cache status
  379. *
  380. * RETURNS: 0 - cache disabled; 1 - cache enabled
  381. *
  382. */
  383. LEAF(dcache_status)
  384. mfc0 t0, CP0_CONFIG
  385. li t1, CONF_CM_UNCACHED
  386. andi t0, t0, CONF_CM_CMASK
  387. move v0, zero
  388. beq t0, t1, 2f
  389. li v0, 1
  390. 2: jr ra
  391. END(dcache_status)
  392. /*
  393. * dcache_disable - disable cache
  394. *
  395. * RETURNS: N/A
  396. *
  397. */
  398. LEAF(dcache_disable)
  399. mfc0 t0, CP0_CONFIG
  400. li t1, -8
  401. and t0, t0, t1
  402. ori t0, t0, CONF_CM_UNCACHED
  403. mtc0 t0, CP0_CONFIG
  404. jr ra
  405. END(dcache_disable)
  406. /*
  407. * dcache_enable - enable cache
  408. *
  409. * RETURNS: N/A
  410. *
  411. */
  412. LEAF(dcache_enable)
  413. mfc0 t0, CP0_CONFIG
  414. ori t0, CONF_CM_CMASK
  415. xori t0, CONF_CM_CMASK
  416. ori t0, CONFIG_SYS_MIPS_CACHE_MODE
  417. mtc0 t0, CP0_CONFIG
  418. jr ra
  419. END(dcache_enable)