/*
 * OMAP44xx EMIF header
 *
 * Copyright (C) 2009-2010 Texas Instruments, Inc.
 *
 * Aneesh V <aneesh@ti.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef _EMIF_H_
#define _EMIF_H_
#include <asm/types.h>
#include <common.h>
#include <asm/io.h>
/* Base address */
#define EMIF1_BASE 0x4c000000
#define EMIF2_BASE 0x4d000000
#define EMIF_4D 0x4
#define EMIF_4D5 0x5
/* Registers shifts, masks and values */
/* EMIF_MOD_ID_REV */
#define EMIF_REG_SCHEME_SHIFT 30
#define EMIF_REG_SCHEME_MASK (0x3 << 30)
#define EMIF_REG_MODULE_ID_SHIFT 16
#define EMIF_REG_MODULE_ID_MASK (0xfff << 16)
#define EMIF_REG_RTL_VERSION_SHIFT 11
#define EMIF_REG_RTL_VERSION_MASK (0x1f << 11)
#define EMIF_REG_MAJOR_REVISION_SHIFT 8
#define EMIF_REG_MAJOR_REVISION_MASK (0x7 << 8)
#define EMIF_REG_MINOR_REVISION_SHIFT 0
#define EMIF_REG_MINOR_REVISION_MASK (0x3f << 0)
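
/*
 * Illustrative sketch (not part of the original TI header): every register
 * field below follows the same _SHIFT/_MASK convention, so a field is read
 * by masking first and shifting second. The helper name is hypothetical.
 */
static inline u32 emif_get_module_id(u32 mod_id_rev)
{
        /* Extract the MODULE_ID field from an EMIF_MOD_ID_REV value */
        return (mod_id_rev & EMIF_REG_MODULE_ID_MASK) >>
                EMIF_REG_MODULE_ID_SHIFT;
}
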
/* STATUS */
#define EMIF_REG_BE_SHIFT 31
#define EMIF_REG_BE_MASK (1 << 31)
#define EMIF_REG_DUAL_CLK_MODE_SHIFT 30
#define EMIF_REG_DUAL_CLK_MODE_MASK (1 << 30)
#define EMIF_REG_FAST_INIT_SHIFT 29
#define EMIF_REG_FAST_INIT_MASK (1 << 29)
#define EMIF_REG_LEVELING_TO_SHIFT 4
#define EMIF_REG_LEVELING_TO_MASK (7 << 4)
#define EMIF_REG_PHY_DLL_READY_SHIFT 2
#define EMIF_REG_PHY_DLL_READY_MASK (1 << 2)
/* SDRAM_CONFIG */
#define EMIF_REG_SDRAM_TYPE_SHIFT 29
#define EMIF_REG_SDRAM_TYPE_MASK (0x7 << 29)
#define EMIF_REG_SDRAM_TYPE_DDR1 0
#define EMIF_REG_SDRAM_TYPE_LPDDR1 1
#define EMIF_REG_SDRAM_TYPE_DDR2 2
#define EMIF_REG_SDRAM_TYPE_DDR3 3
#define EMIF_REG_SDRAM_TYPE_LPDDR2_S4 4
#define EMIF_REG_SDRAM_TYPE_LPDDR2_S2 5
#define EMIF_REG_IBANK_POS_SHIFT 27
#define EMIF_REG_IBANK_POS_MASK (0x3 << 27)
#define EMIF_REG_DDR_TERM_SHIFT 24
#define EMIF_REG_DDR_TERM_MASK (0x7 << 24)
#define EMIF_REG_DDR2_DDQS_SHIFT 23
#define EMIF_REG_DDR2_DDQS_MASK (1 << 23)
#define EMIF_REG_DYN_ODT_SHIFT 21
#define EMIF_REG_DYN_ODT_MASK (0x3 << 21)
#define EMIF_REG_DDR_DISABLE_DLL_SHIFT 20
#define EMIF_REG_DDR_DISABLE_DLL_MASK (1 << 20)
#define EMIF_REG_SDRAM_DRIVE_SHIFT 18
#define EMIF_REG_SDRAM_DRIVE_MASK (0x3 << 18)
#define EMIF_REG_CWL_SHIFT 16
#define EMIF_REG_CWL_MASK (0x3 << 16)
#define EMIF_REG_NARROW_MODE_SHIFT 14
#define EMIF_REG_NARROW_MODE_MASK (0x3 << 14)
#define EMIF_REG_CL_SHIFT 10
#define EMIF_REG_CL_MASK (0xf << 10)
#define EMIF_REG_ROWSIZE_SHIFT 7
#define EMIF_REG_ROWSIZE_MASK (0x7 << 7)
#define EMIF_REG_IBANK_SHIFT 4
#define EMIF_REG_IBANK_MASK (0x7 << 4)
#define EMIF_REG_EBANK_SHIFT 3
#define EMIF_REG_EBANK_MASK (1 << 3)
#define EMIF_REG_PAGESIZE_SHIFT 0
#define EMIF_REG_PAGESIZE_MASK (0x7 << 0)
/* SDRAM_CONFIG_2 */
#define EMIF_REG_CS1NVMEN_SHIFT 30
#define EMIF_REG_CS1NVMEN_MASK (1 << 30)
#define EMIF_REG_EBANK_POS_SHIFT 27
#define EMIF_REG_EBANK_POS_MASK (1 << 27)
#define EMIF_REG_RDBNUM_SHIFT 4
#define EMIF_REG_RDBNUM_MASK (0x3 << 4)
#define EMIF_REG_RDBSIZE_SHIFT 0
#define EMIF_REG_RDBSIZE_MASK (0x7 << 0)
/* SDRAM_REF_CTRL */
#define EMIF_REG_INITREF_DIS_SHIFT 31
#define EMIF_REG_INITREF_DIS_MASK (1 << 31)
#define EMIF_REG_SRT_SHIFT 29
#define EMIF_REG_SRT_MASK (1 << 29)
#define EMIF_REG_ASR_SHIFT 28
#define EMIF_REG_ASR_MASK (1 << 28)
#define EMIF_REG_PASR_SHIFT 24
#define EMIF_REG_PASR_MASK (0x7 << 24)
#define EMIF_REG_REFRESH_RATE_SHIFT 0
#define EMIF_REG_REFRESH_RATE_MASK (0xffff << 0)
/* SDRAM_REF_CTRL_SHDW */
#define EMIF_REG_REFRESH_RATE_SHDW_SHIFT 0
#define EMIF_REG_REFRESH_RATE_SHDW_MASK (0xffff << 0)
/* SDRAM_TIM_1 */
#define EMIF_REG_T_RP_SHIFT 25
#define EMIF_REG_T_RP_MASK (0xf << 25)
#define EMIF_REG_T_RCD_SHIFT 21
#define EMIF_REG_T_RCD_MASK (0xf << 21)
#define EMIF_REG_T_WR_SHIFT 17
#define EMIF_REG_T_WR_MASK (0xf << 17)
#define EMIF_REG_T_RAS_SHIFT 12
#define EMIF_REG_T_RAS_MASK (0x1f << 12)
#define EMIF_REG_T_RC_SHIFT 6
#define EMIF_REG_T_RC_MASK (0x3f << 6)
#define EMIF_REG_T_RRD_SHIFT 3
#define EMIF_REG_T_RRD_MASK (0x7 << 3)
#define EMIF_REG_T_WTR_SHIFT 0
#define EMIF_REG_T_WTR_MASK (0x7 << 0)
/* SDRAM_TIM_1_SHDW */
#define EMIF_REG_T_RP_SHDW_SHIFT 25
#define EMIF_REG_T_RP_SHDW_MASK (0xf << 25)
#define EMIF_REG_T_RCD_SHDW_SHIFT 21
#define EMIF_REG_T_RCD_SHDW_MASK (0xf << 21)
#define EMIF_REG_T_WR_SHDW_SHIFT 17
#define EMIF_REG_T_WR_SHDW_MASK (0xf << 17)
#define EMIF_REG_T_RAS_SHDW_SHIFT 12
#define EMIF_REG_T_RAS_SHDW_MASK (0x1f << 12)
#define EMIF_REG_T_RC_SHDW_SHIFT 6
#define EMIF_REG_T_RC_SHDW_MASK (0x3f << 6)
#define EMIF_REG_T_RRD_SHDW_SHIFT 3
#define EMIF_REG_T_RRD_SHDW_MASK (0x7 << 3)
#define EMIF_REG_T_WTR_SHDW_SHIFT 0
#define EMIF_REG_T_WTR_SHDW_MASK (0x7 << 0)
/* SDRAM_TIM_2 */
#define EMIF_REG_T_XP_SHIFT 28
#define EMIF_REG_T_XP_MASK (0x7 << 28)
#define EMIF_REG_T_ODT_SHIFT 25
#define EMIF_REG_T_ODT_MASK (0x7 << 25)
#define EMIF_REG_T_XSNR_SHIFT 16
#define EMIF_REG_T_XSNR_MASK (0x1ff << 16)
#define EMIF_REG_T_XSRD_SHIFT 6
#define EMIF_REG_T_XSRD_MASK (0x3ff << 6)
#define EMIF_REG_T_RTP_SHIFT 3
#define EMIF_REG_T_RTP_MASK (0x7 << 3)
#define EMIF_REG_T_CKE_SHIFT 0
#define EMIF_REG_T_CKE_MASK (0x7 << 0)
/* SDRAM_TIM_2_SHDW */
#define EMIF_REG_T_XP_SHDW_SHIFT 28
#define EMIF_REG_T_XP_SHDW_MASK (0x7 << 28)
#define EMIF_REG_T_ODT_SHDW_SHIFT 25
#define EMIF_REG_T_ODT_SHDW_MASK (0x7 << 25)
#define EMIF_REG_T_XSNR_SHDW_SHIFT 16
#define EMIF_REG_T_XSNR_SHDW_MASK (0x1ff << 16)
#define EMIF_REG_T_XSRD_SHDW_SHIFT 6
#define EMIF_REG_T_XSRD_SHDW_MASK (0x3ff << 6)
#define EMIF_REG_T_RTP_SHDW_SHIFT 3
#define EMIF_REG_T_RTP_SHDW_MASK (0x7 << 3)
#define EMIF_REG_T_CKE_SHDW_SHIFT 0
#define EMIF_REG_T_CKE_SHDW_MASK (0x7 << 0)
/* SDRAM_TIM_3 */
#define EMIF_REG_T_CKESR_SHIFT 21
#define EMIF_REG_T_CKESR_MASK (0x7 << 21)
#define EMIF_REG_ZQ_ZQCS_SHIFT 15
#define EMIF_REG_ZQ_ZQCS_MASK (0x3f << 15)
#define EMIF_REG_T_TDQSCKMAX_SHIFT 13
#define EMIF_REG_T_TDQSCKMAX_MASK (0x3 << 13)
#define EMIF_REG_T_RFC_SHIFT 4
#define EMIF_REG_T_RFC_MASK (0x1ff << 4)
#define EMIF_REG_T_RAS_MAX_SHIFT 0
#define EMIF_REG_T_RAS_MAX_MASK (0xf << 0)
/* SDRAM_TIM_3_SHDW */
#define EMIF_REG_T_CKESR_SHDW_SHIFT 21
#define EMIF_REG_T_CKESR_SHDW_MASK (0x7 << 21)
#define EMIF_REG_ZQ_ZQCS_SHDW_SHIFT 15
#define EMIF_REG_ZQ_ZQCS_SHDW_MASK (0x3f << 15)
#define EMIF_REG_T_TDQSCKMAX_SHDW_SHIFT 13
#define EMIF_REG_T_TDQSCKMAX_SHDW_MASK (0x3 << 13)
#define EMIF_REG_T_RFC_SHDW_SHIFT 4
#define EMIF_REG_T_RFC_SHDW_MASK (0x1ff << 4)
#define EMIF_REG_T_RAS_MAX_SHDW_SHIFT 0
#define EMIF_REG_T_RAS_MAX_SHDW_MASK (0xf << 0)
/* LPDDR2_NVM_TIM */
#define EMIF_REG_NVM_T_XP_SHIFT 28
#define EMIF_REG_NVM_T_XP_MASK (0x7 << 28)
#define EMIF_REG_NVM_T_WTR_SHIFT 24
#define EMIF_REG_NVM_T_WTR_MASK (0x7 << 24)
#define EMIF_REG_NVM_T_RP_SHIFT 20
#define EMIF_REG_NVM_T_RP_MASK (0xf << 20)
#define EMIF_REG_NVM_T_WRA_SHIFT 16
#define EMIF_REG_NVM_T_WRA_MASK (0xf << 16)
#define EMIF_REG_NVM_T_RRD_SHIFT 8
#define EMIF_REG_NVM_T_RRD_MASK (0xff << 8)
#define EMIF_REG_NVM_T_RCDMIN_SHIFT 0
#define EMIF_REG_NVM_T_RCDMIN_MASK (0xff << 0)
/* LPDDR2_NVM_TIM_SHDW */
#define EMIF_REG_NVM_T_XP_SHDW_SHIFT 28
#define EMIF_REG_NVM_T_XP_SHDW_MASK (0x7 << 28)
#define EMIF_REG_NVM_T_WTR_SHDW_SHIFT 24
#define EMIF_REG_NVM_T_WTR_SHDW_MASK (0x7 << 24)
#define EMIF_REG_NVM_T_RP_SHDW_SHIFT 20
#define EMIF_REG_NVM_T_RP_SHDW_MASK (0xf << 20)
#define EMIF_REG_NVM_T_WRA_SHDW_SHIFT 16
#define EMIF_REG_NVM_T_WRA_SHDW_MASK (0xf << 16)
#define EMIF_REG_NVM_T_RRD_SHDW_SHIFT 8
#define EMIF_REG_NVM_T_RRD_SHDW_MASK (0xff << 8)
#define EMIF_REG_NVM_T_RCDMIN_SHDW_SHIFT 0
#define EMIF_REG_NVM_T_RCDMIN_SHDW_MASK (0xff << 0)
/* PWR_MGMT_CTRL */
#define EMIF_REG_IDLEMODE_SHIFT 30
#define EMIF_REG_IDLEMODE_MASK (0x3 << 30)
#define EMIF_REG_PD_TIM_SHIFT 12
#define EMIF_REG_PD_TIM_MASK (0xf << 12)
#define EMIF_REG_DPD_EN_SHIFT 11
#define EMIF_REG_DPD_EN_MASK (1 << 11)
#define EMIF_REG_LP_MODE_SHIFT 8
#define EMIF_REG_LP_MODE_MASK (0x7 << 8)
#define EMIF_REG_SR_TIM_SHIFT 4
#define EMIF_REG_SR_TIM_MASK (0xf << 4)
#define EMIF_REG_CS_TIM_SHIFT 0
#define EMIF_REG_CS_TIM_MASK (0xf << 0)
/* PWR_MGMT_CTRL_SHDW */
#define EMIF_REG_PD_TIM_SHDW_SHIFT 12
#define EMIF_REG_PD_TIM_SHDW_MASK (0xf << 12)
#define EMIF_REG_SR_TIM_SHDW_SHIFT 4
#define EMIF_REG_SR_TIM_SHDW_MASK (0xf << 4)
#define EMIF_REG_CS_TIM_SHDW_SHIFT 0
#define EMIF_REG_CS_TIM_SHDW_MASK (0xf << 0)
/* LPDDR2_MODE_REG_DATA */
#define EMIF_REG_VALUE_0_SHIFT 0
#define EMIF_REG_VALUE_0_MASK (0x7f << 0)
/* LPDDR2_MODE_REG_CFG */
#define EMIF_REG_CS_SHIFT 31
#define EMIF_REG_CS_MASK (1 << 31)
#define EMIF_REG_REFRESH_EN_SHIFT 30
#define EMIF_REG_REFRESH_EN_MASK (1 << 30)
#define EMIF_REG_ADDRESS_SHIFT 0
#define EMIF_REG_ADDRESS_MASK (0xff << 0)
/* OCP_CONFIG */
#define EMIF_REG_SYS_THRESH_MAX_SHIFT 24
#define EMIF_REG_SYS_THRESH_MAX_MASK (0xf << 24)
#define EMIF_REG_MPU_THRESH_MAX_SHIFT 20
#define EMIF_REG_MPU_THRESH_MAX_MASK (0xf << 20)
#define EMIF_REG_LL_THRESH_MAX_SHIFT 16
#define EMIF_REG_LL_THRESH_MAX_MASK (0xf << 16)
#define EMIF_REG_PR_OLD_COUNT_SHIFT 0
#define EMIF_REG_PR_OLD_COUNT_MASK (0xff << 0)
/* OCP_CFG_VAL_1 */
#define EMIF_REG_SYS_BUS_WIDTH_SHIFT 30
#define EMIF_REG_SYS_BUS_WIDTH_MASK (0x3 << 30)
#define EMIF_REG_LL_BUS_WIDTH_SHIFT 28
#define EMIF_REG_LL_BUS_WIDTH_MASK (0x3 << 28)
#define EMIF_REG_WR_FIFO_DEPTH_SHIFT 8
#define EMIF_REG_WR_FIFO_DEPTH_MASK (0xff << 8)
#define EMIF_REG_CMD_FIFO_DEPTH_SHIFT 0
#define EMIF_REG_CMD_FIFO_DEPTH_MASK (0xff << 0)
/* OCP_CFG_VAL_2 */
#define EMIF_REG_RREG_FIFO_DEPTH_SHIFT 16
#define EMIF_REG_RREG_FIFO_DEPTH_MASK (0xff << 16)
#define EMIF_REG_RSD_FIFO_DEPTH_SHIFT 8
#define EMIF_REG_RSD_FIFO_DEPTH_MASK (0xff << 8)
#define EMIF_REG_RCMD_FIFO_DEPTH_SHIFT 0
#define EMIF_REG_RCMD_FIFO_DEPTH_MASK (0xff << 0)
/* IODFT_TLGC */
#define EMIF_REG_TLEC_SHIFT 16
#define EMIF_REG_TLEC_MASK (0xffff << 16)
#define EMIF_REG_MT_SHIFT 14
#define EMIF_REG_MT_MASK (1 << 14)
#define EMIF_REG_ACT_CAP_EN_SHIFT 13
#define EMIF_REG_ACT_CAP_EN_MASK (1 << 13)
#define EMIF_REG_OPG_LD_SHIFT 12
#define EMIF_REG_OPG_LD_MASK (1 << 12)
#define EMIF_REG_RESET_PHY_SHIFT 10
#define EMIF_REG_RESET_PHY_MASK (1 << 10)
#define EMIF_REG_MMS_SHIFT 8
#define EMIF_REG_MMS_MASK (1 << 8)
#define EMIF_REG_MC_SHIFT 4
#define EMIF_REG_MC_MASK (0x3 << 4)
#define EMIF_REG_PC_SHIFT 1
#define EMIF_REG_PC_MASK (0x7 << 1)
#define EMIF_REG_TM_SHIFT 0
#define EMIF_REG_TM_MASK (1 << 0)
/* IODFT_CTRL_MISR_RSLT */
#define EMIF_REG_DQM_TLMR_SHIFT 16
#define EMIF_REG_DQM_TLMR_MASK (0x3ff << 16)
#define EMIF_REG_CTL_TLMR_SHIFT 0
#define EMIF_REG_CTL_TLMR_MASK (0x7ff << 0)
/* IODFT_ADDR_MISR_RSLT */
#define EMIF_REG_ADDR_TLMR_SHIFT 0
#define EMIF_REG_ADDR_TLMR_MASK (0x1fffff << 0)
/* IODFT_DATA_MISR_RSLT_1 */
#define EMIF_REG_DATA_TLMR_31_0_SHIFT 0
#define EMIF_REG_DATA_TLMR_31_0_MASK (0xffffffff << 0)
/* IODFT_DATA_MISR_RSLT_2 */
#define EMIF_REG_DATA_TLMR_63_32_SHIFT 0
#define EMIF_REG_DATA_TLMR_63_32_MASK (0xffffffff << 0)
/* IODFT_DATA_MISR_RSLT_3 */
#define EMIF_REG_DATA_TLMR_66_64_SHIFT 0
#define EMIF_REG_DATA_TLMR_66_64_MASK (0x7 << 0)
/* PERF_CNT_1 */
#define EMIF_REG_COUNTER1_SHIFT 0
#define EMIF_REG_COUNTER1_MASK (0xffffffff << 0)
/* PERF_CNT_2 */
#define EMIF_REG_COUNTER2_SHIFT 0
#define EMIF_REG_COUNTER2_MASK (0xffffffff << 0)
/* PERF_CNT_CFG */
#define EMIF_REG_CNTR2_MCONNID_EN_SHIFT 31
#define EMIF_REG_CNTR2_MCONNID_EN_MASK (1 << 31)
#define EMIF_REG_CNTR2_REGION_EN_SHIFT 30
#define EMIF_REG_CNTR2_REGION_EN_MASK (1 << 30)
#define EMIF_REG_CNTR2_CFG_SHIFT 16
#define EMIF_REG_CNTR2_CFG_MASK (0xf << 16)
#define EMIF_REG_CNTR1_MCONNID_EN_SHIFT 15
#define EMIF_REG_CNTR1_MCONNID_EN_MASK (1 << 15)
#define EMIF_REG_CNTR1_REGION_EN_SHIFT 14
#define EMIF_REG_CNTR1_REGION_EN_MASK (1 << 14)
#define EMIF_REG_CNTR1_CFG_SHIFT 0
#define EMIF_REG_CNTR1_CFG_MASK (0xf << 0)
/* PERF_CNT_SEL */
#define EMIF_REG_MCONNID2_SHIFT 24
#define EMIF_REG_MCONNID2_MASK (0xff << 24)
#define EMIF_REG_REGION_SEL2_SHIFT 16
#define EMIF_REG_REGION_SEL2_MASK (0x3 << 16)
#define EMIF_REG_MCONNID1_SHIFT 8
#define EMIF_REG_MCONNID1_MASK (0xff << 8)
#define EMIF_REG_REGION_SEL1_SHIFT 0
#define EMIF_REG_REGION_SEL1_MASK (0x3 << 0)
/* PERF_CNT_TIM */
#define EMIF_REG_TOTAL_TIME_SHIFT 0
#define EMIF_REG_TOTAL_TIME_MASK (0xffffffff << 0)
/* READ_IDLE_CTRL */
#define EMIF_REG_READ_IDLE_LEN_SHIFT 16
#define EMIF_REG_READ_IDLE_LEN_MASK (0xf << 16)
#define EMIF_REG_READ_IDLE_INTERVAL_SHIFT 0
#define EMIF_REG_READ_IDLE_INTERVAL_MASK (0x1ff << 0)
/* READ_IDLE_CTRL_SHDW */
#define EMIF_REG_READ_IDLE_LEN_SHDW_SHIFT 16
#define EMIF_REG_READ_IDLE_LEN_SHDW_MASK (0xf << 16)
#define EMIF_REG_READ_IDLE_INTERVAL_SHDW_SHIFT 0
#define EMIF_REG_READ_IDLE_INTERVAL_SHDW_MASK (0x1ff << 0)
/* IRQ_EOI */
#define EMIF_REG_EOI_SHIFT 0
#define EMIF_REG_EOI_MASK (1 << 0)
/* IRQSTATUS_RAW_SYS */
#define EMIF_REG_DNV_SYS_SHIFT 2
#define EMIF_REG_DNV_SYS_MASK (1 << 2)
#define EMIF_REG_TA_SYS_SHIFT 1
#define EMIF_REG_TA_SYS_MASK (1 << 1)
#define EMIF_REG_ERR_SYS_SHIFT 0
#define EMIF_REG_ERR_SYS_MASK (1 << 0)
/* IRQSTATUS_RAW_LL */
#define EMIF_REG_DNV_LL_SHIFT 2
#define EMIF_REG_DNV_LL_MASK (1 << 2)
#define EMIF_REG_TA_LL_SHIFT 1
#define EMIF_REG_TA_LL_MASK (1 << 1)
#define EMIF_REG_ERR_LL_SHIFT 0
#define EMIF_REG_ERR_LL_MASK (1 << 0)
/* IRQSTATUS_SYS */
/* IRQSTATUS_LL */
/* IRQENABLE_SET_SYS */
#define EMIF_REG_EN_DNV_SYS_SHIFT 2
#define EMIF_REG_EN_DNV_SYS_MASK (1 << 2)
#define EMIF_REG_EN_TA_SYS_SHIFT 1
#define EMIF_REG_EN_TA_SYS_MASK (1 << 1)
#define EMIF_REG_EN_ERR_SYS_SHIFT 0
#define EMIF_REG_EN_ERR_SYS_MASK (1 << 0)
/* IRQENABLE_SET_LL */
#define EMIF_REG_EN_DNV_LL_SHIFT 2
#define EMIF_REG_EN_DNV_LL_MASK (1 << 2)
#define EMIF_REG_EN_TA_LL_SHIFT 1
#define EMIF_REG_EN_TA_LL_MASK (1 << 1)
#define EMIF_REG_EN_ERR_LL_SHIFT 0
#define EMIF_REG_EN_ERR_LL_MASK (1 << 0)
/* IRQENABLE_CLR_SYS */
/* IRQENABLE_CLR_LL */
/* ZQ_CONFIG */
#define EMIF_REG_ZQ_CS1EN_SHIFT 31
#define EMIF_REG_ZQ_CS1EN_MASK (1 << 31)
#define EMIF_REG_ZQ_CS0EN_SHIFT 30
#define EMIF_REG_ZQ_CS0EN_MASK (1 << 30)
#define EMIF_REG_ZQ_DUALCALEN_SHIFT 29
#define EMIF_REG_ZQ_DUALCALEN_MASK (1 << 29)
#define EMIF_REG_ZQ_SFEXITEN_SHIFT 28
#define EMIF_REG_ZQ_SFEXITEN_MASK (1 << 28)
#define EMIF_REG_ZQ_ZQINIT_MULT_SHIFT 18
#define EMIF_REG_ZQ_ZQINIT_MULT_MASK (0x3 << 18)
#define EMIF_REG_ZQ_ZQCL_MULT_SHIFT 16
#define EMIF_REG_ZQ_ZQCL_MULT_MASK (0x3 << 16)
#define EMIF_REG_ZQ_REFINTERVAL_SHIFT 0
#define EMIF_REG_ZQ_REFINTERVAL_MASK (0xffff << 0)
/* TEMP_ALERT_CONFIG */
#define EMIF_REG_TA_CS1EN_SHIFT 31
#define EMIF_REG_TA_CS1EN_MASK (1 << 31)
#define EMIF_REG_TA_CS0EN_SHIFT 30
#define EMIF_REG_TA_CS0EN_MASK (1 << 30)
#define EMIF_REG_TA_SFEXITEN_SHIFT 28
#define EMIF_REG_TA_SFEXITEN_MASK (1 << 28)
#define EMIF_REG_TA_DEVWDT_SHIFT 26
#define EMIF_REG_TA_DEVWDT_MASK (0x3 << 26)
#define EMIF_REG_TA_DEVCNT_SHIFT 24
#define EMIF_REG_TA_DEVCNT_MASK (0x3 << 24)
#define EMIF_REG_TA_REFINTERVAL_SHIFT 0
#define EMIF_REG_TA_REFINTERVAL_MASK (0x3fffff << 0)
/* OCP_ERR_LOG */
#define EMIF_REG_MADDRSPACE_SHIFT 14
#define EMIF_REG_MADDRSPACE_MASK (0x3 << 14)
#define EMIF_REG_MBURSTSEQ_SHIFT 11
#define EMIF_REG_MBURSTSEQ_MASK (0x7 << 11)
#define EMIF_REG_MCMD_SHIFT 8
#define EMIF_REG_MCMD_MASK (0x7 << 8)
#define EMIF_REG_MCONNID_SHIFT 0
#define EMIF_REG_MCONNID_MASK (0xff << 0)
/* DDR_PHY_CTRL_1 */
#define EMIF_REG_DDR_PHY_CTRL_1_SHIFT 4
#define EMIF_REG_DDR_PHY_CTRL_1_MASK (0xfffffff << 4)
#define EMIF_REG_READ_LATENCY_SHIFT 0
#define EMIF_REG_READ_LATENCY_MASK (0xf << 0)
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHIFT 4
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_MASK (0xFF << 4)
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHIFT 12
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_MASK (0xFFFFF << 12)
/* DDR_PHY_CTRL_1_SHDW */
#define EMIF_REG_DDR_PHY_CTRL_1_SHDW_SHIFT 4
#define EMIF_REG_DDR_PHY_CTRL_1_SHDW_MASK (0xfffffff << 4)
#define EMIF_REG_READ_LATENCY_SHDW_SHIFT 0
#define EMIF_REG_READ_LATENCY_SHDW_MASK (0xf << 0)
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHDW_SHIFT 4
#define EMIF_REG_DLL_SLAVE_DLY_CTRL_SHDW_MASK (0xFF << 4)
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHDW_SHIFT 12
#define EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHDW_MASK (0xFFFFF << 12)
#define EMIF_DDR_PHY_CTRL_1_WRLVL_MASK_SHIFT 25
#define EMIF_DDR_PHY_CTRL_1_WRLVL_MASK_MASK (1 << 25)
#define EMIF_DDR_PHY_CTRL_1_RDLVLGATE_MASK_SHIFT 26
#define EMIF_DDR_PHY_CTRL_1_RDLVLGATE_MASK_MASK (1 << 26)
#define EMIF_DDR_PHY_CTRL_1_RDLVL_MASK_SHIFT 27
#define EMIF_DDR_PHY_CTRL_1_RDLVL_MASK_MASK (1 << 27)
/* DDR_PHY_CTRL_2 */
#define EMIF_REG_DDR_PHY_CTRL_2_SHIFT 0
#define EMIF_REG_DDR_PHY_CTRL_2_MASK (0xffffffff << 0)
/* EMIF_READ_WRITE_LEVELING_CONTROL */
#define EMIF_REG_RDWRLVLFULL_START_SHIFT 31
#define EMIF_REG_RDWRLVLFULL_START_MASK (1 << 31)
#define EMIF_REG_RDWRLVLINC_PRE_SHIFT 24
#define EMIF_REG_RDWRLVLINC_PRE_MASK (0x7F << 24)
#define EMIF_REG_RDLVLINC_INT_SHIFT 16
#define EMIF_REG_RDLVLINC_INT_MASK (0xFF << 16)
#define EMIF_REG_RDLVLGATEINC_INT_SHIFT 8
#define EMIF_REG_RDLVLGATEINC_INT_MASK (0xFF << 8)
#define EMIF_REG_WRLVLINC_INT_SHIFT 0
#define EMIF_REG_WRLVLINC_INT_MASK (0xFF << 0)
/* EMIF_READ_WRITE_LEVELING_RAMP_CONTROL */
#define EMIF_REG_RDWRLVL_EN_SHIFT 31
#define EMIF_REG_RDWRLVL_EN_MASK (1 << 31)
#define EMIF_REG_RDWRLVLINC_RMP_PRE_SHIFT 24
#define EMIF_REG_RDWRLVLINC_RMP_PRE_MASK (0x7F << 24)
#define EMIF_REG_RDLVLINC_RMP_INT_SHIFT 16
#define EMIF_REG_RDLVLINC_RMP_INT_MASK (0xFF << 16)
#define EMIF_REG_RDLVLGATEINC_RMP_INT_SHIFT 8
#define EMIF_REG_RDLVLGATEINC_RMP_INT_MASK (0xFF << 8)
#define EMIF_REG_WRLVLINC_RMP_INT_SHIFT 0
#define EMIF_REG_WRLVLINC_RMP_INT_MASK (0xFF << 0)
/* EMIF_READ_WRITE_LEVELING_RAMP_WINDOW */
#define EMIF_REG_RDWRLVLINC_RMP_WIN_SHIFT 0
#define EMIF_REG_RDWRLVLINC_RMP_WIN_MASK (0x1FFF << 0)
/* EMIF_PHY_CTRL_36 */
#define EMIF_REG_PHY_FIFO_WE_IN_MISALINED_CLR (1 << 8)
#define PHY_RDDQS_RATIO_REGS 5
#define PHY_FIFO_WE_SLAVE_RATIO_REGS 5
#define PHY_REG_WR_DQ_SLAVE_RATIO_REGS 10
/* Leveling fields */
#define DDR3_WR_LVL_INT 0x73
#define DDR3_RD_LVL_INT 0x33
#define DDR3_RD_LVL_GATE_INT 0x59
#define RD_RW_LVL_INC_PRE 0x0
#define DDR3_FULL_LVL (1 << EMIF_REG_RDWRLVL_EN_SHIFT)
#define DDR3_INC_LVL ((DDR3_WR_LVL_INT << EMIF_REG_WRLVLINC_INT_SHIFT) \
        | (DDR3_RD_LVL_GATE_INT << EMIF_REG_RDLVLGATEINC_INT_SHIFT) \
        | (DDR3_RD_LVL_INT << EMIF_REG_RDLVLINC_RMP_INT_SHIFT) \
        | (RD_RW_LVL_INC_PRE << EMIF_REG_RDWRLVLINC_RMP_PRE_SHIFT))
#define SDRAM_CONFIG_EXT_RD_LVL_11_SAMPLES 0x0000C1A7
#define SDRAM_CONFIG_EXT_RD_LVL_4_SAMPLES 0x000001A7
#define SDRAM_CONFIG_EXT_RD_LVL_11_SAMPLES_ES2 0x0000C1C7
/* DMM */
#define DMM_BASE 0x4E000040
/* Memory Adapter */
#define MA_BASE 0x482AF040
#define MA_PRIORITY 0x482A2000
#define MA_HIMEM_INTERLEAVE_UN_SHIFT 8
#define MA_HIMEM_INTERLEAVE_UN_MASK (1 << 8)
/* DMM_LISA_MAP */
#define EMIF_SYS_ADDR_SHIFT 24
#define EMIF_SYS_ADDR_MASK (0xff << 24)
#define EMIF_SYS_SIZE_SHIFT 20
#define EMIF_SYS_SIZE_MASK (0x7 << 20)
#define EMIF_SDRC_INTL_SHIFT 18
#define EMIF_SDRC_INTL_MASK (0x3 << 18)
#define EMIF_SDRC_ADDRSPC_SHIFT 16
#define EMIF_SDRC_ADDRSPC_MASK (0x3 << 16)
#define EMIF_SDRC_MAP_SHIFT 8
#define EMIF_SDRC_MAP_MASK (0x3 << 8)
#define EMIF_SDRC_ADDR_SHIFT 0
#define EMIF_SDRC_ADDR_MASK (0xff << 0)
/* DMM_LISA_MAP fields */
#define DMM_SDRC_MAP_UNMAPPED 0
#define DMM_SDRC_MAP_EMIF1_ONLY 1
#define DMM_SDRC_MAP_EMIF2_ONLY 2
#define DMM_SDRC_MAP_EMIF1_AND_EMIF2 3
#define DMM_SDRC_INTL_NONE 0
#define DMM_SDRC_INTL_128B 1
#define DMM_SDRC_INTL_256B 2
#define DMM_SDRC_INTL_512 3
#define DMM_SDRC_ADDR_SPC_SDRAM 0
#define DMM_SDRC_ADDR_SPC_NVM 1
#define DMM_SDRC_ADDR_SPC_INVALID 2
#define DMM_LISA_MAP_INTERLEAVED_BASE_VAL (\
        (DMM_SDRC_MAP_EMIF1_AND_EMIF2 << EMIF_SDRC_MAP_SHIFT) |\
        (DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT) |\
        (DMM_SDRC_INTL_128B << EMIF_SDRC_INTL_SHIFT) |\
        (CONFIG_SYS_SDRAM_BASE << EMIF_SYS_ADDR_SHIFT))
#define DMM_LISA_MAP_EMIF1_ONLY_BASE_VAL (\
        (DMM_SDRC_MAP_EMIF1_ONLY << EMIF_SDRC_MAP_SHIFT)|\
        (DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT)|\
        (DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT))
#define DMM_LISA_MAP_EMIF2_ONLY_BASE_VAL (\
        (DMM_SDRC_MAP_EMIF2_ONLY << EMIF_SDRC_MAP_SHIFT)|\
        (DMM_SDRC_ADDR_SPC_SDRAM << EMIF_SDRC_ADDRSPC_SHIFT)|\
        (DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT))
/* Trap for invalid TILER PAT entries */
#define DMM_LISA_MAP_0_INVAL_ADDR_TRAP (\
        (0 << EMIF_SDRC_ADDR_SHIFT) |\
        (DMM_SDRC_MAP_EMIF1_ONLY << EMIF_SDRC_MAP_SHIFT)|\
        (DMM_SDRC_ADDR_SPC_INVALID << EMIF_SDRC_ADDRSPC_SHIFT)|\
        (DMM_SDRC_INTL_NONE << EMIF_SDRC_INTL_SHIFT)|\
        (0xFF << EMIF_SYS_ADDR_SHIFT))
#define EMIF_EXT_PHY_CTRL_TIMING_REG 0x5
/* EMIF ECC CTRL reg */
#define EMIF_ECC_CTRL_REG_ECC_EN_SHIFT 31
#define EMIF_ECC_CTRL_REG_ECC_EN_MASK (1 << 31)
#define EMIF_ECC_CTRL_REG_ECC_ADDR_RGN_PROT_SHIFT 30
#define EMIF_ECC_CTRL_REG_ECC_ADDR_RGN_PROT_MASK (1 << 30)
#define EMIF_ECC_CTRL_REG_ECC_VERIFY_DIS_SHIFT 29
#define EMIF_ECC_CTRL_REG_ECC_VERIFY_DIS_MASK (1 << 29)
#define EMIF_ECC_REG_RMW_EN_SHIFT 28
#define EMIF_ECC_REG_RMW_EN_MASK (1 << 28)
#define EMIF_ECC_REG_ECC_ADDR_RGN_2_EN_SHIFT 1
#define EMIF_ECC_REG_ECC_ADDR_RGN_2_EN_MASK (1 << 1)
#define EMIF_ECC_REG_ECC_ADDR_RGN_1_EN_SHIFT 0
#define EMIF_ECC_REG_ECC_ADDR_RGN_1_EN_MASK (1 << 0)
/* EMIF ECC ADDRESS RANGE */
#define EMIF_ECC_REG_ECC_END_ADDR_SHIFT 16
#define EMIF_ECC_REG_ECC_END_ADDR_MASK (0xffff << 16)
#define EMIF_ECC_REG_ECC_START_ADDR_SHIFT 0
#define EMIF_ECC_REG_ECC_START_ADDR_MASK (0xffff << 0)
/* EMIF_SYSTEM_OCP_INTERRUPT_RAW_STATUS */
#define EMIF_INT_ONEBIT_ECC_ERR_SYS_SHIFT 5
#define EMIF_INT_ONEBIT_ECC_ERR_SYS_MASK (1 << 5)
#define EMIF_INT_TWOBIT_ECC_ERR_SYS_SHIFT 4
#define EMIF_INT_TWOBIT_ECC_ERR_SYS_MASK (1 << 4)
#define EMIF_INT_WR_ECC_ERR_SYS_SHIFT 3
#define EMIF_INT_WR_ECC_ERR_SYS_MASK (1 << 3)
/* Reg mapping structure */
struct emif_reg_struct {
        u32 emif_mod_id_rev;
        u32 emif_status;
        u32 emif_sdram_config;
        u32 emif_lpddr2_nvm_config;
        u32 emif_sdram_ref_ctrl;
        u32 emif_sdram_ref_ctrl_shdw;
        u32 emif_sdram_tim_1;
        u32 emif_sdram_tim_1_shdw;
        u32 emif_sdram_tim_2;
        u32 emif_sdram_tim_2_shdw;
        u32 emif_sdram_tim_3;
        u32 emif_sdram_tim_3_shdw;
        u32 emif_lpddr2_nvm_tim;
        u32 emif_lpddr2_nvm_tim_shdw;
        u32 emif_pwr_mgmt_ctrl;
        u32 emif_pwr_mgmt_ctrl_shdw;
        u32 emif_lpddr2_mode_reg_data;
        u32 padding1[1];
        u32 emif_lpddr2_mode_reg_data_es2;
        u32 padding11[1];
        u32 emif_lpddr2_mode_reg_cfg;
        u32 emif_l3_config;
        u32 emif_l3_cfg_val_1;
        u32 emif_l3_cfg_val_2;
        u32 emif_iodft_tlgc;
        u32 padding2[7];
        u32 emif_perf_cnt_1;
        u32 emif_perf_cnt_2;
        u32 emif_perf_cnt_cfg;
        u32 emif_perf_cnt_sel;
        u32 emif_perf_cnt_tim;
        u32 padding3;
        u32 emif_read_idlectrl;
        u32 emif_read_idlectrl_shdw;
        u32 padding4;
        u32 emif_irqstatus_raw_sys;
        u32 emif_irqstatus_raw_ll;
        u32 emif_irqstatus_sys;
        u32 emif_irqstatus_ll;
        u32 emif_irqenable_set_sys;
        u32 emif_irqenable_set_ll;
        u32 emif_irqenable_clr_sys;
        u32 emif_irqenable_clr_ll;
        u32 padding5;
        u32 emif_zq_config;
        u32 emif_temp_alert_config;
        u32 emif_l3_err_log;
        u32 emif_rd_wr_lvl_rmp_win;
        u32 emif_rd_wr_lvl_rmp_ctl;
        u32 emif_rd_wr_lvl_ctl;
        u32 padding6[1];
        u32 emif_ddr_phy_ctrl_1;
        u32 emif_ddr_phy_ctrl_1_shdw;
        u32 emif_ddr_phy_ctrl_2;
        u32 padding7[4];
        u32 emif_prio_class_serv_map;
        u32 emif_connect_id_serv_1_map;
        u32 emif_connect_id_serv_2_map;
        u32 padding8;
        u32 emif_ecc_ctrl_reg;
        u32 emif_ecc_address_range_1;
        u32 emif_ecc_address_range_2;
        u32 padding8_1;
        u32 emif_rd_wr_exec_thresh;
        u32 emif_cos_config;
#ifdef CONFIG_DRA7XX
        u32 padding9[2];
        u32 emif_1b_ecc_err_cnt;
        u32 emif_1b_ecc_err_thrush;
        u32 emif_1b_ecc_err_dist_1;
        u32 emif_1b_ecc_err_addr_log;
        u32 emif_2b_ecc_err_addr_log;
        u32 emif_ddr_phy_status[28];
        u32 padding10[19];
#else
        u32 padding9[6];
        u32 emif_ddr_phy_status[28];
        u32 padding10[20];
#endif
        u32 emif_ddr_ext_phy_ctrl_1;
        u32 emif_ddr_ext_phy_ctrl_1_shdw;
        u32 emif_ddr_ext_phy_ctrl_2;
        u32 emif_ddr_ext_phy_ctrl_2_shdw;
        u32 emif_ddr_ext_phy_ctrl_3;
        u32 emif_ddr_ext_phy_ctrl_3_shdw;
        u32 emif_ddr_ext_phy_ctrl_4;
        u32 emif_ddr_ext_phy_ctrl_4_shdw;
        u32 emif_ddr_ext_phy_ctrl_5;
        u32 emif_ddr_ext_phy_ctrl_5_shdw;
        u32 emif_ddr_ext_phy_ctrl_6;
        u32 emif_ddr_ext_phy_ctrl_6_shdw;
        u32 emif_ddr_ext_phy_ctrl_7;
        u32 emif_ddr_ext_phy_ctrl_7_shdw;
        u32 emif_ddr_ext_phy_ctrl_8;
        u32 emif_ddr_ext_phy_ctrl_8_shdw;
        u32 emif_ddr_ext_phy_ctrl_9;
        u32 emif_ddr_ext_phy_ctrl_9_shdw;
        u32 emif_ddr_ext_phy_ctrl_10;
        u32 emif_ddr_ext_phy_ctrl_10_shdw;
        u32 emif_ddr_ext_phy_ctrl_11;
        u32 emif_ddr_ext_phy_ctrl_11_shdw;
        u32 emif_ddr_ext_phy_ctrl_12;
        u32 emif_ddr_ext_phy_ctrl_12_shdw;
        u32 emif_ddr_ext_phy_ctrl_13;
        u32 emif_ddr_ext_phy_ctrl_13_shdw;
        u32 emif_ddr_ext_phy_ctrl_14;
        u32 emif_ddr_ext_phy_ctrl_14_shdw;
        u32 emif_ddr_ext_phy_ctrl_15;
        u32 emif_ddr_ext_phy_ctrl_15_shdw;
        u32 emif_ddr_ext_phy_ctrl_16;
        u32 emif_ddr_ext_phy_ctrl_16_shdw;
        u32 emif_ddr_ext_phy_ctrl_17;
        u32 emif_ddr_ext_phy_ctrl_17_shdw;
        u32 emif_ddr_ext_phy_ctrl_18;
        u32 emif_ddr_ext_phy_ctrl_18_shdw;
        u32 emif_ddr_ext_phy_ctrl_19;
        u32 emif_ddr_ext_phy_ctrl_19_shdw;
        u32 emif_ddr_ext_phy_ctrl_20;
        u32 emif_ddr_ext_phy_ctrl_20_shdw;
        u32 emif_ddr_ext_phy_ctrl_21;
        u32 emif_ddr_ext_phy_ctrl_21_shdw;
        u32 emif_ddr_ext_phy_ctrl_22;
        u32 emif_ddr_ext_phy_ctrl_22_shdw;
        u32 emif_ddr_ext_phy_ctrl_23;
        u32 emif_ddr_ext_phy_ctrl_23_shdw;
        u32 emif_ddr_ext_phy_ctrl_24;
        u32 emif_ddr_ext_phy_ctrl_24_shdw;
        u32 emif_ddr_ext_phy_ctrl_25;
        u32 emif_ddr_ext_phy_ctrl_25_shdw;
        u32 emif_ddr_ext_phy_ctrl_26;
        u32 emif_ddr_ext_phy_ctrl_26_shdw;
        u32 emif_ddr_ext_phy_ctrl_27;
        u32 emif_ddr_ext_phy_ctrl_27_shdw;
        u32 emif_ddr_ext_phy_ctrl_28;
        u32 emif_ddr_ext_phy_ctrl_28_shdw;
        u32 emif_ddr_ext_phy_ctrl_29;
        u32 emif_ddr_ext_phy_ctrl_29_shdw;
        u32 emif_ddr_ext_phy_ctrl_30;
        u32 emif_ddr_ext_phy_ctrl_30_shdw;
        u32 emif_ddr_ext_phy_ctrl_31;
        u32 emif_ddr_ext_phy_ctrl_31_shdw;
        u32 emif_ddr_ext_phy_ctrl_32;
        u32 emif_ddr_ext_phy_ctrl_32_shdw;
        u32 emif_ddr_ext_phy_ctrl_33;
        u32 emif_ddr_ext_phy_ctrl_33_shdw;
        u32 emif_ddr_ext_phy_ctrl_34;
        u32 emif_ddr_ext_phy_ctrl_34_shdw;
        u32 emif_ddr_ext_phy_ctrl_35;
        u32 emif_ddr_ext_phy_ctrl_35_shdw;
        union {
                u32 emif_ddr_ext_phy_ctrl_36;
                u32 emif_ddr_fifo_misaligned_clear_1;
        };
        union {
                u32 emif_ddr_ext_phy_ctrl_36_shdw;
                u32 emif_ddr_fifo_misaligned_clear_2;
        };
};
struct dmm_lisa_map_regs {
        u32 dmm_lisa_map_0;
        u32 dmm_lisa_map_1;
        u32 dmm_lisa_map_2;
        u32 dmm_lisa_map_3;
        u8 is_ma_present;
};
#define CS0 0
#define CS1 1
/* The maximum frequency at which the LPDDR2 interface can operate, in Hz */
#define MAX_LPDDR2_FREQ 400000000 /* 400 MHz */
/*
 * The period of the DDR clock is represented as a numerator and denominator
 * for better accuracy in integer-based calculations. However, if the
 * numerator and denominator are very large there is a chance of overflow in
 * the calculations. So, as a trade-off, keep the denominator (and
 * consequently the numerator) within a limit, sacrificing some accuracy -
 * but not much. If the denominator and numerator are already small (such as
 * at 400 MHz) no adjustment is needed.
 */
#define EMIF_PERIOD_DEN_LIMIT 1000
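
/*
 * Illustrative sketch (hypothetical helper, not part of the original header):
 * if the period's denominator exceeds EMIF_PERIOD_DEN_LIMIT, scale numerator
 * and denominator down by the same factor so later multiplications stay
 * within 32 bits, at the cost of a little precision.
 */
static inline void emif_limit_period(u32 *num, u32 *den)
{
        if (*den > EMIF_PERIOD_DEN_LIMIT) {
                u32 factor = *den / EMIF_PERIOD_DEN_LIMIT + 1;

                *num /= factor; /* both scaled by the same factor, so */
                *den /= factor; /* the ratio is approximately preserved */
        }
}
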
/*
 * Maximum number of different frequencies supported by EMIF driver
 * Determines the number of entries in the pointer array for register
 * cache
 */
#define EMIF_MAX_NUM_FREQUENCIES 6
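
/*
 * Illustrative sketch (hypothetical, not part of the original header): the
 * register cache mentioned above would simply be an array of pointers sized
 * by EMIF_MAX_NUM_FREQUENCIES, e.g.
 *
 *	static const struct emif_regs *emif_regs_cache[EMIF_MAX_NUM_FREQUENCIES];
 *
 * DVFS code would look up the cached entry whose frequency matches the
 * target and recalculate a register set only on a miss (struct emif_regs is
 * defined further below).
 */
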
/*
 * Indices into the Addressing Table array.
 * One entry each for all the different types of devices with different
 * addressing schemes
 */
#define ADDR_TABLE_INDEX64M 0
#define ADDR_TABLE_INDEX128M 1
#define ADDR_TABLE_INDEX256M 2
#define ADDR_TABLE_INDEX512M 3
#define ADDR_TABLE_INDEX1GS4 4
#define ADDR_TABLE_INDEX2GS4 5
#define ADDR_TABLE_INDEX4G 6
#define ADDR_TABLE_INDEX8G 7
#define ADDR_TABLE_INDEX1GS2 8
#define ADDR_TABLE_INDEX2GS2 9
#define ADDR_TABLE_INDEXMAX 10
/* Number of Row bits */
#define ROW_9 0
#define ROW_10 1
#define ROW_11 2
#define ROW_12 3
#define ROW_13 4
#define ROW_14 5
#define ROW_15 6
#define ROW_16 7
/* Number of Column bits */
#define COL_8 0
#define COL_9 1
#define COL_10 2
#define COL_11 3
#define COL_7 4 /* Not supported by OMAP; included for completeness */
/* Number of Banks */
#define BANKS1 0
#define BANKS2 1
#define BANKS4 2
#define BANKS8 3
/* Refresh rate in microseconds x 10 */
#define T_REFI_15_6 156
#define T_REFI_7_8 78
#define T_REFI_3_9 39
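
/*
 * Illustrative sketch (hypothetical helper): REG_REFRESH_RATE holds the
 * average refresh interval in DDR clock cycles, so the T_REFI_* values above
 * (microseconds x 10) convert as tREFI * f. At 400 MHz with tREFI = 7.8 us
 * this gives 78 * 400000000 / 10000000 = 3120 cycles.
 */
static inline u32 emif_refresh_rate_cycles(u32 t_refi_us_x10, u32 freq_hz)
{
        /* 64-bit intermediate: 78 * 400 MHz already overflows 32 bits */
        return (u32)(((u64)t_refi_us_x10 * freq_hz) / 10000000);
}
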
#define EBANK_CS1_DIS 0
#define EBANK_CS1_EN 1
/* Read Latency used by the device at reset */
#define RL_BOOT 3
/* Read Latency for the highest frequency you want to use */
#ifdef CONFIG_OMAP54XX
#define RL_FINAL 8
#else
#define RL_FINAL 6
#endif
/* Interleaving policies at EMIF level - between banks and Chip Selects */
#define EMIF_INTERLEAVING_POLICY_MAX_INTERLEAVING 0
#define EMIF_INTERLEAVING_POLICY_NO_BANK_INTERLEAVING 3
/*
 * Interleaving policy to be used
 * Currently set to MAX interleaving for better performance
 */
#define EMIF_INTERLEAVING_POLICY EMIF_INTERLEAVING_POLICY_MAX_INTERLEAVING
/*
 * State of the core voltage:
 * This is important for some parameters such as read idle control and
 * ZQ calibration timings. Timings are much stricter when a voltage ramp
 * is happening compared to when the voltage is stable.
 * We need to calculate two sets of values for these parameters and use
 * them accordingly.
 */
#define LPDDR2_VOLTAGE_STABLE 0
#define LPDDR2_VOLTAGE_RAMPING 1
/* Length of the forced read idle period in terms of cycles */
#define EMIF_REG_READ_IDLE_LEN_VAL 5
/* Interval between forced 'read idles' */
/* To be used when voltage is changed for DPS/DVFS - 1 us */
#define READ_IDLE_INTERVAL_DVFS (1*1000)
/*
 * To be used when voltage is not scaled except by Smart Reflex
 * 50 us - or the maximum value will do
 */
#define READ_IDLE_INTERVAL_NORMAL (50*1000)
/*
 * Unless voltage is changing due to DVFS, one ZQCS command every 50 ms
 * should be enough. This should also be enough when voltage is changing
 * due to smart-reflex.
 */
#define EMIF_ZQCS_INTERVAL_NORMAL_IN_US (50*1000)
/*
 * If voltage is changing due to DVFS, ZQCS should be performed more
 * often (every 50 us)
 */
#define EMIF_ZQCS_INTERVAL_DVFS_IN_US 50
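
/*
 * Illustrative sketch (hypothetical helpers): the LPDDR2_VOLTAGE_* state
 * above is what selects between the "normal" and "DVFS" variants of the
 * read-idle and ZQCS intervals.
 */
static inline u32 emif_read_idle_interval(u32 volt_state)
{
        return (volt_state == LPDDR2_VOLTAGE_RAMPING) ?
                READ_IDLE_INTERVAL_DVFS : READ_IDLE_INTERVAL_NORMAL;
}

static inline u32 emif_zqcs_interval_us(u32 volt_state)
{
        return (volt_state == LPDDR2_VOLTAGE_RAMPING) ?
                EMIF_ZQCS_INTERVAL_DVFS_IN_US :
                EMIF_ZQCS_INTERVAL_NORMAL_IN_US;
}
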
/* The interval between ZQCL commands as a multiple of ZQCS interval */
#define REG_ZQ_ZQCL_MULT 4
/* The interval between ZQINIT commands as a multiple of ZQCL interval */
#define REG_ZQ_ZQINIT_MULT 3
/* Enable ZQ Calibration on exiting Self-refresh */
#define REG_ZQ_SFEXITEN_ENABLE 1
/*
 * ZQ Calibration simultaneously on both chip-selects:
 * Needs one calibration resistor per CS
 * None of the boards that we know of have this capability
 * So disabled by default
 */
#define REG_ZQ_DUALCALEN_DISABLE 0
/*
 * Enable ZQ Calibration by default on CS0. If we are asked to program
 * the EMIF there will be something connected to CS0 for sure
 */
#define REG_ZQ_CS0EN_ENABLE 1
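
/*
 * Illustrative sketch (hypothetical helper): how the REG_ZQ_* policy values
 * above would combine with the EMIF_REG_ZQ_* shifts into a ZQ_CONFIG value.
 * The ZQCS refresh-interval count is assumed to be computed elsewhere from
 * the chosen ZQCS interval and tREFI, and the two-bit *_MULT fields are
 * assumed here to encode (multiple - 1) so that REG_ZQ_ZQCL_MULT fits.
 */
static inline u32 emif_zq_config_val(u32 zqcs_refresh_intervals)
{
        u32 zq = zqcs_refresh_intervals << EMIF_REG_ZQ_REFINTERVAL_SHIFT;

        zq |= (REG_ZQ_ZQCL_MULT - 1) << EMIF_REG_ZQ_ZQCL_MULT_SHIFT;
        zq |= (REG_ZQ_ZQINIT_MULT - 1) << EMIF_REG_ZQ_ZQINIT_MULT_SHIFT;
        zq |= REG_ZQ_SFEXITEN_ENABLE << EMIF_REG_ZQ_SFEXITEN_SHIFT;
        zq |= REG_ZQ_DUALCALEN_DISABLE << EMIF_REG_ZQ_DUALCALEN_SHIFT;
        zq |= REG_ZQ_CS0EN_ENABLE << EMIF_REG_ZQ_CS0EN_SHIFT;
        /* CS1 enable would be ORed in here when a device is present on CS1 */

        return zq;
}
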
/* EMIF_PWR_MGMT_CTRL register */
/* Low power modes */
#define LP_MODE_DISABLE 0
#define LP_MODE_CLOCK_STOP 1
#define LP_MODE_SELF_REFRESH 2
#define LP_MODE_PWR_DN 3
/* REG_DPD_EN */
#define DPD_DISABLE 0
#define DPD_ENABLE 1
/* Maximum delay before Low Power Modes */
#define REG_CS_TIM 0x0
#define REG_SR_TIM 0xF
#define REG_PD_TIM 0xF
/* EMIF_PWR_MGMT_CTRL register value */
#define EMIF_PWR_MGMT_CTRL (\
        ((REG_CS_TIM << EMIF_REG_CS_TIM_SHIFT) & EMIF_REG_CS_TIM_MASK)|\
        ((REG_SR_TIM << EMIF_REG_SR_TIM_SHIFT) & EMIF_REG_SR_TIM_MASK)|\
        ((REG_PD_TIM << EMIF_REG_PD_TIM_SHIFT) & EMIF_REG_PD_TIM_MASK)|\
        ((LP_MODE_SELF_REFRESH << EMIF_REG_LP_MODE_SHIFT)\
        & EMIF_REG_LP_MODE_MASK) |\
        ((DPD_DISABLE << EMIF_REG_DPD_EN_SHIFT)\
        & EMIF_REG_DPD_EN_MASK))
#define EMIF_PWR_MGMT_CTRL_SHDW (\
        ((REG_CS_TIM << EMIF_REG_CS_TIM_SHDW_SHIFT)\
        & EMIF_REG_CS_TIM_SHDW_MASK) |\
        ((REG_SR_TIM << EMIF_REG_SR_TIM_SHDW_SHIFT)\
        & EMIF_REG_SR_TIM_SHDW_MASK) |\
        ((REG_PD_TIM << EMIF_REG_PD_TIM_SHDW_SHIFT)\
        & EMIF_REG_PD_TIM_SHDW_MASK))
/* EMIF_L3_CONFIG register value */
#define EMIF_L3_CONFIG_VAL_SYS_10_LL_0 0x0A0000FF
#define EMIF_L3_CONFIG_VAL_SYS_10_MPU_3_LL_0 0x0A300000
#define EMIF_L3_CONFIG_VAL_SYS_10_MPU_5_LL_0 0x0A500000
/*
 * Value of bits 12:31 of DDR_PHY_CTRL_1 register:
 * All these fields have magic values dependent on frequency and
 * determined by PHY and DLL integration with EMIF. Setting the magic
 * values suggested by hw team.
 */
#define EMIF_DDR_PHY_CTRL_1_BASE_VAL 0x049FF
#define EMIF_DLL_SLAVE_DLY_CTRL_400_MHZ 0x41
#define EMIF_DLL_SLAVE_DLY_CTRL_200_MHZ 0x80
#define EMIF_DLL_SLAVE_DLY_CTRL_100_MHZ_AND_LESS 0xFF
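
/*
 * Illustrative sketch (hypothetical helper): DDR_PHY_CTRL_1 is composed from
 * the magic base value above, a frequency-dependent DLL slave delay
 * (one of the EMIF_DLL_SLAVE_DLY_CTRL_* values) and the read latency in
 * cycles; read_latency here is assumed to have been chosen already by the
 * caller (e.g. derived from RL_BOOT or RL_FINAL).
 */
static inline u32 emif_ddr_phy_ctrl_1_val(u32 dll_slave_dly, u32 read_latency)
{
        u32 phy = EMIF_DDR_PHY_CTRL_1_BASE_VAL <<
                EMIF_EMIF_DDR_PHY_CTRL_1_BASE_VAL_SHIFT;

        phy |= dll_slave_dly << EMIF_REG_DLL_SLAVE_DLY_CTRL_SHIFT;
        phy |= read_latency << EMIF_REG_READ_LATENCY_SHIFT;

        return phy;
}
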
/*
 * MR1 value:
 * Burst length : 8
 * Burst type   : sequential
 * Wrap         : enabled
 * nWR          : 3 (default). EMIF does not do pre-charge,
 *                so nWR is a don't-care.
 */
#define MR1_BL_8_BT_SEQ_WRAP_EN_NWR_3 0x23
#define MR1_BL_8_BT_SEQ_WRAP_EN_NWR_8 0xc3
/* MR2 */
#define MR2_RL3_WL1 1
#define MR2_RL4_WL2 2
#define MR2_RL5_WL2 3
#define MR2_RL6_WL3 4
/* MR10: ZQ calibration codes */
#define MR10_ZQ_ZQCS 0x56
#define MR10_ZQ_ZQCL 0xAB
#define MR10_ZQ_ZQINIT 0xFF
#define MR10_ZQ_ZQRESET 0xC3
/* TEMP_ALERT_CONFIG */
#define TEMP_ALERT_POLL_INTERVAL_MS 360 /* for a temp gradient of 5 C/s */
#define TEMP_ALERT_CONFIG_DEVCT_1 0
#define TEMP_ALERT_CONFIG_DEVWDT_32 2
/* MR16 value: refresh the full array (no partial array self refresh) */
#define MR16_REF_FULL_ARRAY 0
/*
 * Maximum number of entries we keep in our array of timing tables.
 * We need not keep all the speed bins supported by the device -
 * only timing tables for the speed bins we are interested in.
 */
#define MAX_NUM_SPEEDBINS 4
/* LPDDR2 Densities */
#define LPDDR2_DENSITY_64Mb 0
#define LPDDR2_DENSITY_128Mb 1
#define LPDDR2_DENSITY_256Mb 2
#define LPDDR2_DENSITY_512Mb 3
#define LPDDR2_DENSITY_1Gb 4
#define LPDDR2_DENSITY_2Gb 5
#define LPDDR2_DENSITY_4Gb 6
#define LPDDR2_DENSITY_8Gb 7
#define LPDDR2_DENSITY_16Gb 8
#define LPDDR2_DENSITY_32Gb 9
/* LPDDR2 type */
#define LPDDR2_TYPE_S4 0
#define LPDDR2_TYPE_S2 1
#define LPDDR2_TYPE_NVM 2
/* LPDDR2 IO width */
#define LPDDR2_IO_WIDTH_32 0
#define LPDDR2_IO_WIDTH_16 1
#define LPDDR2_IO_WIDTH_8 2
/* Mode register numbers */
#define LPDDR2_MR0 0
#define LPDDR2_MR1 1
#define LPDDR2_MR2 2
#define LPDDR2_MR3 3
#define LPDDR2_MR4 4
#define LPDDR2_MR5 5
#define LPDDR2_MR6 6
#define LPDDR2_MR7 7
#define LPDDR2_MR8 8
#define LPDDR2_MR9 9
#define LPDDR2_MR10 10
#define LPDDR2_MR11 11
#define LPDDR2_MR16 16
#define LPDDR2_MR17 17
#define LPDDR2_MR18 18
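
/*
 * Illustrative sketch (hypothetical helper, modelled on how the EMIF
 * mode-register interface is typically driven): a mode-register write
 * programs LPDDR2_MODE_REG_CFG with the chip-select and MR address, then
 * writes the value through LPDDR2_MODE_REG_DATA. For example, ZQ
 * initialisation would write MR10_ZQ_ZQINIT to LPDDR2_MR10.
 */
static inline void emif_set_mr(u32 base, u32 cs, u32 mr_addr, u32 mr_val)
{
        struct emif_reg_struct *emif = (struct emif_reg_struct *)base;
        u32 cfg = (mr_addr << EMIF_REG_ADDRESS_SHIFT) |
                  (cs << EMIF_REG_CS_SHIFT);

        writel(cfg, &emif->emif_lpddr2_mode_reg_cfg);
        writel(mr_val, &emif->emif_lpddr2_mode_reg_data);
}
/* e.g. emif_set_mr(EMIF1_BASE, CS0, LPDDR2_MR10, MR10_ZQ_ZQINIT); */
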
/* MR0 */
#define LPDDR2_MR0_DAI_SHIFT 0
#define LPDDR2_MR0_DAI_MASK 1
#define LPDDR2_MR0_DI_SHIFT 1
#define LPDDR2_MR0_DI_MASK (1 << 1)
#define LPDDR2_MR0_DNVI_SHIFT 2
#define LPDDR2_MR0_DNVI_MASK (1 << 2)
/* MR4 */
#define MR4_SDRAM_REF_RATE_SHIFT 0
#define MR4_SDRAM_REF_RATE_MASK 7
#define MR4_TUF_SHIFT 7
#define MR4_TUF_MASK (1 << 7)
/* MR4 SDRAM Refresh Rate field values */
#define SDRAM_TEMP_LESS_LOW_SHUTDOWN 0x0
#define SDRAM_TEMP_LESS_4X_REFRESH_AND_TIMINGS 0x1
#define SDRAM_TEMP_LESS_2X_REFRESH_AND_TIMINGS 0x2
#define SDRAM_TEMP_NOMINAL 0x3
#define SDRAM_TEMP_RESERVED_4 0x4
#define SDRAM_TEMP_HIGH_DERATE_REFRESH 0x5
#define SDRAM_TEMP_HIGH_DERATE_REFRESH_AND_TIMINGS 0x6
#define SDRAM_TEMP_VERY_HIGH_SHUTDOWN 0x7
#define LPDDR2_MANUFACTURER_SAMSUNG 1
#define LPDDR2_MANUFACTURER_QIMONDA 2
#define LPDDR2_MANUFACTURER_ELPIDA 3
#define LPDDR2_MANUFACTURER_ETRON 4
#define LPDDR2_MANUFACTURER_NANYA 5
#define LPDDR2_MANUFACTURER_HYNIX 6
#define LPDDR2_MANUFACTURER_MOSEL 7
#define LPDDR2_MANUFACTURER_WINBOND 8
#define LPDDR2_MANUFACTURER_ESMT 9
#define LPDDR2_MANUFACTURER_SPANSION 11
#define LPDDR2_MANUFACTURER_SST 12
#define LPDDR2_MANUFACTURER_ZMOS 13
#define LPDDR2_MANUFACTURER_INTEL 14
#define LPDDR2_MANUFACTURER_NUMONYX 254
#define LPDDR2_MANUFACTURER_MICRON 255
/* MR8 register fields */
#define MR8_TYPE_SHIFT 0x0
#define MR8_TYPE_MASK 0x3
#define MR8_DENSITY_SHIFT 0x2
#define MR8_DENSITY_MASK (0xF << 0x2)
#define MR8_IO_WIDTH_SHIFT 0x6
#define MR8_IO_WIDTH_MASK (0x3 << 0x6)
/* SDRAM TYPE */
#define EMIF_SDRAM_TYPE_DDR2 0x2
#define EMIF_SDRAM_TYPE_DDR3 0x3
#define EMIF_SDRAM_TYPE_LPDDR2 0x4
struct lpddr2_addressing {
        u8 num_banks;
        u8 t_REFI_us_x10;
        u8 row_sz[2]; /* One entry each for x32 and x16 */
        u8 col_sz[2]; /* One entry each for x32 and x16 */
};
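
/*
 * Illustrative sketch (hypothetical helper): row_sz/col_sz hold one entry
 * for x32 parts and one for x16 parts, in that order, so the LPDDR2 I/O
 * width read back from MR8 selects the index.
 */
static inline void lpddr2_get_row_col(const struct lpddr2_addressing *addr,
                                      u8 io_width, u8 *row_sz, u8 *col_sz)
{
        u8 idx = (io_width == LPDDR2_IO_WIDTH_16) ? 1 : 0;

        *row_sz = addr->row_sz[idx];
        *col_sz = addr->col_sz[idx];
}
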
/* Structure for timings from the DDR datasheet */
struct lpddr2_ac_timings {
        u32 max_freq;
        u8 RL;
        u8 tRPab;
        u8 tRCD;
        u8 tWR;
        u8 tRASmin;
        u8 tRRD;
        u8 tWTRx2;
        u8 tXSR;
        u8 tXPx2;
        u8 tRFCab;
        u8 tRTPx2;
        u8 tCKE;
        u8 tCKESR;
        u8 tZQCS;
        u32 tZQCL;
        u32 tZQINIT;
        u8 tDQSCKMAXx2;
        u8 tRASmax;
        u8 tFAW;
};
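
/*
 * Illustrative sketch (hypothetical helper): a device typically provides one
 * lpddr2_ac_timings entry per speed bin (at most MAX_NUM_SPEEDBINS of them,
 * assumed here to be sorted by ascending max_freq and terminated by a NULL
 * pointer); the driver would pick the slowest bin that can still run at the
 * requested frequency.
 */
static inline const struct lpddr2_ac_timings *
lpddr2_find_timings(const struct lpddr2_ac_timings **timings, u32 freq)
{
        u32 i;

        for (i = 0; (i < MAX_NUM_SPEEDBINS) && timings[i]; i++) {
                if (timings[i]->max_freq >= freq)
                        return timings[i];
        }

        return NULL;
}
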
/*
 * Min tCK values for some of the parameters:
 * If the calculated number of clock cycles for a given parameter is
 * less than the corresponding min tCK value, the min tCK value must be
 * used instead. This may happen at lower frequencies.
 */
struct lpddr2_min_tck {
        u32 tRL;
        u32 tRP_AB;
        u32 tRCD;
        u32 tWR;
        u32 tRAS_MIN;
        u32 tRRD;
        u32 tWTR;
        u32 tXP;
        u32 tRTP;
        u8 tCKE;
        u32 tCKESR;
        u32 tFAW;
};
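
/*
 * Illustrative sketch (hypothetical helper): convert a datasheet timing in
 * nanoseconds to DDR clock cycles (rounding up) and clamp it to the min tCK
 * value described above.
 */
static inline u32 emif_ns_to_cycles_min_tck(u32 t_ns, u32 t_ck_ps, u32 min_tck)
{
        /* t_ck_ps is the clock period in picoseconds */
        u32 cycles = (t_ns * 1000 + t_ck_ps - 1) / t_ck_ps;

        return (cycles > min_tck) ? cycles : min_tck;
}
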
struct lpddr2_device_details {
        u8 type;
        u8 density;
        u8 io_width;
        u8 manufacturer;
};
struct lpddr2_device_timings {
        const struct lpddr2_ac_timings **ac_timings;
        const struct lpddr2_min_tck *min_tck;
};
/* Details of the devices connected to each chip-select of an EMIF instance */
struct emif_device_details {
        const struct lpddr2_device_details *cs0_device_details;
        const struct lpddr2_device_details *cs1_device_details;
        const struct lpddr2_device_timings *cs0_device_timings;
        const struct lpddr2_device_timings *cs1_device_timings;
};
/*
 * Structure containing shadow of important registers in EMIF
 * The calculation function fills in this structure to be later used for
 * initialization and DVFS
 */
struct emif_regs {
        u32 freq;
        u32 sdram_config_init;
        u32 sdram_config;
        u32 sdram_config2;
        u32 ref_ctrl;
        u32 ref_ctrl_final;
        u32 sdram_tim1;
        u32 sdram_tim2;
        u32 sdram_tim3;
        u32 ocp_config;
        u32 read_idle_ctrl;
        u32 zq_config;
        u32 temp_alert_config;
        u32 emif_ddr_phy_ctlr_1_init;
        u32 emif_ddr_phy_ctlr_1;
        u32 emif_ddr_ext_phy_ctrl_1;
        u32 emif_ddr_ext_phy_ctrl_2;
        u32 emif_ddr_ext_phy_ctrl_3;
        u32 emif_ddr_ext_phy_ctrl_4;
        u32 emif_ddr_ext_phy_ctrl_5;
        u32 emif_rd_wr_lvl_rmp_win;
        u32 emif_rd_wr_lvl_rmp_ctl;
        u32 emif_rd_wr_lvl_ctl;
        u32 emif_rd_wr_exec_thresh;
        u32 emif_prio_class_serv_map;
        u32 emif_connect_id_serv_1_map;
        u32 emif_connect_id_serv_2_map;
        u32 emif_cos_config;
        u32 emif_ecc_ctrl_reg;
        u32 emif_ecc_address_range_1;
        u32 emif_ecc_address_range_2;
};
struct lpddr2_mr_regs {
        s8 mr1;
        s8 mr2;
        s8 mr3;
        s8 mr10;
        s8 mr16;
};
struct read_write_regs {
        u32 read_reg;
        u32 write_reg;
};
static inline u32 get_emif_rev(u32 base)
{
        struct emif_reg_struct *emif = (struct emif_reg_struct *)base;

        return (readl(&emif->emif_mod_id_rev) & EMIF_REG_MAJOR_REVISION_MASK)
                >> EMIF_REG_MAJOR_REVISION_SHIFT;
}
/*
 * Get the type of SDRAM connected to the EMIF.
 * Assuming similar SDRAM parts are connected to both EMIFs,
 * which is typically the case, it is sufficient to get the
 * SDRAM type from EMIF1.
 */
static inline u32 emif_sdram_type(u32 sdram_config)
{
        return (sdram_config & EMIF_REG_SDRAM_TYPE_MASK)
                >> EMIF_REG_SDRAM_TYPE_SHIFT;
}
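
/*
 * Illustrative usage sketch (hypothetical helper): per the comment above,
 * the SDRAM type is normally taken from EMIF1's SDRAM_CONFIG register.
 */
static inline u32 emif1_sdram_type(void)
{
        struct emif_reg_struct *emif = (struct emif_reg_struct *)EMIF1_BASE;

        return emif_sdram_type(readl(&emif->emif_sdram_config));
}
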
/* assert macros */
#if defined(DEBUG)
#define emif_assert(c) ({ if (!(c)) for (;;); })
#else
#define emif_assert(c) ({ if (0) hang(); })
#endif
#ifdef CONFIG_SYS_EMIF_PRECALCULATED_TIMING_REGS
void emif_get_reg_dump(u32 emif_nr, const struct emif_regs **regs);
void emif_get_dmm_regs(const struct dmm_lisa_map_regs **dmm_lisa_regs);
#else
struct lpddr2_device_details *emif_get_device_details(u32 emif_nr, u8 cs,
        struct lpddr2_device_details *lpddr2_dev_details);
void emif_get_device_timings(u32 emif_nr,
        const struct lpddr2_device_timings **cs0_device_timings,
        const struct lpddr2_device_timings **cs1_device_timings);
#endif
void do_ext_phy_settings(u32 base, const struct emif_regs *regs);
void get_lpddr2_mr_regs(const struct lpddr2_mr_regs **regs);
#ifndef CONFIG_SYS_EMIF_PRECALCULATED_TIMING_REGS
extern u32 *const T_num;
extern u32 *const T_den;
#endif
void config_data_eye_leveling_samples(u32 emif_base);
const struct read_write_regs *get_bug_regs(u32 *iterations);
#endif