csl_cacheAux.h
  1. /**
  2. * @file csl_cacheAux.h
  3. *
  4. * @brief
  5. * This is the CACHE Auxiliary Header File which exposes the various
  6. * CSL Functional Layer APIs to configure the CACHE Module.
  7. *
  8. * \par
  9. * ============================================================================
  10. * @n (C) Copyright 2002, 2003, 2004, 2005, 2008, 2009, 2016 Texas Instruments, Inc.
  11. *
  12. * Redistribution and use in source and binary forms, with or without
  13. * modification, are permitted provided that the following conditions
  14. * are met:
  15. *
  16. * Redistributions of source code must retain the above copyright
  17. * notice, this list of conditions and the following disclaimer.
  18. *
  19. * Redistributions in binary form must reproduce the above copyright
  20. * notice, this list of conditions and the following disclaimer in the
  21. * documentation and/or other materials provided with the
  22. * distribution.
  23. *
  24. * Neither the name of Texas Instruments Incorporated nor the names of
  25. * its contributors may be used to endorse or promote products derived
  26. * from this software without specific prior written permission.
  27. *
  28. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  29. * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  30. * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  31. * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  32. * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  33. * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  34. * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  35. * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  36. * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  37. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  38. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  39. *
  40. */
  41. #ifndef CSL_CACHEAUX_H
  42. #define CSL_CACHEAUX_H
  43. #ifdef __cplusplus
  44. extern "C" {
  45. #endif
  46. #include <ti/csl/csl_cache.h>
  47. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  48. #include <ti/csl/csl_xmcAux.h>
  49. #endif
  50. /** @addtogroup CSL_CACHE_FUNCTION
  51. @{ */
  52. static inline void CACHE_AsmNop (void);/* for misra warnings*/
  53. static inline void CACHE_AsmNop (void)
  54. {
  55. asm (" nop 4");
  56. asm (" nop 4");
  57. asm (" nop 4");
  58. asm (" nop 4");
  59. }
  60. /** ============================================================================
  61. * @n@b CACHE_setMemRegionWritethrough
  62. *
  63. * @b Description
  64. * @n This function sets write through mode for a specific memory region.
  65. *
  66. * @b Arguments
  67. @verbatim
  68. mar Memory region for which cache is to be set to writethrough mode.
  69. arg TRUE to set write-through mode, FALSE to set writeback mode.
  70. @endverbatim
  71. *
  72. * <b> Return Value </b>
  73. * @n None
  74. *
  75. * <b> Pre Condition </b>
  76. * @n None
  77. *
  78. * <b> Post Condition </b>
  79. * @n Memory region is now set to writethrough mode.
  80. *
  81. * @b Writes
  82. * @n CGEM_MAR0_WTE=1, when arg == TRUE, zero otherwise
  83. *
  84. * @b Example
  85. * @verbatim
  86. CACHE_setMemRegionWritethrough (20, TRUE);
  87. @endverbatim
  88. * =============================================================================
  89. */
  90. /*for Misra warnings */
  91. static inline void CACHE_setMemRegionWritethrough (Uint8 mar, Bool arg);/* for misra warnings*/
  92. static inline void CACHE_setMemRegionWritethrough (Uint8 mar, Bool arg)
  93. {
  94. uint32_t temp = 0;
  95. CSL_FINS(temp, CGEM_MAR0_WTE, (uint32_t)1U);
  96. if (arg == (Bool)TRUE)
  97. {
  98. hCache->MAR[mar] |= temp;
  99. }
  100. else
  101. {
  102. hCache->MAR[mar] &= ~temp;
  103. }
  104. }
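/* Usage sketch: marking an external memory region write-through so CPU stores
 * reach the underlying memory without an explicit writeback, then reverting to
 * the default writeback behaviour. The region number 20 is an illustrative
 * assumption; consult the device data manual for the MAR index that maps to
 * the buffer's actual address range.
 *
 * @verbatim
     // Switch region 20 to write-through before sharing data with a peripheral
     CACHE_setMemRegionWritethrough (20, TRUE);
     ...
     // Revert region 20 to writeback mode afterwards
     CACHE_setMemRegionWritethrough (20, FALSE);
   @endverbatim
 */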
  105. /** ============================================================================
  106. * @n@b CACHE_getMemRegionWritethrough
  107. *
  108. * @b Description
  109. * @n This function gets write through mode for a specific memory region.
  110. *
  111. * @b Arguments
  112. @verbatim
  113. mar Memory region for which WTE bit information to be read.
  114. wte Address of WTE value
  115. @endverbatim
  116. *
  117. * <b> Return Value </b>
  118. * @n None
  119. *
  120. * <b> Pre Condition </b>
  121. * @n None
  122. *
  123. * <b> Post Condition </b>
  124. * @n WTE bit value reflecting the write through mode is returned.
  125. *
  126. * @b Writes
  127. * @n None
  128. *
  129. * @b Example
  130. * @verbatim
  131. CACHE_getMemRegionWritethrough (20, &wte);
  132. @endverbatim
  133. * =============================================================================
  134. */
  135. static inline void CACHE_getMemRegionWritethrough (Uint8 mar, Uint8 *wte);/* for misra warnings*/
  136. static inline void CACHE_getMemRegionWritethrough (Uint8 mar, Uint8 *wte)
  137. {
  138. Uint32 value = hCache->MAR[mar];
  139. *wte = (Uint8)CSL_FEXT (value, CGEM_MAR0_WTE);
  140. }
  141. /** ============================================================================
  142. * @n@b CACHE_enableCaching
  143. *
  144. * @b Description
  145. * @n This function enables caching for a specific memory region.
  146. *
  147. * @b Arguments
  148. @verbatim
  149. mar Memory region for which cache is to be enabled.
  150. @endverbatim
  151. *
  152. * <b> Return Value </b>
  153. * @n None
  154. *
  155. * <b> Pre Condition </b>
  156. * @n None
  157. *
  158. * <b> Post Condition </b>
  159. * @n Memory region is now cacheable.
  160. *
  161. * @b Writes
  162. * @n CGEM_MAR0_PC=1
  163. *
  164. * @b Example
  165. * @verbatim
  166. CACHE_enableCaching (20);
  167. @endverbatim
  168. * =============================================================================
  169. */
  170. static inline void CACHE_enableCaching (Uint8 mar);/* for misra warnings*/
  171. static inline void CACHE_enableCaching (Uint8 mar)
  172. {
  173. CSL_FINS(hCache->MAR[mar], CGEM_MAR0_PC, (uint32_t)1U);
  174. }
  175. /** ============================================================================
  176. * @n@b CACHE_disableCaching
  177. *
  178. * @b Description
  179. * @n This function disables caching for a specific memory region.
  180. *
  181. * @b Arguments
  182. @verbatim
  183. mar Memory region for which cache is to be disabled.
  184. @endverbatim
  185. *
  186. * <b> Return Value </b>
  187. * @n None
  188. *
  189. * <b> Pre Condition </b>
  190. * @n None
  191. *
  192. * <b> Post Condition </b>
  193. * @n Memory region is now *not* cacheable.
  194. *
  195. * @b Writes
  196. * @n CGEM_MAR0_PC=0
  197. *
  198. * @b Example
  199. * @verbatim
  200. CACHE_disableCaching (20);
  201. @endverbatim
  202. * =============================================================================
  203. */
  204. static inline void CACHE_disableCaching (Uint8 mar);/* for misra warnings*/
  205. static inline void CACHE_disableCaching (Uint8 mar)
  206. {
  207. CSL_FINS(hCache->MAR[mar], CGEM_MAR0_PC, (uint32_t)0);
  208. }
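/* Usage sketch: enabling caching over a span of MAR regions during system
 * initialization. The start and end indices below are assumptions for this
 * example; the MAR-index-to-address-range mapping is device specific.
 *
 * @verbatim
     Uint8 mar;
     // Make a block of external memory regions cacheable
     for (mar = 128; mar <= 143; mar++)
     {
         CACHE_enableCaching (mar);
     }
   @endverbatim
 */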
  209. /** ============================================================================
  210. * @n@b CACHE_getMemRegionInfo
  211. *
  212. * @b Description
  213. * @n This function is used to get memory region information.
  214. *
  215. * @b Arguments
  216. @verbatim
  217. mar Memory region for which the information is required.
  218. pcx Is address cacheable in external cache (MSMC)
  219. pfx Is address prefetchable
  220. @endverbatim
  221. *
  222. * <b> Return Value </b>
  223. * @n None
  224. *
  225. * <b> Pre Condition </b>
  226. * @n None
  227. *
  228. * <b> Post Condition </b>
  229. * @n None
  230. *
  231. * @b Reads
  232. * @n CGEM_MAR0_PCX, CGEM_MAR0_PFX
  233. *
  234. * @b Example
  235. * @verbatim
  236. Uint8 pcx;
  237. Uint8 pfx;
  238. // Get the memory region information for 20
  239. CACHE_getMemRegionInfo (20, &pcx, &pfx);
  240. @endverbatim
  241. * =============================================================================
  242. */
  243. static inline void CACHE_getMemRegionInfo (Uint8 mar, Uint8* pcx, Uint8* pfx);/* for misra warnings*/
  244. static inline void CACHE_getMemRegionInfo (Uint8 mar, Uint8* pcx, Uint8* pfx)
  245. {
  246. Uint32 value = hCache->MAR[mar];
  247. *pcx = (Uint8)CSL_FEXT (value, CGEM_MAR0_PCX);
  248. *pfx = (Uint8)CSL_FEXT (value, CGEM_MAR0_PFX);
  249. }
  250. /** ============================================================================
  251. * @n@b CACHE_setMemRegionInfo
  252. *
  253. * @b Description
  254. * @n This function is used to set memory region information.
  255. *
  256. * @b Arguments
  257. @verbatim
  258. mar Memory region for which the information is to be configured.
  259. pcx Is address cacheable in external cache (MSMC)
  260. pfx Is address prefetchable
  261. @endverbatim
  262. *
  263. * <b> Return Value </b>
  264. * @n None
  265. *
  266. * <b> Pre Condition </b>
  267. * @n None
  268. *
  269. * <b> Post Condition </b>
  270. * @n None
  271. *
  272. * @b Writes
  273. * @n CGEM_MAR0_PCX, CGEM_MAR0_PFX
  274. *
  275. * @b Example
  276. * @verbatim
  277. Uint8 pcx;
  278. Uint8 pfx;
  279. // Get the memory region information for 20
  280. CACHE_getMemRegionInfo (20, &pcx, &pfx);
  281. ...
  282. // Ensure Memory Region 20 is not prefetchable.
  283. CACHE_setMemRegionInfo(20, pcx, 0);
  284. @endverbatim
  285. * =============================================================================
  286. */
  287. static inline void CACHE_setMemRegionInfo (Uint8 mar, Uint8 pcx, Uint8 pfx);/* for misra warnings*/
  288. static inline void CACHE_setMemRegionInfo (Uint8 mar, Uint8 pcx, Uint8 pfx)
  289. {
  290. CSL_FINS (hCache->MAR[mar], CGEM_MAR0_PCX, (Uint32)pcx);
  291. CSL_FINS (hCache->MAR[mar], CGEM_MAR0_PFX, (Uint32)pfx);
  292. }
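/* Usage sketch: a read-modify-write of a region's attributes so that only the
 * prefetch setting changes while the external-cacheability bit is preserved.
 * Region 20 is an assumed example value.
 *
 * @verbatim
     Uint8 pcx, pfx;
     // Read the current attributes of region 20, then enable prefetching
     CACHE_getMemRegionInfo (20, &pcx, &pfx);
     CACHE_setMemRegionInfo (20, pcx, 1);
   @endverbatim
 */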
  293. /** ============================================================================
  294. * @n@b CACHE_setL1DSize
  295. *
  296. * @b Description
  297. * @n This function is used to set the L1 Data Cache Size.
  298. *
  299. * @b Arguments
  300. @verbatim
  301. newSize Cache Size to be configured.
  302. @endverbatim
  303. *
  304. * <b> Return Value </b>
  305. * @n None
  306. *
  307. * <b> Pre Condition </b>
  308. * @n None
  309. *
  310. * <b> Post Condition </b>
  311. * @n None
  312. *
  313. * @b Writes
  314. * @n CGEM_L1DCFG_L1DMODE
  315. *
  316. * @b Example
  317. * @verbatim
  318. CACHE_setL1DSize(1); // Configure 4K Cache Size
  319. @endverbatim
  320. * =============================================================================
  321. */
  322. static inline void CACHE_setL1DSize (CACHE_L1Size newSize);/* for misra warnings*/
  323. static inline CACHE_L1Size CACHE_getL1DSize (void);/* for misra warnings*/
  324. static inline void CACHE_setL1DSize (CACHE_L1Size newSize)
  325. {
  326. CSL_FINS (hCache->L1DCFG, CGEM_L1DCFG_L1DMODE, newSize);
  327. /* Read back L1DCFG. This stalls the DSP until the mode change completes */
  328. CACHE_getL1DSize();
  329. }
  330. /** ============================================================================
  331. * @n@b CACHE_getL1DSize
  332. *
  333. * @b Description
  334. * @n This function is used to get the L1 Data Cache Size.
  335. *
  336. * @b Arguments
  337. * @n None
  338. *
  339. * <b> Return Value </b>
  340. * @n CACHE_L1Size - Current size of the L1 Data Cache
  341. *
  342. * <b> Pre Condition </b>
  343. * @n None
  344. *
  345. * <b> Post Condition </b>
  346. * @n None
  347. *
  348. * @b Reads
  349. * @n CGEM_L1DCFG_L1DMODE
  350. *
  351. * @b Example
  352. * @verbatim
  353. CACHE_L1Size cacheSize;
  354. cacheSize = CACHE_getL1DSize();
  355. @endverbatim
  356. * =============================================================================
  357. */
  358. static inline CACHE_L1Size CACHE_getL1DSize (void)
  359. {
  360. return (CACHE_L1Size)CSL_FEXT (hCache->L1DCFG, CGEM_L1DCFG_L1DMODE);
  361. }
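/* Usage sketch: selecting a smaller L1D cache at startup so part of L1D can be
 * used as addressable RAM, then reading the setting back. CACHE_L1_16KCACHE is
 * assumed to be defined in csl_cache.h alongside the other CACHE_L1Size values.
 *
 * @verbatim
     CACHE_L1Size current;
     CACHE_setL1DSize (CACHE_L1_16KCACHE);
     current = CACHE_getL1DSize ();
   @endverbatim
 */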
  362. /** ============================================================================
  363. * @n@b CACHE_freezeL1D
  364. *
  365. * @b Description
  366. * @n This function is used to freeze the L1D cache.
  367. *
  368. * @b Arguments
  369. * @n None
  370. *
  371. * <b> Return Value </b>
  372. * @n None
  373. *
  374. * <b> Pre Condition </b>
  375. * @n None
  376. *
  377. * <b> Post Condition </b>
  378. * @n None
  379. *
  380. * @b Writes
  381. * @n CGEM_L1DCC_OPER=1
  382. *
  383. * @b Example
  384. * @verbatim
  385. CACHE_freezeL1D();
  386. @endverbatim
  387. * =============================================================================
  388. */
  389. static inline void CACHE_freezeL1D(void);/* for misra warnings*/
  390. static inline void CACHE_freezeL1D(void)
  391. {
  392. /* Set the Freeze Mode Enabled bit. */
  393. CSL_FINS (hCache->L1DCC, CGEM_L1DCC_OPER, (uint32_t)1U);
  394. }
  395. /** ============================================================================
  396. * @n@b CACHE_unfreezeL1D
  397. *
  398. * @b Description
  399. * @n This function is used to unfreeze the L1D cache.
  400. *
  401. * @b Arguments
  402. * @n None
  403. *
  404. * <b> Return Value </b>
  405. * @n None
  406. *
  407. * <b> Pre Condition </b>
  408. * @n None
  409. *
  410. * <b> Post Condition </b>
  411. * @n None
  412. *
  413. * @b Writes
  414. * @n CGEM_L1DCC_OPER=0
  415. *
  416. * @b Example
  417. * @verbatim
  418. CACHE_unfreezeL1D();
  419. @endverbatim
  420. * =============================================================================
  421. */
  422. static inline void CACHE_unfreezeL1D(void);/* for misra warnings*/
  423. static inline void CACHE_unfreezeL1D(void)
  424. {
  425. /* Reset the Freeze Mode Enabled bit. */
  426. CSL_FINS (hCache->L1DCC, CGEM_L1DCC_OPER, (uint32_t)0);
  427. }
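/* Usage sketch: freezing L1D around a code section whose accesses should not
 * evict the currently cached working set, then restoring normal allocation.
 *
 * @verbatim
     CACHE_freezeL1D ();
     // ... access infrequently used data without disturbing the cached set ...
     CACHE_unfreezeL1D ();
   @endverbatim
 */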
  428. /** ============================================================================
  429. * @n@b CACHE_getPrevL1DMode
  430. *
  431. * @b Description
  432. * @n This function is used to get the previous operating state of the L1D cache
  433. *
  434. * @b Arguments
  435. * @n None
  436. *
  437. * <b> Return Value </b>
  438. * @n Uint32 - Previous L1D operating mode (value of CGEM_L1DCC_POPER)
  439. *
  440. * <b> Pre Condition </b>
  441. * @n None
  442. *
  443. * <b> Post Condition </b>
  444. * @n None
  445. *
  446. * @b Reads
  447. * @n CGEM_L1DCC_POPER
  448. *
  449. * @b Example
  450. * @verbatim
  451. Uint32 prev;
  452. prev = CACHE_getPrevL1DMode();
  453. @endverbatim
  454. * =============================================================================
  455. */
  456. static inline Uint32 CACHE_getPrevL1DMode(void);/* for misra warnings*/
  457. static inline Uint32 CACHE_getPrevL1DMode(void)
  458. {
  459. return CSL_FEXT (hCache->L1DCC, CGEM_L1DCC_POPER);
  460. }
  461. /** ============================================================================
  462. * @n@b CACHE_invAllL1dWait
  463. *
  464. * @b Description
  465. * @n This function is used to wait for the L1D global invalidate operation
  466. * to complete. This API should be used only if the CACHE_invAllL1d was called
  467. * with the CACHE_NOWAIT argument.
  468. *
  469. * @b Arguments
  470. * @n None
  471. *
  472. * <b> Return Value </b>
  473. * @n None
  474. *
  475. * <b> Pre Condition </b>
  476. * @n @a CACHE_invAllL1d(wait=CACHE_NOWAIT) must be called.
  477. *
  478. * <b> Post Condition </b>
  479. * @n None
  480. *
  481. * @b Reads
  482. * @n CGEM_L1DINV_I=0
  483. *
  484. * @b Example
  485. * @verbatim
  486. CACHE_invAllL1d(CACHE_NOWAIT); // Invalidate the L1D cache
  487. ...
  488. CACHE_invAllL1dWait(); // Wait for the invalidate operation to complete.
  489. @endverbatim
  490. * =============================================================================
  491. */
  492. static inline void CACHE_invAllL1dWait (void);/* for misra warnings*/
  493. static inline void CACHE_invAllL1dWait (void)
  494. {
  495. /* Wait for the Invalidate operation to complete. */
  496. while (CSL_FEXT(hCache->L1DINV, CGEM_L1DINV_I) == (Uint32)1U) {}
  497. }
  498. /** ============================================================================
  499. * @n@b CACHE_invAllL1d
  500. *
  501. * @b Description
  502. * @n This function is used to globally invalidate the L1D cache.
  503. *
  504. * @b Arguments
  505. @verbatim
  506. wait Indicates if the call should block or not.
  507. @endverbatim
  508. *
  509. * <b> Return Value </b>
  510. * @n None
  511. *
  512. * <b> Pre Condition </b>
  513. * @n None
  514. *
  515. * <b> Post Condition </b>
  516. * @n The L1D Cache is being invalidated.
  517. *
  518. * @b Writes
  519. * @n CGEM_L1DINV_I=1
  520. *
  521. * @b Example
  522. * @verbatim
  523. CACHE_invAllL1d(CACHE_WAIT); // Invalidate the L1D cache
  524. @endverbatim
  525. * =============================================================================
  526. */
  527. static inline void CACHE_invAllL1d (CACHE_Wait wait);/* for misra warnings*/
  528. static inline void CACHE_invAllL1d (CACHE_Wait wait)
  529. {
  530. uint32_t gie, advisory6;
  531. if ( (wait == CACHE_WAIT ) ||
  532. (wait == CACHE_FENCE_WAIT) )
  533. {
  534. advisory6 = (uint32_t)1U;
  535. }
  536. else
  537. {
  538. advisory6 = 0;
  539. }
  540. if ( advisory6 )
  541. {
  542. /* disable the interrupts */
  543. gie = _disable_interrupts ();
  544. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  545. CSL_XMC_invalidatePrefetchBuffer();
  546. #endif
  547. }
  548. /* Invalidate the Cache Line. */
  549. CSL_FINS (hCache->L1DINV, CGEM_L1DINV_I, (uint32_t)1U);
  550. /* Determine if we need to wait for the operation to complete. */
  551. if (wait)
  552. {
  553. CACHE_invAllL1dWait();
  554. }
  555. if (advisory6)
  556. {
  557. CACHE_AsmNop();
  558. _restore_interrupts (gie);
  559. }
  560. }
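/* Usage sketch: starting a global L1D invalidate without blocking, overlapping
 * unrelated work, then waiting for completion before consuming the freshly
 * invalidated data.
 *
 * @verbatim
     CACHE_invAllL1d (CACHE_NOWAIT);
     // ... other setup work that does not depend on the invalidated data ...
     CACHE_invAllL1dWait ();
   @endverbatim
 */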
  561. /** ============================================================================
  562. * @n@b CACHE_wbAllL1dWait
  563. *
  564. * @b Description
  565. * @n This function is used to wait for the L1D writeback operation
  566. * to complete. This API should be used only if the CACHE_wbAllL1d was called
  567. * with the CACHE_NOWAIT argument.
  568. *
  569. * @b Arguments
  570. * @n None
  571. *
  572. * <b> Return Value </b>
  573. * @n None
  574. *
  575. * <b> Pre Condition </b>
  576. * @n @a CACHE_wbAllL1d(wait=CACHE_NOWAIT) must be called.
  577. *
  578. * <b> Post Condition </b>
  579. * @n The L1D Dirty lines are written back
  580. *
  581. * @b Reads
  582. * @n CGEM_L1DWB_C=0
  583. *
  584. * @b Example
  585. * @verbatim
  586. CACHE_wbAllL1d(CACHE_NOWAIT); // Writeback the L1D cache
  587. ...
  588. CACHE_wbAllL1dWait(); // Wait for the writeback operation to complete.
  589. @endverbatim
  590. * =============================================================================
  591. */
  592. static inline void CACHE_wbAllL1dWait (void);/* for misra warnings*/
  593. static inline void CACHE_wbAllL1dWait (void)
  594. {
  595. /* Wait for the Writeback operation to complete. */
  596. while (CSL_FEXT(hCache->L1DWB, CGEM_L1DWB_C) == (uint32_t)1U) {}
  597. }
  598. /** ============================================================================
  599. * @n@b CACHE_wbAllL1d
  600. *
  601. * @b Description
  602. * @n This function is used to writeback the dirty lines of the L1D Cache
  603. *
  604. * @b Arguments
  605. @verbatim
  606. wait Indicates if the call should block or not.
  607. @endverbatim
  608. *
  609. * <b> Return Value </b>
  610. * @n None
  611. *
  612. * <b> Pre Condition </b>
  613. * @n None
  614. *
  615. * <b> Post Condition </b>
  616. * @n The dirty lines of the L1D Cache are being written back
  617. *
  618. * @b Writes
  619. * @n CGEM_L1DWB_C=1
  620. *
  621. * @b Example
  622. * @verbatim
  623. CACHE_wbAllL1d(CACHE_WAIT); // Writeback the Dirty Lines of the L1D cache
  624. @endverbatim
  625. * =============================================================================
  626. */
  627. static inline void CACHE_wbAllL1d (CACHE_Wait wait);/* for misra warnings*/
  628. static inline void CACHE_wbAllL1d (CACHE_Wait wait)
  629. {
  630. uint32_t gie, advisory6;
  631. if ( (wait == CACHE_WAIT ) ||
  632. (wait == CACHE_FENCE_WAIT) )
  633. {
  634. advisory6 = (uint32_t)1U;
  635. }
  636. else
  637. {
  638. advisory6 = 0;
  639. }
  640. if ( advisory6 )
  641. {
  642. /* disable the interrupts */
  643. gie = _disable_interrupts ();
  644. }
  645. /* Writeback the Cache Line. */
  646. CSL_FINS (hCache->L1DWB, CGEM_L1DWB_C, (uint32_t)1U);
  647. /* Determine if we need to wait for the operation to complete. */
  648. if (wait)
  649. {
  650. CACHE_wbAllL1dWait();
  651. }
  652. if (advisory6)
  653. {
  654. CACHE_AsmNop();
  655. _restore_interrupts (gie);
  656. }
  657. }
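/* Usage sketch: flushing all dirty L1D lines without blocking, overlapping
 * other work with the writeback, then waiting before handing the memory over
 * to another master.
 *
 * @verbatim
     CACHE_wbAllL1d (CACHE_NOWAIT);
     // ... overlap other work with the writeback ...
     CACHE_wbAllL1dWait ();
   @endverbatim
 */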
  658. /** ============================================================================
  659. * @n@b CACHE_wbInvAllL1dWait
  660. *
  661. * @b Description
  662. * @n This function is used to wait for the L1D writeback invalidate operation
  663. * to complete. This API should be used only if the CACHE_wbInvAllL1d was called
  664. * with the CACHE_NOWAIT argument.
  665. *
  666. * @b Arguments
  667. * @n None
  668. *
  669. * <b> Return Value </b>
  670. * @n None
  671. *
  672. * <b> Pre Condition </b>
  673. * @n @a CACHE_wbInvAllL1d(wait=CACHE_NOWAIT) must be called.
  674. *
  675. * <b> Post Condition </b>
  676. * @n The L1D Dirty lines are written back
  677. *
  678. * @b Reads
  679. * @n CGEM_L1DWBINV_C=0
  680. *
  681. * @b Example
  682. * @verbatim
  683. CACHE_wbInvAllL1d(CACHE_NOWAIT); // Invalidate/Writeback the L1D cache
  684. ...
  685. CACHE_wbInvAllL1dWait(); // Wait for the Invalidate/Writeback operation to complete.
  686. @endverbatim
  687. * =============================================================================
  688. */
  689. static inline void CACHE_wbInvAllL1dWait (void);/* for misra warnings*/
  690. static inline void CACHE_wbInvAllL1dWait (void)
  691. {
  692. /* Wait for the Invalidate Writeback operation to complete. */
  693. while (CSL_FEXT(hCache->L1DWBINV, CGEM_L1DWBINV_C) == (uint32_t)1U) {}
  694. }
  695. /** ============================================================================
  696. * @n@b CACHE_wbInvAllL1d
  697. *
  698. * @b Description
  699. * @n This function is used to invalidate and writeback the dirty lines of the
  700. * L1D Cache
  701. *
  702. * @b Arguments
  703. @verbatim
  704. wait Indicates if the call should block or not.
  705. @endverbatim
  706. *
  707. * <b> Return Value </b>
  708. * @n None
  709. *
  710. * <b> Pre Condition </b>
  711. * @n None
  712. *
  713. * <b> Post Condition </b>
  714. * @n Invalidates and Writebacks the dirty lines of the L1D Cache
  715. *
  716. * @b Writes
  717. * @n CGEM_L1DWBINV_C=1
  718. *
  719. * @b Example
  720. * @verbatim
  721. CACHE_wbInvAllL1d(CACHE_WAIT);
  722. @endverbatim
  723. * =============================================================================
  724. */
  725. static inline void CACHE_wbInvAllL1d (CACHE_Wait wait);/* for misra warnings*/
  726. static inline void CACHE_wbInvAllL1d (CACHE_Wait wait)
  727. {
  728. uint32_t gie, advisory6;
  729. if ( (wait == CACHE_WAIT ) ||
  730. (wait == CACHE_FENCE_WAIT) )
  731. {
  732. advisory6 = (uint32_t)1U;
  733. }
  734. else
  735. {
  736. advisory6 = 0;
  737. }
  738. if ( advisory6 )
  739. {
  740. /* disable the interrupts */
  741. gie = _disable_interrupts ();
  742. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  743. CSL_XMC_invalidatePrefetchBuffer();
  744. #endif
  745. }
  746. /* Invalidate and writeback the cache line. */
  747. CSL_FINS (hCache->L1DWBINV, CGEM_L1DWBINV_C, (uint32_t)1U);
  748. /* Determine if we need to wait for the operation to complete. */
  749. if (wait)
  750. {
  751. CACHE_wbInvAllL1dWait();
  752. }
  753. if (advisory6)
  754. {
  755. CACHE_AsmNop();
  756. _restore_interrupts (gie);
  757. }
  758. }
  759. /** ============================================================================
  760. * @n@b CACHE_invL1dWait
  761. *
  762. * @b Description
  763. * @n This function is used to wait for the L1D invalidate block operation to
  764. * complete. This API should be used only if the CACHE_invL1d was called
  765. * with the CACHE_NOWAIT argument.
  766. *
  767. * @b Arguments
  768. * @n None
  769. *
  770. * <b> Return Value </b>
  771. * @n None
  772. *
  773. * <b> Pre Condition </b>
  774. * @n @a CACHE_invL1d(wait=CACHE_NOWAIT) must be called.
  775. *
  776. * <b> Post Condition </b>
  777. * @n The L1D Block Cache is invalidated.
  778. *
  779. * @b Reads
  780. * @n CGEM_L1DIWC_WC=0
  781. *
  782. * @b Example
  783. * @verbatim
  784. CACHE_invL1d((void *)ptr_buffer, 128, CACHE_NOWAIT);
  785. ...
  786. CACHE_invL1dWait(); // Wait for the Invalidate/Writeback operation to complete.
  787. @endverbatim
  788. * =============================================================================
  789. */
  790. static inline void CACHE_invL1dWait (void);/* for misra warnings*/
  791. static inline void CACHE_invL1dWait (void)
  792. {
  793. /* Wait for the Invalidate operation to complete. */
  794. while (CSL_FEXT(hCache->L1DIWC, CGEM_L1DIWC_WC) != 0) {}
  795. }
  796. /** ============================================================================
  797. * @n@b CACHE_invL1d
  798. *
  799. * @b Description
  800. * @n This function is used to invalidate a block in the L1D Cache. Although
  801. * the block size can be specified in the number of bytes, the cache
  802. * controller operates on whole cache lines. To prevent unintended behavior
  803. * "blockPtr" should be aligned on the cache line size and "byteCnt" should
  804. * be a multiple of the cache line size.
  805. *
  806. * @b Arguments
  807. @verbatim
  808. blockPtr Address of the block which is to be invalidated
  809. byteCnt Size of the block to be invalidated.
  810. wait Indicates if the call should block or not.
  811. @endverbatim
  812. *
  813. * <b> Return Value </b>
  814. * @n None
  815. *
  816. * <b> Pre Condition </b>
  817. * @n None
  818. *
  819. * <b> Post Condition </b>
  820. * @n The contents of the blockPtr are being invalidated
  821. *
  822. * @b Writes
  823. * @n CGEM_L1DIBAR_ADDR,CGEM_L1DIWC_WC
  824. *
  825. * @b Example
  826. * @verbatim
  827. Uint8* ptr_buffer;
  828. // Invalidate 128 bytes of the buffer.
  829. CACHE_invL1d((void *)ptr_buffer, 128, CACHE_WAIT);
  830. @endverbatim
  831. * =============================================================================
  832. */
  833. static inline void CACHE_invL1d
  834. (
  835. const void* blockPtr,
  836. Uint32 byteCnt,
  837. CACHE_Wait wait
  838. ); /*for misra warnings*/
  839. static inline void CACHE_invL1d
  840. (
  841. const void* blockPtr,
  842. Uint32 byteCnt,
  843. CACHE_Wait wait
  844. )
  845. {
  846. uint32_t gie, advisory6;
  847. if ( (wait == CACHE_WAIT ) ||
  848. (wait == CACHE_FENCE_WAIT) )
  849. {
  850. advisory6 = (uint32_t)1U;
  851. }
  852. else
  853. {
  854. advisory6 = 0;
  855. }
  856. if ( advisory6 )
  857. {
  858. /* disable the interrupts */
  859. gie = _disable_interrupts ();
  860. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  861. CSL_XMC_invalidatePrefetchBuffer();
  862. #endif
  863. }
  864. /* Setup the block address and length */
  865. hCache->L1DIBAR = CSL_FMK(CGEM_L1DIBAR_ADDR, (Uint32)blockPtr);
  866. hCache->L1DIWC = CSL_FMK(CGEM_L1DIWC_WC, (Uint32)((byteCnt+((uint32_t)3U))>>2));
  867. /* Determine if we need to wait for the operation to complete. */
  868. if ( (wait == CACHE_WAIT) ||
  869. (wait == CACHE_ONLY_WAIT) )
  870. {
  871. CACHE_invL1dWait();
  872. }
  873. else
  874. {
  875. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  876. if ( (wait == CACHE_FENCE_WAIT) ||
  877. (wait == CACHE_FENCE_ONLY_WAIT ) )
  878. {
  879. _mfence();
  880. /* Add another mfence to address single mfence issue
  881. * Under very particular circumstances, MFENCE may allow
  882. * the transaction after the MFENCE to proceed before
  883. * the preceding STORE completes */
  884. _mfence();
  885. }
  886. #endif
  887. }
  888. if (advisory6)
  889. {
  890. CACHE_AsmNop();
  891. _restore_interrupts (gie);
  892. }
  893. }
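/* Usage sketch: invalidating a DMA receive buffer before the CPU reads it so
 * stale cached copies are discarded. The buffer name, its 128-byte alignment
 * and its 256-byte size are assumptions for this example; blockPtr and byteCnt
 * should respect the cache line size as described above.
 *
 * @verbatim
     #pragma DATA_ALIGN (rxBuffer, 128)
     Uint8 rxBuffer[256];
     // DMA has just written rxBuffer: drop any stale cached lines
     CACHE_invL1d ((void *)rxBuffer, 256, CACHE_FENCE_WAIT);
   @endverbatim
 */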
  894. /** ============================================================================
  895. * @n@b CACHE_wbL1dWait
  896. *
  897. * @b Description
  898. * @n This function is used to wait for the L1D writeback block operation to
  899. * complete. This API should be used only if the CACHE_wbL1d was called
  900. * with the CACHE_NOWAIT argument.
  901. *
  902. * @b Arguments
  903. * @n None
  904. *
  905. * <b> Return Value </b>
  906. * @n None
  907. *
  908. * <b> Pre Condition </b>
  909. * @n @a CACHE_wbL1d(wait=CACHE_NOWAIT) must be called.
  910. *
  911. * <b> Post Condition </b>
  912. * @n The dirty lines of the L1D Block Cache have been written back.
  913. *
  914. * @b Reads
  915. * @n CGEM_L1DWWC_WC=0
  916. *
  917. * @b Example
  918. * @verbatim
  919. CACHE_wbL1d((void *)ptr_buffer, 128, CACHE_NOWAIT);
  920. ...
  921. CACHE_wbL1dWait(); // Wait for the writeback operation to complete.
  922. @endverbatim
  923. * =============================================================================
  924. */
  925. static inline void CACHE_wbL1dWait (void);/* for misra warnings*/
  926. static inline void CACHE_wbL1dWait (void)
  927. {
  928. /* Wait for the Writeback operation to complete. */
  929. while (CSL_FEXT(hCache->L1DWWC, CGEM_L1DWWC_WC) != 0) {}
  930. }
  931. /** ============================================================================
  932. * @n@b CACHE_wbL1d
  933. *
  934. * @b Description
  935. * @n This function is used to writeback the dirty lines of the block address.
  936. * Although the block size can be specified in the number of bytes, the cache
  937. * controller operates on whole cache lines. To prevent unintended behavior
  938. * "blockPtr" should be aligned on the cache line size and "byteCnt" should
  939. * be a multiple of the cache line size.
  940. *
  941. * @b Arguments
  942. @verbatim
  943. blockPtr Address of the block which is to be written back
  944. byteCnt Size of the block to be written back.
  945. wait Indicates if the call should block or not.
  946. @endverbatim
  947. *
  948. * <b> Return Value </b>
  949. * @n None
  950. *
  951. * <b> Pre Condition </b>
  952. * @n None
  953. *
  954. * <b> Post Condition </b>
  955. * @n The contents of the blockPtr are being written back
  956. *
  957. * @b Writes
  958. * @n CGEM_L1DWBAR_ADDR,CGEM_L1DWWC_WC
  959. *
  960. * @b Example
  961. * @verbatim
  962. Uint8* ptr_buffer;
  963. // Writeback 128 bytes of the buffer.
  964. CACHE_wbL1d((void *)ptr_buffer, 128, CACHE_WAIT);
  965. @endverbatim
  966. * =============================================================================
  967. */
  968. static inline void CACHE_wbL1d
  969. (
  970. const void* blockPtr,
  971. Uint32 byteCnt,
  972. CACHE_Wait wait
  973. ); /*for misra warnings*/
  974. static inline void CACHE_wbL1d
  975. (
  976. const void* blockPtr,
  977. Uint32 byteCnt,
  978. CACHE_Wait wait
  979. )
  980. {
  981. uint32_t gie, advisory6;
  982. if ( (wait == CACHE_WAIT ) ||
  983. (wait == CACHE_FENCE_WAIT) )
  984. {
  985. advisory6 = (uint32_t)1U;
  986. }
  987. else
  988. {
  989. advisory6 = 0;
  990. }
  991. if ( advisory6 )
  992. {
  993. /* disable the interrupts */
  994. gie = _disable_interrupts ();
  995. }
  996. /* Setup the block address and length */
  997. hCache->L1DWBAR = CSL_FMK (CGEM_L1DWBAR_ADDR, (Uint32)blockPtr);
  998. hCache->L1DWWC = CSL_FMK (CGEM_L1DWWC_WC, (Uint32)((byteCnt+((uint32_t)3U))>>2));
  999. /* Determine if we need to wait for the operation to complete. */
  1000. if ( (wait == CACHE_WAIT) ||
  1001. (wait == CACHE_ONLY_WAIT ) )
  1002. {
  1003. CACHE_wbL1dWait();
  1004. }
  1005. else
  1006. {
  1007. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  1008. if ( (wait == CACHE_FENCE_WAIT) ||
  1009. (wait == CACHE_FENCE_ONLY_WAIT) )
  1010. {
  1011. _mfence();
  1012. /* Add another mfence to address single mfence issue
  1013. * Under very particular circumstances, MFENCE may allow
  1014. * the transaction after the MFENCE to proceed before
  1015. * the preceding STORE completes */
  1016. _mfence();
  1017. }
  1018. #endif
  1019. }
  1020. if (advisory6)
  1021. {
  1022. CACHE_AsmNop();
  1023. _restore_interrupts (gie);
  1024. }
  1025. }
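/* Usage sketch: writing back a transmit buffer so that a DMA engine reading
 * from memory sees the CPU's latest data. The buffer name, alignment and size
 * are assumptions for this example.
 *
 * @verbatim
     #pragma DATA_ALIGN (txBuffer, 128)
     Uint8 txBuffer[256];
     // CPU has filled txBuffer: push the dirty lines out before starting DMA
     CACHE_wbL1d ((void *)txBuffer, 256, CACHE_FENCE_WAIT);
   @endverbatim
 */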
  1026. /** ============================================================================
  1027. * @n@b CACHE_wbInvL1dWait
  1028. *
  1029. * @b Description
  1030. * @n This function is used to wait for the L1D invalidate/writeback block
  1031. * operation to complete. This API should be used only if the CACHE_wbInvL1d
  1032. * was called with the CACHE_NOWAIT argument.
  1033. *
  1034. * @b Arguments
  1035. * @n None
  1036. *
  1037. * <b> Return Value </b>
  1038. * @n None
  1039. *
  1040. * <b> Pre Condition </b>
  1041. * @n @a CACHE_wbInvL1d(wait=CACHE_NOWAIT) must be called.
  1042. *
  1043. * <b> Post Condition </b>
  1044. * @n The dirty lines of the L1D Block Cache have been written back and the cache
  1045. * contents pointed to by the block address are also invalidated.
  1046. *
  1047. * @b Reads
  1048. * @n CGEM_L1DWIWC_WC=0
  1049. *
  1050. * @b Example
  1051. * @verbatim
  1052. CACHE_wbInvL1d((void *)ptr_buffer, 128, CACHE_NOWAIT);
  1053. ...
  1054. CACHE_wbInvL1dWait(); // Wait for the operation to complete.
  1055. @endverbatim
  1056. * =============================================================================
  1057. */
  1058. static inline void CACHE_wbInvL1dWait (void);/* for misra warnings*/
  1059. static inline void CACHE_wbInvL1dWait (void)
  1060. {
  1061. /* Wait for the Block Writeback/Invalidate operation to complete. */
  1062. while (CSL_FEXT(hCache->L1DWIWC, CGEM_L1DWIWC_WC) != 0) {}
  1063. }
  1064. /** ============================================================================
  1065. * @n@b CACHE_wbInvL1d
  1066. *
  1067. * @b Description
  1068. * @n This function is used to invalidate and writeback the dirty lines
  1069. * of the block address. Although the block size can be specified in
  1070. * the number of bytes, the cache controller operates on whole cache lines.
  1071. * To prevent unintended behavior "blockPtr" should be aligned on the
  1072. * cache line size and "byteCnt" should be a multiple of the cache line size.
  1073. *
  1074. * @b Arguments
  1075. @verbatim
  1076. blockPtr Address of the block which is to be invalidated/written back
  1077. byteCnt Size of the block to be invalidated/written back.
  1078. wait Indicates if the call should block or not.
  1079. @endverbatim
  1080. *
  1081. * <b> Return Value </b>
  1082. * @n None
  1083. *
  1084. * <b> Pre Condition </b>
  1085. * @n None
  1086. *
  1087. * <b> Post Condition </b>
  1088. * @n The contents of the blockPtr are being invalidated and the dirty lines are
  1089. * written back
  1090. *
  1091. * @b Writes
  1092. * @n CGEM_L1DWIBAR_ADDR,CGEM_L1DWIWC_WC
  1093. *
  1094. * @b Example
  1095. * @verbatim
  1096. Uint8* ptr_buffer;
  1097. // Writeback/Invalidate 128 bytes of the buffer.
  1098. CACHE_wbInvL1d((void *)ptr_buffer, 128, CACHE_WAIT);
  1099. @endverbatim
  1100. * =============================================================================
  1101. */
  1102. static inline void CACHE_wbInvL1d
  1103. (
  1104. const void* blockPtr,
  1105. Uint32 byteCnt,
  1106. CACHE_Wait wait
  1107. ); /*for misra warnings*/
  1108. static inline void CACHE_wbInvL1d
  1109. (
  1110. const void* blockPtr,
  1111. Uint32 byteCnt,
  1112. CACHE_Wait wait
  1113. )
  1114. {
  1115. uint32_t gie, advisory6;
  1116. if ( (wait == CACHE_WAIT ) ||
  1117. (wait == CACHE_FENCE_WAIT) )
  1118. {
  1119. advisory6 = (uint32_t)1U;
  1120. }
  1121. else
  1122. {
  1123. advisory6 = 0;
  1124. }
  1125. if ( advisory6 )
  1126. {
  1127. /* disable the interrupts */
  1128. gie = _disable_interrupts ();
  1129. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  1130. CSL_XMC_invalidatePrefetchBuffer();
  1131. #endif
  1132. }
  1133. /* Setup the block address and length */
  1134. hCache->L1DWIBAR = CSL_FMK(CGEM_L1DWIBAR_ADDR, (Uint32)blockPtr);
  1135. hCache->L1DWIWC = CSL_FMK(CGEM_L1DWIWC_WC, (Uint32)((byteCnt+((uint32_t)3U))>>2));
  1136. /* Determine if we need to wait for the operation to complete. */
  1137. if ( (wait == CACHE_WAIT) ||
  1138. (wait == CACHE_ONLY_WAIT) )
  1139. {
  1140. CACHE_wbInvL1dWait();
  1141. }
  1142. else
  1143. {
  1144. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  1145. if ( (wait == CACHE_FENCE_WAIT) ||
  1146. (wait == CACHE_FENCE_ONLY_WAIT) )
  1147. {
  1148. _mfence();
  1149. /* Add another mfence to address single mfence issue
  1150. * Under very particular circumstances, MFENCE may allow
  1151. * the transaction after the MFENCE to proceed before
  1152. * the preceding STORE completes */
  1153. _mfence();
  1154. }
  1155. #endif
  1156. }
  1157. if (advisory6)
  1158. {
  1159. CACHE_AsmNop();
  1160. _restore_interrupts (gie);
  1161. }
  1162. }
  1163. /** ============================================================================
  1164. * @n@b CACHE_setL1PSize
  1165. *
  1166. * @b Description
  1167. * @n This function is used to set the L1P Cache Size.
  1168. *
  1169. * @b Arguments
  1170. @verbatim
  1171. newSize Cache Size to be configured.
  1172. @endverbatim
  1173. *
  1174. * <b> Return Value </b>
  1175. * @n None
  1176. *
  1177. * <b> Pre Condition </b>
  1178. * @n None
  1179. *
  1180. * <b> Post Condition </b>
  1181. * @n None
  1182. *
  1183. * @b Writes
  1184. * @n CGEM_L1PCFG_L1PMODE
  1185. *
  1186. * @b Example
  1187. * @verbatim
  1188. CACHE_setL1PSize(1); // Configure 4K Cache Size
  1189. @endverbatim
  1190. * =============================================================================
  1191. */
  1192. /* for misra warnings*/
  1193. static inline void CACHE_setL1PSize (CACHE_L1Size newSize);
  1194. static inline CACHE_L1Size CACHE_getL1PSize (void);
  1195. static inline void CACHE_setL1PSize (CACHE_L1Size newSize)
  1196. {
  1197. CSL_FINS (hCache->L1PCFG, CGEM_L1PCFG_L1PMODE, newSize);
  1198. /* Read back L1PCFG. This stalls the DSP until the mode change completes */
  1199. CACHE_getL1PSize();
  1200. }
  1201. /** ============================================================================
  1202. * @n@b CACHE_getL1PSize
  1203. *
  1204. * @b Description
  1205. * @n This function is used to get the L1P Cache Size.
  1206. *
  1207. * @b Arguments
  1208. * @n None
  1209. *
  1210. * <b> Return Value </b>
  1211. * @n CACHE_L1Size - Current size of the L1P Cache
  1212. *
  1213. * <b> Pre Condition </b>
  1214. * @n None
  1215. *
  1216. * <b> Post Condition </b>
  1217. * @n None
  1218. *
  1219. * @b Reads
  1220. * @n CGEM_L1PCFG_L1PMODE
  1221. *
  1222. * @b Example
  1223. * @verbatim
  1224. CACHE_L1Size cacheSize;
  1225. cacheSize = CACHE_getL1PSize();
  1226. @endverbatim
  1227. * =============================================================================
  1228. */
  1229. static inline CACHE_L1Size CACHE_getL1PSize (void)
  1230. {
  1231. return (CACHE_L1Size)CSL_FEXT (hCache->L1PCFG, CGEM_L1PCFG_L1PMODE);
  1232. }
  1233. /** ============================================================================
  1234. * @n@b CACHE_freezeL1P
  1235. *
  1236. * @b Description
  1237. * @n This function is used to freeze the L1P cache.
  1238. *
  1239. * @b Arguments
  1240. * @n None
  1241. *
  1242. * <b> Return Value </b>
  1243. * @n None
  1244. *
  1245. * <b> Pre Condition </b>
  1246. * @n None
  1247. *
  1248. * <b> Post Condition </b>
  1249. * @n None
  1250. *
  1251. * @b Writes
  1252. * @n CGEM_L1PCC_OPER=1
  1253. *
  1254. * @b Example
  1255. * @verbatim
  1256. CACHE_freezeL1P();
  1257. @endverbatim
  1258. * =============================================================================
  1259. */
  1260. static inline void CACHE_freezeL1P(void);/* for misra warnings*/
  1261. static inline void CACHE_freezeL1P(void)
  1262. {
  1263. /* Set the Freeze Mode Enabled bit. */
  1264. CSL_FINS (hCache->L1PCC, CGEM_L1PCC_OPER, (uint32_t)1U);
  1265. }
  1266. /** ============================================================================
  1267. * @n@b CACHE_unfreezeL1P
  1268. *
  1269. * @b Description
  1270. * @n This function is used to unfreeze the L1P cache.
  1271. *
  1272. * @b Arguments
  1273. * @n None
  1274. *
  1275. * <b> Return Value </b>
  1276. * @n None
  1277. *
  1278. * <b> Pre Condition </b>
  1279. * @n None
  1280. *
  1281. * <b> Post Condition </b>
  1282. * @n None
  1283. *
  1284. * @b Writes
  1285. * @n CGEM_L1PCC_OPER=0
  1286. *
  1287. * @b Example
  1288. * @verbatim
  1289. CACHE_unfreezeL1P();
  1290. @endverbatim
  1291. * =============================================================================
  1292. */
  1293. static inline void CACHE_unfreezeL1P(void);/* for misra warnings*/
  1294. static inline void CACHE_unfreezeL1P(void)
  1295. {
  1296. /* Reset the Freeze Mode Enabled bit. */
  1297. CSL_FINS (hCache->L1PCC, CGEM_L1PCC_OPER, (uint32_t)0);
  1298. }
  1299. /** ============================================================================
  1300. * @n@b CACHE_getPrevL1PMode
  1301. *
  1302. * @b Description
  1303. * @n This function is used to get the previous operating state of the L1P cache
  1304. *
  1305. * @b Arguments
  1306. * @n None
  1307. *
  1308. * <b> Return Value </b>
  1309. * @n Uint32 - Previous L1P operating mode (value of CGEM_L1PCC_POPER)
  1310. *
  1311. * <b> Pre Condition </b>
  1312. * @n None
  1313. *
  1314. * <b> Post Condition </b>
  1315. * @n None
  1316. *
  1317. * @b Reads
  1318. * @n CGEM_L1PCC_POPER
  1319. *
  1320. * @b Example
  1321. * @verbatim
  1322. Uint32 prev;
  1323. prev = CACHE_getPrevL1PMode();
  1324. @endverbatim
  1325. * =============================================================================
  1326. */
  1327. static inline Uint32 CACHE_getPrevL1PMode(void);/* for misra warnings*/
  1328. static inline Uint32 CACHE_getPrevL1PMode(void)
  1329. {
  1330. return CSL_FEXT (hCache->L1PCC, CGEM_L1PCC_POPER);
  1331. }
  1332. /** ============================================================================
  1333. * @n@b CACHE_invL1pWait
  1334. *
  1335. * @b Description
  1336. * @n This function is used to wait for the L1P invalidate block operation to
  1337. * complete. This API should be used only if the CACHE_invL1p was called
  1338. * with the CACHE_NOWAIT argument.
  1339. *
  1340. * @b Arguments
  1341. * @n None
  1342. *
  1343. * <b> Return Value </b>
  1344. * @n None
  1345. *
  1346. * <b> Pre Condition </b>
  1347. * @n @a CACHE_invL1p(wait=CACHE_NOWAIT) must be called.
  1348. *
  1349. * <b> Post Condition </b>
  1350. * @n The L1P cache block is invalidated.
  1351. *
  1352. * @b Reads
  1353. * @n CGEM_L1PIWC_WC=0
  1354. *
  1355. * @b Example
  1356. * @verbatim
  1357. CACHE_invL1p((void *)&foo, 128, CACHE_NOWAIT);
  1358. ...
  1359. CACHE_invL1pWait(); // Wait for the Invalidate operation to complete.
  1360. @endverbatim
  1361. * =============================================================================
  1362. */
  1363. static inline void CACHE_invL1pWait (void);/* for misra warnings*/
  1364. static inline void CACHE_invL1pWait (void)
  1365. {
  1366. /* Wait for the Invalidate operation to complete. */
  1367. while (CSL_FEXT(hCache->L1PIWC, CGEM_L1PIWC_WC) != 0) {}
  1368. }
  1369. /** ============================================================================
  1370. * @n@b CACHE_invL1p
  1371. *
  1372. * @b Description
  1373. * @n This function is used to invalidate the L1P Cache pointed by the block
  1374. * address. Although the block size can be specified in the number of bytes,
  1375. * the cache controller operates on whole cache lines. To prevent unintended
  1376. * behavior "blockPtr" should be aligned on the cache line size and "byteCnt"
  1377. * should be a multiple of the cache line size.
  1378. *
  1379. * @b Arguments
  1380. @verbatim
  1381. blockPtr Address of the block which is to be invalidated
  1382. byteCnt Size of the block to be invalidated.
  1383. wait Indicates if the call should block or not.
  1384. @endverbatim
  1385. *
  1386. * <b> Return Value </b>
  1387. * @n None
  1388. *
  1389. * <b> Pre Condition </b>
  1390. * @n None
  1391. *
  1392. * <b> Post Condition </b>
  1393. * @n The contents of the blockPtr are being invalidated
  1394. *
  1395. * @b Writes
  1396. * @n CGEM_L1PIBAR_ADDR,CGEM_L1PIWC_WC
  1397. *
  1398. * @b Example
  1399. * @verbatim
  1400. // Invalidate the 128 bytes of the function 'foo'
  1401. CACHE_invL1p((void *)&foo, 128, CACHE_WAIT);
  1402. @endverbatim
  1403. * =============================================================================
  1404. */
  1405. static inline void CACHE_invL1p
  1406. (
  1407. const void* blockPtr,
  1408. Uint32 byteCnt,
  1409. CACHE_Wait wait
  1410. ); /*for misra warnings*/
  1411. static inline void CACHE_invL1p
  1412. (
  1413. const void* blockPtr,
  1414. Uint32 byteCnt,
  1415. CACHE_Wait wait
  1416. )
  1417. {
  1418. /* Setup the block address and length which is to be invalidated */
  1419. hCache->L1PIBAR = CSL_FMK(CGEM_L1PIBAR_ADDR, (Uint32)blockPtr);
  1420. hCache->L1PIWC = CSL_FMK(CGEM_L1PIWC_WC, (Uint32)((byteCnt+((uint32_t)3U))>>2));
  1421. /* Determine if we need to wait for the operation to complete. */
  1422. if (wait == CACHE_WAIT)
  1423. {
  1424. CACHE_invL1pWait();
  1425. }
  1426. else
  1427. {
  1428. #if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
  1429. if (wait == CACHE_FENCE_WAIT)
  1430. {
  1431. _mfence();
  1432. /* Add another mfence to address single mfence issue
  1433. * Under very particular circumstances, MFENCE may allow
  1434. * the transaction after the MFENCE to proceed before
  1435. * the preceding STORE completes */
  1436. _mfence();
  1437. }
  1438. #endif
  1439. }
  1440. }
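/* Usage sketch: invalidating a range of L1P after code has been copied or
 * patched in memory, so the CPU refetches the new instructions. The function
 * name and the 1 KB size are assumptions for this example.
 *
 * @verbatim
     extern void overlayFunc (void);
     // New code for overlayFunc has just been loaded into its run address
     CACHE_invL1p ((void *)&overlayFunc, 1024, CACHE_WAIT);
   @endverbatim
 */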
/** ============================================================================
 *   @n@b CACHE_invAllL1pWait
 *
 *   @b Description
 *   @n This function is used to wait for the L1P invalidate operation to
 *      complete. This API should be used only if the CACHE_invAllL1p was
 *      called with the CACHE_NOWAIT argument.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  @a CACHE_invAllL1p(wait=CACHE_NOWAIT) must be called.
 *
 *   <b> Post Condition </b>
 *   @n  The L1P Cache is invalidated.
 *
 *   @b Reads
 *   @n  CGEM_L1PINV_I=0
 *
 *   @b Example
 *   @verbatim
        CACHE_invAllL1p(CACHE_NOWAIT);
        ...
        CACHE_invAllL1pWait(); // Wait for the Invalidate operation to complete.
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_invAllL1pWait (void); /* for misra warnings */
static inline void CACHE_invAllL1pWait (void)
{
    /* Wait for the Invalidate operation to complete. */
    while (CSL_FEXT(hCache->L1PINV, CGEM_L1PINV_I) == (uint32_t)1U) {}
}
/** ============================================================================
 *   @n@b CACHE_invAllL1p
 *
 *   @b Description
 *   @n This function is used to invalidate the entire L1P Cache
 *
 *   @b Arguments
     @verbatim
        wait          Indicates if the call should block or not.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The entire L1P cache is being invalidated.
 *
 *   @b Writes
 *   @n  CGEM_L1PINV_I=1
 *
 *   @b Example
 *   @verbatim
        CACHE_invAllL1p(CACHE_WAIT);
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_invAllL1p (CACHE_Wait wait); /* for misra warnings */
static inline void CACHE_invAllL1p (CACHE_Wait wait)
{
    /* Invalidate the L1P Cache. */
    CSL_FINS (hCache->L1PINV, CGEM_L1PINV_I, (uint32_t)1U);

    /* Determine if we need to wait for the operation to complete. */
    if (wait)
    {
        CACHE_invAllL1pWait();
    }
}
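/*  Usage sketch (not part of the CSL API): a hedged example of overlapping the
 *  global L1P invalidate with other CPU work and only then waiting for it.
 *  prepare_next_processing_step() is a placeholder for application code.
 *
 *  @verbatim
        // Kick off the global L1P invalidate without blocking.
        CACHE_invAllL1p (CACHE_NOWAIT);

        // Do work that does not depend on freshly fetched program code.
        prepare_next_processing_step ();

        // Ensure the invalidate has completed before relying on re-fetched code.
        CACHE_invAllL1pWait ();
    @endverbatim
 */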
/** ============================================================================
 *   @n@b CACHE_setL2Size
 *
 *   @b Description
 *   @n This function is used to set the new size of the L2 Cache.
 *
 *   @b Arguments
     @verbatim
        newSize       New Size of the L2 Cache to be set.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The L2 Cache is configured to use the new size.
 *
 *   @b Writes
 *   @n  CGEM_L2CFG_L2MODE
 *
 *   @b Example
 *   @verbatim
        CACHE_setL2Size(CACHE_32KCACHE); // Use 32K L2 Cache.
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_setL2Size (CACHE_L2Size newSize); /* for misra warnings */
static inline CACHE_L2Size CACHE_getL2Size (void); /* for misra warnings */
static inline void CACHE_setL2Size (CACHE_L2Size newSize)
{
    /* Set the new L2 cache size. */
    CSL_FINS (hCache->L2CFG, CGEM_L2CFG_L2MODE, newSize);

    /* Read back L2CFG. This stalls the DSP until the mode change completes. */
    CACHE_getL2Size();
}
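/*  Usage sketch (not part of the CSL API): a hedged example of changing the L2
 *  cache size at run time. Writing back and invalidating L2 before shrinking
 *  the cache is shown as a general precaution so that dirty lines are not
 *  lost; whether this step is required for a given device should be confirmed
 *  against the device-specific cache documentation.
 *
 *  @verbatim
        // Flush any dirty L2 lines before the mode change.
        CACHE_wbInvAllL2 (CACHE_WAIT);

        // Switch L2 to a 32K cache; the remaining L2 RAM stays addressable.
        CACHE_setL2Size (CACHE_32KCACHE);

        // Optionally confirm the new configuration.
        if (CACHE_getL2Size () != CACHE_32KCACHE)
        {
            // Handle the error in an application-specific way.
        }
    @endverbatim
 */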
/** ============================================================================
 *   @n@b CACHE_getL2Size
 *
 *   @b Description
 *   @n This function is used to get the L2 cache size.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  CACHE_L2Size
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  None
 *
 *   @b Reads
 *   @n  CGEM_L2CFG_L2MODE
 *
 *   @b Example
 *   @verbatim
        CACHE_L2Size size;
        size = CACHE_getL2Size();
     @endverbatim
 * =============================================================================
 */
static inline CACHE_L2Size CACHE_getL2Size (void)
{
    return (CACHE_L2Size) CSL_FEXT (hCache->L2CFG, CGEM_L2CFG_L2MODE);
}
/** ============================================================================
 *   @n@b CACHE_freezeL2
 *
 *   @b Description
 *   @n This function is used to freeze the L2 Cache
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The L2 Cache is frozen.
 *
 *   @b Example
 *   @verbatim
        CACHE_freezeL2();
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_freezeL2 (void); /* for misra warnings */
static inline void CACHE_freezeL2 (void)
{
    /* The RL File does not define the L2CC bit so we use the RAW macro to
     * configure the corresponding bit. */
    CSL_FINSR(hCache->L2CFG, (uint32_t)3U, (uint32_t)3U, (uint32_t)1U);
}
/** ============================================================================
 *   @n@b CACHE_unfreezeL2
 *
 *   @b Description
 *   @n This function is used to unfreeze the L2 Cache
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The L2 Cache is unfrozen
 *
 *   @b Example
 *   @verbatim
        CACHE_unfreezeL2();
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_unfreezeL2 (void); /* for misra warnings */
static inline void CACHE_unfreezeL2 (void)
{
    /* The RL File does not define the L2CC bit so we use the RAW macro to
     * configure the corresponding bit. */
    CSL_FINSR(hCache->L2CFG, (uint32_t)3U, (uint32_t)3U, (uint32_t)0);
}
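/*  Usage sketch (not part of the CSL API): a hedged example of bracketing a
 *  latency-critical routine with an L2 freeze so that its execution is less
 *  likely to evict previously cached data; while frozen, existing contents are
 *  still used but new allocations are suppressed (confirm the exact freeze
 *  semantics in the device cache documentation). time_critical_routine() is a
 *  placeholder for application code.
 *
 *  @verbatim
        // Stop new allocations into L2 while keeping the current contents valid.
        CACHE_freezeL2 ();

        time_critical_routine ();

        // Resume normal L2 allocation.
        CACHE_unfreezeL2 ();
    @endverbatim
 */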
/** ============================================================================
 *   @n@b CACHE_wbL2Wait
 *
 *   @b Description
 *   @n This function is used to wait for the L2 writeback block operation to
 *      complete. This API should be used only if the CACHE_wbL2 was called
 *      with the CACHE_NOWAIT argument.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  @a CACHE_wbL2(wait=CACHE_NOWAIT) must be called.
 *
 *   <b> Post Condition </b>
 *   @n  The dirty lines of the L2 Cache block have been written back.
 *
 *   @b Reads
 *   @n  CGEM_L2WWC_WC=0
 *
 *   @b Example
 *   @verbatim
        CACHE_wbL2((void *)ptr_buffer, 128, CACHE_NOWAIT);
        ...
        CACHE_wbL2Wait(); // Wait for the writeback operation to complete.
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbL2Wait (void); /* for misra warnings */
static inline void CACHE_wbL2Wait (void)
{
    /* Wait for the Writeback operation to complete. */
    while (CSL_FEXT(hCache->L2WWC, CGEM_L2WWC_WC) != 0) {}
}
/** ============================================================================
 *   @n@b CACHE_wbL2
 *
 *   @b Description
 *   @n This function is used to writeback the contents of the L2 Cache. Although
 *      the block size can be specified in the number of bytes, the cache
 *      controller operates on whole cache lines. To prevent unintended behavior
 *      "blockPtr" should be aligned on the cache line size and "byteCnt"
 *      should be a multiple of the cache line size.
 *
 *   @b Arguments
     @verbatim
        blockPtr      Address of the block which is to be written back
        byteCnt       Size of the block to be written back.
        wait          Indicates if the call should block or not.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The dirty lines of the L2 Cache are being written back.
 *
 *   @b Writes
 *   @n  CGEM_L2WBAR_ADDR, CGEM_L2WWC_WC
 *
 *   @b Example
 *   @verbatim
        Uint8* ptr_buffer;

        // Writeback the contents of the buffer.
        CACHE_wbL2(ptr_buffer, 100, CACHE_WAIT);
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbL2
(
    const void* blockPtr,
    Uint32      byteCnt,
    CACHE_Wait  wait
); /* for misra warnings */
static inline void CACHE_wbL2
(
    const void* blockPtr,
    Uint32      byteCnt,
    CACHE_Wait  wait
)
{
    uint32_t gie, advisory6;

    if ( (wait == CACHE_WAIT      ) ||
         (wait == CACHE_FENCE_WAIT) )
    {
        advisory6 = (uint32_t)1U;
    }
    else
    {
        advisory6 = 0;
    }

    if (advisory6)
    {
        /* disable the interrupts */
        gie = _disable_interrupts ();
    }

    /* Setup the block address and length */
    hCache->L2WBAR = CSL_FMK (CGEM_L2WBAR_ADDR, (Uint32)blockPtr);
    hCache->L2WWC  = CSL_FMK (CGEM_L2WWC_WC, (Uint32)((byteCnt + ((uint32_t)3)) >> 2));

    /* Determine if we need to wait for the operation to complete. */
    if ( (wait == CACHE_WAIT     ) ||
         (wait == CACHE_ONLY_WAIT) )
    {
        CACHE_wbL2Wait();
    }
    else
    {
#if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
        if ( (wait == CACHE_FENCE_WAIT     ) ||
             (wait == CACHE_FENCE_ONLY_WAIT) )
        {
            _mfence();
            /* Add another mfence to address single mfence issue:
             * under very particular circumstances, MFENCE may allow
             * the transaction after the MFENCE to proceed before
             * the preceding STORE completes. */
            _mfence();
        }
#endif
    }

    if (advisory6)
    {
        CACHE_AsmNop();
        _restore_interrupts (gie);
    }
}
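/*  Usage sketch (not part of the CSL API): a hedged example of publishing a
 *  CPU-written buffer to another bus master (for example an EDMA channel or a
 *  peripheral) by writing back the covering L2 lines first. The 128-byte
 *  alignment, the buffer size, and the helpers fill_tx_buffer() and
 *  start_dma_transfer() are assumptions for illustration only; align and size
 *  the buffer to the actual cache line size of the device.
 *
 *  @verbatim
        #define TX_BUF_SIZE 1024            // multiple of the cache line size (assumed)

        #pragma DATA_ALIGN(txBuffer, 128)   // assumed 128-byte line alignment
        Uint8 txBuffer[TX_BUF_SIZE];

        fill_tx_buffer (txBuffer, TX_BUF_SIZE);

        // Make the CPU's writes visible in memory before the DMA reads them.
        CACHE_wbL2 (txBuffer, TX_BUF_SIZE, CACHE_WAIT);

        start_dma_transfer (txBuffer, TX_BUF_SIZE);
    @endverbatim
 */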
/** ============================================================================
 *   @n@b CACHE_invL2Wait
 *
 *   @b Description
 *   @n This function is used to wait for the L2 invalidate block operation to
 *      complete. This API should be used only if the CACHE_invL2 was called
 *      with the CACHE_NOWAIT argument.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  @a CACHE_invL2(wait=CACHE_NOWAIT) must be called.
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache block have been invalidated.
 *
 *   @b Reads
 *   @n  CGEM_L2IWC_WC=0
 *
 *   @b Example
 *   @verbatim
        CACHE_invL2((void *)ptr_buffer, 128, CACHE_NOWAIT);
        ...
        CACHE_invL2Wait(); // Wait for the Invalidate operation to complete.
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_invL2Wait (void); /* for misra warnings */
static inline void CACHE_invL2Wait (void)
{
    /* Wait for the Invalidate operation to complete. */
    while (CSL_FEXT(hCache->L2IWC, CGEM_L2IWC_WC) != 0) {}
}
/** ============================================================================
 *   @n@b CACHE_invL2
 *
 *   @b Description
 *   @n This function is used to invalidate the contents of the L2 Cache.
 *      Although the block size can be specified in the number of bytes,
 *      the cache controller operates on whole cache lines. To prevent unintended
 *      behavior "blockPtr" should be aligned on the cache line size and "byteCnt"
 *      should be a multiple of the cache line size.
 *
 *   @b Arguments
     @verbatim
        blockPtr      Address of the block which is to be invalidated
        byteCnt       Size of the block to be invalidated.
        wait          Indicates if the call should block or not.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache are being invalidated.
 *
 *   @b Writes
 *   @n  CGEM_L2IBAR_ADDR, CGEM_L2IWC_WC
 *
 *   @b Example
 *   @verbatim
        Uint8* ptr_buffer;

        // Invalidate the contents of the buffer.
        CACHE_invL2(ptr_buffer, 100, CACHE_WAIT);
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_invL2
(
    const void* blockPtr,
    Uint32      byteCnt,
    CACHE_Wait  wait
); /* for misra warnings */
static inline void CACHE_invL2
(
    const void* blockPtr,
    Uint32      byteCnt,
    CACHE_Wait  wait
)
{
    uint32_t gie, advisory6;

    if ( (wait == CACHE_WAIT      ) ||
         (wait == CACHE_FENCE_WAIT) )
    {
        advisory6 = (uint32_t)1U;
    }
    else
    {
        advisory6 = 0;
    }

    if (advisory6)
    {
        /* disable the interrupts */
        gie = _disable_interrupts ();
#if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
        CSL_XMC_invalidatePrefetchBuffer();
#endif
    }

    /* Setup the block address and length */
    hCache->L2IBAR = CSL_FMK (CGEM_L2IBAR_ADDR, (Uint32)blockPtr);
    hCache->L2IWC  = CSL_FMK (CGEM_L2IWC_WC, (Uint32)((byteCnt + ((uint32_t)3U)) >> 2));

    /* Determine if we need to wait for the operation to complete. */
    if ( (wait == CACHE_WAIT     ) ||
         (wait == CACHE_ONLY_WAIT) )
    {
        CACHE_invL2Wait();
    }
    else
    {
#if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
        if ( (wait == CACHE_FENCE_WAIT     ) ||
             (wait == CACHE_FENCE_ONLY_WAIT) )
        {
            _mfence();
            /* Add another mfence to address single mfence issue:
             * under very particular circumstances, MFENCE may allow
             * the transaction after the MFENCE to proceed before
             * the preceding STORE completes. */
            _mfence();
        }
#endif
    }

    if (advisory6)
    {
        CACHE_AsmNop();
        _restore_interrupts (gie);
    }
}
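/*  Usage sketch (not part of the CSL API): a hedged example of consuming a
 *  buffer that another bus master has written to memory. The L2 lines covering
 *  the buffer are invalidated before the CPU reads it so that stale cached
 *  data is not used. The 128-byte alignment, the buffer size, and the helpers
 *  wait_for_dma_completion() and process_rx_buffer() are assumptions for
 *  illustration only.
 *
 *  @verbatim
        #define RX_BUF_SIZE 1024            // multiple of the cache line size (assumed)

        #pragma DATA_ALIGN(rxBuffer, 128)   // assumed 128-byte line alignment
        Uint8 rxBuffer[RX_BUF_SIZE];

        wait_for_dma_completion ();

        // Drop any stale cached copies before reading the DMA-written data.
        CACHE_invL2 (rxBuffer, RX_BUF_SIZE, CACHE_WAIT);

        process_rx_buffer (rxBuffer, RX_BUF_SIZE);
    @endverbatim
 */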
/** ============================================================================
 *   @n@b CACHE_wbInvL2Wait
 *
 *   @b Description
 *   @n This function is used to wait for the L2 Writeback & invalidate block
 *      operation to complete. This API should be used only if the CACHE_wbInvL2
 *      was called with the CACHE_NOWAIT argument.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  @a CACHE_wbInvL2(wait=CACHE_NOWAIT) must be called.
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache block have been written back & invalidated.
 *
 *   @b Reads
 *   @n  CGEM_L2WIWC_WC=0
 *
 *   @b Example
 *   @verbatim
        CACHE_wbInvL2((void *)ptr_buffer, 128, CACHE_NOWAIT);
        ...
        CACHE_wbInvL2Wait(); // Wait for the Writeback-Invalidate operation to complete.
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbInvL2Wait (void); /* for misra warnings */
static inline void CACHE_wbInvL2Wait (void)
{
    /* Wait for the Writeback & Invalidate operation to complete. */
    while (CSL_FEXT(hCache->L2WIWC, CGEM_L2WIWC_WC) != 0) {}
}
/** ============================================================================
 *   @n@b CACHE_wbInvL2
 *
 *   @b Description
 *   @n This function is used to write back and invalidate the contents of the
 *      L2 Cache. Although the block size can be specified in the number of
 *      bytes, the cache controller operates on whole cache lines. To prevent
 *      unintended behavior "blockPtr" should be aligned on the cache line size
 *      and "byteCnt" should be a multiple of the cache line size.
 *
 *   @b Arguments
     @verbatim
        blockPtr      Address of the block which is to be written back & invalidated
        byteCnt       Size of the block to be written back & invalidated.
        wait          Indicates if the call should block or not.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache are being written back & invalidated.
 *
 *   @b Writes
 *   @n  CGEM_L2WIBAR_ADDR, CGEM_L2WIWC_WC
 *
 *   @b Example
 *   @verbatim
        Uint8* ptr_buffer;

        // Writeback & invalidate the contents of the buffer.
        CACHE_wbInvL2(ptr_buffer, 100, CACHE_WAIT);
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbInvL2 (
    const void* blockPtr,
    Uint32      byteCnt,
    CACHE_Wait  wait
); /* for misra warnings */
static inline void CACHE_wbInvL2 (
    const void* blockPtr,
    Uint32      byteCnt,
    CACHE_Wait  wait
)
{
    uint32_t gie, advisory6;

    if ( (wait == CACHE_WAIT      ) ||
         (wait == CACHE_FENCE_WAIT) )
    {
        advisory6 = (uint32_t)1U;
    }
    else
    {
        advisory6 = 0;
    }

    if (advisory6)
    {
        /* disable the interrupts */
        gie = _disable_interrupts ();
#if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
        CSL_XMC_invalidatePrefetchBuffer();
#endif
    }

    /* Setup the block address and length */
    hCache->L2WIBAR = CSL_FMK(CGEM_L2WIBAR_ADDR, (Uint32)blockPtr);
    hCache->L2WIWC  = CSL_FMK(CGEM_L2WIWC_WC, (Uint32)((byteCnt + ((uint32_t)3U)) >> 2));

    /* Determine if we need to wait for the operation to complete. */
    if ( (wait == CACHE_WAIT     ) ||
         (wait == CACHE_ONLY_WAIT) )
    {
        CACHE_wbInvL2Wait();
    }
    else
    {
#if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
        if ( (wait == CACHE_FENCE_WAIT     ) ||
             (wait == CACHE_FENCE_ONLY_WAIT) )
        {
            _mfence();
            /* Add another mfence to address single mfence issue:
             * under very particular circumstances, MFENCE may allow
             * the transaction after the MFENCE to proceed before
             * the preceding STORE completes. */
            _mfence();
        }
#endif
    }

    if (advisory6)
    {
        CACHE_AsmNop();
        _restore_interrupts (gie);
    }
}
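/*  Usage sketch (not part of the CSL API): a hedged example of returning a
 *  buffer that the CPU has both read and modified to another bus master.
 *  Writeback-invalidate pushes the CPU's updates to memory and drops the
 *  cached copies, so the master's next write cannot be masked by stale lines.
 *  The 128-byte alignment, the buffer size, and the helpers
 *  update_shared_buffer() and notify_remote_master() are assumptions for
 *  illustration only.
 *
 *  @verbatim
        #define SHARED_BUF_SIZE 512             // multiple of the cache line size (assumed)

        #pragma DATA_ALIGN(sharedBuffer, 128)   // assumed 128-byte line alignment
        Uint8 sharedBuffer[SHARED_BUF_SIZE];

        update_shared_buffer (sharedBuffer, SHARED_BUF_SIZE);

        // Push CPU updates to memory and discard the cached copies.
        CACHE_wbInvL2 (sharedBuffer, SHARED_BUF_SIZE, CACHE_WAIT);

        notify_remote_master (sharedBuffer, SHARED_BUF_SIZE);
    @endverbatim
 */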
/** ============================================================================
 *   @n@b CACHE_wbAllL2Wait
 *
 *   @b Description
 *   @n This function is used to wait for the global L2 writeback operation
 *      to complete. This API should be used only if the CACHE_wbAllL2 was
 *      called with the CACHE_NOWAIT argument.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  @a CACHE_wbAllL2(wait=CACHE_NOWAIT) must be called.
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache have been written back
 *
 *   @b Reads
 *   @n  CGEM_L2WB_C=0
 *
 *   @b Example
 *   @verbatim
        // Writeback the contents of the L2 Cache.
        CACHE_wbAllL2(CACHE_NOWAIT);

        // Wait for the operation to complete.
        CACHE_wbAllL2Wait();
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbAllL2Wait (void); /* for misra warnings */
static inline void CACHE_wbAllL2Wait (void)
{
    /* Wait for the writeback operation to complete. */
    while (CSL_FEXT(hCache->L2WB, CGEM_L2WB_C) == (uint32_t)1U) {}
}
/** ============================================================================
 *   @n@b CACHE_wbAllL2
 *
 *   @b Description
 *   @n This function is used to write back all the contents of the L2 Cache.
 *
 *   @b Arguments
     @verbatim
        wait          Indicates if the call should block or not.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache are being written back.
 *
 *   @b Writes
 *   @n  CGEM_L2WB_C=1
 *
 *   @b Example
 *   @verbatim
        // Writeback the contents of the L2 Cache.
        CACHE_wbAllL2(CACHE_WAIT);
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbAllL2 (CACHE_Wait wait); /* for misra warnings */
static inline void CACHE_wbAllL2 (CACHE_Wait wait)
{
    uint32_t gie, advisory6;

    if ( (wait == CACHE_WAIT      ) ||
         (wait == CACHE_FENCE_WAIT) )
    {
        advisory6 = (uint32_t)1U;
    }
    else
    {
        advisory6 = 0;
    }

    if (advisory6)
    {
        /* disable the interrupts */
        gie = _disable_interrupts ();
    }

    CSL_FINS (hCache->L2WB, CGEM_L2WB_C, (uint32_t)1U);

    /* Determine if we need to wait for the operation to complete. */
    if (wait)
    {
        CACHE_wbAllL2Wait();
    }

    if (advisory6)
    {
        CACHE_AsmNop();
        _restore_interrupts (gie);
    }
}
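/*  Usage sketch (not part of the CSL API): a hedged example of flushing all
 *  dirty L2 lines to memory before another master takes a snapshot of the
 *  DSP's data (for example prior to a checkpoint or a host-side memory dump).
 *  The cached copies remain valid after a writeback, so the CPU can keep using
 *  them. signal_snapshot_ready() is a placeholder for application code.
 *
 *  @verbatim
        // Push every dirty L2 line out to memory.
        CACHE_wbAllL2 (CACHE_WAIT);

        signal_snapshot_ready ();
    @endverbatim
 */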
/** ============================================================================
 *   @n@b CACHE_invAllL2Wait
 *
 *   @b Description
 *   @n This function is used to wait for the L2 Invalidate operation to complete.
 *      This API should be used only if the CACHE_invAllL2 was called with the
 *      CACHE_NOWAIT argument.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  @a CACHE_invAllL2(wait=CACHE_NOWAIT) must be called.
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache have been invalidated
 *
 *   @b Reads
 *   @n  CGEM_L2INV_I=0
 *
 *   @b Example
 *   @verbatim
        // Invalidate the contents of the L2 Cache.
        CACHE_invAllL2(CACHE_NOWAIT);

        // Wait for the operation to complete.
        CACHE_invAllL2Wait();
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_invAllL2Wait (void); /* for misra warnings */
static inline void CACHE_invAllL2Wait (void)
{
    /* Wait for the invalidate operation to complete. */
    while (CSL_FEXT(hCache->L2INV, CGEM_L2INV_I) == (uint32_t)1U) {}
}
/** ============================================================================
 *   @n@b CACHE_invAllL2
 *
 *   @b Description
 *   @n This function is used to invalidate all the contents of the L2 Cache.
 *
 *   @b Arguments
     @verbatim
        wait          Indicates if the call should block or not.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache are being invalidated.
 *
 *   @b Writes
 *   @n  CGEM_L2INV_I=1
 *
 *   @b Example
 *   @verbatim
        // Invalidate the contents of the L2 Cache.
        CACHE_invAllL2(CACHE_WAIT);
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_invAllL2 (CACHE_Wait wait); /* for misra warnings */
static inline void CACHE_invAllL2 (CACHE_Wait wait)
{
    uint32_t gie, advisory6;

    if ( (wait == CACHE_WAIT      ) ||
         (wait == CACHE_FENCE_WAIT) )
    {
        advisory6 = (uint32_t)1U;
    }
    else
    {
        advisory6 = 0;
    }

    if (advisory6)
    {
        /* disable the interrupts */
        gie = _disable_interrupts ();
#if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
        CSL_XMC_invalidatePrefetchBuffer();
#endif
    }

    CSL_FINS (hCache->L2INV, CGEM_L2INV_I, (uint32_t)1U);

    /* Determine if we need to wait for the operation to complete. */
    if (wait)
    {
        CACHE_invAllL2Wait();
    }

    if (advisory6)
    {
        CACHE_AsmNop();
        _restore_interrupts (gie);
    }
}
/** ============================================================================
 *   @n@b CACHE_wbInvAllL2Wait
 *
 *   @b Description
 *   @n This function is used to wait for the L2 Writeback and Invalidate
 *      operation to complete. This API should be used only if the CACHE_wbInvAllL2
 *      was called with the CACHE_NOWAIT argument.
 *
 *   @b Arguments
 *   @n  None
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  @a CACHE_wbInvAllL2(wait=CACHE_NOWAIT) must be called.
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache have been invalidated and written back
 *
 *   @b Reads
 *   @n  CGEM_L2WBINV_C=0
 *
 *   @b Example
 *   @verbatim
        // Writeback & Invalidate the contents of the L2 Cache.
        CACHE_wbInvAllL2(CACHE_NOWAIT);

        // Wait for the operation to complete.
        CACHE_wbInvAllL2Wait();
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbInvAllL2Wait (void); /* for misra warnings */
static inline void CACHE_wbInvAllL2Wait (void)
{
    /* Wait for the writeback-invalidate operation to complete. */
    while (CSL_FEXT(hCache->L2WBINV, CGEM_L2WBINV_C) == (uint32_t)1U) {}
}
/** ============================================================================
 *   @n@b CACHE_wbInvAllL2
 *
 *   @b Description
 *   @n This function is used to writeback and invalidate all the contents of
 *      the L2 Cache.
 *
 *   @b Arguments
     @verbatim
        wait          Indicates if the call should block or not.
     @endverbatim
 *
 *   <b> Return Value </b>
 *   @n  None
 *
 *   <b> Pre Condition </b>
 *   @n  None
 *
 *   <b> Post Condition </b>
 *   @n  The contents of the L2 Cache are being written back & invalidated.
 *
 *   @b Writes
 *   @n  CGEM_L2WBINV_C=1
 *
 *   @b Example
 *   @verbatim
        // Writeback & invalidate the contents of the L2 Cache.
        CACHE_wbInvAllL2(CACHE_WAIT);
     @endverbatim
 * =============================================================================
 */
static inline void CACHE_wbInvAllL2 (CACHE_Wait wait); /* for misra warnings */
static inline void CACHE_wbInvAllL2 (CACHE_Wait wait)
{
    uint32_t gie, advisory6;

    if ( (wait == CACHE_WAIT      ) ||
         (wait == CACHE_FENCE_WAIT) )
    {
        advisory6 = (uint32_t)1U;
    }
    else
    {
        advisory6 = 0;
    }

    if (advisory6)
    {
        /* disable the interrupts */
        gie = _disable_interrupts ();
#if !(defined (SOC_OMAPL137) || defined(SOC_OMAPL138))
        CSL_XMC_invalidatePrefetchBuffer();
#endif
    }

    CSL_FINS (hCache->L2WBINV, CGEM_L2WBINV_C, (uint32_t)1U);

    /* Determine if we need to wait for the operation to complete. */
    if (wait)
    {
        CACHE_wbInvAllL2Wait();
    }

    if (advisory6)
    {
        CACHE_AsmNop();
        _restore_interrupts (gie);
    }
}
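/*  Usage sketch (not part of the CSL API): a hedged example of cleaning the
 *  entire L2 cache before the memory map or cacheability settings are changed
 *  (for example before MAR reconfiguration or before handing the core to a
 *  bootloader). reconfigure_memory_attributes() is a placeholder for
 *  application code.
 *
 *  @verbatim
        // Write back all dirty lines and drop every cached copy.
        CACHE_wbInvAllL2 (CACHE_WAIT);

        reconfigure_memory_attributes ();
    @endverbatim
 */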
/**
@}
*/

#ifdef __cplusplus
}
#endif

#endif /* CSL_CACHEAUX_H */