/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <asm/irq_regs.h>
#include <asm/perf_event.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

#include <linux/acpi.h>
#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>
/*
 * ARMv8 PMUv3 Performance Events handling code.
 * Common event types (some are defined in asm/perf_event.h).
 */

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INST_RETIRED 0x08
#define ARMV8_PMUV3_PERFCTR_INST_SPEC 0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_LD_RETIRED 0x06
#define ARMV8_PMUV3_PERFCTR_ST_RETIRED 0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN 0x09
#define ARMV8_PMUV3_PERFCTR_EXC_RETURN 0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED 0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED 0x0C
#define ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED 0x0D
#define ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED 0x0E
#define ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED 0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED 0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN 0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED 0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL 0x01
#define ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL 0x02
#define ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL 0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS 0x13
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE 0x14
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB 0x15
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE 0x16
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL 0x17
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB 0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS 0x19
#define ARMV8_PMUV3_PERFCTR_MEMORY_ERROR 0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES 0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE 0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE 0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED 0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND 0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND 0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB 0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB 0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE 0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL 0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE 0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL 0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE 0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB 0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL 0x2D
#define ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL 0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB 0x2F
#define ARMV8_PMUV3_PERFCTR_L2I_TLB 0x30

/* ARMv8 recommended implementation-defined event types. */
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD 0x40
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR 0x41
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD 0x42
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR 0x43
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_INNER 0x44
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_OUTER 0x45
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_VICTIM 0x46
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_CLEAN 0x47
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_INVAL 0x48
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD 0x4C
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR 0x4D
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD 0x4E
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR 0x4F
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_RD 0x50
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WR 0x51
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_RD 0x52
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_WR 0x53
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_VICTIM 0x56
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_CLEAN 0x57
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_INVAL 0x58
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_RD 0x5C
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_WR 0x5D
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_RD 0x5E
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_WR 0x5F
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD 0x60
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR 0x61
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_SHARED 0x62
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NOT_SHARED 0x63
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NORMAL 0x64
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_PERIPH 0x65
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_RD 0x66
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_WR 0x67
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LD_SPEC 0x68
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_ST_SPEC 0x69
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LDST_SPEC 0x6A
#define ARMV8_IMPDEF_PERFCTR_LDREX_SPEC 0x6C
#define ARMV8_IMPDEF_PERFCTR_STREX_PASS_SPEC 0x6D
#define ARMV8_IMPDEF_PERFCTR_STREX_FAIL_SPEC 0x6E
#define ARMV8_IMPDEF_PERFCTR_STREX_SPEC 0x6F
#define ARMV8_IMPDEF_PERFCTR_LD_SPEC 0x70
#define ARMV8_IMPDEF_PERFCTR_ST_SPEC 0x71
#define ARMV8_IMPDEF_PERFCTR_LDST_SPEC 0x72
#define ARMV8_IMPDEF_PERFCTR_DP_SPEC 0x73
#define ARMV8_IMPDEF_PERFCTR_ASE_SPEC 0x74
#define ARMV8_IMPDEF_PERFCTR_VFP_SPEC 0x75
#define ARMV8_IMPDEF_PERFCTR_PC_WRITE_SPEC 0x76
#define ARMV8_IMPDEF_PERFCTR_CRYPTO_SPEC 0x77
#define ARMV8_IMPDEF_PERFCTR_BR_IMMED_SPEC 0x78
#define ARMV8_IMPDEF_PERFCTR_BR_RETURN_SPEC 0x79
#define ARMV8_IMPDEF_PERFCTR_BR_INDIRECT_SPEC 0x7A
#define ARMV8_IMPDEF_PERFCTR_ISB_SPEC 0x7C
#define ARMV8_IMPDEF_PERFCTR_DSB_SPEC 0x7D
#define ARMV8_IMPDEF_PERFCTR_DMB_SPEC 0x7E
#define ARMV8_IMPDEF_PERFCTR_EXC_UNDEF 0x81
#define ARMV8_IMPDEF_PERFCTR_EXC_SVC 0x82
#define ARMV8_IMPDEF_PERFCTR_EXC_PABORT 0x83
#define ARMV8_IMPDEF_PERFCTR_EXC_DABORT 0x84
#define ARMV8_IMPDEF_PERFCTR_EXC_IRQ 0x86
#define ARMV8_IMPDEF_PERFCTR_EXC_FIQ 0x87
#define ARMV8_IMPDEF_PERFCTR_EXC_SMC 0x88
#define ARMV8_IMPDEF_PERFCTR_EXC_HVC 0x8A
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_PABORT 0x8B
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_DABORT 0x8C
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_OTHER 0x8D
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_IRQ 0x8E
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_FIQ 0x8F
#define ARMV8_IMPDEF_PERFCTR_RC_LD_SPEC 0x90
#define ARMV8_IMPDEF_PERFCTR_RC_ST_SPEC 0x91
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_RD 0xA0
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WR 0xA1
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_RD 0xA2
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_WR 0xA3
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_VICTIM 0xA6
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_CLEAN 0xA7
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_INVAL 0xA8

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREF_LINEFILL 0xC2

/* ARMv8 Cavium ThunderX specific event types. */
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST 0xE9
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS 0xEA
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS 0xEB
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS 0xEC
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS 0xED

/* PMUv3 HW events mapping. */
/*
 * ARMv8 architecturally defined events; not all of these may be supported
 * on any given implementation. Events that an implementation does not
 * support are disabled at run-time.
 */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
        [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
        [PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 and Cortex-A72 events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

static const unsigned armv8_thunder_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
        [PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};

/* Broadcom Vulcan events mapping. */
static const unsigned armv8_vulcan_perf_map[PERF_COUNT_HW_MAX] = {
        PERF_MAP_ALL_UNSUPPORTED,
        [PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
        [PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
        [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_BR_RETIRED,
        [PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
        [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
        [PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};
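
/*
 * The tables above translate the generic perf hardware event types into
 * PMUv3 event numbers; the lookup itself is done by armpmu_map_event() in
 * the arm_pmu core. For example, PERF_COUNT_HW_INSTRUCTIONS resolves to
 * ARMV8_PMUV3_PERFCTR_INST_RETIRED (0x08) on all of these PMUs, while
 * PERF_COUNT_HW_BRANCH_INSTRUCTIONS differs between them.
 */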
static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                        [PERF_COUNT_HW_CACHE_OP_MAX]
                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL,
        [C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_TLB,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
        [C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                        [PERF_COUNT_HW_CACHE_OP_MAX]
                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREF_LINEFILL,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                        [PERF_COUNT_HW_CACHE_OP_MAX]
                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_thunder_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                        [PERF_COUNT_HW_CACHE_OP_MAX]
                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS,
        [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,
        [C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS,
        [C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS,

        [C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_vulcan_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                        [PERF_COUNT_HW_CACHE_OP_MAX]
                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
        PERF_CACHE_MAP_ALL_UNSUPPORTED,

        [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
        [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
        [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
        [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

        [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
        [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

        [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
        [C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,

        [C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
        [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
        [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

        [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,

        [C(NODE)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD,
        [C(NODE)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR,
};

static ssize_t
armv8pmu_events_sysfs_show(struct device *dev,
                           struct device_attribute *attr, char *page)
{
        struct perf_pmu_events_attr *pmu_attr;

        pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr);

        return sprintf(page, "event=0x%03llx\n", pmu_attr->id);
}
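
/*
 * Reading one of the event attributes defined below from sysfs prints the
 * event number in the "event=0x%03llx" format above. A sketch of what this
 * looks like on a PMU registered under the name "armv8_pmuv3":
 *
 *   $ cat /sys/bus/event_source/devices/armv8_pmuv3/events/cpu_cycles
 *   event=0x011
 */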
#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
        PMU_EVENT_ATTR(name, armv8_event_attr_##name, \
                       config, armv8pmu_events_sysfs_show)
ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1D_CACHE);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_LD_RETIRED);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_ST_RETIRED);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INST_RETIRED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_RETURN);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CPU_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_BR_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1I_CACHE);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2D_CACHE);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEMORY_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_INST_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
/* Don't expose the chain event in /sys, since it's useless in isolation. */
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l2i_tlb_refill, ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l2i_tlb, ARMV8_PMUV3_PERFCTR_L2I_TLB);

static struct attribute *armv8_pmuv3_event_attrs[] = {
        &armv8_event_attr_sw_incr.attr.attr,
        &armv8_event_attr_l1i_cache_refill.attr.attr,
        &armv8_event_attr_l1i_tlb_refill.attr.attr,
        &armv8_event_attr_l1d_cache_refill.attr.attr,
        &armv8_event_attr_l1d_cache.attr.attr,
        &armv8_event_attr_l1d_tlb_refill.attr.attr,
        &armv8_event_attr_ld_retired.attr.attr,
        &armv8_event_attr_st_retired.attr.attr,
        &armv8_event_attr_inst_retired.attr.attr,
        &armv8_event_attr_exc_taken.attr.attr,
        &armv8_event_attr_exc_return.attr.attr,
        &armv8_event_attr_cid_write_retired.attr.attr,
        &armv8_event_attr_pc_write_retired.attr.attr,
        &armv8_event_attr_br_immed_retired.attr.attr,
        &armv8_event_attr_br_return_retired.attr.attr,
        &armv8_event_attr_unaligned_ldst_retired.attr.attr,
        &armv8_event_attr_br_mis_pred.attr.attr,
        &armv8_event_attr_cpu_cycles.attr.attr,
        &armv8_event_attr_br_pred.attr.attr,
        &armv8_event_attr_mem_access.attr.attr,
        &armv8_event_attr_l1i_cache.attr.attr,
        &armv8_event_attr_l1d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_cache.attr.attr,
        &armv8_event_attr_l2d_cache_refill.attr.attr,
        &armv8_event_attr_l2d_cache_wb.attr.attr,
        &armv8_event_attr_bus_access.attr.attr,
        &armv8_event_attr_memory_error.attr.attr,
        &armv8_event_attr_inst_spec.attr.attr,
        &armv8_event_attr_ttbr_write_retired.attr.attr,
        &armv8_event_attr_bus_cycles.attr.attr,
        &armv8_event_attr_l1d_cache_allocate.attr.attr,
        &armv8_event_attr_l2d_cache_allocate.attr.attr,
        &armv8_event_attr_br_retired.attr.attr,
        &armv8_event_attr_br_mis_pred_retired.attr.attr,
        &armv8_event_attr_stall_frontend.attr.attr,
        &armv8_event_attr_stall_backend.attr.attr,
        &armv8_event_attr_l1d_tlb.attr.attr,
        &armv8_event_attr_l1i_tlb.attr.attr,
        &armv8_event_attr_l2i_cache.attr.attr,
        &armv8_event_attr_l2i_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache_allocate.attr.attr,
        &armv8_event_attr_l3d_cache_refill.attr.attr,
        &armv8_event_attr_l3d_cache.attr.attr,
        &armv8_event_attr_l3d_cache_wb.attr.attr,
        &armv8_event_attr_l2d_tlb_refill.attr.attr,
        &armv8_event_attr_l2i_tlb_refill.attr.attr,
        &armv8_event_attr_l2d_tlb.attr.attr,
        &armv8_event_attr_l2i_tlb.attr.attr,
        NULL,
};

static umode_t
armv8pmu_event_attr_is_visible(struct kobject *kobj,
                               struct attribute *attr, int unused)
{
        struct device *dev = kobj_to_dev(kobj);
        struct pmu *pmu = dev_get_drvdata(dev);
        struct arm_pmu *cpu_pmu = container_of(pmu, struct arm_pmu, pmu);
        struct perf_pmu_events_attr *pmu_attr;

        pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr.attr);

        if (test_bit(pmu_attr->id, cpu_pmu->pmceid_bitmap))
                return attr->mode;

        return 0;
}

static struct attribute_group armv8_pmuv3_events_attr_group = {
        .name = "events",
        .attrs = armv8_pmuv3_event_attrs,
        .is_visible = armv8pmu_event_attr_is_visible,
};

PMU_FORMAT_ATTR(event, "config:0-9");

static struct attribute *armv8_pmuv3_format_attrs[] = {
        &format_attr_event.attr,
        NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
        .name = "format",
        .attrs = armv8_pmuv3_format_attrs,
};
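
/*
 * The single "event" format field spans config bits 0-9, matching the
 * 10-bit PMUv3 event number masked by ARMV8_PMU_EVTYPE_EVENT below. A
 * sketch of raw-event usage from userspace (assuming a PMU registered as
 * "armv8_pmuv3"):
 *
 *   perf stat -e armv8_pmuv3/event=0x11/ -- sleep 1    # CPU_CYCLES
 */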

/*
 * Perf Events' indices
 */
#define ARMV8_IDX_CYCLE_COUNTER 0
#define ARMV8_IDX_COUNTER0 1
#define ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
        (ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define ARMV8_IDX_TO_COUNTER(x) \
        (((x) - ARMV8_IDX_COUNTER0) & ARMV8_PMU_COUNTER_MASK)
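
/*
 * Perf index 0 is the dedicated cycle counter, so the generic event
 * counters start at perf index 1 and map down to hardware counter numbers
 * starting at 0: for example, ARMV8_IDX_TO_COUNTER(ARMV8_IDX_COUNTER0)
 * evaluates to 0.
 */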

static inline u32 armv8pmu_pmcr_read(void)
{
        return read_sysreg(pmcr_el0);
}

static inline void armv8pmu_pmcr_write(u32 val)
{
        val &= ARMV8_PMU_PMCR_MASK;
        isb();
        write_sysreg(val, pmcr_el0);
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
        return pmovsr & ARMV8_PMU_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
        return idx >= ARMV8_IDX_CYCLE_COUNTER &&
               idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
        return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}

static inline int armv8pmu_select_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        write_sysreg(counter, pmselr_el0);
        isb();

        return idx;
}
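
/*
 * PMSELR_EL0 selects which event counter the indirect PMXEVCNTR_EL0 and
 * PMXEVTYPER_EL0 accessors operate on; the isb() above ensures the new
 * selection is visible to the system-register access that follows.
 */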
static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;
        u32 value = 0;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u reading wrong counter %d\n",
                       smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
                value = read_sysreg(pmccntr_el0);
        else if (armv8pmu_select_counter(idx) == idx)
                value = read_sysreg(pmxevcntr_el0);

        return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        if (!armv8pmu_counter_valid(cpu_pmu, idx))
                pr_err("CPU%u writing wrong counter %d\n",
                       smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER) {
                /*
                 * Set the upper 32 bits: this is a 64-bit counter, but we
                 * only count using the lower 32 bits and want an interrupt
                 * when the 32-bit part overflows.
                 */
                u64 value64 = 0xffffffff00000000ULL | value;

                write_sysreg(value64, pmccntr_el0);
        } else if (armv8pmu_select_counter(idx) == idx)
                write_sysreg(value, pmxevcntr_el0);
}
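
/*
 * Worked example of the trick above: with ARMV8_PMU_PMCR_LC set (see
 * armv8pmu_reset()), the overflow interrupt fires when the 64-bit cycle
 * counter wraps. Writing e.g. value = 0xfffffe00 stores
 * 0xffffffff_fffffe00, so the interrupt is raised after another 0x200
 * (512) cycles, exactly as if the counter were only 32 bits wide.
 */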
static inline void armv8pmu_write_evtype(int idx, u32 val)
{
        if (armv8pmu_select_counter(idx) == idx) {
                val &= ARMV8_PMU_EVTYPE_MASK;
                write_sysreg(val, pmxevtyper_el0);
        }
}

static inline int armv8pmu_enable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        write_sysreg(BIT(counter), pmcntenset_el0);

        return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        write_sysreg(BIT(counter), pmcntenclr_el0);

        return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        write_sysreg(BIT(counter), pmintenset_el1);

        return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);

        write_sysreg(BIT(counter), pmintenclr_el1);
        isb();
        /* Clear the overflow flag in case an interrupt is pending. */
        write_sysreg(BIT(counter), pmovsclr_el0);
        isb();

        return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
        u32 value;

        /* Read */
        value = read_sysreg(pmovsclr_el0);

        /* Write to clear flags */
        value &= ARMV8_PMU_OVSR_MASK;
        write_sysreg(value, pmovsclr_el0);

        return value;
}

static void armv8pmu_enable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Enable counter and interrupt, and set the counter to count
         * the event that we're interested in.
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Set event (if destined for PMNx counters).
         */
        armv8pmu_write_evtype(idx, hwc->config_base);

        /*
         * Enable interrupt for this counter
         */
        armv8pmu_enable_intens(idx);

        /*
         * Enable counter
         */
        armv8pmu_enable_counter(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
        unsigned long flags;
        struct hw_perf_event *hwc = &event->hw;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
        int idx = hwc->idx;

        /*
         * Disable counter and interrupt
         */
        raw_spin_lock_irqsave(&events->pmu_lock, flags);

        /*
         * Disable counter
         */
        armv8pmu_disable_counter(idx);

        /*
         * Disable interrupt for this counter
         */
        armv8pmu_disable_intens(idx);

        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
        u32 pmovsr;
        struct perf_sample_data data;
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
        struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
        struct pt_regs *regs;
        int idx;

        /*
         * Get and reset the IRQ flags
         */
        pmovsr = armv8pmu_getreset_flags();

        /*
         * Did an overflow occur?
         */
        if (!armv8pmu_has_overflowed(pmovsr))
                return IRQ_NONE;

        /*
         * Handle the counter(s) overflow(s)
         */
        regs = get_irq_regs();

        for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
                struct perf_event *event = cpuc->events[idx];
                struct hw_perf_event *hwc;

                /* Ignore if we don't have an event. */
                if (!event)
                        continue;

                /*
                 * We have a single interrupt for all counters. Check that
                 * each counter has overflowed before we process it.
                 */
                if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
                        continue;

                hwc = &event->hw;
                armpmu_event_update(event);
                perf_sample_data_init(&data, 0, hwc->last_period);
                if (!armpmu_event_set_period(event))
                        continue;

                if (perf_event_overflow(event, &data, regs))
                        cpu_pmu->disable(event);
        }

        /*
         * Handle the pending perf events.
         *
         * Note: this call *must* be run with interrupts disabled. For
         * platforms that can have the PMU interrupts raised as an NMI, this
         * will not work.
         */
        irq_work_run();

        return IRQ_HANDLED;
}
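
/*
 * A sketch of the sampling flow driven by the handler above:
 * armpmu_event_set_period() programs each counter so that it overflows
 * once the remaining sampling period has elapsed, the overflow latches a
 * bit in PMOVSCLR_EL0 and raises the PMU interrupt, and the handler reads
 * and clears those bits, updates the event counts and pushes samples out
 * via perf_event_overflow().
 */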
static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Enable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMU_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
        unsigned long flags;
        struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

        raw_spin_lock_irqsave(&events->pmu_lock, flags);
        /* Disable all counters */
        armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMU_PMCR_E);
        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
                                  struct perf_event *event)
{
        int idx;
        struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
        struct hw_perf_event *hwc = &event->hw;
        unsigned long evtype = hwc->config_base & ARMV8_PMU_EVTYPE_EVENT;

        /* Always place a cycle-counting event on the dedicated cycle counter. */
        if (evtype == ARMV8_PMUV3_PERFCTR_CPU_CYCLES) {
                if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
                        return -EAGAIN;

                return ARMV8_IDX_CYCLE_COUNTER;
        }

        /*
         * For anything other than a cycle-counting event, try to use
         * the event counters.
         */
        for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
                if (!test_and_set_bit(idx, cpuc->used_mask))
                        return idx;
        }

        /* The counters are all in use. */
        return -EAGAIN;
}

/*
 * Add an event filter to a given event.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
                                     struct perf_event_attr *attr)
{
        unsigned long config_base = 0;

        if (attr->exclude_idle)
                return -EPERM;
        if (is_kernel_in_hyp_mode() &&
            attr->exclude_kernel != attr->exclude_hv)
                return -EINVAL;
        if (attr->exclude_user)
                config_base |= ARMV8_PMU_EXCLUDE_EL0;
        if (!is_kernel_in_hyp_mode() && attr->exclude_kernel)
                config_base |= ARMV8_PMU_EXCLUDE_EL1;
        if (!attr->exclude_hv)
                config_base |= ARMV8_PMU_INCLUDE_EL2;

        /*
         * Install the filter into config_base as this is used to
         * construct the event type.
         */
        event->config_base = config_base;

        return 0;
}
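
/*
 * Example of how perf modifiers reach this filter (a sketch): for
 * "perf stat -e cycles:u", attr->exclude_kernel (and attr->exclude_hv)
 * are set, so on a non-VHE kernel config_base gains ARMV8_PMU_EXCLUDE_EL1
 * and the counter does not count while executing at EL1.
 */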

static void armv8pmu_reset(void *info)
{
        struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
        u32 idx, nb_cnt = cpu_pmu->num_events;

        /* The counter and interrupt enable registers are unknown at reset. */
        for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
                armv8pmu_disable_counter(idx);
                armv8pmu_disable_intens(idx);
        }

        /*
         * Initialize and reset PMCR. Request the overflow interrupt on
         * 64-bit overflow of the cycle counter (PMCR_LC), but cheat in
         * armv8pmu_write_counter() so it behaves like a 32-bit counter.
         */
        armv8pmu_pmcr_write(ARMV8_PMU_PMCR_P | ARMV8_PMU_PMCR_C |
                            ARMV8_PMU_PMCR_LC);
}

static int armv8_pmuv3_map_event(struct perf_event *event)
{
        int hw_event_id;
        struct arm_pmu *armpmu = to_arm_pmu(event->pmu);

        hw_event_id = armpmu_map_event(event, &armv8_pmuv3_perf_map,
                                       &armv8_pmuv3_perf_cache_map,
                                       ARMV8_PMU_EVTYPE_EVENT);
        if (hw_event_id < 0)
                return hw_event_id;

        /* Disable micro/arch events not supported by this PMU. */
        if ((hw_event_id < ARMV8_PMUV3_MAX_COMMON_EVENTS) &&
            !test_bit(hw_event_id, armpmu->pmceid_bitmap)) {
                return -EOPNOTSUPP;
        }

        return hw_event_id;
}

static int armv8_a53_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a53_perf_map,
                                &armv8_a53_perf_cache_map,
                                ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_a57_perf_map,
                                &armv8_a57_perf_cache_map,
                                ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_thunder_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_thunder_perf_map,
                                &armv8_thunder_perf_cache_map,
                                ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_vulcan_map_event(struct perf_event *event)
{
        return armpmu_map_event(event, &armv8_vulcan_perf_map,
                                &armv8_vulcan_perf_cache_map,
                                ARMV8_PMU_EVTYPE_EVENT);
}

static void __armv8pmu_probe_pmu(void *info)
{
        struct arm_pmu *cpu_pmu = info;
        u32 pmceid[2];

        /* Read the number of available event counters from PMCR_EL0.N. */
        cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT)
                              & ARMV8_PMU_PMCR_N_MASK;

        /* Add the CPU cycles counter. */
        cpu_pmu->num_events += 1;

        pmceid[0] = read_sysreg(pmceid0_el0);
        pmceid[1] = read_sysreg(pmceid1_el0);

        bitmap_from_u32array(cpu_pmu->pmceid_bitmap,
                             ARMV8_PMUV3_MAX_COMMON_EVENTS, pmceid,
                             ARRAY_SIZE(pmceid));
}
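
/*
 * PMCEID0_EL0 and PMCEID1_EL0 advertise which of the common architectural
 * and microarchitectural events the implementation supports; the bitmap
 * built above is what armv8pmu_event_attr_is_visible() and
 * armv8_pmuv3_map_event() consult. As an illustration (not guaranteed for
 * every core), a CPU reporting PMCR_EL0.N == 6 ends up with
 * num_events == 7 once the cycle counter is added.
 */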
static int armv8pmu_probe_pmu(struct arm_pmu *cpu_pmu)
{
        return smp_call_function_any(&cpu_pmu->supported_cpus,
                                     __armv8pmu_probe_pmu,
                                     cpu_pmu, 1);
}

static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
        cpu_pmu->handle_irq = armv8pmu_handle_irq;
        cpu_pmu->enable = armv8pmu_enable_event;
        cpu_pmu->disable = armv8pmu_disable_event;
        cpu_pmu->read_counter = armv8pmu_read_counter;
        cpu_pmu->write_counter = armv8pmu_write_counter;
        cpu_pmu->get_event_idx = armv8pmu_get_event_idx;
        cpu_pmu->start = armv8pmu_start;
        cpu_pmu->stop = armv8pmu_stop;
        cpu_pmu->reset = armv8pmu_reset;
        cpu_pmu->max_period = (1LLU << 32) - 1;
        cpu_pmu->set_event_filter = armv8pmu_set_event_filter;
}

static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_pmuv3";
        cpu_pmu->map_event = armv8_pmuv3_map_event;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
                &armv8_pmuv3_events_attr_group;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
                &armv8_pmuv3_format_attr_group;
        return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_cortex_a53";
        cpu_pmu->map_event = armv8_a53_map_event;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
                &armv8_pmuv3_events_attr_group;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
                &armv8_pmuv3_format_attr_group;
        return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_cortex_a57";
        cpu_pmu->map_event = armv8_a57_map_event;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
                &armv8_pmuv3_events_attr_group;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
                &armv8_pmuv3_format_attr_group;
        return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_cortex_a72";
        cpu_pmu->map_event = armv8_a57_map_event;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
                &armv8_pmuv3_events_attr_group;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
                &armv8_pmuv3_format_attr_group;
        return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_thunder_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_cavium_thunder";
        cpu_pmu->map_event = armv8_thunder_map_event;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
                &armv8_pmuv3_events_attr_group;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
                &armv8_pmuv3_format_attr_group;
        return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_vulcan_pmu_init(struct arm_pmu *cpu_pmu)
{
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name = "armv8_brcm_vulcan";
        cpu_pmu->map_event = armv8_vulcan_map_event;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
                &armv8_pmuv3_events_attr_group;
        cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
                &armv8_pmuv3_format_attr_group;
        return armv8pmu_probe_pmu(cpu_pmu);
}

static const struct of_device_id armv8_pmu_of_device_ids[] = {
        {.compatible = "arm,armv8-pmuv3", .data = armv8_pmuv3_init},
        {.compatible = "arm,cortex-a53-pmu", .data = armv8_a53_pmu_init},
        {.compatible = "arm,cortex-a57-pmu", .data = armv8_a57_pmu_init},
        {.compatible = "arm,cortex-a72-pmu", .data = armv8_a72_pmu_init},
        {.compatible = "cavium,thunder-pmu", .data = armv8_thunder_pmu_init},
        {.compatible = "brcm,vulcan-pmu", .data = armv8_vulcan_pmu_init},
        {},
};

/*
 * Non-DT systems have their micro/arch events probed at run-time. A fairly
 * complete list of generic events is provided, and those not supported by
 * the current PMU are disabled.
 */
static const struct pmu_probe_info armv8_pmu_probe_table[] = {
        PMU_PROBE(0, 0, armv8_pmuv3_init), /* enable all defined counters */
        { /* sentinel value */ }
};
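
/*
 * On ACPI systems there is no DT node to match against, so probing falls
 * back to the run-time probe table above; DT systems match on the
 * compatible strings in armv8_pmu_of_device_ids instead.
 */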
static int armv8_pmu_device_probe(struct platform_device *pdev)
{
        if (acpi_disabled)
                return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids,
                                            NULL);

        return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids,
                                    armv8_pmu_probe_table);
}

static struct platform_driver armv8_pmu_driver = {
        .driver = {
                .name = ARMV8_PMU_PDEV_NAME,
                .of_match_table = armv8_pmu_of_device_ids,
        },
        .probe = armv8_pmu_device_probe,
};

builtin_platform_driver(armv8_pmu_driver);