/*
 * ints_low.S - low-level interrupt and exception entry code
 *
 * Copyright (C) 2013-2015 Synopsys, Inc. All rights reserved.
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <linux/linkage.h>

/*
 * Note on the LD/ST addressing modes with address register write-back
 *
 * LD.a is the same as LD.aw
 *
 * LD.a  reg1, [reg2, x] => Pre Incr
 *   Eff Addr for load = [reg2 + x]
 *
 * LD.ab reg1, [reg2, x] => Post Incr
 *   Eff Addr for load = [reg2]
 */
/*
 * PUSH - store one core register on the stack with pre-decrement.
 * "st.a" writes the updated address back, so %sp ends 4 bytes lower
 * and points at the value just stored.
 */
.macro PUSH reg
	st.a	\reg, [%sp, -4]
.endm
/*
 * PUSHAX - read an auxiliary register and push its value.
 * Clobbers %r9, so callers must not hold a live value in %r9
 * across an invocation of this macro.
 */
.macro PUSHAX aux
	lr	%r9, [\aux]		/* LR = read auxiliary register */
	PUSH	%r9
.endm
/*
 * SAVE_R1_TO_R24 - push core registers %r1 through %r24 in ascending
 * numeric order.  Because the stack grows down, %r1 lands at the
 * highest address of the group and %r24 at the lowest.
 * (%r0 is saved separately by SAVE_ALL_SYS before this runs.)
 */
.macro SAVE_R1_TO_R24
	PUSH	%r1
	PUSH	%r2
	PUSH	%r3
	PUSH	%r4
	PUSH	%r5
	PUSH	%r6
	PUSH	%r7
	PUSH	%r8
	PUSH	%r9
	PUSH	%r10
	PUSH	%r11
	PUSH	%r12
	PUSH	%r13
	PUSH	%r14
	PUSH	%r15
	PUSH	%r16
	PUSH	%r17
	PUSH	%r18
	PUSH	%r19
	PUSH	%r20
	PUSH	%r21
	PUSH	%r22
	PUSH	%r23
	PUSH	%r24
.endm
/*
 * SAVE_ALL_SYS - dump the complete CPU state onto the stack.
 *
 * Frame layout, from the entry-time %sp downwards: ECR, original %sp,
 * %r0, %r1..%r24, %r25, %gp, %fp, %blink, ERET, ERSTATUS, LP_COUNT,
 * LP_END, LP_START, ERBTA.  On exit %sp points at the lowest slot
 * (ERBTA), i.e. at the base of the saved-register frame handed to the
 * C exception handlers — presumably this mirrors the C-side register
 * struct; verify against its definition before changing the order.
 *
 * Clobbers %r0 (holds ECR afterwards) and %r9 (via PUSHAX).
 */
.macro SAVE_ALL_SYS
	/* saving %r0 to reg->r0 in advance since we read %ecr into it */
	st	%r0, [%sp, -8]
	lr	%r0, [%ecr]	/* all stack addressing is manual so far */
	st	%r0, [%sp]	/* ECR saved at the very top of the frame */
	st	%sp, [%sp, -4]	/* entry-time %sp saved just below ECR */
	/* now move %sp to reg->r0 position so we can do "push" automatically */
	sub	%sp, %sp, 8
	SAVE_R1_TO_R24
	PUSH	%r25
	PUSH	%gp
	PUSH	%fp
	PUSH	%blink
	PUSHAX	%eret
	PUSHAX	%erstatus
	PUSH	%lp_count
	PUSHAX	%lp_end
	PUSHAX	%lp_start
	PUSHAX	%erbta
.endm
/*
 * SAVE_EXCEPTION_SOURCE - load the exception source address into %r0,
 * which becomes the first argument of the C fault handlers.
 */
.macro SAVE_EXCEPTION_SOURCE
#ifdef CONFIG_MMU
	/* If MMU exists exception faulting address is loaded in EFA reg */
	lr	%r0, [%efa]
#else
	/* Otherwise in ERET (exception return) reg */
	lr	%r0, [%eret]
#endif
.endm
/*
 * Exception entry: memory error.
 * Tail-jumps (no return here) to the C handler with
 * %r0 = fault source address, %r1 = saved-register frame.
 */
ENTRY(memory_error)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp		/* 2nd arg: pointer to saved regs */
	j	do_memory_error
ENDPROC(memory_error)
/*
 * Exception entry: instruction error.
 * Tail-jumps (no return here) to the C handler with
 * %r0 = fault source address, %r1 = saved-register frame.
 */
ENTRY(instruction_error)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp		/* 2nd arg: pointer to saved regs */
	j	do_instruction_error
ENDPROC(instruction_error)
/*
 * IRQ entry point.
 * Todo - save and restore CPU context when interrupts will be in use:
 * currently no registers are preserved around the C call, so the
 * interrupted context's registers are at the handler's mercy.
 */
ENTRY(interrupt_handler)
	bl	do_interrupt_handler
	rtie				/* return from interrupt/exception */
ENDPROC(interrupt_handler)
/*
 * Exception entry: machine check.
 * Tail-jumps (no return here) to the C handler with
 * %r0 = fault source address, %r1 = saved-register frame.
 */
ENTRY(EV_MachineCheck)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp		/* 2nd arg: pointer to saved regs */
	j	do_machine_check_fault
ENDPROC(EV_MachineCheck)
/*
 * Exception entry: instruction TLB miss.
 * Tail-jumps to the C handler with %r0 = saved-register frame.
 */
ENTRY(EV_TLBMissI)
	SAVE_ALL_SYS
	mov	%r0, %sp		/* sole arg: pointer to saved regs */
	j	do_itlb_miss
ENDPROC(EV_TLBMissI)
/*
 * Exception entry: data TLB miss.
 * Tail-jumps to the C handler with %r0 = saved-register frame.
 */
ENTRY(EV_TLBMissD)
	SAVE_ALL_SYS
	mov	%r0, %sp		/* sole arg: pointer to saved regs */
	j	do_dtlb_miss
ENDPROC(EV_TLBMissD)
/*
 * Exception entry: TLB protection violation.
 * Tail-jumps (no return here) to the C handler with
 * %r0 = fault source address, %r1 = saved-register frame.
 */
ENTRY(EV_TLBProtV)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp		/* 2nd arg: pointer to saved regs */
	j	do_tlb_prot_violation
ENDPROC(EV_TLBProtV)
/*
 * Exception entry: privilege violation.
 * Tail-jumps to the C handler with %r0 = saved-register frame.
 */
ENTRY(EV_PrivilegeV)
	SAVE_ALL_SYS
	mov	%r0, %sp		/* sole arg: pointer to saved regs */
	j	do_privilege_violation
ENDPROC(EV_PrivilegeV)
/*
 * Exception entry: trap instruction.
 * Tail-jumps to the C handler with %r0 = saved-register frame.
 */
ENTRY(EV_Trap)
	SAVE_ALL_SYS
	mov	%r0, %sp		/* sole arg: pointer to saved regs */
	j	do_trap
ENDPROC(EV_Trap)
/*
 * Exception entry: extension instruction exception.
 * Tail-jumps to the C handler with %r0 = saved-register frame.
 */
ENTRY(EV_Extension)
	SAVE_ALL_SYS
	mov	%r0, %sp		/* sole arg: pointer to saved regs */
	j	do_extension
ENDPROC(EV_Extension)
/* The following exception vectors exist only on ARCv2 cores */
#ifdef CONFIG_ISA_ARCV2
/*
 * Exception entry: software interrupt.
 * Tail-jumps to the C handler with %r0 = saved-register frame.
 */
ENTRY(EV_SWI)
	SAVE_ALL_SYS
	mov	%r0, %sp		/* sole arg: pointer to saved regs */
	j	do_swi
ENDPROC(EV_SWI)

/*
 * Exception entry: division by zero.
 * Tail-jumps to the C handler with %r0 = fault source address,
 * %r1 = saved-register frame.
 */
ENTRY(EV_DivZero)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp		/* 2nd arg: pointer to saved regs */
	j	do_divzero
ENDPROC(EV_DivZero)

/*
 * Exception entry: data cache error.
 * Tail-jumps to the C handler with %r0 = saved-register frame.
 */
ENTRY(EV_DCError)
	SAVE_ALL_SYS
	mov	%r0, %sp		/* sole arg: pointer to saved regs */
	j	do_dcerror
ENDPROC(EV_DCError)

/*
 * Exception entry: misaligned access.
 * Tail-jumps to the C handler with %r0 = fault source address,
 * %r1 = saved-register frame.
 */
ENTRY(EV_Maligned)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp		/* 2nd arg: pointer to saved regs */
	j	do_maligned
ENDPROC(EV_Maligned)
#endif /* CONFIG_ISA_ARCV2 */