trampoline-armv4.S

// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.
#if !defined(__has_feature)
#define __has_feature(x) 0
#endif
#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
#define OPENSSL_NO_ASM
#endif
#if !defined(OPENSSL_NO_ASM)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
.syntax unified
.text
@ abi_test_trampoline loads callee-saved registers from |state|, calls |func|
@ with |argv|, then saves the callee-saved registers into |state|. It returns
@ the result of |func|. The |unwind| argument is unused.
@ uint32_t abi_test_trampoline(void (*func)(...), CallerState *state,
@                              const uint32_t *argv, size_t argc,
@                              int unwind);
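@
@ A minimal usage sketch (comment only, not assembled): assuming CallerState
@ holds d8-d15 followed by the callee-saved core registers in the same order
@ as the vldmia/ldmia sequences below, a test harness might call:
@
@   CallerState state = /* register values to install before the call */;
@   uint32_t args[2] = {1, 2};
@   uint32_t ret = abi_test_trampoline(func, &state, args, 2, 0);
@
@ The names |func| and |args| here are placeholders, not symbols defined in
@ this file.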
.globl _abi_test_trampoline
.private_extern _abi_test_trampoline
.align 4
_abi_test_trampoline:
	@ Save parameters and all callee-saved registers. For convenience, we
	@ save r9 on iOS even though it's volatile.
	vstmdb sp!, {d8,d9,d10,d11,d12,d13,d14,d15}
	stmdb sp!, {r0,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,lr}
	@ Reserve stack space for six (10-4) stack parameters, plus an extra 4
	@ bytes to keep it 8-byte-aligned (see AAPCS, section 5.3).
	sub sp, sp, #28
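	@ At this point the frame is laid out as follows (offsets from sp):
	@   [sp, #0]..[sp, #27]   scratch area for stack parameters
	@   [sp, #28]             saved r0 (func)
	@   [sp, #32]             saved r1 (state)
	@   [sp, #36]             saved r2 (argv)
	@   [sp, #40]             saved r3 (argc)
	@   [sp, #44]...          saved r4-r11, lr, then d8-d15
	@ The fixed offsets used below rely on this layout.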
	@ Every register in AAPCS is either non-volatile or a parameter (except
	@ r9 on iOS), so by the time of the actual call this code has no scratch
	@ registers left. First fill in the stack parameters while there are
	@ still registers to spare.
	cmp r3, #4
	bls Lstack_args_done
	mov r4, sp @ r4 is the output pointer.
	add r5, r2, r3, lsl #2 @ Set r5 to the end of argv.
	add r2, r2, #16 @ Skip four arguments.
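	@ Copy the remaining arguments, argv[4] through argv[argc-1], into the
	@ scratch area reserved above.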
Lstack_args_loop:
	ldr r6, [r2], #4
	cmp r2, r5
	str r6, [r4], #4
	bne Lstack_args_loop
Lstack_args_done:
	@ Load registers from |r1|.
	vldmia r1!, {d8,d9,d10,d11,d12,d13,d14,d15}
#if defined(__APPLE__)
	@ r9 is volatile (not callee-saved) on iOS, so CallerState omits it.
	ldmia r1!, {r4,r5,r6,r7,r8,r10-r11}
#else
	ldmia r1!, {r4,r5,r6,r7,r8,r9,r10,r11}
#endif
	@ Load register parameters. This uses up our remaining registers, so we
	@ repurpose lr as scratch space.
	ldr r3, [sp, #40] @ Reload argc.
	ldr lr, [sp, #36] @ Load argv into lr.
	cmp r3, #3
	bhi Larg_r3
	beq Larg_r2
	cmp r3, #1
	bhi Larg_r1
	beq Larg_r0
	b Largs_done
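	@ The labels below form a fall-through chain: entering at Larg_rN loads
	@ argv[N] and then falls through to load every lower-numbered argument,
	@ so exactly min(argc, 4) register parameters are populated.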
Larg_r3:
	ldr r3, [lr, #12] @ argv[3]
Larg_r2:
	ldr r2, [lr, #8] @ argv[2]
Larg_r1:
	ldr r1, [lr, #4] @ argv[1]
Larg_r0:
	ldr r0, [lr] @ argv[0]
Largs_done:
	@ With every other register in use, load the function pointer into lr
	@ and call the function.
	ldr lr, [sp, #28]
	blx lr
	@ r1-r3 are free for use again. The trampoline only supports
	@ single-return functions. Pass r4-r11 to the caller.
	ldr r1, [sp, #32]
	vstmia r1!, {d8,d9,d10,d11,d12,d13,d14,d15}
#if defined(__APPLE__)
	@ r9 is volatile (not callee-saved) on iOS, so CallerState omits it.
	stmia r1!, {r4,r5,r6,r7,r8,r10-r11}
#else
	stmia r1!, {r4,r5,r6,r7,r8,r9,r10,r11}
#endif
	@ Unwind the stack and restore registers.
	add sp, sp, #44 @ 44 = 28+16
	ldmia sp!, {r4,r5,r6,r7,r8,r9,r10,r11,lr} @ Skip r0-r3 (see +16 above).
	vldmia sp!, {d8,d9,d10,d11,d12,d13,d14,d15}
	bx lr
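@ The abi_test_clobber_* helpers below each zero a single register and
@ return. They are presumably used by the ABI tests to verify which
@ registers a callee is allowed to clobber and which must be preserved.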
.globl _abi_test_clobber_r0
.private_extern _abi_test_clobber_r0
.align 4
_abi_test_clobber_r0:
	mov r0, #0
	bx lr
.globl _abi_test_clobber_r1
.private_extern _abi_test_clobber_r1
.align 4
_abi_test_clobber_r1:
	mov r1, #0
	bx lr
.globl _abi_test_clobber_r2
.private_extern _abi_test_clobber_r2
.align 4
_abi_test_clobber_r2:
	mov r2, #0
	bx lr
.globl _abi_test_clobber_r3
.private_extern _abi_test_clobber_r3
.align 4
_abi_test_clobber_r3:
	mov r3, #0
	bx lr
.globl _abi_test_clobber_r4
.private_extern _abi_test_clobber_r4
.align 4
_abi_test_clobber_r4:
	mov r4, #0
	bx lr
.globl _abi_test_clobber_r5
.private_extern _abi_test_clobber_r5
.align 4
_abi_test_clobber_r5:
	mov r5, #0
	bx lr
.globl _abi_test_clobber_r6
.private_extern _abi_test_clobber_r6
.align 4
_abi_test_clobber_r6:
	mov r6, #0
	bx lr
.globl _abi_test_clobber_r7
.private_extern _abi_test_clobber_r7
.align 4
_abi_test_clobber_r7:
	mov r7, #0
	bx lr
.globl _abi_test_clobber_r8
.private_extern _abi_test_clobber_r8
.align 4
_abi_test_clobber_r8:
	mov r8, #0
	bx lr
.globl _abi_test_clobber_r9
.private_extern _abi_test_clobber_r9
.align 4
_abi_test_clobber_r9:
	mov r9, #0
	bx lr
.globl _abi_test_clobber_r10
.private_extern _abi_test_clobber_r10
.align 4
_abi_test_clobber_r10:
	mov r10, #0
	bx lr
.globl _abi_test_clobber_r11
.private_extern _abi_test_clobber_r11
.align 4
_abi_test_clobber_r11:
	mov r11, #0
	bx lr
.globl _abi_test_clobber_r12
.private_extern _abi_test_clobber_r12
.align 4
_abi_test_clobber_r12:
	mov r12, #0
	bx lr
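@ Each dN register below aliases the single-precision pair s(2N) and s(2N+1),
@ so clobbering dN is done by writing zero to both halves.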
.globl _abi_test_clobber_d0
.private_extern _abi_test_clobber_d0
.align 4
_abi_test_clobber_d0:
	mov r0, #0
	vmov s0, r0
	vmov s1, r0
	bx lr
.globl _abi_test_clobber_d1
.private_extern _abi_test_clobber_d1
.align 4
_abi_test_clobber_d1:
	mov r0, #0
	vmov s2, r0
	vmov s3, r0
	bx lr
.globl _abi_test_clobber_d2
.private_extern _abi_test_clobber_d2
.align 4
_abi_test_clobber_d2:
	mov r0, #0
	vmov s4, r0
	vmov s5, r0
	bx lr
.globl _abi_test_clobber_d3
.private_extern _abi_test_clobber_d3
.align 4
_abi_test_clobber_d3:
	mov r0, #0
	vmov s6, r0
	vmov s7, r0
	bx lr
.globl _abi_test_clobber_d4
.private_extern _abi_test_clobber_d4
.align 4
_abi_test_clobber_d4:
	mov r0, #0
	vmov s8, r0
	vmov s9, r0
	bx lr
.globl _abi_test_clobber_d5
.private_extern _abi_test_clobber_d5
.align 4
_abi_test_clobber_d5:
	mov r0, #0
	vmov s10, r0
	vmov s11, r0
	bx lr
.globl _abi_test_clobber_d6
.private_extern _abi_test_clobber_d6
.align 4
_abi_test_clobber_d6:
	mov r0, #0
	vmov s12, r0
	vmov s13, r0
	bx lr
.globl _abi_test_clobber_d7
.private_extern _abi_test_clobber_d7
.align 4
_abi_test_clobber_d7:
	mov r0, #0
	vmov s14, r0
	vmov s15, r0
	bx lr
.globl _abi_test_clobber_d8
.private_extern _abi_test_clobber_d8
.align 4
_abi_test_clobber_d8:
	mov r0, #0
	vmov s16, r0
	vmov s17, r0
	bx lr
.globl _abi_test_clobber_d9
.private_extern _abi_test_clobber_d9
.align 4
_abi_test_clobber_d9:
	mov r0, #0
	vmov s18, r0
	vmov s19, r0
	bx lr
.globl _abi_test_clobber_d10
.private_extern _abi_test_clobber_d10
.align 4
_abi_test_clobber_d10:
	mov r0, #0
	vmov s20, r0
	vmov s21, r0
	bx lr
.globl _abi_test_clobber_d11
.private_extern _abi_test_clobber_d11
.align 4
_abi_test_clobber_d11:
	mov r0, #0
	vmov s22, r0
	vmov s23, r0
	bx lr
.globl _abi_test_clobber_d12
.private_extern _abi_test_clobber_d12
.align 4
_abi_test_clobber_d12:
	mov r0, #0
	vmov s24, r0
	vmov s25, r0
	bx lr
.globl _abi_test_clobber_d13
.private_extern _abi_test_clobber_d13
.align 4
_abi_test_clobber_d13:
	mov r0, #0
	vmov s26, r0
	vmov s27, r0
	bx lr
.globl _abi_test_clobber_d14
.private_extern _abi_test_clobber_d14
.align 4
_abi_test_clobber_d14:
	mov r0, #0
	vmov s28, r0
	vmov s29, r0
	bx lr
.globl _abi_test_clobber_d15
.private_extern _abi_test_clobber_d15
.align 4
_abi_test_clobber_d15:
	mov r0, #0
	vmov s30, r0
	vmov s31, r0
	bx lr
#endif // !OPENSSL_NO_ASM