/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef MICROPY_INCLUDED_PY_ASMX64_H
#define MICROPY_INCLUDED_PY_ASMX64_H

#include "py/mpconfig.h"
#include "py/misc.h"
#include "py/asmbase.h"

// The AMD64 (System V) calling convention is:
// - args passed in: RDI, RSI, RDX, RCX, R08, R09
// - return value in RAX
// - stack must be aligned on a 16-byte boundary before all calls
// - RAX, RCX, RDX, RSI, RDI, R08, R09, R10, R11 are caller-save
// - RBX, RBP, R12, R13, R14, R15 are callee-save
//
// In the functions below, argument order follows the x86 docs and generally
// the destination is the first argument.
// NOTE: this is a change from the old convention used in this file and
// some functions still use the old (reverse) convention.
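
// Example of the destination-first argument order (an informal sketch, not
// part of the API; `as` is assumed to be an initialised asm_x64_t):
//
//     asm_x64_mov_r64_r64(as, ASM_X64_REG_RBX, ASM_X64_REG_RAX);  // RBX <- RAX
//     asm_x64_add_r64_r64(as, ASM_X64_REG_RBX, ASM_X64_REG_RSI);  // RBX += RSI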

#define ASM_X64_REG_RAX (0)
#define ASM_X64_REG_RCX (1)
#define ASM_X64_REG_RDX (2)
#define ASM_X64_REG_RBX (3)
#define ASM_X64_REG_RSP (4)
#define ASM_X64_REG_RBP (5)
#define ASM_X64_REG_RSI (6)
#define ASM_X64_REG_RDI (7)
#define ASM_X64_REG_R08 (8)
#define ASM_X64_REG_R09 (9)
#define ASM_X64_REG_R10 (10)
#define ASM_X64_REG_R11 (11)
#define ASM_X64_REG_R12 (12)
#define ASM_X64_REG_R13 (13)
#define ASM_X64_REG_R14 (14)
#define ASM_X64_REG_R15 (15)

// condition codes, used for jcc and setcc (despite their j-name!)
#define ASM_X64_CC_JB  (0x2) // below, unsigned
#define ASM_X64_CC_JAE (0x3) // above or equal, unsigned
#define ASM_X64_CC_JZ  (0x4)
#define ASM_X64_CC_JE  (0x4)
#define ASM_X64_CC_JNZ (0x5)
#define ASM_X64_CC_JNE (0x5)
#define ASM_X64_CC_JBE (0x6) // below or equal, unsigned
#define ASM_X64_CC_JA  (0x7) // above, unsigned
#define ASM_X64_CC_JL  (0xc) // less, signed
#define ASM_X64_CC_JGE (0xd) // greater or equal, signed
#define ASM_X64_CC_JLE (0xe) // less or equal, signed
#define ASM_X64_CC_JG  (0xf) // greater, signed
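
// Example (informal sketch) of capturing a condition as a 0/1 value; the same
// ASM_X64_CC_* codes are also used with asm_x64_jcc_label() for conditional jumps:
//
//     asm_x64_xor_r64_r64(as, ASM_X64_REG_RAX, ASM_X64_REG_RAX);     // RAX = 0
//     asm_x64_cmp_r64_with_r64(as, ASM_X64_REG_RDX, ASM_X64_REG_RSI);
//     asm_x64_setcc_r8(as, ASM_X64_CC_JE, ASM_X64_REG_RAX);          // AL = (RDX == RSI)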

typedef struct _asm_x64_t {
    mp_asm_base_t base;

    int num_locals;
} asm_x64_t;

static inline void asm_x64_end_pass(asm_x64_t *as) {
    (void)as;
}

void asm_x64_nop(asm_x64_t *as);
void asm_x64_push_r64(asm_x64_t *as, int src_r64);
void asm_x64_pop_r64(asm_x64_t *as, int dest_r64);
void asm_x64_mov_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
size_t asm_x64_mov_i32_to_r64(asm_x64_t *as, int src_i32, int dest_r64);
void asm_x64_mov_i64_to_r64(asm_x64_t *as, int64_t src_i64, int dest_r64);
void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r64);
void asm_x64_mov_r8_to_mem8(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_mov_r16_to_mem16(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_mov_r32_to_mem32(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_mov_r64_to_mem64(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_mov_mem8_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64);
void asm_x64_mov_mem16_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64);
void asm_x64_mov_mem32_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64);
void asm_x64_mov_mem64_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64);
void asm_x64_not_r64(asm_x64_t *as, int dest_r64);
void asm_x64_neg_r64(asm_x64_t *as, int dest_r64);
void asm_x64_and_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_or_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_xor_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_shl_r64_cl(asm_x64_t *as, int dest_r64);
void asm_x64_shr_r64_cl(asm_x64_t *as, int dest_r64);
void asm_x64_sar_r64_cl(asm_x64_t *as, int dest_r64);
void asm_x64_add_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_sub_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_mul_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_cmp_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b);
void asm_x64_test_r8_with_r8(asm_x64_t *as, int src_r64_a, int src_r64_b);
void asm_x64_test_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b);
void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8);
void asm_x64_jmp_reg(asm_x64_t *as, int src_r64);
void asm_x64_jmp_label(asm_x64_t *as, mp_uint_t label);
void asm_x64_jcc_label(asm_x64_t *as, int jcc_type, mp_uint_t label);
void asm_x64_entry(asm_x64_t *as, int num_locals);
void asm_x64_exit(asm_x64_t *as);
void asm_x64_mov_local_to_r64(asm_x64_t *as, int src_local_num, int dest_r64);
void asm_x64_mov_r64_to_local(asm_x64_t *as, int src_r64, int dest_local_num);
void asm_x64_mov_local_addr_to_r64(asm_x64_t *as, int local_num, int dest_r64);
void asm_x64_mov_reg_pcrel(asm_x64_t *as, int dest_r64, mp_uint_t label);
void asm_x64_call_ind(asm_x64_t *as, size_t fun_id, int temp_r32);
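
// Example (informal sketch) of emitting a complete function with the API above:
// load a 64-bit word from the address passed as the first argument and return
// it (assumes `as` has been set up by the usual two-pass assembler driver):
//
//     asm_x64_entry(as, 0);                                               // prologue, no locals
//     asm_x64_mov_mem64_to_r64(as, ASM_X64_REG_RDI, 0, ASM_X64_REG_RAX);  // RAX = *(uint64_t *)RDI
//     asm_x64_exit(as);                                                   // epilogue and ret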

// Holds a pointer to mp_fun_table
#define ASM_X64_REG_FUN_TABLE ASM_X64_REG_RBP

#if GENERIC_ASM_API

// The following macros provide a (mostly) arch-independent API to
// generate native code, and are used by the native emitter.

#define ASM_WORD_SIZE (8)

#define REG_RET ASM_X64_REG_RAX
#define REG_ARG_1 ASM_X64_REG_RDI
#define REG_ARG_2 ASM_X64_REG_RSI
#define REG_ARG_3 ASM_X64_REG_RDX
#define REG_ARG_4 ASM_X64_REG_RCX
#define REG_ARG_5 ASM_X64_REG_R08

// caller-save
#define REG_TEMP0 ASM_X64_REG_RAX
#define REG_TEMP1 ASM_X64_REG_RDI
#define REG_TEMP2 ASM_X64_REG_RSI

// callee-save
#define REG_LOCAL_1 ASM_X64_REG_RBX
#define REG_LOCAL_2 ASM_X64_REG_R12
#define REG_LOCAL_3 ASM_X64_REG_R13
#define REG_LOCAL_NUM (3)

// Holds a pointer to mp_fun_table
#define REG_FUN_TABLE ASM_X64_REG_FUN_TABLE

#define ASM_T               asm_x64_t
#define ASM_END_PASS        asm_x64_end_pass
#define ASM_ENTRY           asm_x64_entry
#define ASM_EXIT            asm_x64_exit

#define ASM_JUMP asm_x64_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label, bool_test) \
    do { \
        if (bool_test) { \
            asm_x64_test_r8_with_r8((as), (reg), (reg)); \
        } else { \
            asm_x64_test_r64_with_r64((as), (reg), (reg)); \
        } \
        asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label, bool_test) \
    do { \
        if (bool_test) { \
            asm_x64_test_r8_with_r8((as), (reg), (reg)); \
        } else { \
            asm_x64_test_r64_with_r64((as), (reg), (reg)); \
        } \
        asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
        asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
    } while (0)
#define ASM_JUMP_REG(as, reg) asm_x64_jmp_reg((as), (reg))
#define ASM_CALL_IND(as, idx) asm_x64_call_ind(as, idx, ASM_X64_REG_RAX)

#define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_x64_mov_r64_to_local((as), (reg_src), (local_num))
#define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_x64_mov_i64_to_r64_optimised((as), (imm), (reg_dest))
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_x64_mov_local_to_r64((as), (local_num), (reg_dest))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_x64_mov_local_addr_to_r64((as), (local_num), (reg_dest))
#define ASM_MOV_REG_PCREL(as, reg_dest, label) asm_x64_mov_reg_pcrel((as), (reg_dest), (label))

#define ASM_NOT_REG(as, reg) asm_x64_not_r64((as), (reg))
#define ASM_NEG_REG(as, reg) asm_x64_neg_r64((as), (reg))
#define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg))
#define ASM_LSR_REG(as, reg) asm_x64_shr_r64_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x64_or_r64_r64((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x64_xor_r64_r64((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x64_and_r64_r64((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x64_add_r64_r64((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x64_sub_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x64_mul_r64_r64((as), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem64_to_r64((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x64_mov_mem64_to_r64((as), (reg_base), 8 * (word_offset), (reg_dest))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem8_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem16_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG_OFFSET(as, reg_dest, reg_base, uint16_offset) asm_x64_mov_mem16_to_r64zx((as), (reg_base), 2 * (uint16_offset), (reg_dest))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem32_to_r64zx((as), (reg_base), 0, (reg_dest))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 8 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x64_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x64_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x64_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)
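
// A minimal usage sketch of the generic layer (hypothetical; `as` comes from
// the native emitter): on x86-64 these macros expand to the asm_x64_* calls
// declared above.
//
//     ASM_ENTRY(as, 0);
//     ASM_MOV_REG_REG(as, REG_RET, REG_ARG_1);  // return value = first argument
//     ASM_EXIT(as);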
#endif // GENERIC_ASM_API

#endif // MICROPY_INCLUDED_PY_ASMX64_H