sljitNativeX86_64.c 21 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747
  1. /*
  2. * Stack-less Just-In-Time compiler
  3. *
  4. * Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
  5. *
  6. * Redistribution and use in source and binary forms, with or without modification, are
  7. * permitted provided that the following conditions are met:
  8. *
  9. * 1. Redistributions of source code must retain the above copyright notice, this list of
  10. * conditions and the following disclaimer.
  11. *
  12. * 2. Redistributions in binary form must reproduce the above copyright notice, this list
  13. * of conditions and the following disclaimer in the documentation and/or other materials
  14. * provided with the distribution.
  15. *
  16. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
  17. * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
  18. * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
  19. * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
  20. * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
  21. * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
  22. * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  23. * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
  24. * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  25. */
  26. /* x86 64-bit arch dependent functions. */
  27. static sljit_si emit_load_imm64(struct sljit_compiler *compiler, sljit_si reg, sljit_sw imm)
  28. {
  29. sljit_ub *inst;
  30. inst = (sljit_ub*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw));
  31. FAIL_IF(!inst);
  32. INC_SIZE(2 + sizeof(sljit_sw));
  33. *inst++ = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B);
  34. *inst++ = MOV_r_i32 + (reg_map[reg] & 0x7);
  35. *(sljit_sw*)inst = imm;
  36. return SLJIT_SUCCESS;
  37. }
/* Emits a far (absolute, 64 bit) jump or call through TMP_REG3 (r9):
   "mov r9, imm64" followed by "jmp/call r9". For conditional types an
   inverted 2 byte short jump is emitted first to skip the sequence.
   Records the immediate's address in jump->addr so it can be patched
   later, and returns the advanced code pointer. */
static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_si type)
{
	if (type < SLJIT_JUMP) {
		/* Invert type: a short jcc with the opposite condition skips the
		   far-jump sequence below. Subtracting 0x10 converts the near jcc
		   opcode byte returned by get_jump_code() to its short (0x7x) form. */
		*code_ptr++ = get_jump_code(type ^ 0x1) - 0x10;
		/* Skip distance: 10 byte mov imm64 + 3 byte indirect jump. */
		*code_ptr++ = 10 + 3;
	}

	SLJIT_COMPILE_ASSERT(reg_map[TMP_REG3] == 9, tmp3_is_9_first);
	/* mov r9, imm64: REX.W selects 64 bit, REX.B plus the +1 in the
	   opcode select register 9 (9 & 0x7 == 1). */
	*code_ptr++ = REX_W | REX_B;
	*code_ptr++ = MOV_r_i32 + 1;

	/* The 8 byte immediate starts here; remember it for patching. */
	jump->addr = (sljit_uw)code_ptr;

	if (jump->flags & JUMP_LABEL)
		/* Target is a label: leave the immediate to be patched later. */
		jump->flags |= PATCH_MD;
	else
		/* Absolute target is already known: store it now. */
		*(sljit_sw*)code_ptr = jump->u.target;

	code_ptr += sizeof(sljit_sw);

	/* jmp/call r9: GROUP_FF with the /r extension, REX.B for r9. */
	*code_ptr++ = REX_B;
	*code_ptr++ = GROUP_FF;
	*code_ptr++ = (type >= SLJIT_FAST_CALL) ? (MOD_REG | CALL_rm | 1) : (MOD_REG | JMP_rm | 1);

	return code_ptr;
}
/* Emits a jump (type != 2) or call (type == 2) to a fixed, known address.
   Uses a 5 byte rel32 form when the target is in +-2GB range, otherwise
   falls back to the absolute "mov r9, imm64; jmp/call r9" sequence. */
static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_sw addr, sljit_si type)
{
	/* Displacement relative to the end of a 5 byte rel32 jump/call. */
	sljit_sw delta = addr - ((sljit_sw)code_ptr + 1 + sizeof(sljit_si));

	if (delta <= HALFWORD_MAX && delta >= HALFWORD_MIN) {
		*code_ptr++ = (type == 2) ? CALL_i32 : JMP_i32;
		/* NOTE(review): stores a full sljit_sw although only the low 32
		   bits form the rel32; code_ptr is not advanced past the
		   immediate here — the caller appears to account for it. Confirm
		   against the common-code call site. */
		*(sljit_sw*)code_ptr = delta;
	}
	else {
		SLJIT_COMPILE_ASSERT(reg_map[TMP_REG3] == 9, tmp3_is_9_second);
		/* mov r9, imm64 (REX.W + REX.B, opcode +1 selects r9). */
		*code_ptr++ = REX_W | REX_B;
		*code_ptr++ = MOV_r_i32 + 1;
		*(sljit_sw*)code_ptr = addr;
		code_ptr += sizeof(sljit_sw);
		/* jmp/call r9. */
		*code_ptr++ = REX_B;
		*code_ptr++ = GROUP_FF;
		*code_ptr++ = (type == 2) ? (MOD_REG | CALL_rm | 1) : (MOD_REG | JMP_rm | 1);
	}

	return code_ptr;
}
/* Emits the function prologue: pushes the saved (and saved-scratch)
   registers, moves the incoming argument registers into S0-S2, and
   allocates the 16 byte aligned local area on the stack. On Windows x64
   it additionally grows the stack page-by-page for large frames and
   spills xmm6 when float registers require it. */
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
	sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
	sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
{
	sljit_si i, tmp, size, saved_register_size;
	sljit_ub *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
	set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);

	compiler->flags_saved = 0;

	/* Including the return address saved by the call instruction. */
	saved_register_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1);

	/* Push the requested saved registers, from SLJIT_S0 downwards.
	   Registers r8-r15 need a REX.B prefix byte before the push. */
	tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
	for (i = SLJIT_S0; i >= tmp; i--) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		PUSH_REG(reg_lmap[i]);
	}

	/* Push the scratch registers that fall into the callee-saved range. */
	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		PUSH_REG(reg_lmap[i]);
	}

	/* Move the incoming ABI argument registers into S0-S2.
	   Each move is a 3 byte REX.W mov. */
	if (args > 0) {
		size = args * 3;
		inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);

		INC_SIZE(size);

#ifndef _WIN64
		/* System V AMD64: arguments arrive in rdi, rsi, rdx. */
		if (args > 0) {
			*inst++ = REX_W;
			*inst++ = MOV_r_rm;
			*inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | 0x7 /* rdi */;
		}
		if (args > 1) {
			/* REX.R extends the destination register field (S1/S2 map
			   to the r8-r15 bank here). */
			*inst++ = REX_W | REX_R;
			*inst++ = MOV_r_rm;
			*inst++ = MOD_REG | (reg_lmap[SLJIT_S1] << 3) | 0x6 /* rsi */;
		}
		if (args > 2) {
			*inst++ = REX_W | REX_R;
			*inst++ = MOV_r_rm;
			*inst++ = MOD_REG | (reg_lmap[SLJIT_S2] << 3) | 0x2 /* rdx */;
		}
#else
		/* Windows x64: arguments arrive in rcx, rdx, r8. */
		if (args > 0) {
			*inst++ = REX_W;
			*inst++ = MOV_r_rm;
			*inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | 0x1 /* rcx */;
		}
		if (args > 1) {
			*inst++ = REX_W;
			*inst++ = MOV_r_rm;
			*inst++ = MOD_REG | (reg_map[SLJIT_S1] << 3) | 0x2 /* rdx */;
		}
		if (args > 2) {
			/* REX.B extends the source register field (r8). */
			*inst++ = REX_W | REX_B;
			*inst++ = MOV_r_rm;
			*inst++ = MOD_REG | (reg_map[SLJIT_S2] << 3) | 0x0 /* r8 */;
		}
#endif
	}

	/* Round the whole frame (locals + pushed registers) up to 16 bytes,
	   then subtract the part the pushes already consumed. */
	local_size = ((local_size + SLJIT_LOCALS_OFFSET + saved_register_size + 15) & ~15) - saved_register_size;
	compiler->local_size = local_size;

#ifdef _WIN64
	if (local_size > 1024) {
		/* Allocate stack for the callback, which grows the stack. */
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 + (3 + sizeof(sljit_si)));
		FAIL_IF(!inst);
		INC_SIZE(4 + (3 + sizeof(sljit_si)));
		/* sub rsp, imm8: reserve the helper's shadow space first. */
		*inst++ = REX_W;
		*inst++ = GROUP_BINARY_83;
		*inst++ = MOD_REG | SUB | 4;
		/* Allocated size for registers must be divisible by 8. */
		SLJIT_ASSERT(!(saved_register_size & 0x7));
		/* Aligned to 16 byte. */
		if (saved_register_size & 0x8) {
			*inst++ = 5 * sizeof(sljit_sw);
			local_size -= 5 * sizeof(sljit_sw);
		} else {
			*inst++ = 4 * sizeof(sljit_sw);
			local_size -= 4 * sizeof(sljit_sw);
		}
		/* Second instruction: mov R0, local_size (imm32), the argument
		   for the stack-growing helper called below. */
		SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R0] < 8, temporary_reg1_is_loreg);
		*inst++ = REX_W;
		*inst++ = MOV_rm_i32;
		*inst++ = MOD_REG | reg_lmap[SLJIT_R0];
		*(sljit_si*)inst = local_size;
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
	|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
		compiler->skip_checks = 1;
#endif
		/* Touch each page so Windows commits the guard pages in order. */
		FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_grow_stack)));
	}
#endif

	SLJIT_ASSERT(local_size > 0);
	/* sub rsp, local_size: imm8 form when it fits, imm32 otherwise. */
	if (local_size <= 127) {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
		FAIL_IF(!inst);
		INC_SIZE(4);
		*inst++ = REX_W;
		*inst++ = GROUP_BINARY_83;
		*inst++ = MOD_REG | SUB | 4;
		*inst++ = local_size;
	}
	else {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 7);
		FAIL_IF(!inst);
		INC_SIZE(7);
		*inst++ = REX_W;
		*inst++ = GROUP_BINARY_81;
		*inst++ = MOD_REG | SUB | 4;
		*(sljit_si*)inst = local_size;
		inst += sizeof(sljit_si);
	}

#ifdef _WIN64
	/* Save xmm6 register: movaps [rsp + 0x20], xmm6 */
	if (fscratches >= 6 || fsaveds >= 1) {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
		FAIL_IF(!inst);
		INC_SIZE(5);
		*inst++ = GROUP_0F;
		/* Little endian: bytes 29 74 24 20 complete the movaps encoding. */
		*(sljit_si*)inst = 0x20247429;
	}
#endif

	return SLJIT_SUCCESS;
}
  214. SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
  215. sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
  216. sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
  217. {
  218. sljit_si saved_register_size;
  219. CHECK_ERROR();
  220. CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
  221. set_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
  222. /* Including the return address saved by the call instruction. */
  223. saved_register_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1);
  224. compiler->local_size = ((local_size + SLJIT_LOCALS_OFFSET + saved_register_size + 15) & ~15) - saved_register_size;
  225. return SLJIT_SUCCESS;
  226. }
/* Emits the function epilogue: moves the return value into place,
   releases the local area, pops the saved registers in the reverse of
   the prologue's push order, and emits ret. On Windows x64 it first
   restores xmm6 if the prologue saved it. */
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
{
	sljit_si i, tmp, size;
	sljit_ub *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_return(compiler, op, src, srcw));

	compiler->flags_saved = 0;
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

#ifdef _WIN64
	/* Restore xmm6 register: movaps xmm6, [rsp + 0x20] */
	if (compiler->fscratches >= 6 || compiler->fsaveds >= 1) {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
		FAIL_IF(!inst);
		INC_SIZE(5);
		*inst++ = GROUP_0F;
		/* Little endian: bytes 28 74 24 20 complete the movaps encoding. */
		*(sljit_si*)inst = 0x20247428;
	}
#endif

	SLJIT_ASSERT(compiler->local_size > 0);
	/* add rsp, local_size: imm8 form when it fits, imm32 otherwise. */
	if (compiler->local_size <= 127) {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
		FAIL_IF(!inst);
		INC_SIZE(4);
		*inst++ = REX_W;
		*inst++ = GROUP_BINARY_83;
		*inst++ = MOD_REG | ADD | 4;
		*inst = compiler->local_size;
	}
	else {
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 7);
		FAIL_IF(!inst);
		INC_SIZE(7);
		*inst++ = REX_W;
		*inst++ = GROUP_BINARY_81;
		*inst++ = MOD_REG | ADD | 4;
		*(sljit_si*)inst = compiler->local_size;
	}

	/* Pop the callee-saved scratch registers (pushed last, popped first).
	   Registers r8-r15 need a REX.B prefix before the pop. */
	tmp = compiler->scratches;
	for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		POP_REG(reg_lmap[i]);
	}

	/* Pop the saved registers in the reverse of their push order. */
	tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
	for (i = tmp; i <= SLJIT_S0; i++) {
		size = reg_map[i] >= 8 ? 2 : 1;
		inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
		if (reg_map[i] >= 8)
			*inst++ = REX_B;
		POP_REG(reg_lmap[i]);
	}

	inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
	FAIL_IF(!inst);
	INC_SIZE(1);
	RET();
	return SLJIT_SUCCESS;
}
  290. /* --------------------------------------------------------------------- */
  291. /* Operators */
  292. /* --------------------------------------------------------------------- */
  293. static sljit_si emit_do_imm32(struct sljit_compiler *compiler, sljit_ub rex, sljit_ub opcode, sljit_sw imm)
  294. {
  295. sljit_ub *inst;
  296. sljit_si length = 1 + (rex ? 1 : 0) + sizeof(sljit_si);
  297. inst = (sljit_ub*)ensure_buf(compiler, 1 + length);
  298. FAIL_IF(!inst);
  299. INC_SIZE(length);
  300. if (rex)
  301. *inst++ = rex;
  302. *inst++ = opcode;
  303. *(sljit_si*)inst = imm;
  304. return SLJIT_SUCCESS;
  305. }
/* Core instruction encoder: computes the total instruction length
   (prefixes + REX + opcode bytes + ModRM + optional SIB + displacement
   + immediate), reserves it in the buffer, and emits everything except
   the opcode itself. Returns a pointer to the reserved opcode byte(s)
   so the caller can fill them in, or NULL on allocation failure.
   'size' carries the opcode byte count in its low nibble and EX86_*
   flags in the upper bits. */
static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si size,
	/* The register or immediate operand. */
	sljit_si a, sljit_sw imma,
	/* The general operand (not immediate). */
	sljit_si b, sljit_sw immb)
{
	sljit_ub *inst;
	sljit_ub *buf_ptr;
	sljit_ub rex = 0;
	sljit_si flags = size & ~0xf;
	sljit_si inst_size;

	/* The immediate operand must be 32 bit. */
	SLJIT_ASSERT(!(a & SLJIT_IMM) || compiler->mode32 || IS_HALFWORD(imma));
	/* Both cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
	/* Size flags not allowed for typed instructions. */
	SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
	/* Both size flags cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
	/* SSE2 and immediate is not possible. */
	SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
	/* At most one mandatory prefix may be requested. */
	SLJIT_ASSERT((flags & (EX86_PREF_F2 | EX86_PREF_F3)) != (EX86_PREF_F2 | EX86_PREF_F3)
		&& (flags & (EX86_PREF_F2 | EX86_PREF_66)) != (EX86_PREF_F2 | EX86_PREF_66)
		&& (flags & (EX86_PREF_F3 | EX86_PREF_66)) != (EX86_PREF_F3 | EX86_PREF_66));

	/* Low nibble of 'size' is the opcode byte count. */
	size &= 0xf;
	inst_size = size;

	/* 64 bit operation needs REX.W unless explicitly suppressed. */
	if (!compiler->mode32 && !(flags & EX86_NO_REXW))
		rex |= REX_W;
	else if (flags & EX86_REX)
		rex |= REX;

	if (flags & (EX86_PREF_F2 | EX86_PREF_F3))
		inst_size++;
	if (flags & EX86_PREF_66)
		inst_size++;

	/* Calculate size of b. */
	inst_size += 1; /* mod r/m byte. */
	if (b & SLJIT_MEM) {
		if (!(b & OFFS_REG_MASK)) {
			if (NOT_HALFWORD(immb)) {
				/* Offset does not fit in 32 bits: load it into TMP_REG3
				   and use it as (or add it to) the base/index register. */
				if (emit_load_imm64(compiler, TMP_REG3, immb))
					return NULL;
				immb = 0;
				if (b & REG_MASK)
					b |= TO_OFFS_REG(TMP_REG3);
				else
					b |= TMP_REG3;
			}
			else if (reg_lmap[b & REG_MASK] == 4)
				/* Base is rsp/r12 (lmap 4): force the SIB form, since
				   plain ModRM with base 100b means "SIB follows". */
				b |= TO_OFFS_REG(SLJIT_SP);
		}

		if ((b & REG_MASK) == SLJIT_UNUSED)
			inst_size += 1 + sizeof(sljit_si); /* SIB byte required to avoid RIP based addressing. */
		else {
			if (reg_map[b & REG_MASK] >= 8)
				rex |= REX_B;

			if (immb != 0 && (!(b & OFFS_REG_MASK) || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP))) {
				/* Immediate operand. */
				if (immb <= 127 && immb >= -128)
					inst_size += sizeof(sljit_sb);
				else
					inst_size += sizeof(sljit_si);
			}
			else if (reg_lmap[b & REG_MASK] == 5)
				/* Base rbp/r13 (lmap 5) always needs a displacement byte. */
				inst_size += sizeof(sljit_sb);

			if ((b & OFFS_REG_MASK) != SLJIT_UNUSED) {
				inst_size += 1; /* SIB byte. */
				if (reg_map[OFFS_REG(b)] >= 8)
					rex |= REX_X;
			}
		}
	}
	else if (!(flags & EX86_SSE2_OP2) && reg_map[b] >= 8)
		rex |= REX_B;

	if (a & SLJIT_IMM) {
		if (flags & EX86_BIN_INS) {
			/* Binary-group instructions use the short imm8 form (0x83)
			   when the immediate fits in a signed byte. */
			if (imma <= 127 && imma >= -128) {
				inst_size += 1;
				flags |= EX86_BYTE_ARG;
			} else
				inst_size += 4;
		}
		else if (flags & EX86_SHIFT_INS) {
			/* Shift counts are masked to the operand width; a count of 1
			   uses the dedicated no-immediate opcode. */
			imma &= compiler->mode32 ? 0x1f : 0x3f;
			if (imma != 1) {
				inst_size ++;
				flags |= EX86_BYTE_ARG;
			}
		} else if (flags & EX86_BYTE_ARG)
			inst_size++;
		else if (flags & EX86_HALF_ARG)
			inst_size += sizeof(short);
		else
			inst_size += sizeof(sljit_si);
	}
	else {
		SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);
		/* reg_map[SLJIT_PREF_SHIFT_REG] is less than 8. */
		if (!(flags & EX86_SSE2_OP1) && reg_map[a] >= 8)
			rex |= REX_R;
	}

	if (rex)
		inst_size++;

	inst = (sljit_ub*)ensure_buf(compiler, 1 + inst_size);
	PTR_FAIL_IF(!inst);

	/* Encoding the byte. */
	INC_SIZE(inst_size);
	if (flags & EX86_PREF_F2)
		*inst++ = 0xf2;
	if (flags & EX86_PREF_F3)
		*inst++ = 0xf3;
	if (flags & EX86_PREF_66)
		*inst++ = 0x66;
	if (rex)
		*inst++ = rex;
	/* 'inst' now points at the opcode slot; ModRM/SIB go after it. */
	buf_ptr = inst + size;

	/* Encode mod/rm byte. */
	if (!(flags & EX86_SHIFT_INS)) {
		if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
			*inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;

		if ((a & SLJIT_IMM) || (a == 0))
			*buf_ptr = 0;
		else if (!(flags & EX86_SSE2_OP1))
			*buf_ptr = reg_lmap[a] << 3;
		else
			*buf_ptr = a << 3;
	}
	else {
		if (a & SLJIT_IMM) {
			if (imma == 1)
				*inst = GROUP_SHIFT_1;
			else
				*inst = GROUP_SHIFT_N;
		} else
			*inst = GROUP_SHIFT_CL;
		*buf_ptr = 0;
	}

	if (!(b & SLJIT_MEM))
		/* Register direct: mod = 11b. */
		*buf_ptr++ |= MOD_REG + ((!(flags & EX86_SSE2_OP2)) ? reg_lmap[b] : b);
	else if ((b & REG_MASK) != SLJIT_UNUSED) {
		if ((b & OFFS_REG_MASK) == SLJIT_UNUSED || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP)) {
			/* mod = 01b (disp8) or 10b (disp32) when a displacement
			   is present or the base is rbp/r13. */
			if (immb != 0 || reg_lmap[b & REG_MASK] == 5) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr |= 0x40;
				else
					*buf_ptr |= 0x80;
			}

			if ((b & OFFS_REG_MASK) == SLJIT_UNUSED)
				*buf_ptr++ |= reg_lmap[b & REG_MASK];
			else {
				/* rm = 100b: SIB byte follows. */
				*buf_ptr++ |= 0x04;
				*buf_ptr++ = reg_lmap[b & REG_MASK] | (reg_lmap[OFFS_REG(b)] << 3);
			}

			if (immb != 0 || reg_lmap[b & REG_MASK] == 5) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr++ = immb; /* 8 bit displacement. */
				else {
					*(sljit_si*)buf_ptr = immb; /* 32 bit displacement. */
					buf_ptr += sizeof(sljit_si);
				}
			}
		}
		else {
			/* Base + scaled index form: immb holds the scale shift here. */
			if (reg_lmap[b & REG_MASK] == 5)
				*buf_ptr |= 0x40;
			*buf_ptr++ |= 0x04;
			*buf_ptr++ = reg_lmap[b & REG_MASK] | (reg_lmap[OFFS_REG(b)] << 3) | (immb << 6);
			if (reg_lmap[b & REG_MASK] == 5)
				*buf_ptr++ = 0;
		}
	}
	else {
		/* No base register: SIB with base 101b (0x25) encodes an
		   absolute 32 bit address instead of RIP-relative. */
		*buf_ptr++ |= 0x04;
		*buf_ptr++ = 0x25;
		*(sljit_si*)buf_ptr = immb; /* 32 bit displacement. */
		buf_ptr += sizeof(sljit_si);
	}

	if (a & SLJIT_IMM) {
		if (flags & EX86_BYTE_ARG)
			*buf_ptr = imma;
		else if (flags & EX86_HALF_ARG)
			*(short*)buf_ptr = imma;
		else if (!(flags & EX86_SHIFT_INS))
			*(sljit_si*)buf_ptr = imma;
	}

	/* For shift instructions the opcode slot was already written above,
	   so hand back the byte after it. */
	return !(flags & EX86_SHIFT_INS) ? inst : (inst + 1);
}
  492. /* --------------------------------------------------------------------- */
  493. /* Call / return instructions */
  494. /* --------------------------------------------------------------------- */
/* Moves SLJIT argument registers into the ABI argument registers before
   a call. R1 already maps to the second ABI argument register (rsi on
   System V, rdx on Windows — see the compile asserts), so only R0 and,
   for SLJIT_CALL3, R2 need explicit moves. */
static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, sljit_si type)
{
	sljit_ub *inst;

#ifndef _WIN64
	SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);

	inst = (sljit_ub*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
	FAIL_IF(!inst);
	INC_SIZE((type < SLJIT_CALL3) ? 3 : 6);
	if (type >= SLJIT_CALL3) {
		/* mov rdx, R2 (third System V argument). */
		*inst++ = REX_W;
		*inst++ = MOV_r_rm;
		*inst++ = MOD_REG | (0x2 /* rdx */ << 3) | reg_lmap[SLJIT_R2];
	}
	/* mov rdi, R0 (first System V argument). */
	*inst++ = REX_W;
	*inst++ = MOV_r_rm;
	*inst++ = MOD_REG | (0x7 /* rdi */ << 3) | reg_lmap[SLJIT_R0];
#else
	SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);

	inst = (sljit_ub*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
	FAIL_IF(!inst);
	INC_SIZE((type < SLJIT_CALL3) ? 3 : 6);
	if (type >= SLJIT_CALL3) {
		/* mov r8, R2 (third Windows x64 argument); REX.R selects r8. */
		*inst++ = REX_W | REX_R;
		*inst++ = MOV_r_rm;
		*inst++ = MOD_REG | (0x0 /* r8 */ << 3) | reg_lmap[SLJIT_R2];
	}
	/* mov rcx, R0 (first Windows x64 argument). */
	*inst++ = REX_W;
	*inst++ = MOV_r_rm;
	*inst++ = MOD_REG | (0x1 /* rcx */ << 3) | reg_lmap[SLJIT_R0];
#endif
	return SLJIT_SUCCESS;
}
/* Pops the return address (pushed by the fast call) into dst: a one or
   two byte "pop reg" for registers, or "pop [mem]" for memory operands. */
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
{
	sljit_ub *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	/* For UNUSED dst. Uncommon, but possible. */
	if (dst == SLJIT_UNUSED)
		dst = TMP_REG1;

	if (FAST_IS_REG(dst)) {
		if (reg_map[dst] < 8) {
			inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
			FAIL_IF(!inst);
			INC_SIZE(1);
			POP_REG(reg_lmap[dst]);
			return SLJIT_SUCCESS;
		}

		/* Registers r8-r15 need a REX.B prefix before the pop. */
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 2);
		FAIL_IF(!inst);
		INC_SIZE(2);
		*inst++ = REX_B;
		POP_REG(reg_lmap[dst]);
		return SLJIT_SUCCESS;
	}

	/* REX_W is not necessary (src is not immediate). */
	compiler->mode32 = 1;
	inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
	FAIL_IF(!inst);
	*inst++ = POP_rm;
	return SLJIT_SUCCESS;
}
/* Returns from a fast call: pushes src (register, memory, or immediate
   return address) onto the stack and emits ret, which jumps to it. */
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
{
	sljit_ub *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	/* An immediate outside the signed 32 bit range cannot be pushed
	   directly; load it into TMP_REG1 first. */
	if ((src & SLJIT_IMM) && NOT_HALFWORD(srcw)) {
		FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
		src = TMP_REG1;
	}

	if (FAST_IS_REG(src)) {
		if (reg_map[src] < 8) {
			/* push reg + ret. */
			inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 1);
			FAIL_IF(!inst);

			INC_SIZE(1 + 1);
			PUSH_REG(reg_lmap[src]);
		}
		else {
			/* REX.B prefix + push reg + ret for r8-r15. */
			inst = (sljit_ub*)ensure_buf(compiler, 1 + 2 + 1);
			FAIL_IF(!inst);

			INC_SIZE(2 + 1);
			*inst++ = REX_B;
			PUSH_REG(reg_lmap[src]);
		}
	}
	else if (src & SLJIT_MEM) {
		/* REX_W is not necessary (src is not immediate). */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!inst);
		*inst++ = GROUP_FF;
		*inst |= PUSH_rm;

		/* Reserve one extra byte for the ret below. */
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);

		INC_SIZE(1);
	}
	else {
		SLJIT_ASSERT(IS_HALFWORD(srcw));
		/* SLJIT_IMM. */
		inst = (sljit_ub*)ensure_buf(compiler, 1 + 5 + 1);
		FAIL_IF(!inst);

		INC_SIZE(5 + 1);
		*inst++ = PUSH_i32;
		*(sljit_si*)inst = srcw;
		inst += sizeof(sljit_si);
	}

	RET();
	return SLJIT_SUCCESS;
}
  607. /* --------------------------------------------------------------------- */
  608. /* Extend input */
  609. /* --------------------------------------------------------------------- */
/* Emits a 32 -> 64 bit extending move: sign extending (movsxd) when
   'sign' is set, zero extending (a plain 32 bit mov, which clears the
   upper half) otherwise. Handles register, memory and immediate
   sources; toggles compiler->mode32 around the 32 bit sub-moves. */
static sljit_si emit_mov_int(struct sljit_compiler *compiler, sljit_si sign,
	sljit_si dst, sljit_sw dstw,
	sljit_si src, sljit_sw srcw)
{
	sljit_ub* inst;
	sljit_si dst_r;

	compiler->mode32 = 0;

	if (dst == SLJIT_UNUSED && !(src & SLJIT_MEM))
		return SLJIT_SUCCESS; /* Empty instruction. */

	if (src & SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
			/* Sign extension (or a value whose upper bits are already
			   zero) can use a 64 bit mov of the truncated immediate;
			   otherwise the full 64 bit load is required. */
			if (sign || ((sljit_uw)srcw <= 0x7fffffff)) {
				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_si)srcw, dst, dstw);
				FAIL_IF(!inst);
				*inst = MOV_rm_i32;
				return SLJIT_SUCCESS;
			}
			return emit_load_imm64(compiler, dst, srcw);
		}
		/* Memory destination: store the truncated 32 bit immediate. */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_si)srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_i32;
		compiler->mode32 = 0;
		return SLJIT_SUCCESS;
	}

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
		/* Register source going to memory: no extension needed first. */
		dst_r = src;
	else {
		if (sign) {
			/* movsxd dst_r, src: 32 -> 64 bit sign extension. */
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src, srcw);
			FAIL_IF(!inst);
			*inst++ = MOVSXD_r_rm;
		} else {
			/* 32 bit mov zero extends implicitly. */
			compiler->mode32 = 1;
			FAIL_IF(emit_mov(compiler, dst_r, 0, src, srcw));
			compiler->mode32 = 0;
		}
	}

	if (dst & SLJIT_MEM) {
		/* Store the (extended) value with a 32 bit mov. */
		compiler->mode32 = 1;
		inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_r;
		compiler->mode32 = 0;
	}

	return SLJIT_SUCCESS;
}