- /*
- * Stack-less Just-In-Time compiler
- *
- * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
- * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
- * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
- * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
- * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
- SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
- {
- #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
- return "x86" SLJIT_CPUINFO " ABI:fastcall";
- #else
- return "x86" SLJIT_CPUINFO;
- #endif
- }
- /*
- 32b register indexes:
- 0 - EAX
- 1 - ECX
- 2 - EDX
- 3 - EBX
- 4 - ESP
- 5 - EBP
- 6 - ESI
- 7 - EDI
- */
- /*
- 64b register indexes:
- 0 - RAX
- 1 - RCX
- 2 - RDX
- 3 - RBX
- 4 - RSP
- 5 - RBP
- 6 - RSI
- 7 - RDI
- 8 - R8 - from here on a REX prefix is required
- 9 - R9
- 10 - R10
- 11 - R11
- 12 - R12
- 13 - R13
- 14 - R14
- 15 - R15
- */
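- /*
- Illustrative encoding note: "mov rax, rcx" encodes as 48 89 C8
- (REX.W, MOV_rm_r, ModRM), while "mov rax, r8" needs REX.R for the
- high source register and encodes as 4C 89 C0. The reg_lmap tables
- below supply the low three ModRM bits; the REX_* prefixes defined
- later supply the fourth bit.
- */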
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- /* Last register + 1. */
- #define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
- static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
- 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 7, 6, 3, 4, 5
- };
- #define CHECK_EXTRA_REGS(p, w, do) \
- if (p >= SLJIT_R3 && p <= SLJIT_S3) { \
- if (p <= compiler->scratches) \
- w = compiler->saveds_offset - ((p) - SLJIT_R2) * (sljit_sw)sizeof(sljit_sw); \
- else \
- w = compiler->locals_offset + ((p) - SLJIT_S2) * (sljit_sw)sizeof(sljit_sw); \
- p = SLJIT_MEM1(SLJIT_SP); \
- do; \
- }
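- /* On x86-32 there are not enough hardware registers, so the registers
- with a reg_map value of 0 are virtual and live in the stack frame;
- CHECK_EXTRA_REGS rewrites such operands into SLJIT_MEM1(SLJIT_SP)
- accesses at the proper offset. */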
- #else /* SLJIT_CONFIG_X86_32 */
- /* Last register + 1. */
- #define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
- #define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
- /* Note: r12 & 0x7 == 0b100 is decoded as "SIB byte present", and
- r13 & 0x7 == 0b101 with mod == 0 is decoded as disp32 addressing,
- so avoid using r12 and r13 for memory addressing; for this reason
- r12 is better placed as one of the higher saved registers. */
- #ifndef _WIN64
- /* Args: rdi(=7), rsi(=6), rdx(=2), rcx(=1), r8, r9. Scratches: rax(=0), r10, r11 */
- static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
- 0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9
- };
- /* low-map. reg_map & 0x7. */
- static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
- 0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
- };
- #else
- /* Args: rcx(=1), rdx(=2), r8, r9. Scratches: rax(=0), r10, r11 */
- static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
- 0, 0, 2, 8, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 9, 10
- };
- /* low-map. reg_map & 0x7. */
- static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
- 0, 0, 2, 0, 1, 3, 4, 5, 5, 6, 7, 7, 6, 3, 4, 1, 2
- };
- #endif
- /* Args: xmm0-xmm3 */
- static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
- 4, 0, 1, 2, 3, 5, 6
- };
- /* low-map. freg_map & 0x7. */
- static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
- 4, 0, 1, 2, 3, 5, 6
- };
- #define REX_W 0x48
- #define REX_R 0x44
- #define REX_X 0x42
- #define REX_B 0x41
- #define REX 0x40
- #ifndef _WIN64
- #define HALFWORD_MAX 0x7fffffffl
- #define HALFWORD_MIN -0x80000000l
- #else
- #define HALFWORD_MAX 0x7fffffffll
- #define HALFWORD_MIN -0x80000000ll
- #endif
- #define IS_HALFWORD(x) ((x) <= HALFWORD_MAX && (x) >= HALFWORD_MIN)
- #define NOT_HALFWORD(x) ((x) > HALFWORD_MAX || (x) < HALFWORD_MIN)
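- /* Example: IS_HALFWORD(0x7fffffff) and IS_HALFWORD(-1) are true, so such
- immediates fit in a sign-extended 32-bit operand; 0x80000000 (as a 64-bit
- value) does not, and must be materialized in a register first. */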
- #define CHECK_EXTRA_REGS(p, w, do)
- #endif /* SLJIT_CONFIG_X86_32 */
- #define TMP_FREG (0)
- /* Size flags for emit_x86_instruction: */
- #define EX86_BIN_INS 0x0010
- #define EX86_SHIFT_INS 0x0020
- #define EX86_REX 0x0040
- #define EX86_NO_REXW 0x0080
- #define EX86_BYTE_ARG 0x0100
- #define EX86_HALF_ARG 0x0200
- #define EX86_PREF_66 0x0400
- #define EX86_PREF_F2 0x0800
- #define EX86_PREF_F3 0x1000
- #define EX86_SSE2_OP1 0x2000
- #define EX86_SSE2_OP2 0x4000
- #define EX86_SSE2 (EX86_SSE2_OP1 | EX86_SSE2_OP2)
- /* --------------------------------------------------------------------- */
- /* Instruction forms */
- /* --------------------------------------------------------------------- */
- #define ADD (/* BINARY */ 0 << 3)
- #define ADD_EAX_i32 0x05
- #define ADD_r_rm 0x03
- #define ADD_rm_r 0x01
- #define ADDSD_x_xm 0x58
- #define ADC (/* BINARY */ 2 << 3)
- #define ADC_EAX_i32 0x15
- #define ADC_r_rm 0x13
- #define ADC_rm_r 0x11
- #define AND (/* BINARY */ 4 << 3)
- #define AND_EAX_i32 0x25
- #define AND_r_rm 0x23
- #define AND_rm_r 0x21
- #define ANDPD_x_xm 0x54
- #define BSR_r_rm (/* GROUP_0F */ 0xbd)
- #define CALL_i32 0xe8
- #define CALL_rm (/* GROUP_FF */ 2 << 3)
- #define CDQ 0x99
- #define CMOVE_r_rm (/* GROUP_0F */ 0x44)
- #define CMP (/* BINARY */ 7 << 3)
- #define CMP_EAX_i32 0x3d
- #define CMP_r_rm 0x3b
- #define CMP_rm_r 0x39
- #define CVTPD2PS_x_xm 0x5a
- #define CVTSI2SD_x_rm 0x2a
- #define CVTTSD2SI_r_xm 0x2c
- #define DIV (/* GROUP_F7 */ 6 << 3)
- #define DIVSD_x_xm 0x5e
- #define FSTPS 0xd9
- #define FSTPD 0xdd
- #define INT3 0xcc
- #define IDIV (/* GROUP_F7 */ 7 << 3)
- #define IMUL (/* GROUP_F7 */ 5 << 3)
- #define IMUL_r_rm (/* GROUP_0F */ 0xaf)
- #define IMUL_r_rm_i8 0x6b
- #define IMUL_r_rm_i32 0x69
- #define JE_i8 0x74
- #define JNE_i8 0x75
- #define JMP_i8 0xeb
- #define JMP_i32 0xe9
- #define JMP_rm (/* GROUP_FF */ 4 << 3)
- #define LEA_r_m 0x8d
- #define MOV_r_rm 0x8b
- #define MOV_r_i32 0xb8
- #define MOV_rm_r 0x89
- #define MOV_rm_i32 0xc7
- #define MOV_rm8_i8 0xc6
- #define MOV_rm8_r8 0x88
- #define MOVSD_x_xm 0x10
- #define MOVSD_xm_x 0x11
- #define MOVSXD_r_rm 0x63
- #define MOVSX_r_rm8 (/* GROUP_0F */ 0xbe)
- #define MOVSX_r_rm16 (/* GROUP_0F */ 0xbf)
- #define MOVZX_r_rm8 (/* GROUP_0F */ 0xb6)
- #define MOVZX_r_rm16 (/* GROUP_0F */ 0xb7)
- #define MUL (/* GROUP_F7 */ 4 << 3)
- #define MULSD_x_xm 0x59
- #define NEG_rm (/* GROUP_F7 */ 3 << 3)
- #define NOP 0x90
- #define NOT_rm (/* GROUP_F7 */ 2 << 3)
- #define OR (/* BINARY */ 1 << 3)
- #define OR_r_rm 0x0b
- #define OR_EAX_i32 0x0d
- #define OR_rm_r 0x09
- #define OR_rm8_r8 0x08
- #define POP_r 0x58
- #define POP_rm 0x8f
- #define POPF 0x9d
- #define PREFETCH 0x18
- #define PUSH_i32 0x68
- #define PUSH_r 0x50
- #define PUSH_rm (/* GROUP_FF */ 6 << 3)
- #define PUSHF 0x9c
- #define RET_near 0xc3
- #define RET_i16 0xc2
- #define SBB (/* BINARY */ 3 << 3)
- #define SBB_EAX_i32 0x1d
- #define SBB_r_rm 0x1b
- #define SBB_rm_r 0x19
- #define SAR (/* SHIFT */ 7 << 3)
- #define SHL (/* SHIFT */ 4 << 3)
- #define SHR (/* SHIFT */ 5 << 3)
- #define SUB (/* BINARY */ 5 << 3)
- #define SUB_EAX_i32 0x2d
- #define SUB_r_rm 0x2b
- #define SUB_rm_r 0x29
- #define SUBSD_x_xm 0x5c
- #define TEST_EAX_i32 0xa9
- #define TEST_rm_r 0x85
- #define UCOMISD_x_xm 0x2e
- #define UNPCKLPD_x_xm 0x14
- #define XCHG_EAX_r 0x90
- #define XCHG_r_rm 0x87
- #define XOR (/* BINARY */ 6 << 3)
- #define XOR_EAX_i32 0x35
- #define XOR_r_rm 0x33
- #define XOR_rm_r 0x31
- #define XORPD_x_xm 0x57
- #define GROUP_0F 0x0f
- #define GROUP_F7 0xf7
- #define GROUP_FF 0xff
- #define GROUP_BINARY_81 0x81
- #define GROUP_BINARY_83 0x83
- #define GROUP_SHIFT_1 0xd1
- #define GROUP_SHIFT_N 0xc1
- #define GROUP_SHIFT_CL 0xd3
- #define MOD_REG 0xc0
- #define MOD_DISP8 0x40
- #define INC_SIZE(s) (*inst++ = (s), compiler->size += (s))
- #define PUSH_REG(r) (*inst++ = (PUSH_r + (r)))
- #define POP_REG(r) (*inst++ = (POP_r + (r)))
- #define RET() (*inst++ = (RET_near))
- #define RET_I16(n) (*inst++ = (RET_i16), *inst++ = n, *inst++ = 0)
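- /* Note: RET_I16(n) emits C2 n 00, i.e. the high byte of the 16-bit
- immediate is hard-coded to zero, so n must fit in a single byte. */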
- /* r32, r/m32 */
- #define MOV_RM(mod, reg, rm) (*inst++ = (MOV_r_rm), *inst++ = (mod) << 6 | (reg) << 3 | (rm))
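- /* Example: MOV_RM(0x3, 2, 1) emits 8B D1, i.e. "mov edx, ecx":
- mod = 11 (register direct), reg = 010 (edx), rm = 001 (ecx). */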
- /* Multithreading does not affect these static variables, since they store
- built-in CPU features. Several threads may detect the features at the same
- time and overwrite them, but each thread computes the same values, so the
- race is harmless. */
- #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
- static sljit_s32 cpu_has_sse2 = -1;
- #endif
- static sljit_s32 cpu_has_cmov = -1;
- #ifdef _WIN32_WCE
- #include <cmnintrin.h>
- #elif defined(_MSC_VER) && _MSC_VER >= 1400
- #include <intrin.h>
- #endif
- /******************************************************/
- /* Unaligned-store functions */
- /******************************************************/
- static SLJIT_INLINE void sljit_unaligned_store_s16(void *addr, sljit_s16 value)
- {
- SLJIT_MEMCPY(addr, &value, sizeof(value));
- }
- static SLJIT_INLINE void sljit_unaligned_store_s32(void *addr, sljit_s32 value)
- {
- SLJIT_MEMCPY(addr, &value, sizeof(value));
- }
- static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)
- {
- SLJIT_MEMCPY(addr, &value, sizeof(value));
- }
- /******************************************************/
- /* Utility functions */
- /******************************************************/
- static void get_cpu_features(void)
- {
- sljit_u32 features;
- #if defined(_MSC_VER) && _MSC_VER >= 1400
- int CPUInfo[4];
- __cpuid(CPUInfo, 1);
- features = (sljit_u32)CPUInfo[3];
- #elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C)
- /* AT&T syntax. */
- __asm__ (
- "movl $0x1, %%eax\n"
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- /* On x86-32, there is no red zone, so this
- should work (no need for a local variable). */
- "push %%ebx\n"
- #endif
- "cpuid\n"
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- "pop %%ebx\n"
- #endif
- "movl %%edx, %0\n"
- : "=g" (features)
- :
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- : "%eax", "%ecx", "%edx"
- #else
- : "%rax", "%rbx", "%rcx", "%rdx"
- #endif
- );
- #else /* _MSC_VER && _MSC_VER >= 1400 */
- /* Intel syntax. */
- __asm {
- mov eax, 1
- cpuid
- mov features, edx
- }
- #endif /* _MSC_VER && _MSC_VER >= 1400 */
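- /* The bits below come from CPUID leaf 1, EDX: bit 26 is SSE2 and
- bit 15 is CMOV. */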
- #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
- cpu_has_sse2 = (features >> 26) & 0x1;
- #endif
- cpu_has_cmov = (features >> 15) & 0x1;
- }
- static sljit_u8 get_jump_code(sljit_s32 type)
- {
- switch (type) {
- case SLJIT_EQUAL:
- case SLJIT_EQUAL_F64:
- return 0x84 /* je */;
- case SLJIT_NOT_EQUAL:
- case SLJIT_NOT_EQUAL_F64:
- return 0x85 /* jne */;
- case SLJIT_LESS:
- case SLJIT_LESS_F64:
- return 0x82 /* jc */;
- case SLJIT_GREATER_EQUAL:
- case SLJIT_GREATER_EQUAL_F64:
- return 0x83 /* jae */;
- case SLJIT_GREATER:
- case SLJIT_GREATER_F64:
- return 0x87 /* jnbe */;
- case SLJIT_LESS_EQUAL:
- case SLJIT_LESS_EQUAL_F64:
- return 0x86 /* jbe */;
- case SLJIT_SIG_LESS:
- return 0x8c /* jl */;
- case SLJIT_SIG_GREATER_EQUAL:
- return 0x8d /* jnl */;
- case SLJIT_SIG_GREATER:
- return 0x8f /* jnle */;
- case SLJIT_SIG_LESS_EQUAL:
- return 0x8e /* jle */;
- case SLJIT_OVERFLOW:
- return 0x80 /* jo */;
- case SLJIT_NOT_OVERFLOW:
- return 0x81 /* jno */;
- case SLJIT_UNORDERED_F64:
- return 0x8a /* jp */;
- case SLJIT_ORDERED_F64:
- return 0x8b /* jpo */;
- }
- return 0;
- }
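- /* Example: get_jump_code(SLJIT_EQUAL) returns 0x84, the second byte of the
- long "0F 84 rel32" (je) form; subtracting 0x10 yields the short one-byte
- "74 rel8" opcode, which is how generate_near_jump_code below derives the
- short form. */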
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset);
- #else
- static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr);
- static sljit_u8* generate_put_label_code(struct sljit_put_label *put_label, sljit_u8 *code_ptr, sljit_uw max_label);
- #endif
- static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
- {
- sljit_s32 type = jump->flags >> TYPE_SHIFT;
- sljit_s32 short_jump;
- sljit_uw label_addr;
- if (jump->flags & JUMP_LABEL)
- label_addr = (sljit_uw)(code + jump->u.label->size);
- else
- label_addr = jump->u.target - executable_offset;
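- /* A short jump is "opcode rel8" (2 bytes) and its displacement is measured
- from the end of the instruction, hence the jump->addr + 2 below. */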
- short_jump = (sljit_sw)(label_addr - (jump->addr + 2)) >= -128 && (sljit_sw)(label_addr - (jump->addr + 2)) <= 127;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((sljit_sw)(label_addr - (jump->addr + 1)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump->addr + 1)) < HALFWORD_MIN)
- return generate_far_jump_code(jump, code_ptr);
- #endif
- if (type == SLJIT_JUMP) {
- if (short_jump)
- *code_ptr++ = JMP_i8;
- else
- *code_ptr++ = JMP_i32;
- jump->addr++;
- }
- else if (type >= SLJIT_FAST_CALL) {
- short_jump = 0;
- *code_ptr++ = CALL_i32;
- jump->addr++;
- }
- else if (short_jump) {
- *code_ptr++ = get_jump_code(type) - 0x10;
- jump->addr++;
- }
- else {
- *code_ptr++ = GROUP_0F;
- *code_ptr++ = get_jump_code(type);
- jump->addr += 2;
- }
- if (short_jump) {
- jump->flags |= PATCH_MB;
- code_ptr += sizeof(sljit_s8);
- } else {
- jump->flags |= PATCH_MW;
- code_ptr += sizeof(sljit_s32);
- }
- return code_ptr;
- }
- SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
- {
- struct sljit_memory_fragment *buf;
- sljit_u8 *code;
- sljit_u8 *code_ptr;
- sljit_u8 *buf_ptr;
- sljit_u8 *buf_end;
- sljit_u8 len;
- sljit_sw executable_offset;
- sljit_sw jump_addr;
- struct sljit_label *label;
- struct sljit_jump *jump;
- struct sljit_const *const_;
- struct sljit_put_label *put_label;
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_generate_code(compiler));
- reverse_buf(compiler);
- /* Second code generation pass. */
- code = (sljit_u8*)SLJIT_MALLOC_EXEC(compiler->size, compiler->exec_allocator_data);
- PTR_FAIL_WITH_EXEC_IF(code);
- buf = compiler->buf;
- code_ptr = code;
- label = compiler->labels;
- jump = compiler->jumps;
- const_ = compiler->consts;
- put_label = compiler->put_labels;
- executable_offset = SLJIT_EXEC_OFFSET(code);
- do {
- buf_ptr = buf->memory;
- buf_end = buf_ptr + buf->used_size;
- do {
- len = *buf_ptr++;
- if (len > 0) {
- /* The code is already generated. */
- SLJIT_MEMCPY(code_ptr, buf_ptr, len);
- code_ptr += len;
- buf_ptr += len;
- }
- else {
- switch (*buf_ptr) {
- case 0:
- label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
- label->size = code_ptr - code;
- label = label->next;
- break;
- case 1:
- jump->addr = (sljit_uw)code_ptr;
- if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
- code_ptr = generate_near_jump_code(jump, code_ptr, code, executable_offset);
- else {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- code_ptr = generate_far_jump_code(jump, code_ptr, executable_offset);
- #else
- code_ptr = generate_far_jump_code(jump, code_ptr);
- #endif
- }
- jump = jump->next;
- break;
- case 2:
- const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
- const_ = const_->next;
- break;
- default:
- SLJIT_ASSERT(*buf_ptr == 3);
- SLJIT_ASSERT(put_label->label);
- put_label->addr = (sljit_uw)code_ptr;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- code_ptr = generate_put_label_code(put_label, code_ptr, (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size);
- #endif
- put_label = put_label->next;
- break;
- }
- buf_ptr++;
- }
- } while (buf_ptr < buf_end);
- SLJIT_ASSERT(buf_ptr == buf_end);
- buf = buf->next;
- } while (buf);
- SLJIT_ASSERT(!label);
- SLJIT_ASSERT(!jump);
- SLJIT_ASSERT(!const_);
- SLJIT_ASSERT(!put_label);
- SLJIT_ASSERT(code_ptr <= code + compiler->size);
- jump = compiler->jumps;
- while (jump) {
- jump_addr = jump->addr + executable_offset;
- if (jump->flags & PATCH_MB) {
- SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) >= -128 && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) <= 127);
- *(sljit_u8*)jump->addr = (sljit_u8)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8)));
- } else if (jump->flags & PATCH_MW) {
- if (jump->flags & JUMP_LABEL) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_sw))));
- #else
- SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
- sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))));
- #endif
- }
- else {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_sw))));
- #else
- SLJIT_ASSERT((sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
- sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.target - (jump_addr + sizeof(sljit_s32))));
- #endif
- }
- }
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- else if (jump->flags & PATCH_MD)
- sljit_unaligned_store_sw((void*)jump->addr, jump->u.label->addr);
- #endif
- jump = jump->next;
- }
- put_label = compiler->put_labels;
- while (put_label) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
- #else
- if (put_label->flags & PATCH_MD) {
- SLJIT_ASSERT(put_label->label->addr > HALFWORD_MAX);
- sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
- }
- else {
- SLJIT_ASSERT(put_label->label->addr <= HALFWORD_MAX);
- sljit_unaligned_store_s32((void*)(put_label->addr - sizeof(sljit_s32)), (sljit_s32)put_label->label->addr);
- }
- #endif
- put_label = put_label->next;
- }
- compiler->error = SLJIT_ERR_COMPILED;
- compiler->executable_offset = executable_offset;
- compiler->executable_size = code_ptr - code;
- code = (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
- SLJIT_UPDATE_WX_FLAGS(code, (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset), 1);
- return (void*)code;
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
- {
- switch (feature_type) {
- case SLJIT_HAS_FPU:
- #ifdef SLJIT_IS_FPU_AVAILABLE
- return SLJIT_IS_FPU_AVAILABLE;
- #elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
- if (cpu_has_sse2 == -1)
- get_cpu_features();
- return cpu_has_sse2;
- #else /* SLJIT_DETECT_SSE2 */
- return 1;
- #endif /* SLJIT_DETECT_SSE2 */
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- case SLJIT_HAS_VIRTUAL_REGISTERS:
- return 1;
- #endif
- case SLJIT_HAS_CLZ:
- case SLJIT_HAS_CMOV:
- if (cpu_has_cmov == -1)
- get_cpu_features();
- return cpu_has_cmov;
- case SLJIT_HAS_PREFETCH:
- return 1;
- case SLJIT_HAS_SSE2:
- #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
- if (cpu_has_sse2 == -1)
- get_cpu_features();
- return cpu_has_sse2;
- #else
- return 1;
- #endif
- default:
- return 0;
- }
- }
- /* --------------------------------------------------------------------- */
- /* Operators */
- /* --------------------------------------------------------------------- */
- #define BINARY_OPCODE(opcode) (((opcode ## _EAX_i32) << 24) | ((opcode ## _r_rm) << 16) | ((opcode ## _rm_r) << 8) | (opcode))
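- /* Example: BINARY_OPCODE(ADD) evaluates to 0x05030100: ADD_EAX_i32 (0x05)
- in the top byte, then ADD_r_rm (0x03), ADD_rm_r (0x01), and the ModRM
- opcode extension ADD (0 << 3) in the low byte. */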
- static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
- sljit_u32 op_types,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w);
- static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
- sljit_u32 op_types,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w);
- static sljit_s32 emit_mov(struct sljit_compiler *compiler,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw);
- #define EMIT_MOV(compiler, dst, dstw, src, srcw) \
- FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
- static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
- sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src);
- static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
- sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw);
- static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w);
- static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)
- {
- #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET)
- /* Emit endbr32/endbr64 when CET is enabled. */
- sljit_u8 *inst;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
- FAIL_IF(!inst);
- INC_SIZE(4);
- *inst++ = 0xf3;
- *inst++ = 0x0f;
- *inst++ = 0x1e;
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- *inst = 0xfb;
- #else
- *inst = 0xfa;
- #endif
- #else /* !SLJIT_CONFIG_X86_CET */
- SLJIT_UNUSED_ARG(compiler);
- #endif /* SLJIT_CONFIG_X86_CET */
- return SLJIT_SUCCESS;
- }
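- /* F3 0F 1E FB is endbr32 and F3 0F 1E FA is endbr64; both decode as
- multi-byte NOPs on processors without CET, so emitting them is always safe. */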
- #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
- static SLJIT_INLINE sljit_s32 emit_rdssp(struct sljit_compiler *compiler, sljit_s32 reg)
- {
- sljit_u8 *inst;
- sljit_s32 size;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- size = 5;
- #else
- size = 4;
- #endif
- inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
- FAIL_IF(!inst);
- INC_SIZE(size);
- *inst++ = 0xf3;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
- #endif
- *inst++ = 0x0f;
- *inst++ = 0x1e;
- *inst = (0x3 << 6) | (0x1 << 3) | (reg_map[reg] & 0x7);
- return SLJIT_SUCCESS;
- }
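- /* The sequence above is "rdsspd/rdsspq reg" (F3 [REX.W] 0F 1E /1 with a
- register-direct ModRM byte), which reads the current shadow stack pointer. */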
- static SLJIT_INLINE sljit_s32 emit_incssp(struct sljit_compiler *compiler, sljit_s32 reg)
- {
- sljit_u8 *inst;
- sljit_s32 size;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- size = 5;
- #else
- size = 4;
- #endif
- inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
- FAIL_IF(!inst);
- INC_SIZE(size);
- *inst++ = 0xf3;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
- #endif
- *inst++ = 0x0f;
- *inst++ = 0xae;
- *inst = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
- return SLJIT_SUCCESS;
- }
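- /* Likewise, F3 [REX.W] 0F AE /5 is "incsspd/incsspq reg", which pops
- reg entries from the shadow stack. */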
- #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
- static SLJIT_INLINE sljit_s32 cpu_has_shadow_stack(void)
- {
- #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
- return _get_ssp() != 0;
- #else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
- return 0;
- #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
- }
- static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compiler,
- sljit_s32 src, sljit_sw srcw, sljit_s32 base, sljit_sw disp)
- {
- #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
- sljit_u8 *inst, *jz_after_cmp_inst;
- sljit_uw size_jz_after_cmp_inst;
- sljit_uw size_before_rdssp_inst = compiler->size;
- /* Generate "RDSSP TMP_REG1". */
- FAIL_IF(emit_rdssp(compiler, TMP_REG1));
- /* Load return address on shadow stack into TMP_REG1. */
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- SLJIT_ASSERT(reg_map[TMP_REG1] == 5);
- /* Hand-encode "mov 0x0(%ebp),%ebp", a form the instruction emitter does not support. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
- FAIL_IF(!inst);
- INC_SIZE(3);
- *inst++ = 0x8b;
- *inst++ = 0x6d;
- *inst = 0;
- #else /* !SLJIT_CONFIG_X86_32 */
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(TMP_REG1), 0);
- #endif /* SLJIT_CONFIG_X86_32 */
- if (src == SLJIT_UNUSED) {
- /* Return address is on stack. */
- src = SLJIT_MEM1(base);
- srcw = disp;
- }
- /* Compare return address against TMP_REG1. */
- FAIL_IF(emit_cmp_binary (compiler, TMP_REG1, 0, src, srcw));
- /* Generate a JZ that skips the shadow stack adjustment when the
- shadow stack matches the normal stack. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
- FAIL_IF(!inst);
- INC_SIZE(2);
- *inst++ = get_jump_code(SLJIT_EQUAL) - 0x10;
- size_jz_after_cmp_inst = compiler->size;
- jz_after_cmp_inst = inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- /* REX_W is not necessary. */
- compiler->mode32 = 1;
- #endif
- /* Load 1 into TMP_REG1. */
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
- /* Generate "INCSSP TMP_REG1". */
- FAIL_IF(emit_incssp(compiler, TMP_REG1));
- /* Jump back to "RDSSP TMP_REG1" to check shadow stack again. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
- FAIL_IF(!inst);
- INC_SIZE(2);
- *inst++ = JMP_i8;
- *inst = size_before_rdssp_inst - compiler->size;
- *jz_after_cmp_inst = compiler->size - size_jz_after_cmp_inst;
- #else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
- SLJIT_UNUSED_ARG(compiler);
- SLJIT_UNUSED_ARG(src);
- SLJIT_UNUSED_ARG(srcw);
- SLJIT_UNUSED_ARG(base);
- SLJIT_UNUSED_ARG(disp);
- #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
- return SLJIT_SUCCESS;
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- #include "sljitNativeX86_32.c"
- #else
- #include "sljitNativeX86_64.c"
- #endif
- static sljit_s32 emit_mov(struct sljit_compiler *compiler,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- SLJIT_ASSERT(dst != SLJIT_UNUSED);
- if (FAST_IS_REG(src)) {
- inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm_r;
- return SLJIT_SUCCESS;
- }
- if (src & SLJIT_IMM) {
- if (FAST_IS_REG(dst)) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
- #else
- if (!compiler->mode32) {
- if (NOT_HALFWORD(srcw))
- return emit_load_imm64(compiler, dst, srcw);
- }
- else
- return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, MOV_r_i32 + reg_lmap[dst], srcw);
- #endif
- }
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (!compiler->mode32 && NOT_HALFWORD(srcw)) {
- /* Immediate-to-memory move. Only the SLJIT_MOV operation copies an
- immediate directly into memory, so TMP_REG1 is free to be used. */
- FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm_r;
- return SLJIT_SUCCESS;
- }
- #endif
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm_i32;
- return SLJIT_SUCCESS;
- }
- if (FAST_IS_REG(dst)) {
- inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw);
- FAIL_IF(!inst);
- *inst = MOV_r_rm;
- return SLJIT_SUCCESS;
- }
- /* Memory-to-memory move. Only the SLJIT_MOV operation copies data
- from memory to memory, so TMP_REG1 is free to be used. */
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src, srcw);
- FAIL_IF(!inst);
- *inst = MOV_r_rm;
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm_r;
- return SLJIT_SUCCESS;
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
- {
- sljit_u8 *inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_s32 size;
- #endif
- CHECK_ERROR();
- CHECK(check_sljit_emit_op0(compiler, op));
- switch (GET_OPCODE(op)) {
- case SLJIT_BREAKPOINT:
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = INT3;
- break;
- case SLJIT_NOP:
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = NOP;
- break;
- case SLJIT_LMUL_UW:
- case SLJIT_LMUL_SW:
- case SLJIT_DIVMOD_UW:
- case SLJIT_DIVMOD_SW:
- case SLJIT_DIV_UW:
- case SLJIT_DIV_SW:
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- #ifdef _WIN64
- SLJIT_ASSERT(
- reg_map[SLJIT_R0] == 0
- && reg_map[SLJIT_R1] == 2
- && reg_map[TMP_REG1] > 7);
- #else
- SLJIT_ASSERT(
- reg_map[SLJIT_R0] == 0
- && reg_map[SLJIT_R1] < 7
- && reg_map[TMP_REG1] == 2);
- #endif
- compiler->mode32 = op & SLJIT_I32_OP;
- #endif
- SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
- op = GET_OPCODE(op);
- if ((op | 0x2) == SLJIT_DIV_UW) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
- inst = emit_x86_instruction(compiler, 1, SLJIT_R1, 0, SLJIT_R1, 0);
- #else
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
- #endif
- FAIL_IF(!inst);
- *inst = XOR_r_rm;
- }
- if ((op | 0x2) == SLJIT_DIV_SW) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
- #endif
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = CDQ;
- #else
- if (compiler->mode32) {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = CDQ;
- } else {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
- FAIL_IF(!inst);
- INC_SIZE(2);
- *inst++ = REX_W;
- *inst = CDQ;
- }
- #endif
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
- FAIL_IF(!inst);
- INC_SIZE(2);
- *inst++ = GROUP_F7;
- *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
- #else
- #ifdef _WIN64
- size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
- #else
- size = (!compiler->mode32) ? 3 : 2;
- #endif
- inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
- FAIL_IF(!inst);
- INC_SIZE(size);
- #ifdef _WIN64
- if (!compiler->mode32)
- *inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
- else if (op >= SLJIT_DIVMOD_UW)
- *inst++ = REX_B;
- *inst++ = GROUP_F7;
- *inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
- #else
- if (!compiler->mode32)
- *inst++ = REX_W;
- *inst++ = GROUP_F7;
- *inst = MOD_REG | reg_map[SLJIT_R1];
- #endif
- #endif
- switch (op) {
- case SLJIT_LMUL_UW:
- *inst |= MUL;
- break;
- case SLJIT_LMUL_SW:
- *inst |= IMUL;
- break;
- case SLJIT_DIVMOD_UW:
- case SLJIT_DIV_UW:
- *inst |= DIV;
- break;
- case SLJIT_DIVMOD_SW:
- case SLJIT_DIV_SW:
- *inst |= IDIV;
- break;
- }
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
- if (op <= SLJIT_DIVMOD_SW)
- EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
- #else
- if (op >= SLJIT_DIV_UW)
- EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
- #endif
- break;
- case SLJIT_ENDBR:
- return emit_endbranch(compiler);
- case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
- return skip_frames_before_return(compiler);
- }
- return SLJIT_SUCCESS;
- }
- #define ENCODE_PREFIX(prefix) \
- do { \
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1); \
- FAIL_IF(!inst); \
- INC_SIZE(1); \
- *inst = (prefix); \
- } while (0)
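- /* Used below to emit one-byte prefix-like forms such as
- "xchg eax, reg" (XCHG_EAX_r + reg) around byte stores. */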
- static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- sljit_s32 dst_r;
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_s32 work_r;
- #endif
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
- #endif
- if (src & SLJIT_IMM) {
- if (FAST_IS_REG(dst)) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
- #else
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
- FAIL_IF(!inst);
- *inst = MOV_rm_i32;
- return SLJIT_SUCCESS;
- #endif
- }
- inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm8_i8;
- return SLJIT_SUCCESS;
- }
- dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (reg_map[src] >= 4) {
- SLJIT_ASSERT(dst_r == TMP_REG1);
- EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
- } else
- dst_r = src;
- #else
- dst_r = src;
- #endif
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- else if (FAST_IS_REG(src) && reg_map[src] >= 4) {
- /* src, dst are registers. */
- SLJIT_ASSERT(SLOW_IS_REG(dst));
- if (reg_map[dst] < 4) {
- if (dst != src)
- EMIT_MOV(compiler, dst, 0, src, 0);
- inst = emit_x86_instruction(compiler, 2, dst, 0, dst, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
- }
- else {
- if (dst != src)
- EMIT_MOV(compiler, dst, 0, src, 0);
- if (sign) {
- /* shl reg, 24 */
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
- FAIL_IF(!inst);
- *inst |= SHL;
- /* sar reg, 24 */
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
- FAIL_IF(!inst);
- *inst |= SAR;
- }
- else {
- inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
- FAIL_IF(!inst);
- *(inst + 1) |= AND;
- }
- }
- return SLJIT_SUCCESS;
- }
- #endif
- else {
- /* src is either a memory address or, on x86-32, a register with reg_map[src] < 4. */
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
- }
- if (dst & SLJIT_MEM) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (dst_r == TMP_REG1) {
- /* Find an unused register whose reg_map value is < 4. */
- if ((dst & REG_MASK) == SLJIT_R0) {
- if ((dst & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_R1))
- work_r = SLJIT_R2;
- else
- work_r = SLJIT_R1;
- }
- else {
- if ((dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R0))
- work_r = SLJIT_R0;
- else if ((dst & REG_MASK) == SLJIT_R1)
- work_r = SLJIT_R2;
- else
- work_r = SLJIT_R1;
- }
- if (work_r == SLJIT_R0) {
- ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REG1]);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
- FAIL_IF(!inst);
- *inst = XCHG_r_rm;
- }
- inst = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm8_r8;
- if (work_r == SLJIT_R0) {
- ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REG1]);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
- FAIL_IF(!inst);
- *inst = XCHG_r_rm;
- }
- }
- else {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm8_r8;
- }
- #else
- inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm8_r8;
- #endif
- }
- return SLJIT_SUCCESS;
- }
- static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 1;
- #endif
- inst = emit_x86_instruction(compiler, 2, 0, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst++ = PREFETCH;
- if (op == SLJIT_PREFETCH_L1)
- *inst |= (1 << 3);
- else if (op == SLJIT_PREFETCH_L2)
- *inst |= (2 << 3);
- else if (op == SLJIT_PREFETCH_L3)
- *inst |= (3 << 3);
- return SLJIT_SUCCESS;
- }
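- /* These are the 0F 18 /r prefetch hints: /1 = prefetcht0 (L1),
- /2 = prefetcht1 (L2), /3 = prefetcht2 (L3); the default /0 left by
- emit_x86_instruction is prefetchnta. */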
- static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- sljit_s32 dst_r;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
- #endif
- if (src & SLJIT_IMM) {
- if (FAST_IS_REG(dst)) {
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
- #else
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
- FAIL_IF(!inst);
- *inst = MOV_rm_i32;
- return SLJIT_SUCCESS;
- #endif
- }
- inst = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm_i32;
- return SLJIT_SUCCESS;
- }
- dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
- dst_r = src;
- else {
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = sign ? MOVSX_r_rm16 : MOVZX_r_rm16;
- }
- if (dst & SLJIT_MEM) {
- inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = MOV_rm_r;
- }
- return SLJIT_SUCCESS;
- }
- static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- if (dst == src && dstw == srcw) {
- /* Same input and output */
- inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= opcode;
- return SLJIT_SUCCESS;
- }
- if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED))
- dst = TMP_REG1;
- if (FAST_IS_REG(dst)) {
- EMIT_MOV(compiler, dst, 0, src, srcw);
- inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= opcode;
- return SLJIT_SUCCESS;
- }
- EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
- inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= opcode;
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
- static sljit_s32 emit_not_with_flags(struct sljit_compiler *compiler,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- if (dst == SLJIT_UNUSED)
- dst = TMP_REG1;
- if (FAST_IS_REG(dst)) {
- EMIT_MOV(compiler, dst, 0, src, srcw);
- inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= NOT_rm;
- inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
- FAIL_IF(!inst);
- *inst = OR_r_rm;
- return SLJIT_SUCCESS;
- }
- EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
- inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_F7;
- *inst |= NOT_rm;
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst = OR_r_rm;
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- static const sljit_sw emit_clz_arg = 32 + 31;
- #endif
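- /* CLZ via BSR: BSR returns the index of the highest set bit but leaves the
- destination undefined (with ZF set) for a zero input. Zero is therefore
- patched to 32 + 31 (or 64 + 63) via CMOV or a generic conditional move, and
- the final XOR with 31 (or 63) converts the bit index into a leading-zero
- count (e.g. bsr = 31 -> clz = 0, and the patched 63 -> clz = 32). */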
- static sljit_s32 emit_clz(struct sljit_compiler *compiler, sljit_s32 op_flags,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- sljit_s32 dst_r;
- SLJIT_UNUSED_ARG(op_flags);
- if (cpu_has_cmov == -1)
- get_cpu_features();
- dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = BSR_r_rm;
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (cpu_has_cmov) {
- if (dst_r != TMP_REG1) {
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 32 + 31);
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG1, 0);
- }
- else
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, SLJIT_MEM0(), (sljit_sw)&emit_clz_arg);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CMOVE_r_rm;
- }
- else
- FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, 32 + 31));
- inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
- #else
- if (cpu_has_cmov) {
- EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? (64 + 63) : (32 + 31));
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CMOVE_r_rm;
- }
- else
- FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? (64 + 63) : (32 + 31)));
- inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? 63 : 31, dst_r, 0);
- #endif
- FAIL_IF(!inst);
- *(inst + 1) |= XOR;
- if (dst & SLJIT_MEM)
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_s32 op_flags = GET_ALL_FLAGS(op);
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_s32 dst_is_ereg = 0;
- #endif
- CHECK_ERROR();
- CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- ADJUST_LOCAL_OFFSET(src, srcw);
- CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
- CHECK_EXTRA_REGS(src, srcw, (void)0);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = op_flags & SLJIT_I32_OP;
- #endif
- op = GET_OPCODE(op);
- if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
- #endif
- if (FAST_IS_REG(src) && src == dst) {
- if (!TYPE_CAST_NEEDED(op))
- return SLJIT_SUCCESS;
- }
- if (op_flags & SLJIT_I32_OP) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (src & SLJIT_MEM) {
- if (op == SLJIT_MOV_S32)
- op = SLJIT_MOV_U32;
- }
- else if (src & SLJIT_IMM) {
- if (op == SLJIT_MOV_U32)
- op = SLJIT_MOV_S32;
- }
- #endif
- }
- if (src & SLJIT_IMM) {
- switch (op) {
- case SLJIT_MOV_U8:
- srcw = (sljit_u8)srcw;
- break;
- case SLJIT_MOV_S8:
- srcw = (sljit_s8)srcw;
- break;
- case SLJIT_MOV_U16:
- srcw = (sljit_u16)srcw;
- break;
- case SLJIT_MOV_S16:
- srcw = (sljit_s16)srcw;
- break;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- case SLJIT_MOV_U32:
- srcw = (sljit_u32)srcw;
- break;
- case SLJIT_MOV_S32:
- srcw = (sljit_s32)srcw;
- break;
- #endif
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (SLJIT_UNLIKELY(dst_is_ereg))
- return emit_mov(compiler, dst, dstw, src, srcw);
- #endif
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
- SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
- dst = TMP_REG1;
- }
- #endif
- switch (op) {
- case SLJIT_MOV:
- case SLJIT_MOV_P:
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- case SLJIT_MOV_U32:
- case SLJIT_MOV_S32:
- #endif
- FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
- break;
- case SLJIT_MOV_U8:
- FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
- break;
- case SLJIT_MOV_S8:
- FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
- break;
- case SLJIT_MOV_U16:
- FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
- break;
- case SLJIT_MOV_S16:
- FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
- break;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- case SLJIT_MOV_U32:
- FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
- break;
- case SLJIT_MOV_S32:
- FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
- break;
- #endif
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
- return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
- #endif
- return SLJIT_SUCCESS;
- }
- switch (op) {
- case SLJIT_NOT:
- if (SLJIT_UNLIKELY(op_flags & SLJIT_SET_Z))
- return emit_not_with_flags(compiler, dst, dstw, src, srcw);
- return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw);
- case SLJIT_NEG:
- return emit_unary(compiler, NEG_rm, dst, dstw, src, srcw);
- case SLJIT_CLZ:
- return emit_clz(compiler, op_flags, dst, dstw, src, srcw);
- }
- return SLJIT_SUCCESS;
- }
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
- if (IS_HALFWORD(immw) || compiler->mode32) { \
- inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
- FAIL_IF(!inst); \
- *(inst + 1) |= (op_imm); \
- } \
- else { \
- FAIL_IF(emit_load_imm64(compiler, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, immw)); \
- inst = emit_x86_instruction(compiler, 1, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
- FAIL_IF(!inst); \
- *inst = (op_mr); \
- }
- #define BINARY_EAX_IMM(op_eax_imm, immw) \
- FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw))
- #else
- #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
- inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
- FAIL_IF(!inst); \
- *(inst + 1) |= (op_imm);
- #define BINARY_EAX_IMM(op_eax_imm, immw) \
- FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw))
- #endif
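- /* On 64-bit targets an immediate that does not fit in a sign-extended
- 32 bits cannot be encoded directly in an ALU instruction, so BINARY_IMM
- first loads it into a free temporary register and falls back to the
- register form (op_mr). */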
- static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
- sljit_u32 op_types,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_u8* inst;
- sljit_u8 op_eax_imm = (op_types >> 24);
- sljit_u8 op_rm = (op_types >> 16) & 0xff;
- sljit_u8 op_mr = (op_types >> 8) & 0xff;
- sljit_u8 op_imm = op_types & 0xff;
- if (dst == SLJIT_UNUSED) {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
- BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- return SLJIT_SUCCESS;
- }
- if (dst == src1 && dstw == src1w) {
- if (src2 & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
- #else
- if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
- #endif
- BINARY_EAX_IMM(op_eax_imm, src2w);
- }
- else {
- BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
- }
- }
- else if (FAST_IS_REG(dst)) {
- inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- else if (FAST_IS_REG(src2)) {
- /* Special exception for sljit_emit_op_flags. */
- inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
- FAIL_IF(!inst);
- *inst = op_mr;
- }
- else {
- EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = op_mr;
- }
- return SLJIT_SUCCESS;
- }
- /* Only for cumulative operations. */
- if (dst == src2 && dstw == src2w) {
- if (src1 & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
- #else
- if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128)) {
- #endif
- BINARY_EAX_IMM(op_eax_imm, src1w);
- }
- else {
- BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
- }
- }
- else if (FAST_IS_REG(dst)) {
- inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- else if (FAST_IS_REG(src1)) {
- inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw);
- FAIL_IF(!inst);
- *inst = op_mr;
- }
- else {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = op_mr;
- }
- return SLJIT_SUCCESS;
- }
- /* General version. */
- if (FAST_IS_REG(dst)) {
- EMIT_MOV(compiler, dst, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
- BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- }
- else {
- /* This version requires fewer memory writes. */
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
- BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- }
- return SLJIT_SUCCESS;
- }
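- /* Sketch of how the paths above are reached (hypothetical operands;
-    compiler setup elided). With dst == src1 == SLJIT_R0 and a non-byte
-    immediate, the short EAX-immediate form is chosen: */
- #if 0
- emit_cum_binary(compiler, BINARY_OPCODE(ADD),
- SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, 1000); /* add eax, 1000 */
- #endif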
- static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
- sljit_u32 op_types,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_u8* inst;
- sljit_u8 op_eax_imm = (op_types >> 24);
- sljit_u8 op_rm = (op_types >> 16) & 0xff;
- sljit_u8 op_mr = (op_types >> 8) & 0xff;
- sljit_u8 op_imm = op_types & 0xff;
- if (dst == SLJIT_UNUSED) {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
- BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- return SLJIT_SUCCESS;
- }
- if (dst == src1 && dstw == src1w) {
- if (src2 & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
- #else
- if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
- #endif
- BINARY_EAX_IMM(op_eax_imm, src2w);
- }
- else {
- BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
- }
- }
- else if (FAST_IS_REG(dst)) {
- inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- else if (FAST_IS_REG(src2)) {
- inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
- FAIL_IF(!inst);
- *inst = op_mr;
- }
- else {
- EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
- FAIL_IF(!inst);
- *inst = op_mr;
- }
- return SLJIT_SUCCESS;
- }
- /* General version. */
- if (FAST_IS_REG(dst) && dst != src2) {
- EMIT_MOV(compiler, dst, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
- BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- }
- else {
- /* This version requires fewer memory writes. */
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
- BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = op_rm;
- }
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- }
- return SLJIT_SUCCESS;
- }
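- /* Unlike the cumulative case, SUB/SBB operands cannot be swapped, so
-    there is no dst == src2 fast path above; when dst aliases src2 the
-    general version routes through TMP_REG1. Hypothetical call: */
- #if 0
- emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
- SLJIT_R0, 0, SLJIT_R1, 0, SLJIT_R0, 0); /* R0 = R1 - R0, via TMP_REG1 */
- #endif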
- static sljit_s32 emit_mul(struct sljit_compiler *compiler,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_u8* inst;
- sljit_s32 dst_r;
- dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
- /* Register destination. */
- if (dst_r == src1 && !(src2 & SLJIT_IMM)) {
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
- }
- else if (dst_r == src2 && !(src1 & SLJIT_IMM)) {
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
- }
- else if (src1 & SLJIT_IMM) {
- if (src2 & SLJIT_IMM) {
- EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, src2w);
- src2 = dst_r;
- src2w = 0;
- }
- if (src1w <= 127 && src1w >= -128) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = IMUL_r_rm_i8;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = (sljit_s8)src1w;
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- else {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = IMUL_r_rm_i32;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
- FAIL_IF(!inst);
- INC_SIZE(4);
- sljit_unaligned_store_sw(inst, src1w);
- }
- #else
- else if (IS_HALFWORD(src1w)) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = IMUL_r_rm_i32;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
- FAIL_IF(!inst);
- INC_SIZE(4);
- sljit_unaligned_store_s32(inst, (sljit_s32)src1w);
- }
- else {
- if (dst_r != src2)
- EMIT_MOV(compiler, dst_r, 0, src2, src2w);
- FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w));
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
- }
- #endif
- }
- else if (src2 & SLJIT_IMM) {
- /* Note: src1 is NOT immediate. */
- if (src2w <= 127 && src2w >= -128) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst = IMUL_r_rm_i8;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1);
- *inst = (sljit_s8)src2w;
- }
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- else {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst = IMUL_r_rm_i32;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
- FAIL_IF(!inst);
- INC_SIZE(4);
- sljit_unaligned_store_sw(inst, src2w);
- }
- #else
- else if (IS_HALFWORD(src2w)) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst = IMUL_r_rm_i32;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
- FAIL_IF(!inst);
- INC_SIZE(4);
- sljit_unaligned_store_s32(inst, (sljit_s32)src2w);
- }
- else {
- if (dst_r != src1)
- EMIT_MOV(compiler, dst_r, 0, src1, src1w);
- FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
- }
- #endif
- }
- else {
- /* Neither argument is immediate. */
- if (ADDRESSING_DEPENDS_ON(src2, dst_r))
- dst_r = TMP_REG1;
- EMIT_MOV(compiler, dst_r, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = IMUL_r_rm;
- }
- if (dst & SLJIT_MEM)
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
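- /* The multiplier above selects among three IMUL forms by immediate
-    width (hypothetical calls; values chosen to hit each branch): */
- #if 0
- emit_mul(compiler, SLJIT_R0, 0, SLJIT_R1, 0, SLJIT_IMM, 100);    /* imul r, r/m, imm8 */
- emit_mul(compiler, SLJIT_R0, 0, SLJIT_R1, 0, SLJIT_IMM, 100000); /* imul r, r/m, imm32 */
- /* x86-64 only: a full 64-bit immediate is first loaded into TMP_REG2 */
- #endif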
- static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_u8* inst;
- sljit_s32 dst_r, done = 0;
- /* These cases are better left to be handled by the normal code path. */
- if (dst == src1 && dstw == src1w)
- return SLJIT_ERR_UNSUPPORTED;
- if (dst == src2 && dstw == src2w)
- return SLJIT_ERR_UNSUPPORTED;
- dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- if (FAST_IS_REG(src1)) {
- if (FAST_IS_REG(src2)) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0);
- FAIL_IF(!inst);
- *inst = LEA_r_m;
- done = 1;
- }
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
- #else
- if (src2 & SLJIT_IMM) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
- #endif
- FAIL_IF(!inst);
- *inst = LEA_r_m;
- done = 1;
- }
- }
- else if (FAST_IS_REG(src2)) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
- #else
- if (src1 & SLJIT_IMM) {
- inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
- #endif
- FAIL_IF(!inst);
- *inst = LEA_r_m;
- done = 1;
- }
- }
- if (done) {
- if (dst_r == TMP_REG1)
- return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
- return SLJIT_ERR_UNSUPPORTED;
- }
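- /* LEA computes reg + reg or reg + imm without touching the flags,
-    which is why the flag-free SLJIT_ADD path in sljit_emit_op2 tries it
-    first. Hypothetical call: */
- #if 0
- emit_lea_binary(compiler, SLJIT_R2, 0, SLJIT_R0, 0, SLJIT_R1, 0); /* lea r2, [r0 + r1] */
- #endif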
- static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_u8* inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
- #else
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
- #endif
- BINARY_EAX_IMM(CMP_EAX_i32, src2w);
- return SLJIT_SUCCESS;
- }
- if (FAST_IS_REG(src1)) {
- if (src2 & SLJIT_IMM) {
- BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0);
- }
- else {
- inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = CMP_r_rm;
- }
- return SLJIT_SUCCESS;
- }
- if (FAST_IS_REG(src2) && !(src1 & SLJIT_IMM)) {
- inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst = CMP_rm_r;
- return SLJIT_SUCCESS;
- }
- if (src2 & SLJIT_IMM) {
- if (src1 & SLJIT_IMM) {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- src1 = TMP_REG1;
- src1w = 0;
- }
- BINARY_IMM(CMP, CMP_rm_r, src2w, src1, src1w);
- }
- else {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = CMP_r_rm;
- }
- return SLJIT_SUCCESS;
- }
- static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_u8* inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
- #else
- if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
- #endif
- BINARY_EAX_IMM(TEST_EAX_i32, src2w);
- return SLJIT_SUCCESS;
- }
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
- #else
- if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128)) {
- #endif
- BINARY_EAX_IMM(TEST_EAX_i32, src1w);
- return SLJIT_SUCCESS;
- }
- if (!(src1 & SLJIT_IMM)) {
- if (src2 & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (IS_HALFWORD(src2w) || compiler->mode32) {
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
- FAIL_IF(!inst);
- *inst = GROUP_F7;
- }
- else {
- FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src2w));
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst = TEST_rm_r;
- }
- #else
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
- FAIL_IF(!inst);
- *inst = GROUP_F7;
- #endif
- return SLJIT_SUCCESS;
- }
- else if (FAST_IS_REG(src1)) {
- inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = TEST_rm_r;
- return SLJIT_SUCCESS;
- }
- }
- if (!(src2 & SLJIT_IMM)) {
- if (src1 & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (IS_HALFWORD(src1w) || compiler->mode32) {
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, src2w);
- FAIL_IF(!inst);
- *inst = GROUP_F7;
- }
- else {
- FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src1w));
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = TEST_rm_r;
- }
- #else
- inst = emit_x86_instruction(compiler, 1, src1, src1w, src2, src2w);
- FAIL_IF(!inst);
- *inst = GROUP_F7;
- #endif
- return SLJIT_SUCCESS;
- }
- else if (FAST_IS_REG(src2)) {
- inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
- FAIL_IF(!inst);
- *inst = TEST_rm_r;
- return SLJIT_SUCCESS;
- }
- }
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- if (src2 & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (IS_HALFWORD(src2w) || compiler->mode32) {
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst = GROUP_F7;
- }
- else {
- FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
- inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst = TEST_rm_r;
- }
- #else
- inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst = GROUP_F7;
- #endif
- }
- else {
- inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
- FAIL_IF(!inst);
- *inst = TEST_rm_r;
- }
- return SLJIT_SUCCESS;
- }
- static sljit_s32 emit_shift(struct sljit_compiler *compiler,
- sljit_u8 mode,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_u8* inst;
- if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) {
- if (dst == src1 && dstw == src1w) {
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
- FAIL_IF(!inst);
- *inst |= mode;
- return SLJIT_SUCCESS;
- }
- if (dst == SLJIT_UNUSED) {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- return SLJIT_SUCCESS;
- }
- if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
- if (FAST_IS_REG(dst)) {
- EMIT_MOV(compiler, dst, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- return SLJIT_SUCCESS;
- }
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
- if (dst == SLJIT_PREF_SHIFT_REG) {
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- }
- else if (SLOW_IS_REG(dst) && dst != src2 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
- if (src1 != dst)
- EMIT_MOV(compiler, dst, 0, src1, src1w);
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- }
- else {
- /* This case is complex, since ecx itself may be used for
- addressing, and that case must be supported as well. */
- EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_SP), 0);
- #else
- EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0);
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
- inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
- FAIL_IF(!inst);
- *inst |= mode;
- EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0);
- #endif
- if (dst != SLJIT_UNUSED)
- return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
- }
- return SLJIT_SUCCESS;
- }
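- /* Variable shift counts must reside in ecx on x86 (SLJIT_PREF_SHIFT_REG
-    appears to map there), hence the save/restore choreography above when
-    the count arrives in another register. Hypothetical call: */
- #if 0
- emit_shift(compiler, SHL, SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_R1, 0); /* count moved into ecx first */
- #endif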
- static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
- sljit_u8 mode, sljit_s32 set_flags,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- /* The CPU does not set flags if the shift count is 0. */
- if (src2 & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if ((src2w & 0x3f) != 0 || (compiler->mode32 && (src2w & 0x1f) != 0))
- return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
- #else
- if ((src2w & 0x1f) != 0)
- return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
- #endif
- if (!set_flags)
- return emit_mov(compiler, dst, dstw, src1, src1w);
- /* OR dst, src, 0 */
- return emit_cum_binary(compiler, BINARY_OPCODE(OR),
- dst, dstw, src1, src1w, SLJIT_IMM, 0);
- }
- if (!set_flags)
- return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
- if (!FAST_IS_REG(dst))
- FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0));
- FAIL_IF(emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w));
- if (FAST_IS_REG(dst))
- return emit_cmp_binary(compiler, (dst == SLJIT_UNUSED) ? TMP_REG1 : dst, dstw, SLJIT_IMM, 0);
- return SLJIT_SUCCESS;
- }
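- /* Example of the zero-count workaround (hypothetical call): a constant
-    count of 0 with the Z flag requested degenerates into OR dst, 0,
-    which copies src1 and still materializes the flags: */
- #if 0
- emit_shift_with_flags(compiler, SHL, 1, SLJIT_R0, 0, SLJIT_R1, 0, SLJIT_IMM, 0);
- #endif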
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- CHECK_ERROR();
- CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
- CHECK_EXTRA_REGS(dst, dstw, (void)0);
- CHECK_EXTRA_REGS(src1, src1w, (void)0);
- CHECK_EXTRA_REGS(src2, src2w, (void)0);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = op & SLJIT_I32_OP;
- #endif
- if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
- return SLJIT_SUCCESS;
- switch (GET_OPCODE(op)) {
- case SLJIT_ADD:
- if (!HAS_FLAGS(op)) {
- if (emit_lea_binary(compiler, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED)
- return compiler->error;
- }
- return emit_cum_binary(compiler, BINARY_OPCODE(ADD),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_ADDC:
- return emit_cum_binary(compiler, BINARY_OPCODE(ADC),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_SUB:
- if (!HAS_FLAGS(op)) {
- if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
- return compiler->error;
- if (SLOW_IS_REG(dst) && src2 == dst) {
- FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB), dst, 0, dst, 0, src1, src1w));
- return emit_unary(compiler, NEG_rm, dst, 0, dst, 0);
- }
- }
- if (dst == SLJIT_UNUSED)
- return emit_cmp_binary(compiler, src1, src1w, src2, src2w);
- return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_SUBC:
- return emit_non_cum_binary(compiler, BINARY_OPCODE(SBB),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_MUL:
- return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_AND:
- if (dst == SLJIT_UNUSED)
- return emit_test_binary(compiler, src1, src1w, src2, src2w);
- return emit_cum_binary(compiler, BINARY_OPCODE(AND),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_OR:
- return emit_cum_binary(compiler, BINARY_OPCODE(OR),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_XOR:
- return emit_cum_binary(compiler, BINARY_OPCODE(XOR),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_SHL:
- return emit_shift_with_flags(compiler, SHL, HAS_FLAGS(op),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_LSHR:
- return emit_shift_with_flags(compiler, SHR, HAS_FLAGS(op),
- dst, dstw, src1, src1w, src2, src2w);
- case SLJIT_ASHR:
- return emit_shift_with_flags(compiler, SAR, HAS_FLAGS(op),
- dst, dstw, src1, src1w, src2, src2w);
- }
- return SLJIT_SUCCESS;
- }
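- /* Front-end view of the dispatcher above (hypothetical calls, compiler
-    setup elided): */
- #if 0
- /* R2 = R0 + R1 with no flags requested: emitted as a single LEA */
- sljit_emit_op2(compiler, SLJIT_ADD, SLJIT_R2, 0, SLJIT_R0, 0, SLJIT_R1, 0);
- /* compare only: dst is unused, so a plain CMP is emitted */
- sljit_emit_op2(compiler, SLJIT_SUB | SLJIT_SET_Z, SLJIT_UNUSED, 0, SLJIT_R0, 0, SLJIT_IMM, 42);
- #endif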
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src, sljit_sw srcw)
- {
- CHECK_ERROR();
- CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
- ADJUST_LOCAL_OFFSET(src, srcw);
- CHECK_EXTRA_REGS(src, srcw, (void)0);
- switch (op) {
- case SLJIT_FAST_RETURN:
- return emit_fast_return(compiler, src, srcw);
- case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
- /* Don't adjust shadow stack if it isn't enabled. */
- if (!cpu_has_shadow_stack ())
- return SLJIT_SUCCESS;
- return adjust_shadow_stack(compiler, src, srcw, SLJIT_UNUSED, 0);
- case SLJIT_PREFETCH_L1:
- case SLJIT_PREFETCH_L2:
- case SLJIT_PREFETCH_L3:
- case SLJIT_PREFETCH_ONCE:
- return emit_prefetch(compiler, op, src, srcw);
- }
- return SLJIT_SUCCESS;
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
- {
- CHECK_REG_INDEX(check_sljit_get_register_index(reg));
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
- return -1;
- #endif
- return reg_map[reg];
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
- {
- CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- return reg;
- #else
- return freg_map[reg];
- #endif
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
- void *instruction, sljit_s32 size)
- {
- sljit_u8 *inst;
- CHECK_ERROR();
- CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
- inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
- FAIL_IF(!inst);
- INC_SIZE(size);
- SLJIT_MEMCPY(inst, instruction, size);
- return SLJIT_SUCCESS;
- }
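- /* sljit_emit_op_custom copies raw machine code into the stream; as an
-    editor's sketch, a PAUSE instruction could be injected like this: */
- #if 0
- static const sljit_u8 pause_ins[] = { 0xf3, 0x90 }; /* F3 90 = PAUSE */
- sljit_emit_op_custom(compiler, (void*)pause_ins, sizeof(pause_ins));
- #endif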
- /* --------------------------------------------------------------------- */
- /* Floating point operators */
- /* --------------------------------------------------------------------- */
- /* Three sljit_s32 words for 16-byte alignment + four 16-byte constants. */
- static sljit_s32 sse2_data[3 + (4 * 4)];
- static sljit_s32 *sse2_buffer;
- static void init_compiler(void)
- {
- /* Align to 16 bytes. */
- sse2_buffer = (sljit_s32*)(((sljit_uw)sse2_data + 15) & ~0xf);
- /* Single precision constants (each constant is 16 bytes long). */
- sse2_buffer[0] = 0x80000000;
- sse2_buffer[4] = 0x7fffffff;
- /* Double precision constants (each constant is 16 bytes long). */
- sse2_buffer[8] = 0;
- sse2_buffer[9] = 0x80000000;
- sse2_buffer[12] = 0xffffffff;
- sse2_buffer[13] = 0x7fffffff;
- }
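- /* Resulting sse2_buffer layout (offsets in sljit_s32 words):
-    [0]      0x80000000 float sign mask (XORPD negates)
-    [4]      0x7fffffff float abs mask (ANDPD clears the sign)
-    [8..9]   double sign mask, only bit 63 set
-    [12..13] double abs mask, all bits except bit 63
-    sljit_emit_fop1 below selects the mask by SLJIT_F32_OP. */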
- static sljit_s32 emit_sse2(struct sljit_compiler *compiler, sljit_u8 opcode,
- sljit_s32 single, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
- {
- sljit_u8 *inst;
- inst = emit_x86_instruction(compiler, 2 | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = opcode;
- return SLJIT_SUCCESS;
- }
- static sljit_s32 emit_sse2_logic(struct sljit_compiler *compiler, sljit_u8 opcode,
- sljit_s32 pref66, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
- {
- sljit_u8 *inst;
- inst = emit_x86_instruction(compiler, 2 | (pref66 ? EX86_PREF_66 : 0) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = opcode;
- return SLJIT_SUCCESS;
- }
- static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
- sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
- {
- return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw);
- }
- static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
- sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
- {
- return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw);
- }
- static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
- sljit_u8 *inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
- compiler->mode32 = 0;
- #endif
- inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_F32_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP2, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CVTTSD2SI_r_xm;
- if (dst & SLJIT_MEM)
- return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
- return SLJIT_SUCCESS;
- }
- static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
- sljit_u8 *inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
- compiler->mode32 = 0;
- #endif
- if (src & SLJIT_IMM) {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
- srcw = (sljit_s32)srcw;
- #endif
- EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
- src = TMP_REG1;
- srcw = 0;
- }
- inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_F32_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP1, dst_r, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = CVTSI2SD_x_rm;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 1;
- #endif
- if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
- return SLJIT_SUCCESS;
- }
- static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- if (!FAST_IS_REG(src1)) {
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
- src1 = TMP_FREG;
- }
- return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_F32_OP), src1, src2, src2w);
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_s32 dst_r;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 1;
- #endif
- CHECK_ERROR();
- SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
- if (GET_OPCODE(op) == SLJIT_MOV_F64) {
- if (FAST_IS_REG(dst))
- return emit_sse2_load(compiler, op & SLJIT_F32_OP, dst, src, srcw);
- if (FAST_IS_REG(src))
- return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, src);
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src, srcw));
- return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
- }
- if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) {
- dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
- if (FAST_IS_REG(src)) {
- /* We overwrite the high bits of the source register; from SLJIT's
- point of view this is not an issue.
- Note: with SSE3, MOVDDUP and MOVSLDUP could be used instead. */
- FAIL_IF(emit_sse2_logic(compiler, UNPCKLPD_x_xm, op & SLJIT_F32_OP, src, src, 0));
- }
- else {
- FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_F32_OP), TMP_FREG, src, srcw));
- src = TMP_FREG;
- }
- FAIL_IF(emit_sse2_logic(compiler, CVTPD2PS_x_xm, op & SLJIT_F32_OP, dst_r, src, 0));
- if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
- return SLJIT_SUCCESS;
- }
- if (FAST_IS_REG(dst)) {
- dst_r = dst;
- if (dst != src)
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src, srcw));
- }
- else {
- dst_r = TMP_FREG;
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src, srcw));
- }
- switch (GET_OPCODE(op)) {
- case SLJIT_NEG_F64:
- FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_F32_OP ? sse2_buffer : sse2_buffer + 8)));
- break;
- case SLJIT_ABS_F64:
- FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_F32_OP ? sse2_buffer + 4 : sse2_buffer + 12)));
- break;
- }
- if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
- return SLJIT_SUCCESS;
- }
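- /* Front-end view (hypothetical call): negating a double routes through
-    the XORPD-with-sign-mask branch above. */
- #if 0
- sljit_emit_fop1(compiler, SLJIT_NEG_F64, SLJIT_FR0, 0, SLJIT_FR1, 0);
- #endif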
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 src1, sljit_sw src1w,
- sljit_s32 src2, sljit_sw src2w)
- {
- sljit_s32 dst_r;
- CHECK_ERROR();
- CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- ADJUST_LOCAL_OFFSET(src1, src1w);
- ADJUST_LOCAL_OFFSET(src2, src2w);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 1;
- #endif
- if (FAST_IS_REG(dst)) {
- dst_r = dst;
- if (dst == src1)
- ; /* Do nothing here. */
- else if (dst == src2 && (op == SLJIT_ADD_F64 || op == SLJIT_MUL_F64)) {
- /* Swap arguments. */
- src2 = src1;
- src2w = src1w;
- }
- else if (dst != src2)
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src1, src1w));
- else {
- dst_r = TMP_FREG;
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
- }
- }
- else {
- dst_r = TMP_FREG;
- FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
- }
- switch (GET_OPCODE(op)) {
- case SLJIT_ADD_F64:
- FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
- break;
- case SLJIT_SUB_F64:
- FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
- break;
- case SLJIT_MUL_F64:
- FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
- break;
- case SLJIT_DIV_F64:
- FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
- break;
- }
- if (dst_r == TMP_FREG)
- return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
- return SLJIT_SUCCESS;
- }
- /* --------------------------------------------------------------------- */
- /* Conditional instructions */
- /* --------------------------------------------------------------------- */
- SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
- {
- sljit_u8 *inst;
- struct sljit_label *label;
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_label(compiler));
- if (compiler->last_label && compiler->last_label->size == compiler->size)
- return compiler->last_label;
- label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
- PTR_FAIL_IF(!label);
- set_label(label, compiler);
- inst = (sljit_u8*)ensure_buf(compiler, 2);
- PTR_FAIL_IF(!inst);
- *inst++ = 0;
- *inst++ = 0;
- return label;
- }
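- /* The two zero-prefixed bytes above appear to be an in-buffer marker
-    record rather than machine code: a 0 length byte followed by a record
-    type (0 = label, 1 = jump, 2 = const, 3 = put_label), consumed later
-    during code generation. */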
- SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
- {
- sljit_u8 *inst;
- struct sljit_jump *jump;
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_jump(compiler, type));
- jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
- PTR_FAIL_IF_NULL(jump);
- set_jump(jump, compiler, (type & SLJIT_REWRITABLE_JUMP) | ((type & 0xff) << TYPE_SHIFT));
- type &= 0xff;
- /* Worst case size. */
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- compiler->size += (type >= SLJIT_JUMP) ? 5 : 6;
- #else
- compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3);
- #endif
- inst = (sljit_u8*)ensure_buf(compiler, 2);
- PTR_FAIL_IF_NULL(inst);
- *inst++ = 0;
- *inst++ = 1;
- return jump;
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8 *inst;
- struct sljit_jump *jump;
- CHECK_ERROR();
- CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
- ADJUST_LOCAL_OFFSET(src, srcw);
- CHECK_EXTRA_REGS(src, srcw, (void)0);
- if (src == SLJIT_IMM) {
- jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
- FAIL_IF_NULL(jump);
- set_jump(jump, compiler, JUMP_ADDR | (type << TYPE_SHIFT));
- jump->u.target = srcw;
- /* Worst case size. */
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- compiler->size += 5;
- #else
- compiler->size += 10 + 3;
- #endif
- inst = (sljit_u8*)ensure_buf(compiler, 2);
- FAIL_IF_NULL(inst);
- *inst++ = 0;
- *inst++ = 1;
- }
- else {
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- /* REX_W is not necessary (src is not immediate). */
- compiler->mode32 = 1;
- #endif
- inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_FF;
- *inst |= (type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm;
- }
- return SLJIT_SUCCESS;
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
- sljit_s32 dst, sljit_sw dstw,
- sljit_s32 type)
- {
- sljit_u8 *inst;
- sljit_u8 cond_set = 0;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_s32 reg;
- #endif
- /* ADJUST_LOCAL_OFFSET and CHECK_EXTRA_REGS might overwrite these values. */
- sljit_s32 dst_save = dst;
- sljit_sw dstw_save = dstw;
- CHECK_ERROR();
- CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- CHECK_EXTRA_REGS(dst, dstw, (void)0);
- type &= 0xff;
- /* setcc = jcc + 0x10. */
- cond_set = get_jump_code(type) + 0x10;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst)) {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 3);
- FAIL_IF(!inst);
- INC_SIZE(4 + 3);
- /* Set low register to conditional flag. */
- *inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | reg_lmap[TMP_REG1];
- *inst++ = REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B);
- *inst++ = OR_rm8_r8;
- *inst++ = MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst];
- return SLJIT_SUCCESS;
- }
- reg = (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG1;
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 4);
- FAIL_IF(!inst);
- INC_SIZE(4 + 4);
- /* Set low register to conditional flag. */
- *inst++ = (reg_map[reg] <= 7) ? REX : REX_B;
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | reg_lmap[reg];
- *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
- /* The movzx instruction does not affect flags. */
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst = MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg];
- if (reg != TMP_REG1)
- return SLJIT_SUCCESS;
- if (GET_OPCODE(op) < SLJIT_ADD) {
- compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV;
- return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
- }
- #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
- || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- compiler->skip_checks = 1;
- #endif
- return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
- #else
- /* The SLJIT_CONFIG_X86_32 code path starts here. */
- if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) {
- if (reg_map[dst] <= 4) {
- /* Low byte is accessible. */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
- FAIL_IF(!inst);
- INC_SIZE(3 + 3);
- /* Set low byte to conditional flag. */
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | reg_map[dst];
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst = MOD_REG | (reg_map[dst] << 3) | reg_map[dst];
- return SLJIT_SUCCESS;
- }
- /* Low byte is not accessible. */
- if (cpu_has_cmov == -1)
- get_cpu_features();
- if (cpu_has_cmov) {
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
- /* A "xor reg, reg" operation would overwrite the flags. */
- EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0);
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
- FAIL_IF(!inst);
- INC_SIZE(3);
- *inst++ = GROUP_0F;
- /* cmovcc = setcc - 0x50. */
- *inst++ = cond_set - 0x50;
- *inst++ = MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REG1];
- return SLJIT_SUCCESS;
- }
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1 + 3 + 3 + 1);
- *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
- /* Set al to conditional flag. */
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 0 /* eax */;
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst++ = MOD_REG | (reg_map[dst] << 3) | 0 /* eax */;
- *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
- return SLJIT_SUCCESS;
- }
- if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
- SLJIT_ASSERT(reg_map[SLJIT_R0] == 0);
- if (dst != SLJIT_R0) {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1 + 3 + 2 + 1);
- /* Set low register to conditional flag. */
- *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 0 /* eax */;
- *inst++ = OR_rm8_r8;
- *inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst];
- *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
- }
- else {
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2);
- FAIL_IF(!inst);
- INC_SIZE(2 + 3 + 2 + 2);
- /* Set low register to conditional flag. */
- *inst++ = XCHG_r_rm;
- *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1];
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 1 /* ecx */;
- *inst++ = OR_rm8_r8;
- *inst++ = MOD_REG | (1 /* ecx */ << 3) | 0 /* eax */;
- *inst++ = XCHG_r_rm;
- *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1];
- }
- return SLJIT_SUCCESS;
- }
- /* Set TMP_REG1 to the conditional bit (0 or 1). */
- inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
- FAIL_IF(!inst);
- INC_SIZE(1 + 3 + 3 + 1);
- *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
- /* Set al to conditional flag. */
- *inst++ = GROUP_0F;
- *inst++ = cond_set;
- *inst++ = MOD_REG | 0 /* eax */;
- *inst++ = GROUP_0F;
- *inst++ = MOVZX_r_rm8;
- *inst++ = MOD_REG | (0 << 3) /* eax */ | 0 /* eax */;
- *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
- if (GET_OPCODE(op) < SLJIT_ADD)
- return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
- #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
- || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
- compiler->skip_checks = 1;
- #endif
- return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
- #endif /* SLJIT_CONFIG_X86_64 */
- }
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
- sljit_s32 dst_reg,
- sljit_s32 src, sljit_sw srcw)
- {
- sljit_u8* inst;
- CHECK_ERROR();
- CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- dst_reg &= ~SLJIT_I32_OP;
- if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV) || (dst_reg >= SLJIT_R3 && dst_reg <= SLJIT_S3))
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
- #else
- if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV))
- return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
- #endif
- /* ADJUST_LOCAL_OFFSET is not needed. */
- CHECK_EXTRA_REGS(src, srcw, (void)0);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = dst_reg & SLJIT_I32_OP;
- dst_reg &= ~SLJIT_I32_OP;
- #endif
- if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
- EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);
- src = TMP_REG1;
- srcw = 0;
- }
- inst = emit_x86_instruction(compiler, 2, dst_reg, 0, src, srcw);
- FAIL_IF(!inst);
- *inst++ = GROUP_0F;
- *inst = get_jump_code(type & 0xff) - 0x40;
- return SLJIT_SUCCESS;
- }
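- /* Front-end view (hypothetical call): conditionally move R1 into R0
-    when the last flag-setting operation reported equality. */
- #if 0
- sljit_emit_cmov(compiler, SLJIT_EQUAL, SLJIT_R0, SLJIT_R1, 0);
- #endif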
- SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
- {
- CHECK_ERROR();
- CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- CHECK_EXTRA_REGS(dst, dstw, (void)0);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
- #endif
- ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (NOT_HALFWORD(offset)) {
- FAIL_IF(emit_load_imm64(compiler, TMP_REG1, offset));
- #if (defined SLJIT_DEBUG && SLJIT_DEBUG)
- SLJIT_ASSERT(emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0) != SLJIT_ERR_UNSUPPORTED);
- return compiler->error;
- #else
- return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0);
- #endif
- }
- #endif
- if (offset != 0)
- return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
- return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
- }
- SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
- {
- sljit_u8 *inst;
- struct sljit_const *const_;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_s32 reg;
- #endif
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- CHECK_EXTRA_REGS(dst, dstw, (void)0);
- const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
- PTR_FAIL_IF(!const_);
- set_const(const_, compiler);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
- reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
- if (emit_load_imm64(compiler, reg, init_value))
- return NULL;
- #else
- if (emit_mov(compiler, dst, dstw, SLJIT_IMM, init_value))
- return NULL;
- #endif
- inst = (sljit_u8*)ensure_buf(compiler, 2);
- PTR_FAIL_IF(!inst);
- *inst++ = 0;
- *inst++ = 2;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (dst & SLJIT_MEM)
- if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
- return NULL;
- #endif
- return const_;
- }
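- /* Typical lifecycle of a constant (hypothetical sketch; the accessor
-    names below are the public SLJIT helpers): */
- #if 0
- struct sljit_const *c = sljit_emit_const(compiler, SLJIT_R0, 0, 0);
- /* ... later, after sljit_generate_code(compiler): */
- sljit_set_const(sljit_get_const_addr(c), 1234,
- sljit_get_executable_offset(compiler));
- #endif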
- SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
- {
- struct sljit_put_label *put_label;
- sljit_u8 *inst;
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- sljit_s32 reg;
- sljit_uw start_size;
- #endif
- CHECK_ERROR_PTR();
- CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
- ADJUST_LOCAL_OFFSET(dst, dstw);
- CHECK_EXTRA_REGS(dst, dstw, (void)0);
- put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
- PTR_FAIL_IF(!put_label);
- set_put_label(put_label, compiler, 0);
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- compiler->mode32 = 0;
- reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
- if (emit_load_imm64(compiler, reg, 0))
- return NULL;
- #else
- if (emit_mov(compiler, dst, dstw, SLJIT_IMM, 0))
- return NULL;
- #endif
- #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
- if (dst & SLJIT_MEM) {
- start_size = compiler->size;
- if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
- return NULL;
- put_label->flags = compiler->size - start_size;
- }
- #endif
- inst = (sljit_u8*)ensure_buf(compiler, 2);
- PTR_FAIL_IF(!inst);
- *inst++ = 0;
- *inst++ = 3;
- return put_label;
- }
- SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
- {
- SLJIT_UNUSED_ARG(executable_offset);
- SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 0);
- #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
- sljit_unaligned_store_sw((void*)addr, new_target - (addr + 4) - (sljit_uw)executable_offset);
- #else
- sljit_unaligned_store_sw((void*)addr, (sljit_sw) new_target);
- #endif
- SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 1);
- }
- SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
- {
- SLJIT_UNUSED_ARG(executable_offset);
- SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 0);
- sljit_unaligned_store_sw((void*)addr, new_constant);
- SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 1);
- }