// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper32;
using static ARMeilleure.IntermediateRepresentation.Operand.Factory;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand res = context.AddIntrinsic(addInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);

            Operand res = context.Add(ne0, ne1);

            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));

                res = EmitVectorInsert(context, res, de, index, op.Size);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int eSize = 8 << op.Size;

            Operand res = eSize switch
            {
                8 => Clz_V_I8 (context, GetVec(op.Rn)),
                16 => Clz_V_I16(context, GetVec(op.Rn)),
                32 => Clz_V_I32(context, GetVec(op.Rn)),
                _ => default
            };

            if (res != default)
            {
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
            }
            else
            {
                int elems = op.GetBytesCount() >> op.Size;

                res = context.VectorZero();

                for (int index = 0; index < elems; index++)
                {
                    Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                    Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));

                    res = EmitVectorInsert(context, res, de, index, op.Size);
                }
            }

            context.Copy(GetVec(op.Rd), res);
        }

        private static Operand Clz_V_I8(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            // CLZ nibble table.
            Operand clzTable = X86GetScalar(context, 0x01_01_01_01_02_02_03_04);
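            // Each byte of the table holds the leading-zero count of its 4-bit index:
            // clz4(0) = 4, clz4(1) = 3, clz4(2..3) = 2, clz4(4..7) = 1, clz4(8..15) = 0
            // (bytes 8..15 of the table vector are zero).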
            Operand maskLow = X86GetAllElements(context, 0x0f_0f_0f_0f);
            Operand c04 = X86GetAllElements(context, 0x04_04_04_04);

            // CLZ of low 4 bits of elements in arg.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, arg);

            // Get the high 4 bits of elements in arg.
            Operand hiArg = context.AddIntrinsic(Intrinsic.X86Psrlw, arg, Const(4));

            hiArg = context.AddIntrinsic(Intrinsic.X86Pand, hiArg, maskLow);

            // CLZ of high 4 bits of elements in arg.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, clzTable, hiArg);

            // If high 4 bits are not all zero, we discard the CLZ of the low 4 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqb, hiClz, c04);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddb, loClz, hiClz);
        }

        private static Operand Clz_V_I16(ArmEmitterContext context, Operand arg)
        {
            if (!Optimizations.UseSsse3)
            {
                return default;
            }

            Operand maskSwap = X86GetElements(context, 0x80_0f_80_0d_80_0b_80_09, 0x80_07_80_05_80_03_80_01);
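            // PSHUFB control: copies the high byte of each 16-bit element into the low
            // byte position; indices with bit 7 set (0x80) produce zero in that byte.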
            Operand maskLow = X86GetAllElements(context, 0x00ff_00ff);
            Operand c0008 = X86GetAllElements(context, 0x0008_0008);

            // CLZ pair of high 8 and low 8 bits of elements in arg.
            Operand hiloClz = Clz_V_I8(context, arg);

            // Get CLZ of low 8 bits in each pair.
            Operand loClz = context.AddIntrinsic(Intrinsic.X86Pand, hiloClz, maskLow);

            // Get CLZ of high 8 bits in each pair.
            Operand hiClz = context.AddIntrinsic(Intrinsic.X86Pshufb, hiloClz, maskSwap);

            // If high 8 bits are not all zero, we discard the CLZ of the low 8 bits.
            Operand mask = context.AddIntrinsic(Intrinsic.X86Pcmpeqw, hiClz, c0008);

            loClz = context.AddIntrinsic(Intrinsic.X86Pand, loClz, mask);

            return context.AddIntrinsic(Intrinsic.X86Paddw, loClz, hiClz);
        }

        private static Operand Clz_V_I32(ArmEmitterContext context, Operand arg)
        {
            // TODO: Use vplzcntd when AVX-512 is supported.
            if (!Optimizations.UseSse2)
            {
                return default;
            }

            Operand AddVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Paddd, op0, op1);
            Operand SubVectorI32(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Psubd, op0, op1);
            Operand ShiftRightVectorUI32(Operand op0, int imm8) => context.AddIntrinsic(Intrinsic.X86Psrld, op0, Const(imm8));
            Operand OrVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Por, op0, op1);
            Operand AndVector(Operand op0, Operand op1) => context.AddIntrinsic(Intrinsic.X86Pand, op0, op1);
            Operand NotVector(Operand op0) => context.AddIntrinsic(Intrinsic.X86Pandn, op0, context.VectorOne());

            Operand c55555555 = X86GetAllElements(context, 0x55555555);
            Operand c33333333 = X86GetAllElements(context, 0x33333333);
            Operand c0f0f0f0f = X86GetAllElements(context, 0x0f0f0f0f);
            Operand c0000003f = X86GetAllElements(context, 0x0000003f);
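            // Per 32-bit lane this computes clz(x) as popcount(~smear(x)); the scalar
            // equivalent of the sequence below is:
            //   x |= x >> 1; x |= x >> 2; x |= x >> 4; x |= x >> 8; x |= x >> 16;
            //   return PopCount(~x);
            // where the popcount is the classic SWAR bit-count using the
            // 0x55555555 / 0x33333333 / 0x0f0f0f0f masks.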
            Operand tmp0;
            Operand tmp1;
            Operand res;

            // Set all bits after highest set bit to 1.
            res = OrVector(ShiftRightVectorUI32(arg, 1), arg);
            res = OrVector(ShiftRightVectorUI32(res, 2), res);
            res = OrVector(ShiftRightVectorUI32(res, 4), res);
            res = OrVector(ShiftRightVectorUI32(res, 8), res);
            res = OrVector(ShiftRightVectorUI32(res, 16), res);

            // Make leading 0s into leading 1s.
            res = NotVector(res);

            // Count leading 1s, which is the population count.
            tmp0 = ShiftRightVectorUI32(res, 1);
            tmp0 = AndVector(tmp0, c55555555);
            res = SubVectorI32(res, tmp0);

            tmp0 = ShiftRightVectorUI32(res, 2);
            tmp0 = AndVector(tmp0, c33333333);
            tmp1 = AndVector(res, c33333333);
            res = AddVectorI32(tmp0, tmp1);

            tmp0 = ShiftRightVectorUI32(res, 4);
            tmp0 = AddVectorI32(tmp0, res);
            res = AndVector(tmp0, c0f0f0f0f);

            tmp0 = ShiftRightVectorUI32(res, 8);
            res = AddVectorI32(tmp0, res);

            tmp0 = ShiftRightVectorUI32(res, 16);
            res = AddVectorI32(tmp0, res);

            res = AndVector(res, c0000003f);

            return res;
        }

        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);

                Operand de;
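                // Use the host POPCNT instruction when available; otherwise fall back to
                // a software bit-count (EmitCountSetBits8 is assumed to be a SWAR
                // popcount over the extracted byte).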
                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = EmitCountSetBits8(context, ne);
                }

                res = EmitVectorInsert(context, res, de, index, 0);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if ((op.Size & 1) == 0)
                {
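                    // HADDPS sums adjacent lane pairs, so lane 0 receives Rn[0] + Rn[1].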
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if ((op.Size & 1) == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

                            Intrinsic addInst = (op.Size & 1) == 0 ? Intrinsic.X86Addps : Intrinsic.X86Addpd;

                            return context.AddIntrinsic(addInst, op1, op2);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
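                // Note: this fast path lowers the operation as a separate multiply and
                // add, so the single-rounding semantics of a true fused multiply-add
                // are not preserved.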
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
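                // FMAXNM implements IEEE 754-2008 maxNum: if exactly one operand is a
                // quiet NaN, the other (numeric) operand is returned.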
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
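                    // SHUFPS with all four index fields set to op.Index broadcasts
                    // m[Index] to every single-precision lane.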
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
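                    // SHUFPD with both index bits set to op.Index broadcasts m[Index]
                    // to both double-precision lanes.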
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
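                    // -0f has only the sign bit set, so XOR against it negates the value.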
  1126. Operand mask = X86GetScalar(context, -0f);
  1127. Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
  1128. context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
  1129. }
  1130. else /* if (op.Size == 1) */
  1131. {
  1132. Operand mask = X86GetScalar(context, -0d);
  1133. Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
  1134. context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
  1135. }
  1136. }
  1137. else
  1138. {
  1139. EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
  1140. }
  1141. }
  1142. public static void Fneg_V(ArmEmitterContext context)
  1143. {
  1144. if (Optimizations.UseSse2)
  1145. {
  1146. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1147. int sizeF = op.Size & 1;
  1148. if (sizeF == 0)
  1149. {
  1150. Operand mask = X86GetAllElements(context, -0f);
  1151. Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
  1152. if (op.RegisterSize == RegisterSize.Simd64)
  1153. {
  1154. res = context.VectorZeroUpper64(res);
  1155. }
  1156. context.Copy(GetVec(op.Rd), res);
  1157. }
  1158. else /* if (sizeF == 1) */
  1159. {
  1160. Operand mask = X86GetAllElements(context, -0d);
  1161. Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
  1162. context.Copy(GetVec(op.Rd), res);
  1163. }
  1164. }
  1165. else
  1166. {
  1167. EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
  1168. }
  1169. }
        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }
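
        // The x86 rcpss/rsqrtss estimates carry more precision than the table-based AArch64
        // FRECPE/FRSQRTE results (which have an 8-bit mantissa); EmitSse41Round32Exp8OpF, as
        // its name suggests, narrows the x86 estimate so the two stay close.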
        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpss, GetVec(op.Rn)), scalar: true);
                context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rcpps, GetVec(op.Rn)), scalar: false);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }
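
        // FPRecipStepFused(n, m) is 2 - n*m, except that 0 * infinity pairs must produce
        // exactly +2 rather than NaN; EmitSse41RecipStepSelectOpF patches those lanes, with
        // the mask operand (2f/2d) doubling as the special-case result.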
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: true, sizeF);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }
        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, mask, scalar: false, sizeF);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }
        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
            });
        }

        public static void Frinta_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearestAway);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
                });
            }
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearestAway);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
                });
            }
        }
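
        // FRINTI and FRINTX round using the rounding mode currently selected in FPCR, so they
        // cannot use a fixed SSE4.1 rounding immediate and go through EmitRoundByRMode instead.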
        public static void Frinti_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundByRMode(context, op1);
            });
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundByRMode(context, op1);
            });
        }

        public static void Frintm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundByRMode(context, op1);
            });
        }

        public static void Frintx_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundByRMode(context, op1);
            });
        }

        public static void Frintz_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41ScalarRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
                });
            }
        }

        public static void Frintz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
                });
            }
        }

        public static void Frsqrte_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rsqrtss, GetVec(op.Rn)), scalar: true);
                context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
                });
            }
        }

        public static void Frsqrte_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse41 && sizeF == 0)
            {
                Operand res = EmitSse41Round32Exp8OpF(context, context.AddIntrinsic(Intrinsic.X86Rsqrtps, GetVec(op.Rn)), scalar: false);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
                });
            }
        }
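
        // FPRSqrtStepFused(n, m) is (3 - n*m) / 2, and 0 * infinity pairs must produce exactly
        // +1.5; maskOneHalf is fed to the final select as that special-case result.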
        public static void Frsqrts_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetScalar(context, 0.5f);
                    Operand maskThree = X86GetScalar(context, 3f);
                    Operand maskOneHalf = X86GetScalar(context, 1.5f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetScalar(context, 0.5d);
                    Operand maskThree = X86GetScalar(context, 3d);
                    Operand maskOneHalf = X86GetScalar(context, 1.5d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: true, sizeF);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }

        public static void Frsqrts_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5f);
                    Operand maskThree = X86GetAllElements(context, 3f);
                    Operand maskOneHalf = X86GetAllElements(context, 1.5f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5d);
                    Operand maskThree = X86GetAllElements(context, 3d);
                    Operand maskOneHalf = X86GetAllElements(context, 1.5d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
                    res = EmitSse41RecipStepSelectOpF(context, n, m, res, maskOneHalf, scalar: false, sizeF);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }

        public static void Fsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsqrt_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsub_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }

        public static void Fsub_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }

        public static void Mla_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Add);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mla_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.None);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Mul_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Neg_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Neg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }
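
        // PMULL is a carry-less (polynomial) multiply. With PCLMULQDQ available, imm8 picks the
        // source quadwords: 0b0000_0000 multiplies the low halves (PMULL), 0b0001_0001 the high
        // halves (PMULL2). The SSE4.1 fallback is a schoolbook shift-and-XOR loop: it broadcasts
        // bit i of every n element across the element, ANDs that with m << i, and XOR-accumulates.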
        public static void Pmull_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UsePclmulqdq && op.Size == 3)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int imm8 = op.RegisterSize == RegisterSize.Simd64 ? 0b0000_0000 : 0b0001_0001;
                Operand res = context.AddIntrinsic(Intrinsic.X86Pclmulqdq, n, m, Const(imm8));
                context.Copy(GetVec(op.Rd), res);
            }
            else if (Optimizations.UseSse41)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    n = context.VectorZeroUpper64(n);
                    m = context.VectorZeroUpper64(m);
                }
                else /* if (op.RegisterSize == RegisterSize.Simd128) */
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Operand res = context.VectorZero();
                if (op.Size == 0)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, n);
                    m = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, m);
                    for (int i = 0; i < 8; i++)
                    {
                        Operand mask = context.AddIntrinsic(Intrinsic.X86Psllw, n, Const(15 - i));
                        mask = context.AddIntrinsic(Intrinsic.X86Psraw, mask, Const(15));
                        Operand tmp = context.AddIntrinsic(Intrinsic.X86Psllw, m, Const(i));
                        tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);
                        res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                    }
                }
                else /* if (op.Size == 3) */
                {
                    Operand zero = context.VectorZero();
                    for (int i = 0; i < 64; i++)
                    {
                        Operand mask = context.AddIntrinsic(Intrinsic.X86Movlhps, n, n);
                        mask = context.AddIntrinsic(Intrinsic.X86Psllq, mask, Const(63 - i));
                        mask = context.AddIntrinsic(Intrinsic.X86Psrlq, mask, Const(63));
                        mask = context.AddIntrinsic(Intrinsic.X86Psubq, zero, mask);
                        Operand tmp = EmitSse2Sll_128(context, m, i);
                        tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);
                        res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                    }
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res;
                if (op.Size == 0)
                {
                    res = context.VectorZero();
                    int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 8;
                    for (int index = 0; index < 8; index++)
                    {
                        Operand ne = context.VectorExtract8(n, part + index);
                        Operand me = context.VectorExtract8(m, part + index);
                        Operand de = EmitPolynomialMultiply(context, ne, me, 8);
                        res = EmitVectorInsert(context, res, de, index, 1);
                    }
                }
                else /* if (op.Size == 3) */
                {
                    int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 1;
                    Operand ne = context.VectorExtract(OperandType.I64, n, part);
                    Operand me = context.VectorExtract(OperandType.I64, m, part);
                    res = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.PolynomialMult64_128)), ne, me);
                }
                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Raddhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
        }

        public static void Rsubhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
        }

        public static void Saba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: true);
        }

        public static void Saddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: false);
        }

        public static void Saddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Saddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
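
        // SHADD uses the carry-free identity (a + b) >> 1 == (a & b) + ((a ^ b) >> 1), which
        // halves without widening or overflowing; the op.Size > 0 guard exists because x86 has
        // no per-byte shift instruction.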
        public static void Shadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;
                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, res, res2);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Add(op1, op2), Const(1));
                });
            }
        }
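
        // SHSUB: adding the 0x80.. bias turns signed elements into offset-unsigned ones, so the
        // unsigned Pavg average applies; n - ((n + m + 1) >> 1) then equals (n - m) >> 1, the
        // signed halving subtract, with the biases cancelling in the final subtraction.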
        public static void Shsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
                Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, nPlusMask, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Smax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic maxInst = X86PmaxsInstruction[op.Size];
                Operand res = context.AddIntrinsic(maxInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
        }

        public static void Smin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic minInst = X86PminsInstruction[op.Size];
                Operand res = context.AddIntrinsic(minInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
        }

        public static void Smlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Smull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Sqabs_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqabs_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
        }

        public static void Sqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Add);
        }

        public static void Sqdmulh_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
        }

        public static void Sqdmulh_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
        }

        public static void Sqdmulh_Ve(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false));
        }

        public static void Sqneg_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqneg_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqrdmulh_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
        }

        public static void Sqrdmulh_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
        }

        public static void Sqrdmulh_Ve(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpByElemSx(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true));
        }

        public static void Sqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
        }

        public static void Sqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Sub);
        }

        public static void Sqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
        }

        public static void Sqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
        }

        public static void Sqxtun_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
        }

        public static void Sqxtun_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
        }
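
        // SRHADD: with both inputs biased by -0x80.., the unsigned Pavg average computes
        // ((n + m + 1) >> 1) - bias, so adding the bias back yields the signed rounding
        // halving add directly.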
        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, mask, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);
                    res = context.Add(res, Const(1L));
                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }

        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand res = context.AddIntrinsic(subInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, flags: SaturatingFlags.Accumulate);
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: true);
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: false);
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
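
        // UHADD reuses the same carry-free halving identity as Shadd_V above, with a logical
        // rather than arithmetic shift of the XOR term.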
        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;
                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, res, res2);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }
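
        // UHSUB: since Pavg rounds up, n - ((n + m + 1) >> 1) == (n - m) >> 1 in modular
        // arithmetic, which is exactly the unsigned halving subtract.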
        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, n, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic maxInst = X86PmaxuInstruction[op.Size];
                Operand res = context.AddIntrinsic(maxInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic minInst = X86PminuInstruction[op.Size];
                Operand res = context.AddIntrinsic(minInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
        }
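
        // URHADD maps directly onto Pavgb/Pavgw, which compute (n + m + 1) >> 1, the unsigned
        // rounding halving add.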
  2721. public static void Urhadd_V(ArmEmitterContext context)
  2722. {
  2723. OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
  2724. if (Optimizations.UseSse2 && op.Size < 2)
  2725. {
  2726. Operand n = GetVec(op.Rn);
  2727. Operand m = GetVec(op.Rm);
  2728. Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
  2729. Operand res = context.AddIntrinsic(avgInst, n, m);
  2730. if (op.RegisterSize == RegisterSize.Simd64)
  2731. {
  2732. res = context.VectorZeroUpper64(res);
  2733. }
  2734. context.Copy(GetVec(op.Rd), res);
  2735. }
  2736. else
  2737. {
  2738. EmitVectorBinaryOpZx(context, (op1, op2) =>
  2739. {
  2740. Operand res = context.Add(op1, op2);
  2741. res = context.Add(res, Const(1L));
  2742. return context.ShiftRightUI(res, Const(1));
  2743. });
  2744. }
  2745. }
  2746. public static void Usqadd_S(ArmEmitterContext context)
  2747. {
  2748. EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
  2749. }
  2750. public static void Usqadd_V(ArmEmitterContext context)
  2751. {
  2752. EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
  2753. }
  2754. public static void Usubl_V(ArmEmitterContext context)
  2755. {
  2756. if (Optimizations.UseSse41)
  2757. {
  2758. OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
  2759. Operand n = GetVec(op.Rn);
  2760. Operand m = GetVec(op.Rm);
  2761. if (op.RegisterSize == RegisterSize.Simd128)
  2762. {
  2763. n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
  2764. m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
  2765. }
  2766. Intrinsic movInst = X86PmovzxInstruction[op.Size];
  2767. n = context.AddIntrinsic(movInst, n);
  2768. m = context.AddIntrinsic(movInst, m);
  2769. Intrinsic subInst = X86PsubInstruction[op.Size + 1];
  2770. context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
  2771. }
  2772. else
  2773. {
  2774. EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
  2775. }
  2776. }
        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

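        // Integer absolute value as compare-and-select; works for either
        // integer operand type.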
        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));

            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }

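        // Shared emitter for SADDLP/UADDLP and the accumulating SADALP/UADALP:
        // adjacent element pairs are summed into double-width elements,
        // optionally accumulating into the destination.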
        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int pairs = op.GetPairsCount() >> op.Size;

            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;

                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);

                Operand e = context.Add(ne0, ne1);

                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);

                    e = context.Add(e, de);
                }

                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }

            context.Copy(GetVec(op.Rd), res);
        }

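        // Core of SQDMULH/SQRDMULH: returns the high half of the doubled
        // product. The non-rounding form folds the doubling into the shift
        // amount; the rounding form adds 1 << (eSize - 1) before shifting and
        // flips the int.MinValue overflow case so the enclosing saturating op
        // can clamp it.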
        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int eSize = 8 << op.Size;

            Operand res = context.Multiply(n, m);

            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);

                res = context.ShiftLeft(res, Const(1));
                res = context.Add(res, Const(roundConst));
                res = context.ShiftRightSI(res, Const(eSize));

                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));

                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }

            return res;
        }

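        // Shared emitter for ADDHN{2}/SUBHN{2} and the rounding RADDHN{2}/
        // RSUBHN{2}: the double-width result of emit() is narrowed by keeping
        // its high half, with an optional rounding constant added first. The
        // "2" variants (part != 0) write the narrowed elements to the upper
        // half of Rd and preserve its lower half.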
        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;

            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

            Operand d = GetVec(op.Rd);

            Operand res = part == 0 ? context.VectorZero() : context.Copy(d);

            long roundConst = 1L << (eSize - 1);

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);

                Operand de = emit(ne, me);

                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }

                de = context.ShiftRightUI(de, Const(eSize));

                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }

            context.Copy(d, res);
        }

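        // 64-bit max/min helpers implemented as compare-and-select, in signed
        // and unsigned flavors.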
        private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);

            Operand cmp = signed
                ? context.ICompareGreaterOrEqual  (op1, op2)
                : context.ICompareGreaterOrEqualUI(op1, op2);

            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);

            Operand cmp = signed
                ? context.ICompareLessOrEqual  (op1, op2)
                : context.ICompareLessOrEqualUI(op1, op2);

            return context.ConditionalSelect(cmp, op1, op2);
        }

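        // FRINT* (scalar) via SSE4.1 ROUNDSS/ROUNDSD. The "to nearest, ties
        // away from zero" mode has no x86 round-control encoding and is
        // emulated separately.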
        private static void EmitSse41ScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Operand res;

            if (roundMode != FPRoundingMode.ToNearestAway)
            {
                Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;

                res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            }
            else
            {
                res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: true);
            }

            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

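        // FRINT* (vector) via SSE4.1 ROUNDPS/ROUNDPD, with the same handling
        // of the ties-away mode as the scalar form.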
        private static void EmitSse41VectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Operand res;

            if (roundMode != FPRoundingMode.ToNearestAway)
            {
                Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;

                res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            }
            else
            {
                res = EmitSse41RoundToNearestWithTiesToAwayOpF(context, n, scalar: false);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

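        // Rounds each single-precision lane to 8 mantissa bits: adding 0x4000
        // (half the truncation step) implements round-to-nearest before the
        // 0xFFFF8000 mask truncates to sign, exponent and 8 mantissa bits.
        // NaN and infinity lanes (all-ones exponent) pass through unchanged
        // via the final blend.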
        private static Operand EmitSse41Round32Exp8OpF(ArmEmitterContext context, Operand value, bool scalar)
        {
            Operand roundMask;
            Operand truncMask;
            Operand expMask;

            if (scalar)
            {
                roundMask = X86GetScalar(context, 0x4000);
                truncMask = X86GetScalar(context, unchecked((int)0xFFFF8000));
                expMask   = X86GetScalar(context, 0x7F800000);
            }
            else
            {
                roundMask = X86GetAllElements(context, 0x4000);
                truncMask = X86GetAllElements(context, unchecked((int)0xFFFF8000));
                expMask   = X86GetAllElements(context, 0x7F800000);
            }

            Operand oValue = value;
            Operand masked = context.AddIntrinsic(Intrinsic.X86Pand, value, expMask);
            Operand isNaNInf = context.AddIntrinsic(Intrinsic.X86Pcmpeqd, masked, expMask);

            value = context.AddIntrinsic(Intrinsic.X86Paddd, value, roundMask);
            value = context.AddIntrinsic(Intrinsic.X86Pand, value, truncMask);

            return context.AddIntrinsic(Intrinsic.X86Blendvps, value, oValue, isNaNInf);
        }

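        // Special-case selection for FRECPS/FRSQRTS: lanes where one operand
        // is zero and the other infinity take their result from the
        // precomputed mask instead of the normal computation. The left shift
        // by one discards the sign bit so zeros and infinities of either sign
        // are matched.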
        private static Operand EmitSse41RecipStepSelectOpF(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            Operand res,
            Operand mask,
            bool scalar,
            int sizeF)
        {
            Intrinsic cmpOp;
            Intrinsic shlOp;
            Intrinsic blendOp;

            Operand zero = context.VectorZero();
            Operand expMask;

            if (sizeF == 0)
            {
                cmpOp   = Intrinsic.X86Pcmpeqd;
                shlOp   = Intrinsic.X86Pslld;
                blendOp = Intrinsic.X86Blendvps;

                expMask = scalar ? X86GetScalar(context, 0x7F800000 << 1) : X86GetAllElements(context, 0x7F800000 << 1);
            }
            else /* if (sizeF == 1) */
            {
                cmpOp   = Intrinsic.X86Pcmpeqq;
                shlOp   = Intrinsic.X86Psllq;
                blendOp = Intrinsic.X86Blendvpd;

                expMask = scalar ? X86GetScalar(context, 0x7FF0000000000000L << 1) : X86GetAllElements(context, 0x7FF0000000000000L << 1);
            }

            n = context.AddIntrinsic(shlOp, n, Const(1));
            m = context.AddIntrinsic(shlOp, m, Const(1));

            Operand nZero = context.AddIntrinsic(cmpOp, n, zero);
            Operand mZero = context.AddIntrinsic(cmpOp, m, zero);
            Operand nInf  = context.AddIntrinsic(cmpOp, n, expMask);
            Operand mInf  = context.AddIntrinsic(cmpOp, m, expMask);

            Operand nmZero    = context.AddIntrinsic(Intrinsic.X86Por,  nZero,  mZero);
            Operand nmInf     = context.AddIntrinsic(Intrinsic.X86Por,  nInf,   mInf);
            Operand nmZeroInf = context.AddIntrinsic(Intrinsic.X86Pand, nmZero, nmInf);

            return context.AddIntrinsic(blendOp, res, mask, nmZeroInf);
        }

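        // Produces per-lane NaN masks for opF: an unordered self-compare flags
        // every NaN, and the quiet bit (bit 22 for singles, bit 51 for
        // doubles) splits the result into quiet and signaling masks; isQNaN
        // lets callers request only one of the two.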
        public static void EmitSse2VectorIsNaNOpF(
            ArmEmitterContext context,
            Operand opF,
            out Operand qNaNMask,
            out Operand sNaNMask,
            bool? isQNaN = null)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

            if ((op.Size & 1) == 0)
            {
                const int QBit = 22;

                Operand qMask = X86GetAllElements(context, 1 << QBit);

                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));

                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));

                qNaNMask = isQNaN == null ||  (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andps,  mask2, mask1) : default;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnps, mask2, mask1) : default;
            }
            else /* if ((op.Size & 1) == 1) */
            {
                const int QBit = 51;

                Operand qMask = X86GetAllElements(context, 1L << QBit);

                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));

                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));

                qNaNMask = isQNaN == null ||  (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andpd,  mask2, mask1) : default;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnpd, mask2, mask1) : default;
            }
        }

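        // Implements ARM's NaN propagation for two-operand FP ops: a signaling
        // NaN in n takes priority over one in m, which takes priority over a
        // quiet NaN in n; the selected NaN is quieted by setting its quiet
        // bit. Fully ordered lanes take the value produced by emit(). When
        // explicit n/m operands are passed, the result is returned for further
        // composition instead of being written back to Rd.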
        public static Operand EmitSse41ProcessNaNsOpF(
            ArmEmitterContext context,
            Func2I emit,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;

            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out Operand nSNaNMask);
            EmitSse2VectorIsNaNOpF(context, mCopy, out _, out Operand mSNaNMask, isQNaN: false);

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                const int QBit = 22;

                Operand qMask = scalar ? X86GetScalar(context, 1 << QBit) : X86GetAllElements(context, 1 << QBit);

                Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
                resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);

                Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, nCopy, resNaNMask);
                resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);

                Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmpps, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));

                Operand res = context.AddIntrinsic(Intrinsic.X86Blendvps, resNaN, emit(nCopy, mCopy), resMask);

                if (n != default || m != default)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return default;
            }
            else /* if (sizeF == 1) */
            {
                const int QBit = 51;

                Operand qMask = scalar ? X86GetScalar(context, 1L << QBit) : X86GetAllElements(context, 1L << QBit);

                Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
                resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);

                Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, nCopy, resNaNMask);
                resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);

                Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmppd, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));

                Operand res = context.AddIntrinsic(Intrinsic.X86Blendvpd, resNaN, emit(nCopy, mCopy), resMask);

                if (n != default || m != default)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return default;
            }
        }

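        // Wraps emit() between enter/exit of the x86 FTZ and DAZ modes so
        // denormals are flushed the way the guest's flush-to-zero mode
        // expects, restoring the previous state afterwards.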
        public static Operand EmitSseOrAvxHandleFzModeOpF(
            ArmEmitterContext context,
            Func2I emit,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;

            EmitSseOrAvxEnterFtzAndDazModesOpF(context, out Operand isTrue);

            Operand res = emit(nCopy, mCopy);

            EmitSseOrAvxExitFtzAndDazModesOpF(context, isTrue);

            if (n != default || m != default)
            {
                return res;
            }

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
            }
            else /* if (sizeF == 1) */
            {
                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }
            }

            context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

            return default;
        }

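        // Max/min with ARM semantics for signed zero: MAXPS/MINPS alone return
        // the second operand for (+0, -0) pairs, so the sign bits are combined
        // separately (AND for max, OR for min) and merged back into the
        // magnitude of the result.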
        private static Operand EmitSse2VectorMaxMinOpF(ArmEmitterContext context, Operand n, Operand m, bool isMax)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

            if ((op.Size & 1) == 0)
            {
                Operand mask = X86GetAllElements(context, -0f);

                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxps : Intrinsic.X86Minps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);

                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andps, mask, resSign);

                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
            else /* if ((op.Size & 1) == 1) */
            {
                Operand mask = X86GetAllElements(context, -0d);

                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);

                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andpd, mask, resSign);

                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
        }

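        // FMAXNM/FMINNM semantics: when exactly one operand is a quiet NaN it
        // is treated as missing and replaced with the identity infinity
        // (-Inf for max, +Inf for min), so the following max/min selects the
        // numeric operand; signaling NaNs still propagate via
        // EmitSse41ProcessNaNsOpF.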
        private static Operand EmitSse41MaxMinNumOpF(
            ArmEmitterContext context,
            bool isMaxNum,
            bool scalar,
            Operand n = default,
            Operand m = default)
        {
            Operand nCopy = n == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn)) : n;
            Operand mCopy = m == default ? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm)) : m;

            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out _, isQNaN: true);
            EmitSse2VectorIsNaNOpF(context, mCopy, out Operand mQNaNMask, out _, isQNaN: true);

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);

                if (n != default || m != default)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return default;
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);

                if (n != default || m != default)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return default;
            }
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }

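        // MUL/MLA/MLS (vector) on SSE4.1. There is no packed 8-bit multiply on
        // x86, so for byte elements the odd-byte products are formed with a
        // 16-bit multiply of the shifted-down high bytes and recombined with
        // the even-byte products via a byte blend.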
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res;

            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }

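        // SABD/SABDL core: computes both subtraction orders and selects the
        // positive one per lane with a signed greater-than mask, since x86 has
        // no packed signed absolute-difference instruction.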
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

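        // UABD/UABDL core: x86 has no packed unsigned greater-than compare, so
        // the "n > m" mask is derived from PMAXU (max(m, n) != m), and the two
        // subtraction orders are then selected per lane as in the signed case.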
        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res  = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res  = context.AddIntrinsic(Intrinsic.X86Pand,  cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

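        // 128-bit left shift composed from 64-bit primitives: the bits that
        // would cross into the upper 64 bits are recovered by moving the low
        // qword up with PSLLDQ and shifting it right by (64 - shift).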
        private static Operand EmitSse2Sll_128(ArmEmitterContext context, Operand op, int shift)
        {
            // The upper part of op is assumed to be zero.
            Debug.Assert(shift >= 0 && shift < 64);

            if (shift == 0)
            {
                return op;
            }

            Operand high = context.AddIntrinsic(Intrinsic.X86Pslldq, op, Const(8));

            high = context.AddIntrinsic(Intrinsic.X86Psrlq, high, Const(64 - shift));

            Operand low = context.AddIntrinsic(Intrinsic.X86Psllq, op, Const(shift));

            return context.AddIntrinsic(Intrinsic.X86Por, high, low);
        }
    }
}