// InstEmitSimdArithmetic.cs

// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper32;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
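
        // Add_V: the SSE2 fast path selects the PADD variant for the element size via
        // the X86PaddInstruction table, then zeroes the upper 64 bits of the result
        // when the instruction uses the 64-bit (D) register form.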
        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand res = context.AddIntrinsic(addInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
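
        // ADDHN adds each pair of wide elements and narrows by keeping the high half
        // of every result; EmitHighNarrow with round: false distinguishes it from the
        // rounding RADDHN form.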
        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);
            Operand res = context.Add(ne0, ne1);
            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
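
        // Cls_V/Clz_V/Cnt_V below share the same element-loop pattern: extract each
        // element zero-extended, run it through a helper (a SoftFallback call, or
        // POPCNT for Cnt_V when available), and insert the result into an initially
        // zeroed destination vector.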
        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
                Operand de;
                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountSetBits8)), ne);
                }
                res = EmitVectorInsert(context, res, de, index, 0);
            }
            context.Copy(GetVec(op.Rd), res);
        }
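
        // FABD computes abs(n - m). The SSE2 fast path subtracts and then clears the
        // sign bit via EmitFloatAbs; its boolean arguments select single/double
        // precision and scalar/vector operation.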
        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }
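
        // Most FP arithmetic emitters below follow a three-tier dispatch: an SSE
        // intrinsic when FastFP and the required extension are enabled, a plain IR
        // operation when only FastFP is set, and otherwise a SoftFloat helper call
        // (via EmitSoftFloatCall) that preserves ARM's FP corner-case semantics.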
        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if ((op.Size & 1) == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if ((op.Size & 1) == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
                            Intrinsic addInst = (op.Size & 1) == 0 ? Intrinsic.X86Addps : Intrinsic.X86Addpd;
                            return context.AddIntrinsic(addInst, op1, op2);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }
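
        // Note: the SSE2 path below emits a separate multiply and add, so the product
        // is rounded before the addition; ARM's FMADD is fused (single rounding).
        // That difference is why this path is gated on Optimizations.FastFP.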
        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }
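
        // x86 MAXSS/MINSS and friends do not match ARM FMAX/FMIN NaN and signed-zero
        // behavior, so the SSE4.1 paths in the max/min family below wrap the raw
        // max/min in EmitSse41ProcessNaNsOpF and EmitSseOrAvxHandleFzModeOpF to fix
        // up NaN propagation and flush-to-zero before committing the result.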
        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMaxNum)), op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMax)), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: true, op1, op2);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                    }, scalar: false, op1, op2);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2ScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true, op1, op2);
                });
            }
            else
            {
                EmitScalarPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMinNum)), op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                    {
                        return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                        {
                            return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                        }, scalar: false, op1, op2);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMin)), op1, op2);
                });
            }
        }
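
        // FMLA/FMLS are architecturally fused multiply-accumulate operations. The
        // SoftFloat fallbacks (FPMulAdd/FPMulSub) keep the single rounding; the SSE2
        // fast paths, and the unconditional IR paths in Fmla_Se/Fmls_Se, use a
        // separate multiply and add/subtract, trading that rounding for speed.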
        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }
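
        // The by-element forms broadcast lane op.Index of m to every lane before the
        // multiply: for singles the SHUFPS immediate packs op.Index into all four
        // 2-bit fields, for doubles the SHUFPD immediate packs it into both 1-bit
        // fields.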
        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
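
        // FNEG just flips the sign bit: XOR with a mask holding -0f/-0d in the
        // relevant lanes (X86GetScalar for the scalar form, X86GetAllElements for
        // the vector form).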
        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }
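
        // FNMADD computes -a - n * m and FNMSUB computes -a + n * m. The SSE2 paths
        // negate a with a sign-bit XOR, then subtract or add the (separately rounded)
        // product; the fused fallbacks are FPNegMulAdd/FPNegMulSub.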
        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }
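
        // FRECPE: RCPSS/RCPPS give a low-precision (roughly 12-bit) reciprocal
        // estimate, which is only taken for single precision under FastFP; double
        // precision always goes through SoftFloat FPRecipEstimate.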
        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rcpss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rcpps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }
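
        // FRECPS is the Newton-Raphson step 2 - n * m used to refine a reciprocal
        // estimate. The fast path materializes the constant 2 and computes the step
        // unfused; the FPRecipStepFused fallback keeps the single rounding.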
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
            });
        }

        public static void Frinta_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }
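
        // FRINTI and FRINTX round using the current FPCR rounding mode, so they fall
        // back to SoftFallback.Round/RoundF, which presumably consult the runtime
        // rounding state; FRINTM/N/P/Z have fixed modes and can use SSE4.1 ROUND via
        // EmitScalarRoundOpF/EmitVectorRoundOpF.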
        public static void Frinti_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frintm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintx_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frintx_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frintz_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
  1402. {
  1403. EmitScalarRoundOpF(context, FPRoundingMode.TowardsZero);
  1404. }
  1405. else
  1406. {
  1407. EmitScalarUnaryOpF(context, (op1) =>
  1408. {
  1409. return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
  1410. });
  1411. }
  1412. }
  1413. public static void Frintz_V(ArmEmitterContext context)
  1414. {
  1415. if (Optimizations.UseSse41)
  1416. {
  1417. EmitVectorRoundOpF(context, FPRoundingMode.TowardsZero);
  1418. }
  1419. else
  1420. {
  1421. EmitVectorUnaryOpF(context, (op1) =>
  1422. {
  1423. return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
  1424. });
  1425. }
  1426. }
  1427. public static void Frsqrte_S(ArmEmitterContext context)
  1428. {
  1429. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1430. int sizeF = op.Size & 1;
  1431. if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
  1432. {
  1433. EmitScalarUnaryOpF(context, Intrinsic.X86Rsqrtss, 0);
  1434. }
  1435. else
  1436. {
  1437. EmitScalarUnaryOpF(context, (op1) =>
  1438. {
  1439. return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
  1440. });
  1441. }
  1442. }
  1443. public static void Frsqrte_V(ArmEmitterContext context)
  1444. {
  1445. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1446. int sizeF = op.Size & 1;
  1447. if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
  1448. {
  1449. EmitVectorUnaryOpF(context, Intrinsic.X86Rsqrtps, 0);
  1450. }
  1451. else
  1452. {
  1453. EmitVectorUnaryOpF(context, (op1) =>
  1454. {
  1455. return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
  1456. });
  1457. }
  1458. }
  1459. public static void Frsqrts_S(ArmEmitterContext context) // Fused.
  1460. {
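            // FPRSqrtStepFused computes (3 - n * m) / 2. The SSE fast path evaluates
            // 0.5 * (3 - n * m) directly (likewise in Frsqrts_V below); special cases are
            // left to the soft-float fallback.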
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetScalar(context, 0.5f);
                    Operand maskThree = X86GetScalar(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetScalar(context, 0.5d);
                    Operand maskThree = X86GetScalar(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }

        public static void Frsqrts_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5f);
                    Operand maskThree = X86GetAllElements(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5d);
                    Operand maskThree = X86GetAllElements(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }

        public static void Fsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsqrt_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsub_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }

        public static void Fsub_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }

        public static void Mla_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Add);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mla_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.None);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Mul_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Neg_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Neg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
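                // Negate as 0 - x using the packed integer subtract for this element size.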
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }

        public static void Pmull_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UsePclmulqdq && op.Size == 3)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
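                // PCLMULQDQ imm8: 0x00 multiplies the low qwords (PMULL), 0x11 the high
                // qwords (PMULL2).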
                int imm8 = op.RegisterSize == RegisterSize.Simd64 ? 0b0000_0000 : 0b0001_0001;
                Operand res = context.AddIntrinsic(Intrinsic.X86Pclmulqdq, n, m, Const(imm8));
                context.Copy(GetVec(op.Rd), res);
            }
            else if (Optimizations.UseSse41)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    n = context.VectorZeroUpper64(n);
                    m = context.VectorZeroUpper64(m);
                }
                else /* if (op.RegisterSize == RegisterSize.Simd128) */
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Operand res = context.VectorZero();
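                // Carry-less multiply, one bit of n at a time: broadcast bit i of each
                // element of n into a lane-wide mask, AND it with m << i, and XOR the
                // result into the accumulator. The byte case widens to 16-bit lanes first;
                // the 64-bit case shifts across the full 128-bit register.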
                if (op.Size == 0)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, n);
                    m = context.AddIntrinsic(Intrinsic.X86Pmovzxbw, m);
                    for (int i = 0; i < 8; i++)
                    {
                        Operand mask = context.AddIntrinsic(Intrinsic.X86Psllw, n, Const(15 - i));
                        mask = context.AddIntrinsic(Intrinsic.X86Psraw, mask, Const(15));
                        Operand tmp = context.AddIntrinsic(Intrinsic.X86Psllw, m, Const(i));
                        tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);
                        res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                    }
                }
                else /* if (op.Size == 3) */
                {
                    Operand zero = context.VectorZero();
                    for (int i = 0; i < 64; i++)
                    {
                        Operand mask = context.AddIntrinsic(Intrinsic.X86Movlhps, n, n);
                        mask = context.AddIntrinsic(Intrinsic.X86Psllq, mask, Const(63 - i));
                        mask = context.AddIntrinsic(Intrinsic.X86Psrlq, mask, Const(63));
                        mask = context.AddIntrinsic(Intrinsic.X86Psubq, zero, mask);
                        Operand tmp = EmitSse2Sll_128(context, m, i);
                        tmp = context.AddIntrinsic(Intrinsic.X86Pand, tmp, mask);
                        res = context.AddIntrinsic(Intrinsic.X86Pxor, res, tmp);
                    }
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res;
                if (op.Size == 0)
                {
                    res = context.VectorZero();
                    int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 8;
                    for (int index = 0; index < 8; index++)
                    {
                        Operand ne = context.VectorExtract8(n, part + index);
                        Operand me = context.VectorExtract8(m, part + index);
                        Operand de = EmitPolynomialMultiply(context, ne, me, 8);
                        res = EmitVectorInsert(context, res, de, index, 1);
                    }
                }
                else /* if (op.Size == 3) */
                {
                    int part = op.RegisterSize == RegisterSize.Simd64 ? 0 : 1;
                    Operand ne = context.VectorExtract(OperandType.I64, n, part);
                    Operand me = context.VectorExtract(OperandType.I64, m, part);
                    res = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.PolynomialMult64_128)), ne, me);
                }
                context.Copy(GetVec(op.Rd), res);
            }
        }

        public static void Raddhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
        }

        public static void Rsubhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
        }

        public static void Saba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: true);
        }

        public static void Saddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
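                // RegisterSize.Simd128 indicates the <op>2 form: the source elements live
                // in the upper 64 bits, so shift them down before widening.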
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: false);
        }

        public static void Saddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Saddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Shadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size > 0)
            {
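                // Halving add without widening: (n & m) + ((n ^ m) >> 1), with an
                // arithmetic shift, equals (n + m) >> 1 but cannot overflow the element.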
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;
                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, res, res2);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Add(op1, op2), Const(1));
                });
            }
        }

        public static void Shsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size < 2)
            {
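                // Signed halving subtract via the unsigned PAVG trick: biasing both inputs
                // by 0x80.. maps signed order onto unsigned, and since PAVG rounds up,
                // n' - pavg(n', m') = (n - m) >> 1.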
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
                Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, nPlusMask, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Smax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic maxInst = X86PmaxsInstruction[op.Size];
                Operand res = context.AddIntrinsic(maxInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
        }

        public static void Smin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic minInst = X86PminsInstruction[op.Size];
                Operand res = context.AddIntrinsic(minInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
        }

        public static void Smlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Smull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Sqabs_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqabs_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.ScalarSx);
        }

        public static void Sqdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.VectorSx);
        }

        public static void Sqneg_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqneg_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqrdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.ScalarSx);
        }

        public static void Sqrdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.VectorSx);
        }

        public static void Sqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
        }

        public static void Sqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
        }

        public static void Sqxtun_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
        }

        public static void Sqxtun_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
        }

        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size < 2)
            {
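                // Signed rounding halving add: bias the inputs down by 0x80.., take the
                // unsigned PAVG (which computes (x + y + 1) >> 1), then add the bias back.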
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, mask, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);
                    res = context.Add(res, Const(1L));
                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }

        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovsxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                Operand res = context.AddIntrinsic(subInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: true);
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: false);
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size > 0)
            {
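                // Unsigned halving add, same identity as Shadd_V but with a logical shift:
                // (n & m) + ((n ^ m) >> 1).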
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;
                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
                Intrinsic addInst = X86PaddInstruction[op.Size];
                res = context.AddIntrinsic(addInst, res, res2);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }

        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size < 2)
            {
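                // For unsigned elements, n - pavg(n, m) = (n - m) >> 1, since PAVG
                // computes (n + m + 1) >> 1.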
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size];
                res = context.AddIntrinsic(subInst, n, res);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic maxInst = X86PmaxuInstruction[op.Size];
                Operand res = context.AddIntrinsic(maxInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic minInst = X86PminuInstruction[op.Size];
                Operand res = context.AddIntrinsic(minInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic addInst = X86PaddInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
                Operand res = context.AddIntrinsic(mullInst, n, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
        }

        public static void Urhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            if (Optimizations.UseSse2 && op.Size < 2)
            {
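                // PAVG directly implements the unsigned rounding halving add:
                // (n + m + 1) >> 1.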
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
                Operand res = context.AddIntrinsic(avgInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);
                    res = context.Add(res, Const(1L));
                    return context.ShiftRightUI(res, Const(1));
                });
            }
        }

        public static void Usqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }
                Intrinsic movInst = X86PmovzxInstruction[op.Size];
                m = context.AddIntrinsic(movInst, m);
                Intrinsic subInst = X86PsubInstruction[op.Size + 1];
                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));
            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }

        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int pairs = op.GetPairsCount() >> op.Size;
            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;
                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);
                Operand e = context.Add(ne0, ne1);
                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);
                    e = context.Add(e, de);
                }
                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
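            // Computes (2 * n * m) >> eSize: without rounding this is just
            // (n * m) >> (eSize - 1); with rounding, 2^(eSize - 1) is added before the
            // final shift. The int.MinValue check below handles the one corner case
            // (eSize == 32, n == m == -2^31) where doubling wraps the 64-bit intermediate;
            // negating restores the +2^31 result the saturating wrapper expects to see.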
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int eSize = 8 << op.Size;
            Operand res = context.Multiply(n, m);
            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);
                res = context.ShiftLeft(res, Const(1));
                res = context.Add(res, Const(roundConst));
                res = context.ShiftRightSI(res, Const(eSize));
                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));
                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }
            return res;
        }

        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
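            // Applies emit on the widened elements and keeps only the high eSize bits of
            // each result, writing them to the lower half of Rd, or to the upper half for
            // the <op>2 form (RegisterSize.Simd128).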
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;
            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
            Operand d = GetVec(op.Rd);
            Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
            long roundConst = 1L << (eSize - 1);
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);
                Operand de = emit(ne, me);
                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }
                de = context.ShiftRightUI(de, Const(eSize));
                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }
            context.Copy(d, res);
        }

        private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
            Operand cmp = signed
                ? context.ICompareGreaterOrEqual  (op1, op2)
                : context.ICompareGreaterOrEqualUI(op1, op2);
            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
            Operand cmp = signed
                ? context.ICompareLessOrEqual  (op1, op2)
                : context.ICompareLessOrEqualUI(op1, op2);
            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static void EmitScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
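            // SSE4.1 ROUNDSS/ROUNDSD with the immediate round control derived from the
            // ARM rounding mode by X86GetRoundControl.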
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;
            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        private static void EmitVectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand n = GetVec(op.Rn);
            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;
            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void EmitSse2VectorIsNaNOpF(
            ArmEmitterContext context,
            Operand opF,
            out Operand qNaNMask,
            out Operand sNaNMask,
            bool? isQNaN = null)
        {
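            // Builds per-lane masks of the NaN elements of opF: an unordered compare of
            // opF with itself flags every NaN, and the quiet bit (mantissa MSB: bit 22
            // for single precision, bit 51 for double) splits them into quiet and
            // signaling sets.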
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
            if ((op.Size & 1) == 0)
            {
                const int QBit = 22;
                Operand qMask = X86GetAllElements(context, 1 << QBit);
                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));
                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));
                qNaNMask = isQNaN == null ||  (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andps,  mask2, mask1) : null;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnps, mask2, mask1) : null;
            }
            else /* if ((op.Size & 1) == 1) */
            {
                const int QBit = 51;
                Operand qMask = X86GetAllElements(context, 1L << QBit);
                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));
                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));
                qNaNMask = isQNaN == null ||  (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andpd,  mask2, mask1) : null;
                sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnpd, mask2, mask1) : null;
            }
        }

        public static Operand EmitSse41ProcessNaNsOpF(
            ArmEmitterContext context,
            Func2I emit,
            bool scalar,
            Operand n = null,
            Operand m = null)
        {
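            // Mirrors the ARM FPProcessNaNs operand priority: n's sNaN wins over m's
            // sNaN, which wins over n's qNaN, then m's qNaN. The selected NaN is quieted
            // by OR-ing in the quiet bit, and is only used where the ordered compare
            // shows that at least one input was a NaN.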
  2826. Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
  2827. Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));
  2828. EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out Operand nSNaNMask);
  2829. EmitSse2VectorIsNaNOpF(context, mCopy, out _, out Operand mSNaNMask, isQNaN: false);
  2830. int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
  2831. if (sizeF == 0)
  2832. {
  2833. const int QBit = 22;
  2834. Operand qMask = scalar ? X86GetScalar(context, 1 << QBit) : X86GetAllElements(context, 1 << QBit);
  2835. Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
  2836. resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
  2837. Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, nCopy, resNaNMask);
  2838. resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
  2839. Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmpps, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
  2840. Operand res = context.AddIntrinsic(Intrinsic.X86Blendvps, resNaN, emit(nCopy, mCopy), resMask);
  2841. if (n != null || m != null)
  2842. {
  2843. return res;
  2844. }
  2845. if (scalar)
  2846. {
  2847. res = context.VectorZeroUpper96(res);
  2848. }
  2849. else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
  2850. {
  2851. res = context.VectorZeroUpper64(res);
  2852. }
  2853. context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
  2854. return null;
  2855. }
  2856. else /* if (sizeF == 1) */
  2857. {
  2858. const int QBit = 51;
  2859. Operand qMask = scalar ? X86GetScalar(context, 1L << QBit) : X86GetAllElements(context, 1L << QBit);
  2860. Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
  2861. resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
  2862. Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, nCopy, resNaNMask);
  2863. resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
  2864. Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmppd, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
  2865. Operand res = context.AddIntrinsic(Intrinsic.X86Blendvpd, resNaN, emit(nCopy, mCopy), resMask);
  2866. if (n != null || m != null)
  2867. {
  2868. return res;
  2869. }
  2870. if (scalar)
  2871. {
  2872. res = context.VectorZeroUpper64(res);
  2873. }
  2874. context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
  2875. return null;
  2876. }
  2877. }
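
        // Evaluates the wrapped operation between the EnterFtzAndDazModes/ExitFtzAndDazModes helpers, so the
        // host flush-to-zero and denormals-are-zero state is switched around it and restored afterwards.
        // Returns the result when operands are supplied by the caller, otherwise writes Rd and returns null.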
        public static Operand EmitSseOrAvxHandleFzModeOpF(
            ArmEmitterContext context,
            Func2I emit,
            bool scalar,
            Operand n = null,
            Operand m = null)
        {
            Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
            Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));

            EmitSseOrAvxEnterFtzAndDazModesOpF(context, out Operand isTrue);

            Operand res = emit(nCopy, mCopy);

            EmitSseOrAvxExitFtzAndDazModesOpF(context, isTrue);

            if (n != null || m != null)
            {
                return res;
            }

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
            }
            else /* if (sizeF == 1) */
            {
                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }
            }

            context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

            return null;
        }
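
        // Max/min with deterministic signed-zero handling: the magnitude comes from MAXPS/MINPS (sign
        // stripped), while the sign bit is recombined separately (AND of the input signs for max, so +0
        // beats -0; OR for min, so -0 beats +0).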
        private static Operand EmitSse2VectorMaxMinOpF(ArmEmitterContext context, Operand n, Operand m, bool isMax)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

            if ((op.Size & 1) == 0)
            {
                Operand mask = X86GetAllElements(context, -0f);

                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxps : Intrinsic.X86Minps, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);

                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andps, mask, resSign);

                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
            else /* if ((op.Size & 1) == 1) */
            {
                Operand mask = X86GetAllElements(context, -0d);

                Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, n, m);
                res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);

                Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
                resSign = context.AddIntrinsic(Intrinsic.X86Andpd, mask, resSign);

                return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
            }
        }
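
        // FMaxNum/FMinNum-style max/min: when exactly one input is a quiet NaN, that input is first replaced
        // with the "losing" infinity (-Inf for max, +Inf for min) so the numeric operand wins; the result is
        // then computed through the NaN-propagating, FZ-aware max/min path above.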
        private static Operand EmitSse41MaxMinNumOpF(
            ArmEmitterContext context,
            bool isMaxNum,
            bool scalar,
            Operand n = null,
            Operand m = null)
        {
            Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
            Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));

            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out _, isQNaN: true);
            EmitSse2VectorIsNaNOpF(context, mCopy, out Operand mQNaNMask, out _, isQNaN: true);

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);

                if (n != null || m != null)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return null;
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSseOrAvxHandleFzModeOpF(context, (op1, op2) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                    }, scalar: scalar, op1, op2);
                }, scalar: scalar, nCopy, mCopy);

                if (n != null || m != null)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return null;
            }
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }
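
        // Element-wise integer multiply, optionally accumulated into or subtracted from Rd. SSE has no byte
        // multiply, so the size 0 case is built from two PMULLW results: the high bytes of each 16-bit lane
        // are multiplied separately and blended back over the low-byte products.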
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res;

            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
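
        // Signed absolute difference: a PCMPGT mask selects, per element, whichever of (n - m) and (m - n)
        // is non-negative. The long variants receive already-widened inputs, hence the size bump.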
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);
            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
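
        // Unsigned absolute difference: since SSE has no unsigned compare, PMAXU + PCMPEQ derive an
        // "n > m" mask, which then selects between (n - m) and (m - n) per element.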
        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
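
        // Shifts the low 64-bit value of op left into a full 128-bit result: the bits shifted out of the
        // low qword are recovered into the high qword with PSLLDQ + PSRLQ, then ORed with the shifted low
        // qword.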
        private static Operand EmitSse2Sll_128(ArmEmitterContext context, Operand op, int shift)
        {
            // The upper part of op is assumed to be zero.
            Debug.Assert(shift >= 0 && shift < 64);

            if (shift == 0)
            {
                return op;
            }

            Operand high = context.AddIntrinsic(Intrinsic.X86Pslldq, op, Const(8));
            high = context.AddIntrinsic(Intrinsic.X86Psrlq, high, Const(64 - shift));

            Operand low = context.AddIntrinsic(Intrinsic.X86Psllq, op, Const(shift));

            return context.AddIntrinsic(Intrinsic.X86Por, high, low);
        }
    }
}