// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand res = context.AddIntrinsic(addInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
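
        // Note on the SSE2 path above: X86PaddInstruction is a size-indexed table selecting
        // PADDB/PADDW/PADDD/PADDQ for op.Size 0..3, so a single lookup covers every integer
        // element width. For 64-bit (Simd64) operations the upper half of the 128-bit result
        // is zeroed, matching the AArch64 rule that writes to a 64-bit vector register clear
        // its high 64 bits.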

        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }

        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);
            Operand res = context.Add(ne0, ne1);
            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(new _U64_U64_S32(SoftFallback.CountLeadingSigns), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de;
                if (eSize == 64)
                {
                    de = context.CountLeadingZeros(ne);
                }
                else
                {
                    de = context.Call(new _U64_U64_S32(SoftFallback.CountLeadingZeros), ne, Const(eSize));
                }
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
                Operand de;
                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = context.Call(new _U64_U64(SoftFallback.CountSetBits8), ne);
                }
                res = EmitVectorInsert(context, res, de, index, 0);
            }
            context.Copy(GetVec(op.Rd), res);
        }
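
        // Note on Cnt_V: CNT only exists for byte elements, so the element count is simply
        // 8 or 16 depending on the register size. With POPCNT available each zero-extended
        // byte is counted in a single instruction; otherwise SoftFallback.CountSetBits8
        // counts the bits in software.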

        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetScalar(context, -0f);
                    res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetScalar(context, -0d);
                    res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, res);
                });
            }
        }
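
        // Note on the ANDN trick used by Fabd/Fabs: -0f (or -0d) is a vector whose only set
        // bit per lane is the sign bit, and ANDNPS/ANDNPD compute (~first) & second. With the
        // sign-bit mask as the first operand this clears the sign bit of every lane, giving a
        // branchless floating-point abs().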

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetAllElements(context, -0f);
                    res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));
                    Operand mask = X86GetAllElements(context, -0d);
                    res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Abs, Math.Abs, op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                OperandType type = sizeF != 0 ? OperandType.FP64
                                              : OperandType.FP32;
                Operand ne0 = context.VectorExtract(type, GetVec(op.Rn), 0);
                Operand ne1 = context.VectorExtract(type, GetVec(op.Rn), 1);
                Operand res = EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, ne0, ne1);
                context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
            }
        }
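
        // Note on the SSE3 path in Faddp_S: HADDPS/HADDPD with Rn as both sources leaves
        // n0 + n1 in element 0, which is exactly the scalar pairwise sum; the remaining
        // elements are then zeroed to produce the scalar result.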

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorPairwiseOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPAdd, SoftFloat64.FPAdd, op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPDiv, SoftFloat64.FPDiv, op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPDiv, SoftFloat64.FPDiv, op1, op2);
                });
            }
        }

        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }
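
        // Note on Fmadd_S (and the other emitters marked "// Fused."): the SSE2 path lowers
        // the fused multiply-add to a separate MULSS/MULSD followed by ADDSS/ADDSD, which
        // rounds twice. That is only acceptable because the path is gated on FastFP; with
        // FastFP disabled, the SoftFloat FPMulAdd call preserves the single-rounding fused
        // semantics.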

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Maxss, Intrinsic.X86Maxsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMaxNum, SoftFloat64.FPMaxNum, op1, op2);
            });
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMaxNum, SoftFloat64.FPMaxNum, op1, op2);
            });
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorPairwiseOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMax, SoftFloat64.FPMax, op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Minss, Intrinsic.X86Minsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMinNum, SoftFloat64.FPMinNum, op1, op2);
            });
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMinNum, SoftFloat64.FPMinNum, op1, op2);
            });
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorPairwiseOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMin, SoftFloat64.FPMin, op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }

        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulAdd, SoftFloat64.FPMulAdd, op1, op2, op3);
                });
            }
        }
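
        // Note on the shuffle masks in the by-element emitters: the SHUFPS immediate holds
        // four 2-bit source selectors, so replicating op.Index into bits [1:0], [3:2], [5:4]
        // and [7:6] broadcasts that single float lane across the vector. SHUFPD uses two
        // 1-bit selectors, hence the narrower op.Index | op.Index << 1 mask for doubles.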

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMulSub, SoftFloat64.FPMulSub, op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPMul, SoftFloat64.FPMul, op1, op2);
                });
            }
        }

        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPMulX, SoftFloat64.FPMulX, op1, op2);
            });
        }
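
        // Note on Fmulx: FMULX differs from FMUL only in the special cases — per the ARM ARM,
        // 0.0 times an infinity returns 2.0 with the XORed sign instead of a NaN. x86 has no
        // equivalent instruction, which is presumably why all four Fmulx emitters go through
        // the soft-float helpers unconditionally.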

        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }
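
        // Note on Fneg: XORPS/XORPD against the -0.0 sign-bit mask flips the sign bit of
        // every lane, negating without touching the exponent or mantissa bits.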

        public static void Fnmadd_S(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int sizeF = op.Size & 1;
            OperandType type = sizeF != 0 ? OperandType.FP64
                                          : OperandType.FP32;
            Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
            Operand me = context.VectorExtract(type, GetVec(op.Rm), 0);
            Operand ae = context.VectorExtract(type, GetVec(op.Ra), 0);
            Operand res = context.Subtract(context.Multiply(context.Negate(ne), me), ae);
            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
        }

        public static void Fnmsub_S(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
            int sizeF = op.Size & 1;
            OperandType type = sizeF != 0 ? OperandType.FP64
                                          : OperandType.FP32;
            Operand ne = context.VectorExtract(type, GetVec(op.Rn), 0);
            Operand me = context.VectorExtract(type, GetVec(op.Rm), 0);
            Operand ae = context.VectorExtract(type, GetVec(op.Ra), 0);
            Operand res = context.Subtract(context.Multiply(ne, me), ae);
            context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }

        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rcpss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipEstimate, SoftFloat64.FPRecipEstimate, op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rcpps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipEstimate, SoftFloat64.FPRecipEstimate, op1);
                });
            }
        }
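
        // Note on Frecpe (and Frsqrte below): RCPSS/RCPPS and RSQRTSS/RSQRTPS only exist for
        // single precision, hence the sizeF == 0 gate. The x86 estimates also have a
        // different precision than the ARM estimate tables, so the substitution is gated on
        // FastFP as well; the precise path uses the SoftFloat estimate routines.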

        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipStepFused, SoftFloat64.FPRecipStepFused, op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRecipStepFused, SoftFloat64.FPRecipStepFused, op1, op2);
                });
            }
        }
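
        // Note on Frecps: FRECPS computes 2.0 - n * m, the correction factor of one
        // Newton-Raphson iteration for the reciprocal (x' = x * (2 - d * x)). The SSE2 path
        // mirrors it with a multiply followed by a subtract from a broadcast 2.0; as with the
        // other fused ops, this unfused lowering is FastFP-only.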

        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, SoftFloat32.FPRecpX, SoftFloat64.FPRecpX, op1);
            });
        }

        public static void Frinta_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinti_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1);
                });
            }
        }

        public static void Frintm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Floor, Math.Floor, op1);
                });
            }
        }

        public static void Frintn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1);
                });
            }
        }

        public static void Frintp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Ceiling, Math.Ceiling, op1);
                });
            }
        }

        public static void Frintx_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintx_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(new _F32_F32(SoftFallback.RoundF), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(new _F64_F64(SoftFallback.Round), op1);
                }
            });
        }

        public static void Frintz_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Truncate, Math.Truncate, op1);
                });
            }
        }

        public static void Frintz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, MathF.Truncate, Math.Truncate, op1);
                });
            }
        }
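
        // Note on the Frint* family: the UseSse41 gates exist because ROUNDSS/ROUNDPS (and
        // the double variants) are SSE4.1 instructions whose immediate encodes the rounding
        // mode directly, so EmitScalarRoundOpF/EmitVectorRoundOpF can presumably lower each
        // FRINT variant to a single instruction. Without SSE4.1 the emitters fall back to the
        // matching System.Math/MathF rounding helpers, while Frinti/Frintx (round using the
        // current FPCR rounding mode) always go through SoftFallback.Round/RoundF.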

        public static void Frsqrte_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rsqrtss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtEstimate, SoftFloat64.FPRSqrtEstimate, op1);
                });
            }
        }

        public static void Frsqrte_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rsqrtps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtEstimate, SoftFloat64.FPRSqrtEstimate, op1);
                });
            }
        }

        public static void Frsqrts_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetScalar(context, 0.5f);
                    Operand maskThree = X86GetScalar(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetScalar(context, 0.5d);
                    Operand maskThree = X86GetScalar(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtStepFused, SoftFloat64.FPRSqrtStepFused, op1, op2);
                });
            }
        }

        public static void Frsqrts_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5f);
                    Operand maskThree = X86GetAllElements(context, 3f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5d);
                    Operand maskThree = X86GetAllElements(context, 3d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPRSqrtStepFused, SoftFloat64.FPRSqrtStepFused, op1, op2);
                });
            }
        }
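
        // Note on Frsqrts: FRSQRTS computes (3.0 - n * m) / 2.0, the correction factor of one
        // Newton-Raphson iteration for the reciprocal square root
        // (x' = x * (3 - d * x * x) / 2). The SSE2 path builds it from a multiply, a subtract
        // from 3.0 and a multiply by 0.5.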

        public static void Fsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSqrt, SoftFloat64.FPSqrt, op1);
                });
            }
        }

        public static void Fsqrt_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSqrt, SoftFloat64.FPSqrt, op1);
                });
            }
        }

        public static void Fsub_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                });
            }
        }

        public static void Fsub_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, SoftFloat32.FPSub, SoftFloat64.FPSub, op1, op2);
                });
            }
        }

        public static void Mla_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.Add);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mla_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.Subtract);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41Mul_AddSub(context, AddSub.None);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Mul_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }
  1385. public static void Neg_S(ArmEmitterContext context)
  1386. {
  1387. EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
  1388. }
  1389. public static void Neg_V(ArmEmitterContext context)
  1390. {
  1391. if (Optimizations.UseSse2)
  1392. {
  1393. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1394. Intrinsic subInst = X86PsubInstruction[op.Size];
  1395. Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));
  1396. if (op.RegisterSize == RegisterSize.Simd64)
  1397. {
  1398. res = context.VectorZeroUpper64(res);
  1399. }
  1400. context.Copy(GetVec(op.Rd), res);
  1401. }
  1402. else
  1403. {
  1404. EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
  1405. }
  1406. }
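        // {R}ADDHN / {R}SUBHN: add or subtract at double width, optionally
        // round by adding 1 << (eSize - 1), then narrow by keeping the high
        // half of each element (see EmitHighNarrow below).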
        public static void Raddhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
        }

        public static void Rsubhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
        }

        public static void Saba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41Sabd(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

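        // Long-form (widening) emitters: the second-part ("2") variants read
        // the upper 64 bits of the sources, so the vectors are shifted down
        // with PSRLDQ before being sign- or zero-extended via PMOVSX/PMOVZX.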
        public static void Sabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41Sabd(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: true);
        }

        public static void Saddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: false);
        }

        public static void Saddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Saddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

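        // SHADD: (n + m) >> 1 without overflowing the element. Relies on the
        // carry-save identity n + m == 2 * (n & m) + (n ^ m), so the halved
        // sum is (n & m) + ((n ^ m) >> 1) using an arithmetic shift. Bytes are
        // excluded (op.Size > 0) since x86 lacks an 8-bit vector shift.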
        public static void Shadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res  = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Add(op1, op2), Const(1));
                });
            }
        }

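        // SHSUB: (n - m) >> 1. PAVGB/PAVGW compute the unsigned rounding
        // average (a + b + 1) >> 1; biasing both operands by 0x80 per element
        // maps signed values into the unsigned domain, and n' - avg(n', m')
        // then yields floor((n - m) / 2), the bias cancelling out in the
        // subtraction.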
        public static void Shsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
                Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, nPlusMask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Smax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxsInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Max);

                EmitVectorBinaryOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Smaxp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Max);

            EmitVectorPairwiseOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Smaxv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Max);

            EmitVectorAcrossVectorOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Smin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminsInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _S64_S64_S64(Math.Min);

                EmitVectorBinaryOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Sminp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Min);

            EmitVectorPairwiseOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Sminv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _S64_S64_S64(Math.Min);

            EmitVectorAcrossVectorOpSx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

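        // SMLAL/SMLSL: widen both sources with PMOVSX, multiply at the wider
        // element size (PMULLW/PMULLD), then accumulate into Rd with a
        // double-width add or subtract.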
        public static void Smlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Smull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Sqabs_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqabs_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.ScalarSx);
        }

        public static void Sqdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.VectorSx);
        }

        public static void Sqneg_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqneg_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqrdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.ScalarSx);
        }

        public static void Sqrdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.VectorSx);
        }

        public static void Sqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
        }

        public static void Sqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
        }

        public static void Sqxtun_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
        }

        public static void Sqxtun_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
        }

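        // SRHADD: signed rounding halving add, (n + m + 1) >> 1. PAVG already
        // rounds, so the operands are biased into the unsigned domain by
        // subtracting 0x80 per element, averaged, and the bias added back
        // afterwards (add and subtract of 0x80 are equivalent mod 2^eSize).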
        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, mask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }

        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41Uabd(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41Uabd(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: true);
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: false);
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            // Same carry-save trick as Shadd_V, with a logical shift for
            // unsigned elements.
            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res  = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }

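        // UHSUB: (n - m) >> 1 for unsigned elements. PAVG rounds up, so
        // n - pavg(n, m) == floor((n - m) / 2) directly; no bias is needed.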
        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, n, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxuInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Max);

                EmitVectorBinaryOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Max);

            EmitVectorPairwiseOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Max);

            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminuInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                Delegate dlg = new _U64_U64_U64(Math.Min);

                EmitVectorBinaryOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Min);

            EmitVectorPairwiseOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            Delegate dlg = new _U64_U64_U64(Math.Min);

            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Call(dlg, op1, op2));
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
        }

        public static void Urhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                // PAVG is exactly the unsigned rounding halving add.
                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightUI(res, Const(1));
                });
            }
        }

        public static void Usqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            // Branchless abs: keep the value when it is non-negative,
            // otherwise select its negation.
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));

            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }

        // {S|U}ADDLP / {S|U}ADALP: sum adjacent element pairs into
        // double-width elements, optionally accumulating into Rd.
        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int pairs = op.GetPairsCount() >> op.Size;

            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;

                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex,     op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);

                Operand e = context.Add(ne0, ne1);

                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);

                    e = context.Add(e, de);
                }

                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }

            context.Copy(GetVec(op.Rd), res);
        }

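        // SQDMULH/SQRDMULH helper: high half of 2 * n * m, i.e. the product
        // shifted right by eSize - 1, optionally rounded by adding
        // 1 << (eSize - 1) before the shift. The int.MinValue fixup flips the
        // one rounded case that wraps in 64 bits so the caller can detect it;
        // saturation itself is applied by EmitSaturatingBinaryOp.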
        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int eSize = 8 << op.Size;

            Operand res = context.Multiply(n, m);

            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);

                res = context.ShiftLeft(res, Const(1));

                res = context.Add(res, Const(roundConst));

                res = context.ShiftRightSI(res, Const(eSize));

                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));

                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }

            return res;
        }

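        // ADDHN/SUBHN family helper: operate at double width, optionally add
        // the rounding constant, then keep the high half of each element. The
        // second-part ("2") variants write the upper half of Rd instead.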
        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;

            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

            Operand res = part == 0 ? context.VectorZero() : context.Copy(GetVec(op.Rd));

            long roundConst = 1L << (eSize - 1);

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);

                Operand de = emit(ne, me);

                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }

                de = context.ShiftRightUI(de, Const(eSize));

                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        public static void EmitScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            // Scalar results keep only the low element: zero the upper
            // 64 bits for doubles, the upper 96 bits for singles.
            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        public static void EmitVectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }

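        // MUL/MLA/MLS helper. x86 has no packed 8-bit multiply, so for bytes
        // the odd lanes are multiplied via PSRLW/PMULLW/PSLLW and the even
        // lanes via a plain PMULLW, then merged with PBLENDVB using the
        // 0x00FF00FF byte mask; 16/32-bit elements map to PMULLW/PMULLD
        // directly.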
        private static void EmitSse41Mul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res = null;

            if (op.Size == 0)
            {
                // Odd byte lanes: products land in the high byte of each word.
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);

                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                // Even byte lanes: the low byte of each word product.
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                switch (op.Size)
                {
                    case 0: res = context.AddIntrinsic(Intrinsic.X86Paddb, d, res); break;
                    case 1: res = context.AddIntrinsic(Intrinsic.X86Paddw, d, res); break;
                    case 2: res = context.AddIntrinsic(Intrinsic.X86Paddd, d, res); break;
                    case 3: res = context.AddIntrinsic(Intrinsic.X86Paddq, d, res); break;
                }
            }
            else if (addSub == AddSub.Subtract)
            {
                switch (op.Size)
                {
                    case 0: res = context.AddIntrinsic(Intrinsic.X86Psubb, d, res); break;
                    case 1: res = context.AddIntrinsic(Intrinsic.X86Psubw, d, res); break;
                    case 2: res = context.AddIntrinsic(Intrinsic.X86Psubd, d, res); break;
                    case 3: res = context.AddIntrinsic(Intrinsic.X86Psubq, d, res); break;
                }
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }

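        // Signed absolute difference: PCMPGT builds an n > m mask, which
        // selects between n - m and m - n so the result is always the
        // positive difference.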
        private static void EmitSse41Sabd(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

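        // Unsigned absolute difference: there is no unsigned PCMPGT, so the
        // n > m mask is derived from PMAXU and PCMPEQ (max == m means m >= n)
        // and inverted with PANDN against an all-ones vector.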
        private static void EmitSse41Uabd(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res  = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res  = context.AddIntrinsic(Intrinsic.X86Pand,  cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

    }
}