// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand res = context.AddIntrinsic(addInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
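
        // ADDHN: adds corresponding elements of n and m, then narrows each sum to its high half (no rounding).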
        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }
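
        // ADDP (scalar): sums the two elements of the source vector pair into element 0 of the destination.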
        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);

            Operand res = context.Add(ne0, ne1);

            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
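
        // CLS/CLZ: per-element count of leading sign bits / leading zero bits, via the SoftFallback helpers.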
        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));

                res = EmitVectorInsert(context, res, de, index, op.Size);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);

                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));

                res = EmitVectorInsert(context, res, de, index, op.Size);
            }

            context.Copy(GetVec(op.Rd), res);
        }
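
        // CNT: per-byte population count; uses the x86 POPCNT instruction when available, otherwise a software bit count.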
        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);

                Operand de;

                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountSetBits8)), ne);
                }

                res = EmitVectorInsert(context, res, de, index, 0);
            }

            context.Copy(GetVec(op.Rd), res);
        }
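
        // FABD: floating-point absolute difference, |n - m|.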
        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));

                    res = EmitFloatAbs(context, res, false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);

                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                OperandType type = sizeF != 0 ? OperandType.FP64 : OperandType.FP32;

                Operand ne0 = context.VectorExtract(type, GetVec(op.Rn), 0);
                Operand ne1 = context.VectorExtract(type, GetVec(op.Rn), 1);

                Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), ne0, ne1);

                context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

                    Intrinsic addInst = (op.Size & 1) == 0 ? Intrinsic.X86Addps : Intrinsic.X86Addpd;

                    return context.AddIntrinsic(addInst, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }
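
        // Note: the operations marked "// Fused." below are architecturally fused multiply-adds; the SSE2
        // fast-FP paths emit a separate multiply followed by an add, so the intermediate product is rounded,
        // unlike a true FMA.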
        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: true);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMaxNum)), op1, op2);
                });
            }
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                // The nested lambda parameters are renamed (op3, op4) so they do not shadow the enclosing ones.
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op3, op4) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op3, op4, isMax: true);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op3, op4) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op3, op4, isMax: true);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMax)), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                }, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: false);
                }, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMinNum)), op1, op2);
                });
            }
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op3, op4) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op3, op4, isMax: false);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse2VectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return EmitSse41ProcessNaNsOpF(context, (op3, op4) =>
                    {
                        return EmitSse2VectorMaxMinOpF(context, op3, op4, isMax: false);
                    }, scalar: false, op1, op2);
                });
            }
            else
            {
                EmitVectorAcrossVectorOpF(context, (op1, op2) =>
                {
                    return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMin)), op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }
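
        // FMLA/FMLS/FMUL by element: the SHUFPS/SHUFPD mask broadcasts element op.Index of m to every lane
        // before the multiply.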
        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);

                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;

                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));

                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }
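
        // FMULX: multiply with ARM's special-case rule (+/-0 times +/-infinity returns +/-2.0);
        // always handled by the soft-float helper.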
        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
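
        // FNEG: on the SSE2 path, negates by XORing the sign bit with a -0.0 mask.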
        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }
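
        // FNMADD computes -a - (n * m); FNMSUB computes -a + (n * m). On the SSE2 paths the addend is
        // negated by XORing its sign bit.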
        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);

                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);

                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);

                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);

                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);

                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);

                    res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);

                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }

        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rcpss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rcpps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }
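
        // FRECPS: fused reciprocal step, 2.0 - (n * m), used to refine a reciprocal estimate (Newton-Raphson).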
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
            });
        }

        public static void Frinta_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }
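
        // FRINTI: round to integral using the current (FPCR) rounding mode, via the SoftFallback helpers.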
        public static void Frinti_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            int sizeF = op.Size & 1;

            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }
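
        // FRINTM/FRINTN/FRINTP/FRINTZ: directed roundings (floor, to nearest even, ceiling, truncate),
        // lowered to the SSE4.1 round instructions when available.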
  1221. public static void Frintm_S(ArmEmitterContext context)
  1222. {
  1223. if (Optimizations.UseSse41)
  1224. {
  1225. EmitScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
  1226. }
  1227. else
  1228. {
  1229. EmitScalarUnaryOpF(context, (op1) =>
  1230. {
  1231. return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
  1232. });
  1233. }
  1234. }
  1235. public static void Frintm_V(ArmEmitterContext context)
  1236. {
  1237. if (Optimizations.UseSse41)
  1238. {
  1239. EmitVectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
  1240. }
  1241. else
  1242. {
  1243. EmitVectorUnaryOpF(context, (op1) =>
  1244. {
  1245. return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
  1246. });
  1247. }
  1248. }
  1249. public static void Frintn_S(ArmEmitterContext context)
  1250. {
  1251. if (Optimizations.UseSse41)
  1252. {
  1253. EmitScalarRoundOpF(context, FPRoundingMode.ToNearest);
  1254. }
  1255. else
  1256. {
  1257. EmitScalarUnaryOpF(context, (op1) =>
  1258. {
  1259. return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
  1260. });
  1261. }
  1262. }
  1263. public static void Frintn_V(ArmEmitterContext context)
  1264. {
  1265. if (Optimizations.UseSse41)
  1266. {
  1267. EmitVectorRoundOpF(context, FPRoundingMode.ToNearest);
  1268. }
  1269. else
  1270. {
  1271. EmitVectorUnaryOpF(context, (op1) =>
  1272. {
  1273. return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
  1274. });
  1275. }
  1276. }
  1277. public static void Frintp_S(ArmEmitterContext context)
  1278. {
  1279. if (Optimizations.UseSse41)
  1280. {
  1281. EmitScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
  1282. }
  1283. else
  1284. {
  1285. EmitScalarUnaryOpF(context, (op1) =>
  1286. {
  1287. return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
  1288. });
  1289. }
  1290. }
  1291. public static void Frintp_V(ArmEmitterContext context)
  1292. {
  1293. if (Optimizations.UseSse41)
  1294. {
  1295. EmitVectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
  1296. }
  1297. else
  1298. {
  1299. EmitVectorUnaryOpF(context, (op1) =>
  1300. {
  1301. return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
  1302. });
  1303. }
  1304. }
  1305. public static void Frintx_S(ArmEmitterContext context)
  1306. {
  1307. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1308. EmitScalarUnaryOpF(context, (op1) =>
  1309. {
  1310. if (op.Size == 0)
  1311. {
  1312. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
  1313. }
  1314. else /* if (op.Size == 1) */
  1315. {
  1316. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
  1317. }
  1318. });
  1319. }
  1320. public static void Frintx_V(ArmEmitterContext context)
  1321. {
  1322. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1323. int sizeF = op.Size & 1;
  1324. EmitVectorUnaryOpF(context, (op1) =>
  1325. {
  1326. if (sizeF == 0)
  1327. {
  1328. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
  1329. }
  1330. else /* if (sizeF == 1) */
  1331. {
  1332. return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
  1333. }
  1334. });
  1335. }
  1336. public static void Frintz_S(ArmEmitterContext context)
  1337. {
  1338. if (Optimizations.UseSse41)
  1339. {
  1340. EmitScalarRoundOpF(context, FPRoundingMode.TowardsZero);
  1341. }
  1342. else
  1343. {
  1344. EmitScalarUnaryOpF(context, (op1) =>
  1345. {
  1346. return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
  1347. });
  1348. }
  1349. }
  1350. public static void Frintz_V(ArmEmitterContext context)
  1351. {
  1352. if (Optimizations.UseSse41)
  1353. {
  1354. EmitVectorRoundOpF(context, FPRoundingMode.TowardsZero);
  1355. }
  1356. else
  1357. {
  1358. EmitVectorUnaryOpF(context, (op1) =>
  1359. {
  1360. return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
  1361. });
  1362. }
  1363. }
  1364. public static void Frsqrte_S(ArmEmitterContext context)
  1365. {
  1366. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1367. int sizeF = op.Size & 1;
  1368. if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
  1369. {
  1370. EmitScalarUnaryOpF(context, Intrinsic.X86Rsqrtss, 0);
  1371. }
  1372. else
  1373. {
  1374. EmitScalarUnaryOpF(context, (op1) =>
  1375. {
  1376. return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
  1377. });
  1378. }
  1379. }
  1380. public static void Frsqrte_V(ArmEmitterContext context)
  1381. {
  1382. OpCodeSimd op = (OpCodeSimd)context.CurrOp;
  1383. int sizeF = op.Size & 1;
  1384. if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
  1385. {
  1386. EmitVectorUnaryOpF(context, Intrinsic.X86Rsqrtps, 0);
  1387. }
  1388. else
  1389. {
  1390. EmitVectorUnaryOpF(context, (op1) =>
  1391. {
  1392. return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
  1393. });
  1394. }
  1395. }
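
// Explanatory note: FRSQRTS is the Newton-Raphson step for 1/sqrt(x):
// result = (3 - n * m) / 2. The FastFP SSE path below evaluates exactly that
// with mulss/subss/mulss (a sketch of the math; the SoftFloat fallback also
// covers the NaN/infinity special cases).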
public static void Frsqrts_S(ArmEmitterContext context) // Fused.
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        int sizeF = op.Size & 1;
        if (sizeF == 0)
        {
            Operand maskHalf = X86GetScalar(context, 0.5f);
            Operand maskThree = X86GetScalar(context, 3f);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);
            context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
        }
        else /* if (sizeF == 1) */
        {
            Operand maskHalf = X86GetScalar(context, 0.5d);
            Operand maskThree = X86GetScalar(context, 3d);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);
            context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
        }
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
        });
    }
}

public static void Frsqrts_V(ArmEmitterContext context) // Fused.
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        int sizeF = op.Size & 1;
        if (sizeF == 0)
        {
            Operand maskHalf = X86GetAllElements(context, 0.5f);
            Operand maskThree = X86GetAllElements(context, 3f);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);
            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }
            context.Copy(GetVec(op.Rd), res);
        }
        else /* if (sizeF == 1) */
        {
            Operand maskHalf = X86GetAllElements(context, 0.5d);
            Operand maskThree = X86GetAllElements(context, 3d);
            Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
            res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
            res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);
            context.Copy(GetVec(op.Rd), res);
        }
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
        });
    }
}

public static void Fsqrt_S(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
    }
    else
    {
        EmitScalarUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
        });
    }
}

public static void Fsqrt_V(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
    }
    else
    {
        EmitVectorUnaryOpF(context, (op1) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
        });
    }
}

public static void Fsub_S(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
    }
    else if (Optimizations.FastFP)
    {
        EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
    }
    else
    {
        EmitScalarBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
        });
    }
}

public static void Fsub_V(ArmEmitterContext context)
{
    if (Optimizations.FastFP && Optimizations.UseSse2)
    {
        EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
    }
    else if (Optimizations.FastFP)
    {
        EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
    }
    else
    {
        EmitVectorBinaryOpF(context, (op1, op2) =>
        {
            return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
        });
    }
}

public static void Mla_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.Add);
    }
    else
    {
        EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mla_Ve(ArmEmitterContext context)
{
    EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, context.Multiply(op2, op3));
    });
}

public static void Mls_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
    }
    else
    {
        EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Mls_Ve(ArmEmitterContext context)
{
    EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Subtract(op1, context.Multiply(op2, op3));
    });
}

public static void Mul_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        EmitSse41VectorMul_AddSub(context, AddSub.None);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
    }
}

public static void Mul_Ve(ArmEmitterContext context)
{
    EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Neg_S(ArmEmitterContext context)
{
    EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
}

public static void Neg_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse2)
    {
        OpCodeSimd op = (OpCodeSimd)context.CurrOp;
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
    }
}
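
// Explanatory note: the high-narrow family (ADDHN/SUBHN and the rounding
// RADDHN/RSUBHN) keeps only the high half of each widened result; the rounding
// forms add 1 << (eSize - 1) first. See EmitHighNarrow further down.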
public static void Raddhn_V(ArmEmitterContext context)
{
    EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
}

public static void Rsubhn_V(ArmEmitterContext context)
{
    EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
}

public static void Saba_V(ArmEmitterContext context)
{
    EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Sabal_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Sabd_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Sabdl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0
            ? Intrinsic.X86Pmovsxbw
            : Intrinsic.X86Pmovsxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Sadalp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: true, accumulate: true);
}

public static void Saddl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Saddlp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: true, accumulate: false);
}

public static void Saddlv_V(ArmEmitterContext context)
{
    EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
}

public static void Saddw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
    }
}
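
// Explanatory note: the SSE path below relies on the carry-free identity
// a + b == 2 * (a & b) + (a ^ b), so the halving add (a + b) >> 1 becomes
// (a & b) + ((a ^ b) >> 1) without widening; the arithmetic shift
// (psraw/psrad) preserves the signed semantics.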
public static void Shadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size > 0)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
        Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
        Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;
        res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, res, res2);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return context.ShiftRightSI(context.Add(op1, op2), Const(1));
        });
    }
}
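
// Explanatory note: pavgb/pavgw compute (x + y + 1) >> 1 on unsigned elements.
// Biasing both operands by 0x80.. maps signed values into the unsigned domain,
// and n' - avg(n', m') then reduces to (n - m) >> 1 with flooring, which is
// the SHSUB result (a sketch; element-size wraparound does the rest).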
public static void Shsub_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
        Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        res = context.AddIntrinsic(subInst, nPlusMask, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
        });
    }
}

public static void Smax_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic maxInst = X86PmaxsInstruction[op.Size];
        Operand res = context.AddIntrinsic(maxInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
    }
}

public static void Smaxp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
    }
    else
    {
        EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
    }
}

public static void Smaxv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
}

public static void Smin_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic minInst = X86PminsInstruction[op.Size];
        Operand res = context.AddIntrinsic(minInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
    }
}

public static void Sminp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
    }
    else
    {
        EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
    }
}

public static void Sminv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
}

public static void Smlal_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(addInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smlal_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, context.Multiply(op2, op3));
    });
}

public static void Smlsl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(subInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Smlsl_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
    {
        return context.Subtract(op1, context.Multiply(op2, op3));
    });
}

public static void Smull_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Smull_Ve(ArmEmitterContext context)
{
    EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Sqabs_S(ArmEmitterContext context)
{
    EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
}

public static void Sqabs_V(ArmEmitterContext context)
{
    EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
}

public static void Sqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Add);
}

public static void Sqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Add);
}

public static void Sqdmulh_S(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.ScalarSx);
}

public static void Sqdmulh_V(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.VectorSx);
}

public static void Sqneg_S(ArmEmitterContext context)
{
    EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
}

public static void Sqneg_V(ArmEmitterContext context)
{
    EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
}

public static void Sqrdmulh_S(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.ScalarSx);
}

public static void Sqrdmulh_V(ArmEmitterContext context)
{
    EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.VectorSx);
}

public static void Sqsub_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
}

public static void Sqsub_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
}

public static void Sqxtn_S(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
}

public static void Sqxtn_V(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
}

public static void Sqxtun_S(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
}

public static void Sqxtun_V(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
}
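
// Explanatory note: signed rounding halving add via pavg. Subtracting the sign
// bias 0x80.. moves both operands into the unsigned domain, where pavg computes
// (x + y + 1) >> 1 exactly; adding the bias back yields the SRHADD result
// (a sketch relying on two's-complement wraparound).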
public static void Srhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
        Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, mask, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpSx(context, (op1, op2) =>
        {
            Operand res = context.Add(op1, op2);
            res = context.Add(res, Const(1L));
            return context.ShiftRightSI(res, Const(1));
        });
    }
}

public static void Ssubl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Ssubw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovsxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Sub_S(ArmEmitterContext context)
{
    EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
}

public static void Sub_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse2)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        Operand res = context.AddIntrinsic(subInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Subhn_V(ArmEmitterContext context)
{
    EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
}

public static void Suqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
}

public static void Suqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
}

public static void Uaba_V(ArmEmitterContext context)
{
    EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Uabal_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
    });
}

public static void Uabd_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Uabdl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0
            ? Intrinsic.X86Pmovzxbw
            : Intrinsic.X86Pmovzxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
        {
            return EmitAbs(context, context.Subtract(op1, op2));
        });
    }
}

public static void Uadalp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: false, accumulate: true);
}

public static void Uaddl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
    }
}

public static void Uaddlp_V(ArmEmitterContext context)
{
    EmitAddLongPairwise(context, signed: false, accumulate: false);
}

public static void Uaddlv_V(ArmEmitterContext context)
{
    EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
}

public static void Uaddw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
    }
}
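
// Explanatory note: same carry-free identity as Shadd_V above,
// (a & b) + ((a ^ b) >> 1), but with a logical shift (psrlw/psrld) for the
// unsigned semantics.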
public static void Uhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size > 0)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
        Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);
        Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;
        res2 = context.AddIntrinsic(shiftInst, res2, Const(1));
        Intrinsic addInst = X86PaddInstruction[op.Size];
        res = context.AddIntrinsic(addInst, res, res2);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return context.ShiftRightUI(context.Add(op1, op2), Const(1));
        });
    }
}
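
// Explanatory note: since pavg computes (n + m + 1) >> 1, the difference
// n - pavg(n, m) equals (n - m) >> 1 under element-size wraparound, which is
// the UHSUB result (a sketch of the identity, not the ARM pseudocode).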
public static void Uhsub_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size];
        res = context.AddIntrinsic(subInst, n, res);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
        });
    }
}

public static void Umax_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic maxInst = X86PmaxuInstruction[op.Size];
        Operand res = context.AddIntrinsic(maxInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
    }
}

public static void Umaxp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
    }
    else
    {
        EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
    }
}

public static void Umaxv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
}

public static void Umin_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic minInst = X86PminuInstruction[op.Size];
        Operand res = context.AddIntrinsic(minInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
    }
}

public static void Uminp_V(ArmEmitterContext context)
{
    if (Optimizations.UseSsse3)
    {
        EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
    }
    else
    {
        EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
    }
}

public static void Uminv_V(ArmEmitterContext context)
{
    EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
}

public static void Umlal_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic addInst = X86PaddInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(addInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Add(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umlal_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Add(op1, context.Multiply(op2, op3));
    });
}

public static void Umlsl_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse41 && op.Size < 2)
    {
        Operand d = GetVec(op.Rd);
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;
        Operand res = context.AddIntrinsic(mullInst, n, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(d, context.AddIntrinsic(subInst, d, res));
    }
    else
    {
        EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
        {
            return context.Subtract(op1, context.Multiply(op2, op3));
        });
    }
}

public static void Umlsl_Ve(ArmEmitterContext context)
{
    EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
    {
        return context.Subtract(op1, context.Multiply(op2, op3));
    });
}

public static void Umull_V(ArmEmitterContext context)
{
    EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Umull_Ve(ArmEmitterContext context)
{
    EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
}

public static void Uqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
}

public static void Uqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
}

public static void Uqsub_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
}

public static void Uqsub_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
}

public static void Uqxtn_S(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
}

public static void Uqxtn_V(ArmEmitterContext context)
{
    EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
}
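
// Explanatory note: for unsigned elements pavgb/pavgw already compute
// (n + m + 1) >> 1, which is exactly URHADD, so no bias correction is needed.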
public static void Urhadd_V(ArmEmitterContext context)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    if (Optimizations.UseSse2 && op.Size < 2)
    {
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;
        Operand res = context.AddIntrinsic(avgInst, n, m);
        if (op.RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(op.Rd), res);
    }
    else
    {
        EmitVectorBinaryOpZx(context, (op1, op2) =>
        {
            Operand res = context.Add(op1, op2);
            res = context.Add(res, Const(1L));
            return context.ShiftRightUI(res, Const(1));
        });
    }
}

public static void Usqadd_S(ArmEmitterContext context)
{
    EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
}

public static void Usqadd_V(ArmEmitterContext context)
{
    EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
}

public static void Usubl_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        n = context.AddIntrinsic(movInst, n);
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

public static void Usubw_V(ArmEmitterContext context)
{
    if (Optimizations.UseSse41)
    {
        OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
        Operand n = GetVec(op.Rn);
        Operand m = GetVec(op.Rm);
        if (op.RegisterSize == RegisterSize.Simd128)
        {
            m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
        }
        Intrinsic movInst = X86PmovzxInstruction[op.Size];
        m = context.AddIntrinsic(movInst, m);
        Intrinsic subInst = X86PsubInstruction[op.Size + 1];
        context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
    }
    else
    {
        EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
    }
}

private static Operand EmitAbs(ArmEmitterContext context, Operand value)
{
    Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));
    return context.ConditionalSelect(isPositive, value, context.Negate(value));
}

private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand res = context.VectorZero();
    int pairs = op.GetPairsCount() >> op.Size;
    for (int index = 0; index < pairs; index++)
    {
        int pairIndex = index << 1;
        Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
        Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);
        Operand e = context.Add(ne0, ne1);
        if (accumulate)
        {
            Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);
            e = context.Add(e, de);
        }
        res = EmitVectorInsert(context, res, e, index, op.Size + 1);
    }
    context.Copy(GetVec(op.Rd), res);
}
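
// Explanatory note: SQDMULH/SQRDMULH compute (2 * n * m [+ 1 << (eSize - 1)])
// >> eSize on the widened product; the helper below evaluates this on 64-bit
// operands and the saturating wrapper supplied by the caller clamps the result.
// The int.MinValue fixup appears to compensate for the doubling shift wrapping
// when both 32-bit operands are the most negative value.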
private static Operand EmitDoublingMultiplyHighHalf(
    ArmEmitterContext context,
    Operand n,
    Operand m,
    bool round)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    int eSize = 8 << op.Size;
    Operand res = context.Multiply(n, m);
    if (!round)
    {
        res = context.ShiftRightSI(res, Const(eSize - 1));
    }
    else
    {
        long roundConst = 1L << (eSize - 1);
        res = context.ShiftLeft(res, Const(1));
        res = context.Add(res, Const(roundConst));
        res = context.ShiftRightSI(res, Const(eSize));
        Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));
        res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
    }
    return res;
}
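
// Explanatory note: shared helper for the high-narrow instructions. It applies
// `emit` to the widened elements, optionally adds the rounding constant
// 1 << (eSize - 1), keeps the high half of each result, and writes the low or
// high half of Rd depending on the register size (the "2" variants).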
private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
{
    OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
    int elems = 8 >> op.Size;
    int eSize = 8 << op.Size;
    int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;
    Operand d = GetVec(op.Rd);
    Operand res = part == 0 ? context.VectorZero() : context.Copy(d);
    long roundConst = 1L << (eSize - 1);
    for (int index = 0; index < elems; index++)
    {
        Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
        Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);
        Operand de = emit(ne, me);
        if (round)
        {
            de = context.Add(de, Const(roundConst));
        }
        de = context.ShiftRightUI(de, Const(eSize));
        res = EmitVectorInsert(context, res, de, part + index, op.Size);
    }
    context.Copy(d, res);
}

private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
{
    Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
    Operand cmp = signed
        ? context.ICompareGreaterOrEqual (op1, op2)
        : context.ICompareGreaterOrEqualUI(op1, op2);
    return context.ConditionalSelect(cmp, op1, op2);
}

private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
{
    Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);
    Operand cmp = signed
        ? context.ICompareLessOrEqual (op1, op2)
        : context.ICompareLessOrEqualUI(op1, op2);
    return context.ConditionalSelect(cmp, op1, op2);
}

private static void EmitScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand n = GetVec(op.Rn);
    Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;
    Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
    if ((op.Size & 1) != 0)
    {
        res = context.VectorZeroUpper64(res);
    }
    else
    {
        res = context.VectorZeroUpper96(res);
    }
    context.Copy(GetVec(op.Rd), res);
}

private static void EmitVectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
{
    OpCodeSimd op = (OpCodeSimd)context.CurrOp;
    Operand n = GetVec(op.Rn);
    Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;
    Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));
    if (op.RegisterSize == RegisterSize.Simd64)
    {
        res = context.VectorZeroUpper64(res);
    }
    context.Copy(GetVec(op.Rd), res);
}
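
// Explanatory note: NaN classification with SSE. An unordered self-compare
// (UnorderedQ) flags every NaN lane, and testing the quiet bit (bit 22 of a
// single, bit 51 of a double) splits quiet NaNs from signaling ones.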
public static void EmitSse2VectorIsNaNOpF(
    ArmEmitterContext context,
    Operand opF,
    out Operand qNaNMask,
    out Operand sNaNMask,
    bool? isQNaN = null)
{
    IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
    if ((op.Size & 1) == 0)
    {
        const int QBit = 22;
        Operand qMask = X86GetAllElements(context, 1 << QBit);
        Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));
        Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
        mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));
        qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andps, mask2, mask1) : null;
        sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnps, mask2, mask1) : null;
    }
    else /* if ((op.Size & 1) == 1) */
    {
        const int QBit = 51;
        Operand qMask = X86GetAllElements(context, 1L << QBit);
        Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));
        Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
        mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));
        qNaNMask = isQNaN == null || (bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andpd, mask2, mask1) : null;
        sNaNMask = isQNaN == null || !(bool)isQNaN ? context.AddIntrinsic(Intrinsic.X86Andnpd, mask2, mask1) : null;
    }
}
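
// Explanatory note: ARM's default NaN propagation picks the first NaN operand,
// signaling NaNs taking priority over quiet ones, and quiets the result. The
// mask below selects n when n is signaling, or when n is quiet and m is not
// signaling; the Por with qMask sets the quiet bit.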
public static Operand EmitSse41ProcessNaNsOpF(
    ArmEmitterContext context,
    Func2I emit,
    bool scalar,
    Operand n = null,
    Operand m = null)
{
    Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
    Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));
    EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out Operand nSNaNMask);
    EmitSse2VectorIsNaNOpF(context, mCopy, out _, out Operand mSNaNMask, isQNaN: false);
    int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;
    if (sizeF == 0)
    {
        const int QBit = 22;
        Operand qMask = scalar ? X86GetScalar(context, 1 << QBit) : X86GetAllElements(context, 1 << QBit);
        Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
        resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
        Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, nCopy, resNaNMask);
        resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
        Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmpps, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
        Operand res = context.AddIntrinsic(Intrinsic.X86Blendvps, resNaN, emit(nCopy, mCopy), resMask);
        if (n != null || m != null)
        {
            return res;
        }
        if (scalar)
        {
            res = context.VectorZeroUpper96(res);
        }
        else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
        return null;
    }
    else /* if (sizeF == 1) */
    {
        const int QBit = 51;
        Operand qMask = scalar ? X86GetScalar(context, 1L << QBit) : X86GetAllElements(context, 1L << QBit);
        Operand resNaNMask = context.AddIntrinsic(Intrinsic.X86Pandn, mSNaNMask, nQNaNMask);
        resNaNMask = context.AddIntrinsic(Intrinsic.X86Por, resNaNMask, nSNaNMask);
        Operand resNaN = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, nCopy, resNaNMask);
        resNaN = context.AddIntrinsic(Intrinsic.X86Por, resNaN, qMask);
        Operand resMask = context.AddIntrinsic(Intrinsic.X86Cmppd, nCopy, mCopy, Const((int)CmpCondition.OrderedQ));
        Operand res = context.AddIntrinsic(Intrinsic.X86Blendvpd, resNaN, emit(nCopy, mCopy), resMask);
        if (n != null || m != null)
        {
            return res;
        }
        if (scalar)
        {
            res = context.VectorZeroUpper64(res);
        }
        context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);
        return null;
    }
}
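
// Explanatory note: x86 maxps/minps do not implement the signed-zero ordering
// ARM expects (e.g. max(+0, -0) must be +0). The helper strips the sign off
// the max/min result and rebuilds it from the operands: AND of the sign bits
// for max, OR for min (NaNs are dealt with by the callers above).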
private static Operand EmitSse2VectorMaxMinOpF(ArmEmitterContext context, Operand n, Operand m, bool isMax)
{
    IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;
    if ((op.Size & 1) == 0)
    {
        Operand mask = X86GetAllElements(context, -0f);
        Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxps : Intrinsic.X86Minps, n, m);
        res = context.AddIntrinsic(Intrinsic.X86Andnps, mask, res);
        Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
        resSign = context.AddIntrinsic(Intrinsic.X86Andps, mask, resSign);
        return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
    }
    else /* if ((op.Size & 1) == 1) */
    {
        Operand mask = X86GetAllElements(context, -0d);
        Operand res = context.AddIntrinsic(isMax ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, n, m);
        res = context.AddIntrinsic(Intrinsic.X86Andnpd, mask, res);
        Operand resSign = context.AddIntrinsic(isMax ? Intrinsic.X86Pand : Intrinsic.X86Por, n, m);
        resSign = context.AddIntrinsic(Intrinsic.X86Andpd, mask, resSign);
        return context.AddIntrinsic(Intrinsic.X86Por, res, resSign);
    }
}
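
// Explanatory note: FMAXNM/FMINNM treat a lone quiet NaN as missing data. The
// helper substitutes -Inf (max) or +Inf (min) for an operand that is the only
// quiet NaN of the pair, so the numeric operand always wins, then defers to the
// NaN-propagating max/min above.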
        private static Operand EmitSse41MaxMinNumOpF(
            ArmEmitterContext context,
            bool isMaxNum,
            bool scalar,
            Operand n = null,
            Operand m = null)
        {
            Operand nCopy = n ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rn));
            Operand mCopy = m ?? context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rm));

            EmitSse2VectorIsNaNOpF(context, nCopy, out Operand nQNaNMask, out _, isQNaN: true);
            EmitSse2VectorIsNaNOpF(context, mCopy, out Operand mQNaNMask, out _, isQNaN: true);

            int sizeF = ((IOpCodeSimd)context.CurrOp).Size & 1;

            if (sizeF == 0)
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                // Mask of elements where that source alone is a quiet NaN.
                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvps, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                }, scalar: scalar, nCopy, mCopy);

                // Operands were supplied by the caller: hand the result back
                // instead of writing it to Rd.
                if (n != null || m != null)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (((OpCodeSimdReg)context.CurrOp).RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return null;
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = scalar
                    ? X86GetScalar     (context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity)
                    : X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                nCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, nCopy, negInfMask, nMask);
                mCopy = context.AddIntrinsic(Intrinsic.X86Blendvpd, mCopy, negInfMask, mMask);

                Operand res = EmitSse41ProcessNaNsOpF(context, (op1, op2) =>
                {
                    return EmitSse2VectorMaxMinOpF(context, op1, op2, isMax: isMaxNum);
                }, scalar: scalar, nCopy, mCopy);

                if (n != null || m != null)
                {
                    return res;
                }

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(((OpCodeSimdReg)context.CurrOp).Rd), res);

                return null;
            }
        }
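
        // Optional accumulate step for EmitSse41VectorMul_AddSub: add the
        // product to, or subtract it from, the destination register.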
        private enum AddSub
        {
            None,
            Add,
            Subtract
        }
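
        // x86 has no byte multiply, so for 8-bit elements the product is built
        // from two 16-bit pmullw multiplies: one on the high bytes shifted down
        // (then shifted back up), one on the full words, whose low bytes are
        // already correct; the two halves are merged with a 0x00FF00FF blend
        // mask.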
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res;

            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
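
        // Signed absolute difference: select n - m where n > m and m - n
        // otherwise, using the pcmpgt result as a per-element mask.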
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
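
        // Unsigned absolute difference: there is no unsigned pcmpgt, so the
        // n > m mask is derived from pmaxu followed by pcmpeq against m, then
        // inverted with pandn against an all-ones vector.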
        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            // After the inversion, cmpMask is all ones exactly where n > m.
            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res  = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res  = context.AddIntrinsic(Intrinsic.X86Pand,  cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
    }
}