// https://github.com/intel/ARM_NEON_2_x86_SSE/blob/master/NEON_2_SSE.h
// https://www.agner.org/optimize/#vectorclass @ vectori128.h

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.State;
using ARMeilleure.Translation;
using System;
using System.Diagnostics;

using static ARMeilleure.Instructions.InstEmitHelper;
using static ARMeilleure.Instructions.InstEmitSimdHelper;
using static ARMeilleure.IntermediateRepresentation.OperandHelper;

namespace ARMeilleure.Instructions
{
    using Func2I = Func<Operand, Operand, Operand>;

    static partial class InstEmit
    {
        public static void Abs_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Abs_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Add_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
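
        // Note (editorial): the SSE2 fast path below assumes X86PaddInstruction is a
        // helper table mapping the element size (op.Size: 0 = byte .. 3 = doubleword)
        // to the matching packed-add intrinsic (paddb/paddw/paddd/paddq).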
        public static void Add_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                Intrinsic addInst = X86PaddInstruction[op.Size];
                Operand res = context.AddIntrinsic(addInst, n, m);
                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }
                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
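
        // Note (editorial): ADDHN adds n and m at double width and keeps only the
        // upper half of each sum; EmitHighNarrow is assumed to perform that narrowing,
        // with round: false selecting the non-rounding (ADDHN vs. RADDHN) variant.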
        public static void Addhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: false);
        }
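
        // Note (editorial): scalar ADDP reduces a single pair: it extracts elements 0
        // and 1 of Rn, adds them, and writes the sum to element 0 of Rd with the
        // remaining lanes zeroed.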
        public static void Addp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand ne0 = EmitVectorExtractZx(context, op.Rn, 0, op.Size);
            Operand ne1 = EmitVectorExtractZx(context, op.Rn, 1, op.Size);
            Operand res = context.Add(ne0, ne1);
            context.Copy(GetVec(op.Rd), EmitVectorInsert(context, context.VectorZero(), res, 0, op.Size));
        }

        public static void Addp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PaddInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Addv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }
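
        // Note (editorial): in the element loops below, elems = op.GetBytesCount() >> op.Size
        // is the element count (vector bytes / element bytes) and eSize = 8 << op.Size is the
        // element width in bits; e.g. Size == 1 on a 128-bit vector gives 8 elements of 16 bits.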
        public static void Cls_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingSigns)), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }

        public static void Clz_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.GetBytesCount() >> op.Size;
            int eSize = 8 << op.Size;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size);
                Operand de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountLeadingZeros)), ne, Const(eSize));
                res = EmitVectorInsert(context, res, de, index, op.Size);
            }
            context.Copy(GetVec(op.Rd), res);
        }
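
        // Note (editorial): CNT operates on bytes only, so the element count is fixed by the
        // register width (16 bytes for a 128-bit vector, 8 for a 64-bit one). When the host
        // supports POPCNT each byte is counted with the x86 instruction; otherwise a managed
        // SoftFallback routine is called per byte.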
        public static void Cnt_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            Operand res = context.VectorZero();
            int elems = op.RegisterSize == RegisterSize.Simd128 ? 16 : 8;
            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, 0);
                Operand de;
                if (Optimizations.UsePopCnt)
                {
                    de = context.AddIntrinsicLong(Intrinsic.X86Popcnt, ne);
                }
                else
                {
                    de = context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.CountSetBits8)), ne);
                }
                res = EmitVectorInsert(context, res, de, index, 0);
            }
            context.Copy(GetVec(op.Rd), res);
        }
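
        // Note (editorial): FABD is emitted as a subtract followed by EmitFloatAbs, which
        // is assumed to clear the sign bit with a bitwise mask (its two flags selecting
        // single vs. double precision and scalar vs. packed form). sizeF = op.Size & 1
        // distinguishes FP32 (0) from FP64 (1) throughout this file.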
        public static void Fabd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subss, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subps, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Subpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = EmitFloatAbs(context, res, false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                    return EmitUnaryMathCall(context, nameof(Math.Abs), res);
                });
            }
        }

        public static void Fabs_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, false);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fabs_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), true, true);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = EmitFloatAbs(context, GetVec(op.Rn), false, true);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Abs), op1);
                });
            }
        }

        public static void Fadd_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Addss, Intrinsic.X86Addsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fadd_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Add(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Faddp_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse3)
            {
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddps, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Haddpd, GetVec(op.Rn), GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                OperandType type = sizeF != 0 ? OperandType.FP64 : OperandType.FP32;
                Operand ne0 = context.VectorExtract(type, GetVec(op.Rn), 0);
                Operand ne1 = context.VectorExtract(type, GetVec(op.Rn), 1);
                Operand res = EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), ne0, ne1);
                context.Copy(GetVec(op.Rd), context.VectorInsert(context.VectorZero(), res, 0));
            }
        }

        public static void Faddp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF(context, Intrinsic.X86Addps, Intrinsic.X86Addpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPAdd), op1, op2);
                });
            }
        }

        public static void Fdiv_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Divss, Intrinsic.X86Divsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }

        public static void Fdiv_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Divps, Intrinsic.X86Divpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Divide(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPDiv), op1, op2);
                });
            }
        }
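
        // Note (editorial): FMADD is architecturally fused (a single rounding), but the
        // SSE2 fast path below emits a separate multiply and add, so the intermediate
        // product is rounded once more than on real hardware. That trade-off is gated
        // behind Optimizations.FastFP; the soft-float fallback keeps fused semantics.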
        public static void Fmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmax_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Maxss, Intrinsic.X86Maxsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmax_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmaxnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: true, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMaxNum), op1, op2);
                });
            }
        }

        public static void Fmaxnmv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpF(context, (op1, op2) =>
            {
                return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMaxNum)), op1, op2);
            });
        }

        public static void Fmaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF(context, Intrinsic.X86Maxps, Intrinsic.X86Maxpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMax), op1, op2);
                });
            }
        }

        public static void Fmin_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Minss, Intrinsic.X86Minsd);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmin_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fminnm_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: true);
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnm_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse41)
            {
                EmitSse41MaxMinNumOpF(context, isMaxNum: false, scalar: false);
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMinNum), op1, op2);
                });
            }
        }

        public static void Fminnmv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpF(context, (op1, op2) =>
            {
                return context.Call(typeof(SoftFloat32).GetMethod(nameof(SoftFloat32.FPMinNum)), op1, op2);
            });
        }

        public static void Fminp_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitSse2VectorPairwiseOpF(context, Intrinsic.X86Minps, Intrinsic.X86Minpd);
            }
            else
            {
                EmitVectorPairwiseOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMin), op1, op2);
                });
            }
        }

        public static void Fmla_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmla_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }
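
        // Note (editorial): the by-element forms first broadcast lane op.Index of m to
        // every lane with a shuffle. For FP32, shufps selects each destination lane with
        // a 2-bit field, so mask = idx | idx << 2 | idx << 4 | idx << 6 replicates the
        // lane; for FP64, shufpd uses one bit per lane, hence mask = idx | idx << 1.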
        public static void Fmla_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Addpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Se(ArmEmitterContext context) // Fused.
        {
            EmitScalarTernaryOpByElemF(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Fmls_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmls_Ve(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subps, d, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(d, res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, d, res);
                    context.Copy(d, res);
                }
            }
            else
            {
                EmitVectorTernaryOpByElemF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, a, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, a, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fmul_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Mulss, Intrinsic.X86Mulsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Fmul_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Mulps, Intrinsic.X86Mulpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmul_Ve(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdRegElemF op = (OpCodeSimdRegElemF)context.CurrOp;
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    int shuffleMask = op.Index | op.Index << 2 | op.Index << 4 | op.Index << 6;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufps, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, n, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    int shuffleMask = op.Index | op.Index << 1;
                    Operand res = context.AddIntrinsic(Intrinsic.X86Shufpd, m, m, Const(shuffleMask));
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, n, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) => context.Multiply(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpByElemF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMul), op1, op2);
                });
            }
        }

        public static void Fmulx_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Se(ArmEmitterContext context)
        {
            EmitScalarBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_V(ArmEmitterContext context)
        {
            EmitVectorBinaryOpF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }

        public static void Fmulx_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemF(context, (op1, op2) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPMulX), op1, op2);
            });
        }
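
        // Note (editorial): FNEG flips only the sign bit, implemented here as an XOR
        // with -0.0 (a mask with just the sign bit set); this also negates NaNs and
        // infinities correctly, which a subtraction from zero would not.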
        public static void Fneg_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fneg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, -0f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorps, mask, GetVec(op.Rn));
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, -0d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, GetVec(op.Rn));
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) => context.Negate(op1));
            }
        }

        public static void Fnmadd_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulAdd), op1, op2, op3);
                });
            }
        }

        public static void Fnmsub_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                Operand d = GetVec(op.Rd);
                Operand a = GetVec(op.Ra);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);
                if (op.Size == 0)
                {
                    Operand mask = X86GetScalar(context, -0f);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorps, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addss, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper96(res));
                }
                else /* if (op.Size == 1) */
                {
                    Operand mask = X86GetScalar(context, -0d);
                    Operand aNeg = context.AddIntrinsic(Intrinsic.X86Xorpd, mask, a);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, n, m);
                    res = context.AddIntrinsic(Intrinsic.X86Addsd, aNeg, res);
                    context.Copy(d, context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarTernaryRaOpF(context, (op1, op2, op3) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPNegMulSub), op1, op2, op3);
                });
            }
        }

        public static void Fnmul_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpF(context, (op1, op2) => context.Negate(context.Multiply(op1, op2)));
        }

        public static void Frecpe_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rcpss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }

        public static void Frecpe_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rcpps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipEstimate), op1);
                });
            }
        }
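
        // Note (editorial): FRECPS computes the Newton-Raphson correction step 2 - n * m
        // used to refine a reciprocal estimate (FRECPE); the SSE2 path materializes the
        // constant 2 and emits mul + sub, while the fallback calls
        // SoftFloat32.FPRecipStepFused for the fused, NaN-aware semantics.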
        public static void Frecps_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetScalar(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subss, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetScalar(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subsd, mask, res);
                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecps_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;
                int sizeF = op.Size & 1;
                if (sizeF == 0)
                {
                    Operand mask = X86GetAllElements(context, 2f);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subps, mask, res);
                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }
                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand mask = X86GetAllElements(context, 2d);
                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));
                    res = context.AddIntrinsic(Intrinsic.X86Subpd, mask, res);
                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecipStepFused), op1, op2);
                });
            }
        }

        public static void Frecpx_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRecpX), op1);
            });
        }

        public static void Frinta_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinta_V(ArmEmitterContext context)
        {
            EmitVectorUnaryOpF(context, (op1) =>
            {
                return EmitRoundMathCall(context, MidpointRounding.AwayFromZero, op1);
            });
        }

        public static void Frinti_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frinti_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frintm_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintm_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsMinusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Floor), op1);
                });
            }
        }

        public static void Frintn_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintn_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.ToNearest);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitRoundMathCall(context, MidpointRounding.ToEven, op1);
                });
            }
        }

        public static void Frintp_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsPlusInfinity);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Ceiling), op1);
                });
            }
        }

        public static void Frintx_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            EmitScalarUnaryOpF(context, (op1) =>
            {
                if (op.Size == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (op.Size == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frintx_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            EmitVectorUnaryOpF(context, (op1) =>
            {
                if (sizeF == 0)
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.RoundF)), op1);
                }
                else /* if (sizeF == 1) */
                {
                    return context.Call(typeof(SoftFallback).GetMethod(nameof(SoftFallback.Round)), op1);
                }
            });
        }

        public static void Frintz_S(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitScalarRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
                });
            }
        }

        public static void Frintz_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitVectorRoundOpF(context, FPRoundingMode.TowardsZero);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitUnaryMathCall(context, nameof(Math.Truncate), op1);
                });
            }
        }

        public static void Frsqrte_S(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Rsqrtss, 0);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
                });
            }
        }

        public static void Frsqrte_V(ArmEmitterContext context)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;
            int sizeF = op.Size & 1;
            if (Optimizations.FastFP && Optimizations.UseSse && sizeF == 0)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Rsqrtps, 0);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtEstimate), op1);
                });
            }
        }
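
        // Note (editorial): FRSQRTS is the reciprocal-square-root refinement step
        // (3 - n * m) / 2. As with FRECPS, the SSE2 fast path is not fused, so it can
        // differ from hardware in the last bit; the SoftFloat32.FPRSqrtStepFused
        // fallback matches the architectural definition.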
        public static void Frsqrts_S(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetScalar(context, 0.5f);
                    Operand maskThree = X86GetScalar(context, 3f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulss, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subss, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulss, maskHalf, res);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper96(res));
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetScalar(context, 0.5d);
                    Operand maskThree = X86GetScalar(context, 3d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulsd, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subsd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulsd, maskHalf, res);

                    context.Copy(GetVec(op.Rd), context.VectorZeroUpper64(res));
                }
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }

        public static void Frsqrts_V(ArmEmitterContext context) // Fused.
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                int sizeF = op.Size & 1;

                if (sizeF == 0)
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5f);
                    Operand maskThree = X86GetAllElements(context, 3f);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulps, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subps, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulps, maskHalf, res);

                    if (op.RegisterSize == RegisterSize.Simd64)
                    {
                        res = context.VectorZeroUpper64(res);
                    }

                    context.Copy(GetVec(op.Rd), res);
                }
                else /* if (sizeF == 1) */
                {
                    Operand maskHalf = X86GetAllElements(context, 0.5d);
                    Operand maskThree = X86GetAllElements(context, 3d);

                    Operand res = context.AddIntrinsic(Intrinsic.X86Mulpd, GetVec(op.Rn), GetVec(op.Rm));

                    res = context.AddIntrinsic(Intrinsic.X86Subpd, maskThree, res);
                    res = context.AddIntrinsic(Intrinsic.X86Mulpd, maskHalf, res);

                    context.Copy(GetVec(op.Rd), res);
                }
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPRSqrtStepFused), op1, op2);
                });
            }
        }

        public static void Fsqrt_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarUnaryOpF(context, Intrinsic.X86Sqrtss, Intrinsic.X86Sqrtsd);
            }
            else
            {
                EmitScalarUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsqrt_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorUnaryOpF(context, Intrinsic.X86Sqrtps, Intrinsic.X86Sqrtpd);
            }
            else
            {
                EmitVectorUnaryOpF(context, (op1) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSqrt), op1);
                });
            }
        }

        public static void Fsub_S(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitScalarBinaryOpF(context, Intrinsic.X86Subss, Intrinsic.X86Subsd);
            }
            else if (Optimizations.FastFP)
            {
                EmitScalarBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitScalarBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }

        public static void Fsub_V(ArmEmitterContext context)
        {
            if (Optimizations.FastFP && Optimizations.UseSse2)
            {
                EmitVectorBinaryOpF(context, Intrinsic.X86Subps, Intrinsic.X86Subpd);
            }
            else if (Optimizations.FastFP)
            {
                EmitVectorBinaryOpF(context, (op1, op2) => context.Subtract(op1, op2));
            }
            else
            {
                EmitVectorBinaryOpF(context, (op1, op2) =>
                {
                    return EmitSoftFloatCall(context, nameof(SoftFloat32.FPSub), op1, op2);
                });
            }
        }
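
        // MLA, MLS and MUL share the SSE4.1 helper EmitSse41VectorMul_AddSub
        // (defined further down), selecting AddSub.Add, AddSub.Subtract or
        // AddSub.None respectively.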
        public static void Mla_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Add);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mla_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mls_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.Subtract);
            }
            else
            {
                EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Mls_Ve(ArmEmitterContext context)
        {
            EmitVectorTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Mul_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                EmitSse41VectorMul_AddSub(context, AddSub.None);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
            }
        }

        public static void Mul_Ve(ArmEmitterContext context)
        {
            EmitVectorBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Neg_S(ArmEmitterContext context)
        {
            EmitScalarUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Neg_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimd op = (OpCodeSimd)context.CurrOp;

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, context.VectorZero(), GetVec(op.Rn));

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorUnaryOpSx(context, (op1) => context.Negate(op1));
            }
        }

        public static void Raddhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Add(op1, op2), round: true);
        }

        public static void Rsubhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: true);
        }

        public static void Saba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Sabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41VectorSabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovsxbw
                    : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41VectorSabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Sadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: true);
        }

        public static void Saddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Saddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: true, accumulate: false);
        }

        public static void Saddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpSx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Saddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
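
        // Signed halving add. The SSE path uses the overflow-free identity
        // (n + m) >> 1 == (n & m) + ((n ^ m) >> 1), with an arithmetic shift
        // for the signed case.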
        public static void Shadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psraw : Intrinsic.X86Psrad;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Add(op1, op2), Const(1));
                });
            }
        }
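
        // Signed halving subtract. Adding a 0x80 (or 0x8000) bias maps signed
        // values into the unsigned domain so PAVGB/PAVGW can be used; since
        // pavg computes (a + b + 1) >> 1, n' - pavg(n', m') equals
        // (n - m) >> 1 and the bias cancels out.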
        public static void Shsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                Operand nPlusMask = context.AddIntrinsic(addInst, n, mask);
                Operand mPlusMask = context.AddIntrinsic(addInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nPlusMask, mPlusMask);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, nPlusMask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    return context.ShiftRightSI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Smax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxsInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
            }
        }

        public static void Smaxv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: true));
        }

        public static void Smin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminsInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminsInstruction);
            }
            else
            {
                EmitVectorPairwiseOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
            }
        }

        public static void Sminv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpSx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: true));
        }
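
        // Widening multiply-accumulate. The SSE4.1 path sign-extends the low
        // input halves with PMOVSX (the high halves for the "2" variant, hence
        // the PSRLDQ), multiplies with PMULLW/PMULLD and accumulates into Rd
        // at the doubled element size.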
        public static void Smlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovsxbw : Intrinsic.X86Pmovsxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpSx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Smlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemSx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Smull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Smull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemSx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Sqabs_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqabs_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => EmitAbs(context, op1));
        }

        public static void Sqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Add);
        }

        public static void Sqdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.ScalarSx);
        }

        public static void Sqdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: false), SaturatingFlags.VectorSx);
        }

        public static void Sqneg_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqneg_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingUnaryOpSx(context, (op1) => context.Negate(op1));
        }

        public static void Sqrdmulh_S(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.ScalarSx);
        }

        public static void Sqrdmulh_V(ArmEmitterContext context)
        {
            EmitSaturatingBinaryOp(context, (op1, op2) => EmitDoublingMultiplyHighHalf(context, op1, op2, round: true), SaturatingFlags.VectorSx);
        }

        public static void Sqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Sub);
        }

        public static void Sqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxSx);
        }

        public static void Sqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxSx);
        }

        public static void Sqxtun_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarSxZx);
        }

        public static void Sqxtun_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorSxZx);
        }
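
        // Signed rounding halving add: bias the operands into the unsigned
        // domain (subtracting 0x80/0x8000), average with PAVG (which already
        // rounds up), then add the bias back to get (n + m + 1) >> 1.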
        public static void Srhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand mask = X86GetAllElements(context, (int)(op.Size == 0 ? 0x80808080u : 0x80008000u));

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand nMinusMask = context.AddIntrinsic(subInst, n, mask);
                Operand mMinusMask = context.AddIntrinsic(subInst, m, mask);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, nMinusMask, mMinusMask);

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, mask, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpSx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightSI(res, Const(1));
                });
            }
        }

        public static void Ssubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Ssubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovsxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpSx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Sub_S(ArmEmitterContext context)
        {
            EmitScalarBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
        }

        public static void Sub_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse2)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                Operand res = context.AddIntrinsic(subInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Subhn_V(ArmEmitterContext context)
        {
            EmitHighNarrow(context, (op1, op2) => context.Subtract(op1, op2), round: false);
        }

        public static void Suqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Suqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpSx(context, SaturatingFlags.Accumulate);
        }

        public static void Uaba_V(ArmEmitterContext context)
        {
            EmitVectorTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabal_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, EmitAbs(context, context.Subtract(op2, op3)));
            });
        }

        public static void Uabd_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                EmitSse41VectorUabdOp(context, op, n, m, isLong: false);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uabdl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0
                    ? Intrinsic.X86Pmovzxbw
                    : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                EmitSse41VectorUabdOp(context, op, n, m, isLong: true);
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) =>
                {
                    return EmitAbs(context, context.Subtract(op1, op2));
                });
            }
        }

        public static void Uadalp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: true);
        }

        public static void Uaddl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }

        public static void Uaddlp_V(ArmEmitterContext context)
        {
            EmitAddLongPairwise(context, signed: false, accumulate: false);
        }

        public static void Uaddlv_V(ArmEmitterContext context)
        {
            EmitVectorLongAcrossVectorOpZx(context, (op1, op2) => context.Add(op1, op2));
        }

        public static void Uaddw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(addInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Add(op1, op2));
            }
        }
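
        // Unsigned halving add: same (n & m) + ((n ^ m) >> 1) identity as
        // Shadd_V above, but with logical shifts.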
        public static void Uhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size > 0)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Operand res = context.AddIntrinsic(Intrinsic.X86Pand, n, m);
                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pxor, n, m);

                Intrinsic shiftInst = op.Size == 1 ? Intrinsic.X86Psrlw : Intrinsic.X86Psrld;

                res2 = context.AddIntrinsic(shiftInst, res2, Const(1));

                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, res, res2);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Add(op1, op2), Const(1));
                });
            }
        }
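
        // Unsigned halving subtract. Since PAVG computes ceil((n + m) / 2),
        // n - pavg(n, m) yields (n - m) >> 1 in two's complement, with no bias
        // needed.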
        public static void Uhsub_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, n, res);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    return context.ShiftRightUI(context.Subtract(op1, op2), Const(1));
                });
            }
        }

        public static void Umax_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic maxInst = X86PmaxuInstruction[op.Size];

                Operand res = context.AddIntrinsic(maxInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PmaxuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
            }
        }

        public static void Umaxv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMax64Op(context, op1, op2, signed: false));
        }

        public static void Umin_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic minInst = X86PminuInstruction[op.Size];

                Operand res = context.AddIntrinsic(minInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminp_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSsse3)
            {
                EmitSsse3VectorPairwiseOp(context, X86PminuInstruction);
            }
            else
            {
                EmitVectorPairwiseOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
            }
        }

        public static void Uminv_V(ArmEmitterContext context)
        {
            EmitVectorAcrossVectorOpZx(context, (op1, op2) => EmitMin64Op(context, op1, op2, signed: false));
        }

        public static void Umlal_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic addInst = X86PaddInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(addInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Add(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlal_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Add(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umlsl_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse41 && op.Size < 2)
            {
                Operand d = GetVec(op.Rd);
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = op.Size == 0 ? Intrinsic.X86Pmovzxbw : Intrinsic.X86Pmovzxwd;

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic mullInst = op.Size == 0 ? Intrinsic.X86Pmullw : Intrinsic.X86Pmulld;

                Operand res = context.AddIntrinsic(mullInst, n, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(d, context.AddIntrinsic(subInst, d, res));
            }
            else
            {
                EmitVectorWidenRnRmTernaryOpZx(context, (op1, op2, op3) =>
                {
                    return context.Subtract(op1, context.Multiply(op2, op3));
                });
            }
        }

        public static void Umlsl_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenTernaryOpByElemZx(context, (op1, op2, op3) =>
            {
                return context.Subtract(op1, context.Multiply(op2, op3));
            });
        }

        public static void Umull_V(ArmEmitterContext context)
        {
            EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Umull_Ve(ArmEmitterContext context)
        {
            EmitVectorWidenBinaryOpByElemZx(context, (op1, op2) => context.Multiply(op1, op2));
        }

        public static void Uqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Add);
        }

        public static void Uqsub_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqsub_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Sub);
        }

        public static void Uqxtn_S(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.ScalarZxZx);
        }

        public static void Uqxtn_V(ArmEmitterContext context)
        {
            EmitSaturatingNarrowOp(context, SaturatingNarrowFlags.VectorZxZx);
        }
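
        // Unsigned rounding halving add: PAVGB/PAVGW already computes
        // (n + m + 1) >> 1, which matches URHADD exactly.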
        public static void Urhadd_V(ArmEmitterContext context)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            if (Optimizations.UseSse2 && op.Size < 2)
            {
                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                Intrinsic avgInst = op.Size == 0 ? Intrinsic.X86Pavgb : Intrinsic.X86Pavgw;

                Operand res = context.AddIntrinsic(avgInst, n, m);

                if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(GetVec(op.Rd), res);
            }
            else
            {
                EmitVectorBinaryOpZx(context, (op1, op2) =>
                {
                    Operand res = context.Add(op1, op2);

                    res = context.Add(res, Const(1L));

                    return context.ShiftRightUI(res, Const(1));
                });
            }
        }

        public static void Usqadd_S(ArmEmitterContext context)
        {
            EmitScalarSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usqadd_V(ArmEmitterContext context)
        {
            EmitVectorSaturatingBinaryOpZx(context, SaturatingFlags.Accumulate);
        }

        public static void Usubl_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    n = context.AddIntrinsic(Intrinsic.X86Psrldq, n, Const(8));
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                n = context.AddIntrinsic(movInst, n);
                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRnRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        public static void Usubw_V(ArmEmitterContext context)
        {
            if (Optimizations.UseSse41)
            {
                OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

                Operand n = GetVec(op.Rn);
                Operand m = GetVec(op.Rm);

                if (op.RegisterSize == RegisterSize.Simd128)
                {
                    m = context.AddIntrinsic(Intrinsic.X86Psrldq, m, Const(8));
                }

                Intrinsic movInst = X86PmovzxInstruction[op.Size];

                m = context.AddIntrinsic(movInst, m);

                Intrinsic subInst = X86PsubInstruction[op.Size + 1];

                context.Copy(GetVec(op.Rd), context.AddIntrinsic(subInst, n, m));
            }
            else
            {
                EmitVectorWidenRmBinaryOpZx(context, (op1, op2) => context.Subtract(op1, op2));
            }
        }

        private static Operand EmitAbs(ArmEmitterContext context, Operand value)
        {
            Operand isPositive = context.ICompareGreaterOrEqual(value, Const(value.Type, 0));

            return context.ConditionalSelect(isPositive, value, context.Negate(value));
        }

        private static void EmitAddLongPairwise(ArmEmitterContext context, bool signed, bool accumulate)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand res = context.VectorZero();

            int pairs = op.GetPairsCount() >> op.Size;

            for (int index = 0; index < pairs; index++)
            {
                int pairIndex = index << 1;

                Operand ne0 = EmitVectorExtract(context, op.Rn, pairIndex, op.Size, signed);
                Operand ne1 = EmitVectorExtract(context, op.Rn, pairIndex + 1, op.Size, signed);

                Operand e = context.Add(ne0, ne1);

                if (accumulate)
                {
                    Operand de = EmitVectorExtract(context, op.Rd, index, op.Size + 1, signed);

                    e = context.Add(e, de);
                }

                res = EmitVectorInsert(context, res, e, index, op.Size + 1);
            }

            context.Copy(GetVec(op.Rd), res);
        }
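
        // Returns the high half of 2 * n * m for SQDMULH/SQRDMULH. The rounding
        // variant doubles, adds 1 << (eSize - 1) and shifts right by eSize; the
        // final compare-and-negate undoes the lone 64-bit overflow case (both
        // operands at the minimum 32-bit value) so the saturating wrapper can
        // clamp the result.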
        private static Operand EmitDoublingMultiplyHighHalf(
            ArmEmitterContext context,
            Operand n,
            Operand m,
            bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int eSize = 8 << op.Size;

            Operand res = context.Multiply(n, m);

            if (!round)
            {
                res = context.ShiftRightSI(res, Const(eSize - 1));
            }
            else
            {
                long roundConst = 1L << (eSize - 1);

                res = context.ShiftLeft(res, Const(1));

                res = context.Add(res, Const(roundConst));

                res = context.ShiftRightSI(res, Const(eSize));

                Operand isIntMin = context.ICompareEqual(res, Const((long)int.MinValue));

                res = context.ConditionalSelect(isIntMin, context.Negate(res), res);
            }

            return res;
        }
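
        // Applies `emit` at the doubled element size and keeps only the high
        // eSize bits of each result, optionally rounding first. For the "2"
        // variants (RegisterSize.Simd128) the narrowed elements are written to
        // the upper half of Rd, leaving the lower half intact.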
        private static void EmitHighNarrow(ArmEmitterContext context, Func2I emit, bool round)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            int elems = 8 >> op.Size;
            int eSize = 8 << op.Size;

            int part = op.RegisterSize == RegisterSize.Simd128 ? elems : 0;

            Operand d = GetVec(op.Rd);

            Operand res = part == 0 ? context.VectorZero() : context.Copy(d);

            long roundConst = 1L << (eSize - 1);

            for (int index = 0; index < elems; index++)
            {
                Operand ne = EmitVectorExtractZx(context, op.Rn, index, op.Size + 1);
                Operand me = EmitVectorExtractZx(context, op.Rm, index, op.Size + 1);

                Operand de = emit(ne, me);

                if (round)
                {
                    de = context.Add(de, Const(roundConst));
                }

                de = context.ShiftRightUI(de, Const(eSize));

                res = EmitVectorInsert(context, res, de, part + index, op.Size);
            }

            context.Copy(d, res);
        }

        private static Operand EmitMax64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);

            Operand cmp = signed
                ? context.ICompareGreaterOrEqual  (op1, op2)
                : context.ICompareGreaterOrEqualUI(op1, op2);

            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static Operand EmitMin64Op(ArmEmitterContext context, Operand op1, Operand op2, bool signed)
        {
            Debug.Assert(op1.Type == OperandType.I64 && op2.Type == OperandType.I64);

            Operand cmp = signed
                ? context.ICompareLessOrEqual  (op1, op2)
                : context.ICompareLessOrEqualUI(op1, op2);

            return context.ConditionalSelect(cmp, op1, op2);
        }

        private static void EmitScalarRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundsd : Intrinsic.X86Roundss;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            if ((op.Size & 1) != 0)
            {
                res = context.VectorZeroUpper64(res);
            }
            else
            {
                res = context.VectorZeroUpper96(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }

        private static void EmitVectorRoundOpF(ArmEmitterContext context, FPRoundingMode roundMode)
        {
            OpCodeSimd op = (OpCodeSimd)context.CurrOp;

            Operand n = GetVec(op.Rn);

            Intrinsic inst = (op.Size & 1) != 0 ? Intrinsic.X86Roundpd : Intrinsic.X86Roundps;

            Operand res = context.AddIntrinsic(inst, n, Const(X86GetRoundControl(roundMode)));

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
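
        // Builds a per-element mask that is all ones where the input is a quiet
        // NaN: the value must compare unordered with itself and have the quiet
        // bit set (bit 22 of a single, bit 51 of a double).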
        public static Operand EmitSse2VectorIsQNaNOpF(ArmEmitterContext context, Operand opF)
        {
            IOpCodeSimd op = (IOpCodeSimd)context.CurrOp;

            if ((op.Size & 1) == 0)
            {
                const int QBit = 22;

                Operand qMask = X86GetAllElements(context, 1 << QBit);

                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmpps, opF, opF, Const((int)CmpCondition.UnorderedQ));

                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmpps, mask2, qMask, Const((int)CmpCondition.Equal));

                return context.AddIntrinsic(Intrinsic.X86Andps, mask1, mask2);
            }
            else /* if ((op.Size & 1) == 1) */
            {
                const int QBit = 51;

                Operand qMask = X86GetAllElements(context, 1L << QBit);

                Operand mask1 = context.AddIntrinsic(Intrinsic.X86Cmppd, opF, opF, Const((int)CmpCondition.UnorderedQ));

                Operand mask2 = context.AddIntrinsic(Intrinsic.X86Pand, opF, qMask);
                mask2 = context.AddIntrinsic(Intrinsic.X86Cmppd, mask2, qMask, Const((int)CmpCondition.Equal));

                return context.AddIntrinsic(Intrinsic.X86Andpd, mask1, mask2);
            }
        }

        private static void EmitSse41MaxMinNumOpF(ArmEmitterContext context, bool isMaxNum, bool scalar)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand d = GetVec(op.Rd);
            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand nNum = context.Copy(n);
            Operand mNum = context.Copy(m);

            Operand nQNaNMask = EmitSse2VectorIsQNaNOpF(context, nNum);
            Operand mQNaNMask = EmitSse2VectorIsQNaNOpF(context, mNum);

            int sizeF = op.Size & 1;

            if (sizeF == 0)
            {
                Operand negInfMask = X86GetAllElements(context, isMaxNum ? float.NegativeInfinity : float.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnps, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnps, nQNaNMask, mQNaNMask);

                nNum = context.AddIntrinsic(Intrinsic.X86Blendvps, nNum, negInfMask, nMask);
                mNum = context.AddIntrinsic(Intrinsic.X86Blendvps, mNum, negInfMask, mMask);

                Operand res = context.AddIntrinsic(isMaxNum ? Intrinsic.X86Maxps : Intrinsic.X86Minps, nNum, mNum);

                if (scalar)
                {
                    res = context.VectorZeroUpper96(res);
                }
                else if (op.RegisterSize == RegisterSize.Simd64)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
            else /* if (sizeF == 1) */
            {
                Operand negInfMask = X86GetAllElements(context, isMaxNum ? double.NegativeInfinity : double.PositiveInfinity);

                Operand nMask = context.AddIntrinsic(Intrinsic.X86Andnpd, mQNaNMask, nQNaNMask);
                Operand mMask = context.AddIntrinsic(Intrinsic.X86Andnpd, nQNaNMask, mQNaNMask);

                nNum = context.AddIntrinsic(Intrinsic.X86Blendvpd, nNum, negInfMask, nMask);
                mNum = context.AddIntrinsic(Intrinsic.X86Blendvpd, mNum, negInfMask, mMask);

                Operand res = context.AddIntrinsic(isMaxNum ? Intrinsic.X86Maxpd : Intrinsic.X86Minpd, nNum, mNum);

                if (scalar)
                {
                    res = context.VectorZeroUpper64(res);
                }

                context.Copy(d, res);
            }
        }

        private enum AddSub
        {
            None,
            Add,
            Subtract
        }
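
        // There is no byte variant of PMULL on x86, so for size 0 the high byte
        // of each 16-bit lane is multiplied separately (PSRLW/PMULLW/PSLLW) and
        // blended with the PMULLW result for the low bytes using a 0x00FF00FF
        // mask.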
        private static void EmitSse41VectorMul_AddSub(ArmEmitterContext context, AddSub addSub)
        {
            OpCodeSimdReg op = (OpCodeSimdReg)context.CurrOp;

            Operand n = GetVec(op.Rn);
            Operand m = GetVec(op.Rm);

            Operand res;

            if (op.Size == 0)
            {
                Operand ns8 = context.AddIntrinsic(Intrinsic.X86Psrlw, n, Const(8));
                Operand ms8 = context.AddIntrinsic(Intrinsic.X86Psrlw, m, Const(8));

                res = context.AddIntrinsic(Intrinsic.X86Pmullw, ns8, ms8);
                res = context.AddIntrinsic(Intrinsic.X86Psllw, res, Const(8));

                Operand res2 = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);

                Operand mask = X86GetAllElements(context, 0x00FF00FF);

                res = context.AddIntrinsic(Intrinsic.X86Pblendvb, res, res2, mask);
            }
            else if (op.Size == 1)
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmullw, n, m);
            }
            else
            {
                res = context.AddIntrinsic(Intrinsic.X86Pmulld, n, m);
            }

            Operand d = GetVec(op.Rd);

            if (addSub == AddSub.Add)
            {
                Intrinsic addInst = X86PaddInstruction[op.Size];

                res = context.AddIntrinsic(addInst, d, res);
            }
            else if (addSub == AddSub.Subtract)
            {
                Intrinsic subInst = X86PsubInstruction[op.Size];

                res = context.AddIntrinsic(subInst, d, res);
            }

            if (op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(d, res);
        }
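
        // Signed absolute difference: a PCMPGT mask selects n - m where n > m
        // and m - n elsewhere, combined with PAND/PANDN/POR.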
        private static void EmitSse41VectorSabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic cmpgtInst = X86PcmpgtInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpgtInst, n, m);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);

            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
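
        // Unsigned absolute difference: PMAXU and PCMPEQ derive an n > m mask
        // (inverted through PANDN against all ones), which then selects between
        // n - m and m - n as in the signed helper above.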
        private static void EmitSse41VectorUabdOp(
            ArmEmitterContext context,
            OpCodeSimdReg op,
            Operand n,
            Operand m,
            bool isLong)
        {
            int size = isLong ? op.Size + 1 : op.Size;

            Intrinsic maxInst = X86PmaxuInstruction[size];

            Operand max = context.AddIntrinsic(maxInst, m, n);

            Intrinsic cmpeqInst = X86PcmpeqInstruction[size];

            Operand cmpMask = context.AddIntrinsic(cmpeqInst, max, m);

            Operand onesMask = X86GetAllElements(context, -1L);

            cmpMask = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, onesMask);

            Intrinsic subInst = X86PsubInstruction[size];

            Operand res = context.AddIntrinsic(subInst, n, m);
            Operand res2 = context.AddIntrinsic(subInst, m, n);

            res = context.AddIntrinsic(Intrinsic.X86Pand, cmpMask, res);
            res2 = context.AddIntrinsic(Intrinsic.X86Pandn, cmpMask, res2);

            res = context.AddIntrinsic(Intrinsic.X86Por, res, res2);

            if (!isLong && op.RegisterSize == RegisterSize.Simd64)
            {
                res = context.VectorZeroUpper64(res);
            }

            context.Copy(GetVec(op.Rd), res);
        }
    }
}