// CodeGenerator.cs
  1. using ARMeilleure.CodeGen.Linking;
  2. using ARMeilleure.CodeGen.Optimizations;
  3. using ARMeilleure.CodeGen.RegisterAllocators;
  4. using ARMeilleure.CodeGen.Unwinding;
  5. using ARMeilleure.Common;
  6. using ARMeilleure.Diagnostics;
  7. using ARMeilleure.IntermediateRepresentation;
  8. using ARMeilleure.Translation;
  9. using System;
  10. using System.Collections.Generic;
  11. using System.Diagnostics;
  12. using System.IO;
  13. using System.Numerics;
  14. using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
  15. namespace ARMeilleure.CodeGen.X86
  16. {
  17. static class CodeGenerator
  18. {
  19. private const int PageSize = 0x1000;
  20. private const int StackGuardSize = 0x2000;
  21. private static Action<CodeGenContext, Operation>[] _instTable;
  22. static CodeGenerator()
  23. {
  24. _instTable = new Action<CodeGenContext, Operation>[EnumUtils.GetCount(typeof(Instruction))];
  25. Add(Instruction.Add, GenerateAdd);
  26. Add(Instruction.BitwiseAnd, GenerateBitwiseAnd);
  27. Add(Instruction.BitwiseExclusiveOr, GenerateBitwiseExclusiveOr);
  28. Add(Instruction.BitwiseNot, GenerateBitwiseNot);
  29. Add(Instruction.BitwiseOr, GenerateBitwiseOr);
  30. Add(Instruction.BranchIf, GenerateBranchIf);
  31. Add(Instruction.ByteSwap, GenerateByteSwap);
  32. Add(Instruction.Call, GenerateCall);
  33. Add(Instruction.Clobber, GenerateClobber);
  34. Add(Instruction.Compare, GenerateCompare);
  35. Add(Instruction.CompareAndSwap, GenerateCompareAndSwap);
  36. Add(Instruction.CompareAndSwap16, GenerateCompareAndSwap16);
  37. Add(Instruction.CompareAndSwap8, GenerateCompareAndSwap8);
  38. Add(Instruction.ConditionalSelect, GenerateConditionalSelect);
  39. Add(Instruction.ConvertI64ToI32, GenerateConvertI64ToI32);
  40. Add(Instruction.ConvertToFP, GenerateConvertToFP);
  41. Add(Instruction.Copy, GenerateCopy);
  42. Add(Instruction.CountLeadingZeros, GenerateCountLeadingZeros);
  43. Add(Instruction.Divide, GenerateDivide);
  44. Add(Instruction.DivideUI, GenerateDivideUI);
  45. Add(Instruction.Fill, GenerateFill);
  46. Add(Instruction.Load, GenerateLoad);
  47. Add(Instruction.Load16, GenerateLoad16);
  48. Add(Instruction.Load8, GenerateLoad8);
  49. Add(Instruction.Multiply, GenerateMultiply);
  50. Add(Instruction.Multiply64HighSI, GenerateMultiply64HighSI);
  51. Add(Instruction.Multiply64HighUI, GenerateMultiply64HighUI);
  52. Add(Instruction.Negate, GenerateNegate);
  53. Add(Instruction.Return, GenerateReturn);
  54. Add(Instruction.RotateRight, GenerateRotateRight);
  55. Add(Instruction.ShiftLeft, GenerateShiftLeft);
  56. Add(Instruction.ShiftRightSI, GenerateShiftRightSI);
  57. Add(Instruction.ShiftRightUI, GenerateShiftRightUI);
  58. Add(Instruction.SignExtend16, GenerateSignExtend16);
  59. Add(Instruction.SignExtend32, GenerateSignExtend32);
  60. Add(Instruction.SignExtend8, GenerateSignExtend8);
  61. Add(Instruction.Spill, GenerateSpill);
  62. Add(Instruction.SpillArg, GenerateSpillArg);
  63. Add(Instruction.StackAlloc, GenerateStackAlloc);
  64. Add(Instruction.Store, GenerateStore);
  65. Add(Instruction.Store16, GenerateStore16);
  66. Add(Instruction.Store8, GenerateStore8);
  67. Add(Instruction.Subtract, GenerateSubtract);
  68. Add(Instruction.Tailcall, GenerateTailcall);
  69. Add(Instruction.VectorCreateScalar, GenerateVectorCreateScalar);
  70. Add(Instruction.VectorExtract, GenerateVectorExtract);
  71. Add(Instruction.VectorExtract16, GenerateVectorExtract16);
  72. Add(Instruction.VectorExtract8, GenerateVectorExtract8);
  73. Add(Instruction.VectorInsert, GenerateVectorInsert);
  74. Add(Instruction.VectorInsert16, GenerateVectorInsert16);
  75. Add(Instruction.VectorInsert8, GenerateVectorInsert8);
  76. Add(Instruction.VectorOne, GenerateVectorOne);
  77. Add(Instruction.VectorZero, GenerateVectorZero);
  78. Add(Instruction.VectorZeroUpper64, GenerateVectorZeroUpper64);
  79. Add(Instruction.VectorZeroUpper96, GenerateVectorZeroUpper96);
  80. Add(Instruction.ZeroExtend16, GenerateZeroExtend16);
  81. Add(Instruction.ZeroExtend32, GenerateZeroExtend32);
  82. Add(Instruction.ZeroExtend8, GenerateZeroExtend8);
  83. }
  84. private static void Add(Instruction inst, Action<CodeGenContext, Operation> func)
  85. {
  86. _instTable[(int)inst] = func;
  87. }
        /// <summary>
        /// Runs the x86 backend pipeline on the given compiler context and produces
        /// the final compiled function: optimization, pre-allocation, SSA
        /// deconstruction, register allocation, then instruction emission.
        /// </summary>
        /// <param name="cctx">Compiler context holding the CFG and compiler options.</param>
        /// <returns>The emitted machine code together with unwind and relocation info.</returns>
        public static CompiledFunction Generate(CompilerContext cctx)
        {
            ControlFlowGraph cfg = cctx.Cfg;

            Logger.StartPass(PassName.Optimization);

            if (cctx.Options.HasFlag(CompilerOptions.Optimize))
            {
                // The generic optimizer pass only runs when the IR is in SSA form.
                if (cctx.Options.HasFlag(CompilerOptions.SsaForm))
                {
                    Optimizer.RunPass(cfg);
                }

                BlockPlacement.RunPass(cfg);
            }

            // x86-specific optimizations run regardless of the Optimize flag.
            X86Optimizer.RunPass(cfg);

            Logger.EndPass(PassName.Optimization, cfg);

            Logger.StartPass(PassName.PreAllocation);

            StackAllocator stackAlloc = new StackAllocator();

            // Pre-allocation also reports the largest call-argument area needed,
            // which sizes part of the stack frame.
            PreAllocator.RunPass(cctx, stackAlloc, out int maxCallArgs);

            Logger.EndPass(PassName.PreAllocation, cfg);

            Logger.StartPass(PassName.RegisterAllocation);

            // Register allocation operates on non-SSA form, so SSA must be
            // deconstructed first when it was used.
            if (cctx.Options.HasFlag(CompilerOptions.SsaForm))
            {
                Ssa.Deconstruct(cfg);
            }

            IRegisterAllocator regAlloc;

            // Select the allocator requested by the compiler options.
            if (cctx.Options.HasFlag(CompilerOptions.Lsra))
            {
                regAlloc = new LinearScanAllocator();
            }
            else
            {
                regAlloc = new HybridAllocator();
            }

            RegisterMasks regMasks = new RegisterMasks(
                CallingConvention.GetIntAvailableRegisters(),
                CallingConvention.GetVecAvailableRegisters(),
                CallingConvention.GetIntCallerSavedRegisters(),
                CallingConvention.GetVecCallerSavedRegisters(),
                CallingConvention.GetIntCalleeSavedRegisters(),
                CallingConvention.GetVecCalleeSavedRegisters());

            AllocationResult allocResult = regAlloc.RunPass(cfg, stackAlloc, regMasks);

            Logger.EndPass(PassName.RegisterAllocation, cfg);

            Logger.StartPass(PassName.CodeGeneration);

            bool relocatable = (cctx.Options & CompilerOptions.Relocatable) != 0;

            using MemoryStream stream = new();

            CodeGenContext context = new(stream, allocResult, maxCallArgs, cfg.Blocks.Count, relocatable);

            UnwindInfo unwindInfo = WritePrologue(context);

            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                context.EnterBlock(block);

                for (Operation node = block.Operations.First; node != default; node = node.ListNext)
                {
                    GenerateOperation(context, node);
                }

                if (block.SuccessorsCount == 0)
                {
                    // The only blocks which can have 0 successors are exit blocks.
                    Operation last = block.Operations.Last;

                    Debug.Assert(last.Instruction == Instruction.Tailcall ||
                                 last.Instruction == Instruction.Return);
                }
                else
                {
                    // Fall through to the next block when the layout allows it;
                    // otherwise emit an explicit jump to the first successor.
                    BasicBlock succ = block.GetSuccessor(0);

                    if (succ != block.ListNext)
                    {
                        context.JumpTo(succ);
                    }
                }
            }

            (byte[] code, RelocInfo relocInfo) = context.GetCode();

            Logger.EndPass(PassName.CodeGeneration);

            return new CompiledFunction(code, unwindInfo, relocInfo);
        }
        /// <summary>
        /// Emits machine code for a single IR operation. Extended (intrinsic)
        /// operations are handled inline per intrinsic type; everything else is
        /// dispatched through the instruction table.
        /// </summary>
        /// <exception cref="ArgumentException">No handler is registered for the instruction.</exception>
        private static void GenerateOperation(CodeGenContext context, Operation operation)
        {
            if (operation.Instruction == Instruction.Extended)
            {
                IntrinsicInfo info = IntrinsicTable.GetInfo(operation.Intrinsic);

                switch (info.Type)
                {
                    // Ordered scalar FP compare (COMISS/COMISD) materialized as a 0/1 integer.
                    case IntrinsicType.Comis_:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);

                        switch (operation.Intrinsic)
                        {
                            case Intrinsic.X86Comisdeq:
                                context.Assembler.Comisd(src1, src2);
                                context.Assembler.Setcc(dest, X86Condition.Equal);
                                break;

                            case Intrinsic.X86Comisdge:
                                context.Assembler.Comisd(src1, src2);
                                context.Assembler.Setcc(dest, X86Condition.AboveOrEqual);
                                break;

                            case Intrinsic.X86Comisdlt:
                                context.Assembler.Comisd(src1, src2);
                                context.Assembler.Setcc(dest, X86Condition.Below);
                                break;

                            case Intrinsic.X86Comisseq:
                                context.Assembler.Comiss(src1, src2);
                                context.Assembler.Setcc(dest, X86Condition.Equal);
                                break;

                            case Intrinsic.X86Comissge:
                                context.Assembler.Comiss(src1, src2);
                                context.Assembler.Setcc(dest, X86Condition.AboveOrEqual);
                                break;

                            case Intrinsic.X86Comisslt:
                                context.Assembler.Comiss(src1, src2);
                                context.Assembler.Setcc(dest, X86Condition.Below);
                                break;
                        }

                        // SETcc writes only the low byte; zero-extend to a full I32 result.
                        context.Assembler.Movzx8(dest, dest, OperandType.I32);

                        break;
                    }

                    // Read-modify-write of the MXCSR register through a stack slot.
                    case IntrinsicType.Mxcsr:
                    {
                        Operand offset = operation.GetSource(0);
                        Operand bits = operation.GetSource(1);

                        Debug.Assert(offset.Kind == OperandKind.Constant && bits.Kind == OperandKind.Constant);
                        Debug.Assert(offset.Type == OperandType.I32 && bits.Type == OperandType.I32);

                        // The slot lives above the outgoing call-arguments region.
                        int offs = offset.AsInt32() + context.CallArgsRegionSize;

                        Operand rsp = Register(X86Register.Rsp);

                        Operand memOp = MemoryOp(OperandType.I32, rsp, default, Multiplier.x1, offs);

                        Debug.Assert(HardwareCapabilities.SupportsSse || HardwareCapabilities.SupportsVexEncoding);

                        context.Assembler.Stmxcsr(memOp);

                        if (operation.Intrinsic == Intrinsic.X86Mxcsrmb)
                        {
                            // Mxcsrmb: set the requested bits.
                            context.Assembler.Or(memOp, bits, OperandType.I32);
                        }
                        else /* if (operation.Intrinsic == Intrinsic.X86Mxcsrub) */
                        {
                            // Mxcsrub: clear the requested bits.
                            Operand notBits = Const(~bits.AsInt32());

                            context.Assembler.And(memOp, notBits, OperandType.I32);
                        }

                        context.Assembler.Ldmxcsr(memOp);

                        break;
                    }

                    case IntrinsicType.PopCount:
                    {
                        Operand dest = operation.Destination;
                        Operand source = operation.GetSource(0);

                        EnsureSameType(dest, source);

                        Debug.Assert(dest.Type.IsInteger());

                        context.Assembler.Popcnt(dest, source, dest.Type);

                        break;
                    }

                    // Vector unary op: dest = op(source).
                    case IntrinsicType.Unary:
                    {
                        Operand dest = operation.Destination;
                        Operand source = operation.GetSource(0);

                        EnsureSameType(dest, source);

                        Debug.Assert(!dest.Type.IsInteger());

                        context.Assembler.WriteInstruction(info.Inst, dest, source);

                        break;
                    }

                    // Vector source, GPR destination (e.g. MOVD/MOVQ extraction).
                    case IntrinsicType.UnaryToGpr:
                    {
                        Operand dest = operation.Destination;
                        Operand source = operation.GetSource(0);

                        Debug.Assert(dest.Type.IsInteger() && !source.Type.IsInteger());

                        if (operation.Intrinsic == Intrinsic.X86Cvtsi2si)
                        {
                            if (dest.Type == OperandType.I32)
                            {
                                context.Assembler.Movd(dest, source); // int _mm_cvtsi128_si32(__m128i a)
                            }
                            else /* if (dest.Type == OperandType.I64) */
                            {
                                context.Assembler.Movq(dest, source); // __int64 _mm_cvtsi128_si64(__m128i a)
                            }
                        }
                        else
                        {
                            context.Assembler.WriteInstruction(info.Inst, dest, source, dest.Type);
                        }

                        break;
                    }

                    // Vector binary op. Without VEX the encoding is destructive,
                    // so dest must alias src1.
                    case IntrinsicType.Binary:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);

                        EnsureSameType(dest, src1);

                        if (!HardwareCapabilities.SupportsVexEncoding)
                        {
                            EnsureSameReg(dest, src1);
                        }

                        Debug.Assert(!dest.Type.IsInteger());
                        Debug.Assert(!src2.Type.IsInteger() || src2.Kind == OperandKind.Constant);

                        context.Assembler.WriteInstruction(info.Inst, dest, src1, src2);

                        break;
                    }

                    // Vector binary op whose second source is a GPR (e.g. shifts by count).
                    case IntrinsicType.BinaryGpr:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);

                        EnsureSameType(dest, src1);

                        if (!HardwareCapabilities.SupportsVexEncoding)
                        {
                            EnsureSameReg(dest, src1);
                        }

                        Debug.Assert(!dest.Type.IsInteger() && src2.Type.IsInteger());

                        context.Assembler.WriteInstruction(info.Inst, dest, src1, src2, src2.Type);

                        break;
                    }

                    // CRC32 is always destructive: dest accumulates over src2.
                    case IntrinsicType.Crc32:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);

                        EnsureSameReg(dest, src1);

                        Debug.Assert(dest.Type.IsInteger() && src1.Type.IsInteger() && src2.Type.IsInteger());

                        context.Assembler.WriteInstruction(info.Inst, dest, src2, dest.Type);

                        break;
                    }

                    // Vector binary op with an 8-bit immediate (e.g. shuffles).
                    case IntrinsicType.BinaryImm:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);

                        EnsureSameType(dest, src1);

                        if (!HardwareCapabilities.SupportsVexEncoding)
                        {
                            EnsureSameReg(dest, src1);
                        }

                        Debug.Assert(!dest.Type.IsInteger() && src2.Kind == OperandKind.Constant);

                        context.Assembler.WriteInstruction(info.Inst, dest, src1, src2.AsByte());

                        break;
                    }

                    // Vector ternary op. Blend instructions get their non-destructive
                    // VEX forms when available; the legacy forms implicitly use XMM0
                    // as the mask, hence the register index 0 assertion.
                    case IntrinsicType.Ternary:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);
                        Operand src3 = operation.GetSource(2);

                        EnsureSameType(dest, src1, src2, src3);

                        Debug.Assert(!dest.Type.IsInteger());

                        if (info.Inst == X86Instruction.Blendvpd && HardwareCapabilities.SupportsVexEncoding)
                        {
                            context.Assembler.WriteInstruction(X86Instruction.Vblendvpd, dest, src1, src2, src3);
                        }
                        else if (info.Inst == X86Instruction.Blendvps && HardwareCapabilities.SupportsVexEncoding)
                        {
                            context.Assembler.WriteInstruction(X86Instruction.Vblendvps, dest, src1, src2, src3);
                        }
                        else if (info.Inst == X86Instruction.Pblendvb && HardwareCapabilities.SupportsVexEncoding)
                        {
                            context.Assembler.WriteInstruction(X86Instruction.Vpblendvb, dest, src1, src2, src3);
                        }
                        else
                        {
                            EnsureSameReg(dest, src1);

                            Debug.Assert(src3.GetRegister().Index == 0);

                            context.Assembler.WriteInstruction(info.Inst, dest, src1, src2);
                        }

                        break;
                    }

                    // Vector ternary op with an 8-bit immediate as the third source.
                    case IntrinsicType.TernaryImm:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);
                        Operand src3 = operation.GetSource(2);

                        EnsureSameType(dest, src1, src2);

                        if (!HardwareCapabilities.SupportsVexEncoding)
                        {
                            EnsureSameReg(dest, src1);
                        }

                        Debug.Assert(!dest.Type.IsInteger() && src3.Kind == OperandKind.Constant);

                        context.Assembler.WriteInstruction(info.Inst, dest, src1, src2, src3.AsByte());

                        break;
                    }

                    // Fused multiply-add; requires VEX and dest aliasing src1.
                    case IntrinsicType.Fma:
                    {
                        Operand dest = operation.Destination;
                        Operand src1 = operation.GetSource(0);
                        Operand src2 = operation.GetSource(1);
                        Operand src3 = operation.GetSource(2);

                        Debug.Assert(HardwareCapabilities.SupportsVexEncoding);

                        Debug.Assert(dest.Kind == OperandKind.Register && src1.Kind == OperandKind.Register && src2.Kind == OperandKind.Register);
                        Debug.Assert(src3.Kind == OperandKind.Register || src3.Kind == OperandKind.Memory);

                        EnsureSameType(dest, src1, src2, src3);
                        Debug.Assert(dest.Type == OperandType.V128);

                        Debug.Assert(dest.Value == src1.Value);

                        context.Assembler.WriteInstruction(info.Inst, dest, src2, src3);

                        break;
                    }
                }
            }
            else
            {
                // Regular instruction: dispatch through the table built by the
                // static constructor.
                Action<CodeGenContext, Operation> func = _instTable[(int)operation.Instruction];

                if (func != null)
                {
                    func(context, operation);
                }
                else
                {
                    throw new ArgumentException($"Invalid instruction \"{operation.Instruction}\".");
                }
            }
        }
  392. private static void GenerateAdd(CodeGenContext context, Operation operation)
  393. {
  394. Operand dest = operation.Destination;
  395. Operand src1 = operation.GetSource(0);
  396. Operand src2 = operation.GetSource(1);
  397. if (dest.Type.IsInteger())
  398. {
  399. // If Destination and Source 1 Operands are the same, perform a standard add as there are no benefits to using LEA.
  400. if (dest.Kind == src1.Kind && dest.Value == src1.Value)
  401. {
  402. ValidateBinOp(dest, src1, src2);
  403. context.Assembler.Add(dest, src2, dest.Type);
  404. }
  405. else
  406. {
  407. EnsureSameType(dest, src1, src2);
  408. int offset;
  409. Operand index;
  410. if (src2.Kind == OperandKind.Constant)
  411. {
  412. offset = src2.AsInt32();
  413. index = default;
  414. }
  415. else
  416. {
  417. offset = 0;
  418. index = src2;
  419. }
  420. Operand memOp = MemoryOp(dest.Type, src1, index, Multiplier.x1, offset);
  421. context.Assembler.Lea(dest, memOp, dest.Type);
  422. }
  423. }
  424. else
  425. {
  426. ValidateBinOp(dest, src1, src2);
  427. if (dest.Type == OperandType.FP32)
  428. {
  429. context.Assembler.Addss(dest, src1, src2);
  430. }
  431. else /* if (dest.Type == OperandType.FP64) */
  432. {
  433. context.Assembler.Addsd(dest, src1, src2);
  434. }
  435. }
  436. }
  437. private static void GenerateBitwiseAnd(CodeGenContext context, Operation operation)
  438. {
  439. Operand dest = operation.Destination;
  440. Operand src1 = operation.GetSource(0);
  441. Operand src2 = operation.GetSource(1);
  442. ValidateBinOp(dest, src1, src2);
  443. Debug.Assert(dest.Type.IsInteger());
  444. // Note: GenerateCompareCommon makes the assumption that BitwiseAnd will emit only a single `and`
  445. // instruction.
  446. context.Assembler.And(dest, src2, dest.Type);
  447. }
  448. private static void GenerateBitwiseExclusiveOr(CodeGenContext context, Operation operation)
  449. {
  450. Operand dest = operation.Destination;
  451. Operand src1 = operation.GetSource(0);
  452. Operand src2 = operation.GetSource(1);
  453. ValidateBinOp(dest, src1, src2);
  454. if (dest.Type.IsInteger())
  455. {
  456. context.Assembler.Xor(dest, src2, dest.Type);
  457. }
  458. else
  459. {
  460. context.Assembler.Xorps(dest, src1, src2);
  461. }
  462. }
  463. private static void GenerateBitwiseNot(CodeGenContext context, Operation operation)
  464. {
  465. Operand dest = operation.Destination;
  466. Operand source = operation.GetSource(0);
  467. ValidateUnOp(dest, source);
  468. Debug.Assert(dest.Type.IsInteger());
  469. context.Assembler.Not(dest);
  470. }
  471. private static void GenerateBitwiseOr(CodeGenContext context, Operation operation)
  472. {
  473. Operand dest = operation.Destination;
  474. Operand src1 = operation.GetSource(0);
  475. Operand src2 = operation.GetSource(1);
  476. ValidateBinOp(dest, src1, src2);
  477. Debug.Assert(dest.Type.IsInteger());
  478. context.Assembler.Or(dest, src2, dest.Type);
  479. }
  480. private static void GenerateBranchIf(CodeGenContext context, Operation operation)
  481. {
  482. Operand comp = operation.GetSource(2);
  483. Debug.Assert(comp.Kind == OperandKind.Constant);
  484. var cond = ((Comparison)comp.AsInt32()).ToX86Condition();
  485. GenerateCompareCommon(context, operation);
  486. context.JumpTo(cond, context.CurrBlock.GetSuccessor(1));
  487. }
  488. private static void GenerateByteSwap(CodeGenContext context, Operation operation)
  489. {
  490. Operand dest = operation.Destination;
  491. Operand source = operation.GetSource(0);
  492. ValidateUnOp(dest, source);
  493. Debug.Assert(dest.Type.IsInteger());
  494. context.Assembler.Bswap(dest);
  495. }
  496. private static void GenerateCall(CodeGenContext context, Operation operation)
  497. {
  498. context.Assembler.Call(operation.GetSource(0));
  499. }
  500. private static void GenerateClobber(CodeGenContext context, Operation operation)
  501. {
  502. // This is only used to indicate that a register is clobbered to the
  503. // register allocator, we don't need to produce any code.
  504. }
  505. private static void GenerateCompare(CodeGenContext context, Operation operation)
  506. {
  507. Operand dest = operation.Destination;
  508. Operand comp = operation.GetSource(2);
  509. Debug.Assert(dest.Type == OperandType.I32);
  510. Debug.Assert(comp.Kind == OperandKind.Constant);
  511. var cond = ((Comparison)comp.AsInt32()).ToX86Condition();
  512. GenerateCompareCommon(context, operation);
  513. context.Assembler.Setcc(dest, cond);
  514. context.Assembler.Movzx8(dest, dest, OperandType.I32);
  515. }
        /// <summary>
        /// Emits the flag-setting part of a comparison, shared by Compare and
        /// BranchIf: sets the status flags for src1 relative to src2. Comparisons
        /// against zero use TEST, and are elided entirely when the flags were
        /// already set by an immediately preceding `and` on the same register.
        /// </summary>
        private static void GenerateCompareCommon(CodeGenContext context, Operation operation)
        {
            Operand src1 = operation.GetSource(0);
            Operand src2 = operation.GetSource(1);

            EnsureSameType(src1, src2);

            Debug.Assert(src1.Type.IsInteger());

            if (src2.Kind == OperandKind.Constant && src2.Value == 0)
            {
                if (MatchOperation(operation.ListPrevious, Instruction.BitwiseAnd, src1.Type, src1.GetRegister()))
                {
                    // Intentionally empty: no instruction is emitted on this path.
                    //
                    // Since the `test` and `and` instruction set the status flags in the same way, we can omit the
                    // `test r,r` instruction when it is immediately preceded by an `and r,*` instruction.
                    // (This relies on GenerateBitwiseAnd emitting exactly one `and`.)
                    //
                    // For example:
                    //
                    //  and eax, 0x3
                    //  test eax, eax
                    //  jz .L0
                    //
                    // =>
                    //
                    //  and eax, 0x3
                    //  jz .L0
                }
                else
                {
                    context.Assembler.Test(src1, src1, src1.Type);
                }
            }
            else
            {
                context.Assembler.Cmp(src1, src2, src1.Type);
            }
        }
  550. private static void GenerateCompareAndSwap(CodeGenContext context, Operation operation)
  551. {
  552. Operand src1 = operation.GetSource(0);
  553. if (operation.SourcesCount == 5) // CompareAndSwap128 has 5 sources, compared to CompareAndSwap64/32's 3.
  554. {
  555. Operand memOp = MemoryOp(OperandType.I64, src1);
  556. context.Assembler.Cmpxchg16b(memOp);
  557. }
  558. else
  559. {
  560. Operand src2 = operation.GetSource(1);
  561. Operand src3 = operation.GetSource(2);
  562. EnsureSameType(src2, src3);
  563. Operand memOp = MemoryOp(src3.Type, src1);
  564. context.Assembler.Cmpxchg(memOp, src3);
  565. }
  566. }
  567. private static void GenerateCompareAndSwap16(CodeGenContext context, Operation operation)
  568. {
  569. Operand src1 = operation.GetSource(0);
  570. Operand src2 = operation.GetSource(1);
  571. Operand src3 = operation.GetSource(2);
  572. EnsureSameType(src2, src3);
  573. Operand memOp = MemoryOp(src3.Type, src1);
  574. context.Assembler.Cmpxchg16(memOp, src3);
  575. }
  576. private static void GenerateCompareAndSwap8(CodeGenContext context, Operation operation)
  577. {
  578. Operand src1 = operation.GetSource(0);
  579. Operand src2 = operation.GetSource(1);
  580. Operand src3 = operation.GetSource(2);
  581. EnsureSameType(src2, src3);
  582. Operand memOp = MemoryOp(src3.Type, src1);
  583. context.Assembler.Cmpxchg8(memOp, src3);
  584. }
  585. private static void GenerateConditionalSelect(CodeGenContext context, Operation operation)
  586. {
  587. Operand dest = operation.Destination;
  588. Operand src1 = operation.GetSource(0);
  589. Operand src2 = operation.GetSource(1);
  590. Operand src3 = operation.GetSource(2);
  591. EnsureSameReg (dest, src3);
  592. EnsureSameType(dest, src2, src3);
  593. Debug.Assert(dest.Type.IsInteger());
  594. Debug.Assert(src1.Type == OperandType.I32);
  595. context.Assembler.Test (src1, src1, src1.Type);
  596. context.Assembler.Cmovcc(dest, src2, dest.Type, X86Condition.NotEqual);
  597. }
  598. private static void GenerateConvertI64ToI32(CodeGenContext context, Operation operation)
  599. {
  600. Operand dest = operation.Destination;
  601. Operand source = operation.GetSource(0);
  602. Debug.Assert(dest.Type == OperandType.I32 && source.Type == OperandType.I64);
  603. context.Assembler.Mov(dest, source, OperandType.I32);
  604. }
  605. private static void GenerateConvertToFP(CodeGenContext context, Operation operation)
  606. {
  607. Operand dest = operation.Destination;
  608. Operand source = operation.GetSource(0);
  609. Debug.Assert(dest.Type == OperandType.FP32 || dest.Type == OperandType.FP64);
  610. if (dest.Type == OperandType.FP32)
  611. {
  612. Debug.Assert(source.Type.IsInteger() || source.Type == OperandType.FP64);
  613. if (source.Type.IsInteger())
  614. {
  615. context.Assembler.Xorps (dest, dest, dest);
  616. context.Assembler.Cvtsi2ss(dest, dest, source, source.Type);
  617. }
  618. else /* if (source.Type == OperandType.FP64) */
  619. {
  620. context.Assembler.Cvtsd2ss(dest, dest, source);
  621. GenerateZeroUpper96(context, dest, dest);
  622. }
  623. }
  624. else /* if (dest.Type == OperandType.FP64) */
  625. {
  626. Debug.Assert(source.Type.IsInteger() || source.Type == OperandType.FP32);
  627. if (source.Type.IsInteger())
  628. {
  629. context.Assembler.Xorps (dest, dest, dest);
  630. context.Assembler.Cvtsi2sd(dest, dest, source, source.Type);
  631. }
  632. else /* if (source.Type == OperandType.FP32) */
  633. {
  634. context.Assembler.Cvtss2sd(dest, dest, source);
  635. GenerateZeroUpper64(context, dest, dest);
  636. }
  637. }
  638. }
// Copies the source operand into the destination, picking the cheapest
// encoding for the operand class (XOR-zeroing, MOV, or MOVDQU).
private static void GenerateCopy(CodeGenContext context, Operation operation)
{
    Operand dest = operation.Destination;
    Operand source = operation.GetSource(0);

    EnsureSameType(dest, source);

    // Vector destinations can't take an inline constant source here.
    Debug.Assert(dest.Type.IsInteger() || source.Kind != OperandKind.Constant);

    // Moves to the same register are useless.
    if (dest.Kind == source.Kind && dest.Value == source.Value)
    {
        return;
    }

    if (dest.Kind == OperandKind.Register &&
        source.Kind == OperandKind.Constant && source.Value == 0)
    {
        // Assemble "mov reg, 0" as "xor reg, reg" as the later is more efficient.
        // The 32-bit form suffices: writing a 32-bit GPR zero-extends into
        // the full 64-bit register.
        context.Assembler.Xor(dest, dest, OperandType.I32);
    }
    else if (dest.Type.IsInteger())
    {
        context.Assembler.Mov(dest, source, dest.Type);
    }
    else
    {
        // 128-bit vector copy.
        context.Assembler.Movdqu(dest, source);
    }
}
// Counts leading zero bits via BSR, patching up the zero-input case that
// BSR leaves undefined.
private static void GenerateCountLeadingZeros(CodeGenContext context, Operation operation)
{
    Operand dest = operation.Destination;
    Operand source = operation.GetSource(0);

    EnsureSameType(dest, source);

    Debug.Assert(dest.Type.IsInteger());

    context.Assembler.Bsr(dest, source, dest.Type);

    int operandSize = dest.Type == OperandType.I32 ? 32 : 64;
    int operandMask = operandSize - 1;

    // When the input operand is 0, the result is undefined, however the
    // ZF flag is set. We are supposed to return the operand size on that
    // case. So, add an additional jump to handle that case, by moving the
    // operand size constant to the destination register.
    // (operandSize | operandMask is used so the final XOR with operandMask
    // below turns it back into operandSize.)
    context.JumpToNear(X86Condition.NotEqual);

    context.Assembler.Mov(dest, Const(operandSize | operandMask), OperandType.I32);

    context.JumpHere();

    // BSR returns the zero based index of the last bit set on the operand,
    // starting from the least significant bit. However we are supposed to
    // return the number of 0 bits on the high end. So, we invert the result
    // of the BSR using XOR to get the correct value.
    context.Assembler.Xor(dest, Const(operandMask), OperandType.I32);
}
// Emits signed division: integer division via the IDIV idiom (dividend
// implicitly in EDX:EAX / RDX:RAX), floating point via DIVSS/DIVSD.
private static void GenerateDivide(CodeGenContext context, Operation operation)
{
    Operand dest = operation.Destination;
    Operand dividend = operation.GetSource(0);
    Operand divisor = operation.GetSource(1);

    if (!dest.Type.IsInteger())
    {
        ValidateBinOp(dest, dividend, divisor);
    }

    if (dest.Type.IsInteger())
    {
        // NOTE(review): for the integer path the divisor is taken from the
        // third source — presumably placed there by an earlier pass; confirm
        // against the pre-allocator.
        divisor = operation.GetSource(2);

        EnsureSameType(dest, divisor);

        // Sign-extend the dividend in EAX/RAX into EDX/RDX as IDIV requires.
        if (divisor.Type == OperandType.I32)
        {
            context.Assembler.Cdq();
        }
        else
        {
            context.Assembler.Cqo();
        }

        context.Assembler.Idiv(divisor);
    }
    else if (dest.Type == OperandType.FP32)
    {
        context.Assembler.Divss(dest, dividend, divisor);
    }
    else /* if (dest.Type == OperandType.FP64) */
    {
        context.Assembler.Divsd(dest, dividend, divisor);
    }
}
  719. private static void GenerateDivideUI(CodeGenContext context, Operation operation)
  720. {
  721. Operand divisor = operation.GetSource(2);
  722. Operand rdx = Register(X86Register.Rdx);
  723. Debug.Assert(divisor.Type.IsInteger());
  724. context.Assembler.Xor(rdx, rdx, OperandType.I32);
  725. context.Assembler.Div(divisor);
  726. }
  727. private static void GenerateFill(CodeGenContext context, Operation operation)
  728. {
  729. Operand dest = operation.Destination;
  730. Operand offset = operation.GetSource(0);
  731. Debug.Assert(offset.Kind == OperandKind.Constant);
  732. int offs = offset.AsInt32() + context.CallArgsRegionSize;
  733. Operand rsp = Register(X86Register.Rsp);
  734. Operand memOp = MemoryOp(dest.Type, rsp, default, Multiplier.x1, offs);
  735. GenerateLoad(context, memOp, dest);
  736. }
  737. private static void GenerateLoad(CodeGenContext context, Operation operation)
  738. {
  739. Operand value = operation.Destination;
  740. Operand address = Memory(operation.GetSource(0), value.Type);
  741. GenerateLoad(context, address, value);
  742. }
  743. private static void GenerateLoad16(CodeGenContext context, Operation operation)
  744. {
  745. Operand value = operation.Destination;
  746. Operand address = Memory(operation.GetSource(0), value.Type);
  747. Debug.Assert(value.Type.IsInteger());
  748. context.Assembler.Movzx16(value, address, value.Type);
  749. }
  750. private static void GenerateLoad8(CodeGenContext context, Operation operation)
  751. {
  752. Operand value = operation.Destination;
  753. Operand address = Memory(operation.GetSource(0), value.Type);
  754. Debug.Assert(value.Type.IsInteger());
  755. context.Assembler.Movzx8(value, address, value.Type);
  756. }
// Emits multiplication: IMUL for integers (three-operand immediate form when
// the multiplier is constant, destructive two-operand form otherwise),
// MULSS/MULSD for floating point.
private static void GenerateMultiply(CodeGenContext context, Operation operation)
{
    Operand dest = operation.Destination;
    Operand src1 = operation.GetSource(0);
    Operand src2 = operation.GetSource(1);

    // The reg, r/m, imm form of IMUL doesn't require dest == src1; all other
    // forms used here are destructive on the first operand.
    if (src2.Kind != OperandKind.Constant)
    {
        EnsureSameReg(dest, src1);
    }

    EnsureSameType(dest, src1, src2);

    if (dest.Type.IsInteger())
    {
        if (src2.Kind == OperandKind.Constant)
        {
            context.Assembler.Imul(dest, src1, src2, dest.Type);
        }
        else
        {
            context.Assembler.Imul(dest, src2, dest.Type);
        }
    }
    else if (dest.Type == OperandType.FP32)
    {
        context.Assembler.Mulss(dest, src1, src2);
    }
    else /* if (dest.Type == OperandType.FP64) */
    {
        context.Assembler.Mulsd(dest, src1, src2);
    }
}
  787. private static void GenerateMultiply64HighSI(CodeGenContext context, Operation operation)
  788. {
  789. Operand source = operation.GetSource(1);
  790. Debug.Assert(source.Type == OperandType.I64);
  791. context.Assembler.Imul(source);
  792. }
  793. private static void GenerateMultiply64HighUI(CodeGenContext context, Operation operation)
  794. {
  795. Operand source = operation.GetSource(1);
  796. Debug.Assert(source.Type == OperandType.I64);
  797. context.Assembler.Mul(source);
  798. }
  799. private static void GenerateNegate(CodeGenContext context, Operation operation)
  800. {
  801. Operand dest = operation.Destination;
  802. Operand source = operation.GetSource(0);
  803. ValidateUnOp(dest, source);
  804. Debug.Assert(dest.Type.IsInteger());
  805. context.Assembler.Neg(dest);
  806. }
  807. private static void GenerateReturn(CodeGenContext context, Operation operation)
  808. {
  809. WriteEpilogue(context);
  810. context.Assembler.Return();
  811. }
  812. private static void GenerateRotateRight(CodeGenContext context, Operation operation)
  813. {
  814. Operand dest = operation.Destination;
  815. Operand src1 = operation.GetSource(0);
  816. Operand src2 = operation.GetSource(1);
  817. ValidateShift(dest, src1, src2);
  818. context.Assembler.Ror(dest, src2, dest.Type);
  819. }
  820. private static void GenerateShiftLeft(CodeGenContext context, Operation operation)
  821. {
  822. Operand dest = operation.Destination;
  823. Operand src1 = operation.GetSource(0);
  824. Operand src2 = operation.GetSource(1);
  825. ValidateShift(dest, src1, src2);
  826. context.Assembler.Shl(dest, src2, dest.Type);
  827. }
  828. private static void GenerateShiftRightSI(CodeGenContext context, Operation operation)
  829. {
  830. Operand dest = operation.Destination;
  831. Operand src1 = operation.GetSource(0);
  832. Operand src2 = operation.GetSource(1);
  833. ValidateShift(dest, src1, src2);
  834. context.Assembler.Sar(dest, src2, dest.Type);
  835. }
  836. private static void GenerateShiftRightUI(CodeGenContext context, Operation operation)
  837. {
  838. Operand dest = operation.Destination;
  839. Operand src1 = operation.GetSource(0);
  840. Operand src2 = operation.GetSource(1);
  841. ValidateShift(dest, src1, src2);
  842. context.Assembler.Shr(dest, src2, dest.Type);
  843. }
  844. private static void GenerateSignExtend16(CodeGenContext context, Operation operation)
  845. {
  846. Operand dest = operation.Destination;
  847. Operand source = operation.GetSource(0);
  848. Debug.Assert(dest.Type.IsInteger() && source.Type.IsInteger());
  849. context.Assembler.Movsx16(dest, source, dest.Type);
  850. }
  851. private static void GenerateSignExtend32(CodeGenContext context, Operation operation)
  852. {
  853. Operand dest = operation.Destination;
  854. Operand source = operation.GetSource(0);
  855. Debug.Assert(dest.Type.IsInteger() && source.Type.IsInteger());
  856. context.Assembler.Movsx32(dest, source, dest.Type);
  857. }
  858. private static void GenerateSignExtend8(CodeGenContext context, Operation operation)
  859. {
  860. Operand dest = operation.Destination;
  861. Operand source = operation.GetSource(0);
  862. Debug.Assert(dest.Type.IsInteger() && source.Type.IsInteger());
  863. context.Assembler.Movsx8(dest, source, dest.Type);
  864. }
  865. private static void GenerateSpill(CodeGenContext context, Operation operation)
  866. {
  867. GenerateSpill(context, operation, context.CallArgsRegionSize);
  868. }
  869. private static void GenerateSpillArg(CodeGenContext context, Operation operation)
  870. {
  871. GenerateSpill(context, operation, 0);
  872. }
  873. private static void GenerateSpill(CodeGenContext context, Operation operation, int baseOffset)
  874. {
  875. Operand offset = operation.GetSource(0);
  876. Operand source = operation.GetSource(1);
  877. Debug.Assert(offset.Kind == OperandKind.Constant);
  878. int offs = offset.AsInt32() + baseOffset;
  879. Operand rsp = Register(X86Register.Rsp);
  880. Operand memOp = MemoryOp(source.Type, rsp, default, Multiplier.x1, offs);
  881. GenerateStore(context, memOp, source);
  882. }
  883. private static void GenerateStackAlloc(CodeGenContext context, Operation operation)
  884. {
  885. Operand dest = operation.Destination;
  886. Operand offset = operation.GetSource(0);
  887. Debug.Assert(offset.Kind == OperandKind.Constant);
  888. int offs = offset.AsInt32() + context.CallArgsRegionSize;
  889. Operand rsp = Register(X86Register.Rsp);
  890. Operand memOp = MemoryOp(OperandType.I64, rsp, default, Multiplier.x1, offs);
  891. context.Assembler.Lea(dest, memOp, OperandType.I64);
  892. }
  893. private static void GenerateStore(CodeGenContext context, Operation operation)
  894. {
  895. Operand value = operation.GetSource(1);
  896. Operand address = Memory(operation.GetSource(0), value.Type);
  897. GenerateStore(context, address, value);
  898. }
  899. private static void GenerateStore16(CodeGenContext context, Operation operation)
  900. {
  901. Operand value = operation.GetSource(1);
  902. Operand address = Memory(operation.GetSource(0), value.Type);
  903. Debug.Assert(value.Type.IsInteger());
  904. context.Assembler.Mov16(address, value);
  905. }
  906. private static void GenerateStore8(CodeGenContext context, Operation operation)
  907. {
  908. Operand value = operation.GetSource(1);
  909. Operand address = Memory(operation.GetSource(0), value.Type);
  910. Debug.Assert(value.Type.IsInteger());
  911. context.Assembler.Mov8(address, value);
  912. }
  913. private static void GenerateSubtract(CodeGenContext context, Operation operation)
  914. {
  915. Operand dest = operation.Destination;
  916. Operand src1 = operation.GetSource(0);
  917. Operand src2 = operation.GetSource(1);
  918. ValidateBinOp(dest, src1, src2);
  919. if (dest.Type.IsInteger())
  920. {
  921. context.Assembler.Sub(dest, src2, dest.Type);
  922. }
  923. else if (dest.Type == OperandType.FP32)
  924. {
  925. context.Assembler.Subss(dest, src1, src2);
  926. }
  927. else /* if (dest.Type == OperandType.FP64) */
  928. {
  929. context.Assembler.Subsd(dest, src1, src2);
  930. }
  931. }
  932. private static void GenerateTailcall(CodeGenContext context, Operation operation)
  933. {
  934. WriteEpilogue(context);
  935. context.Assembler.Jmp(operation.GetSource(0));
  936. }
  937. private static void GenerateVectorCreateScalar(CodeGenContext context, Operation operation)
  938. {
  939. Operand dest = operation.Destination;
  940. Operand source = operation.GetSource(0);
  941. Debug.Assert(!dest.Type.IsInteger() && source.Type.IsInteger());
  942. if (source.Type == OperandType.I32)
  943. {
  944. context.Assembler.Movd(dest, source); // (__m128i _mm_cvtsi32_si128(int a))
  945. }
  946. else /* if (source.Type == OperandType.I64) */
  947. {
  948. context.Assembler.Movq(dest, source); // (__m128i _mm_cvtsi64_si128(__int64 a))
  949. }
  950. }
// Extracts one element of a 128-bit vector into a scalar destination.
// Prefers PEXTRD/PEXTRQ (SSE4.1); otherwise falls back to shuffle+move
// sequences that restore the source vector afterwards.
private static void GenerateVectorExtract(CodeGenContext context, Operation operation)
{
    Operand dest = operation.Destination; //Value
    Operand src1 = operation.GetSource(0); //Vector
    Operand src2 = operation.GetSource(1); //Index

    Debug.Assert(src1.Type == OperandType.V128);
    Debug.Assert(src2.Kind == OperandKind.Constant);

    byte index = src2.AsByte();

    // The index must address a whole element of the destination's width.
    Debug.Assert(index < OperandType.V128.GetSizeInBytes() / dest.Type.GetSizeInBytes());

    if (dest.Type == OperandType.I32)
    {
        if (index == 0)
        {
            // MOVD already reads lane 0.
            context.Assembler.Movd(dest, src1);
        }
        else if (HardwareCapabilities.SupportsSse41)
        {
            context.Assembler.Pextrd(dest, src1, index);
        }
        else
        {
            // SSE2 fallback: rotate the wanted lane into position 0, read it
            // with MOVD, then rotate back to restore src1.
            int mask0 = 0b11_10_01_00;
            int mask1 = 0b11_10_01_00;

            mask0 = BitUtils.RotateRight(mask0, index * 2, 8);
            mask1 = BitUtils.RotateRight(mask1, 8 - index * 2, 8);

            context.Assembler.Pshufd(src1, src1, (byte)mask0);
            context.Assembler.Movd  (dest, src1);
            context.Assembler.Pshufd(src1, src1, (byte)mask1);
        }
    }
    else if (dest.Type == OperandType.I64)
    {
        if (index == 0)
        {
            context.Assembler.Movq(dest, src1);
        }
        else if (HardwareCapabilities.SupportsSse41)
        {
            context.Assembler.Pextrq(dest, src1, index);
        }
        else
        {
            // Swap the two quadwords, read the (now) low one, swap back.
            const byte mask = 0b01_00_11_10;

            context.Assembler.Pshufd(src1, src1, mask);
            context.Assembler.Movq  (dest, src1);
            context.Assembler.Pshufd(src1, src1, mask);
        }
    }
    else
    {
        // Floating-point types.
        if ((index >= 2 && dest.Type == OperandType.FP32) ||
            (index == 1 && dest.Type == OperandType.FP64))
        {
            // Element lives in the upper 64 bits: move that half down first.
            context.Assembler.Movhlps(dest, dest, src1);
            context.Assembler.Movq  (dest, dest);
        }
        else
        {
            context.Assembler.Movq(dest, src1);
        }

        if (dest.Type == OperandType.FP32)
        {
            // 0xfc | (index & 1) selects the odd/even 32-bit lane into
            // position 0 and fills lanes 1-3 from the (zeroed) top lane.
            context.Assembler.Pshufd(dest, dest, (byte)(0xfc | (index & 1)));
        }
    }
}
  1018. private static void GenerateVectorExtract16(CodeGenContext context, Operation operation)
  1019. {
  1020. Operand dest = operation.Destination; //Value
  1021. Operand src1 = operation.GetSource(0); //Vector
  1022. Operand src2 = operation.GetSource(1); //Index
  1023. Debug.Assert(src1.Type == OperandType.V128);
  1024. Debug.Assert(src2.Kind == OperandKind.Constant);
  1025. byte index = src2.AsByte();
  1026. Debug.Assert(index < 8);
  1027. context.Assembler.Pextrw(dest, src1, index);
  1028. }
// Extracts a byte lane from a vector. Without SSE4.1's PEXTRB, reads the
// containing 16-bit lane with PEXTRW and isolates the requested byte.
private static void GenerateVectorExtract8(CodeGenContext context, Operation operation)
{
    Operand dest = operation.Destination; //Value
    Operand src1 = operation.GetSource(0); //Vector
    Operand src2 = operation.GetSource(1); //Index

    Debug.Assert(src1.Type == OperandType.V128);
    Debug.Assert(src2.Kind == OperandKind.Constant);

    byte index = src2.AsByte();

    Debug.Assert(index < 16);

    if (HardwareCapabilities.SupportsSse41)
    {
        context.Assembler.Pextrb(dest, src1, index);
    }
    else
    {
        // Read the word containing the byte.
        context.Assembler.Pextrw(dest, src1, (byte)(index >> 1));

        if ((index & 1) != 0)
        {
            // Odd byte: shift the word's high byte down.
            context.Assembler.Shr(dest, Const(8), OperandType.I32);
        }
        else
        {
            // Even byte: mask off the word's high byte.
            context.Assembler.Movzx8(dest, dest, OperandType.I32);
        }
    }
}
// Inserts a scalar value into one element of a 128-bit vector. Uses the
// SSE4.1 PINSRD/PINSRQ/INSERTPS forms when available, otherwise falls back
// to PINSRW/PSHUFD sequences that work on plain SSE2.
private static void GenerateVectorInsert(CodeGenContext context, Operation operation)
{
    Operand dest = operation.Destination;
    Operand src1 = operation.GetSource(0); //Vector
    Operand src2 = operation.GetSource(1); //Value
    Operand src3 = operation.GetSource(2); //Index

    // Without VEX three-operand encoding the insert is destructive, so the
    // destination must already hold the source vector.
    if (!HardwareCapabilities.SupportsVexEncoding)
    {
        EnsureSameReg(dest, src1);
    }

    Debug.Assert(src1.Type == OperandType.V128);
    Debug.Assert(src3.Kind == OperandKind.Constant);

    byte index = src3.AsByte();

    // SSE2 fallback for integer inserts: write the value 16 bits at a time
    // with PINSRW, rotating the source register to expose each next word.
    void InsertIntSse2(int words)
    {
        if (dest.GetRegister() != src1.GetRegister())
        {
            context.Assembler.Movdqu(dest, src1);
        }

        for (int word = 0; word < words; word++)
        {
            // Insert lower 16-bits.
            context.Assembler.Pinsrw(dest, dest, src2, (byte)(index * words + word));

            // Move next word down.
            context.Assembler.Ror(src2, Const(16), src2.Type);
        }
    }

    if (src2.Type == OperandType.I32)
    {
        Debug.Assert(index < 4);

        if (HardwareCapabilities.SupportsSse41)
        {
            context.Assembler.Pinsrd(dest, src1, src2, index);
        }
        else
        {
            InsertIntSse2(2);
        }
    }
    else if (src2.Type == OperandType.I64)
    {
        Debug.Assert(index < 2);

        if (HardwareCapabilities.SupportsSse41)
        {
            context.Assembler.Pinsrq(dest, src1, src2, index);
        }
        else
        {
            InsertIntSse2(4);
        }
    }
    else if (src2.Type == OperandType.FP32)
    {
        Debug.Assert(index < 4);

        if (index != 0)
        {
            if (HardwareCapabilities.SupportsSse41)
            {
                // INSERTPS takes the target lane in bits 4-5 of the immediate.
                context.Assembler.Insertps(dest, src1, src2, (byte)(index << 4));
            }
            else
            {
                if (src1.GetRegister() == src2.GetRegister())
                {
                    // Value already sits in lane 0 of the vector: shuffle with
                    // the target lane's selector cleared so it reads lane 0,
                    // leaving the other lanes in place.
                    int mask = 0b11_10_01_00;

                    mask &= ~(0b11 << index * 2);

                    context.Assembler.Pshufd(dest, src1, (byte)mask);
                }
                else
                {
                    int mask0 = 0b11_10_01_00;
                    int mask1 = 0b11_10_01_00;

                    mask0 = BitUtils.RotateRight(mask0, index * 2, 8);
                    mask1 = BitUtils.RotateRight(mask1, 8 - index * 2, 8);

                    context.Assembler.Pshufd(src1, src1, (byte)mask0); // Lane to be inserted in position 0.
                    context.Assembler.Movss (dest, src1, src2);        // dest[127:0] = src1[127:32] | src2[31:0]
                    context.Assembler.Pshufd(dest, dest, (byte)mask1); // Inserted lane in original position.

                    if (dest.GetRegister() != src1.GetRegister())
                    {
                        context.Assembler.Pshufd(src1, src1, (byte)mask1); // Restore src1.
                    }
                }
            }
        }
        else
        {
            // Lane 0: MOVSS merges the scalar straight in.
            context.Assembler.Movss(dest, src1, src2);
        }
    }
    else /* if (src2.Type == OperandType.FP64) */
    {
        Debug.Assert(index < 2);

        if (index != 0)
        {
            // Upper quadword: MOVLHPS puts src2's low half into dest's high half.
            context.Assembler.Movlhps(dest, src1, src2);
        }
        else
        {
            context.Assembler.Movsd(dest, src1, src2);
        }
    }
}
  1157. private static void GenerateVectorInsert16(CodeGenContext context, Operation operation)
  1158. {
  1159. Operand dest = operation.Destination;
  1160. Operand src1 = operation.GetSource(0); //Vector
  1161. Operand src2 = operation.GetSource(1); //Value
  1162. Operand src3 = operation.GetSource(2); //Index
  1163. if (!HardwareCapabilities.SupportsVexEncoding)
  1164. {
  1165. EnsureSameReg(dest, src1);
  1166. }
  1167. Debug.Assert(src1.Type == OperandType.V128);
  1168. Debug.Assert(src3.Kind == OperandKind.Constant);
  1169. byte index = src3.AsByte();
  1170. context.Assembler.Pinsrw(dest, src1, src2, index);
  1171. }
  1172. private static void GenerateVectorInsert8(CodeGenContext context, Operation operation)
  1173. {
  1174. Operand dest = operation.Destination;
  1175. Operand src1 = operation.GetSource(0); //Vector
  1176. Operand src2 = operation.GetSource(1); //Value
  1177. Operand src3 = operation.GetSource(2); //Index
  1178. // It's not possible to emulate this instruction without
  1179. // SSE 4.1 support without the use of a temporary register,
  1180. // so we instead handle that case on the pre-allocator when
  1181. // SSE 4.1 is not supported on the CPU.
  1182. Debug.Assert(HardwareCapabilities.SupportsSse41);
  1183. if (!HardwareCapabilities.SupportsVexEncoding)
  1184. {
  1185. EnsureSameReg(dest, src1);
  1186. }
  1187. Debug.Assert(src1.Type == OperandType.V128);
  1188. Debug.Assert(src3.Kind == OperandKind.Constant);
  1189. byte index = src3.AsByte();
  1190. context.Assembler.Pinsrb(dest, src1, src2, index);
  1191. }
  1192. private static void GenerateVectorOne(CodeGenContext context, Operation operation)
  1193. {
  1194. Operand dest = operation.Destination;
  1195. Debug.Assert(!dest.Type.IsInteger());
  1196. context.Assembler.Pcmpeqw(dest, dest, dest);
  1197. }
  1198. private static void GenerateVectorZero(CodeGenContext context, Operation operation)
  1199. {
  1200. Operand dest = operation.Destination;
  1201. Debug.Assert(!dest.Type.IsInteger());
  1202. context.Assembler.Xorps(dest, dest, dest);
  1203. }
  1204. private static void GenerateVectorZeroUpper64(CodeGenContext context, Operation operation)
  1205. {
  1206. Operand dest = operation.Destination;
  1207. Operand source = operation.GetSource(0);
  1208. Debug.Assert(dest.Type == OperandType.V128 && source.Type == OperandType.V128);
  1209. GenerateZeroUpper64(context, dest, source);
  1210. }
  1211. private static void GenerateVectorZeroUpper96(CodeGenContext context, Operation operation)
  1212. {
  1213. Operand dest = operation.Destination;
  1214. Operand source = operation.GetSource(0);
  1215. Debug.Assert(dest.Type == OperandType.V128 && source.Type == OperandType.V128);
  1216. GenerateZeroUpper96(context, dest, source);
  1217. }
  1218. private static void GenerateZeroExtend16(CodeGenContext context, Operation operation)
  1219. {
  1220. Operand dest = operation.Destination;
  1221. Operand source = operation.GetSource(0);
  1222. Debug.Assert(dest.Type.IsInteger() && source.Type.IsInteger());
  1223. context.Assembler.Movzx16(dest, source, OperandType.I32);
  1224. }
  1225. private static void GenerateZeroExtend32(CodeGenContext context, Operation operation)
  1226. {
  1227. Operand dest = operation.Destination;
  1228. Operand source = operation.GetSource(0);
  1229. Debug.Assert(dest.Type.IsInteger() && source.Type.IsInteger());
  1230. context.Assembler.Mov(dest, source, OperandType.I32);
  1231. }
  1232. private static void GenerateZeroExtend8(CodeGenContext context, Operation operation)
  1233. {
  1234. Operand dest = operation.Destination;
  1235. Operand source = operation.GetSource(0);
  1236. Debug.Assert(dest.Type.IsInteger() && source.Type.IsInteger());
  1237. context.Assembler.Movzx8(dest, source, OperandType.I32);
  1238. }
  1239. private static void GenerateLoad(CodeGenContext context, Operand address, Operand value)
  1240. {
  1241. switch (value.Type)
  1242. {
  1243. case OperandType.I32: context.Assembler.Mov (value, address, OperandType.I32); break;
  1244. case OperandType.I64: context.Assembler.Mov (value, address, OperandType.I64); break;
  1245. case OperandType.FP32: context.Assembler.Movd (value, address); break;
  1246. case OperandType.FP64: context.Assembler.Movq (value, address); break;
  1247. case OperandType.V128: context.Assembler.Movdqu(value, address); break;
  1248. default: Debug.Assert(false); break;
  1249. }
  1250. }
  1251. private static void GenerateStore(CodeGenContext context, Operand address, Operand value)
  1252. {
  1253. switch (value.Type)
  1254. {
  1255. case OperandType.I32: context.Assembler.Mov (address, value, OperandType.I32); break;
  1256. case OperandType.I64: context.Assembler.Mov (address, value, OperandType.I64); break;
  1257. case OperandType.FP32: context.Assembler.Movd (address, value); break;
  1258. case OperandType.FP64: context.Assembler.Movq (address, value); break;
  1259. case OperandType.V128: context.Assembler.Movdqu(address, value); break;
  1260. default: Debug.Assert(false); break;
  1261. }
  1262. }
  1263. private static void GenerateZeroUpper64(CodeGenContext context, Operand dest, Operand source)
  1264. {
  1265. context.Assembler.Movq(dest, source);
  1266. }
  1267. private static void GenerateZeroUpper96(CodeGenContext context, Operand dest, Operand source)
  1268. {
  1269. context.Assembler.Movq(dest, source);
  1270. context.Assembler.Pshufd(dest, dest, 0xfc);
  1271. }
  1272. private static bool MatchOperation(Operation node, Instruction inst, OperandType destType, Register destReg)
  1273. {
  1274. if (node == default || node.DestinationsCount == 0)
  1275. {
  1276. return false;
  1277. }
  1278. if (node.Instruction != inst)
  1279. {
  1280. return false;
  1281. }
  1282. Operand dest = node.Destination;
  1283. return dest.Kind == OperandKind.Register &&
  1284. dest.Type == destType &&
  1285. dest.GetRegister() == destReg;
  1286. }
  1287. [Conditional("DEBUG")]
  1288. private static void ValidateUnOp(Operand dest, Operand source)
  1289. {
  1290. EnsureSameReg (dest, source);
  1291. EnsureSameType(dest, source);
  1292. }
  1293. [Conditional("DEBUG")]
  1294. private static void ValidateBinOp(Operand dest, Operand src1, Operand src2)
  1295. {
  1296. EnsureSameReg (dest, src1);
  1297. EnsureSameType(dest, src1, src2);
  1298. }
  1299. [Conditional("DEBUG")]
  1300. private static void ValidateShift(Operand dest, Operand src1, Operand src2)
  1301. {
  1302. EnsureSameReg (dest, src1);
  1303. EnsureSameType(dest, src1);
  1304. Debug.Assert(dest.Type.IsInteger() && src2.Type == OperandType.I32);
  1305. }
  1306. private static void EnsureSameReg(Operand op1, Operand op2)
  1307. {
  1308. if (!op1.Type.IsInteger() && HardwareCapabilities.SupportsVexEncoding)
  1309. {
  1310. return;
  1311. }
  1312. Debug.Assert(op1.Kind == OperandKind.Register || op1.Kind == OperandKind.Memory);
  1313. Debug.Assert(op1.Kind == op2.Kind);
  1314. Debug.Assert(op1.Value == op2.Value);
  1315. }
  1316. private static void EnsureSameType(Operand op1, Operand op2)
  1317. {
  1318. Debug.Assert(op1.Type == op2.Type);
  1319. }
  1320. private static void EnsureSameType(Operand op1, Operand op2, Operand op3)
  1321. {
  1322. Debug.Assert(op1.Type == op2.Type);
  1323. Debug.Assert(op1.Type == op3.Type);
  1324. }
  1325. private static void EnsureSameType(Operand op1, Operand op2, Operand op3, Operand op4)
  1326. {
  1327. Debug.Assert(op1.Type == op2.Type);
  1328. Debug.Assert(op1.Type == op3.Type);
  1329. Debug.Assert(op1.Type == op4.Type);
  1330. }
// Emits the function prologue: pushes used callee-saved integer registers,
// reserves stack space (probing it first when it exceeds the guard size),
// and saves used callee-saved vector registers at the top of the reserved
// area. Returns unwind info recording the stream offset of each step.
private static UnwindInfo WritePrologue(CodeGenContext context)
{
    List<UnwindPushEntry> pushEntries = new List<UnwindPushEntry>();

    Operand rsp = Register(X86Register.Rsp);

    // Only save the callee-saved integer registers this function uses,
    // lowest register index first.
    int mask = CallingConvention.GetIntCalleeSavedRegisters() & context.AllocResult.IntUsedRegisters;

    while (mask != 0)
    {
        int bit = BitOperations.TrailingZeroCount(mask);

        context.Assembler.Push(Register((X86Register)bit));

        pushEntries.Add(new UnwindPushEntry(UnwindPseudoOp.PushReg, context.StreamOffset, regIndex: bit));

        mask &= ~(1 << bit);
    }

    // Reserved area = outgoing call arguments + spill slots + XMM save space.
    int reservedStackSize = context.CallArgsRegionSize + context.AllocResult.SpillRegionSize;

    reservedStackSize += context.XmmSaveRegionSize;

    // Large allocations must touch each page so the OS commits the guard
    // pages before RSP moves past them.
    if (reservedStackSize >= StackGuardSize)
    {
        GenerateInlineStackProbe(context, reservedStackSize);
    }

    if (reservedStackSize != 0)
    {
        context.Assembler.Sub(rsp, Const(reservedStackSize), OperandType.I64);

        pushEntries.Add(new UnwindPushEntry(UnwindPseudoOp.AllocStack, context.StreamOffset, stackOffsetOrAllocSize: reservedStackSize));
    }

    // Save used callee-saved vector registers, working down from the top of
    // the reserved area in 16-byte slots.
    int offset = reservedStackSize;

    mask = CallingConvention.GetVecCalleeSavedRegisters() & context.AllocResult.VecUsedRegisters;

    while (mask != 0)
    {
        int bit = BitOperations.TrailingZeroCount(mask);

        offset -= 16;

        Operand memOp = MemoryOp(OperandType.V128, rsp, default, Multiplier.x1, offset);

        context.Assembler.Movdqu(memOp, Xmm((X86Register)bit));

        pushEntries.Add(new UnwindPushEntry(UnwindPseudoOp.SaveXmm128, context.StreamOffset, bit, offset));

        mask &= ~(1 << bit);
    }

    return new UnwindInfo(pushEntries.ToArray(), context.StreamOffset);
}
// Emits the function epilogue, mirroring WritePrologue: restores callee-saved
// vector registers, releases the reserved stack area, then pops callee-saved
// integer registers in reverse order of the prologue's pushes.
private static void WriteEpilogue(CodeGenContext context)
{
    Operand rsp = Register(X86Register.Rsp);

    // Must match the size computed in WritePrologue exactly.
    int reservedStackSize = context.CallArgsRegionSize + context.AllocResult.SpillRegionSize;

    reservedStackSize += context.XmmSaveRegionSize;

    int offset = reservedStackSize;

    int mask = CallingConvention.GetVecCalleeSavedRegisters() & context.AllocResult.VecUsedRegisters;

    while (mask != 0)
    {
        int bit = BitOperations.TrailingZeroCount(mask);

        offset -= 16;

        Operand memOp = MemoryOp(OperandType.V128, rsp, default, Multiplier.x1, offset);

        context.Assembler.Movdqu(Xmm((X86Register)bit), memOp);

        mask &= ~(1 << bit);
    }

    if (reservedStackSize != 0)
    {
        context.Assembler.Add(rsp, Const(reservedStackSize), OperandType.I64);
    }

    mask = CallingConvention.GetIntCalleeSavedRegisters() & context.AllocResult.IntUsedRegisters;

    while (mask != 0)
    {
        // Iterate from the highest set bit so the pops undo the prologue's
        // lowest-bit-first pushes in reverse order.
        int bit = BitUtils.HighestBitSet(mask);

        context.Assembler.Pop(Register((X86Register)bit));

        mask &= ~(1 << bit);
    }
}
// Touches each page of a pending stack allocation of the given size so the
// OS commits them before RSP is moved past the guard pages.
private static void GenerateInlineStackProbe(CodeGenContext context, int size)
{
    // Windows does lazy stack allocation, and there are just 2
    // guard pages on the end of the stack. So, if the allocation
    // size we make is greater than this guard size, we must ensure
    // that the OS will map all pages that we'll use. We do that by
    // doing a dummy read on those pages, forcing a page fault and
    // the OS to map them. If they are already mapped, nothing happens.
    const int pageMask = PageSize - 1;

    // Round the size up to a whole number of pages.
    size = (size + pageMask) & ~pageMask;

    Operand rsp = Register(X86Register.Rsp);
    // Uses the integer return register as scratch — presumably free at this
    // point in the prologue; confirm against the register allocator.
    Operand temp = Register(CallingConvention.GetIntReturnRegister());

    for (int offset = PageSize; offset < size; offset += PageSize)
    {
        // Dummy read one word from each page below RSP.
        Operand memOp = MemoryOp(OperandType.I32, rsp, default, Multiplier.x1, -offset);

        context.Assembler.Mov(temp, memOp, OperandType.I32);
    }
}
  1412. private static Operand Memory(Operand operand, OperandType type)
  1413. {
  1414. if (operand.Kind == OperandKind.Memory)
  1415. {
  1416. return operand;
  1417. }
  1418. return MemoryOp(type, operand);
  1419. }
  1420. private static Operand Register(X86Register register, OperandType type = OperandType.I64)
  1421. {
  1422. return Operand.Factory.Register((int)register, RegisterType.Integer, type);
  1423. }
  1424. private static Operand Xmm(X86Register register)
  1425. {
  1426. return Operand.Factory.Register((int)register, RegisterType.Vector, OperandType.V128);
  1427. }
  1428. }
  1429. }