LinearScanAllocator.cs 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041
  1. using ARMeilleure.Common;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Linq;
  8. using System.Numerics;
  9. namespace ARMeilleure.CodeGen.RegisterAllocators
  10. {
  11. // Based on:
  12. // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
  13. // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
  14. class LinearScanAllocator : IRegisterAllocator
  15. {
        // Each node occupies two position slots: blocks start/end on even
        // positions, while defs/uses are recorded on odd positions (see
        // BuildIntervals, which adds 1 to the even operation position).
        private const int InstructionGap = 2;
        private const int InstructionGapMask = InstructionGap - 1;

        private const int RegistersCount = 16;

        // Positions where a basic block starts; split copies are not inserted at
        // these positions (edges are handled by InsertSplitCopiesAtEdges).
        private HashSet<int> _blockEdges;

        // Start/end position of each basic block, indexed by block index.
        private LiveRange[] _blockRanges;

        // Per-block live-in sets, indexed by block index.
        private BitMap[] _blockLiveIn;

        // All intervals: the first RegistersCount * 2 entries are the fixed
        // intervals for physical registers (integer and vector interleaved),
        // followed by one interval per local variable.
        private List<LiveInterval> _intervals;

        // Intervals before any splitting, indexed by operand id.
        private LiveInterval[] _parentIntervals;

        // Maps position / InstructionGap to the node at that position, together
        // with the list that owns it ((null, null) for empty-block placeholders).
        private List<(IntrusiveList<Node>, Node)> _operationNodes;

        // Total number of position slots (operations count * InstructionGap).
        private int _operationsCount;
  26. private class AllocationContext : IDisposable
  27. {
  28. public RegisterMasks Masks { get; }
  29. public StackAllocator StackAlloc { get; }
  30. public BitMap Active { get; }
  31. public BitMap Inactive { get; }
  32. public int IntUsedRegisters { get; set; }
  33. public int VecUsedRegisters { get; set; }
  34. public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
  35. {
  36. StackAlloc = stackAlloc;
  37. Masks = masks;
  38. BitMapPool.PrepareBitMapPool();
  39. Active = BitMapPool.Allocate(intervalsCount);
  40. Inactive = BitMapPool.Allocate(intervalsCount);
  41. }
  42. public void MoveActiveToInactive(int bit)
  43. {
  44. Move(Active, Inactive, bit);
  45. }
  46. public void MoveInactiveToActive(int bit)
  47. {
  48. Move(Inactive, Active, bit);
  49. }
  50. private static void Move(BitMap source, BitMap dest, int bit)
  51. {
  52. source.Clear(bit);
  53. dest.Set(bit);
  54. }
  55. public void Dispose()
  56. {
  57. BitMapPool.ResetBitMapPool();
  58. }
  59. }
        /// <summary>
        /// Runs the linear scan register allocation pass on the given control flow graph.
        /// </summary>
        /// <param name="cfg">Control flow graph to allocate registers on</param>
        /// <param name="stackAlloc">Stack allocator used to create spill slots</param>
        /// <param name="regMasks">Masks of the registers available for allocation</param>
        /// <returns>Used register masks and the total stack size consumed by spills</returns>
        public AllocationResult RunPass(
            ControlFlowGraph cfg,
            StackAllocator stackAlloc,
            RegisterMasks regMasks)
        {
            NumberLocals(cfg);

            using AllocationContext context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);

            BuildIntervals(cfg, context);

            for (int index = 0; index < _intervals.Count; index++)
            {
                LiveInterval current = _intervals[index];

                if (current.IsEmpty)
                {
                    continue;
                }

                // Fixed intervals represent physical registers; they are never
                // allocated, only marked active so they block their register.
                if (current.IsFixed)
                {
                    context.Active.Set(index);

                    if (current.Register.Type == RegisterType.Integer)
                    {
                        context.IntUsedRegisters |= 1 << current.Register.Index;
                    }
                    else /* if (interval.Register.Type == RegisterType.Vector) */
                    {
                        context.VecUsedRegisters |= 1 << current.Register.Index;
                    }

                    continue;
                }

                AllocateInterval(context, current, index);
            }

            // Skip the fixed intervals at the start of the list; rewrite the local
            // variables of every interval that ended up in a register.
            for (int index = RegistersCount * 2; index < _intervals.Count; index++)
            {
                if (!_intervals[index].IsSpilled)
                {
                    ReplaceLocalWithRegister(_intervals[index]);
                }
            }

            InsertSplitCopies();
            InsertSplitCopiesAtEdges(cfg);

            return new AllocationResult(context.IntUsedRegisters, context.VecUsedRegisters, context.StackAlloc.TotalSize);
        }
        /// <summary>
        /// Allocates a register for one interval, refreshing the active/inactive
        /// sets first and falling back to spilling when no register is free.
        /// </summary>
        /// <param name="context">Allocation context with the active/inactive sets</param>
        /// <param name="current">Interval being allocated</param>
        /// <param name="cIndex">Index of the interval on the intervals list</param>
        private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex)
        {
            // Check active intervals that already ended.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.GetEnd() < current.GetStart())
                {
                    // Expired: drop from the active set entirely.
                    context.Active.Clear(iIndex);
                }
                else if (!interval.Overlaps(current.GetStart()))
                {
                    // Still live later on, but has a lifetime hole here.
                    context.MoveActiveToInactive(iIndex);
                }
            }

            // Check inactive intervals that already ended or were reactivated.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.GetEnd() < current.GetStart())
                {
                    context.Inactive.Clear(iIndex);
                }
                else if (interval.Overlaps(current.GetStart()))
                {
                    context.MoveInactiveToActive(iIndex);
                }
            }

            // Cheap path first; only spill when no register is free long enough.
            if (!TryAllocateRegWithoutSpill(context, current, cIndex))
            {
                AllocateRegWithSpill(context, current, cIndex);
            }
        }
        /// <summary>
        /// Tries to give the interval a register without spilling anything.
        /// If the best register is only free for part of the lifetime, the interval
        /// is split there and the remainder is queued for later allocation.
        /// </summary>
        /// <returns>True if a register was assigned, false if spilling is required</returns>
        private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            int availableRegisters = context.Masks.GetAvailableRegisters(regType);

            // freePositions[reg] = position up to which reg is free:
            // 0 = not free at all, int.MaxValue = free for the whole lifetime.
            int[] freePositions = new int[RegistersCount];

            for (int index = 0; index < RegistersCount; index++)
            {
                if ((availableRegisters & (1 << index)) != 0)
                {
                    freePositions[index] = int.MaxValue;
                }
            }

            // Active intervals make their register unavailable outright.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.Register.Type == regType)
                {
                    freePositions[interval.Register.Index] = 0;
                }
            }

            // Inactive intervals only block their register from the first overlap on.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.Register.Type == regType)
                {
                    int overlapPosition = interval.GetOverlapPosition(current);

                    if (overlapPosition != LiveInterval.NotFound && freePositions[interval.Register.Index] > overlapPosition)
                    {
                        freePositions[interval.Register.Index] = overlapPosition;
                    }
                }
            }

            int selectedReg = GetHighestValueIndex(freePositions);

            int selectedNextUse = freePositions[selectedReg];

            // Intervals starts and ends at odd positions, unless they span an entire
            // block, in this case they will have ranges at a even position.
            // When a interval is loaded from the stack to a register, we can only
            // do the split at a odd position, because otherwise the split interval
            // that is inserted on the list to be processed may clobber a register
            // used by the instruction at the same position as the split.
            // The problem only happens when a interval ends exactly at this instruction,
            // because otherwise they would interfere, and the register wouldn't be selected.
            // When the interval is aligned and the above happens, there's no problem as
            // the instruction that is actually with the last use is the one
            // before that position.
            selectedNextUse &= ~InstructionGapMask;

            if (selectedNextUse <= current.GetStart())
            {
                // Register is not free even at the interval start; caller must spill.
                return false;
            }
            else if (selectedNextUse < current.GetEnd())
            {
                // Register is free only for a prefix of the lifetime: split there and
                // deal with the tail separately.
                Debug.Assert(selectedNextUse > current.GetStart(), "Trying to split interval at the start.");

                LiveInterval splitChild = current.Split(selectedNextUse);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild);
                }
                else
                {
                    // No uses left on the tail, it can live on the stack.
                    Spill(context, splitChild);
                }
            }

            current.Register = new Register(selectedReg, regType);

            if (regType == RegisterType.Integer)
            {
                context.IntUsedRegisters |= 1 << selectedReg;
            }
            else /* if (regType == RegisterType.Vector) */
            {
                context.VecUsedRegisters |= 1 << selectedReg;
            }

            context.Active.Set(cIndex);

            return true;
        }
        /// <summary>
        /// Allocates a register when none is fully free, spilling either the current
        /// interval (if everything else is used sooner) or the overlapping intervals
        /// holding the selected register.
        /// </summary>
        /// <param name="context">Allocation context with the active/inactive sets</param>
        /// <param name="current">Interval being allocated</param>
        /// <param name="cIndex">Index of the interval on the intervals list</param>
        private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            int availableRegisters = context.Masks.GetAvailableRegisters(regType);

            // usePositions[reg]     = first position where reg is used by another interval.
            // blockedPositions[reg] = first position where reg is taken by a fixed
            //                         interval, which cannot be spilled.
            int[] usePositions = new int[RegistersCount];
            int[] blockedPositions = new int[RegistersCount];

            for (int index = 0; index < RegistersCount; index++)
            {
                if ((availableRegisters & (1 << index)) != 0)
                {
                    usePositions[index] = int.MaxValue;

                    blockedPositions[index] = int.MaxValue;
                }
            }

            void SetUsePosition(int index, int position)
            {
                usePositions[index] = Math.Min(usePositions[index], position);
            }

            void SetBlockedPosition(int index, int position)
            {
                blockedPositions[index] = Math.Min(blockedPositions[index], position);

                // A blocked position is also a use position.
                SetUsePosition(index, position);
            }

            // Non-fixed intervals: record the next use of each register after the
            // start of the current interval.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register.Type == regType)
                {
                    int nextUse = interval.NextUseAfter(current.GetStart());

                    if (nextUse != -1)
                    {
                        SetUsePosition(interval.Register.Index, nextUse);
                    }
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register.Type == regType && interval.Overlaps(current))
                {
                    int nextUse = interval.NextUseAfter(current.GetStart());

                    if (nextUse != -1)
                    {
                        SetUsePosition(interval.Register.Index, nextUse);
                    }
                }
            }

            // Fixed intervals: active ones block their register immediately,
            // inactive ones block it from the first overlap position.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.IsFixed && interval.Register.Type == regType)
                {
                    SetBlockedPosition(interval.Register.Index, 0);
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.IsFixed && interval.Register.Type == regType)
                {
                    int overlapPosition = interval.GetOverlapPosition(current);

                    if (overlapPosition != LiveInterval.NotFound)
                    {
                        SetBlockedPosition(interval.Register.Index, overlapPosition);
                    }
                }
            }

            int selectedReg = GetHighestValueIndex(usePositions);

            int currentFirstUse = current.FirstUse();

            Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");

            if (usePositions[selectedReg] < currentFirstUse)
            {
                // All intervals on inactive and active are being used before current,
                // so spill the current interval.
                Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");

                LiveInterval splitChild = current.Split(currentFirstUse);

                Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                InsertInterval(splitChild);

                Spill(context, current);
            }
            else if (blockedPositions[selectedReg] > current.GetEnd())
            {
                // Spill made the register available for the entire current lifetime,
                // so we only need to split the intervals using the selected register.
                current.Register = new Register(selectedReg, regType);

                SplitAndSpillOverlappingIntervals(context, current);

                context.Active.Set(cIndex);
            }
            else
            {
                // There are conflicts even after spill due to the use of fixed registers
                // that can't be spilled, so we need to also split current at the point of
                // the first fixed register use.
                current.Register = new Register(selectedReg, regType);

                // Align the split to an even position (see TryAllocateRegWithoutSpill).
                int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;

                Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");

                LiveInterval splitChild = current.Split(splitPosition);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild);
                }
                else
                {
                    Spill(context, splitChild);
                }

                SplitAndSpillOverlappingIntervals(context, current);

                context.Active.Set(cIndex);
            }
        }
  320. private static int GetHighestValueIndex(int[] array)
  321. {
  322. int higuest = array[0];
  323. if (higuest == int.MaxValue)
  324. {
  325. return 0;
  326. }
  327. int selected = 0;
  328. for (int index = 1; index < array.Length; index++)
  329. {
  330. int current = array[index];
  331. if (higuest < current)
  332. {
  333. higuest = current;
  334. selected = index;
  335. if (current == int.MaxValue)
  336. {
  337. break;
  338. }
  339. }
  340. }
  341. return selected;
  342. }
        /// <summary>
        /// Splits and spills every non-fixed interval that holds the same register
        /// as the current interval and overlaps with it.
        /// </summary>
        private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current)
        {
            // Active intervals overlap by definition, no Overlaps check needed.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register)
                {
                    SplitAndSpillOverlappingInterval(context, current, interval);

                    // The interval was spilled or re-queued, so it no longer
                    // occupies the register.
                    context.Active.Clear(iIndex);
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
                {
                    SplitAndSpillOverlappingInterval(context, current, interval);

                    context.Inactive.Clear(iIndex);
                }
            }
        }
        /// <summary>
        /// Splits one overlapping interval at the start of the current interval,
        /// spilling the overlapping part and re-queueing any part that still has uses.
        /// </summary>
        private void SplitAndSpillOverlappingInterval(
            AllocationContext context,
            LiveInterval current,
            LiveInterval interval)
        {
            // If there's a next use after the start of the current interval,
            // we need to split the spilled interval twice, and re-insert it
            // on the "pending" list to ensure that it will get a new register
            // on that use position.
            int nextUse = interval.NextUseAfter(current.GetStart());

            LiveInterval splitChild;

            if (interval.GetStart() < current.GetStart())
            {
                splitChild = interval.Split(current.GetStart());
            }
            else
            {
                // Interval starts at or after current: spill/re-queue it whole.
                splitChild = interval;
            }

            if (nextUse != -1)
            {
                Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");

                if (nextUse > splitChild.GetStart())
                {
                    // Spill only the part before the next use; the remainder goes
                    // back on the pending list to get a register at that use.
                    LiveInterval right = splitChild.Split(nextUse);

                    Spill(context, splitChild);

                    splitChild = right;
                }

                InsertInterval(splitChild);
            }
            else
            {
                // No further uses: the whole remainder can live on the stack.
                Spill(context, splitChild);
            }
        }
        /// <summary>
        /// Inserts a split interval back into the pending intervals list, keeping
        /// the list sorted so intervals are processed in order.
        /// </summary>
        private void InsertInterval(LiveInterval interval)
        {
            Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
            Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
            Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");

            // Skip the fixed register intervals at the start of the list.
            int startIndex = RegistersCount * 2;

            // Null comparer: relies on LiveInterval's default ordering
            // (presumably by start position — confirm against LiveInterval).
            int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);

            if (insertIndex < 0)
            {
                insertIndex = ~insertIndex;
            }

            _intervals.Insert(insertIndex, interval);
        }
        /// <summary>
        /// Spills an interval with no remaining uses, assigning it a stack slot.
        /// </summary>
        private void Spill(AllocationContext context, LiveInterval interval)
        {
            Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
            Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");

            // We first check if any of the siblings were spilled, if so we can reuse
            // the stack offset. Otherwise, we allocate a new space on the stack.
            // This prevents stack-to-stack copies being necessary for a split interval.
            if (!interval.TrySpillWithSiblingOffset())
            {
                interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
            }
        }
  424. private void InsertSplitCopies()
  425. {
  426. Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();
  427. CopyResolver GetCopyResolver(int position)
  428. {
  429. CopyResolver copyResolver = new CopyResolver();
  430. if (copyResolvers.TryAdd(position, copyResolver))
  431. {
  432. return copyResolver;
  433. }
  434. return copyResolvers[position];
  435. }
  436. foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
  437. {
  438. LiveInterval previous = interval;
  439. foreach (LiveInterval splitChild in interval.SplitChilds())
  440. {
  441. int splitPosition = splitChild.GetStart();
  442. if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
  443. {
  444. GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
  445. }
  446. previous = splitChild;
  447. }
  448. }
  449. foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
  450. {
  451. CopyResolver copyResolver = kv.Value;
  452. if (!copyResolver.HasCopy)
  453. {
  454. continue;
  455. }
  456. int splitPosition = kv.Key;
  457. (IntrusiveList<Node> nodes, Node node) = GetOperationNode(splitPosition);
  458. Operation[] sequence = copyResolver.Sequence();
  459. nodes.AddBefore(node, sequence[0]);
  460. node = sequence[0];
  461. for (int index = 1; index < sequence.Length; index++)
  462. {
  463. nodes.AddAfter(node, sequence[index]);
  464. node = sequence[index];
  465. }
  466. }
  467. }
        /// <summary>
        /// Inserts resolving copies on control flow edges where a split interval is
        /// in different locations at the end of a block and the start of a successor,
        /// splitting critical edges when neither side can host the copies.
        /// </summary>
        private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
        {
            int blocksCount = cfg.Blocks.Count;

            // Blocks with an index at or past the original count were created by
            // earlier edge splits.
            bool IsSplitEdgeBlock(BasicBlock block)
            {
                return block.Index >= blocksCount;
            }

            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                if (IsSplitEdgeBlock(block))
                {
                    continue;
                }

                bool hasSingleOrNoSuccessor = block.SuccessorCount <= 1;

                for (int i = 0; i < block.SuccessorCount; i++)
                {
                    BasicBlock successor = block.GetSuccessor(i);

                    int succIndex = successor.Index;

                    // If the current node is a split node, then the actual successor node
                    // (the successor before the split) should be right after it.
                    if (IsSplitEdgeBlock(successor))
                    {
                        succIndex = successor.GetSuccessor(0).Index;
                    }

                    CopyResolver copyResolver = new CopyResolver();

                    foreach (int iIndex in _blockLiveIn[succIndex])
                    {
                        LiveInterval interval = _parentIntervals[iIndex];

                        if (!interval.IsSplit)
                        {
                            continue;
                        }

                        // Find the split children live at the end of this block and at
                        // the start of the successor; a copy is needed only when they
                        // are different children.
                        int lEnd = _blockRanges[block.Index].End - 1;
                        int rStart = _blockRanges[succIndex].Start;

                        LiveInterval left = interval.GetSplitChild(lEnd);
                        LiveInterval right = interval.GetSplitChild(rStart);

                        if (left != null && right != null && left != right)
                        {
                            copyResolver.AddSplit(left, right);
                        }
                    }

                    if (!copyResolver.HasCopy)
                    {
                        continue;
                    }

                    Operation[] sequence = copyResolver.Sequence();

                    if (hasSingleOrNoSuccessor)
                    {
                        // Single outgoing edge: append copies at the end of this block.
                        foreach (Operation operation in sequence)
                        {
                            block.Append(operation);
                        }
                    }
                    else if (successor.Predecessors.Count == 1)
                    {
                        // Single incoming edge: prepend copies at the successor start.
                        successor.Operations.AddFirst(sequence[0]);

                        Node prependNode = sequence[0];

                        for (int index = 1; index < sequence.Length; index++)
                        {
                            Operation operation = sequence[index];

                            successor.Operations.AddAfter(prependNode, operation);

                            prependNode = operation;
                        }
                    }
                    else
                    {
                        // Split the critical edge.
                        BasicBlock splitBlock = cfg.SplitEdge(block, successor);

                        foreach (Operation operation in sequence)
                        {
                            splitBlock.Append(operation);
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Replaces every use and definition of the interval's local variable with
        /// its assigned register, including locals used inside memory operands.
        /// </summary>
        private void ReplaceLocalWithRegister(LiveInterval current)
        {
            Operand register = GetRegister(current);

            IList<int> usePositions = current.UsePositions();
            for (int i = usePositions.Count - 1; i >= 0; i--)
            {
                // NOTE(review): use positions appear to be stored negated —
                // confirm against LiveInterval.AddUsePosition.
                int usePosition = -usePositions[i];

                (_, Node operation) = GetOperationNode(usePosition);

                for (int index = 0; index < operation.SourcesCount; index++)
                {
                    Operand source = operation.GetSource(index);

                    if (source == current.Local)
                    {
                        operation.SetSource(index, register);
                    }
                    else if (source.Kind == OperandKind.Memory)
                    {
                        // The local may also appear as the base address or index of
                        // a memory operand.
                        MemoryOperand memOp = (MemoryOperand)source;

                        if (memOp.BaseAddress == current.Local)
                        {
                            memOp.BaseAddress = register;
                        }

                        if (memOp.Index == current.Local)
                        {
                            memOp.Index = register;
                        }
                    }
                }

                for (int index = 0; index < operation.DestinationsCount; index++)
                {
                    Operand dest = operation.GetDestination(index);

                    if (dest == current.Local)
                    {
                        operation.SetDestination(index, register);
                    }
                }
            }
        }
  582. private static Operand GetRegister(LiveInterval interval)
  583. {
  584. Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
  585. return OperandHelper.Register(
  586. interval.Register.Index,
  587. interval.Register.Type,
  588. interval.Local.Type);
  589. }
  590. private (IntrusiveList<Node>, Node) GetOperationNode(int position)
  591. {
  592. return _operationNodes[position / InstructionGap];
  593. }
        /// <summary>
        /// Numbers every local variable, creates the fixed intervals for the
        /// physical registers, and records the node located at each position.
        /// </summary>
        private void NumberLocals(ControlFlowGraph cfg)
        {
            _operationNodes = new List<(IntrusiveList<Node>, Node)>();

            _intervals = new List<LiveInterval>();

            // The first RegistersCount * 2 intervals are fixed: one integer and one
            // vector interval per physical register index (see GetRegisterId).
            for (int index = 0; index < RegistersCount; index++)
            {
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
            }

            HashSet<Operand> visited = new HashSet<Operand>();

            _operationsCount = 0;

            // Walk blocks in reverse post-order so positions follow program order.
            for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                for (Node node = block.Operations.First; node != null; node = node.ListNext)
                {
                    _operationNodes.Add((block.Operations, node));

                    // Number each local the first time it appears as a destination.
                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        if (dest.Kind == OperandKind.LocalVariable && visited.Add(dest))
                        {
                            dest.NumberLocal(_intervals.Count);

                            _intervals.Add(new LiveInterval(dest));
                        }
                    }
                }

                _operationsCount += block.Operations.Count * InstructionGap;

                if (block.Operations.Count == 0)
                {
                    // Pretend we have a dummy instruction on the empty block.
                    _operationNodes.Add((null, null));

                    _operationsCount += InstructionGap;
                }
            }

            _parentIntervals = _intervals.ToArray();
        }
        /// <summary>
        /// Performs liveness analysis (local sets, then a fixed-point over the
        /// global sets) and builds the live intervals by walking blocks and
        /// operations backwards.
        /// </summary>
        private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
        {
            _blockRanges = new LiveRange[cfg.Blocks.Count];

            int mapSize = _intervals.Count;

            BitMap[] blkLiveGen  = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];

            // Compute local live sets.
            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                BitMap liveGen  = BitMapPool.Allocate(mapSize);
                BitMap liveKill = BitMapPool.Allocate(mapSize);

                for (Node node = block.Operations.First; node != null; node = node.ListNext)
                {
                    // A source is "gen" only if it wasn't defined earlier in the
                    // same block.
                    Sources(node, (source) =>
                    {
                        int id = GetOperandId(source);

                        if (!liveKill.IsSet(id))
                        {
                            liveGen.Set(id);
                        }
                    });

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        liveKill.Set(GetOperandId(dest));
                    }
                }

                blkLiveGen [block.Index] = liveGen;
                blkLiveKill[block.Index] = liveKill;
            }

            // Compute global live sets.
            BitMap[] blkLiveIn  = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];

            for (int index = 0; index < cfg.Blocks.Count; index++)
            {
                blkLiveIn [index] = BitMapPool.Allocate(mapSize);
                blkLiveOut[index] = BitMapPool.Allocate(mapSize);
            }

            bool modified;

            // Iterate to a fixed point: liveOut = union of successors' liveIn,
            // liveIn = (liveOut - kill) + gen.
            do
            {
                modified = false;

                for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
                {
                    BasicBlock block = cfg.PostOrderBlocks[index];

                    BitMap liveOut = blkLiveOut[block.Index];

                    for (int i = 0; i < block.SuccessorCount; i++)
                    {
                        BasicBlock succ = block.GetSuccessor(i);

                        modified |= liveOut.Set(blkLiveIn[succ.Index]);
                    }

                    BitMap liveIn = blkLiveIn[block.Index];

                    liveIn.Set  (liveOut);
                    liveIn.Clear(blkLiveKill[block.Index]);
                    liveIn.Set  (blkLiveGen [block.Index]);
                }
            }
            while (modified);

            _blockLiveIn = blkLiveIn;

            _blockEdges = new HashSet<int>();

            // Compute lifetime intervals.
            int operationPos = _operationsCount;

            for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                // We handle empty blocks by pretending they have a dummy instruction,
                // because otherwise the block would have the same start and end position,
                // and this is not valid.
                int instCount = Math.Max(block.Operations.Count, 1);

                int blockStart = operationPos - instCount * InstructionGap;
                int blockEnd   = operationPos;

                _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);

                _blockEdges.Add(blockStart);

                BitMap liveOut = blkLiveOut[block.Index];

                // Live-out values span at least the whole block; definitions found
                // below will shorten the range via SetStart.
                foreach (int id in liveOut)
                {
                    _intervals[id].AddRange(blockStart, blockEnd);
                }

                if (block.Operations.Count == 0)
                {
                    operationPos -= InstructionGap;

                    continue;
                }

                foreach (Node node in BottomOperations(block))
                {
                    operationPos -= InstructionGap;

                    // A definition starts (or shortens) the interval at this position.
                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        LiveInterval interval = _intervals[GetOperandId(dest)];

                        interval.SetStart(operationPos + 1);
                        interval.AddUsePosition(operationPos + 1);
                    }

                    // A use extends the interval from the block start up to here.
                    Sources(node, (source) =>
                    {
                        LiveInterval interval = _intervals[GetOperandId(source)];

                        interval.AddRange(blockStart, operationPos + 1);
                        interval.AddUsePosition(operationPos);
                    });

                    // Calls clobber the caller saved registers: give each one a
                    // short fixed range at the call position.
                    if (node is Operation operation && operation.Instruction == Instruction.Call)
                    {
                        AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                        AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
                    }
                }
            }
        }
  738. private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
  739. {
  740. while (mask != 0)
  741. {
  742. int regIndex = BitOperations.TrailingZeroCount(mask);
  743. Register callerSavedReg = new Register(regIndex, regType);
  744. LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
  745. interval.AddRange(operationPos + 1, operationPos + InstructionGap);
  746. mask &= ~(1 << regIndex);
  747. }
  748. }
  749. private static int GetOperandId(Operand operand)
  750. {
  751. if (operand.Kind == OperandKind.LocalVariable)
  752. {
  753. return operand.GetLocalNumber();
  754. }
  755. else if (operand.Kind == OperandKind.Register)
  756. {
  757. return GetRegisterId(operand.GetRegister());
  758. }
  759. else
  760. {
  761. throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
  762. }
  763. }
  764. private static int GetRegisterId(Register register)
  765. {
  766. return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
  767. }
  768. private static IEnumerable<Node> BottomOperations(BasicBlock block)
  769. {
  770. Node node = block.Operations.Last;
  771. while (node != null && !(node is PhiNode))
  772. {
  773. yield return node;
  774. node = node.ListPrevious;
  775. }
  776. }
  777. private static void Sources(Node node, Action<Operand> action)
  778. {
  779. for (int index = 0; index < node.SourcesCount; index++)
  780. {
  781. Operand source = node.GetSource(index);
  782. if (IsLocalOrRegister(source.Kind))
  783. {
  784. action(source);
  785. }
  786. else if (source.Kind == OperandKind.Memory)
  787. {
  788. MemoryOperand memOp = (MemoryOperand)source;
  789. if (memOp.BaseAddress != null)
  790. {
  791. action(memOp.BaseAddress);
  792. }
  793. if (memOp.Index != null)
  794. {
  795. action(memOp.Index);
  796. }
  797. }
  798. }
  799. }
  800. private static bool IsLocalOrRegister(OperandKind kind)
  801. {
  802. return kind == OperandKind.LocalVariable ||
  803. kind == OperandKind.Register;
  804. }
  805. }
  806. }