LinearScanAllocator.cs 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035
  1. using ARMeilleure.Common;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Linq;
  8. using System.Numerics;
  9. namespace ARMeilleure.CodeGen.RegisterAllocators
  10. {
  11. // Based on:
  12. // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
  13. // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
  14. class LinearScanAllocator : IRegisterAllocator
  15. {
  16. private const int InstructionGap = 2;
  17. private const int InstructionGapMask = InstructionGap - 1;
  18. private const int RegistersCount = 16;
  19. private HashSet<int> _blockEdges;
  20. private LiveRange[] _blockRanges;
  21. private BitMap[] _blockLiveIn;
  22. private List<LiveInterval> _intervals;
  23. private LiveInterval[] _parentIntervals;
  24. private List<(IntrusiveList<Operation>, Operation)> _operationNodes;
  25. private int _operationsCount;
  26. private class AllocationContext
  27. {
  28. public RegisterMasks Masks { get; }
  29. public StackAllocator StackAlloc { get; }
  30. public BitMap Active { get; }
  31. public BitMap Inactive { get; }
  32. public int IntUsedRegisters { get; set; }
  33. public int VecUsedRegisters { get; set; }
  34. public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
  35. {
  36. StackAlloc = stackAlloc;
  37. Masks = masks;
  38. Active = new BitMap(Allocators.Default, intervalsCount);
  39. Inactive = new BitMap(Allocators.Default, intervalsCount);
  40. }
  41. public void MoveActiveToInactive(int bit)
  42. {
  43. Move(Active, Inactive, bit);
  44. }
  45. public void MoveInactiveToActive(int bit)
  46. {
  47. Move(Inactive, Active, bit);
  48. }
  49. private static void Move(BitMap source, BitMap dest, int bit)
  50. {
  51. source.Clear(bit);
  52. dest.Set(bit);
  53. }
  54. }
/// <summary>
/// Runs the linear scan register allocation pass over the given control flow
/// graph, assigning each local variable a register or a stack slot.
/// </summary>
/// <param name="cfg">Control flow graph to allocate registers for</param>
/// <param name="stackAlloc">Stack allocator used to place spilled intervals</param>
/// <param name="regMasks">Masks describing the registers available for allocation</param>
/// <returns>The integer/vector registers used and the total stack size consumed by spills</returns>
public AllocationResult RunPass(
    ControlFlowGraph cfg,
    StackAllocator stackAlloc,
    RegisterMasks regMasks)
{
    NumberLocals(cfg);
    var context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);
    BuildIntervals(cfg, context);
    for (int index = 0; index < _intervals.Count; index++)
    {
        LiveInterval current = _intervals[index];
        if (current.IsEmpty)
        {
            continue;
        }
        if (current.IsFixed)
        {
            // Fixed intervals represent physical registers: they are never
            // allocated, only marked active so they block overlapping locals.
            context.Active.Set(index);
            if (current.Register.Type == RegisterType.Integer)
            {
                context.IntUsedRegisters |= 1 << current.Register.Index;
            }
            else /* if (interval.Register.Type == RegisterType.Vector) */
            {
                context.VecUsedRegisters |= 1 << current.Register.Index;
            }
            continue;
        }
        AllocateInterval(context, current, index);
    }
    // The first RegistersCount * 2 intervals are the fixed ones (see
    // NumberLocals); only variable intervals that got a register need
    // their local variable uses rewritten.
    for (int index = RegistersCount * 2; index < _intervals.Count; index++)
    {
        if (!_intervals[index].IsSpilled)
        {
            ReplaceLocalWithRegister(_intervals[index]);
        }
    }
    InsertSplitCopies();
    InsertSplitCopiesAtEdges(cfg);
    return new AllocationResult(context.IntUsedRegisters, context.VecUsedRegisters, context.StackAlloc.TotalSize);
}
  96. private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex)
  97. {
  98. // Check active intervals that already ended.
  99. foreach (int iIndex in context.Active)
  100. {
  101. LiveInterval interval = _intervals[iIndex];
  102. if (interval.GetEnd() < current.GetStart())
  103. {
  104. context.Active.Clear(iIndex);
  105. }
  106. else if (!interval.Overlaps(current.GetStart()))
  107. {
  108. context.MoveActiveToInactive(iIndex);
  109. }
  110. }
  111. // Check inactive intervals that already ended or were reactivated.
  112. foreach (int iIndex in context.Inactive)
  113. {
  114. LiveInterval interval = _intervals[iIndex];
  115. if (interval.GetEnd() < current.GetStart())
  116. {
  117. context.Inactive.Clear(iIndex);
  118. }
  119. else if (interval.Overlaps(current.GetStart()))
  120. {
  121. context.MoveInactiveToActive(iIndex);
  122. }
  123. }
  124. if (!TryAllocateRegWithoutSpill(context, current, cIndex))
  125. {
  126. AllocateRegWithSpill(context, current, cIndex);
  127. }
  128. }
// Tries to give the current interval a register without spilling any other
// interval. Returns false when every candidate register is already occupied
// at the interval's start position, in which case the caller must spill.
private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex)
{
    RegisterType regType = current.Local.Type.ToRegisterType();
    int availableRegisters = context.Masks.GetAvailableRegisters(regType);
    // freePositions[reg] = position up to which "reg" is free:
    // int.MaxValue = free for the whole lifetime, 0 = not free at all.
    // Registers outside the available mask stay at 0 (never selectable).
    int[] freePositions = new int[RegistersCount];
    for (int index = 0; index < RegistersCount; index++)
    {
        if ((availableRegisters & (1 << index)) != 0)
        {
            freePositions[index] = int.MaxValue;
        }
    }
    // Active intervals fully block their assigned register.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];
        if (interval.Register.Type == regType)
        {
            freePositions[interval.Register.Index] = 0;
        }
    }
    // Inactive intervals only block their register from the first position
    // where they overlap the current interval (if they overlap at all).
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];
        if (interval.Register.Type == regType)
        {
            int overlapPosition = interval.GetOverlapPosition(current);
            if (overlapPosition != LiveInterval.NotFound && freePositions[interval.Register.Index] > overlapPosition)
            {
                freePositions[interval.Register.Index] = overlapPosition;
            }
        }
    }
    // Pick the register that stays free the longest.
    int selectedReg = GetHighestValueIndex(freePositions);
    int selectedNextUse = freePositions[selectedReg];
    // Intervals starts and ends at odd positions, unless they span an entire
    // block, in this case they will have ranges at a even position.
    // When a interval is loaded from the stack to a register, we can only
    // do the split at a odd position, because otherwise the split interval
    // that is inserted on the list to be processed may clobber a register
    // used by the instruction at the same position as the split.
    // The problem only happens when a interval ends exactly at this instruction,
    // because otherwise they would interfere, and the register wouldn't be selected.
    // When the interval is aligned and the above happens, there's no problem as
    // the instruction that is actually with the last use is the one
    // before that position.
    selectedNextUse &= ~InstructionGapMask;
    if (selectedNextUse <= current.GetStart())
    {
        // Best register is not even free at the start: allocation failed.
        return false;
    }
    else if (selectedNextUse < current.GetEnd())
    {
        // Register is free only for part of the lifetime: split and let the
        // remainder be allocated (or spilled when it has no uses) later.
        Debug.Assert(selectedNextUse > current.GetStart(), "Trying to split interval at the start.");
        LiveInterval splitChild = current.Split(selectedNextUse);
        if (splitChild.UsesCount != 0)
        {
            Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
            InsertInterval(splitChild);
        }
        else
        {
            Spill(context, splitChild);
        }
    }
    current.Register = new Register(selectedReg, regType);
    if (regType == RegisterType.Integer)
    {
        context.IntUsedRegisters |= 1 << selectedReg;
    }
    else /* if (regType == RegisterType.Vector) */
    {
        context.VecUsedRegisters |= 1 << selectedReg;
    }
    context.Active.Set(cIndex);
    return true;
}
// Allocates a register for the current interval when none is free, spilling
// or splitting conflicting intervals — or the current interval itself, when
// every other interval is used before current's first use.
private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex)
{
    RegisterType regType = current.Local.Type.ToRegisterType();
    int availableRegisters = context.Masks.GetAvailableRegisters(regType);
    // usePositions[reg]     = earliest next use of any interval holding "reg".
    // blockedPositions[reg] = earliest position where "reg" is blocked by a
    //                         fixed interval (fixed intervals can't be spilled).
    int[] usePositions = new int[RegistersCount];
    int[] blockedPositions = new int[RegistersCount];
    for (int index = 0; index < RegistersCount; index++)
    {
        if ((availableRegisters & (1 << index)) != 0)
        {
            usePositions[index] = int.MaxValue;
            blockedPositions[index] = int.MaxValue;
        }
    }
    void SetUsePosition(int index, int position)
    {
        usePositions[index] = Math.Min(usePositions[index], position);
    }
    void SetBlockedPosition(int index, int position)
    {
        blockedPositions[index] = Math.Min(blockedPositions[index], position);
        // A blocked position is also a use position.
        SetUsePosition(index, position);
    }
    // Non-fixed active intervals: record their next use after current's start.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];
        if (!interval.IsFixed && interval.Register.Type == regType)
        {
            int nextUse = interval.NextUseAfter(current.GetStart());
            if (nextUse != -1)
            {
                SetUsePosition(interval.Register.Index, nextUse);
            }
        }
    }
    // Non-fixed inactive intervals only matter when they overlap current.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];
        if (!interval.IsFixed && interval.Register.Type == regType && interval.Overlaps(current))
        {
            int nextUse = interval.NextUseAfter(current.GetStart());
            if (nextUse != -1)
            {
                SetUsePosition(interval.Register.Index, nextUse);
            }
        }
    }
    // Fixed active intervals block their register outright.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];
        if (interval.IsFixed && interval.Register.Type == regType)
        {
            SetBlockedPosition(interval.Register.Index, 0);
        }
    }
    // Fixed inactive intervals block from their first overlap with current.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];
        if (interval.IsFixed && interval.Register.Type == regType)
        {
            int overlapPosition = interval.GetOverlapPosition(current);
            if (overlapPosition != LiveInterval.NotFound)
            {
                SetBlockedPosition(interval.Register.Index, overlapPosition);
            }
        }
    }
    // Pick the register whose holder is used farthest in the future.
    int selectedReg = GetHighestValueIndex(usePositions);
    int currentFirstUse = current.FirstUse();
    Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");
    if (usePositions[selectedReg] < currentFirstUse)
    {
        // All intervals on inactive and active are being used before current,
        // so spill the current interval.
        Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");
        LiveInterval splitChild = current.Split(currentFirstUse);
        Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
        InsertInterval(splitChild);
        Spill(context, current);
    }
    else if (blockedPositions[selectedReg] > current.GetEnd())
    {
        // Spill made the register available for the entire current lifetime,
        // so we only need to split the intervals using the selected register.
        current.Register = new Register(selectedReg, regType);
        SplitAndSpillOverlappingIntervals(context, current);
        context.Active.Set(cIndex);
    }
    else
    {
        // There are conflicts even after spill due to the use of fixed registers
        // that can't be spilled, so we need to also split current at the point of
        // the first fixed register use.
        current.Register = new Register(selectedReg, regType);
        // Align the split down to an instruction-gap boundary (see the long
        // comment in TryAllocateRegWithoutSpill for why odd positions matter).
        int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;
        Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");
        LiveInterval splitChild = current.Split(splitPosition);
        if (splitChild.UsesCount != 0)
        {
            Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
            InsertInterval(splitChild);
        }
        else
        {
            Spill(context, splitChild);
        }
        SplitAndSpillOverlappingIntervals(context, current);
        context.Active.Set(cIndex);
    }
}
  315. private static int GetHighestValueIndex(int[] array)
  316. {
  317. int higuest = array[0];
  318. if (higuest == int.MaxValue)
  319. {
  320. return 0;
  321. }
  322. int selected = 0;
  323. for (int index = 1; index < array.Length; index++)
  324. {
  325. int current = array[index];
  326. if (higuest < current)
  327. {
  328. higuest = current;
  329. selected = index;
  330. if (current == int.MaxValue)
  331. {
  332. break;
  333. }
  334. }
  335. }
  336. return selected;
  337. }
  338. private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current)
  339. {
  340. foreach (int iIndex in context.Active)
  341. {
  342. LiveInterval interval = _intervals[iIndex];
  343. if (!interval.IsFixed && interval.Register == current.Register)
  344. {
  345. SplitAndSpillOverlappingInterval(context, current, interval);
  346. context.Active.Clear(iIndex);
  347. }
  348. }
  349. foreach (int iIndex in context.Inactive)
  350. {
  351. LiveInterval interval = _intervals[iIndex];
  352. if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
  353. {
  354. SplitAndSpillOverlappingInterval(context, current, interval);
  355. context.Inactive.Clear(iIndex);
  356. }
  357. }
  358. }
// Splits the given interval at the start of "current" and spills the part
// that conflicts with it. If the victim is used again later, the tail is
// re-queued so it gets a new register at that use position.
private void SplitAndSpillOverlappingInterval(
    AllocationContext context,
    LiveInterval current,
    LiveInterval interval)
{
    // If there's a next use after the start of the current interval,
    // we need to split the spilled interval twice, and re-insert it
    // on the "pending" list to ensure that it will get a new register
    // on that use position.
    int nextUse = interval.NextUseAfter(current.GetStart());
    LiveInterval splitChild;
    if (interval.GetStart() < current.GetStart())
    {
        // The part before current's start keeps its register; only the
        // overlapping tail is affected.
        splitChild = interval.Split(current.GetStart());
    }
    else
    {
        splitChild = interval;
    }
    if (nextUse != -1)
    {
        Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");
        if (nextUse > splitChild.GetStart())
        {
            // Spill only the gap between the conflict and the next use.
            LiveInterval right = splitChild.Split(nextUse);
            Spill(context, splitChild);
            splitChild = right;
        }
        InsertInterval(splitChild);
    }
    else
    {
        // No further uses: the whole conflicting part lives on the stack.
        Spill(context, splitChild);
    }
}
  394. private void InsertInterval(LiveInterval interval)
  395. {
  396. Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
  397. Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
  398. Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");
  399. int startIndex = RegistersCount * 2;
  400. int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);
  401. if (insertIndex < 0)
  402. {
  403. insertIndex = ~insertIndex;
  404. }
  405. _intervals.Insert(insertIndex, interval);
  406. }
  407. private void Spill(AllocationContext context, LiveInterval interval)
  408. {
  409. Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
  410. Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");
  411. // We first check if any of the siblings were spilled, if so we can reuse
  412. // the stack offset. Otherwise, we allocate a new space on the stack.
  413. // This prevents stack-to-stack copies being necessary for a split interval.
  414. if (!interval.TrySpillWithSiblingOffset())
  415. {
  416. interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
  417. }
  418. }
  419. private void InsertSplitCopies()
  420. {
  421. Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();
  422. CopyResolver GetCopyResolver(int position)
  423. {
  424. CopyResolver copyResolver = new CopyResolver();
  425. if (copyResolvers.TryAdd(position, copyResolver))
  426. {
  427. return copyResolver;
  428. }
  429. return copyResolvers[position];
  430. }
  431. foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
  432. {
  433. LiveInterval previous = interval;
  434. foreach (LiveInterval splitChild in interval.SplitChilds())
  435. {
  436. int splitPosition = splitChild.GetStart();
  437. if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
  438. {
  439. GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
  440. }
  441. previous = splitChild;
  442. }
  443. }
  444. foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
  445. {
  446. CopyResolver copyResolver = kv.Value;
  447. if (!copyResolver.HasCopy)
  448. {
  449. continue;
  450. }
  451. int splitPosition = kv.Key;
  452. (IntrusiveList<Operation> nodes, Operation node) = GetOperationNode(splitPosition);
  453. Operation[] sequence = copyResolver.Sequence();
  454. nodes.AddBefore(node, sequence[0]);
  455. node = sequence[0];
  456. for (int index = 1; index < sequence.Length; index++)
  457. {
  458. nodes.AddAfter(node, sequence[index]);
  459. node = sequence[index];
  460. }
  461. }
  462. }
// Inserts the copies needed when an interval is split across a control flow
// edge: the value may live in different locations (register/stack slot) at
// the end of a block and at the start of its successor.
private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
{
    int blocksCount = cfg.Blocks.Count;
    // Blocks appended past the original count are edge-split blocks created
    // by a previous iteration of this loop.
    bool IsSplitEdgeBlock(BasicBlock block)
    {
        return block.Index >= blocksCount;
    }
    for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
    {
        if (IsSplitEdgeBlock(block))
        {
            continue;
        }
        bool hasSingleOrNoSuccessor = block.SuccessorsCount <= 1;
        for (int i = 0; i < block.SuccessorsCount; i++)
        {
            BasicBlock successor = block.GetSuccessor(i);
            int succIndex = successor.Index;
            // If the current node is a split node, then the actual successor node
            // (the successor before the split) should be right after it.
            if (IsSplitEdgeBlock(successor))
            {
                succIndex = successor.GetSuccessor(0).Index;
            }
            CopyResolver copyResolver = new CopyResolver();
            // For every interval live into the successor, find the split
            // child at the end of this block and the one at the start of the
            // successor; if they differ, a copy is needed on this edge.
            foreach (int iIndex in _blockLiveIn[succIndex])
            {
                LiveInterval interval = _parentIntervals[iIndex];
                if (!interval.IsSplit)
                {
                    continue;
                }
                int lEnd = _blockRanges[block.Index].End - 1;
                int rStart = _blockRanges[succIndex].Start;
                LiveInterval left = interval.GetSplitChild(lEnd);
                LiveInterval right = interval.GetSplitChild(rStart);
                if (left != null && right != null && left != right)
                {
                    copyResolver.AddSplit(left, right);
                }
            }
            if (!copyResolver.HasCopy)
            {
                continue;
            }
            Operation[] sequence = copyResolver.Sequence();
            if (hasSingleOrNoSuccessor)
            {
                // Safe to append at the end of the predecessor block.
                foreach (Operation operation in sequence)
                {
                    block.Append(operation);
                }
            }
            else if (successor.Predecessors.Count == 1)
            {
                // Safe to prepend at the start of the successor block.
                successor.Operations.AddFirst(sequence[0]);
                Operation prependNode = sequence[0];
                for (int index = 1; index < sequence.Length; index++)
                {
                    Operation operation = sequence[index];
                    successor.Operations.AddAfter(prependNode, operation);
                    prependNode = operation;
                }
            }
            else
            {
                // Split the critical edge.
                BasicBlock splitBlock = cfg.SplitEdge(block, successor);
                foreach (Operation operation in sequence)
                {
                    splitBlock.Append(operation);
                }
            }
        }
    }
}
// Rewrites every use and definition of the interval's local variable to
// reference the physical register assigned to the interval, including uses
// as base address or index inside memory operands.
private void ReplaceLocalWithRegister(LiveInterval current)
{
    Operand register = GetRegister(current);
    // NOTE(review): use positions appear to be stored negated (the code
    // negates them below) — presumably for descending sort order; confirm
    // against LiveInterval.AddUsePosition.
    IList<int> usePositions = current.UsePositions();
    for (int i = usePositions.Count - 1; i >= 0; i--)
    {
        int usePosition = -usePositions[i];
        (_, Operation operation) = GetOperationNode(usePosition);
        for (int index = 0; index < operation.SourcesCount; index++)
        {
            Operand source = operation.GetSource(index);
            if (source == current.Local)
            {
                operation.SetSource(index, register);
            }
            else if (source.Kind == OperandKind.Memory)
            {
                // The local may also appear embedded in a memory operand.
                MemoryOperand memOp = source.GetMemory();
                if (memOp.BaseAddress == current.Local)
                {
                    memOp.BaseAddress = register;
                }
                if (memOp.Index == current.Local)
                {
                    memOp.Index = register;
                }
            }
        }
        for (int index = 0; index < operation.DestinationsCount; index++)
        {
            Operand dest = operation.GetDestination(index);
            if (dest == current.Local)
            {
                operation.SetDestination(index, register);
            }
        }
    }
}
  577. private static Operand GetRegister(LiveInterval interval)
  578. {
  579. Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
  580. return Operand.Factory.Register(
  581. interval.Register.Index,
  582. interval.Register.Type,
  583. interval.Local.Type);
  584. }
  585. private (IntrusiveList<Operation>, Operation) GetOperationNode(int position)
  586. {
  587. return _operationNodes[position / InstructionGap];
  588. }
// Numbers every local variable defined on the CFG and creates its live
// interval, after creating one fixed interval per physical register (integer
// and vector). Also records the (list, node) pair for each instruction
// position so GetOperationNode can resolve positions later.
private void NumberLocals(ControlFlowGraph cfg)
{
    _operationNodes = new List<(IntrusiveList<Operation>, Operation)>();
    _intervals = new List<LiveInterval>();
    // Fixed intervals occupy indices 0..RegistersCount * 2 - 1, matching
    // the ids produced by GetRegisterId (index << 1 | isVector).
    for (int index = 0; index < RegistersCount; index++)
    {
        _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
        _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
    }
    HashSet<Operand> visited = new HashSet<Operand>();
    _operationsCount = 0;
    // Walk blocks in reverse post-order so positions follow program order.
    for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
    {
        BasicBlock block = cfg.PostOrderBlocks[index];
        for (Operation node = block.Operations.First; node != default; node = node.ListNext)
        {
            _operationNodes.Add((block.Operations, node));
            for (int i = 0; i < node.DestinationsCount; i++)
            {
                Operand dest = node.GetDestination(i);
                if (dest.Kind == OperandKind.LocalVariable && visited.Add(dest))
                {
                    // Number each local once, on its first definition.
                    dest.NumberLocal(_intervals.Count);
                    _intervals.Add(new LiveInterval(dest));
                }
            }
        }
        _operationsCount += block.Operations.Count * InstructionGap;
        if (block.Operations.Count == 0)
        {
            // Pretend we have a dummy instruction on the empty block.
            _operationNodes.Add((default, default));
            _operationsCount += InstructionGap;
        }
    }
    // Snapshot before allocation: splitting inserts child intervals into
    // _intervals, but the parents keep their original indices here.
    _parentIntervals = _intervals.ToArray();
}
// Builds the live intervals for all locals and fixed registers: computes
// per-block local live sets, solves the global liveness dataflow equations
// by fixpoint iteration, and then walks each block backwards adding live
// ranges and use positions to the intervals.
private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
{
    _blockRanges = new LiveRange[cfg.Blocks.Count];
    int mapSize = _intervals.Count;
    BitMap[] blkLiveGen = new BitMap[cfg.Blocks.Count];
    BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];
    // Compute local live sets.
    for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
    {
        // liveGen: read before any write in this block; liveKill: written.
        BitMap liveGen = new BitMap(Allocators.Default, mapSize);
        BitMap liveKill = new BitMap(Allocators.Default, mapSize);
        for (Operation node = block.Operations.First; node != default; node = node.ListNext)
        {
            Sources(node, (source) =>
            {
                int id = GetOperandId(source);
                if (!liveKill.IsSet(id))
                {
                    liveGen.Set(id);
                }
            });
            for (int i = 0; i < node.DestinationsCount; i++)
            {
                Operand dest = node.GetDestination(i);
                liveKill.Set(GetOperandId(dest));
            }
        }
        blkLiveGen [block.Index] = liveGen;
        blkLiveKill[block.Index] = liveKill;
    }
    // Compute global live sets.
    BitMap[] blkLiveIn = new BitMap[cfg.Blocks.Count];
    BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];
    for (int index = 0; index < cfg.Blocks.Count; index++)
    {
        blkLiveIn [index] = new BitMap(Allocators.Default, mapSize);
        blkLiveOut[index] = new BitMap(Allocators.Default, mapSize);
    }
    // Iterate to a fixpoint: liveOut = union of successors' liveIn;
    // liveIn = liveGen | (liveOut & ~liveKill).
    bool modified;
    do
    {
        modified = false;
        for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
        {
            BasicBlock block = cfg.PostOrderBlocks[index];
            BitMap liveOut = blkLiveOut[block.Index];
            for (int i = 0; i < block.SuccessorsCount; i++)
            {
                BasicBlock succ = block.GetSuccessor(i);
                modified |= liveOut.Set(blkLiveIn[succ.Index]);
            }
            BitMap liveIn = blkLiveIn[block.Index];
            liveIn.Set (liveOut);
            liveIn.Clear(blkLiveKill[block.Index]);
            liveIn.Set (blkLiveGen [block.Index]);
        }
    }
    while (modified);
    _blockLiveIn = blkLiveIn;
    _blockEdges = new HashSet<int>();
    // Compute lifetime intervals, walking blocks and instructions backwards
    // from the last position so SetStart can trim ranges added earlier.
    int operationPos = _operationsCount;
    for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
    {
        BasicBlock block = cfg.PostOrderBlocks[index];
        // We handle empty blocks by pretending they have a dummy instruction,
        // because otherwise the block would have the same start and end position,
        // and this is not valid.
        int instCount = Math.Max(block.Operations.Count, 1);
        int blockStart = operationPos - instCount * InstructionGap;
        int blockEnd = operationPos;
        _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);
        _blockEdges.Add(blockStart);
        BitMap liveOut = blkLiveOut[block.Index];
        // Anything live out of the block is conservatively live across it.
        foreach (int id in liveOut)
        {
            _intervals[id].AddRange(blockStart, blockEnd);
        }
        if (block.Operations.Count == 0)
        {
            operationPos -= InstructionGap;
            continue;
        }
        foreach (Operation node in BottomOperations(block))
        {
            operationPos -= InstructionGap;
            for (int i = 0; i < node.DestinationsCount; i++)
            {
                // A definition trims the interval start to just past this
                // instruction and records a use position there.
                Operand dest = node.GetDestination(i);
                LiveInterval interval = _intervals[GetOperandId(dest)];
                interval.SetStart(operationPos + 1);
                interval.AddUsePosition(operationPos + 1);
            }
            Sources(node, (source) =>
            {
                // A source extends liveness from the block start up to
                // just past this instruction.
                LiveInterval interval = _intervals[GetOperandId(source)];
                interval.AddRange(blockStart, operationPos + 1);
                interval.AddUsePosition(operationPos);
            });
            if (node.Instruction == Instruction.Call)
            {
                // Calls clobber caller-saved registers: give their fixed
                // intervals a short range across the call.
                AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
            }
        }
    }
}
  733. private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
  734. {
  735. while (mask != 0)
  736. {
  737. int regIndex = BitOperations.TrailingZeroCount(mask);
  738. Register callerSavedReg = new Register(regIndex, regType);
  739. LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
  740. interval.AddRange(operationPos + 1, operationPos + InstructionGap);
  741. mask &= ~(1 << regIndex);
  742. }
  743. }
  744. private static int GetOperandId(Operand operand)
  745. {
  746. if (operand.Kind == OperandKind.LocalVariable)
  747. {
  748. return operand.GetLocalNumber();
  749. }
  750. else if (operand.Kind == OperandKind.Register)
  751. {
  752. return GetRegisterId(operand.GetRegister());
  753. }
  754. else
  755. {
  756. throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
  757. }
  758. }
  759. private static int GetRegisterId(Register register)
  760. {
  761. return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
  762. }
  763. private static IEnumerable<Operation> BottomOperations(BasicBlock block)
  764. {
  765. Operation node = block.Operations.Last;
  766. while (node != default)
  767. {
  768. yield return node;
  769. node = node.ListPrevious;
  770. }
  771. }
  772. private static void Sources(Operation node, Action<Operand> action)
  773. {
  774. for (int index = 0; index < node.SourcesCount; index++)
  775. {
  776. Operand source = node.GetSource(index);
  777. if (IsLocalOrRegister(source.Kind))
  778. {
  779. action(source);
  780. }
  781. else if (source.Kind == OperandKind.Memory)
  782. {
  783. MemoryOperand memOp = source.GetMemory();
  784. if (memOp.BaseAddress != default)
  785. {
  786. action(memOp.BaseAddress);
  787. }
  788. if (memOp.Index != default)
  789. {
  790. action(memOp.Index);
  791. }
  792. }
  793. }
  794. }
  795. private static bool IsLocalOrRegister(OperandKind kind)
  796. {
  797. return kind == OperandKind.LocalVariable ||
  798. kind == OperandKind.Register;
  799. }
  800. }
  801. }