LinearScanAllocator.cs 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101
  1. using ARMeilleure.Common;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Linq;
  8. using System.Numerics;
  9. namespace ARMeilleure.CodeGen.RegisterAllocators
  10. {
    // Based on:
    // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
    // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
    class LinearScanAllocator : IRegisterAllocator
    {
        // Each operation occupies two consecutive positions, leaving an odd
        // position between instructions where split copies can be inserted.
        private const int InstructionGap = 2;
        private const int InstructionGapMask = InstructionGap - 1;

        // Positions that start a basic block; intra-block split copies are not
        // inserted there (edge resolution handles block boundaries).
        private HashSet<int> _blockEdges;

        // Start/end positions of each block, indexed by block index.
        private LiveRange[] _blockRanges;

        // Live-in set of each block, indexed by block index.
        private BitMap[] _blockLiveIn;

        // All intervals: fixed intervals (two per physical register) first,
        // followed by one interval per local variable.
        private List<LiveInterval> _intervals;

        // Snapshot of _intervals taken before allocation inserts split children,
        // so parents can be looked up by their original id.
        private LiveInterval[] _parentIntervals;

        // Maps (position / InstructionGap) to the operation at that position and
        // the list that owns it.
        private List<(IntrusiveList<Operation>, Operation)> _operationNodes;

        // Total number of positions (operation count * InstructionGap).
        private int _operationsCount;
  25. private class AllocationContext
  26. {
  27. public RegisterMasks Masks { get; }
  28. public StackAllocator StackAlloc { get; }
  29. public BitMap Active { get; }
  30. public BitMap Inactive { get; }
  31. public int IntUsedRegisters { get; set; }
  32. public int VecUsedRegisters { get; set; }
  33. private readonly int[] _intFreePositions;
  34. private readonly int[] _vecFreePositions;
  35. private readonly int _intFreePositionsCount;
  36. private readonly int _vecFreePositionsCount;
  37. public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
  38. {
  39. StackAlloc = stackAlloc;
  40. Masks = masks;
  41. Active = new BitMap(Allocators.Default, intervalsCount);
  42. Inactive = new BitMap(Allocators.Default, intervalsCount);
  43. PopulateFreePositions(RegisterType.Integer, out _intFreePositions, out _intFreePositionsCount);
  44. PopulateFreePositions(RegisterType.Vector, out _vecFreePositions, out _vecFreePositionsCount);
  45. void PopulateFreePositions(RegisterType type, out int[] positions, out int count)
  46. {
  47. positions = new int[masks.RegistersCount];
  48. count = BitOperations.PopCount((uint)masks.GetAvailableRegisters(type));
  49. int mask = masks.GetAvailableRegisters(type);
  50. for (int i = 0; i < positions.Length; i++)
  51. {
  52. if ((mask & (1 << i)) != 0)
  53. {
  54. positions[i] = int.MaxValue;
  55. }
  56. }
  57. }
  58. }
  59. public void GetFreePositions(RegisterType type, in Span<int> positions, out int count)
  60. {
  61. if (type == RegisterType.Integer)
  62. {
  63. _intFreePositions.CopyTo(positions);
  64. count = _intFreePositionsCount;
  65. }
  66. else
  67. {
  68. Debug.Assert(type == RegisterType.Vector);
  69. _vecFreePositions.CopyTo(positions);
  70. count = _vecFreePositionsCount;
  71. }
  72. }
  73. public void MoveActiveToInactive(int bit)
  74. {
  75. Move(Active, Inactive, bit);
  76. }
  77. public void MoveInactiveToActive(int bit)
  78. {
  79. Move(Inactive, Active, bit);
  80. }
  81. private static void Move(BitMap source, BitMap dest, int bit)
  82. {
  83. source.Clear(bit);
  84. dest.Set(bit);
  85. }
  86. }
        /// <summary>
        /// Runs linear scan register allocation over the given CFG, assigning a
        /// register or stack slot to every local and inserting the copies
        /// required by interval splits.
        /// </summary>
        public AllocationResult RunPass(
            ControlFlowGraph cfg,
            StackAllocator stackAlloc,
            RegisterMasks regMasks)
        {
            // Number locals and create the interval list (fixed intervals first).
            NumberLocals(cfg, regMasks.RegistersCount);

            var context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);

            // Compute live ranges and use positions for every interval.
            BuildIntervals(cfg, context);

            // Main scan: process intervals in order.
            for (int index = 0; index < _intervals.Count; index++)
            {
                LiveInterval current = _intervals[index];

                if (current.IsEmpty)
                {
                    continue;
                }

                if (current.IsFixed)
                {
                    // Fixed intervals already own their register; just mark them
                    // active, and record usage so the register gets preserved.
                    context.Active.Set(index);

                    if (current.IsFixedAndUsed)
                    {
                        if (current.Register.Type == RegisterType.Integer)
                        {
                            context.IntUsedRegisters |= 1 << current.Register.Index;
                        }
                        else /* if (interval.Register.Type == RegisterType.Vector) */
                        {
                            context.VecUsedRegisters |= 1 << current.Register.Index;
                        }
                    }

                    continue;
                }

                AllocateInterval(context, current, index, regMasks.RegistersCount);
            }

            // Rewrite locals as register operands for every interval that got a
            // register. The first registersCount * 2 entries are the fixed ones.
            for (int index = regMasks.RegistersCount * 2; index < _intervals.Count; index++)
            {
                if (!_intervals[index].IsSpilled)
                {
                    ReplaceLocalWithRegister(_intervals[index]);
                }
            }

            // Connect split interval pieces with copies, both inside blocks and
            // across control flow edges.
            InsertSplitCopies();
            InsertSplitCopiesAtEdges(cfg);

            return new AllocationResult(context.IntUsedRegisters, context.VecUsedRegisters, context.StackAlloc.TotalSize);
        }
  131. private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
  132. {
  133. // Check active intervals that already ended.
  134. foreach (int iIndex in context.Active)
  135. {
  136. LiveInterval interval = _intervals[iIndex];
  137. interval.Forward(current.GetStart());
  138. if (interval.GetEnd() < current.GetStart())
  139. {
  140. context.Active.Clear(iIndex);
  141. }
  142. else if (!interval.Overlaps(current.GetStart()))
  143. {
  144. context.MoveActiveToInactive(iIndex);
  145. }
  146. }
  147. // Check inactive intervals that already ended or were reactivated.
  148. foreach (int iIndex in context.Inactive)
  149. {
  150. LiveInterval interval = _intervals[iIndex];
  151. interval.Forward(current.GetStart());
  152. if (interval.GetEnd() < current.GetStart())
  153. {
  154. context.Inactive.Clear(iIndex);
  155. }
  156. else if (interval.Overlaps(current.GetStart()))
  157. {
  158. context.MoveInactiveToActive(iIndex);
  159. }
  160. }
  161. if (!TryAllocateRegWithoutSpill(context, current, cIndex, registersCount))
  162. {
  163. AllocateRegWithSpill(context, current, cIndex, registersCount);
  164. }
  165. }
        /// <summary>
        /// Tries to assign a register to <paramref name="current"/> without
        /// evicting any other interval. Returns false when every candidate
        /// register is unavailable before the interval could make use of it.
        /// </summary>
        private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            // freePositions[r]: position up to which register r is free
            // (int.MaxValue = free for the whole scan, 0 = not available).
            Span<int> freePositions = stackalloc int[registersCount];

            context.GetFreePositions(regType, freePositions, out int freePositionsCount);

            // Registers held by active intervals are not free at all.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                if (reg.Type == regType)
                {
                    freePositions[reg.Index] = 0;
                    freePositionsCount--;
                }
            }

            // If all registers are already active, return early. No point in inspecting the inactive set to look for
            // holes.
            if (freePositionsCount == 0)
            {
                return false;
            }

            // Inactive intervals limit their register's free position to the
            // point where they would start overlapping with current.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                ref int freePosition = ref freePositions[reg.Index];

                if (reg.Type == regType && freePosition != 0)
                {
                    int overlapPosition = interval.GetOverlapPosition(current);

                    if (overlapPosition != LiveInterval.NotFound && freePosition > overlapPosition)
                    {
                        freePosition = overlapPosition;
                    }
                }
            }

            // Pick the register that stays free the longest.
            int selectedReg = GetHighestValueIndex(freePositions);
            int selectedNextUse = freePositions[selectedReg];

            // Intervals starts and ends at odd positions, unless they span an entire
            // block, in this case they will have ranges at a even position.
            // When a interval is loaded from the stack to a register, we can only
            // do the split at a odd position, because otherwise the split interval
            // that is inserted on the list to be processed may clobber a register
            // used by the instruction at the same position as the split.
            // The problem only happens when a interval ends exactly at this instruction,
            // because otherwise they would interfere, and the register wouldn't be selected.
            // When the interval is aligned and the above happens, there's no problem as
            // the instruction that is actually with the last use is the one
            // before that position.
            selectedNextUse &= ~InstructionGapMask;

            if (selectedNextUse <= current.GetStart())
            {
                // Not free for long enough to be useful; caller must spill.
                return false;
            }
            else if (selectedNextUse < current.GetEnd())
            {
                // Free for only part of the lifetime: split current there and
                // queue (or spill) the remainder.
                LiveInterval splitChild = current.Split(selectedNextUse);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild, registersCount);
                }
                else
                {
                    Spill(context, splitChild);
                }
            }

            current.Register = new Register(selectedReg, regType);

            if (regType == RegisterType.Integer)
            {
                context.IntUsedRegisters |= 1 << selectedReg;
            }
            else /* if (regType == RegisterType.Vector) */
            {
                context.VecUsedRegisters |= 1 << selectedReg;
            }

            context.Active.Set(cIndex);

            return true;
        }
        /// <summary>
        /// Assigns a register to <paramref name="current"/> by spilling either
        /// current itself or the intervals currently occupying the best
        /// candidate register, splitting around fixed-register conflicts.
        /// </summary>
        private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            // usePositions[r]: earliest next use (after current's start) of any
            // interval occupying r. blockedPositions[r]: first position where r
            // is blocked by a fixed (unspillable) interval.
            Span<int> usePositions = stackalloc int[registersCount];
            Span<int> blockedPositions = stackalloc int[registersCount];

            context.GetFreePositions(regType, usePositions, out _);
            context.GetFreePositions(regType, blockedPositions, out _);

            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                if (reg.Type == regType)
                {
                    ref int usePosition = ref usePositions[reg.Index];
                    ref int blockedPosition = ref blockedPositions[reg.Index];

                    if (interval.IsFixed)
                    {
                        // Fixed and active: this register cannot be used at all.
                        usePosition = 0;
                        blockedPosition = 0;
                    }
                    else
                    {
                        int nextUse = interval.NextUseAfter(current.GetStart());

                        if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
                        {
                            usePosition = nextUse;
                        }
                    }
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];
                Register reg = interval.Register;

                if (reg.Type == regType)
                {
                    ref int usePosition = ref usePositions[reg.Index];
                    ref int blockedPosition = ref blockedPositions[reg.Index];

                    if (interval.IsFixed)
                    {
                        // Fixed and inactive: blocked from the point where it
                        // starts overlapping current, if ever.
                        int overlapPosition = interval.GetOverlapPosition(current);

                        if (overlapPosition != LiveInterval.NotFound)
                        {
                            blockedPosition = Math.Min(blockedPosition, overlapPosition);
                            usePosition = Math.Min(usePosition, overlapPosition);
                        }
                    }
                    else if (interval.Overlaps(current))
                    {
                        int nextUse = interval.NextUseAfter(current.GetStart());

                        if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
                        {
                            usePosition = nextUse;
                        }
                    }
                }
            }

            // Prefer the register whose current holder is used furthest away.
            int selectedReg = GetHighestValueIndex(usePositions);

            int currentFirstUse = current.FirstUse();

            Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");

            if (usePositions[selectedReg] < currentFirstUse)
            {
                // All intervals on inactive and active are being used before current,
                // so spill the current interval.
                Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");

                LiveInterval splitChild = current.Split(currentFirstUse);

                Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                InsertInterval(splitChild, registersCount);

                Spill(context, current);
            }
            else if (blockedPositions[selectedReg] > current.GetEnd())
            {
                // Spill made the register available for the entire current lifetime,
                // so we only need to split the intervals using the selected register.
                current.Register = new Register(selectedReg, regType);

                SplitAndSpillOverlappingIntervals(context, current, registersCount);

                context.Active.Set(cIndex);
            }
            else
            {
                // There are conflicts even after spill due to the use of fixed registers
                // that can't be spilled, so we need to also split current at the point of
                // the first fixed register use.
                current.Register = new Register(selectedReg, regType);

                // Align the split down to an even (inter-instruction) position.
                int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;

                Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");

                LiveInterval splitChild = current.Split(splitPosition);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild, registersCount);
                }
                else
                {
                    Spill(context, splitChild);
                }

                SplitAndSpillOverlappingIntervals(context, current, registersCount);

                context.Active.Set(cIndex);
            }
        }
  344. private static int GetHighestValueIndex(Span<int> span)
  345. {
  346. int highest = int.MinValue;
  347. int selected = 0;
  348. for (int index = 0; index < span.Length; index++)
  349. {
  350. int current = span[index];
  351. if (highest < current)
  352. {
  353. highest = current;
  354. selected = index;
  355. if (current == int.MaxValue)
  356. {
  357. break;
  358. }
  359. }
  360. }
  361. return selected;
  362. }
  363. private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current, int registersCount)
  364. {
  365. foreach (int iIndex in context.Active)
  366. {
  367. LiveInterval interval = _intervals[iIndex];
  368. if (!interval.IsFixed && interval.Register == current.Register)
  369. {
  370. SplitAndSpillOverlappingInterval(context, current, interval, registersCount);
  371. context.Active.Clear(iIndex);
  372. }
  373. }
  374. foreach (int iIndex in context.Inactive)
  375. {
  376. LiveInterval interval = _intervals[iIndex];
  377. if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
  378. {
  379. SplitAndSpillOverlappingInterval(context, current, interval, registersCount);
  380. context.Inactive.Clear(iIndex);
  381. }
  382. }
  383. }
  384. private void SplitAndSpillOverlappingInterval(
  385. AllocationContext context,
  386. LiveInterval current,
  387. LiveInterval interval,
  388. int registersCount)
  389. {
  390. // If there's a next use after the start of the current interval,
  391. // we need to split the spilled interval twice, and re-insert it
  392. // on the "pending" list to ensure that it will get a new register
  393. // on that use position.
  394. int nextUse = interval.NextUseAfter(current.GetStart());
  395. LiveInterval splitChild;
  396. if (interval.GetStart() < current.GetStart())
  397. {
  398. splitChild = interval.Split(current.GetStart());
  399. }
  400. else
  401. {
  402. splitChild = interval;
  403. }
  404. if (nextUse != -1)
  405. {
  406. Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");
  407. if (nextUse > splitChild.GetStart())
  408. {
  409. LiveInterval right = splitChild.Split(nextUse);
  410. Spill(context, splitChild);
  411. splitChild = right;
  412. }
  413. InsertInterval(splitChild, registersCount);
  414. }
  415. else
  416. {
  417. Spill(context, splitChild);
  418. }
  419. }
  420. private void InsertInterval(LiveInterval interval, int registersCount)
  421. {
  422. Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
  423. Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
  424. Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");
  425. int startIndex = registersCount * 2;
  426. int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);
  427. if (insertIndex < 0)
  428. {
  429. insertIndex = ~insertIndex;
  430. }
  431. _intervals.Insert(insertIndex, interval);
  432. }
  433. private void Spill(AllocationContext context, LiveInterval interval)
  434. {
  435. Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
  436. Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");
  437. // We first check if any of the siblings were spilled, if so we can reuse
  438. // the stack offset. Otherwise, we allocate a new space on the stack.
  439. // This prevents stack-to-stack copies being necessary for a split interval.
  440. if (!interval.TrySpillWithSiblingOffset())
  441. {
  442. interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
  443. }
  444. }
        /// <summary>
        /// Inserts the copies that connect adjacent pieces of split intervals
        /// within basic blocks (copies at block boundaries are handled by
        /// InsertSplitCopiesAtEdges).
        /// </summary>
        private void InsertSplitCopies()
        {
            // One resolver per split position; each coalesces all copies needed
            // there into a single parallel-copy sequence.
            Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();

            CopyResolver GetCopyResolver(int position)
            {
                if (!copyResolvers.TryGetValue(position, out CopyResolver copyResolver))
                {
                    copyResolver = new CopyResolver();

                    copyResolvers.Add(position, copyResolver);
                }

                return copyResolver;
            }

            foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
            {
                LiveInterval previous = interval;

                foreach (LiveInterval splitChild in interval.SplitChildren())
                {
                    int splitPosition = splitChild.GetStart();

                    // Only adjacent pieces (previous ends exactly where the child
                    // starts) need a copy, and not at block starts, where edge
                    // resolution takes over.
                    if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
                    {
                        GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
                    }

                    previous = splitChild;
                }
            }

            // Materialize each copy sequence immediately before the operation at
            // its split position.
            foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
            {
                CopyResolver copyResolver = kv.Value;

                if (!copyResolver.HasCopy)
                {
                    continue;
                }

                int splitPosition = kv.Key;

                (IntrusiveList<Operation> nodes, Operation node) = GetOperationNode(splitPosition);

                Operation[] sequence = copyResolver.Sequence();

                // First copy goes before the node; the rest chain after it, in order.
                nodes.AddBefore(node, sequence[0]);

                node = sequence[0];

                for (int index = 1; index < sequence.Length; index++)
                {
                    nodes.AddAfter(node, sequence[index]);

                    node = sequence[index];
                }
            }
        }
        /// <summary>
        /// Inserts the copies required when a split interval crosses a control
        /// flow edge with different pieces live on each side, splitting critical
        /// edges when there is no safe place for the copies.
        /// </summary>
        private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
        {
            int blocksCount = cfg.Blocks.Count;

            // Blocks created by earlier edge splits have indices past the
            // original block count.
            bool IsSplitEdgeBlock(BasicBlock block)
            {
                return block.Index >= blocksCount;
            }

            // Reset iterators to beginning because GetSplitChild depends on the state of the iterator.
            foreach (LiveInterval interval in _intervals)
            {
                interval.Reset();
            }

            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                if (IsSplitEdgeBlock(block))
                {
                    continue;
                }

                bool hasSingleOrNoSuccessor = block.SuccessorsCount <= 1;

                for (int i = 0; i < block.SuccessorsCount; i++)
                {
                    BasicBlock successor = block.GetSuccessor(i);

                    int succIndex = successor.Index;

                    // If the current node is a split node, then the actual successor node
                    // (the successor before the split) should be right after it.
                    if (IsSplitEdgeBlock(successor))
                    {
                        succIndex = successor.GetSuccessor(0).Index;
                    }

                    CopyResolver copyResolver = null;

                    // For each interval live into the successor, compare the piece
                    // live at the end of this block with the piece live at the
                    // start of the successor; a mismatch needs a copy on this edge.
                    foreach (int iIndex in _blockLiveIn[succIndex])
                    {
                        LiveInterval interval = _parentIntervals[iIndex];

                        if (!interval.IsSplit)
                        {
                            continue;
                        }

                        int lEnd = _blockRanges[block.Index].End - 1;
                        int rStart = _blockRanges[succIndex].Start;

                        LiveInterval left = interval.GetSplitChild(lEnd);
                        LiveInterval right = interval.GetSplitChild(rStart);

                        if (left != default && right != default && left != right)
                        {
                            if (copyResolver == null)
                            {
                                copyResolver = new CopyResolver();
                            }

                            copyResolver.AddSplit(left, right);
                        }
                    }

                    if (copyResolver == null || !copyResolver.HasCopy)
                    {
                        continue;
                    }

                    Operation[] sequence = copyResolver.Sequence();

                    if (hasSingleOrNoSuccessor)
                    {
                        // Sole outgoing edge: copies can go at the end of this block.
                        foreach (Operation operation in sequence)
                        {
                            block.Append(operation);
                        }
                    }
                    else if (successor.Predecessors.Count == 1)
                    {
                        // Sole incoming edge: copies can go at the start of the successor.
                        successor.Operations.AddFirst(sequence[0]);

                        Operation prependNode = sequence[0];

                        for (int index = 1; index < sequence.Length; index++)
                        {
                            Operation operation = sequence[index];

                            successor.Operations.AddAfter(prependNode, operation);

                            prependNode = operation;
                        }
                    }
                    else
                    {
                        // Split the critical edge.
                        BasicBlock splitBlock = cfg.SplitEdge(block, successor);

                        foreach (Operation operation in sequence)
                        {
                            splitBlock.Append(operation);
                        }
                    }
                }
            }
        }
  574. private void ReplaceLocalWithRegister(LiveInterval current)
  575. {
  576. Operand register = GetRegister(current);
  577. foreach (int usePosition in current.UsePositions())
  578. {
  579. (_, Operation operation) = GetOperationNode(usePosition);
  580. for (int index = 0; index < operation.SourcesCount; index++)
  581. {
  582. Operand source = operation.GetSource(index);
  583. if (source == current.Local)
  584. {
  585. operation.SetSource(index, register);
  586. }
  587. else if (source.Kind == OperandKind.Memory)
  588. {
  589. MemoryOperand memOp = source.GetMemory();
  590. if (memOp.BaseAddress == current.Local)
  591. {
  592. memOp.BaseAddress = register;
  593. }
  594. if (memOp.Index == current.Local)
  595. {
  596. memOp.Index = register;
  597. }
  598. }
  599. }
  600. for (int index = 0; index < operation.DestinationsCount; index++)
  601. {
  602. Operand dest = operation.GetDestination(index);
  603. if (dest == current.Local)
  604. {
  605. operation.SetDestination(index, register);
  606. }
  607. }
  608. }
  609. }
  610. private static Operand GetRegister(LiveInterval interval)
  611. {
  612. Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
  613. return Operand.Factory.Register(
  614. interval.Register.Index,
  615. interval.Register.Type,
  616. interval.Local.Type);
  617. }
  618. private (IntrusiveList<Operation>, Operation) GetOperationNode(int position)
  619. {
  620. return _operationNodes[position / InstructionGap];
  621. }
        /// <summary>
        /// Assigns sequential ids to local variables and instruction positions,
        /// creating the interval list (fixed register intervals first) and the
        /// position-to-operation map.
        /// </summary>
        private void NumberLocals(ControlFlowGraph cfg, int registersCount)
        {
            _operationNodes = new List<(IntrusiveList<Operation>, Operation)>();

            _intervals = new List<LiveInterval>();

            // Two fixed intervals per physical register (integer and vector), so
            // register ids map directly to interval indices (see GetRegisterId).
            for (int index = 0; index < registersCount; index++)
            {
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
            }

            // The "visited" state is stored in the MSB of the local's value.
            const ulong VisitedMask = 1ul << 63;

            bool IsVisited(Operand local)
            {
                return (local.GetValueUnsafe() & VisitedMask) != 0;
            }

            void SetVisited(Operand local)
            {
                local.GetValueUnsafe() |= VisitedMask;
            }

            _operationsCount = 0;

            // Walk blocks in reverse post-order so positions increase along a
            // valid linearization of the CFG.
            for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                for (Operation node = block.Operations.First; node != default; node = node.ListNext)
                {
                    _operationNodes.Add((block.Operations, node));

                    // Number each local the first time it appears as a destination.
                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        if (dest.Kind == OperandKind.LocalVariable && !IsVisited(dest))
                        {
                            dest.NumberLocal(_intervals.Count);

                            _intervals.Add(new LiveInterval(dest));

                            SetVisited(dest);
                        }
                    }
                }

                _operationsCount += block.Operations.Count * InstructionGap;

                if (block.Operations.Count == 0)
                {
                    // Pretend we have a dummy instruction on the empty block.
                    _operationNodes.Add((default, default));

                    _operationsCount += InstructionGap;
                }
            }

            // Snapshot taken before allocation inserts split children, so parent
            // intervals stay addressable by their original id.
            _parentIntervals = _intervals.ToArray();
        }
        /// <summary>
        /// Computes block live-in/live-out sets via iterative dataflow analysis,
        /// then builds the live range and use positions of every interval by
        /// walking the blocks backwards.
        /// </summary>
        private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
        {
            _blockRanges = new LiveRange[cfg.Blocks.Count];

            int mapSize = _intervals.Count;

            BitMap[] blkLiveGen  = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];

            // Compute local live sets.
            // liveGen: operands read before any write inside the block;
            // liveKill: operands written inside the block.
            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                BitMap liveGen  = new BitMap(Allocators.Default, mapSize);
                BitMap liveKill = new BitMap(Allocators.Default, mapSize);

                for (Operation node = block.Operations.First; node != default; node = node.ListNext)
                {
                    for (int i = 0; i < node.SourcesCount; i++)
                    {
                        VisitSource(node.GetSource(i));
                    }

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        VisitDestination(node.GetDestination(i));
                    }

                    void VisitSource(Operand source)
                    {
                        if (IsLocalOrRegister(source.Kind))
                        {
                            int id = GetOperandId(source);

                            // Only a gen if not already killed earlier in the block.
                            if (!liveKill.IsSet(id))
                            {
                                liveGen.Set(id);
                            }
                        }
                        else if (source.Kind == OperandKind.Memory)
                        {
                            // Memory operands may embed locals as base/index.
                            MemoryOperand memOp = source.GetMemory();

                            if (memOp.BaseAddress != default)
                            {
                                VisitSource(memOp.BaseAddress);
                            }

                            if (memOp.Index != default)
                            {
                                VisitSource(memOp.Index);
                            }
                        }
                    }

                    void VisitDestination(Operand dest)
                    {
                        liveKill.Set(GetOperandId(dest));
                    }
                }

                blkLiveGen [block.Index] = liveGen;
                blkLiveKill[block.Index] = liveKill;
            }

            // Compute global live sets.
            // Backwards dataflow to a fixed point:
            //   liveOut = union of successors' liveIn
            //   liveIn  = liveGen | (liveOut & ~liveKill)
            BitMap[] blkLiveIn  = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];

            for (int index = 0; index < cfg.Blocks.Count; index++)
            {
                blkLiveIn [index] = new BitMap(Allocators.Default, mapSize);
                blkLiveOut[index] = new BitMap(Allocators.Default, mapSize);
            }

            bool modified;

            do
            {
                modified = false;

                for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
                {
                    BasicBlock block = cfg.PostOrderBlocks[index];

                    BitMap liveOut = blkLiveOut[block.Index];

                    for (int i = 0; i < block.SuccessorsCount; i++)
                    {
                        BasicBlock succ = block.GetSuccessor(i);

                        modified |= liveOut.Set(blkLiveIn[succ.Index]);
                    }

                    BitMap liveIn = blkLiveIn[block.Index];

                    liveIn.Set  (liveOut);
                    liveIn.Clear(blkLiveKill[block.Index]);
                    liveIn.Set  (blkLiveGen [block.Index]);
                }
            }
            while (modified);

            _blockLiveIn = blkLiveIn;

            _blockEdges = new HashSet<int>();

            // Compute lifetime intervals.
            // Blocks are walked backwards (post-order), so positions are assigned
            // from the end of the function towards the start.
            int operationPos = _operationsCount;

            for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                // We handle empty blocks by pretending they have a dummy instruction,
                // because otherwise the block would have the same start and end position,
                // and this is not valid.
                int instCount = Math.Max(block.Operations.Count, 1);

                int blockStart = operationPos - instCount * InstructionGap;
                int blockEnd = operationPos;

                _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);

                _blockEdges.Add(blockStart);

                BitMap liveOut = blkLiveOut[block.Index];

                // Anything live-out initially spans the entire block; the visits
                // below refine the range where the operand is defined or used.
                foreach (int id in liveOut)
                {
                    _intervals[id].AddRange(blockStart, blockEnd);
                }

                if (block.Operations.Count == 0)
                {
                    operationPos -= InstructionGap;

                    continue;
                }

                for (Operation node = block.Operations.Last; node != default; node = node.ListPrevious)
                {
                    operationPos -= InstructionGap;

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        VisitDestination(node.GetDestination(i));
                    }

                    for (int i = 0; i < node.SourcesCount; i++)
                    {
                        VisitSource(node.GetSource(i));
                    }

                    if (node.Instruction == Instruction.Call)
                    {
                        // Calls clobber caller-saved registers: give each one a
                        // short fixed range across the call position.
                        AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                        AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
                    }

                    void VisitSource(Operand source)
                    {
                        if (IsLocalOrRegister(source.Kind))
                        {
                            LiveInterval interval = _intervals[GetOperandId(source)];

                            // Extend liveness from the block start up to this use.
                            interval.AddRange(blockStart, operationPos + 1);
                            interval.AddUsePosition(operationPos);
                        }
                        else if (source.Kind == OperandKind.Memory)
                        {
                            MemoryOperand memOp = source.GetMemory();

                            if (memOp.BaseAddress != default)
                            {
                                VisitSource(memOp.BaseAddress);
                            }

                            if (memOp.Index != default)
                            {
                                VisitSource(memOp.Index);
                            }
                        }
                    }

                    void VisitDestination(Operand dest)
                    {
                        LiveInterval interval = _intervals[GetOperandId(dest)];

                        if (interval.IsFixed)
                        {
                            interval.IsFixedAndUsed = true;
                        }

                        // A definition trims the interval to start at this position.
                        interval.SetStart(operationPos + 1);
                        interval.AddUsePosition(operationPos + 1);
                    }
                }
            }

            // Rewind interval iterators before the allocation scan runs.
            foreach (LiveInterval interval in _parentIntervals)
            {
                interval.Reset();
            }
        }
  828. private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
  829. {
  830. while (mask != 0)
  831. {
  832. int regIndex = BitOperations.TrailingZeroCount(mask);
  833. Register callerSavedReg = new Register(regIndex, regType);
  834. LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
  835. interval.AddRange(operationPos + 1, operationPos + InstructionGap);
  836. mask &= ~(1 << regIndex);
  837. }
  838. }
  839. private static int GetOperandId(Operand operand)
  840. {
  841. if (operand.Kind == OperandKind.LocalVariable)
  842. {
  843. return operand.GetLocalNumber();
  844. }
  845. else if (operand.Kind == OperandKind.Register)
  846. {
  847. return GetRegisterId(operand.GetRegister());
  848. }
  849. else
  850. {
  851. throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
  852. }
  853. }
  854. private static int GetRegisterId(Register register)
  855. {
  856. return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
  857. }
  858. private static bool IsLocalOrRegister(OperandKind kind)
  859. {
  860. return kind == OperandKind.LocalVariable ||
  861. kind == OperandKind.Register;
  862. }
  863. }
  864. }