// LinearScanAllocator.cs
  1. using ARMeilleure.Common;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Linq;
  8. using System.Numerics;
  9. namespace ARMeilleure.CodeGen.RegisterAllocators
  10. {
  11. // Based on:
  12. // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
  13. // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
  14. class LinearScanAllocator : IRegisterAllocator
  15. {
  16. private const int InstructionGap = 2;
  17. private const int InstructionGapMask = InstructionGap - 1;
  18. private const int RegistersCount = 16;
  19. private HashSet<int> _blockEdges;
  20. private LiveRange[] _blockRanges;
  21. private BitMap[] _blockLiveIn;
  22. private List<LiveInterval> _intervals;
  23. private LiveInterval[] _parentIntervals;
  24. private List<(IntrusiveList<Operation>, Operation)> _operationNodes;
  25. private int _operationsCount;
  26. private class AllocationContext
  27. {
  28. public RegisterMasks Masks { get; }
  29. public StackAllocator StackAlloc { get; }
  30. public BitMap Active { get; }
  31. public BitMap Inactive { get; }
  32. public int IntUsedRegisters { get; set; }
  33. public int VecUsedRegisters { get; set; }
  34. private readonly int[] _intFreePositions;
  35. private readonly int[] _vecFreePositions;
  36. private readonly int _intFreePositionsCount;
  37. private readonly int _vecFreePositionsCount;
  38. public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
  39. {
  40. StackAlloc = stackAlloc;
  41. Masks = masks;
  42. Active = new BitMap(Allocators.Default, intervalsCount);
  43. Inactive = new BitMap(Allocators.Default, intervalsCount);
  44. PopulateFreePositions(RegisterType.Integer, out _intFreePositions, out _intFreePositionsCount);
  45. PopulateFreePositions(RegisterType.Vector, out _vecFreePositions, out _vecFreePositionsCount);
  46. void PopulateFreePositions(RegisterType type, out int[] positions, out int count)
  47. {
  48. positions = new int[RegistersCount];
  49. count = BitOperations.PopCount((uint)masks.GetAvailableRegisters(type));
  50. int mask = masks.GetAvailableRegisters(type);
  51. for (int i = 0; i < positions.Length; i++)
  52. {
  53. if ((mask & (1 << i)) != 0)
  54. {
  55. positions[i] = int.MaxValue;
  56. }
  57. }
  58. }
  59. }
  60. public void GetFreePositions(RegisterType type, in Span<int> positions, out int count)
  61. {
  62. if (type == RegisterType.Integer)
  63. {
  64. _intFreePositions.CopyTo(positions);
  65. count = _intFreePositionsCount;
  66. }
  67. else
  68. {
  69. Debug.Assert(type == RegisterType.Vector);
  70. _vecFreePositions.CopyTo(positions);
  71. count = _vecFreePositionsCount;
  72. }
  73. }
  74. public void MoveActiveToInactive(int bit)
  75. {
  76. Move(Active, Inactive, bit);
  77. }
  78. public void MoveInactiveToActive(int bit)
  79. {
  80. Move(Inactive, Active, bit);
  81. }
  82. private static void Move(BitMap source, BitMap dest, int bit)
  83. {
  84. source.Clear(bit);
  85. dest.Set(bit);
  86. }
  87. }
  88. public AllocationResult RunPass(
  89. ControlFlowGraph cfg,
  90. StackAllocator stackAlloc,
  91. RegisterMasks regMasks)
  92. {
  93. NumberLocals(cfg);
  94. var context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);
  95. BuildIntervals(cfg, context);
  96. for (int index = 0; index < _intervals.Count; index++)
  97. {
  98. LiveInterval current = _intervals[index];
  99. if (current.IsEmpty)
  100. {
  101. continue;
  102. }
  103. if (current.IsFixed)
  104. {
  105. context.Active.Set(index);
  106. if (current.Register.Type == RegisterType.Integer)
  107. {
  108. context.IntUsedRegisters |= 1 << current.Register.Index;
  109. }
  110. else /* if (interval.Register.Type == RegisterType.Vector) */
  111. {
  112. context.VecUsedRegisters |= 1 << current.Register.Index;
  113. }
  114. continue;
  115. }
  116. AllocateInterval(context, current, index);
  117. }
  118. for (int index = RegistersCount * 2; index < _intervals.Count; index++)
  119. {
  120. if (!_intervals[index].IsSpilled)
  121. {
  122. ReplaceLocalWithRegister(_intervals[index]);
  123. }
  124. }
  125. InsertSplitCopies();
  126. InsertSplitCopiesAtEdges(cfg);
  127. return new AllocationResult(context.IntUsedRegisters, context.VecUsedRegisters, context.StackAlloc.TotalSize);
  128. }
  129. private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex)
  130. {
  131. // Check active intervals that already ended.
  132. foreach (int iIndex in context.Active)
  133. {
  134. LiveInterval interval = _intervals[iIndex];
  135. interval.Forward(current.GetStart());
  136. if (interval.GetEnd() < current.GetStart())
  137. {
  138. context.Active.Clear(iIndex);
  139. }
  140. else if (!interval.Overlaps(current.GetStart()))
  141. {
  142. context.MoveActiveToInactive(iIndex);
  143. }
  144. }
  145. // Check inactive intervals that already ended or were reactivated.
  146. foreach (int iIndex in context.Inactive)
  147. {
  148. LiveInterval interval = _intervals[iIndex];
  149. interval.Forward(current.GetStart());
  150. if (interval.GetEnd() < current.GetStart())
  151. {
  152. context.Inactive.Clear(iIndex);
  153. }
  154. else if (interval.Overlaps(current.GetStart()))
  155. {
  156. context.MoveInactiveToActive(iIndex);
  157. }
  158. }
  159. if (!TryAllocateRegWithoutSpill(context, current, cIndex))
  160. {
  161. AllocateRegWithSpill(context, current, cIndex);
  162. }
  163. }
  164. private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex)
  165. {
  166. RegisterType regType = current.Local.Type.ToRegisterType();
  167. Span<int> freePositions = stackalloc int[RegistersCount];
  168. context.GetFreePositions(regType, freePositions, out int freePositionsCount);
  169. foreach (int iIndex in context.Active)
  170. {
  171. LiveInterval interval = _intervals[iIndex];
  172. Register reg = interval.Register;
  173. if (reg.Type == regType)
  174. {
  175. freePositions[reg.Index] = 0;
  176. freePositionsCount--;
  177. }
  178. }
  179. // If all registers are already active, return early. No point in inspecting the inactive set to look for
  180. // holes.
  181. if (freePositionsCount == 0)
  182. {
  183. return false;
  184. }
  185. foreach (int iIndex in context.Inactive)
  186. {
  187. LiveInterval interval = _intervals[iIndex];
  188. Register reg = interval.Register;
  189. ref int freePosition = ref freePositions[reg.Index];
  190. if (reg.Type == regType && freePosition != 0)
  191. {
  192. int overlapPosition = interval.GetOverlapPosition(current);
  193. if (overlapPosition != LiveInterval.NotFound && freePosition > overlapPosition)
  194. {
  195. freePosition = overlapPosition;
  196. }
  197. }
  198. }
  199. int selectedReg = GetHighestValueIndex(freePositions);
  200. int selectedNextUse = freePositions[selectedReg];
  201. // Intervals starts and ends at odd positions, unless they span an entire
  202. // block, in this case they will have ranges at a even position.
  203. // When a interval is loaded from the stack to a register, we can only
  204. // do the split at a odd position, because otherwise the split interval
  205. // that is inserted on the list to be processed may clobber a register
  206. // used by the instruction at the same position as the split.
  207. // The problem only happens when a interval ends exactly at this instruction,
  208. // because otherwise they would interfere, and the register wouldn't be selected.
  209. // When the interval is aligned and the above happens, there's no problem as
  210. // the instruction that is actually with the last use is the one
  211. // before that position.
  212. selectedNextUse &= ~InstructionGapMask;
  213. if (selectedNextUse <= current.GetStart())
  214. {
  215. return false;
  216. }
  217. else if (selectedNextUse < current.GetEnd())
  218. {
  219. LiveInterval splitChild = current.Split(selectedNextUse);
  220. if (splitChild.UsesCount != 0)
  221. {
  222. Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
  223. InsertInterval(splitChild);
  224. }
  225. else
  226. {
  227. Spill(context, splitChild);
  228. }
  229. }
  230. current.Register = new Register(selectedReg, regType);
  231. if (regType == RegisterType.Integer)
  232. {
  233. context.IntUsedRegisters |= 1 << selectedReg;
  234. }
  235. else /* if (regType == RegisterType.Vector) */
  236. {
  237. context.VecUsedRegisters |= 1 << selectedReg;
  238. }
  239. context.Active.Set(cIndex);
  240. return true;
  241. }
  242. private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex)
  243. {
  244. RegisterType regType = current.Local.Type.ToRegisterType();
  245. Span<int> usePositions = stackalloc int[RegistersCount];
  246. Span<int> blockedPositions = stackalloc int[RegistersCount];
  247. context.GetFreePositions(regType, usePositions, out _);
  248. context.GetFreePositions(regType, blockedPositions, out _);
  249. foreach (int iIndex in context.Active)
  250. {
  251. LiveInterval interval = _intervals[iIndex];
  252. Register reg = interval.Register;
  253. if (reg.Type == regType)
  254. {
  255. ref int usePosition = ref usePositions[reg.Index];
  256. ref int blockedPosition = ref blockedPositions[reg.Index];
  257. if (interval.IsFixed)
  258. {
  259. usePosition = 0;
  260. blockedPosition = 0;
  261. }
  262. else
  263. {
  264. int nextUse = interval.NextUseAfter(current.GetStart());
  265. if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
  266. {
  267. usePosition = nextUse;
  268. }
  269. }
  270. }
  271. }
  272. foreach (int iIndex in context.Inactive)
  273. {
  274. LiveInterval interval = _intervals[iIndex];
  275. Register reg = interval.Register;
  276. if (reg.Type == regType)
  277. {
  278. ref int usePosition = ref usePositions[reg.Index];
  279. ref int blockedPosition = ref blockedPositions[reg.Index];
  280. if (interval.IsFixed)
  281. {
  282. int overlapPosition = interval.GetOverlapPosition(current);
  283. if (overlapPosition != LiveInterval.NotFound)
  284. {
  285. blockedPosition = Math.Min(blockedPosition, overlapPosition);
  286. usePosition = Math.Min(usePosition, overlapPosition);
  287. }
  288. }
  289. else if (interval.Overlaps(current))
  290. {
  291. int nextUse = interval.NextUseAfter(current.GetStart());
  292. if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
  293. {
  294. usePosition = nextUse;
  295. }
  296. }
  297. }
  298. }
  299. int selectedReg = GetHighestValueIndex(usePositions);
  300. int currentFirstUse = current.FirstUse();
  301. Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");
  302. if (usePositions[selectedReg] < currentFirstUse)
  303. {
  304. // All intervals on inactive and active are being used before current,
  305. // so spill the current interval.
  306. Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");
  307. LiveInterval splitChild = current.Split(currentFirstUse);
  308. Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
  309. InsertInterval(splitChild);
  310. Spill(context, current);
  311. }
  312. else if (blockedPositions[selectedReg] > current.GetEnd())
  313. {
  314. // Spill made the register available for the entire current lifetime,
  315. // so we only need to split the intervals using the selected register.
  316. current.Register = new Register(selectedReg, regType);
  317. SplitAndSpillOverlappingIntervals(context, current);
  318. context.Active.Set(cIndex);
  319. }
  320. else
  321. {
  322. // There are conflicts even after spill due to the use of fixed registers
  323. // that can't be spilled, so we need to also split current at the point of
  324. // the first fixed register use.
  325. current.Register = new Register(selectedReg, regType);
  326. int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;
  327. Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");
  328. LiveInterval splitChild = current.Split(splitPosition);
  329. if (splitChild.UsesCount != 0)
  330. {
  331. Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
  332. InsertInterval(splitChild);
  333. }
  334. else
  335. {
  336. Spill(context, splitChild);
  337. }
  338. SplitAndSpillOverlappingIntervals(context, current);
  339. context.Active.Set(cIndex);
  340. }
  341. }
  342. private static int GetHighestValueIndex(Span<int> span)
  343. {
  344. int highest = span[0];
  345. if (highest == int.MaxValue)
  346. {
  347. return 0;
  348. }
  349. int selected = 0;
  350. for (int index = 1; index < span.Length; index++)
  351. {
  352. int current = span[index];
  353. if (highest < current)
  354. {
  355. highest = current;
  356. selected = index;
  357. if (current == int.MaxValue)
  358. {
  359. break;
  360. }
  361. }
  362. }
  363. return selected;
  364. }
  365. private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current)
  366. {
  367. foreach (int iIndex in context.Active)
  368. {
  369. LiveInterval interval = _intervals[iIndex];
  370. if (!interval.IsFixed && interval.Register == current.Register)
  371. {
  372. SplitAndSpillOverlappingInterval(context, current, interval);
  373. context.Active.Clear(iIndex);
  374. }
  375. }
  376. foreach (int iIndex in context.Inactive)
  377. {
  378. LiveInterval interval = _intervals[iIndex];
  379. if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
  380. {
  381. SplitAndSpillOverlappingInterval(context, current, interval);
  382. context.Inactive.Clear(iIndex);
  383. }
  384. }
  385. }
  386. private void SplitAndSpillOverlappingInterval(
  387. AllocationContext context,
  388. LiveInterval current,
  389. LiveInterval interval)
  390. {
  391. // If there's a next use after the start of the current interval,
  392. // we need to split the spilled interval twice, and re-insert it
  393. // on the "pending" list to ensure that it will get a new register
  394. // on that use position.
  395. int nextUse = interval.NextUseAfter(current.GetStart());
  396. LiveInterval splitChild;
  397. if (interval.GetStart() < current.GetStart())
  398. {
  399. splitChild = interval.Split(current.GetStart());
  400. }
  401. else
  402. {
  403. splitChild = interval;
  404. }
  405. if (nextUse != -1)
  406. {
  407. Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");
  408. if (nextUse > splitChild.GetStart())
  409. {
  410. LiveInterval right = splitChild.Split(nextUse);
  411. Spill(context, splitChild);
  412. splitChild = right;
  413. }
  414. InsertInterval(splitChild);
  415. }
  416. else
  417. {
  418. Spill(context, splitChild);
  419. }
  420. }
  421. private void InsertInterval(LiveInterval interval)
  422. {
  423. Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
  424. Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
  425. Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");
  426. int startIndex = RegistersCount * 2;
  427. int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);
  428. if (insertIndex < 0)
  429. {
  430. insertIndex = ~insertIndex;
  431. }
  432. _intervals.Insert(insertIndex, interval);
  433. }
  434. private void Spill(AllocationContext context, LiveInterval interval)
  435. {
  436. Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
  437. Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");
  438. // We first check if any of the siblings were spilled, if so we can reuse
  439. // the stack offset. Otherwise, we allocate a new space on the stack.
  440. // This prevents stack-to-stack copies being necessary for a split interval.
  441. if (!interval.TrySpillWithSiblingOffset())
  442. {
  443. interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
  444. }
  445. }
  446. private void InsertSplitCopies()
  447. {
  448. Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();
  449. CopyResolver GetCopyResolver(int position)
  450. {
  451. if (!copyResolvers.TryGetValue(position, out CopyResolver copyResolver))
  452. {
  453. copyResolver = new CopyResolver();
  454. copyResolvers.Add(position, copyResolver);
  455. }
  456. return copyResolver;
  457. }
  458. foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
  459. {
  460. LiveInterval previous = interval;
  461. foreach (LiveInterval splitChild in interval.SplitChildren())
  462. {
  463. int splitPosition = splitChild.GetStart();
  464. if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
  465. {
  466. GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
  467. }
  468. previous = splitChild;
  469. }
  470. }
  471. foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
  472. {
  473. CopyResolver copyResolver = kv.Value;
  474. if (!copyResolver.HasCopy)
  475. {
  476. continue;
  477. }
  478. int splitPosition = kv.Key;
  479. (IntrusiveList<Operation> nodes, Operation node) = GetOperationNode(splitPosition);
  480. Operation[] sequence = copyResolver.Sequence();
  481. nodes.AddBefore(node, sequence[0]);
  482. node = sequence[0];
  483. for (int index = 1; index < sequence.Length; index++)
  484. {
  485. nodes.AddAfter(node, sequence[index]);
  486. node = sequence[index];
  487. }
  488. }
  489. }
  490. private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
  491. {
  492. int blocksCount = cfg.Blocks.Count;
  493. bool IsSplitEdgeBlock(BasicBlock block)
  494. {
  495. return block.Index >= blocksCount;
  496. }
  497. // Reset iterators to beginning because GetSplitChild depends on the state of the iterator.
  498. foreach (LiveInterval interval in _intervals)
  499. {
  500. interval.Reset();
  501. }
  502. for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
  503. {
  504. if (IsSplitEdgeBlock(block))
  505. {
  506. continue;
  507. }
  508. bool hasSingleOrNoSuccessor = block.SuccessorsCount <= 1;
  509. for (int i = 0; i < block.SuccessorsCount; i++)
  510. {
  511. BasicBlock successor = block.GetSuccessor(i);
  512. int succIndex = successor.Index;
  513. // If the current node is a split node, then the actual successor node
  514. // (the successor before the split) should be right after it.
  515. if (IsSplitEdgeBlock(successor))
  516. {
  517. succIndex = successor.GetSuccessor(0).Index;
  518. }
  519. CopyResolver copyResolver = null;
  520. foreach (int iIndex in _blockLiveIn[succIndex])
  521. {
  522. LiveInterval interval = _parentIntervals[iIndex];
  523. if (!interval.IsSplit)
  524. {
  525. continue;
  526. }
  527. int lEnd = _blockRanges[block.Index].End - 1;
  528. int rStart = _blockRanges[succIndex].Start;
  529. LiveInterval left = interval.GetSplitChild(lEnd);
  530. LiveInterval right = interval.GetSplitChild(rStart);
  531. if (left != default && right != default && left != right)
  532. {
  533. if (copyResolver == null)
  534. {
  535. copyResolver = new CopyResolver();
  536. }
  537. copyResolver.AddSplit(left, right);
  538. }
  539. }
  540. if (copyResolver == null || !copyResolver.HasCopy)
  541. {
  542. continue;
  543. }
  544. Operation[] sequence = copyResolver.Sequence();
  545. if (hasSingleOrNoSuccessor)
  546. {
  547. foreach (Operation operation in sequence)
  548. {
  549. block.Append(operation);
  550. }
  551. }
  552. else if (successor.Predecessors.Count == 1)
  553. {
  554. successor.Operations.AddFirst(sequence[0]);
  555. Operation prependNode = sequence[0];
  556. for (int index = 1; index < sequence.Length; index++)
  557. {
  558. Operation operation = sequence[index];
  559. successor.Operations.AddAfter(prependNode, operation);
  560. prependNode = operation;
  561. }
  562. }
  563. else
  564. {
  565. // Split the critical edge.
  566. BasicBlock splitBlock = cfg.SplitEdge(block, successor);
  567. foreach (Operation operation in sequence)
  568. {
  569. splitBlock.Append(operation);
  570. }
  571. }
  572. }
  573. }
  574. }
  575. private void ReplaceLocalWithRegister(LiveInterval current)
  576. {
  577. Operand register = GetRegister(current);
  578. foreach (int usePosition in current.UsePositions())
  579. {
  580. (_, Operation operation) = GetOperationNode(usePosition);
  581. for (int index = 0; index < operation.SourcesCount; index++)
  582. {
  583. Operand source = operation.GetSource(index);
  584. if (source == current.Local)
  585. {
  586. operation.SetSource(index, register);
  587. }
  588. else if (source.Kind == OperandKind.Memory)
  589. {
  590. MemoryOperand memOp = source.GetMemory();
  591. if (memOp.BaseAddress == current.Local)
  592. {
  593. memOp.BaseAddress = register;
  594. }
  595. if (memOp.Index == current.Local)
  596. {
  597. memOp.Index = register;
  598. }
  599. }
  600. }
  601. for (int index = 0; index < operation.DestinationsCount; index++)
  602. {
  603. Operand dest = operation.GetDestination(index);
  604. if (dest == current.Local)
  605. {
  606. operation.SetDestination(index, register);
  607. }
  608. }
  609. }
  610. }
  611. private static Operand GetRegister(LiveInterval interval)
  612. {
  613. Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
  614. return Operand.Factory.Register(
  615. interval.Register.Index,
  616. interval.Register.Type,
  617. interval.Local.Type);
  618. }
  619. private (IntrusiveList<Operation>, Operation) GetOperationNode(int position)
  620. {
  621. return _operationNodes[position / InstructionGap];
  622. }
  623. private void NumberLocals(ControlFlowGraph cfg)
  624. {
  625. _operationNodes = new List<(IntrusiveList<Operation>, Operation)>();
  626. _intervals = new List<LiveInterval>();
  627. for (int index = 0; index < RegistersCount; index++)
  628. {
  629. _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
  630. _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
  631. }
  632. // The "visited" state is stored in the MSB of the local's value.
  633. const ulong VisitedMask = 1ul << 63;
  634. bool IsVisited(Operand local)
  635. {
  636. return (local.GetValueUnsafe() & VisitedMask) != 0;
  637. }
  638. void SetVisited(Operand local)
  639. {
  640. local.GetValueUnsafe() |= VisitedMask;
  641. }
  642. _operationsCount = 0;
  643. for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
  644. {
  645. BasicBlock block = cfg.PostOrderBlocks[index];
  646. for (Operation node = block.Operations.First; node != default; node = node.ListNext)
  647. {
  648. _operationNodes.Add((block.Operations, node));
  649. for (int i = 0; i < node.DestinationsCount; i++)
  650. {
  651. Operand dest = node.GetDestination(i);
  652. if (dest.Kind == OperandKind.LocalVariable && !IsVisited(dest))
  653. {
  654. dest.NumberLocal(_intervals.Count);
  655. _intervals.Add(new LiveInterval(dest));
  656. SetVisited(dest);
  657. }
  658. }
  659. }
  660. _operationsCount += block.Operations.Count * InstructionGap;
  661. if (block.Operations.Count == 0)
  662. {
  663. // Pretend we have a dummy instruction on the empty block.
  664. _operationNodes.Add((default, default));
  665. _operationsCount += InstructionGap;
  666. }
  667. }
  668. _parentIntervals = _intervals.ToArray();
  669. }
  670. private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
  671. {
  672. _blockRanges = new LiveRange[cfg.Blocks.Count];
  673. int mapSize = _intervals.Count;
  674. BitMap[] blkLiveGen = new BitMap[cfg.Blocks.Count];
  675. BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];
  676. // Compute local live sets.
  677. for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
  678. {
  679. BitMap liveGen = new BitMap(Allocators.Default, mapSize);
  680. BitMap liveKill = new BitMap(Allocators.Default, mapSize);
  681. for (Operation node = block.Operations.First; node != default; node = node.ListNext)
  682. {
  683. for (int i = 0; i < node.SourcesCount; i++)
  684. {
  685. VisitSource(node.GetSource(i));
  686. }
  687. for (int i = 0; i < node.DestinationsCount; i++)
  688. {
  689. VisitDestination(node.GetDestination(i));
  690. }
  691. void VisitSource(Operand source)
  692. {
  693. if (IsLocalOrRegister(source.Kind))
  694. {
  695. int id = GetOperandId(source);
  696. if (!liveKill.IsSet(id))
  697. {
  698. liveGen.Set(id);
  699. }
  700. }
  701. else if (source.Kind == OperandKind.Memory)
  702. {
  703. MemoryOperand memOp = source.GetMemory();
  704. if (memOp.BaseAddress != default)
  705. {
  706. VisitSource(memOp.BaseAddress);
  707. }
  708. if (memOp.Index != default)
  709. {
  710. VisitSource(memOp.Index);
  711. }
  712. }
  713. }
  714. void VisitDestination(Operand dest)
  715. {
  716. liveKill.Set(GetOperandId(dest));
  717. }
  718. }
  719. blkLiveGen [block.Index] = liveGen;
  720. blkLiveKill[block.Index] = liveKill;
  721. }
  722. // Compute global live sets.
  723. BitMap[] blkLiveIn = new BitMap[cfg.Blocks.Count];
  724. BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];
  725. for (int index = 0; index < cfg.Blocks.Count; index++)
  726. {
  727. blkLiveIn [index] = new BitMap(Allocators.Default, mapSize);
  728. blkLiveOut[index] = new BitMap(Allocators.Default, mapSize);
  729. }
  730. bool modified;
  731. do
  732. {
  733. modified = false;
  734. for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
  735. {
  736. BasicBlock block = cfg.PostOrderBlocks[index];
  737. BitMap liveOut = blkLiveOut[block.Index];
  738. for (int i = 0; i < block.SuccessorsCount; i++)
  739. {
  740. BasicBlock succ = block.GetSuccessor(i);
  741. modified |= liveOut.Set(blkLiveIn[succ.Index]);
  742. }
  743. BitMap liveIn = blkLiveIn[block.Index];
  744. liveIn.Set (liveOut);
  745. liveIn.Clear(blkLiveKill[block.Index]);
  746. liveIn.Set (blkLiveGen [block.Index]);
  747. }
  748. }
  749. while (modified);
  750. _blockLiveIn = blkLiveIn;
  751. _blockEdges = new HashSet<int>();
  752. // Compute lifetime intervals.
  753. int operationPos = _operationsCount;
  754. for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
  755. {
  756. BasicBlock block = cfg.PostOrderBlocks[index];
  757. // We handle empty blocks by pretending they have a dummy instruction,
  758. // because otherwise the block would have the same start and end position,
  759. // and this is not valid.
  760. int instCount = Math.Max(block.Operations.Count, 1);
  761. int blockStart = operationPos - instCount * InstructionGap;
  762. int blockEnd = operationPos;
  763. _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);
  764. _blockEdges.Add(blockStart);
  765. BitMap liveOut = blkLiveOut[block.Index];
  766. foreach (int id in liveOut)
  767. {
  768. _intervals[id].AddRange(blockStart, blockEnd);
  769. }
  770. if (block.Operations.Count == 0)
  771. {
  772. operationPos -= InstructionGap;
  773. continue;
  774. }
  775. for (Operation node = block.Operations.Last; node != default; node = node.ListPrevious)
  776. {
  777. operationPos -= InstructionGap;
  778. for (int i = 0; i < node.DestinationsCount; i++)
  779. {
  780. VisitDestination(node.GetDestination(i));
  781. }
  782. for (int i = 0; i < node.SourcesCount; i++)
  783. {
  784. VisitSource(node.GetSource(i));
  785. }
  786. if (node.Instruction == Instruction.Call)
  787. {
  788. AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
  789. AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
  790. }
  791. void VisitSource(Operand source)
  792. {
  793. if (IsLocalOrRegister(source.Kind))
  794. {
  795. LiveInterval interval = _intervals[GetOperandId(source)];
  796. interval.AddRange(blockStart, operationPos + 1);
  797. interval.AddUsePosition(operationPos);
  798. }
  799. else if (source.Kind == OperandKind.Memory)
  800. {
  801. MemoryOperand memOp = source.GetMemory();
  802. if (memOp.BaseAddress != default)
  803. {
  804. VisitSource(memOp.BaseAddress);
  805. }
  806. if (memOp.Index != default)
  807. {
  808. VisitSource(memOp.Index);
  809. }
  810. }
  811. }
  812. void VisitDestination(Operand dest)
  813. {
  814. LiveInterval interval = _intervals[GetOperandId(dest)];
  815. interval.SetStart(operationPos + 1);
  816. interval.AddUsePosition(operationPos + 1);
  817. }
  818. }
  819. }
  820. foreach (LiveInterval interval in _parentIntervals)
  821. {
  822. interval.Reset();
  823. }
  824. }
  825. private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
  826. {
  827. while (mask != 0)
  828. {
  829. int regIndex = BitOperations.TrailingZeroCount(mask);
  830. Register callerSavedReg = new Register(regIndex, regType);
  831. LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
  832. interval.AddRange(operationPos + 1, operationPos + InstructionGap);
  833. mask &= ~(1 << regIndex);
  834. }
  835. }
  836. private static int GetOperandId(Operand operand)
  837. {
  838. if (operand.Kind == OperandKind.LocalVariable)
  839. {
  840. return operand.GetLocalNumber();
  841. }
  842. else if (operand.Kind == OperandKind.Register)
  843. {
  844. return GetRegisterId(operand.GetRegister());
  845. }
  846. else
  847. {
  848. throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
  849. }
  850. }
  851. private static int GetRegisterId(Register register)
  852. {
  853. return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
  854. }
  855. private static bool IsLocalOrRegister(OperandKind kind)
  856. {
  857. return kind == OperandKind.LocalVariable ||
  858. kind == OperandKind.Register;
  859. }
  860. }
  861. }