LinearScanAllocator.cs 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106
  1. using ARMeilleure.Common;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Linq;
  8. using System.Numerics;
  9. namespace ARMeilleure.CodeGen.RegisterAllocators
  10. {
  11. // Based on:
  12. // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
  13. // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
  14. class LinearScanAllocator : IRegisterAllocator
  15. {
  16. private const int InstructionGap = 2;
  17. private const int InstructionGapMask = InstructionGap - 1;
  18. private HashSet<int> _blockEdges;
  19. private LiveRange[] _blockRanges;
  20. private BitMap[] _blockLiveIn;
  21. private List<LiveInterval> _intervals;
  22. private LiveInterval[] _parentIntervals;
  23. private List<(IntrusiveList<Operation>, Operation)> _operationNodes;
  24. private int _operationsCount;
  25. private class AllocationContext
  26. {
  27. public RegisterMasks Masks { get; }
  28. public StackAllocator StackAlloc { get; }
  29. public BitMap Active { get; }
  30. public BitMap Inactive { get; }
  31. public int IntUsedRegisters { get; set; }
  32. public int VecUsedRegisters { get; set; }
  33. private readonly int[] _intFreePositions;
  34. private readonly int[] _vecFreePositions;
  35. private readonly int _intFreePositionsCount;
  36. private readonly int _vecFreePositionsCount;
  37. public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
  38. {
  39. StackAlloc = stackAlloc;
  40. Masks = masks;
  41. Active = new BitMap(Allocators.Default, intervalsCount);
  42. Inactive = new BitMap(Allocators.Default, intervalsCount);
  43. PopulateFreePositions(RegisterType.Integer, out _intFreePositions, out _intFreePositionsCount);
  44. PopulateFreePositions(RegisterType.Vector, out _vecFreePositions, out _vecFreePositionsCount);
  45. void PopulateFreePositions(RegisterType type, out int[] positions, out int count)
  46. {
  47. positions = new int[masks.RegistersCount];
  48. count = BitOperations.PopCount((uint)masks.GetAvailableRegisters(type));
  49. int mask = masks.GetAvailableRegisters(type);
  50. for (int i = 0; i < positions.Length; i++)
  51. {
  52. if ((mask & (1 << i)) != 0)
  53. {
  54. positions[i] = int.MaxValue;
  55. }
  56. }
  57. }
  58. }
  59. public void GetFreePositions(RegisterType type, in Span<int> positions, out int count)
  60. {
  61. if (type == RegisterType.Integer)
  62. {
  63. _intFreePositions.CopyTo(positions);
  64. count = _intFreePositionsCount;
  65. }
  66. else
  67. {
  68. Debug.Assert(type == RegisterType.Vector);
  69. _vecFreePositions.CopyTo(positions);
  70. count = _vecFreePositionsCount;
  71. }
  72. }
  73. public void MoveActiveToInactive(int bit)
  74. {
  75. Move(Active, Inactive, bit);
  76. }
  77. public void MoveInactiveToActive(int bit)
  78. {
  79. Move(Inactive, Active, bit);
  80. }
  81. private static void Move(BitMap source, BitMap dest, int bit)
  82. {
  83. source.Clear(bit);
  84. dest.Set(bit);
  85. }
  86. }
/// <summary>
/// Runs the linear scan register allocation pass over the given control flow graph.
/// </summary>
/// <param name="cfg">Control flow graph to allocate registers for</param>
/// <param name="stackAlloc">Stack allocator used to reserve spill slots</param>
/// <param name="regMasks">Masks describing the registers available for allocation</param>
/// <returns>Bitmasks of the integer and vector registers used, plus total spill stack size</returns>
public AllocationResult RunPass(
    ControlFlowGraph cfg,
    StackAllocator stackAlloc,
    RegisterMasks regMasks)
{
    NumberLocals(cfg, regMasks.RegistersCount);

    var context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);

    BuildIntervals(cfg, context);

    // Main scan. The list head holds the fixed intervals created by NumberLocals
    // (one integer and one vector interval per physical register); everything
    // after them is a virtual register interval in list order.
    for (int index = 0; index < _intervals.Count; index++)
    {
        LiveInterval current = _intervals[index];

        if (current.IsEmpty)
        {
            continue;
        }

        if (current.IsFixed)
        {
            // Fixed intervals are pre-bound to a physical register; they only act
            // as blockers for the scan, so just mark them active and record usage.
            context.Active.Set(index);

            if (current.IsFixedAndUsed)
            {
                if (current.Register.Type == RegisterType.Integer)
                {
                    context.IntUsedRegisters |= 1 << current.Register.Index;
                }
                else /* if (interval.Register.Type == RegisterType.Vector) */
                {
                    context.VecUsedRegisters |= 1 << current.Register.Index;
                }
            }

            continue;
        }

        AllocateInterval(context, current, index, regMasks.RegistersCount);
    }

    // Rewrite the IR: replace each local with its assigned register. Spilled
    // intervals keep their local operand; copies are materialized below.
    for (int index = regMasks.RegistersCount * 2; index < _intervals.Count; index++)
    {
        if (!_intervals[index].IsSpilled)
        {
            ReplaceLocalWithRegister(_intervals[index]);
        }
    }

    // Materialize the copies required where intervals were split, both inside
    // blocks and across block edges.
    InsertSplitCopies();
    InsertSplitCopiesAtEdges(cfg);

    return new AllocationResult(context.IntUsedRegisters, context.VecUsedRegisters, context.StackAlloc.TotalSize);
}
// Allocates a register for "current": first retires or re-classifies the
// active/inactive intervals relative to current's start position, then tries a
// spill-free allocation, falling back to allocation with spilling.
// NOTE(review): bits are cleared/moved on the BitMaps while they are being
// enumerated — this relies on BitMap's enumerator tolerating such mutation;
// confirm against the BitMap implementation before restructuring.
private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
{
    // Check active intervals that already ended.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];

        // Advance the interval's internal cursor to the scan position.
        interval.Forward(current.GetStart());

        if (interval.GetEnd() < current.GetStart())
        {
            // Fully expired: drop from the active set.
            context.Active.Clear(iIndex);
        }
        else if (!interval.Overlaps(current.GetStart()))
        {
            // Still alive but inside a lifetime hole: park on the inactive set.
            context.MoveActiveToInactive(iIndex);
        }
    }

    // Check inactive intervals that already ended or were reactivated.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];

        interval.Forward(current.GetStart());

        if (interval.GetEnd() < current.GetStart())
        {
            context.Inactive.Clear(iIndex);
        }
        else if (interval.Overlaps(current.GetStart()))
        {
            // Hole ended: the interval occupies its register again.
            context.MoveInactiveToActive(iIndex);
        }
    }

    if (!TryAllocateRegWithoutSpill(context, current, cIndex, registersCount))
    {
        AllocateRegWithSpill(context, current, cIndex, registersCount);
    }
}
// Tries to give "current" a register without spilling any other interval.
// Returns false when every candidate register is taken before current's first
// use; returns true after assigning a register (possibly splitting current so
// the tail is re-processed later).
private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
{
    RegisterType regType = current.Local.Type.ToRegisterType();

    // freePositions[r] = position up to which register r is free
    // (0 = not free at all, int.MaxValue = free for the rest of the scan).
    Span<int> freePositions = stackalloc int[registersCount];

    context.GetFreePositions(regType, freePositions, out int freePositionsCount);

    // Registers held by active intervals are not free at all.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];
        Register reg = interval.Register;

        if (reg.Type == regType)
        {
            freePositions[reg.Index] = 0;

            freePositionsCount--;
        }
    }

    // If all registers are already active, return early. No point in inspecting the inactive set to look for
    // holes.
    if (freePositionsCount == 0)
    {
        return false;
    }

    // Inactive intervals limit their register's availability to the point where
    // they would start overlapping current again.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];
        Register reg = interval.Register;

        ref int freePosition = ref freePositions[reg.Index];

        if (reg.Type == regType && freePosition != 0)
        {
            int overlapPosition = interval.GetOverlapPosition(current);

            if (overlapPosition != LiveInterval.NotFound && freePosition > overlapPosition)
            {
                freePosition = overlapPosition;
            }
        }
    }

    // Pick the register that stays free the longest.
    int selectedReg = GetHighestValueIndex(freePositions);
    int selectedNextUse = freePositions[selectedReg];

    // Intervals starts and ends at odd positions, unless they span an entire
    // block, in this case they will have ranges at a even position.
    // When a interval is loaded from the stack to a register, we can only
    // do the split at a odd position, because otherwise the split interval
    // that is inserted on the list to be processed may clobber a register
    // used by the instruction at the same position as the split.
    // The problem only happens when a interval ends exactly at this instruction,
    // because otherwise they would interfere, and the register wouldn't be selected.
    // When the interval is aligned and the above happens, there's no problem as
    // the instruction that is actually with the last use is the one
    // before that position.
    selectedNextUse &= ~InstructionGapMask;

    if (selectedNextUse <= current.GetStart())
    {
        // Best register is taken before current even starts: must spill.
        return false;
    }
    else if (selectedNextUse < current.GetEnd())
    {
        // Register is free only for a prefix of current: split and queue the tail.
        LiveInterval splitChild = current.Split(selectedNextUse);

        if (splitChild.UsesCount != 0)
        {
            Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

            InsertInterval(splitChild, registersCount);
        }
        else
        {
            // Tail has no uses: it can live on the stack.
            Spill(context, splitChild);
        }
    }

    current.Register = new Register(selectedReg, regType);

    if (regType == RegisterType.Integer)
    {
        context.IntUsedRegisters |= 1 << selectedReg;
    }
    else /* if (regType == RegisterType.Vector) */
    {
        context.VecUsedRegisters |= 1 << selectedReg;
    }

    context.Active.Set(cIndex);

    return true;
}
// Allocates a register for "current" when spilling is unavoidable: either
// current itself is spilled (when every register is used before current's
// first use), or the intervals occupying the chosen register are split and
// spilled out of the way.
private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
{
    RegisterType regType = current.Local.Type.ToRegisterType();

    // usePositions[r] = next position where register r is actually needed by
    // its holder; blockedPositions[r] = position where r becomes hard-blocked
    // by a fixed interval (fixed registers cannot be spilled).
    Span<int> usePositions = stackalloc int[registersCount];
    Span<int> blockedPositions = stackalloc int[registersCount];

    context.GetFreePositions(regType, usePositions, out _);
    context.GetFreePositions(regType, blockedPositions, out _);

    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];
        Register reg = interval.Register;

        if (reg.Type == regType)
        {
            ref int usePosition = ref usePositions[reg.Index];
            ref int blockedPosition = ref blockedPositions[reg.Index];

            if (interval.IsFixed)
            {
                // Active fixed interval: the register is unusable right now.
                usePosition = 0;
                blockedPosition = 0;
            }
            else
            {
                int nextUse = interval.NextUseAfter(current.GetStart());

                if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
                {
                    usePosition = nextUse;
                }
            }
        }
    }

    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];
        Register reg = interval.Register;

        if (reg.Type == regType)
        {
            ref int usePosition = ref usePositions[reg.Index];
            ref int blockedPosition = ref blockedPositions[reg.Index];

            if (interval.IsFixed)
            {
                // Inactive fixed interval blocks the register from the point it
                // overlaps current again.
                int overlapPosition = interval.GetOverlapPosition(current);

                if (overlapPosition != LiveInterval.NotFound)
                {
                    blockedPosition = Math.Min(blockedPosition, overlapPosition);
                    usePosition = Math.Min(usePosition, overlapPosition);
                }
            }
            else if (interval.Overlaps(current))
            {
                int nextUse = interval.NextUseAfter(current.GetStart());

                if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
                {
                    usePosition = nextUse;
                }
            }
        }
    }

    // Pick the register whose holder needs it the furthest in the future.
    int selectedReg = GetHighestValueIndex(usePositions);
    int currentFirstUse = current.FirstUse();

    Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");

    if (usePositions[selectedReg] < currentFirstUse)
    {
        // All intervals on inactive and active are being used before current,
        // so spill the current interval.
        Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");

        LiveInterval splitChild = current.Split(currentFirstUse);

        Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

        InsertInterval(splitChild, registersCount);

        Spill(context, current);
    }
    else if (blockedPositions[selectedReg] > current.GetEnd())
    {
        // Spill made the register available for the entire current lifetime,
        // so we only need to split the intervals using the selected register.
        current.Register = new Register(selectedReg, regType);

        SplitAndSpillOverlappingIntervals(context, current, registersCount);

        context.Active.Set(cIndex);
    }
    else
    {
        // There are conflicts even after spill due to the use of fixed registers
        // that can't be spilled, so we need to also split current at the point of
        // the first fixed register use.
        current.Register = new Register(selectedReg, regType);

        // Align the split position down to an even (block-aligned) position; see
        // the long comment in TryAllocateRegWithoutSpill for why.
        int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;

        Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");

        LiveInterval splitChild = current.Split(splitPosition);

        if (splitChild.UsesCount != 0)
        {
            Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

            InsertInterval(splitChild, registersCount);
        }
        else
        {
            Spill(context, splitChild);
        }

        SplitAndSpillOverlappingIntervals(context, current, registersCount);

        context.Active.Set(cIndex);
    }
}
  344. private static int GetHighestValueIndex(Span<int> span)
  345. {
  346. int highest = span[0];
  347. if (highest == int.MaxValue)
  348. {
  349. return 0;
  350. }
  351. int selected = 0;
  352. for (int index = 1; index < span.Length; index++)
  353. {
  354. int current = span[index];
  355. if (highest < current)
  356. {
  357. highest = current;
  358. selected = index;
  359. if (current == int.MaxValue)
  360. {
  361. break;
  362. }
  363. }
  364. }
  365. return selected;
  366. }
// Frees current.Register by splitting and spilling every non-fixed interval
// (active, or inactive-and-overlapping) that currently holds the same register.
// NOTE(review): bits are cleared from the BitMaps while enumerating them —
// relies on the BitMap enumerator tolerating this; confirm before refactoring.
private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current, int registersCount)
{
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];

        if (!interval.IsFixed && interval.Register == current.Register)
        {
            SplitAndSpillOverlappingInterval(context, current, interval, registersCount);

            context.Active.Clear(iIndex);
        }
    }

    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];

        if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
        {
            SplitAndSpillOverlappingInterval(context, current, interval, registersCount);

            context.Inactive.Clear(iIndex);
        }
    }
}
  388. private void SplitAndSpillOverlappingInterval(
  389. AllocationContext context,
  390. LiveInterval current,
  391. LiveInterval interval,
  392. int registersCount)
  393. {
  394. // If there's a next use after the start of the current interval,
  395. // we need to split the spilled interval twice, and re-insert it
  396. // on the "pending" list to ensure that it will get a new register
  397. // on that use position.
  398. int nextUse = interval.NextUseAfter(current.GetStart());
  399. LiveInterval splitChild;
  400. if (interval.GetStart() < current.GetStart())
  401. {
  402. splitChild = interval.Split(current.GetStart());
  403. }
  404. else
  405. {
  406. splitChild = interval;
  407. }
  408. if (nextUse != -1)
  409. {
  410. Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");
  411. if (nextUse > splitChild.GetStart())
  412. {
  413. LiveInterval right = splitChild.Split(nextUse);
  414. Spill(context, splitChild);
  415. splitChild = right;
  416. }
  417. InsertInterval(splitChild, registersCount);
  418. }
  419. else
  420. {
  421. Spill(context, splitChild);
  422. }
  423. }
// Re-inserts a split child into the sorted pending-intervals list so the main
// scan picks it up at the correct position.
private void InsertInterval(LiveInterval interval, int registersCount)
{
    Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
    Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
    Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");

    // Skip the fixed intervals at the head of the list (two per physical
    // register: one integer, one vector).
    int startIndex = registersCount * 2;

    // null comparer -> LiveInterval's default ordering (presumably by start
    // position — confirm against LiveInterval's IComparable implementation).
    int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);

    if (insertIndex < 0)
    {
        insertIndex = ~insertIndex;
    }

    _intervals.Insert(insertIndex, interval);
}
// Assigns a stack slot to an interval that has no uses requiring a register.
private void Spill(AllocationContext context, LiveInterval interval)
{
    Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
    Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");

    // We first check if any of the siblings were spilled, if so we can reuse
    // the stack offset. Otherwise, we allocate a new space on the stack.
    // This prevents stack-to-stack copies being necessary for a split interval.
    if (!interval.TrySpillWithSiblingOffset())
    {
        interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
    }
}
// Inserts the copy instructions needed where an interval was split mid-block:
// whenever a parent ends exactly where its split child begins (and that point
// is not a block boundary), a copy between their locations is required.
private void InsertSplitCopies()
{
    // One resolver per split position, accumulating all copies at that point.
    Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();

    CopyResolver GetCopyResolver(int position)
    {
        if (!copyResolvers.TryGetValue(position, out CopyResolver copyResolver))
        {
            copyResolver = new CopyResolver();

            copyResolvers.Add(position, copyResolver);
        }

        return copyResolver;
    }

    foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
    {
        LiveInterval previous = interval;

        foreach (LiveInterval splitChild in interval.SplitChildren())
        {
            int splitPosition = splitChild.GetStart();

            // Copies across block edges are handled by InsertSplitCopiesAtEdges;
            // a gap between previous end and child start means no copy is needed.
            if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
            {
                GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
            }

            previous = splitChild;
        }
    }

    foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
    {
        CopyResolver copyResolver = kv.Value;

        if (!copyResolver.HasCopy)
        {
            continue;
        }

        int splitPosition = kv.Key;

        (IntrusiveList<Operation> nodes, Operation node) = GetOperationNode(splitPosition);

        Operation[] sequence = copyResolver.Sequence();

        // Splice the copy sequence in, before the instruction at the split
        // position, preserving the sequence's internal order.
        nodes.AddBefore(node, sequence[0]);

        node = sequence[0];

        for (int index = 1; index < sequence.Length; index++)
        {
            nodes.AddAfter(node, sequence[index]);

            node = sequence[index];
        }
    }
}
// Inserts the copies required when an interval is in different locations
// (register/stack slot) at the two ends of a control flow edge. Critical edges
// are split so the copies execute only on the intended path.
private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
{
    int blocksCount = cfg.Blocks.Count;

    // Blocks appended past the original count are edge-split blocks created
    // during this pass.
    bool IsSplitEdgeBlock(BasicBlock block)
    {
        return block.Index >= blocksCount;
    }

    // Reset iterators to beginning because GetSplitChild depends on the state of the iterator.
    foreach (LiveInterval interval in _intervals)
    {
        interval.Reset();
    }

    for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
    {
        if (IsSplitEdgeBlock(block))
        {
            continue;
        }

        bool hasSingleOrNoSuccessor = block.SuccessorsCount <= 1;

        for (int i = 0; i < block.SuccessorsCount; i++)
        {
            BasicBlock successor = block.GetSuccessor(i);

            int succIndex = successor.Index;

            // If the current node is a split node, then the actual successor node
            // (the successor before the split) should be right after it.
            if (IsSplitEdgeBlock(successor))
            {
                succIndex = successor.GetSuccessor(0).Index;
            }

            CopyResolver copyResolver = null;

            // For every interval live into the successor, check whether its
            // location at the end of this block differs from its location at
            // the start of the successor; if so, a copy is needed on this edge.
            foreach (int iIndex in _blockLiveIn[succIndex])
            {
                LiveInterval interval = _parentIntervals[iIndex];

                if (!interval.IsSplit)
                {
                    continue;
                }

                int lEnd = _blockRanges[block.Index].End - 1;
                int rStart = _blockRanges[succIndex].Start;

                LiveInterval left = interval.GetSplitChild(lEnd);
                LiveInterval right = interval.GetSplitChild(rStart);

                if (left != default && right != default && left != right)
                {
                    if (copyResolver == null)
                    {
                        copyResolver = new CopyResolver();
                    }

                    copyResolver.AddSplit(left, right);
                }
            }

            if (copyResolver == null || !copyResolver.HasCopy)
            {
                continue;
            }

            Operation[] sequence = copyResolver.Sequence();

            if (hasSingleOrNoSuccessor)
            {
                // Only one way out of this block: append the copies at its end.
                foreach (Operation operation in sequence)
                {
                    block.Append(operation);
                }
            }
            else if (successor.Predecessors.Count == 1)
            {
                // Only one way into the successor: prepend the copies there,
                // preserving the sequence's internal order.
                successor.Operations.AddFirst(sequence[0]);

                Operation prependNode = sequence[0];

                for (int index = 1; index < sequence.Length; index++)
                {
                    Operation operation = sequence[index];

                    successor.Operations.AddAfter(prependNode, operation);

                    prependNode = operation;
                }
            }
            else
            {
                // Split the critical edge.
                BasicBlock splitBlock = cfg.SplitEdge(block, successor);

                foreach (Operation operation in sequence)
                {
                    splitBlock.Append(operation);
                }
            }
        }
    }
}
  578. private void ReplaceLocalWithRegister(LiveInterval current)
  579. {
  580. Operand register = GetRegister(current);
  581. foreach (int usePosition in current.UsePositions())
  582. {
  583. (_, Operation operation) = GetOperationNode(usePosition);
  584. for (int index = 0; index < operation.SourcesCount; index++)
  585. {
  586. Operand source = operation.GetSource(index);
  587. if (source == current.Local)
  588. {
  589. operation.SetSource(index, register);
  590. }
  591. else if (source.Kind == OperandKind.Memory)
  592. {
  593. MemoryOperand memOp = source.GetMemory();
  594. if (memOp.BaseAddress == current.Local)
  595. {
  596. memOp.BaseAddress = register;
  597. }
  598. if (memOp.Index == current.Local)
  599. {
  600. memOp.Index = register;
  601. }
  602. }
  603. }
  604. for (int index = 0; index < operation.DestinationsCount; index++)
  605. {
  606. Operand dest = operation.GetDestination(index);
  607. if (dest == current.Local)
  608. {
  609. operation.SetDestination(index, register);
  610. }
  611. }
  612. }
  613. }
  614. private static Operand GetRegister(LiveInterval interval)
  615. {
  616. Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
  617. return Operand.Factory.Register(
  618. interval.Register.Index,
  619. interval.Register.Type,
  620. interval.Local.Type);
  621. }
  622. private (IntrusiveList<Operation>, Operation) GetOperationNode(int position)
  623. {
  624. return _operationNodes[position / InstructionGap];
  625. }
// Assigns a sequential number to every local variable defined in the CFG,
// creates its LiveInterval, and records the operation list in program order
// (reverse post-order of blocks) so positions can be mapped back to nodes.
// Also creates the fixed intervals (one integer + one vector per register).
private void NumberLocals(ControlFlowGraph cfg, int registersCount)
{
    _operationNodes = new List<(IntrusiveList<Operation>, Operation)>();
    _intervals = new List<LiveInterval>();

    for (int index = 0; index < registersCount; index++)
    {
        _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
        _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
    }

    // The "visited" state is stored in the MSB of the local's value.
    const ulong VisitedMask = 1ul << 63;

    bool IsVisited(Operand local)
    {
        return (local.GetValueUnsafe() & VisitedMask) != 0;
    }

    void SetVisited(Operand local)
    {
        local.GetValueUnsafe() |= VisitedMask;
    }

    _operationsCount = 0;

    // Walk blocks in reverse post-order so numbering follows execution order.
    for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
    {
        BasicBlock block = cfg.PostOrderBlocks[index];

        for (Operation node = block.Operations.First; node != default; node = node.ListNext)
        {
            _operationNodes.Add((block.Operations, node));

            for (int i = 0; i < node.DestinationsCount; i++)
            {
                Operand dest = node.GetDestination(i);

                // Number each local the first time it appears as a destination.
                if (dest.Kind == OperandKind.LocalVariable && !IsVisited(dest))
                {
                    dest.NumberLocal(_intervals.Count);

                    _intervals.Add(new LiveInterval(dest));

                    SetVisited(dest);
                }
            }
        }

        _operationsCount += block.Operations.Count * InstructionGap;

        if (block.Operations.Count == 0)
        {
            // Pretend we have a dummy instruction on the empty block.
            _operationNodes.Add((default, default));

            _operationsCount += InstructionGap;
        }
    }

    _parentIntervals = _intervals.ToArray();
}
// Builds the live intervals for all locals and registers:
// 1) computes per-block gen/kill sets,
// 2) solves live-in/live-out with a backwards dataflow fixed point, and
// 3) walks each block backwards to build the actual ranges and use positions.
private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
{
    _blockRanges = new LiveRange[cfg.Blocks.Count];

    int mapSize = _intervals.Count;

    BitMap[] blkLiveGen = new BitMap[cfg.Blocks.Count];
    BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];

    // Compute local live sets.
    for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
    {
        BitMap liveGen = new BitMap(Allocators.Default, mapSize);
        BitMap liveKill = new BitMap(Allocators.Default, mapSize);

        for (Operation node = block.Operations.First; node != default; node = node.ListNext)
        {
            for (int i = 0; i < node.SourcesCount; i++)
            {
                VisitSource(node.GetSource(i));
            }

            for (int i = 0; i < node.DestinationsCount; i++)
            {
                VisitDestination(node.GetDestination(i));
            }

            // A source read before any write in this block is "gen"
            // (live on entry). Memory operands are visited recursively
            // for their base/index components.
            void VisitSource(Operand source)
            {
                if (IsLocalOrRegister(source.Kind))
                {
                    int id = GetOperandId(source);

                    if (!liveKill.IsSet(id))
                    {
                        liveGen.Set(id);
                    }
                }
                else if (source.Kind == OperandKind.Memory)
                {
                    MemoryOperand memOp = source.GetMemory();

                    if (memOp.BaseAddress != default)
                    {
                        VisitSource(memOp.BaseAddress);
                    }

                    if (memOp.Index != default)
                    {
                        VisitSource(memOp.Index);
                    }
                }
            }

            // Any write "kills" the value coming from predecessors.
            void VisitDestination(Operand dest)
            {
                liveKill.Set(GetOperandId(dest));
            }
        }

        blkLiveGen [block.Index] = liveGen;
        blkLiveKill[block.Index] = liveKill;
    }

    // Compute global live sets.
    BitMap[] blkLiveIn = new BitMap[cfg.Blocks.Count];
    BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];

    for (int index = 0; index < cfg.Blocks.Count; index++)
    {
        blkLiveIn [index] = new BitMap(Allocators.Default, mapSize);
        blkLiveOut[index] = new BitMap(Allocators.Default, mapSize);
    }

    // Standard backwards liveness fixed point:
    // liveOut(b) = union of liveIn(successors); liveIn(b) = gen + (liveOut - kill).
    // Iterating post-order blocks converges quickly for reducible graphs.
    bool modified;

    do
    {
        modified = false;

        for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
        {
            BasicBlock block = cfg.PostOrderBlocks[index];

            BitMap liveOut = blkLiveOut[block.Index];

            for (int i = 0; i < block.SuccessorsCount; i++)
            {
                BasicBlock succ = block.GetSuccessor(i);

                // Set returns whether any bit changed, driving the fixed point.
                modified |= liveOut.Set(blkLiveIn[succ.Index]);
            }

            BitMap liveIn = blkLiveIn[block.Index];

            liveIn.Set  (liveOut);
            liveIn.Clear(blkLiveKill[block.Index]);
            liveIn.Set  (blkLiveGen [block.Index]);
        }
    }
    while (modified);

    _blockLiveIn = blkLiveIn;

    _blockEdges = new HashSet<int>();

    // Compute lifetime intervals.
    // Walk blocks (and instructions within them) backwards from the end of the
    // program, so SetStart/AddRange can trim ranges as definitions are found.
    int operationPos = _operationsCount;

    for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
    {
        BasicBlock block = cfg.PostOrderBlocks[index];

        // We handle empty blocks by pretending they have a dummy instruction,
        // because otherwise the block would have the same start and end position,
        // and this is not valid.
        int instCount = Math.Max(block.Operations.Count, 1);

        int blockStart = operationPos - instCount * InstructionGap;
        int blockEnd = operationPos;

        _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);

        _blockEdges.Add(blockStart);

        BitMap liveOut = blkLiveOut[block.Index];

        // Everything live out of the block provisionally spans the whole block;
        // the backwards walk below trims ranges at definitions.
        foreach (int id in liveOut)
        {
            _intervals[id].AddRange(blockStart, blockEnd);
        }

        if (block.Operations.Count == 0)
        {
            operationPos -= InstructionGap;

            continue;
        }

        for (Operation node = block.Operations.Last; node != default; node = node.ListPrevious)
        {
            operationPos -= InstructionGap;

            for (int i = 0; i < node.DestinationsCount; i++)
            {
                VisitDestination(node.GetDestination(i));
            }

            for (int i = 0; i < node.SourcesCount; i++)
            {
                VisitSource(node.GetSource(i));
            }

            if (node.Instruction == Instruction.Call)
            {
                // Calls clobber caller saved registers: give each one a short
                // fixed range over the call so nothing is allocated across it.
                AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
            }

            void VisitSource(Operand source)
            {
                if (IsLocalOrRegister(source.Kind))
                {
                    LiveInterval interval = _intervals[GetOperandId(source)];

                    // Extend liveness back to the block start (trimmed later if
                    // a definition is found) and record the use.
                    interval.AddRange(blockStart, operationPos + 1);
                    interval.AddUsePosition(operationPos);
                }
                else if (source.Kind == OperandKind.Memory)
                {
                    MemoryOperand memOp = source.GetMemory();

                    if (memOp.BaseAddress != default)
                    {
                        VisitSource(memOp.BaseAddress);
                    }

                    if (memOp.Index != default)
                    {
                        VisitSource(memOp.Index);
                    }
                }
            }

            void VisitDestination(Operand dest)
            {
                LiveInterval interval = _intervals[GetOperandId(dest)];

                if (interval.IsFixed)
                {
                    // A definition targeting a physical register marks that
                    // fixed interval as actually used.
                    interval.IsFixedAndUsed = true;
                }

                // Definitions sit at odd positions (operationPos + 1) between
                // the instruction gaps; see the comment in TryAllocateRegWithoutSpill.
                interval.SetStart(operationPos + 1);
                interval.AddUsePosition(operationPos + 1);
            }
        }
    }

    // Building ranges advances the intervals' internal cursors; rewind them
    // before the allocation scan starts.
    foreach (LiveInterval interval in _parentIntervals)
    {
        interval.Reset();
    }
}
  832. private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
  833. {
  834. while (mask != 0)
  835. {
  836. int regIndex = BitOperations.TrailingZeroCount(mask);
  837. Register callerSavedReg = new Register(regIndex, regType);
  838. LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
  839. interval.AddRange(operationPos + 1, operationPos + InstructionGap);
  840. mask &= ~(1 << regIndex);
  841. }
  842. }
  843. private static int GetOperandId(Operand operand)
  844. {
  845. if (operand.Kind == OperandKind.LocalVariable)
  846. {
  847. return operand.GetLocalNumber();
  848. }
  849. else if (operand.Kind == OperandKind.Register)
  850. {
  851. return GetRegisterId(operand.GetRegister());
  852. }
  853. else
  854. {
  855. throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
  856. }
  857. }
  858. private static int GetRegisterId(Register register)
  859. {
  860. return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
  861. }
  862. private static bool IsLocalOrRegister(OperandKind kind)
  863. {
  864. return kind == OperandKind.LocalVariable ||
  865. kind == OperandKind.Register;
  866. }
  867. }
  868. }