LinearScanAllocator.cs 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055
  1. using ARMeilleure.Common;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Linq;
  8. using System.Numerics;
  9. namespace ARMeilleure.CodeGen.RegisterAllocators
  10. {
  11. // Based on:
  12. // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
  13. // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
  14. class LinearScanAllocator : IRegisterAllocator
  15. {
        // Positions advance by this amount per instruction, leaving odd positions
        // free so copies can be placed between instructions.
        private const int InstructionGap = 2;
        private const int InstructionGapMask = InstructionGap - 1;

        // NOTE(review): assumes 16 integer and 16 vector registers — confirm
        // against RegisterMasks for the target.
        private const int RegistersCount = 16;

        // Positions that start a basic block; used to skip split-copy insertion
        // at block boundaries (those are handled at edges instead).
        private HashSet<int> _blockEdges;

        // Start/end position of each basic block, indexed by block index.
        private LiveRange[] _blockRanges;

        // Live-in set per block, indexed by block index.
        private BitMap[] _blockLiveIn;

        // All intervals: the first RegistersCount * 2 entries are the fixed
        // (physical register) intervals, the rest belong to local variables.
        private List<LiveInterval> _intervals;

        // Original (pre-split) interval for each operand id.
        private LiveInterval[] _parentIntervals;

        // Maps position / InstructionGap to the (owning list, node) at that position.
        private List<(IntrusiveList<Node>, Node)> _operationNodes;

        // Total number of positions (instruction count * InstructionGap).
        private int _operationsCount;
  26. private class AllocationContext : IDisposable
  27. {
  28. public RegisterMasks Masks { get; }
  29. public StackAllocator StackAlloc { get; }
  30. public BitMap Active { get; }
  31. public BitMap Inactive { get; }
  32. public int IntUsedRegisters { get; set; }
  33. public int VecUsedRegisters { get; set; }
  34. public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
  35. {
  36. StackAlloc = stackAlloc;
  37. Masks = masks;
  38. Active = BitMapPool.Allocate(intervalsCount);
  39. Inactive = BitMapPool.Allocate(intervalsCount);
  40. }
  41. public void MoveActiveToInactive(int bit)
  42. {
  43. Move(Active, Inactive, bit);
  44. }
  45. public void MoveInactiveToActive(int bit)
  46. {
  47. Move(Inactive, Active, bit);
  48. }
  49. private static void Move(BitMap source, BitMap dest, int bit)
  50. {
  51. source.Clear(bit);
  52. dest.Set(bit);
  53. }
  54. public void Dispose()
  55. {
  56. BitMapPool.Release();
  57. }
  58. }
  59. public AllocationResult RunPass(
  60. ControlFlowGraph cfg,
  61. StackAllocator stackAlloc,
  62. RegisterMasks regMasks)
  63. {
  64. NumberLocals(cfg);
  65. AllocationContext context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);
  66. BuildIntervals(cfg, context);
  67. for (int index = 0; index < _intervals.Count; index++)
  68. {
  69. LiveInterval current = _intervals[index];
  70. if (current.IsEmpty)
  71. {
  72. continue;
  73. }
  74. if (current.IsFixed)
  75. {
  76. context.Active.Set(index);
  77. if (current.Register.Type == RegisterType.Integer)
  78. {
  79. context.IntUsedRegisters |= 1 << current.Register.Index;
  80. }
  81. else /* if (interval.Register.Type == RegisterType.Vector) */
  82. {
  83. context.VecUsedRegisters |= 1 << current.Register.Index;
  84. }
  85. continue;
  86. }
  87. AllocateInterval(context, current, index);
  88. }
  89. for (int index = RegistersCount * 2; index < _intervals.Count; index++)
  90. {
  91. if (!_intervals[index].IsSpilled)
  92. {
  93. ReplaceLocalWithRegister(_intervals[index]);
  94. }
  95. }
  96. InsertSplitCopies();
  97. InsertSplitCopiesAtEdges(cfg);
  98. AllocationResult result = new AllocationResult(
  99. context.IntUsedRegisters,
  100. context.VecUsedRegisters,
  101. context.StackAlloc.TotalSize);
  102. context.Dispose();
  103. return result;
  104. }
        // Refreshes the active/inactive sets relative to the start of "current",
        // then assigns it a register, spilling only when no register is free.
        private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex)
        {
            // Check active intervals that already ended.
            // NOTE(review): bits are cleared/moved while enumerating the BitMap;
            // assumes the BitMap enumerator tolerates modification — confirm.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.GetEnd() < current.GetStart())
                {
                    // Expired before current starts: remove entirely.
                    context.Active.Clear(iIndex);
                }
                else if (!interval.Overlaps(current.GetStart()))
                {
                    // Still alive but in a lifetime hole at this position.
                    context.MoveActiveToInactive(iIndex);
                }
            }

            // Check inactive intervals that already ended or were reactivated.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.GetEnd() < current.GetStart())
                {
                    context.Inactive.Clear(iIndex);
                }
                else if (interval.Overlaps(current.GetStart()))
                {
                    context.MoveInactiveToActive(iIndex);
                }
            }

            if (!TryAllocateRegWithoutSpill(context, current, cIndex))
            {
                AllocateRegWithSpill(context, current, cIndex);
            }
        }
        // Tries to give "current" a register without evicting anyone: picks the
        // candidate that stays free the longest; if it is not free for the whole
        // lifetime, current is split where the register becomes unavailable.
        // Returns false when no register is free past the interval's start.
        private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            int availableRegisters = context.Masks.GetAvailableRegisters(regType);

            // freePositions[reg] = first position where reg stops being free
            // (int.MaxValue = free forever, 0 = unavailable or taken now).
            int[] freePositions = new int[RegistersCount];

            for (int index = 0; index < RegistersCount; index++)
            {
                if ((availableRegisters & (1 << index)) != 0)
                {
                    freePositions[index] = int.MaxValue;
                }
            }

            // Registers held by active intervals are not free at all.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.Register.Type == regType)
                {
                    freePositions[interval.Register.Index] = 0;
                }
            }

            // Registers held by inactive intervals are free only until that
            // interval starts overlapping current again.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.Register.Type == regType)
                {
                    int overlapPosition = interval.GetOverlapPosition(current);

                    if (overlapPosition != LiveInterval.NotFound && freePositions[interval.Register.Index] > overlapPosition)
                    {
                        freePositions[interval.Register.Index] = overlapPosition;
                    }
                }
            }

            int selectedReg = GetHighestValueIndex(freePositions);

            int selectedNextUse = freePositions[selectedReg];

            // Intervals starts and ends at odd positions, unless they span an entire
            // block, in this case they will have ranges at a even position.
            // When a interval is loaded from the stack to a register, we can only
            // do the split at a odd position, because otherwise the split interval
            // that is inserted on the list to be processed may clobber a register
            // used by the instruction at the same position as the split.
            // The problem only happens when a interval ends exactly at this instruction,
            // because otherwise they would interfere, and the register wouldn't be selected.
            // When the interval is aligned and the above happens, there's no problem as
            // the instruction that is actually with the last use is the one
            // before that position.
            selectedNextUse &= ~InstructionGapMask;

            if (selectedNextUse <= current.GetStart())
            {
                // Even the best register is taken at the start: caller must spill.
                return false;
            }
            else if (selectedNextUse < current.GetEnd())
            {
                // Register is free only for a prefix of the lifetime: split there,
                // and queue the remainder (or spill it if it has no uses).
                Debug.Assert(selectedNextUse > current.GetStart(), "Trying to split interval at the start.");

                LiveInterval splitChild = current.Split(selectedNextUse);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild);
                }
                else
                {
                    Spill(context, splitChild);
                }
            }

            current.Register = new Register(selectedReg, regType);

            if (regType == RegisterType.Integer)
            {
                context.IntUsedRegisters |= 1 << selectedReg;
            }
            else /* if (regType == RegisterType.Vector) */
            {
                context.VecUsedRegisters |= 1 << selectedReg;
            }

            context.Active.Set(cIndex);

            return true;
        }
        // Allocates a register for "current" when eviction is unavoidable: picks
        // the register whose non-fixed holders are used furthest away, then either
        // spills current itself, evicts the conflicting intervals, or additionally
        // splits current where a fixed (unspillable) register use blocks it.
        private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex)
        {
            RegisterType regType = current.Local.Type.ToRegisterType();

            int availableRegisters = context.Masks.GetAvailableRegisters(regType);

            // usePositions[reg]: next position where reg's current holder needs it.
            // blockedPositions[reg]: position where reg is blocked by a fixed interval.
            int[] usePositions = new int[RegistersCount];
            int[] blockedPositions = new int[RegistersCount];

            for (int index = 0; index < RegistersCount; index++)
            {
                if ((availableRegisters & (1 << index)) != 0)
                {
                    usePositions[index] = int.MaxValue;

                    blockedPositions[index] = int.MaxValue;
                }
            }

            void SetUsePosition(int index, int position)
            {
                usePositions[index] = Math.Min(usePositions[index], position);
            }

            void SetBlockedPosition(int index, int position)
            {
                blockedPositions[index] = Math.Min(blockedPositions[index], position);

                // A blocked register is also unusable, so clamp the use position too.
                SetUsePosition(index, position);
            }

            // Non-fixed active holders: register usable until their next use.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register.Type == regType)
                {
                    int nextUse = interval.NextUseAfter(current.GetStart());

                    if (nextUse != -1)
                    {
                        SetUsePosition(interval.Register.Index, nextUse);
                    }
                }
            }

            // Same for inactive holders, but only if they overlap current.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register.Type == regType && interval.Overlaps(current))
                {
                    int nextUse = interval.NextUseAfter(current.GetStart());

                    if (nextUse != -1)
                    {
                        SetUsePosition(interval.Register.Index, nextUse);
                    }
                }
            }

            // Fixed intervals can never be spilled, so they block their register.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.IsFixed && interval.Register.Type == regType)
                {
                    SetBlockedPosition(interval.Register.Index, 0);
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (interval.IsFixed && interval.Register.Type == regType)
                {
                    int overlapPosition = interval.GetOverlapPosition(current);

                    if (overlapPosition != LiveInterval.NotFound)
                    {
                        SetBlockedPosition(interval.Register.Index, overlapPosition);
                    }
                }
            }

            int selectedReg = GetHighestValueIndex(usePositions);

            int currentFirstUse = current.FirstUse();

            Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");

            if (usePositions[selectedReg] < currentFirstUse)
            {
                // All intervals on inactive and active are being used before current,
                // so spill the current interval.
                Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");

                LiveInterval splitChild = current.Split(currentFirstUse);

                Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                InsertInterval(splitChild);

                Spill(context, current);
            }
            else if (blockedPositions[selectedReg] > current.GetEnd())
            {
                // Spill made the register available for the entire current lifetime,
                // so we only need to split the intervals using the selected register.
                current.Register = new Register(selectedReg, regType);

                SplitAndSpillOverlappingIntervals(context, current);

                context.Active.Set(cIndex);
            }
            else
            {
                // There are conflicts even after spill due to the use of fixed registers
                // that can't be spilled, so we need to also split current at the point of
                // the first fixed register use.
                current.Register = new Register(selectedReg, regType);

                int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;

                Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");

                LiveInterval splitChild = current.Split(splitPosition);

                if (splitChild.UsesCount != 0)
                {
                    Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

                    InsertInterval(splitChild);
                }
                else
                {
                    Spill(context, splitChild);
                }

                SplitAndSpillOverlappingIntervals(context, current);

                context.Active.Set(cIndex);
            }
        }
  324. private static int GetHighestValueIndex(int[] array)
  325. {
  326. int higuest = array[0];
  327. if (higuest == int.MaxValue)
  328. {
  329. return 0;
  330. }
  331. int selected = 0;
  332. for (int index = 1; index < array.Length; index++)
  333. {
  334. int current = array[index];
  335. if (higuest < current)
  336. {
  337. higuest = current;
  338. selected = index;
  339. if (current == int.MaxValue)
  340. {
  341. break;
  342. }
  343. }
  344. }
  345. return selected;
  346. }
        // Evicts every non-fixed interval holding current's register that
        // conflicts with it, removing the victims from the active/inactive sets.
        // NOTE(review): clears bits while enumerating the BitMap; assumes the
        // enumerator tolerates modification — confirm with BitMap.
        private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current)
        {
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register)
                {
                    SplitAndSpillOverlappingInterval(context, current, interval);

                    context.Active.Clear(iIndex);
                }
            }

            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
                {
                    SplitAndSpillOverlappingInterval(context, current, interval);

                    context.Inactive.Clear(iIndex);
                }
            }
        }
  368. private void SplitAndSpillOverlappingInterval(
  369. AllocationContext context,
  370. LiveInterval current,
  371. LiveInterval interval)
  372. {
  373. // If there's a next use after the start of the current interval,
  374. // we need to split the spilled interval twice, and re-insert it
  375. // on the "pending" list to ensure that it will get a new register
  376. // on that use position.
  377. int nextUse = interval.NextUseAfter(current.GetStart());
  378. LiveInterval splitChild;
  379. if (interval.GetStart() < current.GetStart())
  380. {
  381. splitChild = interval.Split(current.GetStart());
  382. }
  383. else
  384. {
  385. splitChild = interval;
  386. }
  387. if (nextUse != -1)
  388. {
  389. Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");
  390. if (nextUse > splitChild.GetStart())
  391. {
  392. LiveInterval right = splitChild.Split(nextUse);
  393. Spill(context, splitChild);
  394. splitChild = right;
  395. }
  396. InsertInterval(splitChild);
  397. }
  398. else
  399. {
  400. Spill(context, splitChild);
  401. }
  402. }
  403. private void InsertInterval(LiveInterval interval)
  404. {
  405. Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
  406. Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
  407. Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");
  408. int startIndex = RegistersCount * 2;
  409. int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);
  410. if (insertIndex < 0)
  411. {
  412. insertIndex = ~insertIndex;
  413. }
  414. _intervals.Insert(insertIndex, interval);
  415. }
  416. private void Spill(AllocationContext context, LiveInterval interval)
  417. {
  418. Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
  419. Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");
  420. // We first check if any of the siblings were spilled, if so we can reuse
  421. // the stack offset. Otherwise, we allocate a new space on the stack.
  422. // This prevents stack-to-stack copies being necessary for a split interval.
  423. if (!interval.TrySpillWithSiblingOffset())
  424. {
  425. interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
  426. }
  427. }
  428. private void InsertSplitCopies()
  429. {
  430. Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();
  431. CopyResolver GetCopyResolver(int position)
  432. {
  433. CopyResolver copyResolver = new CopyResolver();
  434. if (copyResolvers.TryAdd(position, copyResolver))
  435. {
  436. return copyResolver;
  437. }
  438. return copyResolvers[position];
  439. }
  440. foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
  441. {
  442. LiveInterval previous = interval;
  443. foreach (LiveInterval splitChild in interval.SplitChilds())
  444. {
  445. int splitPosition = splitChild.GetStart();
  446. if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
  447. {
  448. GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
  449. }
  450. previous = splitChild;
  451. }
  452. }
  453. foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
  454. {
  455. CopyResolver copyResolver = kv.Value;
  456. if (!copyResolver.HasCopy)
  457. {
  458. continue;
  459. }
  460. int splitPosition = kv.Key;
  461. (IntrusiveList<Node> nodes, Node node) = GetOperationNode(splitPosition);
  462. Operation[] sequence = copyResolver.Sequence();
  463. nodes.AddBefore(node, sequence[0]);
  464. node = sequence[0];
  465. for (int index = 1; index < sequence.Length; index++)
  466. {
  467. nodes.AddAfter(node, sequence[index]);
  468. node = sequence[index];
  469. }
  470. }
  471. }
        // Inserts copies for intervals split exactly at block edges. Depending on
        // the CFG shape the copies go at the end of the predecessor, the start of
        // the successor, or into a new block created by splitting a critical edge.
        private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
        {
            int blocksCount = cfg.Blocks.Count;

            // Blocks with an index past the original count were created by
            // SplitEdge during this very loop.
            bool IsSplitEdgeBlock(BasicBlock block)
            {
                return block.Index >= blocksCount;
            }

            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                if (IsSplitEdgeBlock(block))
                {
                    continue;
                }

                bool hasSingleOrNoSuccessor = block.Next == null || block.Branch == null;

                for (int i = 0; i < 2; i++)
                {
                    // This used to use an enumerable, but it ended up generating a lot of garbage, so now it is a loop.
                    BasicBlock successor = (i == 0) ? block.Next : block.Branch;

                    if (successor == null)
                    {
                        continue;
                    }

                    int succIndex = successor.Index;

                    // If the current node is a split node, then the actual successor node
                    // (the successor before the split) should be right after it.
                    if (IsSplitEdgeBlock(successor))
                    {
                        succIndex = FirstSuccessor(successor).Index;
                    }

                    CopyResolver copyResolver = new CopyResolver();

                    // For each variable live into the successor, connect the piece
                    // live at the end of this block to the piece live at the start
                    // of the successor, if they differ.
                    foreach (int iIndex in _blockLiveIn[succIndex])
                    {
                        LiveInterval interval = _parentIntervals[iIndex];

                        if (!interval.IsSplit)
                        {
                            continue;
                        }

                        int lEnd = _blockRanges[block.Index].End - 1;
                        int rStart = _blockRanges[succIndex].Start;

                        LiveInterval left = interval.GetSplitChild(lEnd);
                        LiveInterval right = interval.GetSplitChild(rStart);

                        if (left != null && right != null && left != right)
                        {
                            copyResolver.AddSplit(left, right);
                        }
                    }

                    if (!copyResolver.HasCopy)
                    {
                        continue;
                    }

                    Operation[] sequence = copyResolver.Sequence();

                    if (hasSingleOrNoSuccessor)
                    {
                        // Only one way out of this block: append copies at its end.
                        foreach (Operation operation in sequence)
                        {
                            block.Append(operation);
                        }
                    }
                    else if (successor.Predecessors.Count == 1)
                    {
                        // Only one way into the successor: prepend copies there.
                        successor.Operations.AddFirst(sequence[0]);

                        Node prependNode = sequence[0];

                        for (int index = 1; index < sequence.Length; index++)
                        {
                            Operation operation = sequence[index];

                            successor.Operations.AddAfter(prependNode, operation);

                            prependNode = operation;
                        }
                    }
                    else
                    {
                        // Split the critical edge.
                        BasicBlock splitBlock = cfg.SplitEdge(block, successor);

                        foreach (Operation operation in sequence)
                        {
                            splitBlock.Append(operation);
                        }
                    }
                }
            }
        }
        // Rewrites every use and definition of the interval's local variable to
        // the physical register it was assigned.
        private void ReplaceLocalWithRegister(LiveInterval current)
        {
            Operand register = GetRegister(current);

            IList<int> usePositions = current.UsePositions();

            for (int i = usePositions.Count - 1; i >= 0; i--)
            {
                // NOTE(review): use positions appear to be stored negated —
                // confirm against LiveInterval.UsePositions' ordering convention.
                int usePosition = -usePositions[i];

                (_, Node operation) = GetOperationNode(usePosition);

                for (int index = 0; index < operation.SourcesCount; index++)
                {
                    Operand source = operation.GetSource(index);

                    if (source == current.Local)
                    {
                        operation.SetSource(index, register);
                    }
                    else if (source.Kind == OperandKind.Memory)
                    {
                        // Locals may also appear as the base address or index of a
                        // memory operand; patch those in place too.
                        MemoryOperand memOp = (MemoryOperand)source;

                        if (memOp.BaseAddress == current.Local)
                        {
                            memOp.BaseAddress = register;
                        }

                        if (memOp.Index == current.Local)
                        {
                            memOp.Index = register;
                        }
                    }
                }

                for (int index = 0; index < operation.DestinationsCount; index++)
                {
                    Operand dest = operation.GetDestination(index);

                    if (dest == current.Local)
                    {
                        operation.SetDestination(index, register);
                    }
                }
            }
        }
  591. private static Operand GetRegister(LiveInterval interval)
  592. {
  593. Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
  594. return OperandHelper.Register(
  595. interval.Register.Index,
  596. interval.Register.Type,
  597. interval.Local.Type);
  598. }
  599. private (IntrusiveList<Node>, Node) GetOperationNode(int position)
  600. {
  601. return _operationNodes[position / InstructionGap];
  602. }
        // Assigns a dense id to every local variable, creates its (still empty)
        // live interval, and records the node at each instruction position.
        // The fixed intervals for all physical registers are created first, so
        // they occupy the initial RegistersCount * 2 slots of _intervals.
        private void NumberLocals(ControlFlowGraph cfg)
        {
            _operationNodes = new List<(IntrusiveList<Node>, Node)>();

            _intervals = new List<LiveInterval>();

            for (int index = 0; index < RegistersCount; index++)
            {
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
            }

            HashSet<Operand> visited = new HashSet<Operand>();

            _operationsCount = 0;

            // Walk blocks in reverse post-order so positions follow program order.
            for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                for (Node node = block.Operations.First; node != null; node = node.ListNext)
                {
                    _operationNodes.Add((block.Operations, node));

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        // Number each local the first time it is defined.
                        if (dest.Kind == OperandKind.LocalVariable && visited.Add(dest))
                        {
                            dest.NumberLocal(_intervals.Count);

                            _intervals.Add(new LiveInterval(dest));
                        }
                    }
                }

                _operationsCount += block.Operations.Count * InstructionGap;

                if (block.Operations.Count == 0)
                {
                    // Pretend we have a dummy instruction on the empty block.
                    _operationNodes.Add((null, null));

                    _operationsCount += InstructionGap;
                }
            }

            _parentIntervals = _intervals.ToArray();
        }
        // Computes liveness (per-block gen/kill sets, then a live-in/live-out
        // fixpoint) and converts it into live ranges and use positions on each
        // interval. Positions are assigned walking blocks and instructions
        // backwards, counting down from _operationsCount by InstructionGap.
        private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
        {
            _blockRanges = new LiveRange[cfg.Blocks.Count];

            int mapSize = _intervals.Count;

            BitMap[] blkLiveGen = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];

            // Compute local live sets.
            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                // liveGen: operands read before any write inside the block.
                // liveKill: operands written inside the block.
                BitMap liveGen = BitMapPool.Allocate(mapSize);
                BitMap liveKill = BitMapPool.Allocate(mapSize);

                for (Node node = block.Operations.First; node != null; node = node.ListNext)
                {
                    Sources(node, (source) =>
                    {
                        int id = GetOperandId(source);

                        if (!liveKill.IsSet(id))
                        {
                            liveGen.Set(id);
                        }
                    });

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        liveKill.Set(GetOperandId(dest));
                    }
                }

                blkLiveGen [block.Index] = liveGen;
                blkLiveKill[block.Index] = liveKill;
            }

            // Compute global live sets.
            BitMap[] blkLiveIn = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];

            for (int index = 0; index < cfg.Blocks.Count; index++)
            {
                blkLiveIn [index] = BitMapPool.Allocate(mapSize);
                blkLiveOut[index] = BitMapPool.Allocate(mapSize);
            }

            // Iterate to a fixpoint: liveOut = union of successors' liveIn;
            // liveIn = gen | (liveOut & ~kill).
            // NOTE(review): Set(BitMap) is used as union-and-report-change here —
            // confirm its return value semantics with BitMap.
            bool modified;

            do
            {
                modified = false;

                for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
                {
                    BasicBlock block = cfg.PostOrderBlocks[index];

                    BitMap liveOut = blkLiveOut[block.Index];

                    if ((block.Next != null && liveOut.Set(blkLiveIn[block.Next.Index])) ||
                        (block.Branch != null && liveOut.Set(blkLiveIn[block.Branch.Index])))
                    {
                        modified = true;
                    }

                    BitMap liveIn = blkLiveIn[block.Index];

                    liveIn.Set (liveOut);
                    liveIn.Clear(blkLiveKill[block.Index]);
                    liveIn.Set (blkLiveGen [block.Index]);
                }
            }
            while (modified);

            _blockLiveIn = blkLiveIn;

            _blockEdges = new HashSet<int>();

            // Compute lifetime intervals.
            int operationPos = _operationsCount;

            for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                // We handle empty blocks by pretending they have a dummy instruction,
                // because otherwise the block would have the same start and end position,
                // and this is not valid.
                int instCount = Math.Max(block.Operations.Count, 1);

                int blockStart = operationPos - instCount * InstructionGap;
                int blockEnd = operationPos;

                _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);

                _blockEdges.Add(blockStart);

                BitMap liveOut = blkLiveOut[block.Index];

                // Anything live-out initially spans the whole block; definitions
                // found below trim the range's start.
                foreach (int id in liveOut)
                {
                    _intervals[id].AddRange(blockStart, blockEnd);
                }

                if (block.Operations.Count == 0)
                {
                    operationPos -= InstructionGap;

                    continue;
                }

                foreach (Node node in BottomOperations(block))
                {
                    operationPos -= InstructionGap;

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        LiveInterval interval = _intervals[GetOperandId(dest)];

                        // Definitions sit at the odd position (operationPos + 1).
                        interval.SetStart(operationPos + 1);
                        interval.AddUsePosition(operationPos + 1);
                    }

                    Sources(node, (source) =>
                    {
                        LiveInterval interval = _intervals[GetOperandId(source)];

                        interval.AddRange(blockStart, operationPos + 1);
                        interval.AddUsePosition(operationPos);
                    });

                    if (node is Operation operation && operation.Instruction == Instruction.Call)
                    {
                        // Block caller-saved registers across the call so locals
                        // live through it cannot be allocated to them.
                        AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                        AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
                    }
                }
            }
        }
  747. private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
  748. {
  749. while (mask != 0)
  750. {
  751. int regIndex = BitOperations.TrailingZeroCount(mask);
  752. Register callerSavedReg = new Register(regIndex, regType);
  753. LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
  754. interval.AddRange(operationPos + 1, operationPos + InstructionGap);
  755. mask &= ~(1 << regIndex);
  756. }
  757. }
  758. private static int GetOperandId(Operand operand)
  759. {
  760. if (operand.Kind == OperandKind.LocalVariable)
  761. {
  762. return operand.AsInt32();
  763. }
  764. else if (operand.Kind == OperandKind.Register)
  765. {
  766. return GetRegisterId(operand.GetRegister());
  767. }
  768. else
  769. {
  770. throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
  771. }
  772. }
  773. private static int GetRegisterId(Register register)
  774. {
  775. return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
  776. }
  777. private static BasicBlock FirstSuccessor(BasicBlock block)
  778. {
  779. return block.Next ?? block.Branch;
  780. }
  781. private static IEnumerable<Node> BottomOperations(BasicBlock block)
  782. {
  783. Node node = block.Operations.Last;
  784. while (node != null && !(node is PhiNode))
  785. {
  786. yield return node;
  787. node = node.ListPrevious;
  788. }
  789. }
  790. private static void Sources(Node node, Action<Operand> action)
  791. {
  792. for (int index = 0; index < node.SourcesCount; index++)
  793. {
  794. Operand source = node.GetSource(index);
  795. if (IsLocalOrRegister(source.Kind))
  796. {
  797. action(source);
  798. }
  799. else if (source.Kind == OperandKind.Memory)
  800. {
  801. MemoryOperand memOp = (MemoryOperand)source;
  802. if (memOp.BaseAddress != null)
  803. {
  804. action(memOp.BaseAddress);
  805. }
  806. if (memOp.Index != null)
  807. {
  808. action(memOp.Index);
  809. }
  810. }
  811. }
  812. }
  813. private static bool IsLocalOrRegister(OperandKind kind)
  814. {
  815. return kind == OperandKind.LocalVariable ||
  816. kind == OperandKind.Register;
  817. }
  818. }
  819. }