LinearScanAllocator.cs 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046
  1. using ARMeilleure.Common;
  2. using ARMeilleure.IntermediateRepresentation;
  3. using ARMeilleure.Translation;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.Diagnostics;
  7. using System.Linq;
  8. using System.Numerics;
  9. namespace ARMeilleure.CodeGen.RegisterAllocators
  10. {
  11. // Based on:
  12. // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
  13. // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
  14. class LinearScanAllocator : IRegisterAllocator
  15. {
// Each instruction occupies two consecutive positions, leaving a gap where
// split/spill copies can be inserted without renumbering.
private const int InstructionGap = 2;
private const int InstructionGapMask = InstructionGap - 1;

// Number of physical registers per bank (integer / vector).
private const int RegistersCount = 16;

// Positions that start a basic block; split copies at these positions are
// handled at the CFG edges instead (see InsertSplitCopies).
private HashSet<int> _blockEdges;

// Start/end position of each basic block, indexed by block index.
private LiveRange[] _blockRanges;

// Live-in set of each basic block, indexed by block index.
private BitMap[] _blockLiveIn;

// All live intervals: the first RegistersCount * 2 entries are the fixed
// intervals of the physical registers (see NumberLocals), followed by one
// interval per local variable; split children are inserted later.
private List<LiveInterval> _intervals;

// Snapshot of the intervals taken before any split, indexed by operand id.
private LiveInterval[] _parentIntervals;

// Maps position / InstructionGap to the node at that position and its
// owning list, so copies can be inserted before/after it.
private List<(IntrusiveList<Node>, Node)> _operationNodes;

// Total number of positions (operation count times InstructionGap).
private int _operationsCount;
// Mutable state shared by the allocation pass: the active/inactive interval
// sets and the registers used so far.
private class AllocationContext : IDisposable
{
    public RegisterMasks Masks { get; }
    public StackAllocator StackAlloc { get; }

    // Indices of intervals that overlap the current allocation position.
    public BitMap Active { get; }

    // Indices of intervals that started but have a lifetime hole at the
    // current allocation position.
    public BitMap Inactive { get; }

    // Bit masks of the physical registers assigned so far, per bank.
    public int IntUsedRegisters { get; set; }
    public int VecUsedRegisters { get; set; }

    public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
    {
        StackAlloc = stackAlloc;
        Masks = masks;

        // Pooled bit maps; released as a whole in Dispose.
        Active = BitMapPool.Allocate(intervalsCount);
        Inactive = BitMapPool.Allocate(intervalsCount);
    }

    public void MoveActiveToInactive(int bit)
    {
        Move(Active, Inactive, bit);
    }

    public void MoveInactiveToActive(int bit)
    {
        Move(Inactive, Active, bit);
    }

    private static void Move(BitMap source, BitMap dest, int bit)
    {
        source.Clear(bit);
        dest.Set(bit);
    }

    public void Dispose()
    {
        // NOTE(review): releases the whole pool, not just the maps allocated
        // by this context — assumes no other pooled maps are still in use.
        BitMapPool.Release();
    }
}
/// <summary>
/// Runs the linear scan register allocation pass over the given graph,
/// returning the set of used registers and the total stack size used by
/// spilled intervals.
/// </summary>
public AllocationResult RunPass(
    ControlFlowGraph cfg,
    StackAllocator stackAlloc,
    RegisterMasks regMasks)
{
    // Assign ids to locals and create one interval per fixed register and
    // per local variable.
    NumberLocals(cfg);

    AllocationContext context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);

    // Compute the lifetime ranges and use positions of every interval.
    BuildIntervals(cfg, context);

    for (int index = 0; index < _intervals.Count; index++)
    {
        LiveInterval current = _intervals[index];

        if (current.IsEmpty)
        {
            continue;
        }

        if (current.IsFixed)
        {
            // Fixed intervals already have a physical register; just mark
            // the register as used and the interval as active.
            context.Active.Set(index);

            if (current.Register.Type == RegisterType.Integer)
            {
                context.IntUsedRegisters |= 1 << current.Register.Index;
            }
            else /* if (interval.Register.Type == RegisterType.Vector) */
            {
                context.VecUsedRegisters |= 1 << current.Register.Index;
            }

            continue;
        }

        AllocateInterval(context, current, index);
    }

    // Rewrite the IR: replace locals with their assigned registers. The
    // first RegistersCount * 2 entries are the fixed intervals and need no
    // rewriting, so they are skipped.
    for (int index = RegistersCount * 2; index < _intervals.Count; index++)
    {
        if (!_intervals[index].IsSpilled)
        {
            ReplaceLocalWithRegister(_intervals[index]);
        }
    }

    // Connect split intervals with copies, inside blocks and across edges.
    InsertSplitCopies();
    InsertSplitCopiesAtEdges(cfg);

    AllocationResult result = new AllocationResult(
        context.IntUsedRegisters,
        context.VecUsedRegisters,
        context.StackAlloc.TotalSize);

    context.Dispose();

    return result;
}
/// <summary>
/// Allocates a register for the given interval, refreshing the
/// active/inactive sets first and spilling other intervals if no register
/// is free.
/// </summary>
private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex)
{
    // Check active intervals that already ended.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];

        if (interval.GetEnd() < current.GetStart())
        {
            context.Active.Clear(iIndex);
        }
        else if (!interval.Overlaps(current.GetStart()))
        {
            // Still alive, but with a lifetime hole at the current position.
            context.MoveActiveToInactive(iIndex);
        }
    }

    // Check inactive intervals that already ended or were reactivated.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];

        if (interval.GetEnd() < current.GetStart())
        {
            context.Inactive.Clear(iIndex);
        }
        else if (interval.Overlaps(current.GetStart()))
        {
            context.MoveInactiveToActive(iIndex);
        }
    }

    // NOTE(review): the loops above modify the bit maps while enumerating
    // them; this assumes BitMap enumeration tolerates in-place updates.
    if (!TryAllocateRegWithoutSpill(context, current, cIndex))
    {
        AllocateRegWithSpill(context, current, cIndex);
    }
}
/// <summary>
/// Tries to pick the register that stays free the longest for the current
/// interval, splitting the interval if the register is only free for part
/// of its lifetime. Returns false when no register is free at the
/// interval's start, meaning a spill is required.
/// </summary>
private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex)
{
    RegisterType regType = current.Local.Type.ToRegisterType();

    int availableRegisters = context.Masks.GetAvailableRegisters(regType);

    // freePositions[reg] = position where the register stops being free;
    // int.MaxValue means free for the whole lifetime, 0 means not free.
    int[] freePositions = new int[RegistersCount];

    for (int index = 0; index < RegistersCount; index++)
    {
        if ((availableRegisters & (1 << index)) != 0)
        {
            freePositions[index] = int.MaxValue;
        }
    }

    // Registers held by active intervals are unavailable from the start.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];

        if (interval.Register.Type == regType)
        {
            freePositions[interval.Register.Index] = 0;
        }
    }

    // Inactive intervals only block their register from the first position
    // where they overlap the current interval.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];

        if (interval.Register.Type == regType)
        {
            int overlapPosition = interval.GetOverlapPosition(current);

            if (overlapPosition != LiveInterval.NotFound && freePositions[interval.Register.Index] > overlapPosition)
            {
                freePositions[interval.Register.Index] = overlapPosition;
            }
        }
    }

    int selectedReg = GetHighestValueIndex(freePositions);

    int selectedNextUse = freePositions[selectedReg];

    // Intervals starts and ends at odd positions, unless they span an entire
    // block, in this case they will have ranges at a even position.
    // When a interval is loaded from the stack to a register, we can only
    // do the split at a odd position, because otherwise the split interval
    // that is inserted on the list to be processed may clobber a register
    // used by the instruction at the same position as the split.
    // The problem only happens when a interval ends exactly at this instruction,
    // because otherwise they would interfere, and the register wouldn't be selected.
    // When the interval is aligned and the above happens, there's no problem as
    // the instruction that is actually with the last use is the one
    // before that position.
    selectedNextUse &= ~InstructionGapMask;

    if (selectedNextUse <= current.GetStart())
    {
        // No register is free at the start; the caller must spill.
        return false;
    }
    else if (selectedNextUse < current.GetEnd())
    {
        // The register is free only for part of the lifetime: split the
        // interval there and reprocess the remainder later (or spill it
        // outright if it has no remaining uses).
        Debug.Assert(selectedNextUse > current.GetStart(), "Trying to split interval at the start.");

        LiveInterval splitChild = current.Split(selectedNextUse);

        if (splitChild.UsesCount != 0)
        {
            Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

            InsertInterval(splitChild);
        }
        else
        {
            Spill(context, splitChild);
        }
    }

    current.Register = new Register(selectedReg, regType);

    if (regType == RegisterType.Integer)
    {
        context.IntUsedRegisters |= 1 << selectedReg;
    }
    else /* if (regType == RegisterType.Vector) */
    {
        context.VecUsedRegisters |= 1 << selectedReg;
    }

    context.Active.Set(cIndex);

    return true;
}
/// <summary>
/// Allocates a register for the current interval by spilling: picks the
/// register whose competing intervals have the farthest next use, then
/// either spills current (if everything else is used sooner) or splits
/// and spills the intervals occupying the selected register.
/// </summary>
private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex)
{
    RegisterType regType = current.Local.Type.ToRegisterType();

    int availableRegisters = context.Masks.GetAvailableRegisters(regType);

    // usePositions[reg] = next use position of the intervals on that
    // register; blockedPositions[reg] = position where a fixed (unspillable)
    // interval makes the register unusable.
    int[] usePositions = new int[RegistersCount];
    int[] blockedPositions = new int[RegistersCount];

    for (int index = 0; index < RegistersCount; index++)
    {
        if ((availableRegisters & (1 << index)) != 0)
        {
            usePositions[index] = int.MaxValue;

            blockedPositions[index] = int.MaxValue;
        }
    }

    void SetUsePosition(int index, int position)
    {
        usePositions[index] = Math.Min(usePositions[index], position);
    }

    void SetBlockedPosition(int index, int position)
    {
        blockedPositions[index] = Math.Min(blockedPositions[index], position);

        // A blocked position is also a use position.
        SetUsePosition(index, position);
    }

    // Non-fixed active intervals: record their next use after current's start.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];

        if (!interval.IsFixed && interval.Register.Type == regType)
        {
            int nextUse = interval.NextUseAfter(current.GetStart());

            if (nextUse != -1)
            {
                SetUsePosition(interval.Register.Index, nextUse);
            }
        }
    }

    // Non-fixed inactive intervals only matter if they overlap current.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];

        if (!interval.IsFixed && interval.Register.Type == regType && interval.Overlaps(current))
        {
            int nextUse = interval.NextUseAfter(current.GetStart());

            if (nextUse != -1)
            {
                SetUsePosition(interval.Register.Index, nextUse);
            }
        }
    }

    // Fixed active intervals block their register immediately.
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];

        if (interval.IsFixed && interval.Register.Type == regType)
        {
            SetBlockedPosition(interval.Register.Index, 0);
        }
    }

    // Fixed inactive intervals block from their first overlap with current.
    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];

        if (interval.IsFixed && interval.Register.Type == regType)
        {
            int overlapPosition = interval.GetOverlapPosition(current);

            if (overlapPosition != LiveInterval.NotFound)
            {
                SetBlockedPosition(interval.Register.Index, overlapPosition);
            }
        }
    }

    int selectedReg = GetHighestValueIndex(usePositions);

    int currentFirstUse = current.FirstUse();

    Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");

    if (usePositions[selectedReg] < currentFirstUse)
    {
        // All intervals on inactive and active are being used before current,
        // so spill the current interval.
        Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");

        LiveInterval splitChild = current.Split(currentFirstUse);

        Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

        InsertInterval(splitChild);

        Spill(context, current);
    }
    else if (blockedPositions[selectedReg] > current.GetEnd())
    {
        // Spill made the register available for the entire current lifetime,
        // so we only need to split the intervals using the selected register.
        current.Register = new Register(selectedReg, regType);

        SplitAndSpillOverlappingIntervals(context, current);

        context.Active.Set(cIndex);
    }
    else
    {
        // There are conflicts even after spill due to the use of fixed registers
        // that can't be spilled, so we need to also split current at the point of
        // the first fixed register use.
        current.Register = new Register(selectedReg, regType);

        int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;

        Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");

        LiveInterval splitChild = current.Split(splitPosition);

        if (splitChild.UsesCount != 0)
        {
            Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");

            InsertInterval(splitChild);
        }
        else
        {
            Spill(context, splitChild);
        }

        SplitAndSpillOverlappingIntervals(context, current);

        context.Active.Set(cIndex);
    }
}
  324. private static int GetHighestValueIndex(int[] array)
  325. {
  326. int higuest = array[0];
  327. if (higuest == int.MaxValue)
  328. {
  329. return 0;
  330. }
  331. int selected = 0;
  332. for (int index = 1; index < array.Length; index++)
  333. {
  334. int current = array[index];
  335. if (higuest < current)
  336. {
  337. higuest = current;
  338. selected = index;
  339. if (current == int.MaxValue)
  340. {
  341. break;
  342. }
  343. }
  344. }
  345. return selected;
  346. }
/// <summary>
/// Splits and spills every non-fixed active or inactive interval that is
/// currently assigned the same register as the current interval, freeing
/// that register for current's whole lifetime.
/// </summary>
private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current)
{
    foreach (int iIndex in context.Active)
    {
        LiveInterval interval = _intervals[iIndex];

        if (!interval.IsFixed && interval.Register == current.Register)
        {
            SplitAndSpillOverlappingInterval(context, current, interval);

            // NOTE(review): clearing while enumerating — assumes BitMap
            // enumeration tolerates in-place updates.
            context.Active.Clear(iIndex);
        }
    }

    foreach (int iIndex in context.Inactive)
    {
        LiveInterval interval = _intervals[iIndex];

        // Inactive intervals only conflict if they actually overlap current.
        if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
        {
            SplitAndSpillOverlappingInterval(context, current, interval);

            context.Inactive.Clear(iIndex);
        }
    }
}
/// <summary>
/// Splits a conflicting interval at the start of the current interval,
/// spilling the overlapping portion. Any portion that still has uses is
/// re-inserted on the pending list so it gets a new register later.
/// </summary>
private void SplitAndSpillOverlappingInterval(
    AllocationContext context,
    LiveInterval current,
    LiveInterval interval)
{
    // If there's a next use after the start of the current interval,
    // we need to split the spilled interval twice, and re-insert it
    // on the "pending" list to ensure that it will get a new register
    // on that use position.
    int nextUse = interval.NextUseAfter(current.GetStart());

    LiveInterval splitChild;

    if (interval.GetStart() < current.GetStart())
    {
        // The part before the conflict keeps its current register.
        splitChild = interval.Split(current.GetStart());
    }
    else
    {
        splitChild = interval;
    }

    if (nextUse != -1)
    {
        Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");

        if (nextUse > splitChild.GetStart())
        {
            // Spill only the gap between the conflict and the next use.
            LiveInterval right = splitChild.Split(nextUse);

            Spill(context, splitChild);

            splitChild = right;
        }

        InsertInterval(splitChild);
    }
    else
    {
        // No further uses: spill the whole remaining part.
        Spill(context, splitChild);
    }
}
/// <summary>
/// Re-inserts a split interval on the pending intervals list, keeping the
/// list sorted so the main allocation loop processes it in order.
/// </summary>
private void InsertInterval(LiveInterval interval)
{
    Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
    Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
    Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");

    // Skip the fixed register intervals at the front of the list.
    int startIndex = RegistersCount * 2;

    // A null comparer uses LiveInterval's default ordering — presumably by
    // start position; confirm on LiveInterval's IComparable implementation.
    int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);

    if (insertIndex < 0)
    {
        // BinarySearch returns the bitwise complement of the insertion point
        // when no exact match exists.
        insertIndex = ~insertIndex;
    }

    _intervals.Insert(insertIndex, interval);
}
/// <summary>
/// Assigns a stack slot to an interval that could not keep a register.
/// The interval must be non-fixed and have no remaining uses.
/// </summary>
private void Spill(AllocationContext context, LiveInterval interval)
{
    Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
    Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");

    // We first check if any of the siblings were spilled, if so we can reuse
    // the stack offset. Otherwise, we allocate a new space on the stack.
    // This prevents stack-to-stack copies being necessary for a split interval.
    if (!interval.TrySpillWithSiblingOffset())
    {
        interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
    }
}
  428. private void InsertSplitCopies()
  429. {
  430. Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();
  431. CopyResolver GetCopyResolver(int position)
  432. {
  433. CopyResolver copyResolver = new CopyResolver();
  434. if (copyResolvers.TryAdd(position, copyResolver))
  435. {
  436. return copyResolver;
  437. }
  438. return copyResolvers[position];
  439. }
  440. foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
  441. {
  442. LiveInterval previous = interval;
  443. foreach (LiveInterval splitChild in interval.SplitChilds())
  444. {
  445. int splitPosition = splitChild.GetStart();
  446. if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
  447. {
  448. GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
  449. }
  450. previous = splitChild;
  451. }
  452. }
  453. foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
  454. {
  455. CopyResolver copyResolver = kv.Value;
  456. if (!copyResolver.HasCopy)
  457. {
  458. continue;
  459. }
  460. int splitPosition = kv.Key;
  461. (IntrusiveList<Node> nodes, Node node) = GetOperationNode(splitPosition);
  462. Operation[] sequence = copyResolver.Sequence();
  463. nodes.AddBefore(node, sequence[0]);
  464. node = sequence[0];
  465. for (int index = 1; index < sequence.Length; index++)
  466. {
  467. nodes.AddAfter(node, sequence[index]);
  468. node = sequence[index];
  469. }
  470. }
  471. }
/// <summary>
/// Inserts the copies that connect split intervals across control flow
/// edges: when the split child live at the end of a block differs from the
/// one live at the start of a successor, a copy is placed on that edge
/// (splitting critical edges when necessary).
/// </summary>
private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
{
    int blocksCount = cfg.Blocks.Count;

    // Blocks with an index past the original count were created by a
    // previous SplitEdge call.
    bool IsSplitEdgeBlock(BasicBlock block)
    {
        return block.Index >= blocksCount;
    }

    for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
    {
        if (IsSplitEdgeBlock(block))
        {
            continue;
        }

        bool hasSingleOrNoSuccessor = block.SuccessorCount <= 1;

        for (int i = 0; i < block.SuccessorCount; i++)
        {
            BasicBlock successor = block.GetSuccessor(i);

            int succIndex = successor.Index;

            // If the current node is a split node, then the actual successor node
            // (the successor before the split) should be right after it.
            if (IsSplitEdgeBlock(successor))
            {
                succIndex = successor.GetSuccessor(0).Index;
            }

            CopyResolver copyResolver = new CopyResolver();

            foreach (int iIndex in _blockLiveIn[succIndex])
            {
                LiveInterval interval = _parentIntervals[iIndex];

                if (!interval.IsSplit)
                {
                    continue;
                }

                // Child live at the last position of this block vs. child
                // live at the first position of the successor.
                int lEnd = _blockRanges[block.Index].End - 1;
                int rStart = _blockRanges[succIndex].Start;

                LiveInterval left = interval.GetSplitChild(lEnd);
                LiveInterval right = interval.GetSplitChild(rStart);

                if (left != null && right != null && left != right)
                {
                    copyResolver.AddSplit(left, right);
                }
            }

            if (!copyResolver.HasCopy)
            {
                continue;
            }

            Operation[] sequence = copyResolver.Sequence();

            if (hasSingleOrNoSuccessor)
            {
                // Safe to append at the end of the predecessor block.
                foreach (Operation operation in sequence)
                {
                    block.Append(operation);
                }
            }
            else if (successor.Predecessors.Count == 1)
            {
                // Safe to prepend at the start of the successor block.
                successor.Operations.AddFirst(sequence[0]);

                Node prependNode = sequence[0];

                for (int index = 1; index < sequence.Length; index++)
                {
                    Operation operation = sequence[index];

                    successor.Operations.AddAfter(prependNode, operation);

                    prependNode = operation;
                }
            }
            else
            {
                // Split the critical edge.
                BasicBlock splitBlock = cfg.SplitEdge(block, successor);

                foreach (Operation operation in sequence)
                {
                    splitBlock.Append(operation);
                }
            }
        }
    }
}
/// <summary>
/// Rewrites every use and definition of the interval's local variable to
/// the physical register assigned to the interval.
/// </summary>
private void ReplaceLocalWithRegister(LiveInterval current)
{
    Operand register = GetRegister(current);

    // NOTE(review): use positions appear to be stored negated (note the
    // unary minus below); confirm against LiveInterval.AddUsePosition.
    IList<int> usePositions = current.UsePositions();

    for (int i = usePositions.Count - 1; i >= 0; i--)
    {
        int usePosition = -usePositions[i];

        (_, Node operation) = GetOperationNode(usePosition);

        // Replace uses on sources, including the base address and index of
        // memory operands.
        for (int index = 0; index < operation.SourcesCount; index++)
        {
            Operand source = operation.GetSource(index);

            if (source == current.Local)
            {
                operation.SetSource(index, register);
            }
            else if (source.Kind == OperandKind.Memory)
            {
                MemoryOperand memOp = (MemoryOperand)source;

                if (memOp.BaseAddress == current.Local)
                {
                    memOp.BaseAddress = register;
                }

                if (memOp.Index == current.Local)
                {
                    memOp.Index = register;
                }
            }
        }

        // Replace definitions on destinations.
        for (int index = 0; index < operation.DestinationsCount; index++)
        {
            Operand dest = operation.GetDestination(index);

            if (dest == current.Local)
            {
                operation.SetDestination(index, register);
            }
        }
    }
}
  586. private static Operand GetRegister(LiveInterval interval)
  587. {
  588. Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
  589. return OperandHelper.Register(
  590. interval.Register.Index,
  591. interval.Register.Type,
  592. interval.Local.Type);
  593. }
  594. private (IntrusiveList<Node>, Node) GetOperationNode(int position)
  595. {
  596. return _operationNodes[position / InstructionGap];
  597. }
/// <summary>
/// Numbers every local variable with a unique interval id and records the
/// node at each position, creating the fixed register intervals first and
/// one interval per local after them.
/// </summary>
private void NumberLocals(ControlFlowGraph cfg)
{
    _operationNodes = new List<(IntrusiveList<Node>, Node)>();

    _intervals = new List<LiveInterval>();

    // The first RegistersCount * 2 intervals are the fixed intervals, one
    // per physical register of the integer and vector banks (their ids
    // match GetRegisterId).
    for (int index = 0; index < RegistersCount; index++)
    {
        _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
        _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
    }

    HashSet<Operand> visited = new HashSet<Operand>();

    _operationsCount = 0;

    // Walk post-order blocks backwards, i.e. reverse post-order, so that
    // positions grow in forward control flow order.
    for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
    {
        BasicBlock block = cfg.PostOrderBlocks[index];

        for (Node node = block.Operations.First; node != null; node = node.ListNext)
        {
            _operationNodes.Add((block.Operations, node));

            for (int i = 0; i < node.DestinationsCount; i++)
            {
                Operand dest = node.GetDestination(i);

                // Assign an id to each local the first time it is defined.
                if (dest.Kind == OperandKind.LocalVariable && visited.Add(dest))
                {
                    dest.NumberLocal(_intervals.Count);

                    _intervals.Add(new LiveInterval(dest));
                }
            }
        }

        _operationsCount += block.Operations.Count * InstructionGap;

        if (block.Operations.Count == 0)
        {
            // Pretend we have a dummy instruction on the empty block.
            _operationNodes.Add((null, null));

            _operationsCount += InstructionGap;
        }
    }

    // Snapshot before any split, used to look up split children by parent.
    _parentIntervals = _intervals.ToArray();
}
/// <summary>
/// Computes the live intervals of all locals and fixed registers:
/// classic gen/kill liveness analysis to a fixed point, followed by a
/// backwards walk that builds ranges and use positions.
/// </summary>
private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
{
    _blockRanges = new LiveRange[cfg.Blocks.Count];

    int mapSize = _intervals.Count;

    BitMap[] blkLiveGen  = new BitMap[cfg.Blocks.Count];
    BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];

    // Compute local live sets.
    for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
    {
        BitMap liveGen  = BitMapPool.Allocate(mapSize);
        BitMap liveKill = BitMapPool.Allocate(mapSize);

        for (Node node = block.Operations.First; node != null; node = node.ListNext)
        {
            // A source read before any write in this block is live-in (gen).
            Sources(node, (source) =>
            {
                int id = GetOperandId(source);

                if (!liveKill.IsSet(id))
                {
                    liveGen.Set(id);
                }
            });

            // Every written operand is killed by this block.
            for (int i = 0; i < node.DestinationsCount; i++)
            {
                Operand dest = node.GetDestination(i);

                liveKill.Set(GetOperandId(dest));
            }
        }

        blkLiveGen [block.Index] = liveGen;
        blkLiveKill[block.Index] = liveKill;
    }

    // Compute global live sets.
    BitMap[] blkLiveIn  = new BitMap[cfg.Blocks.Count];
    BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];

    for (int index = 0; index < cfg.Blocks.Count; index++)
    {
        blkLiveIn [index] = BitMapPool.Allocate(mapSize);
        blkLiveOut[index] = BitMapPool.Allocate(mapSize);
    }

    // Iterate to a fixed point: liveOut = union of successors' liveIn;
    // liveIn = gen | (liveOut & ~kill).
    bool modified;

    do
    {
        modified = false;

        for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
        {
            BasicBlock block = cfg.PostOrderBlocks[index];

            BitMap liveOut = blkLiveOut[block.Index];

            for (int i = 0; i < block.SuccessorCount; i++)
            {
                BasicBlock succ = block.GetSuccessor(i);

                modified |= liveOut.Set(blkLiveIn[succ.Index]);
            }

            BitMap liveIn = blkLiveIn[block.Index];

            liveIn.Set  (liveOut);
            liveIn.Clear(blkLiveKill[block.Index]);
            liveIn.Set  (blkLiveGen [block.Index]);
        }
    }
    while (modified);

    _blockLiveIn = blkLiveIn;

    _blockEdges = new HashSet<int>();

    // Compute lifetime intervals, walking blocks and instructions
    // backwards so ranges are built from last use towards definition.
    int operationPos = _operationsCount;

    for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
    {
        BasicBlock block = cfg.PostOrderBlocks[index];

        // We handle empty blocks by pretending they have a dummy instruction,
        // because otherwise the block would have the same start and end position,
        // and this is not valid.
        int instCount = Math.Max(block.Operations.Count, 1);

        int blockStart = operationPos - instCount * InstructionGap;
        int blockEnd = operationPos;

        _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);

        _blockEdges.Add(blockStart);

        BitMap liveOut = blkLiveOut[block.Index];

        // Everything live-out is conservatively live for the whole block;
        // ranges are trimmed below when the definition is found.
        foreach (int id in liveOut)
        {
            _intervals[id].AddRange(blockStart, blockEnd);
        }

        if (block.Operations.Count == 0)
        {
            operationPos -= InstructionGap;

            continue;
        }

        foreach (Node node in BottomOperations(block))
        {
            operationPos -= InstructionGap;

            for (int i = 0; i < node.DestinationsCount; i++)
            {
                Operand dest = node.GetDestination(i);

                LiveInterval interval = _intervals[GetOperandId(dest)];

                interval.SetStart(operationPos + 1);
                interval.AddUsePosition(operationPos + 1);
            }

            Sources(node, (source) =>
            {
                LiveInterval interval = _intervals[GetOperandId(source)];

                interval.AddRange(blockStart, operationPos + 1);
                interval.AddUsePosition(operationPos);
            });

            if (node is Operation operation && operation.Instruction == Instruction.Call)
            {
                // Block all caller saved registers across the call.
                AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
            }
        }
    }
}
  742. private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
  743. {
  744. while (mask != 0)
  745. {
  746. int regIndex = BitOperations.TrailingZeroCount(mask);
  747. Register callerSavedReg = new Register(regIndex, regType);
  748. LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
  749. interval.AddRange(operationPos + 1, operationPos + InstructionGap);
  750. mask &= ~(1 << regIndex);
  751. }
  752. }
  753. private static int GetOperandId(Operand operand)
  754. {
  755. if (operand.Kind == OperandKind.LocalVariable)
  756. {
  757. return operand.AsInt32();
  758. }
  759. else if (operand.Kind == OperandKind.Register)
  760. {
  761. return GetRegisterId(operand.GetRegister());
  762. }
  763. else
  764. {
  765. throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
  766. }
  767. }
  768. private static int GetRegisterId(Register register)
  769. {
  770. return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
  771. }
  772. private static IEnumerable<Node> BottomOperations(BasicBlock block)
  773. {
  774. Node node = block.Operations.Last;
  775. while (node != null && !(node is PhiNode))
  776. {
  777. yield return node;
  778. node = node.ListPrevious;
  779. }
  780. }
  781. private static void Sources(Node node, Action<Operand> action)
  782. {
  783. for (int index = 0; index < node.SourcesCount; index++)
  784. {
  785. Operand source = node.GetSource(index);
  786. if (IsLocalOrRegister(source.Kind))
  787. {
  788. action(source);
  789. }
  790. else if (source.Kind == OperandKind.Memory)
  791. {
  792. MemoryOperand memOp = (MemoryOperand)source;
  793. if (memOp.BaseAddress != null)
  794. {
  795. action(memOp.BaseAddress);
  796. }
  797. if (memOp.Index != null)
  798. {
  799. action(memOp.Index);
  800. }
  801. }
  802. }
  803. }
  804. private static bool IsLocalOrRegister(OperandKind kind)
  805. {
  806. return kind == OperandKind.LocalVariable ||
  807. kind == OperandKind.Register;
  808. }
  809. }
  810. }