//===-- RegAllocPBQP.h ------------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the PBQPBuilder interface, for classes which build PBQP
// instances to represent register allocation problems, and the RegAllocPBQP
// pass which uses a PBQP problem to drive register allocation.
//
//===----------------------------------------------------------------------===//
16 #ifndef LLVM_CODEGEN_REGALLOCPBQP_H
17 #define LLVM_CODEGEN_REGALLOCPBQP_H
19 #include "llvm/CodeGen/MachineFunctionPass.h"
20 #include "llvm/CodeGen/PBQPRAConstraint.h"
21 #include "llvm/CodeGen/PBQP/CostAllocator.h"
22 #include "llvm/CodeGen/PBQP/ReductionRules.h"
23 #include "llvm/Support/ErrorHandling.h"
29 /// @brief Spill option index.
30 inline unsigned getSpillOptionIdx() { return 0; }
32 /// \brief Metadata to speed allocatability test.
34 /// Keeps track of the number of infinities in each row and column.
35 class MatrixMetadata {
37 MatrixMetadata(const MatrixMetadata&);
38 void operator=(const MatrixMetadata&);
40 MatrixMetadata(const Matrix& M)
41 : WorstRow(0), WorstCol(0),
42 UnsafeRows(new bool[M.getRows() - 1]()),
43 UnsafeCols(new bool[M.getCols() - 1]()) {
45 unsigned* ColCounts = new unsigned[M.getCols() - 1]();
47 for (unsigned i = 1; i < M.getRows(); ++i) {
48 unsigned RowCount = 0;
49 for (unsigned j = 1; j < M.getCols(); ++j) {
50 if (M[i][j] == std::numeric_limits<PBQPNum>::infinity()) {
53 UnsafeRows[i - 1] = true;
54 UnsafeCols[j - 1] = true;
57 WorstRow = std::max(WorstRow, RowCount);
59 unsigned WorstColCountForCurRow =
60 *std::max_element(ColCounts, ColCounts + M.getCols() - 1);
61 WorstCol = std::max(WorstCol, WorstColCountForCurRow);
65 unsigned getWorstRow() const { return WorstRow; }
66 unsigned getWorstCol() const { return WorstCol; }
67 const bool* getUnsafeRows() const { return UnsafeRows.get(); }
68 const bool* getUnsafeCols() const { return UnsafeCols.get(); }
71 unsigned WorstRow, WorstCol;
72 std::unique_ptr<bool[]> UnsafeRows;
73 std::unique_ptr<bool[]> UnsafeCols;
78 typedef std::vector<unsigned> OptionToRegMap;
80 typedef enum { Unprocessed,
82 ConservativelyAllocatable,
83 NotProvablyAllocatable } ReductionState;
86 : RS(Unprocessed), NumOpts(0), DeniedOpts(0), OptUnsafeEdges(nullptr),
89 // FIXME: Re-implementing default behavior to work around MSVC. Remove once
90 // MSVC synthesizes move constructors properly.
91 NodeMetadata(const NodeMetadata &Other)
92 : RS(Other.RS), NumOpts(Other.NumOpts), DeniedOpts(Other.DeniedOpts),
93 OptUnsafeEdges(new unsigned[NumOpts]), VReg(Other.VReg),
94 OptionRegs(Other.OptionRegs) {
95 std::copy(&Other.OptUnsafeEdges[0], &Other.OptUnsafeEdges[NumOpts],
99 // FIXME: Re-implementing default behavior to work around MSVC. Remove once
100 // MSVC synthesizes move constructors properly.
101 NodeMetadata(NodeMetadata &&Other)
102 : RS(Other.RS), NumOpts(Other.NumOpts), DeniedOpts(Other.DeniedOpts),
103 OptUnsafeEdges(std::move(Other.OptUnsafeEdges)), VReg(Other.VReg),
104 OptionRegs(std::move(Other.OptionRegs)) {}
106 // FIXME: Re-implementing default behavior to work around MSVC. Remove once
107 // MSVC synthesizes move constructors properly.
108 NodeMetadata& operator=(const NodeMetadata &Other) {
110 NumOpts = Other.NumOpts;
111 DeniedOpts = Other.DeniedOpts;
112 OptUnsafeEdges.reset(new unsigned[NumOpts]);
113 std::copy(Other.OptUnsafeEdges.get(), Other.OptUnsafeEdges.get() + NumOpts,
114 OptUnsafeEdges.get());
116 OptionRegs = Other.OptionRegs;
120 // FIXME: Re-implementing default behavior to work around MSVC. Remove once
121 // MSVC synthesizes move constructors properly.
122 NodeMetadata& operator=(NodeMetadata &&Other) {
124 NumOpts = Other.NumOpts;
125 DeniedOpts = Other.DeniedOpts;
126 OptUnsafeEdges = std::move(Other.OptUnsafeEdges);
128 OptionRegs = std::move(Other.OptionRegs);
132 void setVReg(unsigned VReg) { this->VReg = VReg; }
133 unsigned getVReg() const { return VReg; }
135 void setOptionRegs(OptionToRegMap OptionRegs) {
136 this->OptionRegs = std::move(OptionRegs);
138 const OptionToRegMap& getOptionRegs() const { return OptionRegs; }
140 void setup(const Vector& Costs) {
141 NumOpts = Costs.getLength() - 1;
142 OptUnsafeEdges = std::unique_ptr<unsigned[]>(new unsigned[NumOpts]());
145 ReductionState getReductionState() const { return RS; }
146 void setReductionState(ReductionState RS) { this->RS = RS; }
148 void handleAddEdge(const MatrixMetadata& MD, bool Transpose) {
149 DeniedOpts += Transpose ? MD.getWorstCol() : MD.getWorstRow();
150 const bool* UnsafeOpts =
151 Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
152 for (unsigned i = 0; i < NumOpts; ++i)
153 OptUnsafeEdges[i] += UnsafeOpts[i];
156 void handleRemoveEdge(const MatrixMetadata& MD, bool Transpose) {
157 DeniedOpts -= Transpose ? MD.getWorstCol() : MD.getWorstRow();
158 const bool* UnsafeOpts =
159 Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
160 for (unsigned i = 0; i < NumOpts; ++i)
161 OptUnsafeEdges[i] -= UnsafeOpts[i];
164 bool isConservativelyAllocatable() const {
165 return (DeniedOpts < NumOpts) ||
166 (std::find(&OptUnsafeEdges[0], &OptUnsafeEdges[NumOpts], 0) !=
167 &OptUnsafeEdges[NumOpts]);
174 std::unique_ptr<unsigned[]> OptUnsafeEdges;
176 OptionToRegMap OptionRegs;
179 class RegAllocSolverImpl {
181 typedef MDMatrix<MatrixMetadata> RAMatrix;
183 typedef PBQP::Vector RawVector;
184 typedef PBQP::Matrix RawMatrix;
185 typedef PBQP::Vector Vector;
186 typedef RAMatrix Matrix;
187 typedef PBQP::PoolCostAllocator<Vector, Matrix> CostAllocator;
189 typedef GraphBase::NodeId NodeId;
190 typedef GraphBase::EdgeId EdgeId;
192 typedef RegAlloc::NodeMetadata NodeMetadata;
194 struct EdgeMetadata { };
196 class GraphMetadata {
198 GraphMetadata(MachineFunction &MF,
200 MachineBlockFrequencyInfo &MBFI)
201 : MF(MF), LIS(LIS), MBFI(MBFI) {}
205 MachineBlockFrequencyInfo &MBFI;
207 void setNodeIdForVReg(unsigned VReg, GraphBase::NodeId NId) {
208 VRegToNodeId[VReg] = NId;
211 GraphBase::NodeId getNodeIdForVReg(unsigned VReg) const {
212 auto VRegItr = VRegToNodeId.find(VReg);
213 if (VRegItr == VRegToNodeId.end())
214 return GraphBase::invalidNodeId();
215 return VRegItr->second;
218 void eraseNodeIdForVReg(unsigned VReg) {
219 VRegToNodeId.erase(VReg);
223 DenseMap<unsigned, NodeId> VRegToNodeId;
226 typedef PBQP::Graph<RegAllocSolverImpl> Graph;
228 RegAllocSolverImpl(Graph &G) : G(G) {}
234 S = backpropagate(G, reduce());
239 void handleAddNode(NodeId NId) {
240 G.getNodeMetadata(NId).setup(G.getNodeCosts(NId));
242 void handleRemoveNode(NodeId NId) {}
243 void handleSetNodeCosts(NodeId NId, const Vector& newCosts) {}
245 void handleAddEdge(EdgeId EId) {
246 handleReconnectEdge(EId, G.getEdgeNode1Id(EId));
247 handleReconnectEdge(EId, G.getEdgeNode2Id(EId));
250 void handleRemoveEdge(EdgeId EId) {
251 handleDisconnectEdge(EId, G.getEdgeNode1Id(EId));
252 handleDisconnectEdge(EId, G.getEdgeNode2Id(EId));
255 void handleDisconnectEdge(EdgeId EId, NodeId NId) {
256 NodeMetadata& NMd = G.getNodeMetadata(NId);
257 const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
258 NMd.handleRemoveEdge(MMd, NId == G.getEdgeNode2Id(EId));
259 if (G.getNodeDegree(NId) == 3) {
260 // This node is becoming optimally reducible.
261 moveToOptimallyReducibleNodes(NId);
262 } else if (NMd.getReductionState() ==
263 NodeMetadata::NotProvablyAllocatable &&
264 NMd.isConservativelyAllocatable()) {
265 // This node just became conservatively allocatable.
266 moveToConservativelyAllocatableNodes(NId);
270 void handleReconnectEdge(EdgeId EId, NodeId NId) {
271 NodeMetadata& NMd = G.getNodeMetadata(NId);
272 const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
273 NMd.handleAddEdge(MMd, NId == G.getEdgeNode2Id(EId));
276 void handleSetEdgeCosts(EdgeId EId, const Matrix& NewCosts) {
277 handleRemoveEdge(EId);
279 NodeId N1Id = G.getEdgeNode1Id(EId);
280 NodeId N2Id = G.getEdgeNode2Id(EId);
281 NodeMetadata& N1Md = G.getNodeMetadata(N1Id);
282 NodeMetadata& N2Md = G.getNodeMetadata(N2Id);
283 const MatrixMetadata& MMd = NewCosts.getMetadata();
284 N1Md.handleAddEdge(MMd, N1Id != G.getEdgeNode1Id(EId));
285 N2Md.handleAddEdge(MMd, N2Id != G.getEdgeNode1Id(EId));
290 void removeFromCurrentSet(NodeId NId) {
291 switch (G.getNodeMetadata(NId).getReductionState()) {
292 case NodeMetadata::Unprocessed: break;
293 case NodeMetadata::OptimallyReducible:
294 assert(OptimallyReducibleNodes.find(NId) !=
295 OptimallyReducibleNodes.end() &&
296 "Node not in optimally reducible set.");
297 OptimallyReducibleNodes.erase(NId);
299 case NodeMetadata::ConservativelyAllocatable:
300 assert(ConservativelyAllocatableNodes.find(NId) !=
301 ConservativelyAllocatableNodes.end() &&
302 "Node not in conservatively allocatable set.");
303 ConservativelyAllocatableNodes.erase(NId);
305 case NodeMetadata::NotProvablyAllocatable:
306 assert(NotProvablyAllocatableNodes.find(NId) !=
307 NotProvablyAllocatableNodes.end() &&
308 "Node not in not-provably-allocatable set.");
309 NotProvablyAllocatableNodes.erase(NId);
314 void moveToOptimallyReducibleNodes(NodeId NId) {
315 removeFromCurrentSet(NId);
316 OptimallyReducibleNodes.insert(NId);
317 G.getNodeMetadata(NId).setReductionState(
318 NodeMetadata::OptimallyReducible);
321 void moveToConservativelyAllocatableNodes(NodeId NId) {
322 removeFromCurrentSet(NId);
323 ConservativelyAllocatableNodes.insert(NId);
324 G.getNodeMetadata(NId).setReductionState(
325 NodeMetadata::ConservativelyAllocatable);
328 void moveToNotProvablyAllocatableNodes(NodeId NId) {
329 removeFromCurrentSet(NId);
330 NotProvablyAllocatableNodes.insert(NId);
331 G.getNodeMetadata(NId).setReductionState(
332 NodeMetadata::NotProvablyAllocatable);
337 for (auto NId : G.nodeIds()) {
338 if (G.getNodeDegree(NId) < 3)
339 moveToOptimallyReducibleNodes(NId);
340 else if (G.getNodeMetadata(NId).isConservativelyAllocatable())
341 moveToConservativelyAllocatableNodes(NId);
343 moveToNotProvablyAllocatableNodes(NId);
347 // Compute a reduction order for the graph by iteratively applying PBQP
348 // reduction rules. Locally optimal rules are applied whenever possible (R0,
349 // R1, R2). If no locally-optimal rules apply then any conservatively
350 // allocatable node is reduced. Finally, if no conservatively allocatable
351 // node exists then the node with the lowest spill-cost:degree ratio is
353 std::vector<GraphBase::NodeId> reduce() {
354 assert(!G.empty() && "Cannot reduce empty graph.");
356 typedef GraphBase::NodeId NodeId;
357 std::vector<NodeId> NodeStack;
359 // Consume worklists.
361 if (!OptimallyReducibleNodes.empty()) {
362 NodeSet::iterator NItr = OptimallyReducibleNodes.begin();
364 OptimallyReducibleNodes.erase(NItr);
365 NodeStack.push_back(NId);
366 switch (G.getNodeDegree(NId)) {
375 default: llvm_unreachable("Not an optimally reducible node.");
377 } else if (!ConservativelyAllocatableNodes.empty()) {
378 // Conservatively allocatable nodes will never spill. For now just
379 // take the first node in the set and push it on the stack. When we
380 // start optimizing more heavily for register preferencing, it may
381 // would be better to push nodes with lower 'expected' or worst-case
382 // register costs first (since early nodes are the most
384 NodeSet::iterator NItr = ConservativelyAllocatableNodes.begin();
386 ConservativelyAllocatableNodes.erase(NItr);
387 NodeStack.push_back(NId);
388 G.disconnectAllNeighborsFromNode(NId);
390 } else if (!NotProvablyAllocatableNodes.empty()) {
391 NodeSet::iterator NItr =
392 std::min_element(NotProvablyAllocatableNodes.begin(),
393 NotProvablyAllocatableNodes.end(),
394 SpillCostComparator(G));
396 NotProvablyAllocatableNodes.erase(NItr);
397 NodeStack.push_back(NId);
398 G.disconnectAllNeighborsFromNode(NId);
406 class SpillCostComparator {
408 SpillCostComparator(const Graph& G) : G(G) {}
409 bool operator()(NodeId N1Id, NodeId N2Id) {
410 PBQPNum N1SC = G.getNodeCosts(N1Id)[0] / G.getNodeDegree(N1Id);
411 PBQPNum N2SC = G.getNodeCosts(N2Id)[0] / G.getNodeDegree(N2Id);
419 typedef std::set<NodeId> NodeSet;
420 NodeSet OptimallyReducibleNodes;
421 NodeSet ConservativelyAllocatableNodes;
422 NodeSet NotProvablyAllocatableNodes;
425 class PBQPRAGraph : public PBQP::Graph<RegAllocSolverImpl> {
427 typedef PBQP::Graph<RegAllocSolverImpl> BaseT;
429 PBQPRAGraph(GraphMetadata Metadata) : BaseT(Metadata) {}
432 inline Solution solve(PBQPRAGraph& G) {
435 RegAllocSolverImpl RegAllocSolver(G);
436 return RegAllocSolver.solve();
439 } // namespace RegAlloc
442 /// @brief Create a PBQP register allocator instance.
444 createPBQPRegisterAllocator(char *customPassID = nullptr);
448 #endif /* LLVM_CODEGEN_REGALLOCPBQP_H */