//===-- ARMSubtarget.cpp - ARM Subtarget Information ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the ARM specific subclass of TargetSubtarget.
//
//===----------------------------------------------------------------------===//

#include "ARMSubtarget.h"
#include "ARMGenSubtarget.inc"
#include "ARMBaseRegisterInfo.h"
#include "llvm/GlobalValue.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/ADT/SmallVector.h"

using namespace llvm;

static cl::opt<bool>
ReserveR9("arm-reserve-r9", cl::Hidden,
          cl::desc("Reserve R9, making it unavailable as GPR"));

static cl::opt<bool>
DarwinUseMOVT("arm-darwin-use-movt", cl::init(true), cl::Hidden);

static cl::opt<bool>
StrictAlign("arm-strict-align", cl::Hidden,
            cl::desc("Disallow all unaligned memory accesses"));

ARMSubtarget::ARMSubtarget(const std::string &TT, const std::string &FS,
                           bool isT)
  : ARMArchVersion(V4)
  , ARMProcFamily(Others)
  , UseNEONForSinglePrecisionFP(false)
  , IsThumb(isT)
  , ThumbMode(Thumb1)
  , PostRAScheduler(false)
  , IsR9Reserved(ReserveR9)
  , UseMovt(false)
  , HasHardwareDivide(false)
  , HasT2ExtractPack(false)
  , HasDataBarrier(false)
  , Pref32BitThumb(false)
  , HasMPExtension(false)
  , AllowsUnalignedMem(false)
  , CPUString("generic")
  , TargetTriple(TT)
  , TargetABI(ARM_ABI_APCS) {
  // Default to soft float ABI
  if (FloatABIType == FloatABI::Default)
    FloatABIType = FloatABI::Soft;
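  // (FloatABIType is the global float-ABI setting from
  // llvm/Target/TargetOptions.h; soft float is the conservative default when
  // the user has not picked an ABI explicitly.)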

  // Determine default and user specified characteristics

  // When no arch is specified either by CPU or by attributes, make the default
  // ARMv4T.
  const char *ARMArchFeature = "";
  if (CPUString == "generic" && (FS.empty() || FS == "generic")) {
    ARMArchVersion = V4T;
    ARMArchFeature = ",+v4t";
  }

  // Set the boolean corresponding to the current target triple, or the default
  // if one cannot be determined, to true.
  unsigned Len = TT.length();
  unsigned Idx = 0;
  if (Len >= 5 && TT.substr(0, 4) == "armv")
    Idx = 4;
  else if (Len >= 6 && TT.substr(0, 5) == "thumb") {
    IsThumb = true;
    if (Len >= 7 && TT[5] == 'v')
      Idx = 6;
  }
  if (Idx) {
    unsigned SubVer = TT[Idx];
    if (SubVer >= '7' && SubVer <= '9') {
      ARMArchVersion = V7A;
      ARMArchFeature = ",+v7a";
      if (Len >= Idx+2 && TT[Idx+1] == 'm') {
        ARMArchVersion = V7M;
        ARMArchFeature = ",+v7m";
      }
    } else if (SubVer == '6') {
      ARMArchVersion = V6;
      ARMArchFeature = ",+v6";
      if (Len >= Idx+3 && TT[Idx+1] == 't' && TT[Idx+2] == '2') {
        ARMArchVersion = V6T2;
        ARMArchFeature = ",+v6t2";
      }
    } else if (SubVer == '5') {
      ARMArchVersion = V5T;
      ARMArchFeature = ",+v5t";
      if (Len >= Idx+3 && TT[Idx+1] == 't' && TT[Idx+2] == 'e') {
        ARMArchVersion = V5TE;
        ARMArchFeature = ",+v5te";
      }
    } else if (SubVer == '4') {
      if (Len >= Idx+2 && TT[Idx+1] == 't') {
        ARMArchVersion = V4T;
        ARMArchFeature = ",+v4t";
      }
    }
  }

  if (TT.find("eabi") != std::string::npos)
    TargetABI = ARM_ABI_AAPCS;
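  // Note: any triple containing "eabi" (e.g. arm-none-eabi or
  // armv7-linux-gnueabi) selects the ARM EABI / AAPCS variant above; all other
  // triples keep the older APCS ABI.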

  // Parse features string. If the first entry in FS (the CPU) is missing,
  // insert the architecture feature derived from the target triple. This is
  // important for setting features that are implied based on the architecture
  // version.
  std::string FSWithArch;
  if (FS.empty())
    FSWithArch = std::string(ARMArchFeature);
  else if (FS.find(',') == 0)
    FSWithArch = std::string(ARMArchFeature) + FS;
  else
    FSWithArch = FS;
  CPUString = ParseSubtargetFeatures(FSWithArch, CPUString);
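  // (ParseSubtargetFeatures is generated by TableGen from the ARM .td files
  // via ARMGenSubtarget.inc; it sets the subtarget feature bits and returns
  // the CPU name to use.)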

  // After parsing Itineraries, set ItinData.IssueWidth.
  computeIssueWidth();

  // Thumb2 implies at least V6T2.
  if (ARMArchVersion >= V6T2)
    ThumbMode = Thumb2;
  else if (ThumbMode >= Thumb2)
    ARMArchVersion = V6T2;

  if (!isTargetDarwin())
    UseMovt = hasV6T2Ops();
  else {
    IsR9Reserved = ReserveR9 | (ARMArchVersion < V6);
    UseMovt = DarwinUseMOVT && hasV6T2Ops();
  }

  if (!isThumb() || hasThumb2())
    PostRAScheduler = true;
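  // (Whether the post-RA scheduler actually runs is decided in
  // enablePostRAScheduler() below, which also checks the optimization level.)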

  // v6+ may or may not support unaligned mem access depending on the system
  // configuration; Darwin targets are known to allow it.
  if (!StrictAlign && hasV6Ops() && isTargetDarwin())
    AllowsUnalignedMem = true;
}

/// GVIsIndirectSymbol - true if the GV will be accessed via an indirect symbol.
bool
ARMSubtarget::GVIsIndirectSymbol(const GlobalValue *GV,
                                 Reloc::Model RelocM) const {
  if (RelocM == Reloc::Static)
    return false;

  // Materializable GVs (in JIT lazy compilation mode) do not require an extra
  // load from stub.
  bool isDecl = GV->isDeclaration() && !GV->isMaterializable();

  if (!isTargetDarwin()) {
    // Extra load is needed for all externally visible.
    if (GV->hasLocalLinkage() || GV->hasHiddenVisibility())
      return false;
    return true;
  } else {
    if (RelocM == Reloc::PIC_) {
      // If this is a strong reference to a definition, it is definitely not
      // through a stub.
      if (!isDecl && !GV->isWeakForLinker())
        return false;

      // Unless we have a symbol with hidden visibility, we have to go through a
      // normal $non_lazy_ptr stub because this symbol might be resolved late.
      if (!GV->hasHiddenVisibility())  // Non-hidden $non_lazy_ptr reference.
        return true;

      // If symbol visibility is hidden, we have a stub for common symbol
      // references and external declarations.
      if (isDecl || GV->hasCommonLinkage())
        // Hidden $non_lazy_ptr reference.
        return true;

      return false;
    } else {
      // If this is a strong reference to a definition, it is definitely not
      // through a stub.
      if (!isDecl && !GV->isWeakForLinker())
        return false;

      // Unless we have a symbol with hidden visibility, we have to go through a
      // normal $non_lazy_ptr stub because this symbol might be resolved late.
      if (!GV->hasHiddenVisibility())  // Non-hidden $non_lazy_ptr reference.
        return true;
    }
  }

  return false;
}
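
/// getMispredictionPenalty - A rough per-CPU estimate, in cycles, of the cost
/// of a mispredicted branch, used by the backend's if-conversion heuristics to
/// weigh predication against branching.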
unsigned ARMSubtarget::getMispredictionPenalty() const {
  // If we have a reasonable estimate of the pipeline depth, then we can
  // estimate the penalty of a misprediction based on that.
  if (isCortexA8())
    return 13;
  else if (isCortexA9())
    return 8;
  // Otherwise, just return a sensible default.
  return 10;
}
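
/// computeIssueWidth - Derive the issue width from the scheduling itineraries:
/// OR together the functional-unit masks used by the first stage of every
/// itinerary and count how many distinct units that leaves.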
void ARMSubtarget::computeIssueWidth() {
  unsigned allStage1Units = 0;
  for (const InstrItinerary *itin = InstrItins.Itineraries;
       itin->FirstStage != ~0U; ++itin) {
    const InstrStage *IS = InstrItins.Stages + itin->FirstStage;
    allStage1Units |= IS->getUnits();
  }
  InstrItins.IssueWidth = 0;
  while (allStage1Units) {
    ++InstrItins.IssueWidth;
    // Clear the lowest set bit: (x & ~(x - 1)) isolates it, XOR removes it.
    allStage1Units ^= allStage1Units & ~(allStage1Units - 1);
  }
  assert(InstrItins.IssueWidth <= 2 && "itinerary bug, too many stage 1 units");
}
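
/// enablePostRAScheduler - Tell the generic post-RA scheduler how to run on
/// ARM: break anti-dependences only along the critical path, treat the
/// general-purpose registers as the critical register class, and only run when
/// the subtarget enabled it and we are optimizing.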
bool ARMSubtarget::enablePostRAScheduler(
                       CodeGenOpt::Level OptLevel,
                       TargetSubtarget::AntiDepBreakMode& Mode,
                       RegClassVector& CriticalPathRCs) const {
  Mode = TargetSubtarget::ANTIDEP_CRITICAL;
  CriticalPathRCs.clear();
  CriticalPathRCs.push_back(&ARM::GPRRegClass);
  return PostRAScheduler && OptLevel >= CodeGenOpt::Default;
}