1 //===-- JITEmitter.cpp - Write machine code to executable memory ----------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file defines a MachineCodeEmitter object that is used by the JIT to
11 // write machine code to memory and remember where relocatable values are.
13 //===----------------------------------------------------------------------===//
16 #include "llvm/ADT/DenseMap.h"
17 #include "llvm/ADT/SmallPtrSet.h"
18 #include "llvm/ADT/SmallVector.h"
19 #include "llvm/ADT/Statistic.h"
20 #include "llvm/CodeGen/JITCodeEmitter.h"
21 #include "llvm/CodeGen/MachineCodeInfo.h"
22 #include "llvm/CodeGen/MachineConstantPool.h"
23 #include "llvm/CodeGen/MachineFunction.h"
24 #include "llvm/CodeGen/MachineJumpTableInfo.h"
25 #include "llvm/CodeGen/MachineModuleInfo.h"
26 #include "llvm/CodeGen/MachineRelocation.h"
27 #include "llvm/ExecutionEngine/GenericValue.h"
28 #include "llvm/ExecutionEngine/JITEventListener.h"
29 #include "llvm/ExecutionEngine/JITMemoryManager.h"
30 #include "llvm/IR/Constants.h"
31 #include "llvm/IR/DataLayout.h"
32 #include "llvm/IR/DebugInfo.h"
33 #include "llvm/IR/DerivedTypes.h"
34 #include "llvm/IR/Module.h"
35 #include "llvm/IR/ValueHandle.h"
36 #include "llvm/IR/ValueMap.h"
37 #include "llvm/Support/Debug.h"
38 #include "llvm/Support/Disassembler.h"
39 #include "llvm/Support/ErrorHandling.h"
40 #include "llvm/Support/ManagedStatic.h"
41 #include "llvm/Support/Memory.h"
42 #include "llvm/Support/MutexGuard.h"
43 #include "llvm/Support/raw_ostream.h"
44 #include "llvm/Target/TargetInstrInfo.h"
45 #include "llvm/Target/TargetJITInfo.h"
46 #include "llvm/Target/TargetMachine.h"
47 #include "llvm/Target/TargetOptions.h"
54 #define DEBUG_TYPE "jit"
56 STATISTIC(NumBytes, "Number of bytes of machine code compiled");
57 STATISTIC(NumRelos, "Number of relocations applied");
58 STATISTIC(NumRetries, "Number of retries with more memory");
61 // A declaration may stop being a declaration once it's fully read from bitcode.
62 // This function returns true if F is fully read and is still a declaration.
63 static bool isNonGhostDeclaration(const Function *F) {
64 return F->isDeclaration() && !F->isMaterializable();
67 //===----------------------------------------------------------------------===//
68 // JIT lazy compilation code.
72 class JITResolverState;
74 template<typename ValueTy>
75 struct NoRAUWValueMapConfig : public ValueMapConfig<ValueTy> {
76 typedef JITResolverState *ExtraData;
77 static void onRAUW(JITResolverState *, Value *Old, Value *New) {
78 llvm_unreachable("The JIT doesn't know how to handle a"
79 " RAUW on a value it has emitted.");
83 struct CallSiteValueMapConfig : public NoRAUWValueMapConfig<Function*> {
84 typedef JITResolverState *ExtraData;
85 static void onDelete(JITResolverState *JRS, Function *F);
// NOTE(review): this dump is missing interior lines (the embedded original
// line numbers jump) — access specifiers, the TheJIT member declaration,
// several closing braces, and some statements are absent. Code below is kept
// byte-identical to the dump; only comments are added.
88 class JITResolverState {
// Map from a Function to the machine-code stub emitted for it.
90 typedef ValueMap<Function*, void*, NoRAUWValueMapConfig<Function*> >
91 FunctionToLazyStubMapTy;
// AssertingVH fires an assert if the Function dies while still referenced.
92 typedef std::map<void*, AssertingVH<Function> > CallSiteToFunctionMapTy;
93 typedef ValueMap<Function *, SmallPtrSet<void*, 1>,
94 CallSiteValueMapConfig> FunctionToCallSitesMapTy;
95 typedef std::map<AssertingVH<GlobalValue>, void*> GlobalToIndirectSymMapTy;
97 /// FunctionToLazyStubMap - Keep track of the lazy stub created for a
98 /// particular function so that we can reuse them if necessary.
99 FunctionToLazyStubMapTy FunctionToLazyStubMap;
101 /// CallSiteToFunctionMap - Keep track of the function that each lazy call
102 /// site corresponds to, and vice versa.
103 CallSiteToFunctionMapTy CallSiteToFunctionMap;
104 FunctionToCallSitesMapTy FunctionToCallSitesMap;
106 /// GlobalToIndirectSymMap - Keep track of the indirect symbol created for a
107 /// particular GlobalVariable so that we can reuse them if necessary.
108 GlobalToIndirectSymMapTy GlobalToIndirectSymMap;
111 /// Instance of the JIT this ResolverState serves.
// NOTE(review): the declaration itself (presumably `JIT *TheJIT;`) is not in
// this dump — TODO confirm against the original file.
116 JITResolverState(JIT *jit) : FunctionToLazyStubMap(this),
117 FunctionToCallSitesMap(this) {
// The two accessors below require the caller to already hold TheJIT->lock;
// the MutexGuard parameter is proof of that (checked by the asserts).
123 FunctionToLazyStubMapTy& getFunctionToLazyStubMap(
124 const MutexGuard& locked) {
125 assert(locked.holds(TheJIT->lock));
126 return FunctionToLazyStubMap;
129 GlobalToIndirectSymMapTy& getGlobalToIndirectSymMap(const MutexGuard& lck) {
130 assert(lck.holds(TheJIT->lock));
131 return GlobalToIndirectSymMap;
// Given a (possibly slightly offset) call-site address, find the Function it
// was registered for. NOTE(review): the lines that step the iterator back
// and return the pair appear to be missing from this dump.
134 std::pair<void *, Function *> LookupFunctionFromCallSite(
135 const MutexGuard &locked, void *CallSite) const {
136 assert(locked.holds(TheJIT->lock));
138 // The address given to us for the stub may not be exactly right, it
139 // might be a little bit after the stub. As such, use upper_bound to
141 CallSiteToFunctionMapTy::const_iterator I =
142 CallSiteToFunctionMap.upper_bound(CallSite);
143 assert(I != CallSiteToFunctionMap.begin() &&
144 "This is not a known call site!");
// Record the two-way association between a lazy call site and its Function.
149 void AddCallSite(const MutexGuard &locked, void *CallSite, Function *F) {
150 assert(locked.holds(TheJIT->lock));
152 bool Inserted = CallSiteToFunctionMap.insert(
153 std::make_pair(CallSite, F)).second;
155 assert(Inserted && "Pair was already in CallSiteToFunctionMap");
156 FunctionToCallSitesMap[F].insert(CallSite);
159 void EraseAllCallSitesForPrelocked(Function *F);
161 // Erases _all_ call sites regardless of their function. This is used to
162 // unregister the stub addresses from the StubToResolverMap in
164 void EraseAllCallSitesPrelocked();
167 /// JITResolver - Keep track of, and resolve, call sites for functions that
168 /// have not yet been compiled.
// NOTE(review): the `class JITResolver {` opener (and, later, member
// declarations such as the JITEmitter &JE and JIT *TheJIT referenced by the
// constructor initializer list) are missing from this dump — TODO confirm.
170 typedef JITResolverState::FunctionToLazyStubMapTy FunctionToLazyStubMapTy;
171 typedef JITResolverState::CallSiteToFunctionMapTy CallSiteToFunctionMapTy;
172 typedef JITResolverState::GlobalToIndirectSymMapTy GlobalToIndirectSymMapTy;
174 /// LazyResolverFn - The target lazy resolver function that we actually
175 /// rewrite instructions to use.
176 TargetJITInfo::LazyResolverFn LazyResolverFn;
// All mutable resolver bookkeeping lives in `state`, guarded by TheJIT->lock.
178 JITResolverState state;
180 /// ExternalFnToStubMap - This is the equivalent of FunctionToLazyStubMap
181 /// for external functions. TODO: Of course, external functions don't need
182 /// a lazy stub. It's actually here to make it more likely that far calls
183 /// succeed, but no single stub can guarantee that. I'll remove this in a
184 /// subsequent checkin when I actually fix far calls.
185 std::map<void*, void*> ExternalFnToStubMap;
187 /// revGOTMap - map addresses to indexes in the GOT
188 std::map<void*, unsigned> revGOTMap;
189 unsigned nextGOTIndex;
193 /// Instance of JIT corresponding to this Resolver.
// Constructor captures the target's lazy-resolver entry point once, so stubs
// emitted later can branch to it.
197 explicit JITResolver(JIT &jit, JITEmitter &je)
198 : state(&jit), nextGOTIndex(0), JE(je), TheJIT(&jit) {
199 LazyResolverFn = jit.getJITInfo().getLazyResolverFunction(JITCompilerFn);
204 /// getLazyFunctionStubIfAvailable - This returns a pointer to a function's
205 /// lazy-compilation stub if it has already been created.
206 void *getLazyFunctionStubIfAvailable(Function *F);
208 /// getLazyFunctionStub - This returns a pointer to a function's
209 /// lazy-compilation stub, creating one on demand as needed.
210 void *getLazyFunctionStub(Function *F);
212 /// getExternalFunctionStub - Return a stub for the function at the
213 /// specified address, created lazily on demand.
214 void *getExternalFunctionStub(void *FnAddr);
216 /// getGlobalValueIndirectSym - Return an indirect symbol containing the
217 /// specified GV address.
218 void *getGlobalValueIndirectSym(GlobalValue *V, void *GVAddress);
220 /// getGOTIndexForAddress - Return a new or existing index in the GOT for
221 /// an address. This function only manages slots, it does not manage the
222 /// contents of the slots or the memory associated with the GOT.
223 unsigned getGOTIndexForAddr(void *addr);
225 /// JITCompilerFn - This function is called to resolve a stub to a compiled
226 /// address. If the LLVM Function corresponding to the stub has not yet
227 /// been compiled, this function compiles it first.
// Static because it is invoked from emitted machine code with no context
// other than the stub address itself.
228 static void *JITCompilerFn(void *Stub);
// NOTE(review): interior lines (access specifiers, erase call in
// UnregisterStubResolver, returns, closing braces) are missing from this
// dump; code below is kept byte-identical.
231 class StubToResolverMapTy {
232 /// Map a stub address to a specific instance of a JITResolver so that
233 /// lazily-compiled functions can find the right resolver to use.
236 std::map<void*, JITResolver*> Map;
238 /// Guards Map from concurrent accesses.
239 mutable sys::Mutex Lock;
242 /// Registers a Stub to be resolved by Resolver.
243 void RegisterStubResolver(void *Stub, JITResolver *Resolver) {
244 MutexGuard guard(Lock);
245 Map.insert(std::make_pair(Stub, Resolver));
247 /// Unregisters the Stub when it's invalidated.
// NOTE(review): the Map.erase(Stub) statement is presumably on a missing
// line — TODO confirm.
248 void UnregisterStubResolver(void *Stub) {
249 MutexGuard guard(Lock);
252 /// Returns the JITResolver instance that owns the Stub.
253 JITResolver *getResolverFromStub(void *Stub) const {
254 MutexGuard guard(Lock);
255 // The address given to us for the stub may not be exactly right, it might
256 // be a little bit after the stub. As such, use upper_bound to find it.
257 // This is the same trick as in LookupFunctionFromCallSite from
259 std::map<void*, JITResolver*>::const_iterator I = Map.upper_bound(Stub);
260 assert(I != Map.begin() && "This is not a known stub!");
264 /// True if any stubs refer to the given resolver. Only used in an assert().
// Linear scan is acceptable: this runs only inside an assertion.
266 bool ResolverHasStubs(JITResolver* Resolver) const {
267 MutexGuard guard(Lock);
268 for (std::map<void*, JITResolver*>::const_iterator I = Map.begin(),
269 E = Map.end(); I != E; ++I) {
270 if (I->second == Resolver)
276 /// This needs to be static so that a lazy call stub can access it with no
277 /// context except the address of the stub.
// ManagedStatic defers construction until first use and ties destruction to
// llvm_shutdown().
278 ManagedStatic<StubToResolverMapTy> StubToResolverMap;
280 /// JITEmitter - The JIT implementation of the MachineCodeEmitter, which is
281 /// used to output functions to memory for execution.
// NOTE(review): many interior lines of this class (access specifiers, some
// member declarations such as JumpTableBase and TheJIT, method bodies'
// closing braces, the final `};`) are missing from this dump; the code below
// is kept byte-identical and only comments are added.
282 class JITEmitter : public JITCodeEmitter {
283 JITMemoryManager *MemMgr;
285 // When outputting a function stub in the context of some other function, we
286 // save BufferBegin/BufferEnd/CurBufferPtr here.
287 uint8_t *SavedBufferBegin, *SavedBufferEnd, *SavedCurBufferPtr;
289 // When reattempting to JIT a function after running out of space, we store
290 // the estimated size of the function we're trying to JIT here, so we can
291 // ask the memory manager for at least this much space. When we
292 // successfully emit the function, we reset this back to zero.
293 uintptr_t SizeEstimate;
295 /// Relocations - These are the relocations that the function needs, as
297 std::vector<MachineRelocation> Relocations;
299 /// MBBLocations - This vector is a mapping from MBB ID's to their address.
300 /// It is filled in by the StartMachineBasicBlock callback and queried by
301 /// the getMachineBasicBlockAddress callback.
302 std::vector<uintptr_t> MBBLocations;
304 /// ConstantPool - The constant pool for the current function.
306 MachineConstantPool *ConstantPool;
308 /// ConstantPoolBase - A pointer to the first entry in the constant pool.
310 void *ConstantPoolBase;
312 /// ConstPoolAddresses - Addresses of individual constant pool entries.
314 SmallVector<uintptr_t, 8> ConstPoolAddresses;
316 /// JumpTable - The jump tables for the current function.
318 MachineJumpTableInfo *JumpTable;
320 /// JumpTableBase - A pointer to the first entry in the jump table.
// NOTE(review): the JumpTableBase declaration itself appears to be on a
// missing line — TODO confirm.
324 /// Resolver - This contains info about the currently resolved functions.
325 JITResolver Resolver;
327 /// LabelLocations - This vector is a mapping from Label ID's to their
329 DenseMap<MCSymbol*, uintptr_t> LabelLocations;
331 /// MMI - Machine module info for exception informations
332 MachineModuleInfo* MMI;
334 // CurFn - The llvm function being emitted. Only valid during
336 const Function *CurFn;
338 /// Information about emitted code, which is passed to the
339 /// JITEventListeners. This is reset in startFunction and used in
341 JITEvent_EmittedFunctionDetails EmissionDetails;
// Per-function allocation record kept in EmittedFunctions (the enclosing
// `struct EmittedCode {` line is missing from this dump).
344 void *FunctionBody; // Beginning of the function's allocation.
345 void *Code; // The address the function's code actually starts at.
346 void *ExceptionTable;
347 EmittedCode() : FunctionBody(nullptr), Code(nullptr),
348 ExceptionTable(nullptr) {}
// ValueMap config so EmittedFunctions reacts to Function deletion/RAUW.
350 struct EmittedFunctionConfig : public ValueMapConfig<const Function*> {
351 typedef JITEmitter *ExtraData;
352 static void onDelete(JITEmitter *, const Function*);
353 static void onRAUW(JITEmitter *, const Function*, const Function*);
355 ValueMap<const Function *, EmittedCode,
356 EmittedFunctionConfig> EmittedFunctions;
360 /// Instance of the JIT
364 JITEmitter(JIT &jit, JITMemoryManager *JMM, TargetMachine &TM)
365 : SizeEstimate(0), Resolver(jit, *this), MMI(nullptr), CurFn(nullptr),
366 EmittedFunctions(this), TheJIT(&jit) {
// Fall back to the default memory manager when the caller supplies none.
367 MemMgr = JMM ? JMM : JITMemoryManager::CreateDefaultMemManager();
368 if (jit.getJITInfo().needsGOT()) {
369 MemMgr->AllocateGOT();
370 DEBUG(dbgs() << "JIT is managing a GOT\n");
378 JITResolver &getJITResolver() { return Resolver; }
380 void startFunction(MachineFunction &F) override;
381 bool finishFunction(MachineFunction &F) override;
383 void emitConstantPool(MachineConstantPool *MCP);
384 void initJumpTableInfo(MachineJumpTableInfo *MJTI);
385 void emitJumpTableInfo(MachineJumpTableInfo *MJTI);
387 void startGVStub(const GlobalValue* GV,
388 unsigned StubSize, unsigned Alignment = 1);
389 void startGVStub(void *Buffer, unsigned StubSize);
391 void *allocIndirectGV(const GlobalValue *GV, const uint8_t *Buffer,
392 size_t Size, unsigned Alignment) override;
394 /// allocateSpace - Reserves space in the current block if any, or
395 /// allocate a new one of the given size.
396 void *allocateSpace(uintptr_t Size, unsigned Alignment) override;
398 /// allocateGlobal - Allocate memory for a global. Unlike allocateSpace,
399 /// this method does not allocate memory in the current output buffer,
400 /// because a global may live longer than the current function.
401 void *allocateGlobal(uintptr_t Size, unsigned Alignment) override;
403 void addRelocation(const MachineRelocation &MR) override {
404 Relocations.push_back(MR);
// Record the address at which each MBB starts; over-allocates (x2) to
// reduce the number of future resizes.
407 void StartMachineBasicBlock(MachineBasicBlock *MBB) override {
408 if (MBBLocations.size() <= (unsigned)MBB->getNumber())
409 MBBLocations.resize((MBB->getNumber()+1)*2);
410 MBBLocations[MBB->getNumber()] = getCurrentPCValue();
411 if (MBB->hasAddressTaken())
412 TheJIT->addPointerToBasicBlock(MBB->getBasicBlock(),
413 (void*)getCurrentPCValue());
414 DEBUG(dbgs() << "JIT: Emitting BB" << MBB->getNumber() << " at ["
415 << (void*) getCurrentPCValue() << "]\n");
418 uintptr_t getConstantPoolEntryAddress(unsigned Entry) const override;
419 uintptr_t getJumpTableEntryAddress(unsigned Entry) const override;
422 getMachineBasicBlockAddress(MachineBasicBlock *MBB) const override {
423 assert(MBBLocations.size() > (unsigned)MBB->getNumber() &&
424 MBBLocations[MBB->getNumber()] && "MBB not emitted!");
425 return MBBLocations[MBB->getNumber()];
428 /// retryWithMoreMemory - Log a retry and deallocate all memory for the
429 /// given function. Increase the minimum allocation size so that we get
430 /// more memory next time.
431 void retryWithMoreMemory(MachineFunction &F);
433 /// deallocateMemForFunction - Deallocate all memory for the specified
435 void deallocateMemForFunction(const Function *F);
437 void processDebugLoc(DebugLoc DL, bool BeforePrintingInsn) override;
439 void emitLabel(MCSymbol *Label) override {
440 LabelLocations[Label] = getCurrentPCValue();
443 DenseMap<MCSymbol*, uintptr_t> *getLabelLocations() override {
444 return &LabelLocations;
447 uintptr_t getLabelAddress(MCSymbol *Label) const override {
448 assert(LabelLocations.count(Label) && "Label not emitted!");
449 return LabelLocations.find(Label)->second;
452 void setModuleInfo(MachineModuleInfo* Info) override {
457 void *getPointerToGlobal(GlobalValue *GV, void *Reference,
458 bool MayNeedFarStub);
459 void *getPointerToGVIndirectSym(GlobalValue *V, void *Reference);
463 void CallSiteValueMapConfig::onDelete(JITResolverState *JRS, Function *F) {
464 JRS->EraseAllCallSitesForPrelocked(F);
467 void JITResolverState::EraseAllCallSitesForPrelocked(Function *F) {
468 FunctionToCallSitesMapTy::iterator F2C = FunctionToCallSitesMap.find(F);
469 if (F2C == FunctionToCallSitesMap.end())
471 StubToResolverMapTy &S2RMap = *StubToResolverMap;
472 for (SmallPtrSet<void*, 1>::const_iterator I = F2C->second.begin(),
473 E = F2C->second.end(); I != E; ++I) {
474 S2RMap.UnregisterStubResolver(*I);
475 bool Erased = CallSiteToFunctionMap.erase(*I);
477 assert(Erased && "Missing call site->function mapping");
479 FunctionToCallSitesMap.erase(F2C);
482 void JITResolverState::EraseAllCallSitesPrelocked() {
483 StubToResolverMapTy &S2RMap = *StubToResolverMap;
484 for (CallSiteToFunctionMapTy::const_iterator
485 I = CallSiteToFunctionMap.begin(),
486 E = CallSiteToFunctionMap.end(); I != E; ++I) {
487 S2RMap.UnregisterStubResolver(I->first);
489 CallSiteToFunctionMap.clear();
490 FunctionToCallSitesMap.clear();
493 JITResolver::~JITResolver() {
494 // No need to lock because we're in the destructor, and state isn't shared.
495 state.EraseAllCallSitesPrelocked();
496 assert(!StubToResolverMap->ResolverHasStubs(this) &&
497 "Resolver destroyed with stubs still alive.");
500 /// getLazyFunctionStubIfAvailable - This returns a pointer to a function stub
501 /// if it has already been created.
502 void *JITResolver::getLazyFunctionStubIfAvailable(Function *F) {
503 MutexGuard locked(TheJIT->lock);
505 // If we already have a stub for this function, recycle it.
506 return state.getFunctionToLazyStubMap(locked).lookup(F);
509 /// getFunctionStub - This returns a pointer to a function stub, creating
510 /// one on demand as needed.
// NOTE(review): several lines of this function are missing from this dump
// (closing braces, the finishGVStub call after emitFunctionStub, the final
// `return Stub;`) — code kept byte-identical, comments only.
511 void *JITResolver::getLazyFunctionStub(Function *F) {
512 MutexGuard locked(TheJIT->lock);
514 // If we already have a lazy stub for this function, recycle it.
// Note: operator[] default-inserts a null slot; `Stub` is a reference into
// the map, so the assignment further down fills that slot in place.
515 void *&Stub = state.getFunctionToLazyStubMap(locked)[F];
516 if (Stub) return Stub;
518 // Call the lazy resolver function if we are JIT'ing lazily. Otherwise we
519 // must resolve the symbol now.
520 void *Actual = TheJIT->isCompilingLazily()
521 ? (void *)(intptr_t)LazyResolverFn : (void *)nullptr;
523 // If this is an external declaration, attempt to resolve the address now
524 // to place in the stub.
525 if (isNonGhostDeclaration(F) || F->hasAvailableExternallyLinkage()) {
526 Actual = TheJIT->getPointerToFunction(F);
528 // If we resolved the symbol to a null address (eg. a weak external)
529 // don't emit a stub. Return a null pointer to the application.
530 if (!Actual) return nullptr;
// Ask the target how big/aligned its stubs are, then emit one.
533 TargetJITInfo::StubLayout SL = TheJIT->getJITInfo().getStubLayout();
534 JE.startGVStub(F, SL.Size, SL.Alignment);
535 // Codegen a new stub, calling the lazy resolver or the actual address of the
536 // external function, if it was resolved.
537 Stub = TheJIT->getJITInfo().emitFunctionStub(F, Actual, JE);
540 if (Actual != (void*)(intptr_t)LazyResolverFn) {
541 // If we are getting the stub for an external function, we really want the
542 // address of the stub in the GlobalAddressMap for the JIT, not the address
543 // of the external function.
544 TheJIT->updateGlobalMapping(F, Stub);
547 DEBUG(dbgs() << "JIT: Lazy stub emitted at [" << Stub << "] for function '"
548 << F->getName() << "'\n");
550 if (TheJIT->isCompilingLazily()) {
551 // Register this JITResolver as the one corresponding to this call site so
552 // JITCompilerFn will be able to find it.
553 StubToResolverMap->RegisterStubResolver(Stub, this);
555 // Finally, keep track of the stub-to-Function mapping so that the
556 // JITCompilerFn knows which function to compile!
557 state.AddCallSite(locked, Stub, F);
558 } else if (!Actual) {
559 // If we are JIT'ing non-lazily but need to call a function that does not
560 // exist yet, add it to the JIT's work list so that we can fill in the
561 // stub address later.
562 assert(!isNonGhostDeclaration(F) && !F->hasAvailableExternallyLinkage() &&
563 "'Actual' should have been set above.");
564 TheJIT->addPendingFunction(F);
570 /// getGlobalValueIndirectSym - Return a lazy pointer containing the specified
// NOTE(review): missing from this dump: the rest of this doc comment, the
// continuation of the emitGlobalValueIndirectSym argument list (presumably
// passing JE), and the final `return IndirectSym;` — code kept byte-identical.
572 void *JITResolver::getGlobalValueIndirectSym(GlobalValue *GV, void *GVAddress) {
572 is preceded by the lock: all resolver state is guarded by TheJIT->lock.
573 MutexGuard locked(TheJIT->lock);
575 // If we already have a stub for this global variable, recycle it.
// operator[] default-inserts; IndirectSym aliases the map slot.
576 void *&IndirectSym = state.getGlobalToIndirectSymMap(locked)[GV];
577 if (IndirectSym) return IndirectSym;
579 // Otherwise, codegen a new indirect symbol.
580 IndirectSym = TheJIT->getJITInfo().emitGlobalValueIndirectSym(GV, GVAddress,
583 DEBUG(dbgs() << "JIT: Indirect symbol emitted at [" << IndirectSym
584 << "] for GV '" << GV->getName() << "'\n");
589 /// getExternalFunctionStub - Return a stub for the function at the
590 /// specified address, created lazily on demand.
// NOTE(review): the lines finishing the stub (presumably JE.finishGVStub())
// and the final `return Stub;` are missing from this dump — TODO confirm.
// Unlike getLazyFunctionStub, this takes no lock; ExternalFnToStubMap is a
// plain member of the resolver.
591 void *JITResolver::getExternalFunctionStub(void *FnAddr) {
592 // If we already have a stub for this function, recycle it.
593 void *&Stub = ExternalFnToStubMap[FnAddr];
594 if (Stub) return Stub;
596 TargetJITInfo::StubLayout SL = TheJIT->getJITInfo().getStubLayout();
// nullptr GV: this stub is not associated with any GlobalValue.
597 JE.startGVStub(nullptr, SL.Size, SL.Alignment);
598 Stub = TheJIT->getJITInfo().emitFunctionStub(nullptr, FnAddr, JE);
601 DEBUG(dbgs() << "JIT: Stub emitted at [" << Stub
602 << "] for external function at '" << FnAddr << "'\n");
// getGOTIndexForAddr - Return the GOT slot index for `addr`, allocating the
// next free index on first use. Slot 0 is never handed out (indices start
// at ++nextGOTIndex), so a zero lookup result means "not yet assigned".
// NOTE(review): the guard around the allocation (presumably `if (!idx) {`)
// and the trailing lines (DEBUG continuation, `return idx;`) are missing
// from this dump — code kept byte-identical.
606 unsigned JITResolver::getGOTIndexForAddr(void* addr) {
607 unsigned idx = revGOTMap[addr];
609 idx = ++nextGOTIndex;
610 revGOTMap[addr] = idx;
611 DEBUG(dbgs() << "JIT: Adding GOT entry " << idx << " for addr ["
617 /// JITCompilerFn - This function is called when a lazy compilation stub has
618 /// been entered. It looks up which function this stub corresponds to, compiles
619 /// it if necessary, then returns the resultant function pointer.
// NOTE(review): this dump is missing the braces that scope the two MutexGuard
// locks, the assignments of F/ActualPtr from the lookup result, the early
// return when Result is non-null, and the final `return Result;` — code kept
// byte-identical, comments only.
620 void *JITResolver::JITCompilerFn(void *Stub) {
// Static entry point: recover the owning resolver from the stub address.
621 JITResolver *JR = StubToResolverMap->getResolverFromStub(Stub);
622 assert(JR && "Unable to find the corresponding JITResolver to the call site");
624 Function* F = nullptr;
625 void* ActualPtr = nullptr;
628 // Only lock for getting the Function. The call getPointerToFunction made
629 // in this function might trigger function materializing, which requires
630 // JIT lock to be unlocked.
631 MutexGuard locked(JR->TheJIT->lock);
633 // The address given to us for the stub may not be exactly right, it might
634 // be a little bit after the stub. As such, use upper_bound to find it.
635 std::pair<void*, Function*> I =
636 JR->state.LookupFunctionFromCallSite(locked, Stub);
641 // If we have already code generated the function, just return the address.
642 void *Result = JR->TheJIT->getPointerToGlobalIfAvailable(F);
645 // Otherwise we don't have it, do lazy compilation now.
647 // If lazy compilation is disabled, emit a useful error message and abort.
648 if (!JR->TheJIT->isCompilingLazily()) {
649 report_fatal_error("LLVM JIT requested to do lazy compilation of"
651 + F->getName() + "' when lazy compiles are disabled!");
654 DEBUG(dbgs() << "JIT: Lazily resolving function '" << F->getName()
655 << "' In stub ptr = " << Stub << " actual ptr = "
656 << ActualPtr << "\n");
// This call may compile the function; it runs with the JIT lock released.
659 Result = JR->TheJIT->getPointerToFunction(F);
662 // Reacquire the lock to update the GOT map.
663 MutexGuard locked(JR->TheJIT->lock);
665 // We might like to remove the call site from the CallSiteToFunction map, but
666 // we can't do that! Multiple threads could be stuck, waiting to acquire the
667 // lock above. As soon as the 1st function finishes compiling the function,
668 // the next one will be released, and needs to be able to find the function it
671 // FIXME: We could rewrite all references to this stub if we knew them.
673 // What we will do is set the compiled function address to map to the
674 // same GOT entry as the stub so that later clients may update the GOT
675 // if they see it still using the stub address.
676 // Note: this is done so the Resolver doesn't have to manage GOT memory
677 // Do this without allocating map space if the target isn't using a GOT
678 if(JR->revGOTMap.find(Stub) != JR->revGOTMap.end())
679 JR->revGOTMap[Result] = JR->revGOTMap[Stub];
684 //===----------------------------------------------------------------------===//
// getPointerToGlobal - Resolve a GlobalValue referenced by emitted code to a
// concrete address: globals and aliases are delegated to the JIT; functions
// may resolve to their compiled body, their lazily-created stub, or null.
// NOTE(review): the early `if (FnStub) return FnStub;` and some closing
// braces appear to be on lines missing from this dump — code kept
// byte-identical.
687 void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
688 bool MayNeedFarStub) {
689 if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
690 return TheJIT->getOrEmitGlobalVariable(GV);
692 if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V))
693 return TheJIT->getPointerToGlobal(GA->getAliasee());
695 // If we have already compiled the function, return a pointer to its body.
696 Function *F = cast<Function>(V);
698 void *FnStub = Resolver.getLazyFunctionStubIfAvailable(F);
700 // Return the function stub if it's already created. We do this first so
701 // that we're returning the same address for the function as any previous
702 // call. TODO: Yes, this is wrong. The lazy stub isn't guaranteed to be
703 // close enough to call.
707 // If we know the target can handle arbitrary-distance calls, try to
708 // return a direct pointer.
709 if (!MayNeedFarStub) {
710 // If we have code, go ahead and return that.
711 void *ResultPtr = TheJIT->getPointerToGlobalIfAvailable(F);
712 if (ResultPtr) return ResultPtr;
714 // If this is an external function pointer, we can force the JIT to
715 // 'compile' it, which really just adds it to the map.
716 if (isNonGhostDeclaration(F) || F->hasAvailableExternallyLinkage())
717 return TheJIT->getPointerToFunction(F);
720 // Otherwise, we may need a to emit a stub, and, conservatively, we always do
721 // so. Note that it's possible to return null from getLazyFunctionStub in the
722 // case of a weak extern that fails to resolve.
723 return Resolver.getLazyFunctionStub(F);
// getPointerToGVIndirectSym - Emit (or reuse) an indirect symbol holding the
// resolved address of V.
// NOTE(review): the rest of the leading comment and the `return StubAddr;`
// line are missing from this dump — code kept byte-identical.
726 void *JITEmitter::getPointerToGVIndirectSym(GlobalValue *V, void *Reference) {
727 // Make sure GV is emitted first, and create a stub containing the fully
729 void *GVAddress = getPointerToGlobal(V, Reference, false);
730 void *StubAddr = Resolver.getGlobalValueIndirectSym(V, GVAddress);
// processDebugLoc - Record, once per distinct debug location, the PC at which
// a new source line starts; consumed by JITEventListeners via EmissionDetails.
// NOTE(review): missing from this dump: the lines that set NextLine.Loc and
// update PrevDL, plus closing braces; PrevDL is presumably a file-static —
// TODO confirm. Code kept byte-identical.
734 void JITEmitter::processDebugLoc(DebugLoc DL, bool BeforePrintingInsn) {
735 if (DL.isUnknown()) return;
736 if (!BeforePrintingInsn) return;
738 const LLVMContext &Context = EmissionDetails.MF->getFunction()->getContext();
// Only record locations with a scope, and only when the location changed.
740 if (DL.getScope(Context) != nullptr && PrevDL != DL) {
741 JITEvent_EmittedFunctionDetails::LineStart NextLine;
742 NextLine.Address = getCurrentPCValue();
744 EmissionDetails.LineStarts.push_back(NextLine);
750 static unsigned GetConstantPoolSizeInBytes(MachineConstantPool *MCP,
751 const DataLayout *TD) {
752 const std::vector<MachineConstantPoolEntry> &Constants = MCP->getConstants();
753 if (Constants.empty()) return 0;
756 for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
757 MachineConstantPoolEntry CPE = Constants[i];
758 unsigned AlignMask = CPE.getAlignment() - 1;
759 Size = (Size + AlignMask) & ~AlignMask;
760 Type *Ty = CPE.getType();
761 Size += TD->getTypeAllocSize(Ty);
// startFunction - Begin emitting machine code for F: grab a writable buffer
// from the memory manager, lay out the constant pool and jump tables, and
// reset per-function bookkeeping.
// NOTE(review): missing from this dump: the closing brace of the SizeEstimate
// `if`, the second argument line of startFunctionBody (ActualSize), an
// alignment call before emitConstantPool, and Relocations.clear() — code kept
// byte-identical, comments only.
766 void JITEmitter::startFunction(MachineFunction &F) {
767 DEBUG(dbgs() << "JIT: Starting CodeGen of Function "
768 << F.getName() << "\n");
770 uintptr_t ActualSize = 0;
771 // Set the memory writable, if it's not already
772 MemMgr->setMemoryWritable();
774 if (SizeEstimate > 0) {
775 // SizeEstimate will be non-zero on reallocation attempts.
776 ActualSize = SizeEstimate;
779 BufferBegin = CurBufferPtr = MemMgr->startFunctionBody(F.getFunction(),
781 BufferEnd = BufferBegin+ActualSize;
// Record the allocation so deallocateMemForFunction can free it later.
782 EmittedFunctions[F.getFunction()].FunctionBody = BufferBegin;
784 // Ensure the constant pool/jump table info is at least 4-byte aligned.
787 emitConstantPool(F.getConstantPool());
788 if (MachineJumpTableInfo *MJTI = F.getJumpTableInfo())
789 initJumpTableInfo(MJTI);
791 // About to start emitting the machine code for the function.
792 emitAlignment(std::max(F.getFunction()->getAlignment(), 8U));
// Publish the code start address before emission so self-references resolve.
793 TheJIT->updateGlobalMapping(F.getFunction(), CurBufferPtr);
794 EmittedFunctions[F.getFunction()].Code = CurBufferPtr;
796 MBBLocations.clear();
798 EmissionDetails.MF = &F;
799 EmissionDetails.LineStarts.clear();
// finishFunction - Finalize emission of F: emit jump tables, apply all
// pending relocations, refresh GOT entries, release the buffer back to the
// memory manager, notify listeners, and (in the visible dump) return whether
// emission must be retried with more memory.
// NOTE(review): this dump is missing many lines of this function — among
// them the `return true;` after each retryWithMoreMemory call, the FnStart
// declaration line (only its initializer at original line 817 is present),
// the second argument of getPointerToNamedFunction, several closing braces,
// the SizeEstimate reset, the Relocations.clear() call, and the tail of the
// hex-dump debug loop. Code kept byte-identical, comments only.
802 bool JITEmitter::finishFunction(MachineFunction &F) {
// Buffer exhausted during emission: give the memory back and retry bigger.
803 if (CurBufferPtr == BufferEnd) {
804 // We must call endFunctionBody before retrying, because
805 // deallocateMemForFunction requires it.
806 MemMgr->endFunctionBody(F.getFunction(), BufferBegin, CurBufferPtr);
807 retryWithMoreMemory(F);
811 if (MachineJumpTableInfo *MJTI = F.getJumpTableInfo())
812 emitJumpTableInfo(MJTI);
814 // FnStart is the start of the text, not the start of the constant pool and
815 // other per-function data.
817 (uint8_t *)TheJIT->getPointerToGlobalIfAvailable(F.getFunction());
819 // FnEnd is the end of the function's machine code.
820 uint8_t *FnEnd = CurBufferPtr;
822 if (!Relocations.empty()) {
823 CurFn = F.getFunction();
824 NumRelos += Relocations.size();
826 // Resolve the relocations to concrete pointers.
827 for (unsigned i = 0, e = Relocations.size(); i != e; ++i) {
828 MachineRelocation &MR = Relocations[i];
829 void *ResultPtr = nullptr;
830 if (!MR.letTargetResolve()) {
831 if (MR.isExternalSymbol()) {
832 ResultPtr = TheJIT->getPointerToNamedFunction(MR.getExternalSymbol(),
834 DEBUG(dbgs() << "JIT: Map \'" << MR.getExternalSymbol() << "\' to ["
835 << ResultPtr << "]\n");
837 // If the target REALLY wants a stub for this function, emit it now.
838 if (MR.mayNeedFarStub()) {
839 ResultPtr = Resolver.getExternalFunctionStub(ResultPtr);
841 } else if (MR.isGlobalValue()) {
842 ResultPtr = getPointerToGlobal(MR.getGlobalValue(),
843 BufferBegin+MR.getMachineCodeOffset(),
844 MR.mayNeedFarStub());
845 } else if (MR.isIndirectSymbol()) {
846 ResultPtr = getPointerToGVIndirectSym(
847 MR.getGlobalValue(), BufferBegin+MR.getMachineCodeOffset());
848 } else if (MR.isBasicBlock()) {
849 ResultPtr = (void*)getMachineBasicBlockAddress(MR.getBasicBlock());
850 } else if (MR.isConstantPoolIndex()) {
852 (void*)getConstantPoolEntryAddress(MR.getConstantPoolIndex());
// Last remaining kind: jump-table relocation.
854 assert(MR.isJumpTableIndex());
855 ResultPtr=(void*)getJumpTableEntryAddress(MR.getJumpTableIndex());
858 MR.setResultPointer(ResultPtr);
861 // if we are managing the GOT and the relocation wants an index,
863 if (MR.isGOTRelative() && MemMgr->isManagingGOT()) {
864 unsigned idx = Resolver.getGOTIndexForAddr(ResultPtr);
866 if (((void**)MemMgr->getGOTBase())[idx] != ResultPtr) {
867 DEBUG(dbgs() << "JIT: GOT was out of date for " << ResultPtr
868 << " pointing at " << ((void**)MemMgr->getGOTBase())[idx]
870 ((void**)MemMgr->getGOTBase())[idx] = ResultPtr;
// Hand the fully-annotated relocation list to the target to patch the code.
876 TheJIT->getJITInfo().relocate(BufferBegin, &Relocations[0],
877 Relocations.size(), MemMgr->getGOTBase());
880 // Update the GOT entry for F to point to the new code.
881 if (MemMgr->isManagingGOT()) {
882 unsigned idx = Resolver.getGOTIndexForAddr((void*)BufferBegin);
883 if (((void**)MemMgr->getGOTBase())[idx] != (void*)BufferBegin) {
884 DEBUG(dbgs() << "JIT: GOT was out of date for " << (void*)BufferBegin
885 << " pointing at " << ((void**)MemMgr->getGOTBase())[idx]
887 ((void**)MemMgr->getGOTBase())[idx] = (void*)BufferBegin;
891 // CurBufferPtr may have moved beyond FnEnd, due to memory allocation for
892 // global variables that were referenced in the relocations.
893 MemMgr->endFunctionBody(F.getFunction(), BufferBegin, CurBufferPtr);
// Allocation for relocation-referenced globals may itself have run out of
// space; retry the whole function if so.
895 if (CurBufferPtr == BufferEnd) {
896 retryWithMoreMemory(F);
899 // Now that we've succeeded in emitting the function, reset the
900 // SizeEstimate back down to zero.
904 BufferBegin = CurBufferPtr = nullptr;
905 NumBytes += FnEnd-FnStart;
907 // Invalidate the icache if necessary.
908 sys::Memory::InvalidateInstructionCache(FnStart, FnEnd-FnStart);
910 TheJIT->NotifyFunctionEmitted(*F.getFunction(), FnStart, FnEnd-FnStart,
913 // Reset the previous debug location.
916 DEBUG(dbgs() << "JIT: Finished CodeGen of [" << (void*)FnStart
917 << "] Function: " << F.getName()
918 << ": " << (FnEnd-FnStart) << " bytes of text, "
919 << Relocations.size() << " relocations\n");
922 ConstPoolAddresses.clear();
924 // Mark code region readable and executable if it's not so already.
925 MemMgr->setMemoryExecutable();
// Debug-only dump of the finished code: disassembly when a disassembler is
// built in, otherwise a raw 4-byte-group hex dump.
928 if (sys::hasDisassembler()) {
929 dbgs() << "JIT: Disassembled code:\n";
930 dbgs() << sys::disassembleBuffer(FnStart, FnEnd-FnStart,
933 dbgs() << "JIT: Binary code:\n";
934 uint8_t* q = FnStart;
935 for (int i = 0; q < FnEnd; q += 4, ++i) {
939 dbgs() << "JIT: " << (long)(q - FnStart) << ": ";
941 for (int j = 3; j >= 0; --j) {
945 dbgs() << (unsigned short)q[j];
963 void JITEmitter::retryWithMoreMemory(MachineFunction &F) {
964 DEBUG(dbgs() << "JIT: Ran out of space for native code. Reattempting.\n");
965 Relocations.clear(); // Clear the old relocations or we'll reapply them.
966 ConstPoolAddresses.clear();
968 deallocateMemForFunction(F.getFunction());
969 // Try again with at least twice as much free space.
970 SizeEstimate = (uintptr_t)(2 * (BufferEnd - BufferBegin));
972 for (MachineFunction::iterator MBB = F.begin(), E = F.end(); MBB != E; ++MBB){
973 if (MBB->hasAddressTaken())
974 TheJIT->clearPointerToBasicBlock(MBB->getBasicBlock());
978 /// deallocateMemForFunction - Deallocate all memory for the specified
979 /// function body. Also drop any references the function has to stubs.
980 /// May be called while the Function is being destroyed inside ~Value().
981 void JITEmitter::deallocateMemForFunction(const Function *F) {
982 ValueMap<const Function *, EmittedCode, EmittedFunctionConfig>::iterator
983 Emitted = EmittedFunctions.find(F);
984 if (Emitted != EmittedFunctions.end()) {
985 MemMgr->deallocateFunctionBody(Emitted->second.FunctionBody);
986 TheJIT->NotifyFreeingMachineCode(Emitted->second.Code);
988 EmittedFunctions.erase(Emitted);
993 void *JITEmitter::allocateSpace(uintptr_t Size, unsigned Alignment) {
995 return JITCodeEmitter::allocateSpace(Size, Alignment);
997 // create a new memory block if there is no active one.
998 // care must be taken so that BufferBegin is invalidated when a
1000 BufferBegin = CurBufferPtr = MemMgr->allocateSpace(Size, Alignment);
1001 BufferEnd = BufferBegin+Size;
1002 return CurBufferPtr;
1005 void *JITEmitter::allocateGlobal(uintptr_t Size, unsigned Alignment) {
1006 // Delegate this call through the memory manager.
1007 return MemMgr->allocateGlobal(Size, Alignment);
1010 void JITEmitter::emitConstantPool(MachineConstantPool *MCP) {
1011 if (TheJIT->getJITInfo().hasCustomConstantPool())
1014 const std::vector<MachineConstantPoolEntry> &Constants = MCP->getConstants();
1015 if (Constants.empty()) return;
1017 unsigned Size = GetConstantPoolSizeInBytes(MCP, TheJIT->getDataLayout());
1018 unsigned Align = MCP->getConstantPoolAlignment();
1019 ConstantPoolBase = allocateSpace(Size, Align);
1022 if (!ConstantPoolBase) return; // Buffer overflow.
1024 DEBUG(dbgs() << "JIT: Emitted constant pool at [" << ConstantPoolBase
1025 << "] (size: " << Size << ", alignment: " << Align << ")\n");
1027 // Initialize the memory for all of the constant pool entries.
1028 unsigned Offset = 0;
1029 for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
1030 MachineConstantPoolEntry CPE = Constants[i];
1031 unsigned AlignMask = CPE.getAlignment() - 1;
1032 Offset = (Offset + AlignMask) & ~AlignMask;
1034 uintptr_t CAddr = (uintptr_t)ConstantPoolBase + Offset;
1035 ConstPoolAddresses.push_back(CAddr);
1036 if (CPE.isMachineConstantPoolEntry()) {
1037 // FIXME: add support to lower machine constant pool values into bytes!
1038 report_fatal_error("Initialize memory with machine specific constant pool"
1039 "entry has not been implemented!");
1041 TheJIT->InitializeMemory(CPE.Val.ConstVal, (void*)CAddr);
1042 DEBUG(dbgs() << "JIT: CP" << i << " at [0x";
1043 dbgs().write_hex(CAddr) << "]\n");
1045 Type *Ty = CPE.Val.ConstVal->getType();
1046 Offset += TheJIT->getDataLayout()->getTypeAllocSize(Ty);
1050 void JITEmitter::initJumpTableInfo(MachineJumpTableInfo *MJTI) {
1051 if (TheJIT->getJITInfo().hasCustomJumpTables())
1053 if (MJTI->getEntryKind() == MachineJumpTableInfo::EK_Inline)
1056 const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
1057 if (JT.empty()) return;
1059 unsigned NumEntries = 0;
1060 for (unsigned i = 0, e = JT.size(); i != e; ++i)
1061 NumEntries += JT[i].MBBs.size();
1063 unsigned EntrySize = MJTI->getEntrySize(*TheJIT->getDataLayout());
1065 // Just allocate space for all the jump tables now. We will fix up the actual
1066 // MBB entries in the tables after we emit the code for each block, since then
1067 // we will know the final locations of the MBBs in memory.
1069 JumpTableBase = allocateSpace(NumEntries * EntrySize,
1070 MJTI->getEntryAlignment(*TheJIT->getDataLayout()));
1073 void JITEmitter::emitJumpTableInfo(MachineJumpTableInfo *MJTI) {
1074 if (TheJIT->getJITInfo().hasCustomJumpTables())
1077 const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
1078 if (JT.empty() || !JumpTableBase) return;
1081 switch (MJTI->getEntryKind()) {
1082 case MachineJumpTableInfo::EK_Inline:
1084 case MachineJumpTableInfo::EK_BlockAddress: {
1085 // EK_BlockAddress - Each entry is a plain address of block, e.g.:
1087 assert(MJTI->getEntrySize(*TheJIT->getDataLayout()) == sizeof(void*) &&
1090 // For each jump table, map each target in the jump table to the address of
1091 // an emitted MachineBasicBlock.
1092 intptr_t *SlotPtr = (intptr_t*)JumpTableBase;
1094 for (unsigned i = 0, e = JT.size(); i != e; ++i) {
1095 const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
1096 // Store the address of the basic block for this jump table slot in the
1097 // memory we allocated for the jump table in 'initJumpTableInfo'
1098 for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi)
1099 *SlotPtr++ = getMachineBasicBlockAddress(MBBs[mi]);
1104 case MachineJumpTableInfo::EK_Custom32:
1105 case MachineJumpTableInfo::EK_GPRel32BlockAddress:
1106 case MachineJumpTableInfo::EK_LabelDifference32: {
1107 assert(MJTI->getEntrySize(*TheJIT->getDataLayout()) == 4&&"Cross JIT'ing?");
1108 // For each jump table, place the offset from the beginning of the table
1109 // to the target address.
1110 int *SlotPtr = (int*)JumpTableBase;
1112 for (unsigned i = 0, e = JT.size(); i != e; ++i) {
1113 const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
1114 // Store the offset of the basic block for this jump table slot in the
1115 // memory we allocated for the jump table in 'initJumpTableInfo'
1116 uintptr_t Base = (uintptr_t)SlotPtr;
1117 for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi) {
1118 uintptr_t MBBAddr = getMachineBasicBlockAddress(MBBs[mi]);
1119 /// FIXME: USe EntryKind instead of magic "getPICJumpTableEntry" hook.
1120 *SlotPtr++ = TheJIT->getJITInfo().getPICJumpTableEntry(MBBAddr, Base);
1125 case MachineJumpTableInfo::EK_GPRel64BlockAddress:
1127 "JT Info emission not implemented for GPRel64BlockAddress yet.");
1131 void JITEmitter::startGVStub(const GlobalValue* GV,
1132 unsigned StubSize, unsigned Alignment) {
1133 SavedBufferBegin = BufferBegin;
1134 SavedBufferEnd = BufferEnd;
1135 SavedCurBufferPtr = CurBufferPtr;
1137 BufferBegin = CurBufferPtr = MemMgr->allocateStub(GV, StubSize, Alignment);
1138 BufferEnd = BufferBegin+StubSize+1;
1141 void JITEmitter::startGVStub(void *Buffer, unsigned StubSize) {
1142 SavedBufferBegin = BufferBegin;
1143 SavedBufferEnd = BufferEnd;
1144 SavedCurBufferPtr = CurBufferPtr;
1146 BufferBegin = CurBufferPtr = (uint8_t *)Buffer;
1147 BufferEnd = BufferBegin+StubSize+1;
1150 void JITEmitter::finishGVStub() {
1151 assert(CurBufferPtr != BufferEnd && "Stub overflowed allocated space.");
1152 NumBytes += getCurrentPCOffset();
1153 BufferBegin = SavedBufferBegin;
1154 BufferEnd = SavedBufferEnd;
1155 CurBufferPtr = SavedCurBufferPtr;
1158 void *JITEmitter::allocIndirectGV(const GlobalValue *GV,
1159 const uint8_t *Buffer, size_t Size,
1160 unsigned Alignment) {
1161 uint8_t *IndGV = MemMgr->allocateStub(GV, Size, Alignment);
1162 memcpy(IndGV, Buffer, Size);
1166 // getConstantPoolEntryAddress - Return the address of the 'ConstantNum' entry
1167 // in the constant pool that was last emitted with the 'emitConstantPool'
1170 uintptr_t JITEmitter::getConstantPoolEntryAddress(unsigned ConstantNum) const {
1171 assert(ConstantNum < ConstantPool->getConstants().size() &&
1172 "Invalid ConstantPoolIndex!");
1173 return ConstPoolAddresses[ConstantNum];
1176 // getJumpTableEntryAddress - Return the address of the JumpTable with index
1177 // 'Index' in the jumpp table that was last initialized with 'initJumpTableInfo'
1179 uintptr_t JITEmitter::getJumpTableEntryAddress(unsigned Index) const {
1180 const std::vector<MachineJumpTableEntry> &JT = JumpTable->getJumpTables();
1181 assert(Index < JT.size() && "Invalid jump table index!");
1183 unsigned EntrySize = JumpTable->getEntrySize(*TheJIT->getDataLayout());
1185 unsigned Offset = 0;
1186 for (unsigned i = 0; i < Index; ++i)
1187 Offset += JT[i].MBBs.size();
1189 Offset *= EntrySize;
1191 return (uintptr_t)((char *)JumpTableBase + Offset);
1194 void JITEmitter::EmittedFunctionConfig::onDelete(
1195 JITEmitter *Emitter, const Function *F) {
1196 Emitter->deallocateMemForFunction(F);
1198 void JITEmitter::EmittedFunctionConfig::onRAUW(
1199 JITEmitter *, const Function*, const Function*) {
1200 llvm_unreachable("The JIT doesn't know how to handle a"
1201 " RAUW on a value it has emitted.");
1205 //===----------------------------------------------------------------------===//
1206 // Public interface to this file
1207 //===----------------------------------------------------------------------===//
1209 JITCodeEmitter *JIT::createEmitter(JIT &jit, JITMemoryManager *JMM,
1210 TargetMachine &tm) {
1211 return new JITEmitter(jit, JMM, tm);
1214 // getPointerToFunctionOrStub - If the specified function has been
1215 // code-gen'd, return a pointer to the function. If not, compile it, or use
1216 // a stub to implement lazy compilation if available.
1218 void *JIT::getPointerToFunctionOrStub(Function *F) {
1219 // If we have already code generated the function, just return the address.
1220 if (void *Addr = getPointerToGlobalIfAvailable(F))
1223 // Get a stub if the target supports it.
1224 JITEmitter *JE = static_cast<JITEmitter*>(getCodeEmitter());
1225 return JE->getJITResolver().getLazyFunctionStub(F);
1228 void JIT::updateFunctionStub(Function *F) {
1229 // Get the empty stub we generated earlier.
1230 JITEmitter *JE = static_cast<JITEmitter*>(getCodeEmitter());
1231 void *Stub = JE->getJITResolver().getLazyFunctionStub(F);
1232 void *Addr = getPointerToGlobalIfAvailable(F);
1233 assert(Addr != Stub && "Function must have non-stub address to be updated.");
1235 // Tell the target jit info to rewrite the stub at the specified address,
1236 // rather than creating a new one.
1237 TargetJITInfo::StubLayout layout = getJITInfo().getStubLayout();
1238 JE->startGVStub(Stub, layout.Size);
1239 getJITInfo().emitFunctionStub(F, Addr, *getCodeEmitter());
1243 /// freeMachineCodeForFunction - release machine code memory for given Function.
1245 void JIT::freeMachineCodeForFunction(Function *F) {
1246 // Delete translation for this from the ExecutionEngine, so it will get
1247 // retranslated next time it is used.
1248 updateGlobalMapping(F, nullptr);
1250 // Free the actual memory for the function body and related stuff.
1251 static_cast<JITEmitter*>(JCE)->deallocateMemForFunction(F);