X-Git-Url: http://demsky.eecs.uci.edu/git/?a=blobdiff_plain;f=lib%2FSupport%2FSmallPtrSet.cpp;h=a80e095ec35a927a7c1a3adb81a71a9b771539be;hb=f15492fd7292563049ace40be9a2e0048e64b8f0;hp=3326923115545dbbd0fa5c7af18bf0e005997c62;hpb=bf31b85ea2a3bf17f3934b7e139b7475a84429fa;p=oota-llvm.git

diff --git a/lib/Support/SmallPtrSet.cpp b/lib/Support/SmallPtrSet.cpp
index 33269231155..a80e095ec35 100644
--- a/lib/Support/SmallPtrSet.cpp
+++ b/lib/Support/SmallPtrSet.cpp
@@ -2,8 +2,8 @@
 //
 //                     The LLVM Compiler Infrastructure
 //
-// This file was developed by Chris Lattner and is distributed under
-// the University of Illinois Open Source License. See LICENSE.TXT for details.
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
 //
 //===----------------------------------------------------------------------===//
 //
@@ -13,15 +13,31 @@
 //===----------------------------------------------------------------------===//
 
 #include "llvm/ADT/SmallPtrSet.h"
+#include "llvm/ADT/DenseMapInfo.h"
 #include "llvm/Support/MathExtras.h"
+#include <algorithm>
 #include <cstdlib>
 
 using namespace llvm;
 
-bool SmallPtrSetImpl::insert(void *Ptr) {
+void SmallPtrSetImplBase::shrink_and_clear() {
+  assert(!isSmall() && "Can't shrink a small set!");
+  free(CurArray);
+
+  // Reduce the number of buckets.
+  CurArraySize = NumElements > 16 ? 1 << (Log2_32_Ceil(NumElements) + 1) : 32;
+  NumElements = NumTombstones = 0;
+
+  // Install the new array.  Clear all the buckets to empty.
+  CurArray = (const void**)malloc(sizeof(void*) * CurArraySize);
+  assert(CurArray && "Failed to allocate memory?");
+  memset(CurArray, -1, CurArraySize*sizeof(void*));
+}
+
+bool SmallPtrSetImplBase::insert_imp(const void * Ptr) {
   if (isSmall()) {
     // Check to see if it is already in the set.
-    for (void **APtr = SmallArray, **E = SmallArray+NumElements;
+    for (const void **APtr = SmallArray, **E = SmallArray+NumElements;
          APtr != E; ++APtr)
       if (*APtr == Ptr)
         return false;
@@ -34,13 +50,17 @@ bool SmallPtrSetImpl::insert(void *Ptr) {
     // Otherwise, hit the big set case, which will call grow.
   }
 
-  // If more than 3/4 of the array is full, grow.
-  if (NumElements*4 >= CurArraySize*3 ||
-      CurArraySize-(NumElements+NumTombstones) < CurArraySize/8)
-    Grow();
+  if (NumElements*4 >= CurArraySize*3) {
+    // If more than 3/4 of the array is full, grow.
+    Grow(CurArraySize < 64 ? 128 : CurArraySize*2);
+  } else if (CurArraySize-(NumElements+NumTombstones) < CurArraySize/8) {
+    // If fewer of 1/8 of the array is empty (meaning that many are filled with
+    // tombstones), rehash.
+    Grow(CurArraySize);
+  }
 
   // Okay, we know we have space.  Find a hash bucket.
-  void **Bucket = const_cast<void**>(FindBucketFor(Ptr));
+  const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
   if (*Bucket == Ptr) return false; // Already inserted, good.
 
   // Otherwise, insert it!
@@ -51,10 +71,10 @@ bool SmallPtrSetImpl::insert(void *Ptr) {
   return true;
 }
 
-bool SmallPtrSetImpl::erase(void *Ptr) {
+bool SmallPtrSetImplBase::erase_imp(const void * Ptr) {
   if (isSmall()) {
     // Check to see if it is in the set.
-    for (void **APtr = SmallArray, **E = SmallArray+NumElements;
+    for (const void **APtr = SmallArray, **E = SmallArray+NumElements;
          APtr != E; ++APtr)
       if (*APtr == Ptr) {
         // If it is in the set, replace this element.
@@ -78,12 +98,12 @@ bool SmallPtrSetImpl::erase(void *Ptr) {
   return true;
 }
 
-void * const *SmallPtrSetImpl::FindBucketFor(void *Ptr) const {
-  unsigned Bucket = Hash(Ptr);
+const void * const *SmallPtrSetImplBase::FindBucketFor(const void *Ptr) const {
+  unsigned Bucket = DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize-1);
   unsigned ArraySize = CurArraySize;
   unsigned ProbeAmt = 1;
-  void *const *Array = CurArray;
-  void *const *Tombstone = 0;
+  const void *const *Array = CurArray;
+  const void *const *Tombstone = nullptr;
   while (1) {
     // Found Ptr's bucket?
     if (Array[Bucket] == Ptr)
@@ -107,40 +127,35 @@ void * const *SmallPtrSetImpl::FindBucketFor(void *Ptr) const {
 
 /// Grow - Allocate a larger backing store for the buckets and move it over.
 ///
-void SmallPtrSetImpl::Grow() {
+void SmallPtrSetImplBase::Grow(unsigned NewSize) {
   // Allocate at twice as many buckets, but at least 128.
   unsigned OldSize = CurArraySize;
-  unsigned NewSize = OldSize < 64 ? 128 : OldSize*2;
 
-  void **OldBuckets = CurArray;
+  const void **OldBuckets = CurArray;
   bool WasSmall = isSmall();
 
   // Install the new array.  Clear all the buckets to empty.
-  CurArray = (void**)malloc(sizeof(void*) * (NewSize+1));
+  CurArray = (const void**)malloc(sizeof(void*) * NewSize);
   assert(CurArray && "Failed to allocate memory?");
   CurArraySize = NewSize;
   memset(CurArray, -1, NewSize*sizeof(void*));
 
-  // The end pointer, always valid, is set to a valid element to help the
-  // iterator.
-  CurArray[NewSize] = 0;
-
   // Copy over all the elements.
   if (WasSmall) {
     // Small sets store their elements in order.
-    for (void **BucketPtr = OldBuckets, **E = OldBuckets+NumElements;
+    for (const void **BucketPtr = OldBuckets, **E = OldBuckets+NumElements;
          BucketPtr != E; ++BucketPtr) {
-      void *Elt = *BucketPtr;
-      *const_cast<void**>(FindBucketFor(Elt)) = Elt;
+      const void *Elt = *BucketPtr;
+      *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
     }
   } else {
     // Copy over all valid entries.
-    for (void **BucketPtr = OldBuckets, **E = OldBuckets+OldSize;
+    for (const void **BucketPtr = OldBuckets, **E = OldBuckets+OldSize;
          BucketPtr != E; ++BucketPtr) {
       // Copy over the element if it is valid.
-      void *Elt = *BucketPtr;
+      const void *Elt = *BucketPtr;
       if (Elt != getTombstoneMarker() && Elt != getEmptyMarker())
-        *const_cast<void**>(FindBucketFor(Elt)) = Elt;
+        *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
     }
 
     free(OldBuckets);
@@ -148,13 +163,16 @@ void SmallPtrSetImpl::Grow() {
   }
 }
 
-SmallPtrSetImpl::SmallPtrSetImpl(const SmallPtrSetImpl& that) {
+SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
+                                         const SmallPtrSetImplBase& that) {
+  SmallArray = SmallStorage;
+
   // If we're becoming small, prepare to insert into our stack space
   if (that.isSmall()) {
-    CurArray = &SmallArray[0];
+    CurArray = SmallArray;
     // Otherwise, allocate new heap space (unless we were the same size)
   } else {
-    CurArray = (void**)malloc(sizeof(void*) * (that.CurArraySize+1));
+    CurArray = (const void**)malloc(sizeof(void*) * that.CurArraySize);
     assert(CurArray && "Failed to allocate memory?");
   }
 
@@ -162,30 +180,65 @@ SmallPtrSetImpl::SmallPtrSetImpl(const SmallPtrSetImpl& that) {
   CurArraySize = that.CurArraySize;
 
   // Copy over the contents from the other set
-  memcpy(CurArray, that.CurArray, sizeof(void*)*(CurArraySize+1));
+  memcpy(CurArray, that.CurArray, sizeof(void*)*CurArraySize);
 
   NumElements = that.NumElements;
   NumTombstones = that.NumTombstones;
 }
 
+SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
+                                         unsigned SmallSize,
+                                         SmallPtrSetImplBase &&that) {
+  SmallArray = SmallStorage;
+
+  // Copy over the basic members.
+  CurArraySize = that.CurArraySize;
+  NumElements = that.NumElements;
+  NumTombstones = that.NumTombstones;
+
+  // When small, just copy into our small buffer.
+  if (that.isSmall()) {
+    CurArray = SmallArray;
+    memcpy(CurArray, that.CurArray, sizeof(void *) * CurArraySize);
+    return;
+  }
+
+  // Otherwise, we steal the large memory allocation and no copy is needed.
+  CurArray = that.CurArray;
+  that.CurArray = that.SmallArray;
+
+  // Make the "that" object small and empty.
+  that.CurArraySize = SmallSize;
+  assert(that.CurArray == that.SmallArray);
+  that.NumElements = 0;
+  that.NumTombstones = 0;
+}
+
 /// CopyFrom - implement operator= from a smallptrset that has the same pointer
 /// type, but may have a different small size.
-void SmallPtrSetImpl::CopyFrom(const SmallPtrSetImpl &RHS) {
+void SmallPtrSetImplBase::CopyFrom(const SmallPtrSetImplBase &RHS) {
+  assert(&RHS != this && "Self-copy should be handled by the caller.");
+
   if (isSmall() && RHS.isSmall())
     assert(CurArraySize == RHS.CurArraySize &&
            "Cannot assign sets with different small sizes");
-
+
   // If we're becoming small, prepare to insert into our stack space
   if (RHS.isSmall()) {
     if (!isSmall())
       free(CurArray);
-    CurArray = &SmallArray[0];
+    CurArray = SmallArray;
     // Otherwise, allocate new heap space (unless we were the same size)
   } else if (CurArraySize != RHS.CurArraySize) {
     if (isSmall())
-      CurArray = (void**)malloc(sizeof(void*) * (RHS.CurArraySize+1));
-    else
-      CurArray = (void**)realloc(CurArray, sizeof(void*)*(RHS.CurArraySize+1));
+      CurArray = (const void**)malloc(sizeof(void*) * RHS.CurArraySize);
+    else {
+      const void **T = (const void**)realloc(CurArray,
+                                             sizeof(void*) * RHS.CurArraySize);
+      if (!T)
+        free(CurArray);
+      CurArray = T;
+    }
     assert(CurArray && "Failed to allocate memory?");
   }
 
@@ -193,13 +246,91 @@ void SmallPtrSetImpl::CopyFrom(const SmallPtrSetImpl &RHS) {
   CurArraySize = RHS.CurArraySize;
 
   // Copy over the contents from the other set
-  memcpy(CurArray, RHS.CurArray, sizeof(void*)*(CurArraySize+1));
+  memcpy(CurArray, RHS.CurArray, sizeof(void*)*CurArraySize);
 
   NumElements = RHS.NumElements;
   NumTombstones = RHS.NumTombstones;
 }
 
-SmallPtrSetImpl::~SmallPtrSetImpl() {
+void SmallPtrSetImplBase::MoveFrom(unsigned SmallSize,
+                                   SmallPtrSetImplBase &&RHS) {
+  assert(&RHS != this && "Self-move should be handled by the caller.");
+
+  if (!isSmall())
+    free(CurArray);
+
+  if (RHS.isSmall()) {
+    // Copy a small RHS rather than moving.
+    CurArray = SmallArray;
+    memcpy(CurArray, RHS.CurArray, sizeof(void*)*RHS.CurArraySize);
+  } else {
+    CurArray = RHS.CurArray;
+    RHS.CurArray = RHS.SmallArray;
+  }
+
+  // Copy the rest of the trivial members.
+  CurArraySize = RHS.CurArraySize;
+  NumElements = RHS.NumElements;
+  NumTombstones = RHS.NumTombstones;
+
+  // Make the RHS small and empty.
+  RHS.CurArraySize = SmallSize;
+  assert(RHS.CurArray == RHS.SmallArray);
+  RHS.NumElements = 0;
+  RHS.NumTombstones = 0;
+}
+
+void SmallPtrSetImplBase::swap(SmallPtrSetImplBase &RHS) {
+  if (this == &RHS) return;
+
+  // We can only avoid copying elements if neither set is small.
+  if (!this->isSmall() && !RHS.isSmall()) {
+    std::swap(this->CurArray, RHS.CurArray);
+    std::swap(this->CurArraySize, RHS.CurArraySize);
+    std::swap(this->NumElements, RHS.NumElements);
+    std::swap(this->NumTombstones, RHS.NumTombstones);
+    return;
+  }
+
+  // FIXME: From here on we assume that both sets have the same small size.
+
+  // If only RHS is small, copy the small elements into LHS and move the pointer
+  // from LHS to RHS.
+  if (!this->isSmall() && RHS.isSmall()) {
+    std::copy(RHS.SmallArray, RHS.SmallArray+RHS.CurArraySize,
+              this->SmallArray);
+    std::swap(this->NumElements, RHS.NumElements);
+    std::swap(this->CurArraySize, RHS.CurArraySize);
+    RHS.CurArray = this->CurArray;
+    RHS.NumTombstones = this->NumTombstones;
+    this->CurArray = this->SmallArray;
+    this->NumTombstones = 0;
+    return;
+  }
+
+  // If only LHS is small, copy the small elements into RHS and move the pointer
+  // from RHS to LHS.
+  if (this->isSmall() && !RHS.isSmall()) {
+    std::copy(this->SmallArray, this->SmallArray+this->CurArraySize,
+              RHS.SmallArray);
+    std::swap(RHS.NumElements, this->NumElements);
+    std::swap(RHS.CurArraySize, this->CurArraySize);
+    this->CurArray = RHS.CurArray;
+    this->NumTombstones = RHS.NumTombstones;
+    RHS.CurArray = RHS.SmallArray;
+    RHS.NumTombstones = 0;
+    return;
+  }
+
+  // Both a small, just swap the small elements.
+  assert(this->isSmall() && RHS.isSmall());
+  assert(this->CurArraySize == RHS.CurArraySize);
+  std::swap_ranges(this->SmallArray, this->SmallArray+this->CurArraySize,
+                   RHS.SmallArray);
+  std::swap(this->NumElements, RHS.NumElements);
+}
+
+SmallPtrSetImplBase::~SmallPtrSetImplBase() {
   if (!isSmall())
     free(CurArray);
 }
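
Not part of the patch: a standalone restatement of the resize policy introduced
by the insert_imp() hunk above. Once the table is at least 3/4 full it grows to
twice the current size (at least 128); otherwise, when fewer than 1/8 of the
buckets are free because the rest hold tombstones, it rehashes at the current
size. The helper name chooseNewSize and its parameter list are illustrative,
not LLVM API.

// Illustrative sketch of the resize policy used by insert_imp() above.
// chooseNewSize is a hypothetical helper, not part of LLVM.
// Returns 0 when no call to Grow() is needed; otherwise the NewSize to request.
unsigned chooseNewSize(unsigned CurArraySize, unsigned NumElements,
                       unsigned NumTombstones) {
  if (NumElements * 4 >= CurArraySize * 3)
    return CurArraySize < 64 ? 128 : CurArraySize * 2; // >= 3/4 full: grow.
  if (CurArraySize - (NumElements + NumTombstones) < CurArraySize / 8)
    return CurArraySize; // almost no empty buckets left: rehash in place.
  return 0;              // enough free buckets, nothing to do.
}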
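
Also not part of the patch: a minimal usage sketch of llvm::SmallPtrSet, the
container this file implements, assuming the LLVM headers are on the include
path. The return type of insert() has changed across LLVM releases, so the
sketch avoids relying on it.

// Illustrative usage of llvm::SmallPtrSet; assumes LLVM headers are available.
#include "llvm/ADT/SmallPtrSet.h"

void example() {
  int A, B, C;
  llvm::SmallPtrSet<int *, 2> S; // two pointers of inline "small" storage
  S.insert(&A);
  S.insert(&B);                  // still stored in the small buffer, in order
  S.insert(&C);                  // overflows the small buffer; insert_imp()
                                 // calls Grow() and switches to the hash table
  if (S.count(&B))
    S.erase(&B);                 // in the large representation this leaves a tombstone

  llvm::SmallPtrSet<int *, 2> T;
  T.swap(S);                     // uses the SmallPtrSetImplBase::swap() added above
}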