From 654cce1e5f5f4cadf3d6a05fe964d30d6120ffbc Mon Sep 17 00:00:00 2001
From: Christopher Dykes
Date: Thu, 15 Dec 2016 19:59:27 -0800
Subject: [PATCH] Refactor stats to use the same type for indexes

Summary:
This refactors folly/stats/* to use a single type for indexes rather than
`size_t`, `int`, `unsigned int`, `uint64_t` and `int64_t` depending on where
you looked. This also has the result of getting MSVC to not complain about
implicit sign conversions and implicit truncations.

Reviewed By: simpkins

Differential Revision: D4282174

fbshipit-source-id: 8529be34dce8ad18bc64395330bbdf2cd7305be4
---
 folly/stats/Histogram-defs.h            | 34 ++++++-------
 folly/stats/Histogram.h                 | 47 +++++++++--------
 folly/stats/Instantiations.cpp          | 13 +++--
 folly/stats/MultiLevelTimeSeries-defs.h |  4 +-
 folly/stats/MultiLevelTimeSeries.h      | 39 +++++++-------
 folly/stats/TimeseriesHistogram-defs.h  | 31 +++++------
 folly/stats/TimeseriesHistogram.h       | 68 +++++++++++++------------
 folly/test/TimeseriesHistogramTest.cpp  | 22 ++++----
 8 files changed, 132 insertions(+), 126 deletions(-)

diff --git a/folly/stats/Histogram-defs.h b/folly/stats/Histogram-defs.h
index 8d4af44f..5cc62f89 100644
--- a/folly/stats/Histogram-defs.h
+++ b/folly/stats/Histogram-defs.h
@@ -37,26 +37,25 @@ HistogramBuckets::HistogramBuckets(ValueType bucketSize,
   // Deliberately make this a signed type, because we're about
   // to compare it against max-min, which is nominally signed, too.
-  int numBuckets = (max - min) / bucketSize;
+  int64_t numBuckets = int64_t((max - min) / bucketSize);
   // Round up if the bucket size does not fit evenly
   if (numBuckets * bucketSize < max - min) {
     ++numBuckets;
   }
   // Add 2 for the extra 'below min' and 'above max' buckets
   numBuckets += 2;
-  buckets_.assign(numBuckets, defaultBucket);
+  buckets_.assign(size_t(numBuckets), defaultBucket);
 }
 
 template
-unsigned int HistogramBuckets::getBucketIdx(
-    ValueType value) const {
+size_t HistogramBuckets::getBucketIdx(ValueType value) const {
   if (value < min_) {
     return 0;
   } else if (value >= max_) {
     return buckets_.size() - 1;
   } else {
     // the 1 is the below_min bucket
-    return ((value - min_) / bucketSize_) + 1;
+    return size_t(((value - min_) / bucketSize_) + 1);
   }
 }
 
@@ -65,7 +64,7 @@ template
 uint64_t HistogramBuckets::computeTotalCount(
     CountFn countFromBucket) const {
   uint64_t count = 0;
-  for (unsigned int n = 0; n < buckets_.size(); ++n) {
+  for (size_t n = 0; n < buckets_.size(); ++n) {
     count += countFromBucket(const_cast(buckets_[n]));
   }
   return count;
@@ -73,19 +72,20 @@ uint64_t HistogramBuckets::computeTotalCount(
 
 template
 template
-unsigned int HistogramBuckets::getPercentileBucketIdx(
+size_t HistogramBuckets::getPercentileBucketIdx(
     double pct,
     CountFn countFromBucket,
-    double* lowPct, double* highPct) const {
+    double* lowPct,
+    double* highPct) const {
   CHECK_GE(pct, 0.0);
   CHECK_LE(pct, 1.0);
 
-  unsigned int numBuckets = buckets_.size();
+  auto numBuckets = buckets_.size();
 
   // Compute the counts in each bucket
   std::vector counts(numBuckets);
   uint64_t totalCount = 0;
-  for (unsigned int n = 0; n < numBuckets; ++n) {
+  for (size_t n = 0; n < numBuckets; ++n) {
     uint64_t bucketCount = countFromBucket(const_cast(buckets_[n]));
     counts[n] = bucketCount;
@@ -114,7 +114,7 @@ unsigned int HistogramBuckets::getPercentileBucketIdx(
   double prevPct = 0.0;
   double curPct = 0.0;
   uint64_t curCount = 0;
-  unsigned int idx;
+  size_t idx;
   for (idx = 0; idx < numBuckets; ++idx) {
     if (counts[idx] == 0) {
       // skip empty buckets
@@ -149,8 +149,8 @@ T HistogramBuckets::getPercentileEstimate(
// Find the bucket where this percentile falls double lowPct; double highPct; - unsigned int bucketIdx = getPercentileBucketIdx(pct, countFromBucket, - &lowPct, &highPct); + size_t bucketIdx = + getPercentileBucketIdx(pct, countFromBucket, &lowPct, &highPct); if (lowPct == 0.0 && highPct == 0.0) { // Invalid range -- the buckets must all be empty // Return the default value for ValueType. @@ -235,12 +235,12 @@ T HistogramBuckets::getPercentileEstimate( // Assume that the data points lower than the median of this bucket // are uniformly distributed between low and avg double pctThroughSection = (pct - lowPct) / (medianPct - lowPct); - return low + ((avg - low) * pctThroughSection); + return T(low + ((avg - low) * pctThroughSection)); } else { // Assume that the data points greater than the median of this bucket // are uniformly distributed between avg and high double pctThroughSection = (pct - medianPct) / (highPct - medianPct); - return avg + ((high - avg) * pctThroughSection); + return T(avg + ((high - avg) * pctThroughSection)); } } @@ -254,7 +254,7 @@ std::string Histogram::debugString() const { ", bucketSize: ", buckets_.getBucketSize(), ", min: ", buckets_.getMin(), ", max: ", buckets_.getMax(), "\n"); - for (unsigned int i = 0; i < buckets_.getNumBuckets(); ++i) { + for (size_t i = 0; i < buckets_.getNumBuckets(); ++i) { folly::toAppend(" ", buckets_.getBucketMin(i), ": ", buckets_.getByIndex(i).count, "\n", &ret); @@ -265,7 +265,7 @@ std::string Histogram::debugString() const { template void Histogram::toTSV(std::ostream& out, bool skipEmptyBuckets) const { - for (unsigned int i = 0; i < buckets_.getNumBuckets(); ++i) { + for (size_t i = 0; i < buckets_.getNumBuckets(); ++i) { // Do not output empty buckets in order to reduce data file size. if (skipEmptyBuckets && getBucketByIndex(i).count == 0) { continue; diff --git a/folly/stats/Histogram.h b/folly/stats/Histogram.h index f6fbea71..5981e019 100644 --- a/folly/stats/Histogram.h +++ b/folly/stats/Histogram.h @@ -17,11 +17,12 @@ #pragma once #include +#include #include #include +#include #include #include -#include #include #include @@ -77,12 +78,12 @@ class HistogramBuckets { * plus 2 extra buckets, one for handling values less than min, and one for * values greater than max. */ - unsigned int getNumBuckets() const { + size_t getNumBuckets() const { return buckets_.size(); } /* Returns the bucket index into which the given value would fall. */ - unsigned int getBucketIdx(ValueType value) const; + size_t getBucketIdx(ValueType value) const; /* Returns the bucket for the specified value */ BucketType& getByValue(ValueType value) { @@ -100,12 +101,12 @@ class HistogramBuckets { * Note that index 0 is the bucket for all values less than the specified * minimum. Index 1 is the first bucket in the specified bucket range. */ - BucketType& getByIndex(unsigned int idx) { + BucketType& getByIndex(size_t idx) { return buckets_[idx]; } /* Returns the bucket at the specified index. */ - const BucketType& getByIndex(unsigned int idx) const { + const BucketType& getByIndex(size_t idx) const { return buckets_[idx]; } @@ -116,7 +117,7 @@ class HistogramBuckets { * [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall * max is smaller than bucketMin + bucketSize. 
*/ - ValueType getBucketMin(unsigned int idx) const { + ValueType getBucketMin(size_t idx) const { if (idx == 0) { return std::numeric_limits::min(); } @@ -134,7 +135,7 @@ class HistogramBuckets { * [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall * max is smaller than bucketMin + bucketSize. */ - ValueType getBucketMax(unsigned int idx) const { + ValueType getBucketMax(size_t idx) const { if (idx == buckets_.size() - 1) { return std::numeric_limits::max(); } @@ -171,10 +172,11 @@ class HistogramBuckets { * data point. */ template - unsigned int getPercentileBucketIdx(double pct, - CountFn countFromBucket, - double* lowPct = nullptr, - double* highPct = nullptr) const; + size_t getPercentileBucketIdx( + double pct, + CountFn countFromBucket, + double* lowPct = nullptr, + double* highPct = nullptr) const; /** * Estimate the value at the specified percentile. @@ -302,7 +304,7 @@ class Histogram { /* Remove all data points from the histogram */ void clear() { - for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) { + for (size_t i = 0; i < buckets_.getNumBuckets(); i++) { buckets_.getByIndex(i).clear(); } } @@ -318,7 +320,7 @@ class Histogram { throw std::invalid_argument("Cannot subtract input histogram."); } - for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) { + for (size_t i = 0; i < buckets_.getNumBuckets(); i++) { buckets_.getByIndex(i) -= hist.buckets_.getByIndex(i); } } @@ -334,7 +336,7 @@ class Histogram { throw std::invalid_argument("Cannot merge from input histogram."); } - for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) { + for (size_t i = 0; i < buckets_.getNumBuckets(); i++) { buckets_.getByIndex(i) += hist.buckets_.getByIndex(i); } } @@ -349,7 +351,7 @@ class Histogram { throw std::invalid_argument("Cannot copy from input histogram."); } - for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) { + for (size_t i = 0; i < buckets_.getNumBuckets(); i++) { buckets_.getByIndex(i) = hist.buckets_.getByIndex(i); } } @@ -367,12 +369,12 @@ class Histogram { return buckets_.getMax(); } /* Returns the number of buckets */ - unsigned int getNumBuckets() const { + size_t getNumBuckets() const { return buckets_.getNumBuckets(); } /* Returns the specified bucket (for reading only!) */ - const Bucket& getBucketByIndex(int idx) const { + const Bucket& getBucketByIndex(size_t idx) const { return buckets_.getByIndex(idx); } @@ -383,7 +385,7 @@ class Histogram { * [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall * max is smaller than bucketMin + bucketSize. */ - ValueType getBucketMin(unsigned int idx) const { + ValueType getBucketMin(size_t idx) const { return buckets_.getBucketMin(idx); } @@ -394,7 +396,7 @@ class Histogram { * [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall * max is smaller than bucketMin + bucketSize. */ - ValueType getBucketMax(unsigned int idx) const { + ValueType getBucketMax(size_t idx) const { return buckets_.getBucketMax(idx); } @@ -414,9 +416,10 @@ class Histogram { * The lowest and highest percentile data points in returned bucket will be * returned in the lowPct and highPct arguments, if they are non-NULL. */ - unsigned int getPercentileBucketIdx(double pct, - double* lowPct = nullptr, - double* highPct = nullptr) const { + size_t getPercentileBucketIdx( + double pct, + double* lowPct = nullptr, + double* highPct = nullptr) const { // We unfortunately can't use lambdas here yet; // Some users of this code are still built with gcc-4.4. 
CountFromBucket countFn; diff --git a/folly/stats/Instantiations.cpp b/folly/stats/Instantiations.cpp index 74c7ee02..754ae5c6 100644 --- a/folly/stats/Instantiations.cpp +++ b/folly/stats/Instantiations.cpp @@ -46,13 +46,12 @@ template class TimeseriesHistogram; // are implemented using template methods. Instantiate the default versions of // these methods too, so anyone using them won't also need to explicitly // include Histogram-defs.h -template unsigned int detail::HistogramBuckets< - int64_t, Histogram::Bucket>:: - getPercentileBucketIdx::CountFromBucket>( - double pct, - Histogram::CountFromBucket countFromBucket, - double* lowPct, - double* highPct) const; +template size_t detail::HistogramBuckets::Bucket>:: + getPercentileBucketIdx::CountFromBucket>( + double pct, + Histogram::CountFromBucket countFromBucket, + double* lowPct, + double* highPct) const; template int64_t detail::HistogramBuckets::Bucket> ::getPercentileEstimate::CountFromBucket, Histogram::AvgFromBucket>( diff --git a/folly/stats/MultiLevelTimeSeries-defs.h b/folly/stats/MultiLevelTimeSeries-defs.h index 02694a53..5043f2b6 100644 --- a/folly/stats/MultiLevelTimeSeries-defs.h +++ b/folly/stats/MultiLevelTimeSeries-defs.h @@ -74,7 +74,7 @@ template void MultiLevelTimeSeries::addValue( TimePoint now, const ValueType& val, - int64_t times) { + uint64_t times) { addValueAggregated(now, val * times, times); } @@ -82,7 +82,7 @@ template void MultiLevelTimeSeries::addValueAggregated( TimePoint now, const ValueType& total, - int64_t nsamples) { + uint64_t nsamples) { if (cachedTime_ != now) { flush(); cachedTime_ = now; diff --git a/folly/stats/MultiLevelTimeSeries.h b/folly/stats/MultiLevelTimeSeries.h index c1c082ed..edd731a6 100644 --- a/folly/stats/MultiLevelTimeSeries.h +++ b/folly/stats/MultiLevelTimeSeries.h @@ -98,9 +98,8 @@ class MultiLevelTimeSeries { * data. Otherwise you may be reading stale data if update() or flush() has * not been called recently. */ - const Level& getLevel(int level) const { - CHECK(level >= 0); - CHECK_LT(size_t(level), levels_.size()); + const Level& getLevel(size_t level) const { + CHECK_LT(level, levels_.size()); return levels_[level]; } @@ -158,7 +157,7 @@ class MultiLevelTimeSeries { * data. Otherwise you may be reading stale data if update() or flush() has * not been called recently. */ - ValueType sum(int level) const { + ValueType sum(size_t level) const { return getLevel(level).sum(); } @@ -173,8 +172,8 @@ class MultiLevelTimeSeries { * data. Otherwise you may be reading stale data if update() or flush() has * not been called recently. */ - template - ReturnType avg(int level) const { + template + ReturnType avg(size_t level) const { return getLevel(level).template avg(); } @@ -187,7 +186,7 @@ class MultiLevelTimeSeries { * not been called recently. */ template - ReturnType rate(int level) const { + ReturnType rate(size_t level) const { return getLevel(level).template rate(); } @@ -198,7 +197,7 @@ class MultiLevelTimeSeries { * data. Otherwise you may be reading stale data if update() or flush() has * not been called recently. */ - int64_t count(int level) const { + uint64_t count(size_t level) const { return getLevel(level).count(); } @@ -210,14 +209,14 @@ class MultiLevelTimeSeries { * not been called recently. */ template - ReturnType countRate(int level) const { + ReturnType countRate(size_t level) const { return getLevel(level).template countRate(); } /* * Return the sum of all the data points currently tracked at this level. 
* - * This method is identical to sum(int level) above but takes in the + * This method is identical to sum(size_t level) above but takes in the * duration that the user is interested in querying as the parameter. * * Note: you should generally call update() or flush() before accessing the @@ -232,7 +231,7 @@ class MultiLevelTimeSeries { * Return the average (sum / count) of all the data points currently tracked * at this level. * - * This method is identical to avg(int level) above but takes in the + * This method is identical to avg(size_t level) above but takes in the * duration that the user is interested in querying as the parameter. * * Note: you should generally call update() or flush() before accessing the @@ -248,7 +247,7 @@ class MultiLevelTimeSeries { * Return the rate (sum divided by elaspsed time) of the all data points * currently tracked at this level. * - * This method is identical to rate(int level) above but takes in the + * This method is identical to rate(size_t level) above but takes in the * duration that the user is interested in querying as the parameter. * * Note: you should generally call update() or flush() before accessing the @@ -263,21 +262,21 @@ class MultiLevelTimeSeries { /* * Return the number of data points currently tracked at this level. * - * This method is identical to count(int level) above but takes in the + * This method is identical to count(size_t level) above but takes in the * duration that the user is interested in querying as the parameter. * * Note: you should generally call update() or flush() before accessing the * data. Otherwise you may be reading stale data if update() or flush() has * not been called recently. */ - int64_t count(Duration duration) const { + uint64_t count(Duration duration) const { return getLevelByDuration(duration).count(); } /* * Return the count divided by the elapsed time tracked at this level. * - * This method is identical to countRate(int level) above but takes in the + * This method is identical to countRate(size_t level) above but takes in the * duration that the user is interested in querying as the parameter. * * Note: you should generally call update() or flush() before accessing the @@ -352,7 +351,7 @@ class MultiLevelTimeSeries { * data. Otherwise you may be reading stale data if update() or flush() has * not been called recently. */ - int64_t count(TimePoint start, TimePoint end) const { + uint64_t count(TimePoint start, TimePoint end) const { return getLevel(start).count(start, end); } @@ -374,14 +373,14 @@ class MultiLevelTimeSeries { /* * Adds the value 'val' at time 'now' to all levels. */ - void addValue(TimePoint now, const ValueType& val, int64_t times); + void addValue(TimePoint now, const ValueType& val, uint64_t times); /* * Adds the value 'total' at time 'now' to all levels as the sum of * 'nsamples' samples. 
*/ void - addValueAggregated(TimePoint now, const ValueType& total, int64_t nsamples); + addValueAggregated(TimePoint now, const ValueType& total, uint64_t nsamples); /* * Update all the levels to the specified time, doing all the necessary @@ -417,11 +416,11 @@ class MultiLevelTimeSeries { void addValue(Duration now, const ValueType& value) { addValue(TimePoint(now), value); } - void addValue(Duration now, const ValueType& value, int64_t times) { + void addValue(Duration now, const ValueType& value, uint64_t times) { addValue(TimePoint(now), value, times); } void - addValueAggregated(Duration now, const ValueType& total, int64_t nsamples) { + addValueAggregated(Duration now, const ValueType& total, uint64_t nsamples) { addValueAggregated(TimePoint(now), total, nsamples); } diff --git a/folly/stats/TimeseriesHistogram-defs.h b/folly/stats/TimeseriesHistogram-defs.h index 9375e5c4..08c44e96 100644 --- a/folly/stats/TimeseriesHistogram-defs.h +++ b/folly/stats/TimeseriesHistogram-defs.h @@ -45,7 +45,7 @@ template void TimeseriesHistogram::addValue( TimePoint now, const ValueType& value, - int64_t times) { + uint64_t times) { buckets_.getByValue(value).addValue(now, value, times); maybeHandleSingleUniqueValue(value); } @@ -59,7 +59,7 @@ void TimeseriesHistogram::addValues( CHECK_EQ(hist.getBucketSize(), getBucketSize()); CHECK_EQ(hist.getNumBuckets(), getNumBuckets()); - for (unsigned int n = 0; n < hist.getNumBuckets(); ++n) { + for (size_t n = 0; n < hist.getNumBuckets(); ++n) { const typename folly::Histogram::Bucket& histBucket = hist.getBucketByIndex(n); Bucket& myBucket = buckets_.getByIndex(n); @@ -86,7 +86,7 @@ void TimeseriesHistogram::maybeHandleSingleUniqueValue( } template -T TimeseriesHistogram::getPercentileEstimate(double pct, int level) +T TimeseriesHistogram::getPercentileEstimate(double pct, size_t level) const { if (singleUniqueValue_) { return firstValue_; @@ -111,13 +111,14 @@ T TimeseriesHistogram::getPercentileEstimate( } template -int TimeseriesHistogram::getPercentileBucketIdx(double pct, int level) - const { +size_t TimeseriesHistogram::getPercentileBucketIdx( + double pct, + size_t level) const { return buckets_.getPercentileBucketIdx(pct / 100.0, CountFromLevel(level)); } template -int TimeseriesHistogram::getPercentileBucketIdx( +size_t TimeseriesHistogram::getPercentileBucketIdx( double pct, TimePoint start, TimePoint end) const { @@ -140,7 +141,7 @@ void TimeseriesHistogram::update(TimePoint now) { } template -std::string TimeseriesHistogram::getString(int level) const { +std::string TimeseriesHistogram::getString(size_t level) const { std::string result; for (size_t i = 0; i < buckets_.getNumBuckets(); i++) { @@ -178,9 +179,9 @@ std::string TimeseriesHistogram::getString( template void TimeseriesHistogram::computeAvgData( ValueType* total, - int64_t* nsamples, - int level) const { - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + uint64_t* nsamples, + size_t level) const { + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { const auto& levelObj = buckets_.getByIndex(b).getLevel(level); *total += levelObj.sum(); *nsamples += levelObj.count(); @@ -190,10 +191,10 @@ void TimeseriesHistogram::computeAvgData( template void TimeseriesHistogram::computeAvgData( ValueType* total, - int64_t* nsamples, + uint64_t* nsamples, TimePoint start, TimePoint end) const { - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { const auto& levelObj = buckets_.getByIndex(b).getLevel(start); 
*total += levelObj.sum(start, end); *nsamples += levelObj.count(start, end); @@ -204,8 +205,8 @@ template void TimeseriesHistogram::computeRateData( ValueType* total, Duration* elapsed, - int level) const { - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + size_t level) const { + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { const auto& levelObj = buckets_.getByIndex(b).getLevel(level); *total += levelObj.sum(); *elapsed = std::max(*elapsed, levelObj.elapsed()); @@ -218,7 +219,7 @@ void TimeseriesHistogram::computeRateData( Duration* elapsed, TimePoint start, TimePoint end) const { - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { const auto& level = buckets_.getByIndex(b).getLevel(start); *total += level.sum(start, end); *elapsed = std::max(*elapsed, level.elapsed(start, end)); diff --git a/folly/stats/TimeseriesHistogram.h b/folly/stats/TimeseriesHistogram.h index 3ffa8703..43af8676 100644 --- a/folly/stats/TimeseriesHistogram.h +++ b/folly/stats/TimeseriesHistogram.h @@ -93,40 +93,42 @@ class TimeseriesHistogram { ValueType getMax() const { return buckets_.getMax(); } /* Return the number of levels of the Timeseries object in each bucket */ - int getNumLevels() const { + size_t getNumLevels() const { return buckets_.getByIndex(0).numLevels(); } /* Return the number of buckets */ - int getNumBuckets() const { return buckets_.getNumBuckets(); } + size_t getNumBuckets() const { + return buckets_.getNumBuckets(); + } /* * Return the threshold of the bucket for the given index in range * [0..numBuckets). The bucket will have range [thresh, thresh + bucketSize) * or [thresh, max), whichever is shorter. */ - ValueType getBucketMin(int bucketIdx) const { + ValueType getBucketMin(size_t bucketIdx) const { return buckets_.getBucketMin(bucketIdx); } /* Return the actual timeseries in the given bucket (for reading only!) */ - const ContainerType& getBucket(int bucketIdx) const { + const ContainerType& getBucket(size_t bucketIdx) const { return buckets_.getByIndex(bucketIdx); } /* Total count of values at the given timeseries level (all buckets). */ - int64_t count(int level) const { - int64_t total = 0; - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + uint64_t count(size_t level) const { + uint64_t total = 0; + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { total += buckets_.getByIndex(b).count(level); } return total; } /* Total count of values added during the given interval (all buckets). */ - int64_t count(TimePoint start, TimePoint end) const { - int64_t total = 0; - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + uint64_t count(TimePoint start, TimePoint end) const { + uint64_t total = 0; + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { total += buckets_.getByIndex(b).count(start, end); } return total; @@ -135,7 +137,7 @@ class TimeseriesHistogram { /* Total sum of values at the given timeseries level (all buckets). */ ValueType sum(int level) const { ValueType total = ValueType(); - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { total += buckets_.getByIndex(b).sum(level); } return total; @@ -144,7 +146,7 @@ class TimeseriesHistogram { /* Total sum of values added during the given interval (all buckets). 
*/ ValueType sum(TimePoint start, TimePoint end) const { ValueType total = ValueType(); - for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) { + for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) { total += buckets_.getByIndex(b).sum(start, end); } return total; @@ -154,7 +156,7 @@ class TimeseriesHistogram { template ReturnType avg(int level) const { auto total = ValueType(); - int64_t nsamples = 0; + uint64_t nsamples = 0; computeAvgData(&total, &nsamples, level); return folly::detail::avgHelper(total, nsamples); } @@ -163,7 +165,7 @@ class TimeseriesHistogram { template ReturnType avg(TimePoint start, TimePoint end) const { auto total = ValueType(); - int64_t nsamples = 0; + uint64_t nsamples = 0; computeAvgData(&total, &nsamples, start, end); return folly::detail::avgHelper(total, nsamples); } @@ -173,7 +175,7 @@ class TimeseriesHistogram { * This is the sum of all values divided by the time interval (in seconds). */ template - ReturnType rate(int level) const { + ReturnType rate(size_t level) const { auto total = ValueType(); Duration elapsed(0); computeRateData(&total, &elapsed, level); @@ -207,7 +209,7 @@ class TimeseriesHistogram { /* Add a value into the histogram with timestamp 'now' */ void addValue(TimePoint now, const ValueType& value); /* Add a value the given number of times with timestamp 'now' */ - void addValue(TimePoint now, const ValueType& value, int64_t times); + void addValue(TimePoint now, const ValueType& value, uint64_t times); /* * Add all of the values from the specified histogram. @@ -241,11 +243,11 @@ class TimeseriesHistogram { * average and the known bound is equal to the distance between the average * and the unknown bound. */ - ValueType getPercentileEstimate(double pct, int level) const; + ValueType getPercentileEstimate(double pct, size_t level) const; /* * Return an estimate of the value at the given percentile in the histogram * in the given historical interval. Please see the documentation for - * getPercentileEstimate(int pct, int level) for the explanation of the + * getPercentileEstimate(double pct, size_t level) for the explanation of the * estimation algorithm. */ ValueType getPercentileEstimate(double pct, TimePoint start, TimePoint end) @@ -256,20 +258,22 @@ class TimeseriesHistogram { * given timeseries level). This index can then be used to retrieve either * the bucket threshold, or other data from inside the bucket. */ - int getPercentileBucketIdx(double pct, int level) const; + size_t getPercentileBucketIdx(double pct, size_t level) const; /* * Return the bucket index that the given percentile falls into (in the * given historical interval). This index can then be used to retrieve either * the bucket threshold, or other data from inside the bucket. */ - int getPercentileBucketIdx(double pct, TimePoint start, TimePoint end) const; + size_t getPercentileBucketIdx(double pct, TimePoint start, TimePoint end) + const; /* Get the bucket threshold for the bucket containing the given pct. */ - int getPercentileBucketMin(double pct, int level) const { + ValueType getPercentileBucketMin(double pct, size_t level) const { return getBucketMin(getPercentileBucketIdx(pct, level)); } /* Get the bucket threshold for the bucket containing the given pct. 
*/ - int getPercentileBucketMin(double pct, TimePoint start, TimePoint end) const { + ValueType getPercentileBucketMin(double pct, TimePoint start, TimePoint end) + const { return getBucketMin(getPercentileBucketIdx(pct, start, end)); } @@ -278,11 +282,11 @@ class TimeseriesHistogram { * Format is: BUCKET [',' BUCKET ...] * Where: BUCKET == bucketMin ':' count ':' avg */ - std::string getString(int level) const; + std::string getString(size_t level) const; /* * Print out serialized data for all buckets in the historical interval. - * For format, please see getString(int level). + * For format, please see getString(size_t level). */ std::string getString(TimePoint start, TimePoint end) const; @@ -299,7 +303,7 @@ class TimeseriesHistogram { void addValue(Duration now, const ValueType& value) { addValue(TimePoint(now), value); } - void addValue(Duration now, const ValueType& value, int64_t times) { + void addValue(Duration now, const ValueType& value, uint64_t times) { addValue(TimePoint(now), value, times); } void addValues(Duration now, const folly::Histogram& values) { @@ -309,14 +313,14 @@ class TimeseriesHistogram { private: typedef ContainerType Bucket; struct CountFromLevel { - explicit CountFromLevel(int level) : level_(level) {} + explicit CountFromLevel(size_t level) : level_(level) {} uint64_t operator()(const ContainerType& bucket) const { return bucket.count(level_); } private: - int level_; + size_t level_; }; struct CountFromInterval { explicit CountFromInterval(TimePoint start, TimePoint end) @@ -332,14 +336,14 @@ class TimeseriesHistogram { }; struct AvgFromLevel { - explicit AvgFromLevel(int level) : level_(level) {} + explicit AvgFromLevel(size_t level) : level_(level) {} ValueType operator()(const ContainerType& bucket) const { return bucket.template avg(level_); } private: - int level_; + size_t level_; }; template @@ -364,13 +368,13 @@ class TimeseriesHistogram { */ void maybeHandleSingleUniqueValue(const ValueType& value); - void computeAvgData(ValueType* total, int64_t* nsamples, int level) const; + void computeAvgData(ValueType* total, uint64_t* nsamples, size_t level) const; void computeAvgData( ValueType* total, - int64_t* nsamples, + uint64_t* nsamples, TimePoint start, TimePoint end) const; - void computeRateData(ValueType* total, Duration* elapsed, int level) const; + void computeRateData(ValueType* total, Duration* elapsed, size_t level) const; void computeRateData( ValueType* total, Duration* elapsed, diff --git a/folly/test/TimeseriesHistogramTest.cpp b/folly/test/TimeseriesHistogramTest.cpp index cb6d134e..b4b76f55 100644 --- a/folly/test/TimeseriesHistogramTest.cpp +++ b/folly/test/TimeseriesHistogramTest.cpp @@ -77,7 +77,7 @@ TEST(TimeseriesHistogram, Percentile) { EXPECT_EQ(10, h.getMin()); EXPECT_EQ(110, h.getMax()); - for (int i = 0; i < h.getNumBuckets(); ++i) { + for (size_t i = 0; i < h.getNumBuckets(); ++i) { EXPECT_EQ(4, h.getBucket(i).numLevels()); } @@ -133,7 +133,7 @@ TEST(TimeseriesHistogram, String) { CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels()); - for (int level = 0; level < hist.getNumLevels(); ++level) { + for (size_t level = 0; level < hist.getNumLevels(); ++level) { EXPECT_EQ(kStringValues1[level], hist.getString(level)); } @@ -150,7 +150,7 @@ TEST(TimeseriesHistogram, String) { CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels()); - for (int level = 0; level < hist.getNumLevels(); ++level) { + for (size_t level = 0; level < hist.getNumLevels(); ++level) { EXPECT_EQ(kStringValues2[level], hist.getString(level)); } } @@ -172,7 
+172,7 @@ TEST(TimeseriesHistogram, Clear) { // check clearing hist.clear(); - for (int b = 0; b < hist.getNumBuckets(); ++b) { + for (size_t b = 0; b < hist.getNumBuckets(); ++b) { EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::MINUTE)); EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE)); EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::HOUR)); @@ -217,7 +217,7 @@ TEST(TimeseriesHistogram, Basic) { EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME)); } - for (int b = 1; (b + 1) < hist.getNumBuckets(); ++b) { + for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) { EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE)); EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE)); EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR)); @@ -296,11 +296,11 @@ TEST(TimeseriesHistogram, Basic) { EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME)); } - for (int b = 1; (b + 1) < hist.getNumBuckets(); ++b) { - EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE)); - EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE)); - EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR)); - EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME)); + for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) { + EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE)); + EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE)); + EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR)); + EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME)); } EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE)); EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count( @@ -346,7 +346,7 @@ TEST(TimeseriesHistogram, Basic) { hist.getBucket(hist.getNumBuckets() - 1).count( IntMTMHTS::ALLTIME)); - for (int b = 1; (b + 1) < hist.getNumBuckets(); ++b) { + for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) { EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE)); EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE)); EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR)); -- 2.34.1
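For context (not part of the patch): a minimal, hypothetical sketch of the index-type mismatch the summary describes. The ToyBuckets class below is a stand-in, not folly's HistogramBuckets; it only shows that iterating with a signed int against std::vector::size() draws MSVC's signed/unsigned mismatch warning (C4018), while indexing with size_t end to end, as this diff does throughout, compiles cleanly.

// Illustrative only: a toy bucket container, not folly's HistogramBuckets.
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

struct Bucket {
  uint64_t count = 0;
};

class ToyBuckets {
 public:
  explicit ToyBuckets(size_t n) : buckets_(n) {}

  // Returning size_t matches std::vector's own size type, so callers can
  // index without implicit sign conversions or truncations.
  size_t getNumBuckets() const {
    return buckets_.size();
  }

  Bucket& getByIndex(size_t idx) {
    return buckets_[idx];
  }

  uint64_t totalCount() const {
    uint64_t total = 0;
    // Declaring 'i' as int here would compare signed against unsigned
    // (MSVC warning C4018); size_t keeps the comparison like-for-like.
    for (size_t i = 0; i < buckets_.size(); ++i) {
      total += buckets_[i].count;
    }
    return total;
  }

 private:
  std::vector<Bucket> buckets_;
};

int main() {
  ToyBuckets buckets(4);
  buckets.getByIndex(2).count += 3;
  std::cout << buckets.totalCount() << "\n"; // prints 3
  return 0;
}

Where a signed intermediate really is wanted, as in the numBuckets rounding in Histogram-defs.h above, the patch keeps it in int64_t and converts explicitly with size_t(...) at the container boundary, so any narrowing is visible rather than implicit.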