size_t currentBucket;
TimePoint currentBucketStart;
TimePoint nextBucketStart;
- getBucketInfo(latestTime_, &currentBucket,
- &currentBucketStart, &nextBucketStart);
+ getBucketInfo(
+ latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);
// Update latestTime_
latestTime_ = now;
size_t currentBucket;
TimePoint currentBucketStart;
TimePoint nextBucketStart;
- getBucketInfo(latestTime_, &currentBucket,
- &currentBucketStart, &nextBucketStart);
+ getBucketInfo(
+ latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);
// Subtract 1 duration from the start of the next bucket to find the
// earliest possible data point we could be tracking.
TimePoint intervalStart = std::max(start, bucketStart);
TimePoint intervalEnd = std::min(end, nextBucketStart);
return input * (intervalEnd - intervalStart) /
- (nextBucketStart - bucketStart);
+ (nextBucketStart - bucketStart);
}
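// [Editor's note] A minimal standalone sketch (not folly code) of the
// proportional-overlap rule above: a bucket spanning
// [bucketStart, nextBucketStart) is credited with the fraction of its
// width that overlaps the queried interval [start, end).
#include <algorithm>
#include <cassert>
#include <cstdint>

double scaleToInterval(
    double input,
    int64_t start,
    int64_t end,
    int64_t bucketStart,
    int64_t nextBucketStart) {
  int64_t intervalStart = std::max(start, bucketStart);
  int64_t intervalEnd = std::min(end, nextBucketStart);
  // overlap / bucketWidth, applied to the bucket's value
  return input * (intervalEnd - intervalStart) /
      (nextBucketStart - bucketStart);
}

int main() {
  // A bucket covering [10, 20) holds 100; a query for [15, 20) overlaps
  // half of the bucket and is credited with half of its value.
  assert(scaleToInterval(100.0, 15, 20, 10, 20) == 50.0);
  return 0;
}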
template <typename VT, typename CT>
* Note that you generally should call update() before calling avg(), to
* make sure you are not reading stale data.
*/
- template <typename ReturnType=double>
+ template <typename ReturnType = double>
ReturnType avg() const {
return total_.template avg<ReturnType>();
}
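// [Editor's note] A hypothetical usage sketch for the accessor above,
// mirroring how the tests later in this diff construct and feed a series.
#include <folly/stats/BucketedTimeSeries.h>
#include <folly/stats/BucketedTimeSeries-defs.h>
#include <chrono>

void avgUsageSketch() {
  using std::chrono::seconds;
  folly::BucketedTimeSeries<int64_t> ts(10, seconds(60));
  ts.addValue(seconds(0), 3);
  ts.addValue(seconds(1), 4);
  double mean = ts.avg(); // 3.5; ReturnType defaults to double
  int64_t truncated = ts.avg<int64_t>(); // 3; an integral average truncates
  (void)mean;
  (void)truncated;
}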
TimePoint latestTime_; // time of last update()
Duration duration_; // total duration ("window length") of the time series
- Bucket total_; // sum and count of everything in time series
- std::vector<Bucket> buckets_; // actual buckets of values
+ Bucket total_; // sum and count of everything in time series
+ std::vector<Bucket> buckets_; // actual buckets of values
};
} // folly
namespace detail {
template <typename T, typename BucketT>
-HistogramBuckets<T, BucketT>::HistogramBuckets(ValueType bucketSize,
- ValueType min,
- ValueType max,
- const BucketType& defaultBucket)
- : bucketSize_(bucketSize),
- min_(min),
- max_(max) {
+HistogramBuckets<T, BucketT>::HistogramBuckets(
+ ValueType bucketSize,
+ ValueType min,
+ ValueType max,
+ const BucketType& defaultBucket)
+ : bucketSize_(bucketSize), min_(min), max_(max) {
CHECK_GT(bucketSize_, ValueType(0));
CHECK_LT(min_, max_);
uint64_t totalCount = 0;
for (size_t n = 0; n < numBuckets; ++n) {
uint64_t bucketCount =
- countFromBucket(const_cast<const BucketType&>(buckets_[n]));
+ countFromBucket(const_cast<const BucketType&>(buckets_[n]));
counts[n] = bucketCount;
totalCount += bucketCount;
}
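// [Editor's note] An illustrative sketch of the counting pattern above,
// with a plain struct standing in for folly's bucket type; countFromBucket
// is any callable that extracts the count from one bucket.
#include <cassert>
#include <cstdint>
#include <vector>

struct SimpleBucket {
  int64_t sum = 0;
  uint64_t count = 0;
};

int main() {
  std::vector<SimpleBucket> buckets = {{10, 2}, {30, 3}, {0, 0}};
  auto countFromBucket = [](const SimpleBucket& b) { return b.count; };
  uint64_t totalCount = 0;
  for (const auto& bucket : buckets) {
    totalCount += countFromBucket(bucket);
  }
  assert(totalCount == 5);
  return 0;
}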
double pct,
CountFn countFromBucket,
AvgFn avgFromBucket) const {
-
// Find the bucket where this percentile falls
double lowPct;
double highPct;
// (Note that if the counter keeps being decremented, eventually it will
// wrap and become small enough that we won't detect this any more, and
// we will return bogus information.)
- LOG(ERROR) << "invalid average value in histogram minimum bucket: " <<
- avg << " > " << min_ << ": possible integer overflow?";
+ LOG(ERROR) << "invalid average value in histogram minimum bucket: " << avg
+ << " > " << min_ << ": possible integer overflow?";
return getBucketMin(bucketIdx);
}
// For the below-min bucket, just assume the lowest value ever seen is
if (avg < max_) {
// Most likely this means integer overflow occurred. See the comments
// above in the minimum case.
- LOG(ERROR) << "invalid average value in histogram maximum bucket: " <<
- avg << " < " << max_ << ": possible integer overflow?";
+ LOG(ERROR) << "invalid average value in histogram maximum bucket: " << avg
+ << " < " << max_ << ": possible integer overflow?";
return getBucketMax(bucketIdx);
}
// Similarly for the above-max bucket, assume the highest value ever seen
// Most likely this means an integer overflow occurred.
// See the comments above. Return the midpoint between low and high
// as a best guess, since avg is meaningless.
- LOG(ERROR) << "invalid average value in histogram bucket: " <<
- avg << " not in range [" << low << ", " << high <<
- "]: possible integer overflow?";
+ LOG(ERROR) << "invalid average value in histogram bucket: " << avg
+ << " not in range [" << low << ", " << high
+ << "]: possible integer overflow?";
return (low + high) / 2;
}
}
} // detail
-
template <typename T>
std::string Histogram<T>::debugString() const {
std::string ret = folly::to<std::string>(
- "num buckets: ", buckets_.getNumBuckets(),
- ", bucketSize: ", buckets_.getBucketSize(),
- ", min: ", buckets_.getMin(), ", max: ", buckets_.getMax(), "\n");
+ "num buckets: ",
+ buckets_.getNumBuckets(),
+ ", bucketSize: ",
+ buckets_.getBucketSize(),
+ ", min: ",
+ buckets_.getMin(),
+ ", max: ",
+ buckets_.getMax(),
+ "\n");
for (size_t i = 0; i < buckets_.getNumBuckets(); ++i) {
- folly::toAppend(" ", buckets_.getBucketMin(i), ": ",
- buckets_.getByIndex(i).count, "\n",
- &ret);
+ folly::toAppend(
+ " ",
+ buckets_.getBucketMin(i),
+ ": ",
+ buckets_.getByIndex(i).count,
+ "\n",
+ &ret);
}
return ret;
continue;
}
const auto& bucket = getBucketByIndex(i);
- out << getBucketMin(i) << '\t' << getBucketMax(i) << '\t'
- << bucket.count << '\t' << bucket.sum << '\n';
+ out << getBucketMin(i) << '\t' << getBucketMax(i) << '\t' << bucket.count
+ << '\t' << bucket.sum << '\n';
}
}
Histogram<int64_t>::CountFromBucket countFromBucket,
double* lowPct,
double* highPct) const;
-template int64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>
- ::getPercentileEstimate<Histogram<int64_t>::CountFromBucket,
- Histogram<int64_t>::AvgFromBucket>(
- double pct,
- Histogram<int64_t>::CountFromBucket countFromBucket,
- Histogram<int64_t>::AvgFromBucket avgFromBucket) const;
-template uint64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>
- ::computeTotalCount<Histogram<int64_t>::CountFromBucket>(
- Histogram<int64_t>::CountFromBucket countFromBucket) const;
+template int64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
+ getPercentileEstimate<
+ Histogram<int64_t>::CountFromBucket,
+ Histogram<int64_t>::AvgFromBucket>(
+ double pct,
+ Histogram<int64_t>::CountFromBucket countFromBucket,
+ Histogram<int64_t>::AvgFromBucket avgFromBucket) const;
+template uint64_t
+detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
+ computeTotalCount<Histogram<int64_t>::CountFromBucket>(
+ Histogram<int64_t>::CountFromBucket countFromBucket) const;
} // folly
*
* (max - min) must be larger than or equal to bucketSize.
*/
- HistogramBuckets(ValueType bucketSize, ValueType min, ValueType max,
- const BucketType& defaultBucket);
+ HistogramBuckets(
+ ValueType bucketSize,
+ ValueType min,
+ ValueType max,
+ const BucketType& defaultBucket);
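// [Editor's note] A sketch of the bucket-count arithmetic this constructor
// implies (the "[10, 109], 12 buckets including above and below" case in
// the tests later in this diff): the range is split into
// (max - min) / bucketSize buckets, rounded up if bucketSize does not
// divide evenly, plus one bucket below min and one at or above max.
#include <cassert>
#include <cstddef>

std::size_t numBucketsSketch(int bucketSize, int min, int max) {
  std::size_t n = (max - min) / bucketSize;
  if (n * bucketSize < static_cast<std::size_t>(max - min)) {
    ++n; // a partial last bucket still needs a slot
  }
  return n + 2; // below-min and above-max buckets
}

int main() {
  assert(numBucketsSketch(10, 10, 110) == 12);
  assert(numBucketsSketch(8, 8, 120) == 16);
  return 0;
}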
/* Returns the bucket size of each bucket in the histogram. */
ValueType getBucketSize() const {
* percentage of the data points in the histogram are less than N.
*/
template <typename CountFn, typename AvgFn>
- ValueType getPercentileEstimate(double pct,
- CountFn countFromBucket,
- AvgFn avgFromBucket) const;
+ ValueType getPercentileEstimate(
+ double pct,
+ CountFn countFromBucket,
+ AvgFn avgFromBucket) const;
/*
* Iterator access to the buckets.
} // detail
-
/*
* A basic histogram class.
*
typedef detail::Bucket<T> Bucket;
Histogram(ValueType bucketSize, ValueType min, ValueType max)
- : buckets_(bucketSize, min, max, Bucket()) {}
+ : buckets_(bucketSize, min, max, Bucket()) {}
/* Add a data point to the histogram */
void addValue(ValueType value) FOLLY_DISABLE_UNDEFINED_BEHAVIOR_SANITIZER(
}
/* Subtract another histogram data from the histogram */
- void subtract(const Histogram &hist) {
+ void subtract(const Histogram& hist) {
// the two histogram bucket definitions must match to support
// subtract.
- if (getBucketSize() != hist.getBucketSize() ||
- getMin() != hist.getMin() ||
- getMax() != hist.getMax() ||
- getNumBuckets() != hist.getNumBuckets() ) {
+ if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
+ getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
throw std::invalid_argument("Cannot subtract input histogram.");
}
}
/* Merge two histogram data together */
- void merge(const Histogram &hist) {
+ void merge(const Histogram& hist) {
// the two histogram bucket definitions must match to support
// a merge.
- if (getBucketSize() != hist.getBucketSize() ||
- getMin() != hist.getMin() ||
- getMax() != hist.getMax() ||
- getNumBuckets() != hist.getNumBuckets() ) {
+ if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
+ getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
throw std::invalid_argument("Cannot merge from input histogram.");
}
}
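// [Editor's note] A usage sketch for the compatibility check above:
// merge() accepts only a histogram with the identical bucket definition,
// and otherwise throws std::invalid_argument.
#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>
#include <stdexcept>

void mergeSketch() {
  folly::Histogram<int64_t> a(10, 0, 100); // bucketSize 10, range [0, 100)
  folly::Histogram<int64_t> b(10, 0, 100); // same bucket definition
  a.addValue(5);
  b.addValue(15);
  a.merge(b); // OK: bucket layouts match

  folly::Histogram<int64_t> c(20, 0, 100); // different bucketSize
  try {
    a.merge(c);
  } catch (const std::invalid_argument&) {
    // incompatible bucket definitions are rejected
  }
}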
/* Copy bucket values from another histogram */
- void copy(const Histogram &hist) {
+ void copy(const Histogram& hist) {
// the two histogram bucket definitions must match
- if (getBucketSize() != hist.getBucketSize() ||
- getMin() != hist.getMin() ||
- getMax() != hist.getMax() ||
- getNumBuckets() != hist.getNumBuckets() ) {
+ if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
+ getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
throw std::invalid_argument("Cannot copy from input histogram.");
}
template <typename VT, typename CT>
void MultiLevelTimeSeries<VT, CT>::clear() {
- for (auto & level : levels_) {
+ for (auto& level : levels_) {
level.clear();
}
cachedCount_ = 0;
}
-} // folly
+} // namespace folly
/*
* Return the number of levels tracked by MultiLevelTimeSeries.
*/
- size_t numLevels() const { return levels_.size(); }
+ size_t numLevels() const {
+ return levels_.size();
+ }
/*
* Get the BucketedTimeSeries backing the specified level.
for (size_t n = 0; n < hist.getNumBuckets(); ++n) {
const typename folly::Histogram<ValueType>::Bucket& histBucket =
- hist.getBucketByIndex(n);
+ hist.getBucketByIndex(n);
Bucket& myBucket = buckets_.getByIndex(n);
myBucket.addValueAggregated(now, histBucket.sum, histBucket.count);
}
return firstValue_;
}
- return buckets_.getPercentileEstimate(pct / 100.0, CountFromLevel(level),
- AvgFromLevel(level));
+ return buckets_.getPercentileEstimate(
+ pct / 100.0, CountFromLevel(level), AvgFromLevel(level));
}
template <typename T, typename CT, typename C>
return firstValue_;
}
- return buckets_.getPercentileEstimate(pct / 100.0,
- CountFromInterval(start, end),
- AvgFromInterval<T>(start, end));
+ return buckets_.getPercentileEstimate(
+ pct / 100.0,
+ CountFromInterval(start, end),
+ AvgFromInterval<T>(start, end));
}
template <typename T, typename CT, typename C>
double pct,
TimePoint start,
TimePoint end) const {
- return buckets_.getPercentileBucketIdx(pct / 100.0,
- CountFromInterval(start, end));
+ return buckets_.getPercentileBucketIdx(
+ pct / 100.0, CountFromInterval(start, end));
}
template <typename T, typename CT, typename C>
toAppend(",", &result);
}
const ContainerType& cont = buckets_.getByIndex(i);
- toAppend(buckets_.getBucketMin(i),
- ":", cont.count(level),
- ":", cont.template avg<ValueType>(level), &result);
+ toAppend(
+ buckets_.getBucketMin(i),
+ ":",
+ cont.count(level),
+ ":",
+ cont.template avg<ValueType>(level),
+ &result);
}
return result;
toAppend(",", &result);
}
const ContainerType& cont = buckets_.getByIndex(i);
- toAppend(buckets_.getBucketMin(i),
- ":", cont.count(start, end),
- ":", cont.avg(start, end), &result);
+ toAppend(
+ buckets_.getBucketMin(i),
+ ":",
+ cont.count(start, end),
+ ":",
+ cont.avg(start, end),
+ &result);
}
return result;
}
}
-}  // namespace folly
+} // namespace folly
#pragma once
-#include <string>
#include <folly/stats/Histogram.h>
#include <folly/stats/MultiLevelTimeSeries.h>
+#include <string>
namespace folly {
class C = folly::MultiLevelTimeSeries<T, CT>>
class TimeseriesHistogram {
private:
- // NOTE: T must be equivalent to _signed_ numeric type for our math.
- static_assert(std::numeric_limits<T>::is_signed, "");
+ // NOTE: T must be equivalent to _signed_ numeric type for our math.
+ static_assert(std::numeric_limits<T>::is_signed, "");
public:
// Values to be inserted into container
* @param defaultContainer a pre-initialized timeseries with the desired
* number of levels and their durations.
*/
- TimeseriesHistogram(ValueType bucketSize, ValueType min, ValueType max,
- const ContainerType& defaultContainer);
+ TimeseriesHistogram(
+ ValueType bucketSize,
+ ValueType min,
+ ValueType max,
+ const ContainerType& defaultContainer);
/* Return the bucket size of each bucket in the histogram. */
- ValueType getBucketSize() const { return buckets_.getBucketSize(); }
+ ValueType getBucketSize() const {
+ return buckets_.getBucketSize();
+ }
/* Return the min value at which bucketing begins. */
- ValueType getMin() const { return buckets_.getMin(); }
+ ValueType getMin() const {
+ return buckets_.getMin();
+ }
/* Return the max value at which bucketing ends. */
- ValueType getMax() const { return buckets_.getMax(); }
+ ValueType getMax() const {
+ return buckets_.getMax();
+ }
/* Return the number of levels of the Timeseries object in each bucket */
size_t getNumLevels() const {
bool singleUniqueValue_;
ValueType firstValue_;
};
-} // folly
+} // namespace folly
#include <cstdint>
#include <type_traits>
-namespace folly { namespace detail {
+namespace folly {
+namespace detail {
/*
* Helper function to compute the average, given a specified input type and
// precision.
template <typename ReturnType>
ReturnType avgHelper(long double sum, uint64_t count) {
- if (count == 0) { return ReturnType(0); }
+ if (count == 0) {
+ return ReturnType(0);
+ }
const long double countf = count;
return static_cast<ReturnType>(sum / countf);
}
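// [Editor's note] A standalone check of the zero-count guard above: the
// helper returns ReturnType(0) instead of dividing by zero, and otherwise
// divides in long double before narrowing to the requested return type.
#include <cassert>
#include <cstdint>

template <typename ReturnType>
ReturnType avgSketch(long double sum, uint64_t count) {
  if (count == 0) {
    return ReturnType(0);
  }
  return static_cast<ReturnType>(sum / static_cast<long double>(count));
}

int main() {
  assert(avgSketch<double>(9.0L, 2) == 4.5);
  assert(avgSketch<double>(9.0L, 0) == 0.0); // guarded: no division
  return 0;
}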
// In all other cases divide using double precision.
// This should be relatively fast, and accurate enough for most use cases.
template <typename ReturnType, typename ValueType>
-typename std::enable_if<!std::is_same<typename std::remove_cv<ValueType>::type,
- long double>::value,
- ReturnType>::type
+typename std::enable_if<
+ !std::is_same<typename std::remove_cv<ValueType>::type, long double>::value,
+ ReturnType>::type
avgHelper(ValueType sum, uint64_t count) {
- if (count == 0) { return ReturnType(0); }
+ if (count == 0) {
+ return ReturnType(0);
+ }
const double sumf = double(sum);
const double countf = double(count);
return static_cast<ReturnType>(sumf / countf);
std::ratio<Duration::period::den, Duration::period::num>>
NativeRate;
typedef std::chrono::duration<
- ReturnType, std::ratio<Interval::period::den,
- Interval::period::num>> DesiredRate;
+ ReturnType,
+ std::ratio<Interval::period::den, Interval::period::num>>
+ DesiredRate;
NativeRate native(count / elapsed.count());
DesiredRate desired = std::chrono::duration_cast<DesiredRate>(native);
return desired.count();
}
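// [Editor's note] A self-contained sketch of the rate conversion above:
// representing "events per unit time" as a chrono duration whose period is
// the inverse of the time unit lets std::chrono::duration_cast perform the
// unit conversion.
#include <chrono>
#include <cstdint>
#include <iostream>
#include <ratio>

int main() {
  using Elapsed = std::chrono::seconds;
  // Events per second: invert Elapsed's period, as NativeRate does above.
  using PerSecond = std::chrono::duration<
      double,
      std::ratio<Elapsed::period::den, Elapsed::period::num>>;
  // Events per minute: the inverse of std::chrono::minutes' 60/1 period.
  using PerMinute = std::chrono::duration<double, std::ratio<1, 60>>;

  uint64_t count = 500;
  Elapsed elapsed(10);
  PerSecond native(static_cast<double>(count) / elapsed.count());
  auto desired = std::chrono::duration_cast<PerMinute>(native);
  std::cout << desired.count() << "\n"; // 50/sec == 3000/min
  return 0;
}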
-
-template<typename T>
+template <typename T>
struct Bucket {
public:
typedef T ValueType;
- Bucket()
- : sum(ValueType()),
- count(0) {}
+ Bucket() : sum(ValueType()), count(0) {}
void clear() {
sum = ValueType();
ValueType sum;
uint64_t count;
};
-
-}} // folly::detail
+} // namespace detail
+} // namespace folly
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
#include <folly/stats/BucketedTimeSeries.h>
-#include <folly/stats/BucketedTimeSeries-defs.h>
#include <glog/logging.h>
#include <folly/Benchmark.h>
+#include <folly/stats/BucketedTimeSeries-defs.h>
using std::chrono::seconds;
using folly::BenchmarkSuspender;
using folly::BucketedTimeSeries;
-void addValue(unsigned int iters,
- seconds duration, size_t numBuckets,
- size_t callsPerSecond) {
+void addValue(
+ unsigned int iters,
+ seconds duration,
+ size_t numBuckets,
+ size_t callsPerSecond) {
BenchmarkSuspender suspend;
BucketedTimeSeries<int64_t> ts(numBuckets, duration);
suspend.dismiss();
BENCHMARK_NAMED_PARAM(addValue, 71x5_100perSec, seconds(71), 5, 100);
BENCHMARK_NAMED_PARAM(addValue, 1x1_100perSec, seconds(1), 1, 100);
-int main(int argc, char *argv[]) {
+int main(int argc, char* argv[]) {
gflags::ParseCommandLineFlags(&argc, &argv, true);
folly::runBenchmarks();
return 0;
FOR_EACH_RANGE (i, 0, n) {
hist.addValue(num);
++num;
- if (num > max) { num = min; }
+ if (num > max) {
+ num = min;
+ }
}
}
BENCHMARK_NAMED_PARAM(addValue, 0_to_1000, 10, 0, 1000);
BENCHMARK_NAMED_PARAM(addValue, 5k_to_20k, 250, 5000, 20000);
-int main(int argc, char *argv[]) {
+int main(int argc, char* argv[]) {
gflags::ParseCommandLineFlags(&argc, &argv, true);
folly::runBenchmarks();
return 0;
*/
#include <folly/stats/Histogram.h>
-#include <folly/stats/Histogram-defs.h>
#include <folly/portability/GTest.h>
+#include <folly/stats/Histogram-defs.h>
using folly::Histogram;
if (n < 100) {
double lowPct = -1.0;
double highPct = -1.0;
- unsigned int bucketIdx = h.getPercentileBucketIdx(pct + epsilon,
- &lowPct, &highPct);
+ unsigned int bucketIdx =
+ h.getPercentileBucketIdx(pct + epsilon, &lowPct, &highPct);
EXPECT_EQ(n + 1, bucketIdx);
EXPECT_FLOAT_EQ(n / 100.0, lowPct);
EXPECT_FLOAT_EQ((n + 1) / 100.0, highPct);
if (n > 0) {
double lowPct = -1.0;
double highPct = -1.0;
- unsigned int bucketIdx = h.getPercentileBucketIdx(pct - epsilon,
- &lowPct, &highPct);
+ unsigned int bucketIdx =
+ h.getPercentileBucketIdx(pct - epsilon, &lowPct, &highPct);
EXPECT_EQ(n, bucketIdx);
EXPECT_FLOAT_EQ((n - 1) / 100.0, lowPct);
EXPECT_FLOAT_EQ(n / 100.0, highPct);
// Add one to each bucket, make sure the counts match
for (int32_t i = 0; i < 10; i++) {
h.addValue(i);
- EXPECT_EQ(i+1, h.computeTotalCount());
+ EXPECT_EQ(i + 1, h.computeTotalCount());
}
// Add a lot to one bucket, make sure the counts still make sense
vector<TimePoint> bucketStarts;
};
vector<TestData> testData = {
- // 71 seconds x 4 buckets
- { 71, 4, {0, 18, 36, 54}},
- // 100 seconds x 10 buckets
- { 100, 10, {0, 10, 20, 30, 40, 50, 60, 70, 80, 90}},
- // 10 seconds x 10 buckets
- { 10, 10, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}},
- // 10 seconds x 1 bucket
- { 10, 1, {0}},
- // 1 second x 1 bucket
- { 1, 1, {0}},
+ // 71 seconds x 4 buckets
+ {71, 4, {0, 18, 36, 54}},
+ // 100 seconds x 10 buckets
+ {100, 10, {0, 10, 20, 30, 40, 50, 60, 70, 80, 90}},
+ // 10 seconds x 10 buckets
+ {10, 10, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}},
+ // 10 seconds x 1 bucket
+ {10, 1, {0}},
+ // 1 second x 1 bucket
+ {1, 1, {0}},
};
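// [Editor's note] A sketch of how the expected bucket starts above are
// derived: bucket n holds the times t for which
// (t * numBuckets) / duration == n under integer division, so it begins at
// ceil(n * duration / numBuckets).
#include <cassert>
#include <cstdint>

int64_t bucketStart(int64_t n, int64_t duration, int64_t numBuckets) {
  // ceiling division of n * duration by numBuckets
  return (n * duration + numBuckets - 1) / numBuckets;
}

int main() {
  // 71 seconds across 4 buckets -> 0, 18, 36, 54, matching the table above.
  assert(bucketStart(0, 71, 4) == 0);
  assert(bucketStart(1, 71, 4) == 18);
  assert(bucketStart(2, 71, 4) == 36);
  assert(bucketStart(3, 71, 4) == 54);
  return 0;
}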
}
size_t returnedIdx;
TimePoint returnedStart;
TimePoint returnedNextStart;
- ts.getBucketInfo(expectedStart, &returnedIdx,
- &returnedStart, &returnedNextStart);
- EXPECT_EQ(idx, returnedIdx) << data.duration << "x" << data.numBuckets
- << ": " << point.first << "="
- << point.second;
+ ts.getBucketInfo(
+ expectedStart, &returnedIdx, &returnedStart, &returnedNextStart);
+ EXPECT_EQ(idx, returnedIdx)
+ << data.duration << "x" << data.numBuckets << ": " << point.first
+ << "=" << point.second;
EXPECT_EQ(expectedStart, returnedStart)
<< data.duration << "x" << data.numBuckets << ": " << point.first
<< "=" << point.second;
setup();
ts.update(seconds(151 + offset));
EXPECT_EQ(4, ts.count());
- //EXPECT_EQ(6, ts.sum());
+ // EXPECT_EQ(6, ts.sum());
EXPECT_EQ(6, ts.avg());
// The last time we added was 95.
{
// Test uint64_t values that would overflow int64_t
BucketedTimeSeries<uint64_t> ts(60, seconds(600));
- ts.addValueAggregated(seconds(0),
- std::numeric_limits<uint64_t>::max(),
- std::numeric_limits<uint64_t>::max());
+ ts.addValueAggregated(
+ seconds(0),
+ std::numeric_limits<uint64_t>::max(),
+ std::numeric_limits<uint64_t>::max());
EXPECT_DOUBLE_EQ(1.0, ts.avg());
EXPECT_DOUBLE_EQ(1.0, ts.avg<float>());
// but the average fits in an int64_t
BucketedTimeSeries<double> ts(60, seconds(600));
uint64_t value = 0x3fffffffffffffff;
- FOR_EACH_RANGE(i, 0, 16) {
- ts.addValue(seconds(0), value);
- }
+ FOR_EACH_RANGE (i, 0, 16) { ts.addValue(seconds(0), value); }
EXPECT_DOUBLE_EQ(value, ts.avg());
EXPECT_DOUBLE_EQ(value, ts.avg<float>());
{
// Test BucketedTimeSeries with a smaller integer type
BucketedTimeSeries<int16_t> ts(60, seconds(600));
- FOR_EACH_RANGE(i, 0, 101) {
- ts.addValue(seconds(0), i);
- }
+ FOR_EACH_RANGE (i, 0, 101) { ts.addValue(seconds(0), i); }
EXPECT_DOUBLE_EQ(50.0, ts.avg());
EXPECT_DOUBLE_EQ(50.0, ts.avg<float>());
BucketedTimeSeries<int64_t> ts(data.numBuckets, seconds(data.duration));
vector<BucketInfo> info;
- auto fn = [&](
- const Bucket& bucket,
- TimePoint bucketStart,
- TimePoint bucketEnd) -> bool {
+ auto fn = [&](const Bucket& bucket,
+ TimePoint bucketStart,
+ TimePoint bucketEnd) -> bool {
info.emplace_back(&bucket, bucketStart, bucketEnd);
return true;
};
// 0: time=[0, 2): values=(0, 1), sum=1, count=2
// 1: time=[2, 4): values=(2, 3), sum=5, count=2
// 2: time=[4, 6): values=(4, 5), sum=9, count=2
+ // clang-format off
double expectedSums1[kDuration + 1][kDuration + 1] = {
- {0, 4.5, 9, 11.5, 14, 14.5, 15},
- {0, 4.5, 7, 9.5, 10, 10.5, -1},
- {0, 2.5, 5, 5.5, 6, -1, -1},
- {0, 2.5, 3, 3.5, -1, -1, -1},
- {0, 0.5, 1, -1, -1, -1, -1},
- {0, 0.5, -1, -1, -1, -1, -1},
- {0, -1, -1, -1, -1, -1, -1}
+ {0, 4.5, 9, 11.5, 14, 14.5, 15},
+ {0, 4.5, 7, 9.5, 10, 10.5, -1},
+ {0, 2.5, 5, 5.5, 6, -1, -1},
+ {0, 2.5, 3, 3.5, -1, -1, -1},
+ {0, 0.5, 1, -1, -1, -1, -1},
+ {0, 0.5, -1, -1, -1, -1, -1},
+ {0, -1, -1, -1, -1, -1, -1},
};
int expectedCounts1[kDuration + 1][kDuration + 1] = {
- {0, 1, 2, 3, 4, 5, 6},
- {0, 1, 2, 3, 4, 5, -1},
- {0, 1, 2, 3, 4, -1, -1},
- {0, 1, 2, 3, -1, -1, -1},
- {0, 1, 2, -1, -1, -1, -1},
- {0, 1, -1, -1, -1, -1, -1},
- {0, -1, -1, -1, -1, -1, -1}
+ {0, 1, 2, 3, 4, 5, 6},
+ {0, 1, 2, 3, 4, 5, -1},
+ {0, 1, 2, 3, 4, -1, -1},
+ {0, 1, 2, 3, -1, -1, -1},
+ {0, 1, 2, -1, -1, -1, -1},
+ {0, 1, -1, -1, -1, -1, -1},
+ {0, -1, -1, -1, -1, -1, -1},
};
+ // clang-format on
TimePoint currentTime = b.getLatestTime() + seconds(1);
for (int i = 0; i <= kDuration + 1; i++) {
// 0: time=[6, 8): values=(6, 7), sum=13, count=2
// 1: time=[8, 10): values=(8), sum=8, count=1
// 2: time=[4, 6): values=(4, 5), sum=9, count=2
+ // clang-format off
double expectedSums2[kDuration + 1][kDuration + 1] = {
- {0, 8, 14.5, 21, 25.5, 30, 30},
- {0, 6.5, 13, 17.5, 22, 22, -1},
- {0, 6.5, 11, 15.5, 15.5, -1, -1},
- {0, 4.5, 9, 9, -1, -1, -1},
- {0, 4.5, 4.5, -1, -1, -1, -1},
- {0, 0, -1, -1, -1, -1, -1},
- {0, -1, -1, -1, -1, -1, -1}
+ {0, 8, 14.5, 21, 25.5, 30, 30},
+ {0, 6.5, 13, 17.5, 22, 22, -1},
+ {0, 6.5, 11, 15.5, 15.5, -1, -1},
+ {0, 4.5, 9, 9, -1, -1, -1},
+ {0, 4.5, 4.5, -1, -1, -1, -1},
+ {0, 0, -1, -1, -1, -1, -1},
+ {0, -1, -1, -1, -1, -1, -1},
};
int expectedCounts2[kDuration + 1][kDuration + 1] = {
- {0, 1, 2, 3, 4, 5, 5},
- {0, 1, 2, 3, 4, 4, -1},
- {0, 1, 2, 3, 3, -1, -1},
- {0, 1, 2, 2, -1, -1, -1},
- {0, 1, 1, -1, -1, -1, -1},
- {0, 0, -1, -1, -1, -1, -1},
- {0, -1, -1, -1, -1, -1, -1}
+ {0, 1, 2, 3, 4, 5, 5},
+ {0, 1, 2, 3, 4, 4, -1},
+ {0, 1, 2, 3, 3, -1, -1},
+ {0, 1, 2, 2, -1, -1, -1},
+ {0, 1, 1, -1, -1, -1, -1},
+ {0, 0, -1, -1, -1, -1, -1},
+ {0, -1, -1, -1, -1, -1, -1},
};
+ // clang-format on
currentTime = b.getLatestTime() + seconds(1);
for (int i = 0; i <= kDuration + 1; i++) {
<< "i=" << i << ", j=" << j << ", interval=[" << start << ", " << end
<< ")";
- double expectedRate = expectedInterval.count() ?
- expectedSum / expectedInterval.count() : 0;
+ double expectedRate =
+ expectedInterval.count() ? expectedSum / expectedInterval.count() : 0;
EXPECT_EQ(expectedRate, b.rate(start, end))
<< "i=" << i << ", j=" << j << ", interval=[" << start << ", " << end
<< ")";
}
namespace IntMHTS {
- enum Levels {
- MINUTE,
- HOUR,
- ALLTIME,
- NUM_LEVELS,
- };
+enum Levels {
+ MINUTE,
+ HOUR,
+ ALLTIME,
+ NUM_LEVELS,
+};
- const seconds kMinuteHourDurations[] = {
- seconds(60), seconds(3600), seconds(0)
- };
+const seconds kMinuteHourDurations[] = {seconds(60), seconds(3600), seconds(0)};
};
TEST(MinuteHourTimeSeries, Basic) {
- folly::MultiLevelTimeSeries<int> mhts(60, IntMHTS::NUM_LEVELS,
- IntMHTS::kMinuteHourDurations);
+ folly::MultiLevelTimeSeries<int> mhts(
+ 60, IntMHTS::NUM_LEVELS, IntMHTS::kMinuteHourDurations);
EXPECT_EQ(mhts.numLevels(), IntMHTS::NUM_LEVELS);
EXPECT_EQ(mhts.numLevels(), 3);
EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 300);
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 600);
- EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 300*10);
- EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 300*10);
+ EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 300 * 10);
+ EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 300 * 10);
EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 10);
EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 10);
EXPECT_EQ(mhts.rate(IntMHTS::HOUR), 10);
EXPECT_EQ(mhts.rate(IntMHTS::ALLTIME), 10);
- for (int i = 0; i < 3600*3 - 300; ++i) {
+ for (int i = 0; i < 3600 * 3 - 300; ++i) {
mhts.addValue(cur_time++, 10);
}
mhts.flush();
EXPECT_EQ(mhts.getLevel(IntMHTS::MINUTE).elapsed().count(), 60);
EXPECT_EQ(mhts.getLevel(IntMHTS::HOUR).elapsed().count(), 3600);
- EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 3600*3);
+ EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 3600 * 3);
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 600);
- EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600*10);
- EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600*3*10);
+ EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600 * 10);
+ EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10);
EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 10);
EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 10);
}
mhts.flush();
- EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*100);
- EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600*100);
- EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
- 3600*3*10 + 3600*100);
+ EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 100);
+ EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600 * 100);
+ EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10 + 3600 * 100);
EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 100);
EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 100);
}
mhts.flush();
- EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*120);
- EXPECT_EQ(mhts.sum(IntMHTS::HOUR),
- 1800*100 + 1800*120);
- EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
- 3600*3*10 + 3600*100 + 1800*120);
+ EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 120);
+ EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 1800 * 100 + 1800 * 120);
+ EXPECT_EQ(
+ mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10 + 3600 * 100 + 1800 * 120);
for (int i = 0; i < 60; ++i) {
mhts.addValue(cur_time++, 1000);
}
mhts.flush();
- EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*1000);
- EXPECT_EQ(mhts.sum(IntMHTS::HOUR),
- 1740*100 + 1800*120 + 60*1000);
- EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
- 3600*3*10 + 3600*100 + 1800*120 + 60*1000);
+ EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 1000);
+ EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 1740 * 100 + 1800 * 120 + 60 * 1000);
+ EXPECT_EQ(
+ mhts.sum(IntMHTS::ALLTIME),
+ 3600 * 3 * 10 + 3600 * 100 + 1800 * 120 + 60 * 1000);
mhts.clear();
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 0);
}
TEST(MinuteHourTimeSeries, QueryByInterval) {
- folly::MultiLevelTimeSeries<int> mhts(60, IntMHTS::NUM_LEVELS,
- IntMHTS::kMinuteHourDurations);
+ folly::MultiLevelTimeSeries<int> mhts(
+ 60, IntMHTS::NUM_LEVELS, IntMHTS::kMinuteHourDurations);
TimePoint curTime;
for (curTime = mkTimePoint(0); curTime < mkTimePoint(7200);
TimePoint end;
};
TimeInterval intervals[12] = {
- { curTime - seconds(60), curTime },
- { curTime - seconds(3600), curTime },
- { curTime - seconds(7200), curTime },
- { curTime - seconds(3600), curTime - seconds(60) },
- { curTime - seconds(7200), curTime - seconds(60) },
- { curTime - seconds(7200), curTime - seconds(3600) },
- { curTime - seconds(50), curTime - seconds(20) },
- { curTime - seconds(3020), curTime - seconds(20) },
- { curTime - seconds(7200), curTime - seconds(20) },
- { curTime - seconds(3000), curTime - seconds(1000) },
- { curTime - seconds(7200), curTime - seconds(1000) },
- { curTime - seconds(7200), curTime - seconds(3600) },
+ {curTime - seconds(60), curTime},
+ {curTime - seconds(3600), curTime},
+ {curTime - seconds(7200), curTime},
+ {curTime - seconds(3600), curTime - seconds(60)},
+ {curTime - seconds(7200), curTime - seconds(60)},
+ {curTime - seconds(7200), curTime - seconds(3600)},
+ {curTime - seconds(50), curTime - seconds(20)},
+ {curTime - seconds(3020), curTime - seconds(20)},
+ {curTime - seconds(7200), curTime - seconds(20)},
+ {curTime - seconds(3000), curTime - seconds(1000)},
+ {curTime - seconds(7200), curTime - seconds(1000)},
+ {curTime - seconds(7200), curTime - seconds(3600)},
};
int expectedSums[12] = {
- 6000, 41400, 32400, 35400, 32130, 16200, 3000, 33600, 32310, 20000, 27900,
- 16200
+ 6000,
+ 41400,
+ 32400,
+ 35400,
+ 32130,
+ 16200,
+ 3000,
+ 33600,
+ 32310,
+ 20000,
+ 27900,
+ 16200,
};
int expectedCounts[12] = {
- 60, 3600, 7200, 3540, 7140, 3600, 30, 3000, 7180, 2000, 6200, 3600
+ 60, 3600, 7200, 3540, 7140, 3600, 30, 3000, 7180, 2000, 6200, 3600,
};
for (int i = 0; i < 12; ++i) {
EXPECT_EQ(expectedCounts[i], c);
int a = mhts.avg<int>(interval.start, interval.end);
- EXPECT_EQ(expectedCounts[i] ?
- (expectedSums[i] / expectedCounts[i]) : 0,
- a);
+ EXPECT_EQ(expectedCounts[i] ? (expectedSums[i] / expectedCounts[i]) : 0, a);
int r = mhts.rate<int>(interval.start, interval.end);
int expectedRate =
- expectedSums[i] / (interval.end - interval.start).count();
+ expectedSums[i] / (interval.end - interval.start).count();
EXPECT_EQ(expectedRate, r);
}
}
*/
#include <folly/stats/TimeseriesHistogram.h>
-#include <folly/stats/TimeseriesHistogram-defs.h>
#include <random>
#include <folly/portability/GTest.h>
+#include <folly/stats/TimeseriesHistogram-defs.h>
using namespace std;
using namespace folly;
namespace {
namespace IntMTMHTS {
- enum Levels {
- MINUTE,
- TEN_MINUTE,
- HOUR,
- ALLTIME,
- NUM_LEVELS,
- };
+enum Levels {
+ MINUTE,
+ TEN_MINUTE,
+ HOUR,
+ ALLTIME,
+ NUM_LEVELS,
+};
- const seconds kDurations[] = {
- seconds(60), seconds(600), seconds(3600), seconds(0)
- };
+const seconds kDurations[] = {
+ seconds(60),
+ seconds(600),
+ seconds(3600),
+ seconds(0),
+};
};
namespace IntMHTS {
- enum Levels {
- MINUTE,
- HOUR,
- ALLTIME,
- NUM_LEVELS,
- };
+enum Levels {
+ MINUTE,
+ HOUR,
+ ALLTIME,
+ NUM_LEVELS,
+};
- const seconds kDurations[] = {
- seconds(60), seconds(3600), seconds(0)
- };
+const seconds kDurations[] = {
+ seconds(60),
+ seconds(3600),
+ seconds(0),
+};
};
typedef std::mt19937 RandomInt32;
RandomInt32 random(5);
// [10, 109], 12 buckets including above and below
{
- TimeseriesHistogram<int> h(10, 10, 110,
- MultiLevelTimeSeries<int>(
- 60, IntMTMHTS::NUM_LEVELS,
- IntMTMHTS::kDurations));
+ TimeseriesHistogram<int> h(
+ 10,
+ 10,
+ 110,
+ MultiLevelTimeSeries<int>(
+ 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
EXPECT_EQ(0, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
h.update(mkTimePoint(1500000000));
// bucket 0 stores everything below min, so its minimum
// is the lowest possible number
- EXPECT_EQ(std::numeric_limits<int>::min(),
- h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
+ EXPECT_EQ(
+ std::numeric_limits<int>::min(),
+ h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
EXPECT_EQ(110, h.getPercentileBucketMin(99, IntMTMHTS::ALLTIME));
EXPECT_EQ(-2, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
RandomInt32 random(5);
// [10, 109], 12 buckets including above and below
{
- TimeseriesHistogram<int> hist(10, 10, 110,
- MultiLevelTimeSeries<int>(
- 60, IntMTMHTS::NUM_LEVELS,
- IntMTMHTS::kDurations));
+ TimeseriesHistogram<int> hist(
+ 10,
+ 10,
+ 110,
+ MultiLevelTimeSeries<int>(
+ 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
int maxVal = 120;
hist.addValue(mkTimePoint(0), 0);
hist.update(mkTimePoint(0));
- const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
};
EXPECT_EQ(kStringValues1[level], hist.getString(level));
}
- const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
- "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
+ "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
};
TEST(TimeseriesHistogram, Clear) {
{
- TimeseriesHistogram<int> hist(10, 0, 100,
- MultiLevelTimeSeries<int>(
- 60, IntMTMHTS::NUM_LEVELS,
- IntMTMHTS::kDurations));
+ TimeseriesHistogram<int> hist(
+ 10,
+ 0,
+ 100,
+ MultiLevelTimeSeries<int>(
+ 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) {
}
}
-
TEST(TimeseriesHistogram, Basic) {
{
- TimeseriesHistogram<int> hist(10, 0, 100,
- MultiLevelTimeSeries<int>(
- 60, IntMTMHTS::NUM_LEVELS,
- IntMTMHTS::kDurations));
+ TimeseriesHistogram<int> hist(
+ 10,
+ 0,
+ 100,
+ MultiLevelTimeSeries<int>(
+ 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) {
for (int pct = 1; pct <= 100; pct++) {
int expected = (pct - 1) / 10 * 10;
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
- EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
- IntMTMHTS::TEN_MINUTE));
+ EXPECT_EQ(
+ expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
}
EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
}
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
- EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
- IntMTMHTS::MINUTE));
+ EXPECT_EQ(
+ 0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE));
EXPECT_EQ(60000, hist.count(IntMTMHTS::TEN_MINUTE));
// -----------------
{
- TimeseriesHistogram<int> hist(10, 0, 100,
- MultiLevelTimeSeries<int>(
- 60, IntMTMHTS::NUM_LEVELS,
- IntMTMHTS::kDurations));
+ TimeseriesHistogram<int> hist(
+ 10,
+ 0,
+ 100,
+ MultiLevelTimeSeries<int>(
+ 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) {
for (int pct = 1; pct <= 100; pct++) {
int expected = (pct - 1) / 10 * 10;
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
- EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
- IntMTMHTS::TEN_MINUTE));
+ EXPECT_EQ(
+ expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
- }
+ }
- for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
- EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
- EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
- EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
- EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
+ for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
+ EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
+ EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
+ EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
+ EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
}
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
- EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
- IntMTMHTS::MINUTE));
+ EXPECT_EQ(
+ 0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
}
// -----------------
{
- TimeseriesHistogram<int> hist(10, 0, 100,
- MultiLevelTimeSeries<int>(
- 60, IntMTMHTS::NUM_LEVELS,
- IntMTMHTS::kDurations));
+ TimeseriesHistogram<int> hist(
+ 10,
+ 0,
+ 100,
+ MultiLevelTimeSeries<int>(
+ 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 50; i++) {
for (int pct = 1; pct <= 100; pct++) {
int expected = (pct - 1) / 10 * 10;
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
- EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
- IntMTMHTS::TEN_MINUTE));
+ EXPECT_EQ(
+ expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
}
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::HOUR));
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::ALLTIME));
- EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
- IntMTMHTS::MINUTE));
- EXPECT_EQ(0,
- hist.getBucket(hist.getNumBuckets() - 1).
- count(IntMTMHTS::TEN_MINUTE));
- EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
- IntMTMHTS::HOUR));
- EXPECT_EQ(0,
- hist.getBucket(hist.getNumBuckets() - 1).count(
- IntMTMHTS::ALLTIME));
+ EXPECT_EQ(
+ 0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
+ EXPECT_EQ(
+ 0,
+ hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::TEN_MINUTE));
+ EXPECT_EQ(
+ 0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::HOUR));
+ EXPECT_EQ(
+ 0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
hist.addValue(mkTimePoint(3599), 200 + i);
}
hist.update(mkTimePoint(3599));
- EXPECT_EQ(100,
- hist.getBucket(hist.getNumBuckets() - 1).count(
- IntMTMHTS::ALLTIME));
-
+ EXPECT_EQ(
+ 100,
+ hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
}
}
TEST(TimeseriesHistogram, QueryByInterval) {
- TimeseriesHistogram<int> mhts(8, 8, 120,
- MultiLevelTimeSeries<int>(
- 60, IntMHTS::NUM_LEVELS,
- IntMHTS::kDurations));
+ TimeseriesHistogram<int> mhts(
+ 8,
+ 8,
+ 120,
+ MultiLevelTimeSeries<int>(60, IntMHTS::NUM_LEVELS, IntMHTS::kDurations));
mhts.update(mkTimePoint(0));
StatsClock::time_point end;
};
TimeInterval intervals[12] = {
- { curTime - 60, curTime },
- { curTime - 3600, curTime },
- { curTime - 7200, curTime },
- { curTime - 3600, curTime - 60 },
- { curTime - 7200, curTime - 60 },
- { curTime - 7200, curTime - 3600 },
- { curTime - 50, curTime - 20 },
- { curTime - 3020, curTime - 20 },
- { curTime - 7200, curTime - 20 },
- { curTime - 3000, curTime - 1000 },
- { curTime - 7200, curTime - 1000 },
- { curTime - 7200, curTime - 3600 },
+ {curTime - 60, curTime},
+ {curTime - 3600, curTime},
+ {curTime - 7200, curTime},
+ {curTime - 3600, curTime - 60},
+ {curTime - 7200, curTime - 60},
+ {curTime - 7200, curTime - 3600},
+ {curTime - 50, curTime - 20},
+ {curTime - 3020, curTime - 20},
+ {curTime - 7200, curTime - 20},
+ {curTime - 3000, curTime - 1000},
+ {curTime - 7200, curTime - 1000},
+ {curTime - 7200, curTime - 3600},
};
int expectedSums[12] = {
- 6000, 41400, 32400, 35400, 32129, 16200, 3000, 33600, 32308, 20000, 27899,
- 16200
+ 6000,
+ 41400,
+ 32400,
+ 35400,
+ 32129,
+ 16200,
+ 3000,
+ 33600,
+ 32308,
+ 20000,
+ 27899,
+ 16200,
};
int expectedCounts[12] = {
- 60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600
+ 60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600,
};
// The first 7200 values added all fell below the histogram minimum,
int belowMinBucket = std::numeric_limits<int>::min();
int expectedValues[12][3] = {
- {96, 96, 96},
- { 8, 8, 96},
- { belowMinBucket, belowMinBucket, 8}, // alltime
- { 8, 8, 8},
- { belowMinBucket, belowMinBucket, 8}, // alltime
- { belowMinBucket, belowMinBucket, 8}, // alltime
- {96, 96, 96},
- { 8, 8, 96},
- { belowMinBucket, belowMinBucket, 8}, // alltime
- { 8, 8, 8},
- { belowMinBucket, belowMinBucket, 8}, // alltime
- { belowMinBucket, belowMinBucket, 8} // alltime
+ {96, 96, 96},
+ {8, 8, 96},
+ {belowMinBucket, belowMinBucket, 8}, // alltime
+ {8, 8, 8},
+ {belowMinBucket, belowMinBucket, 8}, // alltime
+ {belowMinBucket, belowMinBucket, 8}, // alltime
+ {96, 96, 96},
+ {8, 8, 96},
+ {belowMinBucket, belowMinBucket, 8}, // alltime
+ {8, 8, 8},
+ {belowMinBucket, belowMinBucket, 8}, // alltime
+ {belowMinBucket, belowMinBucket, 8} // alltime
};
for (int i = 0; i < 12; i++) {
int values[] = {-1, 0, 500, 1000, 1500};
for (int ii = 0; ii < 5; ++ii) {
int value = values[ii];
- TimeseriesHistogram<int> h(10, 0, 1000,
- MultiLevelTimeSeries<int>(
- 60, IntMTMHTS::NUM_LEVELS,
- IntMTMHTS::kDurations));
+ TimeseriesHistogram<int> h(
+ 10,
+ 0,
+ 1000,
+ MultiLevelTimeSeries<int>(
+ 60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
const int kNumIters = 1000;
for (int jj = 0; jj < kNumIters; ++jj) {
// Things get trickier if there are multiple unique values.
const int kNewValue = 750;
- for (int kk = 0; kk < 2*kNumIters; ++kk) {
+ for (int kk = 0; kk < 2 * kNumIters; ++kk) {
h.addValue(mkTimePoint(1), kNewValue);
}
h.update(mkTimePoint(1));
- EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue+5, 5);
+ EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue + 5, 5);
if (value >= 0 && value <= 1000) {
// only do further testing if value is within our bucket range,
// else estimates can be wildly off
if (kNewValue > value) {
- EXPECT_NEAR(h.getPercentileEstimate(10, 0), value+5, 5);
- EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue+5, 5);
+ EXPECT_NEAR(h.getPercentileEstimate(10, 0), value + 5, 5);
+ EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue + 5, 5);
} else {
- EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue+5, 5);
- EXPECT_NEAR(h.getPercentileEstimate(99, 0), value+5, 5);
+ EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue + 5, 5);
+ EXPECT_NEAR(h.getPercentileEstimate(99, 0), value + 5, 5);
}
}
}