Adds writer test case for RCU
[folly.git] / folly / stats / BucketedTimeSeries-defs.h
index 08d9c292c8524fb7f075e68288243c92015666d6..6155c3243f49260b00c548709ad6046cb5706a7f 100644 (file)
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017 Facebook, Inc.
+ * Copyright 2012-present Facebook, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
 
 #pragma once
 
-#include <algorithm>
-#include <glog/logging.h>
 #include <folly/Likely.h>
 #include <folly/stats/BucketedTimeSeries.h>
+#include <glog/logging.h>
+#include <algorithm>
+#include <stdexcept>
 
 namespace folly {
 
@@ -36,13 +37,48 @@ BucketedTimeSeries<VT, CT>::BucketedTimeSeries(
     // There is no point in having more buckets than our timestamp
     // granularity: otherwise we would have buckets that could never be used.
     if (nBuckets > size_t(duration_.count())) {
-      nBuckets = duration_.count();
+      nBuckets = size_t(duration_.count());
     }
 
     buckets_.resize(nBuckets, Bucket());
   }
 }
 
+template <typename VT, typename CT>
+BucketedTimeSeries<VT, CT>::BucketedTimeSeries(
+    TimePoint theFirstTime,
+    TimePoint theLatestTime,
+    Duration maxDuration,
+    const std::vector<Bucket>& bucketsList)
+    : firstTime_(theFirstTime),
+      latestTime_(theLatestTime),
+      duration_(maxDuration),
+      buckets_(bucketsList) {
+  // Come up with the total_ from buckets_ being passed in
+  for (auto const& bucket : buckets_) {
+    total_.add(bucket.sum, bucket.count);
+  }
+
+  // Verify the integrity of the data
+
+  // If firstTime is greater than latestTime, the total count should be 0.
+  // (firstTime being greater than latestTime means that no data points have
+  // ever been added to the time series.)
+  if (firstTime_ > latestTime_ && (total_.sum != 0 || total_.count != 0)) {
+    throw std::invalid_argument(
+        "The total should have been 0 "
+        "if firstTime is greater than latestTime");
+  }
+
+  // If firstTime is less than or equal to latestTime,
+  // latestTime - firstTime should be less than or equal to the duration.
+  if (firstTime_ <= latestTime_ && latestTime_ - firstTime_ > duration_) {
+    throw std::invalid_argument(
+        "The difference between firstTime and latestTime "
+        "should be less than or equal to the duration");
+  }
+}
+
 template <typename VT, typename CT>
 bool BucketedTimeSeries<VT, CT>::addValue(TimePoint now, const ValueType& val) {
   return addValueAggregated(now, val, 1);
@@ -52,15 +88,15 @@ template <typename VT, typename CT>
 bool BucketedTimeSeries<VT, CT>::addValue(
     TimePoint now,
     const ValueType& val,
-    int64_t times) {
-  return addValueAggregated(now, val * times, times);
+    uint64_t times) {
+  return addValueAggregated(now, val * ValueType(times), times);
 }
 
 template <typename VT, typename CT>
 bool BucketedTimeSeries<VT, CT>::addValueAggregated(
     TimePoint now,
     const ValueType& total,
-    int64_t nsamples) {
+    uint64_t nsamples) {
   if (isAllTime()) {
     if (UNLIKELY(empty())) {
       firstTime_ = now;
@@ -137,8 +173,8 @@ size_t BucketedTimeSeries<VT, CT>::updateBuckets(TimePoint now) {
   size_t currentBucket;
   TimePoint currentBucketStart;
   TimePoint nextBucketStart;
-  getBucketInfo(latestTime_, &currentBucket,
-                &currentBucketStart, &nextBucketStart);
+  getBucketInfo(
+      latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);
 
   // Update latestTime_
   latestTime_ = now;
@@ -210,8 +246,8 @@ typename CT::time_point BucketedTimeSeries<VT, CT>::getEarliestTimeNonEmpty()
   size_t currentBucket;
   TimePoint currentBucketStart;
   TimePoint nextBucketStart;
-  getBucketInfo(latestTime_, &currentBucket,
-                &currentBucketStart, &nextBucketStart);
+  getBucketInfo(
+      latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);
 
   // Subtract 1 duration from the start of the next bucket to find the
   // earliest possible data point we could be tracking.
@@ -269,7 +305,7 @@ uint64_t BucketedTimeSeries<VT, CT>::count(TimePoint start, TimePoint end)
           TimePoint bucketStart,
           TimePoint nextBucketStart) -> bool {
         sample_count += this->rangeAdjust(
-            bucketStart, nextBucketStart, start, end, bucket.count);
+            bucketStart, nextBucketStart, start, end, ValueType(bucket.count));
         return true;
       });
 
@@ -291,7 +327,7 @@ ReturnType BucketedTimeSeries<VT, CT>::avg(TimePoint start, TimePoint end)
         total += this->rangeAdjust(
             bucketStart, nextBucketStart, start, end, bucket.sum);
         sample_count += this->rangeAdjust(
-            bucketStart, nextBucketStart, start, end, bucket.count);
+            bucketStart, nextBucketStart, start, end, ValueType(bucket.count));
         return true;
       });
 
@@ -346,11 +382,11 @@ void BucketedTimeSeries<VT, CT>::getBucketInfo(
   Duration timeMod = time.time_since_epoch() % duration_;
   TimeInt numFullDurations = time.time_since_epoch() / duration_;
 
-  TimeInt scaledTime = timeMod.count() * buckets_.size();
+  TimeInt scaledTime = timeMod.count() * TimeInt(buckets_.size());
 
   // Keep these two lines together.  The compiler should be able to compute
   // both the division and modulus with a single operation.
-  *bucketIdx = scaledTime / duration_.count();
+  *bucketIdx = size_t(scaledTime / duration_.count());
   TimeInt scaledOffsetInBucket = scaledTime % duration_.count();
 
   TimeInt scaledBucketStart = scaledTime - scaledOffsetInBucket;
@@ -381,8 +417,8 @@ void BucketedTimeSeries<VT, CT>::forEachBucket(Function fn) const {
   Duration timeMod = latestTime_.time_since_epoch() % duration_;
   TimeInt numFullDurations = latestTime_.time_since_epoch() / duration_;
   TimePoint durationStart(numFullDurations * duration_);
-  TimeInt scaledTime = timeMod.count() * buckets_.size();
-  size_t latestBucketIdx = scaledTime / duration_.count();
+  TimeInt scaledTime = timeMod.count() * TimeInt(buckets_.size());
+  size_t latestBucketIdx = size_t(scaledTime / duration_.count());
   TimeInt scaledOffsetInBucket = scaledTime % duration_.count();
   TimeInt scaledBucketStart = scaledTime - scaledOffsetInBucket;
   TimeInt scaledNextBucketStart = scaledBucketStart + duration_.count();
@@ -464,7 +500,7 @@ VT BucketedTimeSeries<VT, CT>::rangeAdjust(
   TimePoint intervalStart = std::max(start, bucketStart);
   TimePoint intervalEnd = std::min(end, nextBucketStart);
   return input * (intervalEnd - intervalStart) /
-    (nextBucketStart - bucketStart);
+      (nextBucketStart - bucketStart);
 }
 
 template <typename VT, typename CT>
@@ -489,4 +525,4 @@ void BucketedTimeSeries<VT, CT>::forEachBucket(
       });
 }
 
-} // folly
+} // namespace folly