/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #include <folly/stats/TimeseriesHistogram.h>
18 #include <folly/stats/TimeseriesHistogram-defs.h>
22 #include <folly/portability/GTest.h>
25 using namespace folly;
26 using std::chrono::seconds;
38 const seconds kDurations[] = {
39 seconds(60), seconds(600), seconds(3600), seconds(0)
51 const seconds kDurations[] = {
52 seconds(60), seconds(3600), seconds(0)
56 typedef std::mt19937 RandomInt32;
58 using StatsClock = folly::LegacyStatsClock<std::chrono::seconds>;
59 StatsClock::time_point mkTimePoint(int value) {
60 return StatsClock::time_point(StatsClock::duration(value));
64 TEST(TimeseriesHistogram, Percentile) {
65 RandomInt32 random(5);
66 // [10, 109], 12 buckets including above and below
68 TimeseriesHistogram<int> h(10, 10, 110,
69 MultiLevelTimeSeries<int>(
70 60, IntMTMHTS::NUM_LEVELS,
71 IntMTMHTS::kDurations));
73 EXPECT_EQ(0, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
75 EXPECT_EQ(12, h.getNumBuckets());
76 EXPECT_EQ(10, h.getBucketSize());
77 EXPECT_EQ(10, h.getMin());
78 EXPECT_EQ(110, h.getMax());
80 for (size_t i = 0; i < h.getNumBuckets(); ++i) {
81 EXPECT_EQ(4, h.getBucket(i).numLevels());
85 h.addValue(mkTimePoint(0), 0);
86 h.addValue(mkTimePoint(0), maxVal);
87 for (int i = 0; i < 98; i++) {
88 h.addValue(mkTimePoint(0), random() % maxVal);
91 h.update(mkTimePoint(1500000000));
92 // bucket 0 stores everything below min, so its minimum
93 // is the lowest possible number
94 EXPECT_EQ(std::numeric_limits<int>::min(),
95 h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
96 EXPECT_EQ(110, h.getPercentileBucketMin(99, IntMTMHTS::ALLTIME));
98 EXPECT_EQ(-2, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
99 EXPECT_EQ(-1, h.getPercentileEstimate(1, IntMTMHTS::ALLTIME));
100 EXPECT_EQ(119, h.getPercentileEstimate(99, IntMTMHTS::ALLTIME));
101 EXPECT_EQ(120, h.getPercentileEstimate(100, IntMTMHTS::ALLTIME));
105 TEST(TimeseriesHistogram, String) {
106 RandomInt32 random(5);
107 // [10, 109], 12 buckets including above and below
109 TimeseriesHistogram<int> hist(10, 10, 110,
110 MultiLevelTimeSeries<int>(
111 60, IntMTMHTS::NUM_LEVELS,
112 IntMTMHTS::kDurations));
115 hist.addValue(mkTimePoint(0), 0);
116 hist.addValue(mkTimePoint(0), maxVal);
117 for (int i = 0; i < 98; i++) {
118 hist.addValue(mkTimePoint(0), random() % maxVal);
121 hist.update(mkTimePoint(0));
123 const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
124 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
125 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
126 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
127 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
128 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
129 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
130 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
131 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
134 CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
136 for (size_t level = 0; level < hist.getNumLevels(); ++level) {
137 EXPECT_EQ(kStringValues1[level], hist.getString(level));
140 const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
141 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
142 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
143 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
144 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
145 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
146 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
147 "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
148 "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
151 CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
153 for (size_t level = 0; level < hist.getNumLevels(); ++level) {
154 EXPECT_EQ(kStringValues2[level], hist.getString(level));
159 TEST(TimeseriesHistogram, Clear) {
161 TimeseriesHistogram<int> hist(10, 0, 100,
162 MultiLevelTimeSeries<int>(
163 60, IntMTMHTS::NUM_LEVELS,
164 IntMTMHTS::kDurations));
166 for (int now = 0; now < 3600; now++) {
167 for (int i = 0; i < 100; i++) {
168 hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
175 for (size_t b = 0; b < hist.getNumBuckets(); ++b) {
176 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::MINUTE));
177 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
178 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::HOUR));
179 EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
182 for (int pct = 0; pct <= 100; pct++) {
183 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
184 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
185 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
186 EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
188 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::MINUTE));
189 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::TEN_MINUTE));
190 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::HOUR));
191 EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::ALLTIME));
197 TEST(TimeseriesHistogram, Basic) {
199 TimeseriesHistogram<int> hist(10, 0, 100,
200 MultiLevelTimeSeries<int>(
201 60, IntMTMHTS::NUM_LEVELS,
202 IntMTMHTS::kDurations));
204 for (int now = 0; now < 3600; now++) {
205 for (int i = 0; i < 100; i++) {
206 hist.addValue(mkTimePoint(now), i);
210 hist.update(mkTimePoint(3599));
211 for (int pct = 1; pct <= 100; pct++) {
212 int expected = (pct - 1) / 10 * 10;
213 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
214 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
215 IntMTMHTS::TEN_MINUTE));
216 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
217 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
220 for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
221 EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
222 EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
223 EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
224 EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
226 EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
227 EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
230 EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE));
231 EXPECT_EQ(60000, hist.count(IntMTMHTS::TEN_MINUTE));
232 EXPECT_EQ(360000, hist.count(IntMTMHTS::HOUR));
233 EXPECT_EQ(360000, hist.count(IntMTMHTS::ALLTIME));
235 // Each second we added 4950 total over 100 data points
236 EXPECT_EQ(297000, hist.sum(IntMTMHTS::MINUTE));
237 EXPECT_EQ(2970000, hist.sum(IntMTMHTS::TEN_MINUTE));
238 EXPECT_EQ(17820000, hist.sum(IntMTMHTS::HOUR));
239 EXPECT_EQ(17820000, hist.sum(IntMTMHTS::ALLTIME));
241 EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::MINUTE));
242 EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::TEN_MINUTE));
243 EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::HOUR));
244 EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::ALLTIME));
245 EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::MINUTE));
246 EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::TEN_MINUTE));
247 EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::HOUR));
248 EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::ALLTIME));
250 EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::MINUTE));
251 EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::TEN_MINUTE));
252 EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::HOUR));
253 EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::ALLTIME));
254 EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::MINUTE));
255 EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::TEN_MINUTE));
256 EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::HOUR));
257 EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::ALLTIME));
259 EXPECT_EQ(1000, hist.count(mkTimePoint(10), mkTimePoint(20)));
260 EXPECT_EQ(49500, hist.sum(mkTimePoint(10), mkTimePoint(20)));
261 EXPECT_EQ(4950, hist.rate(mkTimePoint(10), mkTimePoint(20)));
262 EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(10), mkTimePoint(20)));
264 EXPECT_EQ(200, hist.count(mkTimePoint(3550), mkTimePoint(3552)));
265 EXPECT_EQ(9900, hist.sum(mkTimePoint(3550), mkTimePoint(3552)));
266 EXPECT_EQ(4950, hist.rate(mkTimePoint(3550), mkTimePoint(3552)));
267 EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(3550), mkTimePoint(3552)));
269 EXPECT_EQ(0, hist.count(mkTimePoint(4550), mkTimePoint(4552)));
270 EXPECT_EQ(0, hist.sum(mkTimePoint(4550), mkTimePoint(4552)));
271 EXPECT_EQ(0, hist.rate(mkTimePoint(4550), mkTimePoint(4552)));
272 EXPECT_EQ(0, hist.avg<double>(mkTimePoint(4550), mkTimePoint(4552)));
278 TimeseriesHistogram<int> hist(10, 0, 100,
279 MultiLevelTimeSeries<int>(
280 60, IntMTMHTS::NUM_LEVELS,
281 IntMTMHTS::kDurations));
283 for (int now = 0; now < 3600; now++) {
284 for (int i = 0; i < 100; i++) {
285 hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
289 hist.update(mkTimePoint(3599));
290 for (int pct = 1; pct <= 100; pct++) {
291 int expected = (pct - 1) / 10 * 10;
292 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
293 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
294 IntMTMHTS::TEN_MINUTE));
295 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
296 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
299 for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
300 EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
301 EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
302 EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
303 EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
305 EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
306 EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
313 TimeseriesHistogram<int> hist(10, 0, 100,
314 MultiLevelTimeSeries<int>(
315 60, IntMTMHTS::NUM_LEVELS,
316 IntMTMHTS::kDurations));
318 for (int now = 0; now < 3600; now++) {
319 for (int i = 0; i < 50; i++) {
320 hist.addValue(mkTimePoint(now), i * 2, 2); // adds each item 2 times
324 hist.update(mkTimePoint(3599));
325 for (int pct = 1; pct <= 100; pct++) {
326 int expected = (pct - 1) / 10 * 10;
327 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
328 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
329 IntMTMHTS::TEN_MINUTE));
330 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
331 EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
334 EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
335 EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::TEN_MINUTE));
336 EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::HOUR));
337 EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::ALLTIME));
338 EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
341 hist.getBucket(hist.getNumBuckets() - 1).
342 count(IntMTMHTS::TEN_MINUTE));
343 EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
346 hist.getBucket(hist.getNumBuckets() - 1).count(
347 IntMTMHTS::ALLTIME));
349 for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
350 EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
351 EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
352 EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
353 EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
356 for (int i = 0; i < 100; ++i) {
357 hist.addValue(mkTimePoint(3599), 200 + i);
359 hist.update(mkTimePoint(3599));
361 hist.getBucket(hist.getNumBuckets() - 1).count(
362 IntMTMHTS::ALLTIME));
367 TEST(TimeseriesHistogram, QueryByInterval) {
368 TimeseriesHistogram<int> mhts(8, 8, 120,
369 MultiLevelTimeSeries<int>(
370 60, IntMHTS::NUM_LEVELS,
371 IntMHTS::kDurations));
373 mhts.update(mkTimePoint(0));
376 for (curTime = 0; curTime < 7200; curTime++) {
377 mhts.addValue(mkTimePoint(curTime), 1);
379 for (curTime = 7200; curTime < 7200 + 3540; curTime++) {
380 mhts.addValue(mkTimePoint(curTime), 10);
382 for (curTime = 7200 + 3540; curTime < 7200 + 3600; curTime++) {
383 mhts.addValue(mkTimePoint(curTime), 100);
386 mhts.update(mkTimePoint(7200 + 3600 - 1));
388 struct TimeInterval {
389 TimeInterval(int s, int e) : start(mkTimePoint(s)), end(mkTimePoint(e)) {}
391 StatsClock::time_point start;
392 StatsClock::time_point end;
394 TimeInterval intervals[12] = {
395 { curTime - 60, curTime },
396 { curTime - 3600, curTime },
397 { curTime - 7200, curTime },
398 { curTime - 3600, curTime - 60 },
399 { curTime - 7200, curTime - 60 },
400 { curTime - 7200, curTime - 3600 },
401 { curTime - 50, curTime - 20 },
402 { curTime - 3020, curTime - 20 },
403 { curTime - 7200, curTime - 20 },
404 { curTime - 3000, curTime - 1000 },
405 { curTime - 7200, curTime - 1000 },
406 { curTime - 7200, curTime - 3600 },
409 int expectedSums[12] = {
410 6000, 41400, 32400, 35400, 32129, 16200, 3000, 33600, 32308, 20000, 27899,
414 int expectedCounts[12] = {
415 60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600
418 // The first 7200 values added all fell below the histogram minimum,
419 // and went into the bucket that tracks all of the too-small values.
420 // This bucket reports a minimum value of the smallest possible integer.
421 int belowMinBucket = std::numeric_limits<int>::min();
423 int expectedValues[12][3] = {
426 { belowMinBucket, belowMinBucket, 8}, // alltime
428 { belowMinBucket, belowMinBucket, 8}, // alltime
429 { belowMinBucket, belowMinBucket, 8}, // alltime
432 { belowMinBucket, belowMinBucket, 8}, // alltime
434 { belowMinBucket, belowMinBucket, 8}, // alltime
435 { belowMinBucket, belowMinBucket, 8} // alltime
438 for (int i = 0; i < 12; i++) {
439 const auto& itv = intervals[i];
440 int s = mhts.sum(itv.start, itv.end);
441 EXPECT_EQ(expectedSums[i], s);
443 int c = mhts.count(itv.start, itv.end);
444 EXPECT_EQ(expectedCounts[i], c);
448 for (int i = 1; i <= 100; i++) {
449 EXPECT_EQ(96, mhts.getPercentileBucketMin(i, 0));
452 mhts.getPercentileBucketMin(
453 i, mkTimePoint(curTime - 60), mkTimePoint(curTime)));
456 mhts.getPercentileBucketMin(
457 i, mkTimePoint(curTime - 3540), mkTimePoint(curTime - 60)));
460 EXPECT_EQ(8, mhts.getPercentileBucketMin(1, 1));
461 EXPECT_EQ(8, mhts.getPercentileBucketMin(98, 1));
462 EXPECT_EQ(96, mhts.getPercentileBucketMin(99, 1));
463 EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 1));
465 EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(1, 2));
466 EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(66, 2));
467 EXPECT_EQ(8, mhts.getPercentileBucketMin(67, 2));
468 EXPECT_EQ(8, mhts.getPercentileBucketMin(99, 2));
469 EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 2));
471 // 0 is currently the value for bucket 0 (below min)
472 for (int i = 0; i < 12; i++) {
473 const auto& itv = intervals[i];
474 int v = mhts.getPercentileBucketMin(1, itv.start, itv.end);
475 EXPECT_EQ(expectedValues[i][0], v);
477 v = mhts.getPercentileBucketMin(50, itv.start, itv.end);
478 EXPECT_EQ(expectedValues[i][1], v);
480 v = mhts.getPercentileBucketMin(99, itv.start, itv.end);
481 EXPECT_EQ(expectedValues[i][2], v);
484 for (int i = 0; i < 12; i++) {
485 const auto& itv = intervals[i];
486 // Some of the older intervals that fall in the alltime bucket
487 // are off by 1 or 2 in their estimated counts.
488 size_t tolerance = 0;
489 if (itv.start <= mkTimePoint(curTime - 7200)) {
491 } else if (itv.start <= mkTimePoint(curTime - 3000)) {
494 size_t actualCount = (itv.end - itv.start).count();
495 size_t estimatedCount = mhts.count(itv.start, itv.end);
496 EXPECT_GE(actualCount, estimatedCount);
497 EXPECT_LE(actualCount - tolerance, estimatedCount);
501 TEST(TimeseriesHistogram, SingleUniqueValue) {
502 int values[] = {-1, 0, 500, 1000, 1500};
503 for (int ii = 0; ii < 5; ++ii) {
504 int value = values[ii];
505 TimeseriesHistogram<int> h(10, 0, 1000,
506 MultiLevelTimeSeries<int>(
507 60, IntMTMHTS::NUM_LEVELS,
508 IntMTMHTS::kDurations));
510 const int kNumIters = 1000;
511 for (int jj = 0; jj < kNumIters; ++jj) {
512 h.addValue(mkTimePoint(1), value);
514 h.update(mkTimePoint(1));
515 // since we've only added one unique value, all percentiles should
517 EXPECT_EQ(h.getPercentileEstimate(10, 0), value);
518 EXPECT_EQ(h.getPercentileEstimate(50, 0), value);
519 EXPECT_EQ(h.getPercentileEstimate(99, 0), value);
521 // Things get trickier if there are multiple unique values.
522 const int kNewValue = 750;
523 for (int kk = 0; kk < 2*kNumIters; ++kk) {
524 h.addValue(mkTimePoint(1), kNewValue);
526 h.update(mkTimePoint(1));
527 EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue+5, 5);
528 if (value >= 0 && value <= 1000) {
529 // only do further testing if value is within our bucket range,
530 // else estimates can be wildly off
531 if (kNewValue > value) {
532 EXPECT_NEAR(h.getPercentileEstimate(10, 0), value+5, 5);
533 EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue+5, 5);
535 EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue+5, 5);
536 EXPECT_NEAR(h.getPercentileEstimate(99, 0), value+5, 5);