2017-01-07 04:21:34 +01:00
|
|
|
from math import sqrt
|
|
|
|
from random import gauss, random, seed
|
2017-03-03 19:01:52 +01:00
|
|
|
from typing import List
|
2017-01-07 04:21:34 +01:00
|
|
|
|
2017-11-16 00:55:49 +01:00
|
|
|
from analytics.lib.counts import CountStat
|
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
|
2017-11-22 07:15:46 +01:00
|
|
|
def generate_time_series_data(days: int = 100, business_hours_base: float = 10,
                              non_business_hours_base: float = 10, growth: float = 1,
                              autocorrelation: float = 0, spikiness: float = 1,
                              holiday_rate: float = 0, frequency: str = CountStat.DAY,
                              partial_sum: bool = False, random_seed: int = 26) -> List[int]:
    """
    Generate semi-realistic looking time series data for testing analytics graphs.

    days -- Number of days of data. Is the number of data points generated if
        frequency is CountStat.DAY.
    business_hours_base -- Average value during a business hour (or day) at beginning of
        time series, if frequency is CountStat.HOUR (CountStat.DAY, respectively).
    non_business_hours_base -- The above, for non-business hours/days.
    growth -- Ratio between average values at end of time series and beginning of time series.
    autocorrelation -- Makes neighboring data points look more like each other. At 0 each
        point is unaffected by the previous point, and at 1 each point is a deterministic
        function of the previous point.
    spikiness -- 0 means no randomness (other than holiday_rate), higher values increase
        the variance.
    holiday_rate -- Fraction of days randomly set to 0, largely for testing how we handle 0s.
    frequency -- Should be CountStat.HOUR or CountStat.DAY.
    partial_sum -- If True, return partial sum of the series.
    random_seed -- Seed for random number generator.

    Raises AssertionError if frequency is not CountStat.HOUR/CountStat.DAY or
    fewer than 2 data points would be generated.
    """
    # Seed before ANY random draw. Previously seeding happened after the
    # holiday lists were built with random(), so holiday placement was not
    # reproducible for a given random_seed, contradicting the docstring.
    seed(random_seed)
    if frequency == CountStat.HOUR:
        length = days * 24
        # One week of hourly seasonality; hours 0-7 of days 0-4 (Mon-Fri)
        # are treated as business hours.
        seasonality = [non_business_hours_base] * 24 * 7
        for day in range(5):
            for hour in range(8):
                seasonality[24 * day + hour] = business_hours_base
        holidays = []
        for _ in range(days):
            # A holiday zeroes out all 24 hours of that day.
            holidays.extend([random() < holiday_rate] * 24)
    elif frequency == CountStat.DAY:
        length = days
        # 5 weekdays (8 business-hours + 16 non-business-hours worth of
        # value) followed by 2 weekend days.
        seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + \
            [24 * non_business_hours_base] * 2
        holidays = [random() < holiday_rate for _ in range(days)]
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")
    if length < 2:
        raise AssertionError("Must be generating at least 2 data points. "
                             f"Currently generating {length}")
    # Per-step growth factor chosen so the last point's baseline is `growth`
    # times the first point's baseline.
    growth_base = growth ** (1. / (length - 1))
    values_no_noise = [seasonality[i % len(seasonality)] * (growth_base ** i)
                       for i in range(length)]

    # AR(1)-style noise: each scalar blends the previous scalar with fresh
    # Gaussian noise, weighted by autocorrelation.
    noise_scalars = [gauss(0, 1)]
    for i in range(1, length):
        noise_scalars.append(noise_scalars[-1] * autocorrelation
                             + gauss(0, 1) * (1 - autocorrelation))

    # Noise amplitude scales with sqrt of the baseline value (Poisson-like),
    # further scaled by spikiness; holidays force the value to 0.
    values = [0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
              for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)]
    if partial_sum:
        for i in range(1, length):
            values[i] = values[i - 1] + values[i]
    # Clamp negatives (possible when noise outweighs a small baseline) to 0.
    return [max(v, 0) for v in values]
|