---
title: MQESRNN model
keywords: fastai
sidebar: home_sidebar
summary: "API details."
description: "API details."
nb_path: "nbs/models_esrnn__mqesrnn.ipynb"
---
```python
import numpy as np
import pandas as pd
import pytorch_lightning as pl
import matplotlib.pyplot as plt
import torch as t

from nixtlats.data.tsdataset import TimeSeriesDataset
from nixtlats.data.tsloader import TimeSeriesLoader
# Module path inferred from nb_path; adjust if MQESRNN lives elsewhere in your install.
from nixtlats.models.esrnn.mqesrnn import MQESRNN
```
```python
# Synthetic panel: 1,000 series ('unique_id'), each with 100 daily observations.
n_ds = 100
n_ts = 1_000
output_size = 2

uids = [f'uid_{i + 1}' for i in range(n_ts)]
dss = pd.date_range(end='2020-01-01', periods=n_ds)

Y_df = pd.DataFrame({'unique_id': np.repeat(uids, n_ds), 'ds': np.tile(dss, n_ts)})
np.random.seed(10)
Y_df['y'] = Y_df.groupby('unique_id').transform(lambda x: np.random.uniform(1, 100, size=len(x)))
Y_df = Y_df.sort_values(['unique_id', 'ds'])
```
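The resulting long-format frame has one row per series-timestamp pair. A quick sanity check on its shape and columns (a minimal sketch, not part of the original notebook):

```python
# Expect n_ts * n_ds rows and exactly the three columns the dataset relies on.
assert Y_df.shape == (n_ts * n_ds, 3)
assert list(Y_df.columns) == ['unique_id', 'ds', 'y']
```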
```python
# ds_in_test holds out the last 3 * output_size timestamps of each series from
# training; the validation dataset keeps the full history.
train_dataset = TimeSeriesDataset(Y_df=Y_df,
                                  ds_in_test=3 * output_size,
                                  is_test=False,
                                  input_size=7 * output_size,
                                  output_size=output_size,
                                  verbose=True)

valid_dataset = TimeSeriesDataset(Y_df=Y_df,
                                  input_size=7 * output_size,
                                  output_size=output_size,
                                  verbose=False)
```
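The dataset exposes the panel dimensions that the model constructor consumes below (`n_series`, static features `n_s`, exogenous features `n_x`). Printing them is a quick way to confirm the panel was parsed as intended; for this synthetic frame with no extra columns, `n_s` and `n_x` should both be 0 (an assumption about this particular setup, not part of the original notebook):

```python
# Panel dimensions reused by the MQESRNN constructor below.
print(f'n_series={train_dataset.n_series}, n_s={train_dataset.n_s}, n_x={train_dataset.n_x}')
```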
```python
train_loader = TimeSeriesLoader(dataset=train_dataset,
                                batch_size=32,
                                eq_batch_size=True,
                                shuffle=True)

valid_loader = TimeSeriesLoader(dataset=valid_dataset,
                                batch_size=32,
                                shuffle=False)
```
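A loader-agnostic sanity check can be useful before training. The sketch below only assumes `train_loader` is iterable (it is later passed to `Trainer.fit` as a dataloader) and does not rely on a particular batch schema:

```python
# Pull a single batch and report its structure without assuming key names.
batch = next(iter(train_loader))
print(type(batch))
if isinstance(batch, dict):
    for name, value in batch.items():
        print(name, getattr(value, 'shape', type(value)))
```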
```python
model = MQESRNN(n_series=train_dataset.n_series,
                n_s=train_dataset.n_s,
                n_x=train_dataset.n_x,
                # sample_freq=dataset.sample_freq,
                sample_freq=1,
                input_size=7 * output_size,
                output_size=output_size,
                learning_rate=1e-2,
                lr_scheduler_step_size=30,
                lr_decay=0.1,
                gradient_eps=1e-8,
                gradient_clipping_threshold=10,
                rnn_weight_decay=0,
                noise_std=0.001,
                testing_percentiles=[30, 50, 70, 90],
                training_percentiles=[30, 50, 70, 90],
                es_component='median_residual',
                cell_type='LSTM',
                state_hsize=100,
                dilations=[[1, 2], [4, 8]],
                add_nl_layer=False,
                loss='MQ',
                val_loss='MQ')
```
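With `loss='MQ'` the network produces one output per level in `training_percentiles` and is trained with a multi-quantile (pinball) objective. As a sketch of the standard definition (the library's exact masking and normalization may differ), for quantile levels $Q = \{0.3, 0.5, 0.7, 0.9\}$ and errors $y_i - \hat{y}_{i,q}$:

```latex
% Multi-quantile (pinball) loss averaged over levels and observations.
\mathrm{MQ}(y, \hat{y}) = \frac{1}{|Q|} \sum_{q \in Q} \frac{1}{N} \sum_{i=1}^{N}
  \max\!\bigl( q \, (y_i - \hat{y}_{i,q}), \; (q - 1) \, (y_i - \hat{y}_{i,q}) \bigr)
```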
```python
trainer = pl.Trainer(max_epochs=1, progress_bar_refresh_rate=5, deterministic=True)
trainer.fit(model, train_loader, valid_loader)
```
```python
outputs = trainer.predict(model, valid_loader)

# trainer.predict returns one (y_true, y_hat, mask) tuple per batch.
y_true, y_hat, mask = zip(*outputs)
y_true = t.cat(y_true).numpy()
y_hat = t.cat(y_hat).numpy()
mask = t.cat(mask).numpy()

# Keep the last 1,000 entries and the final step along the second axis.
y_true = y_true[-1000:, -1]
y_hat = y_hat[-1000:, -1]

plt.plot(y_true.flatten(), alpha=0.5, label='y')
for idx, p in enumerate([30, 50, 70, 90]):
    y_p = y_hat[:, :, idx]
    plt.plot(y_p.flatten(), alpha=0.5, label=f'p{p}')
    # Empirical coverage of the p-th percentile forecast.
    print(f'calibration p{p}: ', (y_true.flatten() <= y_p.flatten()).mean())
plt.legend()
```
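The coverage printed above only checks calibration: the fraction of observations falling below each predicted percentile should be close to $p/100$. A per-percentile pinball loss additionally rewards sharpness. The helper below is a minimal NumPy sketch over the arrays already built above, not a nixtlats utility:

```python
def pinball_loss(y, y_q, q):
    """Average pinball loss of predictions y_q at quantile level q in (0, 1)."""
    diff = y - y_q
    return np.maximum(q * diff, (q - 1) * diff).mean()

for idx, p in enumerate([30, 50, 70, 90]):
    loss_p = pinball_loss(y_true.flatten(), y_hat[:, :, idx].flatten(), p / 100)
    print(f'pinball loss p{p}: {loss_p:.3f}')
```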