---
title: Experiment Utils
keywords: fastai
sidebar: home_sidebar
summary: "Set of functions to easily perform experiments."
description: "Set of functions to easily perform experiments."
nb_path: "nbs/experiments__utils.ipynb"
---
{% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### get_mask_dfs [source]

`get_mask_dfs(Y_df, ds_in_val, ds_in_test)`
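For reference, a minimal usage sketch. The three-way return of train/validation/test mask DataFrames is an assumption (check [source]), and `Y_df` is assumed to carry the library's standard `unique_id`, `ds`, `y` columns.

```python
# Hedged sketch: deterministically reserve the last ds_in_val + ds_in_test
# timestamps of each series for validation and test.
train_mask_df, val_mask_df, test_mask_df = get_mask_dfs(
    Y_df=Y_df,
    ds_in_val=728 * 24,  # two years of hourly timestamps for validation
    ds_in_test=0,        # no test block in this sketch
)
```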

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### get_random_mask_dfs [source]

`get_random_mask_dfs(Y_df, ds_in_test, n_val_windows, n_ds_val_window, n_uids, freq)`

Generates train, test, and random validation masks. The train mask is built by first excluding the last `ds_in_test` timestamps.

The validation mask: 1) samples `n_uids` unique ids, 2) creates `n_val_windows` windows of `n_ds_val_window` timestamps each. A call sketch follows the parameter list below.

Parameters

- `ds_in_test`: int. Number of ds in test.
- `n_uids`: int. Number of unique ids in validation.
- `n_val_windows`: int. Number of windows for validation.
- `n_ds_val_window`: int. Number of ds in each validation window.
- `periods`: int. `ds_in_test` multiplier.
- `freq`: str. String that determines datestamp frequency, used in random window creation.
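A hedged call sketch; as above, the train/validation/test mask triple returned is an assumption and the parameter values are illustrative only:

```python
# Hedged sketch: validation built from randomly placed daily windows
# rather than one contiguous block.
train_mask_df, val_mask_df, test_mask_df = get_random_mask_dfs(
    Y_df=Y_df,
    ds_in_test=7 * 24,    # hold out the final week for test
    n_val_windows=4,      # four validation windows per sampled series
    n_ds_val_window=24,   # each window covers one day of hourly data
    n_uids=1,             # number of series sampled into validation
    freq='H',             # frequency used to construct the random windows
)
```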

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### scale_data [source]

`scale_data(Y_df, X_df, mask_df, normalizer_y, normalizer_x)`
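A hedged sketch of the scaling step. The return signature (scaled frames plus a fitted target scaler, here named `scaler_y`) is an assumption; check [source] for the exact contract.

```python
# Hedged sketch: fit normalizers on the observations flagged by mask_df
# and return the scaled frames. `scaler_y` is an assumed name for the
# fitted target scaler needed later to invert the transformation.
Y_scaled_df, X_scaled_df, scaler_y = scale_data(
    Y_df=Y_df, X_df=X_df, mask_df=train_mask_df,
    normalizer_y=None,      # leave the target unscaled
    normalizer_x='median',  # median-normalize exogenous features
)
```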

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### create_datasets [source]

`create_datasets(mc, S_df, Y_df, X_df, f_cols, ds_in_test, ds_in_val, n_uids, n_val_windows, freq, is_val_random)`
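A hedged sketch that builds the dataset objects straight from a model configuration `mc` like the ones defined in the examples below; the four-way return is an assumption.

```python
# Hedged sketch: build dataset objects for training, validation and test.
# n_uids / n_val_windows / freq are only consumed when is_val_random=True.
train_dataset, val_dataset, test_dataset, scaler_y = create_datasets(
    mc=mc, S_df=S_df, Y_df=Y_df, X_df=X_df, f_cols=[],
    ds_in_test=0, ds_in_val=728 * 24,
    n_uids=None, n_val_windows=None, freq=None,
    is_val_random=False,
)
```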

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### instantiate_loaders [source]

`instantiate_loaders(mc, train_dataset, val_dataset, test_dataset)`
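The loaders then wrap those datasets (a minimal sketch; batch sizing such as `batch_size` and `n_series_per_batch` is read from `mc`):

```python
# Hedged sketch: wrap the datasets into iterable loaders whose batch
# parameters come from the model configuration mc.
train_loader, val_loader, test_loader = instantiate_loaders(
    mc=mc,
    train_dataset=train_dataset,
    val_dataset=val_dataset,
    test_dataset=test_dataset,
)
```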

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### instantiate_nbeats [source]

`instantiate_nbeats(mc)`

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### instantiate_esrnn [source]

`instantiate_esrnn(mc)`

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### instantiate_mqesrnn [source]

`instantiate_mqesrnn(mc)`

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### instantiate_model [source]

`instantiate_model(mc)`
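`instantiate_model` is a thin dispatcher over the three constructors above, keyed on `mc['model']`. A sketch, assuming a configuration dict `mc` like the ones defined in the examples below:

```python
# Hedged sketch: dispatch on the 'model' key of the configuration.
mc['model'] = 'nbeats'
model = instantiate_model(mc)   # behaves like instantiate_nbeats(mc)
```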

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### model_fit_predict [source]

`model_fit_predict(mc, S_df, Y_df, X_df, f_cols, ds_in_test, ds_in_val, n_uids, n_val_windows, freq, is_val_random)`

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### evaluate_model [source]

`evaluate_model(mc, loss_function, S_df, Y_df, X_df, f_cols, ds_in_test, ds_in_val, n_uids, n_val_windows, freq, is_val_random, loss_kwargs)`

{% endraw %} {% raw %}
{% endraw %} {% raw %}

#### hyperopt_tunning [source]

`hyperopt_tunning(space, hyperopt_max_evals, loss_function, S_df, Y_df, X_df, f_cols, ds_in_val, n_uids, n_val_windows, freq, is_val_random, save_trials=False, loss_kwargs=None)`

{% endraw %} {% raw %}
{% endraw %}

## Experiment Utils Examples

{% raw %}
import numpy as np
import torch as t
from hyperopt import hp

from nixtlats.losses.numpy import mae, mape, smape, rmse, pinball_loss
{% endraw %} {% raw %}
device = 'cuda' if t.cuda.is_available() else 'cpu'

nbeats_space = {# Architecture parameters
               'model':'nbeats',
               'mode': 'simple',
               'n_time_in': hp.choice('n_time_in', [7*24]),
               'n_time_out': hp.choice('n_time_out', [24]),
               'n_x_hidden': hp.quniform('n_x_hidden', 1, 10, 1),
               'n_s_hidden': hp.choice('n_s_hidden', [0]),
               'shared_weights': hp.choice('shared_weights', [False]),
               'activation': hp.choice('activation', ['SELU']),
               'initialization':  hp.choice('initialization', ['glorot_normal','he_normal']),
               'stack_types': hp.choice('stack_types', [2*['identity'],
                                                        1*['identity']+1*['exogenous_tcn'],
                                                        1*['exogenous_tcn']+1*['identity'] ]),
               'n_blocks': hp.choice('n_blocks', [ [1, 1] ]),
               'n_layers': hp.choice('n_layers', [ [2, 2] ]),
               'n_hidden': hp.choice('n_hidden', [ 256 ]),
               'n_harmonics': hp.choice('n_harmonics', [1]),
               'n_polynomials': hp.choice('n_polynomials', [2]),
               # Regularization and optimization parameters
               'batch_normalization': hp.choice('batch_normalization', [False]),
               'dropout_prob_theta': hp.uniform('dropout_prob_theta', 0, 0.5),
               'dropout_prob_exogenous': hp.uniform('dropout_prob_exogenous', 0, 0.5),
               'learning_rate': hp.loguniform('learning_rate', np.log(5e-4), np.log(0.001)),
               'lr_decay': hp.uniform('lr_decay', 0.3, 0.5),
               'lr_decay_step_size': hp.choice('lr_decay_step_size', [100]), 
               'weight_decay': hp.loguniform('weight_decay', np.log(5e-5), np.log(5e-3)),
               'max_epochs': hp.choice('max_epochs', [10]), #'n_iterations': hp.choice('n_iterations', [10])
               'max_steps': hp.choice('max_steps', [None]),
               'early_stop_patience': hp.choice('early_stop_patience', [16]),
               'eval_freq': hp.choice('eval_freq', [50]),
               'n_val_weeks': hp.choice('n_val_weeks', [52*2]),
               'loss_train': hp.choice('loss', ['MAE']),
               'loss_hypar': hp.choice('loss_hypar', [0.5]),                
               'loss_valid': hp.choice('loss_valid', ['MAE']), #[args.val_loss]),
               'l1_theta': hp.choice('l1_theta', [0]),
               # Data parameters
               'len_sample_chunks': hp.choice('len_sample_chunks', [None]),
               'normalizer_y': hp.choice('normalizer_y', [None]),
               'normalizer_x': hp.choice('normalizer_x', ['median']),
               'window_sampling_limit': hp.choice('window_sampling_limit', [100_000]),
               'complete_inputs': hp.choice('complete_inputs', [False]),
               'complete_sample': hp.choice('complete_sample', [False]),                
               'frequency': hp.choice('frequency', ['H']),
               'seasonality': hp.choice('seasonality', [24]),      
               'idx_to_sample_freq': hp.choice('idx_to_sample_freq', [24]),
               'val_idx_to_sample_freq': hp.choice('val_idx_to_sample_freq', [24]),
               'batch_size': hp.choice('batch_size', [256]),
               'n_series_per_batch': hp.choice('n_series_per_batch', [1]),
               'random_seed': hp.quniform('random_seed', 10, 20, 1),
               'device': hp.choice('device', [device])}

mc = {'model':'nbeats',
      # Architecture parameters
      'n_time_in': 7*24,
      'n_time_out': 24,
      'n_x_hidden': 3,
      'n_s_hidden': 0,
      'shared_weights': False,
      'activation': 'SELU',
      'initialization': 'he_normal',
      'stack_types': ['exogenous_tcn']+1*['identity'],
      'n_blocks': [1, 1],
      'n_layers': [2, 2],
      'n_hidden': 364,
      'n_polynomials': 2,
      'n_harmonics': 1,
      # Regularization and optimization parameters
      'max_epochs': 10, #'n_iterations': 100,
      'max_steps': None,      
      'early_stop_patience': 8,
      'batch_normalization': False,
      'dropout_prob_theta': 0.2,
      'learning_rate': 0.0005, #0.002,
      'lr_decay': 0.64,
      'lr_decay_step_size': 100,
      'weight_decay': 0.00015,
      'eval_freq': 50,
      'n_val_weeks': 52*2,
      'loss_train': 'PINBALL',
      'loss_hypar': 0.5, #0.49,
      'loss_valid': 'MAE',
      'l1_theta': 0,
      # Data parameters
      'normalizer_y': None,
      'normalizer_x': 'median',
      'window_sampling_limit': 100_000,
      'complete_inputs': False,
      'frequency':'H',
      'seasonality': 24,
      'idx_to_sample_freq': 24,
      'val_idx_to_sample_freq': 24,
      'batch_size': 256,
      'n_series_per_batch': 1,
      'random_seed': 10,
      'device': 'cpu'}
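Because `nbeats_space` is a plain hyperopt search space, a quick sanity check is to draw one random configuration from it with hyperopt's stochastic sampler:

```python
from hyperopt.pyll.stochastic import sample

# Draw one random configuration from the search space; useful for
# checking that every hyperparameter samples to a sensible value.
example_mc = sample(nbeats_space)
print(example_mc['learning_rate'], example_mc['stack_types'])
```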
{% endraw %} {% raw %}
esrnn_space = {'model': hp.choice('model', ['esrnn']),
               'mode': 'full',
               # Architecture parameters
               'n_time_in': hp.choice('n_time_in', [7*24]),
               'n_time_out': hp.choice('n_time_out', [24]),
               'dilations': hp.choice('dilations', [ [[1, 2]], [[1,2], [7, 14]] ]),
               'es_component': hp.choice('es_component', ['multiplicative']),
               'cell_type': hp.choice('cell_type', ['LSTM']),
               'state_hsize': hp.quniform('state_hsize', 10, 100, 10),
               'add_nl_layer': hp.choice('add_nl_layer', [True, False]),
               'seasonality': hp.choice('seasonality', [ [24] ]),
               # Regularization and optimization parameters
               'max_epochs':hp.choice('max_epochs', [10]),
               'max_steps':hp.choice('max_steps', [None]),
               'early_stop_patience':hp.choice('early_stop_patience', [10]),
               'eval_freq': hp.choice('eval_freq', [10]),
               'batch_size': hp.choice('batch_size', [32]),
               'learning_rate': hp.loguniform('learning_rate', np.log(5e-4), np.log(0.01)),
               'lr_decay': hp.quniform('lr_decay', 0.5, 0.8, 0.1),
               'lr_decay_step_size': hp.choice('lr_decay_step_size', [100]), 
               'per_series_lr_multip': hp.choice('per_series_lr_multip', [0.5, 1.0, 1.5, 2.0, 3.0]),
               'gradient_eps': hp.choice('gradient_eps', [1e-8]),
               'gradient_clipping_threshold': hp.choice('gradient_clipping_threshold', [10, 50]),
               'rnn_weight_decay': hp.choice('rnn_weight_decay', [0, 0.0005, 0.005]),
               'noise_std': hp.loguniform('noise_std', np.log(0.0001), np.log(0.001)),
               'level_variability_penalty': hp.quniform('level_variability_penalty', 0, 100, 10),
               'testing_percentile': hp.choice('testing_percentile', [50]),
               'training_percentile': hp.choice('training_percentile', [48, 49, 50, 51]),
               'random_seed': hp.quniform('random_seed', 1, 1000, 1),
               'loss_train': hp.choice('loss_train', ['SMYL']),
               'loss_valid': hp.choice('loss_valid', ['MAE']),
               # Data parameters
               'len_sample_chunks': hp.choice('len_sample_chunks', [7*3*24]),
               'window_sampling_limit': hp.choice('window_sampling_limit', [500_000]),
               'complete_inputs': hp.choice('complete_inputs', [True]),
               'complete_sample': hp.choice('complete_sample', [True]),
               'sample_freq': hp.choice('sample_freq', [24]),
               'val_sample_freq': hp.choice('val_sample_freq', [24]),
               'n_series_per_batch': hp.choice('n_series_per_batch', [1]),
               'normalizer_y': hp.choice('normalizer_y', [None]),
               'normalizer_x': hp.choice('normalizer_x',  [None])}

mc = {'model':'esrnn',
      'mode': 'full',
      # Architecture parameters
      'n_series': 1,
      'n_time_in': 7*24,
      'n_time_out': 24,
      'n_x': 1,
      'n_s': 1,
      'dilations': [[1,2], [7]],
      'es_component': 'multiplicative',
      'cell_type': 'LSTM',
      'state_hsize': 50,
      'add_nl_layer': False,
      'seasonality': [24],
      # Regularization and optimization parameters
      'max_epochs': 10, #'n_iterations': 100,
      'max_steps': None,
      'early_stop_patience': 10,
      'eval_freq': 10,
      'batch_size': 32,
      'eq_batch_size': False,
      'learning_rate': 0.0005,
      'lr_decay': 0.8,
      'lr_decay_step_size': 100,
      'per_series_lr_multip': 1.5,
      'gradient_eps': 1e-8, 
      'gradient_clipping_threshold': 20,
      'rnn_weight_decay': 0.0,
      'noise_std': 0.0005,
      'level_variability_penalty': 10,
      'testing_percentile': 50,
      'training_percentile': 50,
      'random_seed': 1,
      'loss_train': 'SMYL',
      'loss_valid': 'MAE',
      # Data parameters
      'len_sample_chunks': 7*4*24,
      'window_sampling_limit': 500_000,
      'complete_inputs': True,
      'sample_freq': 24,
      'val_idx_to_sample_freq': 24,
      'n_series_per_batch': 1,
      'normalizer_y': None,
      'normalizer_x': None}
{% endraw %} {% raw %}
model = instantiate_esrnn(mc)
{% endraw %} {% raw %}
from nixtlats.data.datasets.epf import EPF, EPFInfo
import matplotlib.pyplot as plt

dataset = ['NP']

Y_df, X_df, S_df = EPF.load_groups(directory='data', groups=dataset)

X_df = X_df[['unique_id', 'ds', 'week_day']]
Y_min = Y_df.y.min()
#Y_df.y = Y_df.y - Y_min + 20

plt.plot(Y_df.y.values)
plt.show()
{% endraw %} {% raw %}
# backpropagation through time is slow
# result = evaluate_model(loss_function=mae, mc=mc, 
#                         S_df=S_df, Y_df=Y_df, X_df=X_df, f_cols=[],
#                         ds_in_test=0, ds_in_val=728*24,
#                         n_uids=None, n_val_windows=None, freq=None,
#                         is_val_random=False, loss_kwargs={})
# result
{% endraw %} {% raw %}
# plt.plot(Y_df['y'][-728*24:].values)
{% endraw %} {% raw %}
trials = hyperopt_tunning(space=nbeats_space, hyperopt_max_evals=2, loss_function=mae,
                          S_df=S_df, Y_df=Y_df, X_df=X_df, f_cols=[],
                          ds_in_val=728*24, n_uids=None, n_val_windows=None, freq=None,
                          is_val_random=False, loss_kwargs={})
  0%|          | 0/2 [00:00<?, ?trial/s, best loss=?]
INFO:hyperopt.tpe:build_posterior_wrapper took 0.012577 seconds
INFO:hyperopt.tpe:TPE using 0 trials
===============================================      

activation                                     SELU  
batch_normalization                           False
batch_size                                      256
complete_inputs                               False
complete_sample                               False
device                                         cuda
dropout_prob_exogenous                     0.246426
dropout_prob_theta                         0.044532
early_stop_patience                              16
eval_freq                                        50
frequency                                         H
idx_to_sample_freq                               24
initialization                            he_normal
l1_theta                                          0
learning_rate                               0.00059
len_sample_chunks                              None
loss_hypar                                      0.5
loss_train                                      MAE
loss_valid                                      MAE
lr_decay                                   0.472903
lr_decay_step_size                              100
max_epochs                                       10
max_steps                                      None
mode                                         simple
model                                        nbeats
n_blocks                                     (1, 1)
n_harmonics                                       1
n_hidden                                        256
n_layers                                     (2, 2)
n_polynomials                                     2
n_s_hidden                                        0
n_series_per_batch                                1
n_time_in                                       168
n_time_out                                       24
n_val_weeks                                     104
n_x_hidden                                      5.0
normalizer_x                                 median
normalizer_y                                   None
random_seed                                    16.0
seasonality                                      24
shared_weights                                False
stack_types               (exogenous_tcn, identity)
val_idx_to_sample_freq                           24
weight_decay                               0.000058
window_sampling_limit                        100000
dtype: object
===============================================      

  0%|          | 0/2 [00:00<?, ?trial/s, best loss=?]
INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2016-12-27 2018-12-24 23:00:00
          1           2013-01-01 2016-12-26 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=66.67, 	34944 time stamps 
Outsample percentage=33.33, 	17472 time stamps 

INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2013-01-01 2016-12-26 23:00:00
          1           2016-12-27 2018-12-24 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=33.33, 	17472 time stamps 
Outsample percentage=66.67, 	34944 time stamps 

INFO:root:Train Validation splits

INFO:root:                              ds                    
                             min                 max
unique_id sample_mask                               
NP        0           2013-01-01 2018-12-24 23:00:00
INFO:root:
Total data 			52416 time stamps 
Available percentage=100.0, 	52416 time stamps 
Insample  percentage=0.0, 	0 time stamps 
Outsample percentage=100.0, 	52416 time stamps 

GPU available: True, used: False
TPU available: False, using: 0 TPU cores
/home/ubuntu/anaconda3/envs/nixtla/lib/python3.7/site-packages/pytorch_lightning/utilities/distributed.py:69: UserWarning: GPU available but not used. Set the gpus flag in your trainer `Trainer(gpus=1)` or script `--gpus=1`.
  warnings.warn(*args, **kwargs)


  | Name  | Type    | Params
----------------------------------
0 | model | _NBEATS | 368 K 
----------------------------------
368 K     Trainable params
0         Non-trainable params
368 K     Total params
1.475     Total estimated model params size (MB)
/home/ubuntu/anaconda3/envs/nixtla/lib/python3.7/site-packages/pytorch_lightning/utilities/distributed.py:69: UserWarning: The dataloader, val dataloader 0, does not have many workers which may be a bottleneck. Consider increasing the value of the `num_workers` argument` (try 8 which is the number of cpus on this machine) in the `DataLoader` init to improve performance.
  warnings.warn(*args, **kwargs)

/home/ubuntu/anaconda3/envs/nixtla/lib/python3.7/site-packages/pytorch_lightning/utilities/distributed.py:69: UserWarning: The dataloader, train dataloader, does not have many workers which may be a bottleneck. Consider increasing the value of the `num_workers` argument` (try 8 which is the number of cpus on this machine) in the `DataLoader` init to improve performance.
  warnings.warn(*args, **kwargs)

Metric val_loss improved. New best score: 12.557
Metric val_loss improved by 6.057 >= min_delta = 0.0001. New best score: 6.500
Metric val_loss improved by 1.609 >= min_delta = 0.0001. New best score: 4.891
Metric val_loss improved by 0.366 >= min_delta = 0.0001. New best score: 4.525
Metric val_loss improved by 1.341 >= min_delta = 0.0001. New best score: 3.184
Metric val_loss improved by 0.115 >= min_delta = 0.0001. New best score: 3.069
/home/ubuntu/anaconda3/envs/nixtla/lib/python3.7/site-packages/pytorch_lightning/utilities/distributed.py:69: UserWarning: The dataloader, predict dataloader 0, does not have many workers which may be a bottleneck. Consider increasing the value of the `num_workers` argument` (try 8 which is the number of cpus on this machine) in the `DataLoader` init to improve performance.
  warnings.warn(*args, **kwargs)

y_true.shape (#n_series, #n_fcds, #lt): (728,)       
y_hat.shape (#n_series, #n_fcds, #lt): (728,)        
 50%|█████     | 1/2 [00:02<00:02,  2.67s/trial, best loss: 3.701502561569214]
INFO:hyperopt.tpe:build_posterior_wrapper took 0.013880 seconds
INFO:hyperopt.tpe:TPE using 1/1 trials with best loss 3.701503
===============================================                               

activation                                SELU                                
batch_normalization                      False
batch_size                                 256
complete_inputs                          False
complete_sample                          False
device                                    cuda
dropout_prob_exogenous                0.365531
dropout_prob_theta                    0.241073
early_stop_patience                         16
eval_freq                                   50
frequency                                    H
idx_to_sample_freq                          24
initialization                       he_normal
l1_theta                                     0
learning_rate                         0.000853
len_sample_chunks                         None
loss_hypar                                 0.5
loss_train                                 MAE
loss_valid                                 MAE
lr_decay                              0.355323
lr_decay_step_size                         100
max_epochs                                  10
max_steps                                 None
mode                                    simple
model                                   nbeats
n_blocks                                (1, 1)
n_harmonics                                  1
n_hidden                                   256
n_layers                                (2, 2)
n_polynomials                                2
n_s_hidden                                   0
n_series_per_batch                           1
n_time_in                                  168
n_time_out                                  24
n_val_weeks                                104
n_x_hidden                                 6.0
normalizer_x                            median
normalizer_y                              None
random_seed                               18.0
seasonality                                 24
shared_weights                           False
stack_types               (identity, identity)
val_idx_to_sample_freq                      24
weight_decay                          0.000123
window_sampling_limit                   100000
dtype: object
===============================================                               

 50%|█████     | 1/2 [00:02<00:02,  2.67s/trial, best loss: 3.701502561569214]
  | Name  | Type    | Params
----------------------------------
0 | model | _NBEATS | 415 K 
----------------------------------
415 K     Trainable params
0         Non-trainable params
415 K     Total params
1.660     Total estimated model params size (MB)
Metric val_loss improved. New best score: 4.409
Metric val_loss improved by 0.711 >= min_delta = 0.0001. New best score: 3.698
Metric val_loss improved by 0.311 >= min_delta = 0.0001. New best score: 3.388
Metric val_loss improved by 0.352 >= min_delta = 0.0001. New best score: 3.035
Metric val_loss improved by 0.003 >= min_delta = 0.0001. New best score: 3.032
Metric val_loss improved by 0.147 >= min_delta = 0.0001. New best score: 2.885
Metric val_loss improved by 0.074 >= min_delta = 0.0001. New best score: 2.811
y_true.shape (#n_series, #n_fcds, #lt): (728,)                                
y_hat.shape (#n_series, #n_fcds, #lt): (728,)                                 
100%|██████████| 2/2 [00:04<00:00,  2.42s/trial, best loss: 1.9704331159591675]
{% endraw %} {% raw %}
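`hyperopt_tunning` returns a standard hyperopt `Trials` object, so the winning run can be read back through its `best_trial` accessor. The `'result'` payload stores the loss, the sampled configuration `mc`, and the forecasts, as the raw dump below shows:

```python
# Read back the best run: the objective stored its loss, config and
# forecasts under 'result' (visible in the raw dump below).
best = trials.best_trial['result']
print(best['loss'])       # 1.9704331159591675
best_mc = best['mc']      # configuration of the winning trial
y_hat = best['y_hat']     # forecasts over the validation horizon
```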
trials.trials
[{'state': 2,
  'tid': 0,
  'spec': None,
  'result': {'loss': 3.701502561569214,
   'mc': {'activation': 'SELU',
    'batch_normalization': False,
    'batch_size': 256,
    'complete_inputs': False,
    'complete_sample': False,
    'device': 'cuda',
    'dropout_prob_exogenous': 0.24642550819005665,
    'dropout_prob_theta': 0.04453225609606154,
    'early_stop_patience': 16,
    'eval_freq': 50,
    'frequency': 'H',
    'idx_to_sample_freq': 24,
    'initialization': 'he_normal',
    'l1_theta': 0,
    'learning_rate': 0.0005903125740803596,
    'len_sample_chunks': None,
    'loss_hypar': 0.5,
    'loss_train': 'MAE',
    'loss_valid': 'MAE',
    'lr_decay': 0.47290326767085267,
    'lr_decay_step_size': 100,
    'max_epochs': 10,
    'max_steps': None,
    'mode': 'simple',
    'model': 'nbeats',
    'n_blocks': (1, 1),
    'n_harmonics': 1,
    'n_hidden': 256,
    'n_layers': (2, 2),
    'n_polynomials': 2,
    'n_s_hidden': 0,
    'n_series_per_batch': 1,
    'n_time_in': 168,
    'n_time_out': 24,
    'n_val_weeks': 104,
    'n_x_hidden': 5.0,
    'normalizer_x': 'median',
    'normalizer_y': None,
    'random_seed': 16.0,
    'seasonality': 24,
    'shared_weights': False,
    'stack_types': ('exogenous_tcn', 'identity'),
    'val_idx_to_sample_freq': 24,
    'weight_decay': 5.8073579520205266e-05,
    'window_sampling_limit': 100000,
    'n_x': 1,
    'n_s': 1,
    'n_theta_hidden': [[256, 256], [256, 256]]},
   'y_true': array([25.73, 29.37, 28.76, ..., 50.47, 52.32, 48.1 ],
          dtype=float32),
   'y_hat': array([27.701649 , 27.975748 , 31.803595 , ..., 54.286552 ,
          56.05054  , 57.66334  ], dtype=float32),
   'run_time': 2.62833571434021,
   'status': 'ok'},
  'misc': {'tid': 0,
   'cmd': ('domain_attachment', 'FMinIter_Domain'),
   'workdir': None,
   'idxs': {'activation': [0],
    'batch_normalization': [0],
    'batch_size': [0],
    'complete_inputs': [0],
    'complete_sample': [0],
    'device': [0],
    'dropout_prob_exogenous': [0],
    'dropout_prob_theta': [0],
    'early_stop_patience': [0],
    'eval_freq': [0],
    'frequency': [0],
    'idx_to_sample_freq': [0],
    'initialization': [0],
    'l1_theta': [0],
    'learning_rate': [0],
    'len_sample_chunks': [0],
    'loss': [0],
    'loss_hypar': [0],
    'loss_valid': [0],
    'lr_decay': [0],
    'lr_decay_step_size': [0],
    'max_epochs': [0],
    'max_steps': [0],
    'n_blocks': [0],
    'n_harmonics': [0],
    'n_hidden': [0],
    'n_layers': [0],
    'n_polynomials': [0],
    'n_s_hidden': [0],
    'n_series_per_batch': [0],
    'n_time_in': [0],
    'n_time_out': [0],
    'n_val_weeks': [0],
    'n_x_hidden': [0],
    'normalizer_x': [0],
    'normalizer_y': [0],
    'random_seed': [0],
    'seasonality': [0],
    'shared_weights': [0],
    'stack_types': [0],
    'val_idx_to_sample_freq': [0],
    'weight_decay': [0],
    'window_sampling_limit': [0]},
   'vals': {'activation': [0],
    'batch_normalization': [0],
    'batch_size': [0],
    'complete_inputs': [0],
    'complete_sample': [0],
    'device': [0],
    'dropout_prob_exogenous': [0.24642550819005665],
    'dropout_prob_theta': [0.04453225609606154],
    'early_stop_patience': [0],
    'eval_freq': [0],
    'frequency': [0],
    'idx_to_sample_freq': [0],
    'initialization': [1],
    'l1_theta': [0],
    'learning_rate': [0.0005903125740803596],
    'len_sample_chunks': [0],
    'loss': [0],
    'loss_hypar': [0],
    'loss_valid': [0],
    'lr_decay': [0.47290326767085267],
    'lr_decay_step_size': [0],
    'max_epochs': [0],
    'max_steps': [0],
    'n_blocks': [0],
    'n_harmonics': [0],
    'n_hidden': [0],
    'n_layers': [0],
    'n_polynomials': [0],
    'n_s_hidden': [0],
    'n_series_per_batch': [0],
    'n_time_in': [0],
    'n_time_out': [0],
    'n_val_weeks': [0],
    'n_x_hidden': [5.0],
    'normalizer_x': [0],
    'normalizer_y': [0],
    'random_seed': [16.0],
    'seasonality': [0],
    'shared_weights': [0],
    'stack_types': [2],
    'val_idx_to_sample_freq': [0],
    'weight_decay': [5.8073579520205266e-05],
    'window_sampling_limit': [0]}},
  'exp_key': None,
  'owner': None,
  'version': 0,
  'book_time': datetime.datetime(2021, 6, 16, 16, 19, 26, 482000),
  'refresh_time': datetime.datetime(2021, 6, 16, 16, 19, 29, 129000)},
 {'state': 2,
  'tid': 1,
  'spec': None,
  'result': {'loss': 1.9704331159591675,
   'mc': {'activation': 'SELU',
    'batch_normalization': False,
    'batch_size': 256,
    'complete_inputs': False,
    'complete_sample': False,
    'device': 'cuda',
    'dropout_prob_exogenous': 0.36553112690476813,
    'dropout_prob_theta': 0.24107333575459605,
    'early_stop_patience': 16,
    'eval_freq': 50,
    'frequency': 'H',
    'idx_to_sample_freq': 24,
    'initialization': 'he_normal',
    'l1_theta': 0,
    'learning_rate': 0.0008532112183090035,
    'len_sample_chunks': None,
    'loss_hypar': 0.5,
    'loss_train': 'MAE',
    'loss_valid': 'MAE',
    'lr_decay': 0.3553227609462469,
    'lr_decay_step_size': 100,
    'max_epochs': 10,
    'max_steps': None,
    'mode': 'simple',
    'model': 'nbeats',
    'n_blocks': (1, 1),
    'n_harmonics': 1,
    'n_hidden': 256,
    'n_layers': (2, 2),
    'n_polynomials': 2,
    'n_s_hidden': 0,
    'n_series_per_batch': 1,
    'n_time_in': 168,
    'n_time_out': 24,
    'n_val_weeks': 104,
    'n_x_hidden': 6.0,
    'normalizer_x': 'median',
    'normalizer_y': None,
    'random_seed': 18.0,
    'seasonality': 24,
    'shared_weights': False,
    'stack_types': ('identity', 'identity'),
    'val_idx_to_sample_freq': 24,
    'weight_decay': 0.00012349475141123045,
    'window_sampling_limit': 100000,
    'n_x': 1,
    'n_s': 1,
    'n_theta_hidden': [[256, 256], [256, 256]]},
   'y_true': array([25.73, 29.37, 28.76, ..., 50.47, 52.32, 48.1 ],
          dtype=float32),
   'y_hat': array([26.316542 , 26.332678 , 29.868633 , ..., 50.11864  ,
          52.503525 , 54.325073 ], dtype=float32),
   'run_time': 2.133913516998291,
   'status': 'ok'},
  'misc': {'tid': 1,
   'cmd': ('domain_attachment', 'FMinIter_Domain'),
   'workdir': None,
    'idxs': {'activation': [1],
     'batch_normalization': [1],
     ...
     'window_sampling_limit': [1]},
   'vals': {'activation': [0],
    'batch_normalization': [0],
    'batch_size': [0],
    'complete_inputs': [0],
    'complete_sample': [0],
    'device': [0],
    'dropout_prob_exogenous': [0.36553112690476813],
    'dropout_prob_theta': [0.24107333575459605],
    'early_stop_patience': [0],
    'eval_freq': [0],
    'frequency': [0],
    'idx_to_sample_freq': [0],
    'initialization': [1],
    'l1_theta': [0],
    'learning_rate': [0.0008532112183090035],
    'len_sample_chunks': [0],
    'loss': [0],
    'loss_hypar': [0],
    'loss_valid': [0],
    'lr_decay': [0.3553227609462469],
    'lr_decay_step_size': [0],
    'max_epochs': [0],
    'max_steps': [0],
    'n_blocks': [0],
    'n_harmonics': [0],
    'n_hidden': [0],
    'n_layers': [0],
    'n_polynomials': [0],
    'n_s_hidden': [0],
    'n_series_per_batch': [0],
    'n_time_in': [0],
    'n_time_out': [0],
    'n_val_weeks': [0],
    'n_x_hidden': [6.0],
    'normalizer_x': [0],
    'normalizer_y': [0],
    'random_seed': [18.0],
    'seasonality': [0],
    'shared_weights': [0],
    'stack_types': [0],
    'val_idx_to_sample_freq': [0],
    'weight_decay': [0.00012349475141123045],
    'window_sampling_limit': [0]}},
  'exp_key': None,
  'owner': None,
  'version': 0,
  'book_time': datetime.datetime(2021, 6, 16, 16, 19, 29, 151000),
  'refresh_time': datetime.datetime(2021, 6, 16, 16, 19, 31, 305000)}]
{% endraw %}
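Each entry in the trials list above follows the standard hyperopt trial layout: `result` carries the evaluation output (the observed `y`, the forecast `y_hat`, the `run_time` and the `status`), while `misc` records the per-parameter indices (`idxs`) and sampled values (`vals`). The sketch below shows one way to inspect this structure; it assumes the object returned by `hyperopt_tunning` is named `trials`, that the search space is the `nbeats_space` defined above, and that each completed result stores its validation loss under `'loss'`, as hyperopt requires for trials with status `'ok'`.
{% raw %}
import numpy as np
from hyperopt import space_eval

# Keep only successfully completed trials.
ok_trials = [trial for trial in trials.trials
             if trial['result']['status'] == 'ok']

# The best trial minimizes the validation loss reported in 'result'.
best = min(ok_trials, key=lambda trial: trial['result']['loss'])
print('best loss:', best['result']['loss'],
      '| run time:', best['result']['run_time'])

# 'misc'/'vals' stores one sampled value per hyperparameter;
# space_eval maps hp.choice indices back to the actual values.
best_vals = {name: values[0] for name, values in best['misc']['vals'].items()}
best_config = space_eval(nbeats_space, best_vals)

# Compare observations against the best trial's forecasts.
y, y_hat = best['result']['y'], best['result']['y_hat']
print('MAE of best trial:', np.mean(np.abs(y - y_hat)))
{% endraw %}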