Refer to part 4 of this chapter, Section 9.1, "Working with Sequences":
def k_step_pred(k):
    """Roll the model forward to produce 1- through k-step-ahead predictions.

    The first ``data.tau`` columns are slices of the observed series, each
    trimmed to length ``T - tau - k + 1``: the ``- k`` guarantees that even
    the k-th prediction still targets an index inside the observed range
    ``[0, T)`` (so it stays comparable with ground truth) and that every
    returned column shares one common length.

    Returns a list of ``k`` tensors; entry ``j`` holds the (j+1)-step-ahead
    predictions.
    """
    # Observed input columns: column i at row m is x[m + i].
    cols = [data.x[i : i + data.T - data.tau - k + 1]
            for i in range(data.tau)]
    # Column tau + j stores the (j+1)-step-ahead predictions: each step
    # feeds the most recent tau columns — observed and/or previously
    # predicted — back into the model.
    for step in range(k):
        window = torch.stack(cols[step : step + data.tau], 1)
        cols.append(model(window).reshape(-1))
    # Keep only the prediction columns, dropping the observed ones.
    return cols[data.tau:]
# Prediction horizons to visualize: 1-, 4-, 16-, and 64-step-ahead.
steps = (1, 4, 16, 64)
# Computing with k = 64 yields every shorter horizon as a by-product:
# the (k-1)-th entry of the returned list is the k-step-ahead column,
# and trimming by steps[-1] makes all columns the same length.
preds = k_step_pred(steps[-1])
# Shared time axis starts at tau + 63, matching the common column length
# T - tau - 64 + 1.
# NOTE(review): row m of preds[k-1] estimates x at time m + tau + k - 1,
# yet every series is drawn against the same axis starting at tau + 63,
# so for k < 64 a curve appears shifted by 64 - k steps — confirm this
# alignment is intended.
d2l.plot(data.time[data.tau+steps[-1]-1:],
[preds[k - 1].detach().numpy() for k in steps], 'time', 'x',
legend=[f'{k}-step preds' for k in steps], figsize=(6, 3))
Why do we need to subtract the number of prediction steps k in the k_step_pred
function? I tried removing the subtraction and changing the plotting code a bit, and it works almost the same.
def k_step_pred(k):
    # Variant WITHOUT the "- k" trim: every column now has length
    # T - tau + 1 instead of T - tau - k + 1.
    features = []
    for i in range(data.tau):
        # Observed column i at row m is x[m + i]; the slice runs to the
        # end of the series, so the last input window already ends at
        # x[T - 1].
        features.append(data.x[i : i + data.T - data.tau + 1])
    # The (i+tau)-th element stores the (i+1)-step-ahead predictions
    for i in range(k):
        # Row m of features[tau + i] estimates x[m + tau + i].  With the
        # longer columns the last rows target indices T, ..., T + i - 1 —
        # the tail of each prediction column extrapolates PAST the end of
        # the observed series rather than staying comparable to ground
        # truth.  That is exactly what the "- k" in the original version
        # removes, and why the plotting code must be adjusted here to
        # account for the longer columns.
        preds = model(torch.stack(features[i : i + data.tau], 1))
        features.append(preds.reshape(-1))
    return features[data.tau :]