Temporal Convolutional Networks (TCNs) are a powerful alternative to recurrent neural networks (RNNs) for sequence modeling. They use dilated causal convolutions and residual connections to capture long-range dependencies efficiently. Here's an example of how to train a simple TCN for time series forecasting using Python and TensorFlow/Keras; for brevity the model uses only dilated causal convolutions, and a residual-block variant is sketched after the model definition.
First, install the dependencies:

```bash
pip install tensorflow numpy pandas matplotlib
```
Import the libraries:

```python
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras.layers import Conv1D, BatchNormalization, ReLU, Dense, Flatten, Input
from tensorflow.keras.models import Model
```
Generate a synthetic dataset. Each sample is a random sequence, and the target is the sum over its time steps:

```python
def generate_synthetic_data(n_samples=1000, time_steps=50):
    # Random sequences of shape (n_samples, time_steps, 1)
    X = np.random.rand(n_samples, time_steps, 1)
    # Example target: sum over time steps, shape (n_samples, 1)
    y = np.sum(X, axis=1)
    return X, y

X_train, y_train = generate_synthetic_data()
X_test, y_test = generate_synthetic_data()
```
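A quick shape check confirms the generator behaves as expected:

```python
# With the default arguments: (1000, 50, 1) and (1000, 1)
print(X_train.shape, y_train.shape)
```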
Define the model. Causal padding ensures each output step depends only on current and past inputs, and increasing the dilation rate widens the receptive field: with kernel size 3 and dilation rates 1 and 2, each output step sees 1 + 2·1 + 2·2 = 7 input steps.

```python
def build_tcn(input_shape):
    inputs = Input(shape=input_shape)

    # Temporal convolutional layers with increasing dilation
    x = Conv1D(filters=32, kernel_size=3, dilation_rate=1, padding="causal")(inputs)
    x = BatchNormalization()(x)
    x = ReLU()(x)
    x = Conv1D(filters=32, kernel_size=3, dilation_rate=2, padding="causal")(x)
    x = BatchNormalization()(x)
    x = ReLU()(x)

    # Collapse the time dimension and regress a single value
    x = Flatten()(x)
    outputs = Dense(1)(x)

    model = Model(inputs, outputs)
    model.compile(optimizer="adam", loss="mse")
    return model

model = build_tcn(X_train.shape[1:])
```
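The introduction mentions residual connections, which the model above omits for simplicity. Here is a minimal sketch of a residual TCN block; `residual_block` and its parameters are illustrative, not part of the model just defined:

```python
from tensorflow.keras.layers import Add

def residual_block(x, filters, kernel_size, dilation_rate):
    # Two dilated causal convolutions, as in a standard TCN block
    y = Conv1D(filters, kernel_size, dilation_rate=dilation_rate, padding="causal")(x)
    y = BatchNormalization()(y)
    y = ReLU()(y)
    y = Conv1D(filters, kernel_size, dilation_rate=dilation_rate, padding="causal")(y)
    y = BatchNormalization()(y)
    # 1x1 convolution on the shortcut so channel counts match for the add
    if x.shape[-1] != filters:
        x = Conv1D(filters, kernel_size=1, padding="same")(x)
    return ReLU()(Add()([x, y]))
```

Stacking such blocks with dilation rates 1, 2, 4, 8, ... is the usual way to grow the receptive field while keeping gradients well-behaved.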
Train the model:

```python
history = model.fit(X_train, y_train, epochs=20, batch_size=32, validation_data=(X_test, y_test))
```
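On real data you may want to stop training once the validation loss plateaus. A sketch using the standard Keras callback (the patience value here is an arbitrary choice):

```python
early_stop = tf.keras.callbacks.EarlyStopping(
    monitor="val_loss", patience=3, restore_best_weights=True
)
history = model.fit(
    X_train, y_train,
    epochs=20,
    batch_size=32,
    validation_data=(X_test, y_test),
    callbacks=[early_stop],
)
```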
Finally, plot the loss curves and inspect a few predictions against their targets:

```python
plt.plot(history.history['loss'], label="Training Loss")
plt.plot(history.history['val_loss'], label="Validation Loss")
plt.xlabel("Epoch")
plt.ylabel("MSE")
plt.legend()
plt.show()

predictions = model.predict(X_test[:10])
print("Predictions:", predictions.flatten())
print("Targets:", y_test[:10].flatten())
```