Layers and Modules

https://d2l.ai/chapter_builders-guide/model-construction.html

Exercise 2:

class MySequential(tf.keras.Model):
    """Hold two independent stacks of blocks and run both on one input.

    Positional args:
        args[0]: iterable of layers/blocks forming the first stack (net1).
        args[1]: iterable of layers/blocks forming the second stack (net2).

    ``call`` feeds the same input through each stack separately and
    returns the pair ``(net1_output, net2_output)``.
    """
    def __init__(self, *args):
        super().__init__()
        self.net1 = []
        self.net2 = []
        for block in args[0]:
            self.net1.append(block)

        # BUG FIX: the original appended args[1] to self.net1 as well,
        # leaving self.net2 permanently empty.
        for block in args[1]:
            self.net2.append(block)

    def net1add(self, block):
        """Append a block to the end of the first stack."""
        self.net1.append(block)

    def net2add(self, block):
        """Append a block to the end of the second stack.

        BUG FIX: the original defined ``net1add`` twice (the second
        definition shadowed the first), so there was no way to extend net2.
        """
        self.net2.append(block)

    def call(self, inputs):
        """Run ``inputs`` through both stacks; return (out1, out2)."""
        net1_prob: tf.Tensor = inputs
        net2_prob: tf.Tensor = inputs
        for block in self.net1:
            net1_prob = block(net1_prob)
        for block in self.net2:
            # BUG FIX: the original chained from net1_prob here, so the
            # second branch ignored its own accumulated activations.
            net2_prob = block(net2_prob)
        return net1_prob, net2_prob

# Branch 1: 3-layer MLP (64 -> 32 -> 10); branch 2: 2-layer MLP (64 -> 10).
branch_one = [
    tf.keras.layers.Dense(64, activation=tf.nn.relu),
    tf.keras.layers.Dense(32, activation=tf.nn.relu),
    tf.keras.layers.Dense(10),
]
branch_two = [
    tf.keras.layers.Dense(64, activation=tf.nn.relu),
    tf.keras.layers.Dense(10),
]
net = MySequential(branch_one, branch_two)

# Forward the same batch X through both branches and show each output.
net1, net2 = net(X)
print(net1)
print(net2)

# Same idea as above, but more concise.

class ParallelModule(tf.keras.Model):
    """Run two sub-networks on the same input and concatenate their outputs.

    Args:
        net1: first callable sub-network (e.g. a ``tf.keras.Sequential``).
        net2: second callable sub-network.

    BUG FIX: the original body had lost its indentation and used
    ``def init`` / ``super().init()`` instead of the ``__init__`` dunder,
    so the class could not be defined or constructed.
    """
    def __init__(self, net1, net2):
        super().__init__()
        self.net1 = net1  # first branch
        self.net2 = net2  # second branch

    def call(self, X):
        # Concatenate the two branch outputs along the last (feature) axis.
        return tf.concat([self.net1(X), self.net2(X)], -1)

# Branch definitions for ParallelModule.
# BUG FIX: the original used curly "smart quotes" around 'relu'
# (a SyntaxError in Python) and had lost its indentation.
net1 = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(32, activation='relu'),
])

net2 = tf.keras.Sequential([
    tf.keras.layers.Dense(32, activation='relu'),
])

# Combined model: outputs of net1 and net2 concatenated on the last axis.
parallel_module = ParallelModule(net1, net2)