Layers and Modules

My solutions to the exercises in Section 6.1.

My solution to Q3:


import torch
from torch import nn


class DaisyX(nn.Module):
    def __init__(self, genericModule: type, chain_length=5):
        super().__init__()
        # genericModule is the module *class*, not an instance; each
        # iteration registers a fresh instance under a unique name.
        for idx in range(chain_length):
            self.add_module(str(idx) + genericModule.__name__, genericModule())

    def forward(self, X):
        # Replay the registered children in insertion order.
        for m in self.children():
            X = m(X)
        return X
    

class Increment(nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, X):
        return X + 1
    
net = DaisyX(Increment, 5)
X = torch.zeros((2, 2))
net(X)  # five Increment modules each add 1: returns a 2x2 tensor of 5s
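
This works because add_module stores each child in registration order and self.children() yields them back in that same order, so the forward loop replays the chain exactly as nn.Sequential would.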

My answers to the exercises:

  1. I don’t fully understand this question. We use the add_module method of nn.Module to register submodules; if we store them in a plain Python list instead, they are never registered, so the parent module cannot see their parameters. Does the question mean we would have to rewrite the whole structure to work around that? (A minimal check of the registration problem is sketched right after this list.)

  2. My parallel module (a usage sketch is at the end of this post):

    class parallelModule(nn.Module):
        def __init__(self, net1, net2, dim):
            super().__init__()
            self.net1 = net1
            self.net2 = net2
            self.dim = dim

        def forward(self, X):
            # Run both networks on the same input and concatenate
            # their outputs along the chosen dimension.
            output1 = self.net1(X)
            output2 = self.net2(X)
            return torch.cat((output1, output2), dim=self.dim)

  3. My layer factory:

    def layer_factory(num_layers):
        # Build num_layers independent instances of the same module.
        layers = []
        for _ in range(num_layers):
            layers.append(MLP())
        return layers
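
Back to Q1: here is a minimal sketch of the registration problem (my own check, not from the book; ListMLP is a name I made up for illustration). Modules kept in a plain Python list are never registered, so the parent module has no parameters to report:

from torch import nn

class ListMLP(nn.Module):
    def __init__(self):
        super().__init__()
        # A plain Python list: nn.Module.__setattr__ does not register
        # these children, so they are invisible to the parent module.
        self.layers = [nn.Linear(4, 4), nn.Linear(4, 4)]

    def forward(self, X):
        for layer in self.layers:
            X = layer(X)
        return X

net = ListMLP()
print(len(list(net.parameters())))  # 0 -- an optimizer would get nothing,
                                    # and net.state_dict() would be empty

nn.ModuleList is the registered drop-in replacement for a plain list, which is presumably what the exercise is pointing at.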

My deep network, built from the layer factory above:
class DeepMLP(nn.Module):
    def __init__(self, num_layers):
        super().__init__()
        layers = layer_factory(num_layers)
        for idx, layer in enumerate(layers):
            # Register each generated layer so its parameters are tracked.
            self.add_module(str(idx), layer)

    def forward(self, X):
        for module in self.children():
            X = module(X)
        return X
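
As a smoke test of Q2 and Q3 together, here is a hypothetical sketch. The tiny MLP below is a stand-in I am assuming (its output width matches its input width so that DeepMLP can chain copies end to end); it is not the chapter's actual model.

import torch
from torch import nn

# Stand-in MLP (assumption): input and output widths match so that
# DeepMLP can chain several copies end to end.
class MLP(nn.Module):
    def __init__(self):
        super().__init__()
        self.hidden = nn.Linear(8, 8)
        self.out = nn.Linear(8, 8)

    def forward(self, X):
        return self.out(torch.relu(self.hidden(X)))

X = torch.randn(2, 8)

# Q2: run two networks side by side and concatenate along dim=1.
par = parallelModule(MLP(), MLP(), dim=1)
print(par(X).shape)  # torch.Size([2, 16])

# Q3: the factory-built deep network picks up this MLP at call time.
deep = DeepMLP(num_layers=3)
print(deep(X).shape)                 # torch.Size([2, 8])
print(len(list(deep.parameters())))  # 12 tensors: 3 MLPs x 2 Linear x (weight, bias)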