From bcb2e09d56152c64dc60dd7c4f81a421c2912c5d Mon Sep 17 00:00:00 2001
From: David Rotermund <54365609+davrot@users.noreply.github.com>
Date: Thu, 28 Dec 2023 18:40:52 +0100
Subject: [PATCH] Update README.md

Signed-off-by: David Rotermund <54365609+davrot@users.noreply.github.com>
---
 pytorch/networks/README.md | 93 +++++++++++++++++++++++++++++++++++++-
 1 file changed, 92 insertions(+), 1 deletion(-)

diff --git a/pytorch/networks/README.md b/pytorch/networks/README.md
index a1c81db..1358717 100644
--- a/pytorch/networks/README.md
+++ b/pytorch/networks/README.md
@@ -24,7 +24,98 @@ CLASS torch.nn.Sequential(*args: Module)
 
 Example:
 
-```python
+![image0](network_0.png)
+We can just chain the layers together:
+
+```python
+import torch
+
+input_number_of_channel: int = 1
+input_dim_x: int = 24
+input_dim_y: int = 24
+
+number_of_output_channels_conv1: int = 32
+number_of_output_channels_conv2: int = 64
+number_of_output_channels_flatten1: int
+number_of_output_channels_full1: int = 1024
+number_of_output_channels_out: int = 10
+
+kernel_size_conv1: tuple[int, int] = (5, 5)
+kernel_size_pool1: tuple[int, int] = (2, 2)
+kernel_size_conv2: tuple[int, int] = (5, 5)
+kernel_size_pool2: tuple[int, int] = (2, 2)
+
+stride_conv1: tuple[int, int] = (1, 1)
+stride_pool1: tuple[int, int] = (2, 2)
+stride_conv2: tuple[int, int] = (1, 1)
+stride_pool2: tuple[int, int] = (2, 2)
+
+padding_conv1: int = 0
+padding_pool1: int = 0
+padding_conv2: int = 0
+padding_pool2: int = 0
+
+number_of_output_channels_flatten1 = 576  # 64 channels * 3 * 3 pixels left after the second pooling layer
+
+network = torch.nn.Sequential(
+    torch.nn.Conv2d(
+        in_channels=input_number_of_channel,
+        out_channels=number_of_output_channels_conv1,
+        kernel_size=kernel_size_conv1,
+        stride=stride_conv1,
+        padding=padding_conv1,
+    ),
+    torch.nn.ReLU(),
+    torch.nn.MaxPool2d(
+        kernel_size=kernel_size_pool1, stride=stride_pool1, padding=padding_pool1
+    ),
+    torch.nn.Conv2d(
+        in_channels=number_of_output_channels_conv1,
+        out_channels=number_of_output_channels_conv2,
+        kernel_size=kernel_size_conv2,
+        stride=stride_conv2,
+        padding=padding_conv2,
+    ),
+    torch.nn.ReLU(),
+    torch.nn.MaxPool2d(
+        kernel_size=kernel_size_pool2, stride=stride_pool2, padding=padding_pool2
+    ),
+    torch.nn.Flatten(
+        start_dim=1,
+    ),
+    torch.nn.Linear(
+        in_features=number_of_output_channels_flatten1,
+        out_features=number_of_output_channels_full1,
+        bias=True,
+    ),
+    torch.nn.ReLU(),
+    torch.nn.Linear(
+        in_features=number_of_output_channels_full1,
+        out_features=number_of_output_channels_out,
+        bias=True,
+    ),
+)
+
+print(network)
 ```
+```python
+Sequential(
+  (0): Conv2d(1, 32, kernel_size=(5, 5), stride=(1, 1))
+  (1): ReLU()
+  (2): MaxPool2d(kernel_size=(2, 2), stride=(2, 2), padding=0, dilation=1, ceil_mode=False)
+  (3): Conv2d(32, 64, kernel_size=(5, 5), stride=(1, 1))
+  (4): ReLU()
+  (5): MaxPool2d(kernel_size=(2, 2), stride=(2, 2), padding=0, dilation=1, ceil_mode=False)
+  (6): Flatten(start_dim=1, end_dim=-1)
+  (7): Linear(in_features=576, out_features=1024, bias=True)
+  (8): ReLU()
+  (9): Linear(in_features=1024, out_features=10, bias=True)
+)
+```
+
+Congratulations, you now have the network you wanted.
+
+
+
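+As a quick sanity check we can push a random batch of images through the network and look at the output shape (a minimal sketch; the batch size of 7 is arbitrary, and the compact `network` below just mirrors the one defined above).
+The 576 input features of the first `Linear` layer come from the 64 channels times the 3 x 3 feature map that remains after the second pooling stage.
+
+```python
+import torch
+
+# Compact re-creation of the network from above, so this snippet runs on its own.
+network = torch.nn.Sequential(
+    torch.nn.Conv2d(1, 32, kernel_size=5),   # 24x24 -> 20x20
+    torch.nn.ReLU(),
+    torch.nn.MaxPool2d(2),                   # 20x20 -> 10x10
+    torch.nn.Conv2d(32, 64, kernel_size=5),  # 10x10 -> 6x6
+    torch.nn.ReLU(),
+    torch.nn.MaxPool2d(2),                   # 6x6 -> 3x3
+    torch.nn.Flatten(start_dim=1),           # 64 * 3 * 3 = 576
+    torch.nn.Linear(576, 1024),
+    torch.nn.ReLU(),
+    torch.nn.Linear(1024, 10),
+)
+
+# A random batch of 7 grayscale images of size 24 x 24.
+fake_input = torch.rand((7, 1, 24, 24))
+
+output = network(fake_input)
+print(output.shape)  # torch.Size([7, 10])
+```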