Skip to content

Commit

Permalink
[Application] update yolo v2 modeling
Browse files Browse the repository at this point in the history
Update the modeling part of YOLOv2
(updates some hyperparameter values).

- update yolo v2 pytorch(python) script
- update yolo v2 nntrainer(c++) script

* Issue
- The activation function in nntrainer (in this case, leaky ReLU) needs to support setting the
negative slope via a parameter.

**Self evaluation:**
1. Build test:	 [X]Passed [ ]Failed [ ]Skipped
2. Run test:	 [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <[email protected]>
  • Loading branch information
baek2sm authored and jijoongmoon committed May 23, 2024
1 parent ddf8104 commit 3fe9a1e
Show file tree
Hide file tree
Showing 2 changed files with 72 additions and 46 deletions.
108 changes: 66 additions & 42 deletions Applications/YOLOv2/PyTorch/yolo.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,98 +20,122 @@ def __init__(self, num_classes, num_anchors=5):
self.num_classes = num_classes
self.num_anchors = num_anchors
self.conv1 = nn.Sequential(
nn.Conv2d(3, 32, 3, 1, 1),
nn.BatchNorm2d(32, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(3, 32, 3, 1, 1, bias=False),
nn.BatchNorm2d(32),
nn.LeakyReLU(0.1),
nn.MaxPool2d(2, 2),
)
self.conv2 = nn.Sequential(
nn.Conv2d(32, 64, 3, 1, 1),
nn.BatchNorm2d(64, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(32, 64, 3, 1, 1, bias=False),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.1),
nn.MaxPool2d(2, 2),
)
self.conv3 = nn.Sequential(
nn.Conv2d(64, 128, 3, 1, 1), nn.BatchNorm2d(128, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(64, 128, 3, 1, 1, bias=False),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.1),
)
self.conv4 = nn.Sequential(
nn.Conv2d(128, 64, 1, 1, 0), nn.BatchNorm2d(64, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(128, 64, 1, 1, 0, bias=False),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.1),
)
self.conv5 = nn.Sequential(
nn.Conv2d(64, 128, 3, 1, 1),
nn.BatchNorm2d(128, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(64, 128, 3, 1, 1, bias=False),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.1),
nn.MaxPool2d(2, 2),
)
self.conv6 = nn.Sequential(
nn.Conv2d(128, 256, 3, 1, 1), nn.BatchNorm2d(256, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(128, 256, 3, 1, 1, bias=False),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1),
)
self.conv7 = nn.Sequential(
nn.Conv2d(256, 128, 1, 1, 0), nn.BatchNorm2d(128, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(256, 128, 1, 1, 0, bias=False),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.1),
)
self.conv8 = nn.Sequential(
nn.Conv2d(128, 256, 3, 1, 1),
nn.BatchNorm2d(256, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(128, 256, 3, 1, 1, bias=False),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1),
nn.MaxPool2d(2, 2),
)
self.conv9 = nn.Sequential(
nn.Conv2d(256, 512, 3, 1, 1), nn.BatchNorm2d(512, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(256, 512, 3, 1, 1, bias=False),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1),
)
self.conv10 = nn.Sequential(
nn.Conv2d(512, 256, 1, 1, 0), nn.BatchNorm2d(256, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(512, 256, 1, 1, 0, bias=False),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1),
)
self.conv11 = nn.Sequential(
nn.Conv2d(256, 512, 3, 1, 1), nn.BatchNorm2d(512, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(256, 512, 3, 1, 1, bias=False),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1),
)
self.conv12 = nn.Sequential(
nn.Conv2d(512, 256, 1, 1, 0), nn.BatchNorm2d(256, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(512, 256, 1, 1, 0, bias=False),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.1),
)
self.conv13 = nn.Sequential(
nn.Conv2d(256, 512, 3, 1, 1), nn.BatchNorm2d(512, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(256, 512, 3, 1, 1, bias=False),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1),
)

self.conv_b = nn.Sequential(
nn.Conv2d(512, 64, 1, 1, 0), nn.BatchNorm2d(64, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(512, 64, 1, 1, 0, bias=False),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.1),
)

self.maxpool_a = nn.MaxPool2d(2, 2)
self.conv_a1 = nn.Sequential(
nn.Conv2d(512, 1024, 3, 1, 1),
nn.BatchNorm2d(1024, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(512, 1024, 3, 1, 1, bias=False),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1),
)
self.conv_a2 = nn.Sequential(
nn.Conv2d(1024, 512, 1, 1, 0), nn.BatchNorm2d(512, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(1024, 512, 1, 1, 0, bias=False),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1),
)
self.conv_a3 = nn.Sequential(
nn.Conv2d(512, 1024, 3, 1, 1),
nn.BatchNorm2d(1024, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(512, 1024, 3, 1, 1, bias=False),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1),
)
self.conv_a4 = nn.Sequential(
nn.Conv2d(1024, 512, 1, 1, 0), nn.BatchNorm2d(512, eps=1e-3), nn.LeakyReLU()
nn.Conv2d(1024, 512, 1, 1, 0, bias=False),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.1),
)
self.conv_a5 = nn.Sequential(
nn.Conv2d(512, 1024, 3, 1, 1),
nn.BatchNorm2d(1024, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(512, 1024, 3, 1, 1, bias=False),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1),
)
self.conv_a6 = nn.Sequential(
nn.Conv2d(1024, 1024, 3, 1, 1),
nn.BatchNorm2d(1024, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(1024, 1024, 3, 1, 1, bias=False),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1),
)
self.conv_a7 = nn.Sequential(
nn.Conv2d(1024, 1024, 3, 1, 1),
nn.BatchNorm2d(1024, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(1024, 1024, 3, 1, 1, bias=False),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1),
)

self.conv_out1 = nn.Sequential(
nn.Conv2d(1280, 1024, 3, 1, 1),
nn.BatchNorm2d(1024, eps=1e-3),
nn.LeakyReLU(),
nn.Conv2d(1280, 1024, 3, 1, 1, bias=False),
nn.BatchNorm2d(1024),
nn.LeakyReLU(0.1),
)

self.conv_out2 = nn.Conv2d(1024, self.num_anchors * (5 + num_classes), 1, 1, 0)
Expand Down
10 changes: 6 additions & 4 deletions Applications/YOLOv2/jni/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,7 @@ std::vector<LayerHandle> yoloBlock(const std::string &block_name,
withKey("filters", filters),
withKey("kernel_size", {kernel_size, kernel_size}),
withKey("padding", padding),
withKey("disable_bias", "true"),
withKey("input_layers", input_layer)};

return createLayer("conv2d", props);
Expand All @@ -150,6 +151,7 @@ std::vector<LayerHandle> yoloBlock(const std::string &block_name,
if (downsample) {
LayerHandle a2 = createLayer("batch_normalization",
{with_name("a2"), withKey("momentum", "0.9"),
withKey("epsilon", 0.00001),
withKey("activation", "leaky_relu")});

LayerHandle a3 = createLayer(
Expand All @@ -158,10 +160,10 @@ std::vector<LayerHandle> yoloBlock(const std::string &block_name,

return {a1, a2, a3};
} else {
LayerHandle a2 =
createLayer("batch_normalization",
{withKey("name", block_name), withKey("momentum", "0.9"),
withKey("activation", "leaky_relu")});
LayerHandle a2 = createLayer(
"batch_normalization",
{withKey("name", block_name), withKey("momentum", "0.9"),
withKey("epsilon", 0.00001), withKey("activation", "leaky_relu")});

return {a1, a2};
}
Expand Down

0 comments on commit 3fe9a1e

Please sign in to comment.