Commit

Fixes missing parameter
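
The activation() helper in darknet_graph.py built fallback layer names with 'layer%d-act' % i, but i was never defined inside the method (it is the block-index loop variable in build()), so converting a model whose non-linear block lacked a 'name' entry raised a NameError. This commit threads the index through as an explicit id parameter and updates the three call sites in build().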
Francisco Facioni committed Oct 22, 2018
1 parent 72178dd commit 3347296
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions mmdnn/conversion/darknet/darknet_graph.py
@@ -68,14 +68,14 @@ def conv_output_height(height, padding, kernel_size, stride):
         return (height + 2*padding - kernel_size)/stride + 1
 
 
-    def activation(self, block, pre_node_name, input_shape):
+    def activation(self, block, pre_node_name, input_shape, id):
         if block['activation'] != 'linear':
             relu_layer = OrderedDict()
             relu_layer['input'] = [pre_node_name]
             if 'name' in block.keys():
                 relu_layer['name'] = '%s-act' % block['name']
             else:
-                relu_layer['name'] = 'layer%d-act' % i
+                relu_layer['name'] = 'layer%d-act' % id
             relu_layer['type'] = 'ReLU'
             relu_param = OrderedDict()
             if block['activation'] == 'leaky':
@@ -180,7 +180,7 @@ def build(self):
 
                     pre_node_name = bn_layer['name']
 
-                pre_node_name = self.activation(block, pre_node_name, input_shape)
+                pre_node_name = self.activation(block, pre_node_name, input_shape, i)
 
 
             elif block['type'] == 'maxpool':
@@ -301,7 +301,7 @@ def build(self):
                 self.layer_num_map[i] = shortcut_layer['name']
                 pre_node_name = shortcut_layer['name']
 
-                pre_node_name = self.activation(block, pre_node_name, input_shape)
+                pre_node_name = self.activation(block, pre_node_name, input_shape, i)
 
             elif block['type'] == 'connected':
                 fc_layer = OrderedDict()
@@ -321,7 +321,7 @@ def build(self):
                 self.layer_num_map[i] = fc_layer['name']
                 pre_node_name = fc_layer['name']
 
-                pre_node_name = self.activation(block, pre_node_name, input_shape)
+                pre_node_name = self.activation(block, pre_node_name, input_shape, i)
 
             elif block['type'] == 'softmax':
                 sm_layer = OrderedDict()
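For context, here is a minimal, runnable sketch of the naming logic this commit fixes. The standalone make_act_name helper is a hypothetical stand-in for the relevant branch of activation(), not MMdnn API; it assumes only the block dict and layer index visible in the diff above.

    from collections import OrderedDict

    def make_act_name(block, id):
        # Hypothetical helper, not part of MMdnn. Mirrors the fixed fallback:
        # use the block's own name when present, otherwise the layer index
        # that build() now passes in explicitly.
        if 'name' in block:
            return '%s-act' % block['name']
        return 'layer%d-act' % id  # 'id' replaces the previously undefined 'i'

    relu_layer = OrderedDict()
    relu_layer['name'] = make_act_name({'activation': 'leaky'}, 3)
    relu_layer['type'] = 'ReLU'
    print(relu_layer['name'])  # layer3-act

    # Before the fix, only unnamed blocks hit the broken branch, so cfg files
    # whose blocks all carried names converted fine and masked the bug.

One side note on the chosen parameter name: id shadows Python's built-in id() inside the method. That is harmless here, but a name like idx would avoid the shadowing.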
