Commit 2361418
Fix for coding style
It fixes coding style: pointer declarators now bind to the variable name (T *val rather than T * val), constructor initializer lists are packed onto a single continuation line, and an over-long lambda parameter list is re-wrapped.

Signed-off-by: Jiho Chu <[email protected]>
jihochu committed Feb 21, 2024
1 parent 8bef976 commit 2361418
Showing 4 changed files with 10 additions and 12 deletions.
nntrainer/layers/conv2d_layer.cpp: 2 additions, 2 deletions

@@ -85,7 +85,7 @@ static void col2im(const Tensor &col_matrix, const TensorDim &kdim,
   int h_stride_end = im_eff_height - eff_k_height - pt;
   int w_stride_end = im_eff_width - eff_k_width - pl;

-  auto apply_data = [&]<typename T>(T * val) {
+  auto apply_data = [&]<typename T>(T *val) {
     unsigned col_w = 0;
     for (int hs = -pt; hs <= h_stride_end; hs += hstride) {
       for (int ws = -pl; ws <= w_stride_end; ws += wstride) {
@@ -218,7 +218,7 @@ static void im2col(const Tensor &in, const TensorDim &kdim,
     TensorDim({out_height * out_width, in.channel() * k_height * k_width},
               in.getTensorType()));

-  auto apply_data = [&]<typename T>(T * out_data) {
+  auto apply_data = [&]<typename T>(T *out_data) {
     int h_stride_end = height - eff_k_height - pt;
     int w_stride_end = width - eff_k_width - pl;
nntrainer/layers/fc_layer.cpp: 1 addition, 3 deletions

@@ -39,8 +39,7 @@ static constexpr size_t SINGLE_INOUT_IDX = 0;
 enum FCParams { weight, bias };

 FullyConnectedLayer::FullyConnectedLayer() :
-  LayerImpl(),
-  fc_props(props::Unit()) {
+  LayerImpl(), fc_props(props::Unit()) {
   weight_idx.fill(std::numeric_limits<unsigned>::max());
 }

@@ -273,7 +272,6 @@ void FullyConnectedLayer::calcGradient(RunLayerContext &context) {
   }
   input_.dot_deriv_wrt_2(djdw_, derivative_, false, false, !wg_first_access);
 }
-
 }

 } /* namespace nntrainer */
nntrainer/layers/pooling2d_layer.cpp: 6 additions, 5 deletions

@@ -188,7 +188,7 @@ void Pooling2DLayer::calcDerivative(RunLayerContext &context) {
   unsigned int out_map_size = deriv.height() * deriv.width();
   unsigned int in_map_size = height * width;

-  auto apply_max = [&]<typename T>(T * result_data) {
+  auto apply_max = [&]<typename T>(T *result_data) {
     const int *iter = pool_helper.getData<int>();
     const T *deriv_data = deriv.getData<T>();
     for (unsigned int b = 0; b < batch; ++b) {
@@ -207,7 +207,7 @@ void Pooling2DLayer::calcDerivative(RunLayerContext &context) {
     }
   };

-  auto apply_average = [&]<typename T>(T * result_data) {
+  auto apply_average = [&]<typename T>(T *result_data) {
     int height_stride_end = height - p_height + pt;
     int width_stride_end = width - p_width + pl;
     const int *iter = pool_helper.getData<int>();
@@ -239,7 +239,7 @@ void Pooling2DLayer::calcDerivative(RunLayerContext &context) {
     }
   };

-  auto apply_global_max = [&]<typename T>(T * result_data) {
+  auto apply_global_max = [&]<typename T>(T *result_data) {
     const T *deriv_data = deriv.getData<T>();
     for (unsigned int b = 0; b < batch; b++) {
       for (unsigned int c = 0; c < channel; c++) {
@@ -372,8 +372,9 @@ void Pooling2DLayer::pooling2d(Tensor &in, bool training, Tensor &output,
     return max_val;
   };

-  auto pool_fn_global_max = [&, this ]<typename T>(
-    const T *in_data, int channel_idx, int start_h, int start_w) {
+  auto pool_fn_global_max = [&, this]<typename T>(const T *in_data,
+                                                  int channel_idx, int start_h,
+                                                  int start_w) {
     int end_h = start_h + patch_height;
     int end_w = start_w + patch_width;
nntrainer/tensor/tensor.cpp: 1 addition, 2 deletions

@@ -127,8 +127,7 @@ class SrcSharedTensor {
   SrcSharedTensor() : src(nullptr), off(0) {}

   SrcSharedTensor(const Tensor *tensor, size_t offset) :
-    src(tensor),
-    off(offset) {}
+    src(tensor), off(offset) {}

   /**
    * @brief Get the allocated src tensor
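
The fc_layer.cpp and tensor.cpp hunks apply the same initializer-list rule: the colon stays on the signature line and the member initializers pack onto one continuation line. A compilable sketch of that rule (the class and members below are illustrative, not repository code):

// Illustrative only: a SrcSharedTensor-like class showing the packed
// member-initializer style the commit enforces.
#include <cstddef>

class SharedView {
public:
  SharedView() : src(nullptr), off(0) {}

  SharedView(const float *tensor, std::size_t offset) :
    src(tensor), off(offset) {}

  const float *data() const { return src + off; }

private:
  const float *src;
  std::size_t off;
};

int main() {
  float buf[8] = {};
  SharedView view(buf, 4); // both initializers on one line
  return view.data() == buf + 4 ? 0 : 1;
}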