Skip to content

Commit

Permalink
[unittest] Added test for incremental forwarding for layers
Browse files Browse the repository at this point in the history
Added incremental forwarding as an option for unit testing layers

Signed-off-by: Debadri Samaddar <[email protected]>
  • Loading branch information
s-debadri committed May 10, 2024
1 parent f49a75e commit 5a34d75
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 3 deletions.
2 changes: 1 addition & 1 deletion nntrainer/layers/cl_layers/fc_layer_cl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ std::string fc_dot_cl_kernel_ =
R"(__kernel void fc_dot_cl(const __global float* A, const __global float* X, unsigned int K, float res) {
res = 0;
for (unsigned int i = 0; i < K; i++){
res += A[i] * X[i];`
res += A[i] * X[i];
}
})";

Expand Down
9 changes: 9 additions & 0 deletions test/unittest/layers/layers_common_tests.h
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ class LayerPropertySemantics : public LayerSemantics {};
typedef enum {
SKIP_CALC_GRAD = 1 << 0, /**< skip calculating gradient and compare */
SKIP_CALC_DERIV = 1 << 1, /**< skip calculating derivative and compare */
USE_INC_FORWARD = 1 << 2, /**< use incremental forwarding and compare */

FORWARD_MODE_INFERENCE =
1 << 2, /**< set if layer should be forwarded with inference mode */
Expand Down Expand Up @@ -172,6 +173,14 @@ class LayerGoldenTest
*/
bool shouldSkipCalcGrad();

/**
* @brief check if given test suite should use incremental forwarding instead
* of normal forwarding
*
* @return bool true if should use incremental forwarding
*/
bool shouldUseIncForward();

/**
* @brief check if given test suite should skip cosine similarity check
*
Expand Down
23 changes: 22 additions & 1 deletion test/unittest/layers/layers_golden_tests.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -364,6 +364,11 @@ bool LayerGoldenTest::shouldSkipCalcGrad() {
LayerGoldenTestParamOptions::SKIP_CALC_GRAD;
}

/**
 * @brief whether this test suite is configured to exercise
 *        incremental_forwarding() instead of plain forwarding()
 *
 * NOTE(review): USE_INC_FORWARD is declared as 1 << 2, the same bit as
 * FORWARD_MODE_INFERENCE (also 1 << 2) — as written, this predicate also
 * fires for inference-mode suites; confirm and renumber the enum.
 *
 * @return bool true when the USE_INC_FORWARD option bit is set
 */
bool LayerGoldenTest::shouldUseIncForward() {
  const auto options = std::get<int>(GetParam());
  return (options & LayerGoldenTestParamOptions::USE_INC_FORWARD) != 0;
}

bool LayerGoldenTest::shouldSkipCosineSimilarity() {
return std::get<int>(GetParam()) &
LayerGoldenTestParamOptions::SKIP_COSINE_SIMILARITY;
Expand All @@ -387,15 +392,31 @@ TEST_P(LayerGoldenTest, run) {

bool skip_calc_grad = shouldSkipCalcGrad();
bool skip_calc_deriv = shouldSkipCalcDeriv();
bool use_inc_forward = shouldUseIncForward();
bool dropout_compare_60_percent = shouldMatchDropout60Percent();
bool skip_cos_sim = shouldSkipCosineSimilarity();

Tensor &input = rc.getInput(0);
TensorDim input_dim = input.getDim();
size_t inputHeight = input_dim.height();

for (int i = 0; i < 4; ++i) {
/// warm layer multiple times
if (use_inc_forward) {
layer->incremental_forwarding(rc, 0, inputHeight,
!shouldForwardWithInferenceMode());
} else {
layer->forwarding(rc, !shouldForwardWithInferenceMode());
}
}

if (use_inc_forward) {
layer->incremental_forwarding(rc, 0, inputHeight,
!shouldForwardWithInferenceMode());
} else {
layer->forwarding(rc, !shouldForwardWithInferenceMode());
}

layer->forwarding(rc, !shouldForwardWithInferenceMode());
if (!skip_calc_grad) {
layer->calcGradient(rc);
}
Expand Down
3 changes: 2 additions & 1 deletion test/unittest/layers/unittest_layers_fully_connected_cl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,8 @@ auto fc_basic_plain_nhwc = LayerGoldenTestParamType(
nntrainer::createLayer<nntrainer::FullyConnectedLayerCl>, {"unit=5"},
"3:10:1:1", "fc_plain.nnlayergolden",
LayerGoldenTestParamOptions::SKIP_CALC_DERIV |
LayerGoldenTestParamOptions::SKIP_CALC_GRAD,
LayerGoldenTestParamOptions::SKIP_CALC_GRAD |
LayerGoldenTestParamOptions::USE_INC_FORWARD,
"nhwc", "fp32", "fp32");

auto fc_basic_single_batch_nhwc = LayerGoldenTestParamType(
Expand Down

0 comments on commit 5a34d75

Please sign in to comment.