Skip to content

Commit

Permalink
Merge branch 'intermediate_output' into 'master'
Browse files Browse the repository at this point in the history
Add a function to support intermediate layer output.

See merge request ai/esp-dl!121
  • Loading branch information
sun-xiangyu committed Jan 3, 2025
2 parents 0aa80ee + fe4272e commit b02c1ae
Show file tree
Hide file tree
Showing 5 changed files with 82 additions and 17 deletions.
14 changes: 14 additions & 0 deletions esp-dl/dl/tensor/include/dl_tensor_base.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -251,13 +251,27 @@ class TensorBase {
*/
TensorBase &set_shape(const std::vector<int> shape);

/**
* @brief Get the exponent of Tensor
*
* @return int the exponent of Tensor
*/
int get_exponent() { return this->exponent; }

/**
 * @brief Get the data type of Tensor
 *
 * @return dtype_t the data type of Tensor
 */
dtype_t get_dtype() const { return this->dtype; }

/**
 * @brief Get the memory flags of Tensor
 *
 * @return uint32_t the memory flags (caps) of Tensor
 */
uint32_t get_caps() const { return this->caps; }

/**
* @brief Change a new shape to the Tensor without changing its data.
*
Expand Down
2 changes: 1 addition & 1 deletion esp-dl/dl/tool/include/dl_tool.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -614,7 +614,7 @@ class Latency {
#endif
#else
if (debug)
ESP_LOGD("latency", "%s::%s: %lu us\n", prefix, key, this->get_average_period());
ESP_LOGI("latency", "%s::%s: %lu us\n", prefix, key, this->get_average_period());
else
printf("%s::%s: %lu us\n", prefix, key, this->get_average_period());
#endif
Expand Down
36 changes: 33 additions & 3 deletions test_apps/esp-dl/main/test_dl_model.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ void compare_test_outputs(Model *model, std::map<std::string, TensorBase *> infe
fbs_model_instance->load_map();
int i = 0;
for (auto iter = infer_outputs.begin(); iter != infer_outputs.end(); iter++) {
ESP_LOGI(TAG, "output index: %d", i++);
ESP_LOGI(TAG, "output index: %d, name: %s", i++, iter->first.c_str());
std::string infer_output_name = iter->first;
TensorBase *infer_output = iter->second;
if (infer_output) {
Expand Down Expand Up @@ -66,6 +66,25 @@ std::map<std::string, TensorBase *> get_graph_test_inputs(Model *model)
return test_inputs;
}

/**
 * @brief Build caller-owned output tensors for the requested intermediate layers.
 *
 * For every name in @p user_outputs_name that resolves to an intermediate
 * tensor of @p model, a new TensorBase is allocated that mirrors that
 * tensor's shape, exponent, dtype and memory caps (with no data pointer,
 * so the runtime fills it during inference). Names that do not resolve are
 * silently skipped.
 *
 * @param model             model to query; may be nullptr (empty map returned)
 * @param user_outputs_name names of the intermediate tensors to capture
 * @return map from tensor name to a newly allocated TensorBase; the caller
 *         owns the returned tensors and must delete them.
 */
std::map<std::string, TensorBase *> get_graph_user_outputs(Model *model, std::vector<std::string> user_outputs_name)
{
    std::map<std::string, TensorBase *> user_outputs;

    if (!model || user_outputs_name.empty()) {
        return user_outputs;
    }

    // Range-for instead of `int i < size()`: avoids the signed/unsigned
    // comparison of the original index loop.
    for (const auto &name : user_outputs_name) {
        TensorBase *output = model->get_intermediate(name);
        if (!output) {
            continue; // unknown tensor name — skip rather than dereference null
        }
        TensorBase *user_output = new TensorBase(
            output->get_shape(), nullptr, output->get_exponent(), output->get_dtype(), true, output->get_caps());
        user_outputs.emplace(name, user_output);
    }
    return user_outputs;
}

TEST_CASE("Test espdl model", "[dl_model]")
{
ESP_LOGI(TAG, "get into app_main");
Expand All @@ -86,21 +105,32 @@ TEST_CASE("Test espdl model", "[dl_model]")
fbs::FbsModel *fbs_model = fbs_loader->load(i);
Model *model = new Model(fbs_model);
std::map<std::string, TensorBase *> graph_test_inputs = get_graph_test_inputs(model);
std::map<std::string, TensorBase *> graph_user_outputs = get_graph_user_outputs(model, {});
model->print();
latency.start();
model->run(graph_test_inputs);
model->run(graph_test_inputs, RUNTIME_MODE_SINGLE_CORE, graph_user_outputs);
latency.end();
model_run_time += latency.get_period();
ESP_LOGI(TAG, "model index:%d run time:%d us\n", i, latency.get_period());

compare_test_outputs(model, model->get_outputs());
::compare_test_outputs(model, graph_user_outputs.empty() ? model->get_outputs() : graph_user_outputs);
for (auto graph_test_inputs_iter = graph_test_inputs.begin(); graph_test_inputs_iter != graph_test_inputs.end();
graph_test_inputs_iter++) {
if (graph_test_inputs_iter->second) {
delete graph_test_inputs_iter->second;
}
}
graph_test_inputs.clear();

for (auto graph_user_outputs_iter = graph_user_outputs.begin();
graph_user_outputs_iter != graph_user_outputs.end();
graph_user_outputs_iter++) {
if (graph_user_outputs_iter->second) {
delete graph_user_outputs_iter->second;
}
}
graph_user_outputs.clear();

delete model;
delete fbs_model;
}
Expand Down
24 changes: 19 additions & 5 deletions tools/ops_test/config/op_cfg.toml
Original file line number Diff line number Diff line change
Expand Up @@ -1444,28 +1444,42 @@
export_name_prefix = "Unsqueeze_ishape_1_2_4_dim_3"
dim = 3


[ops_test.Slice]
class_name = "SLICE_TEST"
quant_bits = ["int8", "int16"]
[[ops_test.Slice.cfg]]
input_shape = [1, 96, 20, 20]
dim = 4
starts = [0, 1, 1]
ends = [1, 20, -1]
axes = [0, 1, 2]
steps = [1, 2, 1]
export_name_prefix = "slice_ishap_1_96_20_20"

[[ops_test.Slice.cfg]]
input_shape = [1, 10, 10]
dim = 3
starts = [0, 1, 1]
ends = [1, 9, -1]
axes = [0, 1, 2]
steps = [1, 3, 2]
export_name_prefix = "slice_ishap_1_10_10"

[[ops_test.Slice.cfg]]
input_shape = [15, 133]
dim = 2
starts = [0, 10]
ends = [15, -10]
axes = [0, 1]
steps = [1, 1]
export_name_prefix = "slice_ishap_15_133"

[[ops_test.Slice.cfg]]
input_shape = [38, 2, 2]
dim = 1
export_name_prefix = "slice_ishap_38"
starts = [0]
ends = [2]
axes = [0]
steps = [1]
export_name_prefix = "slice_ishap_38_2_2"


[ops_test.Pad]
class_name = "PAD_TEST"
Expand Down
23 changes: 15 additions & 8 deletions tools/ops_test/ops_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,17 +495,24 @@ class SLICE_TEST(nn.Module):
def __init__(self, config):
super().__init__()
self.config = config
self.starts = self.config["starts"]
self.ends = self.config["ends"]
self.axes = self.config["axes"]
self.steps = self.config["steps"]

def forward(self, input):
input = nn.ReLU()(input)
if self.config["dim"] == 4:
return input[0:1, 1:20:2, 1:-1, :]
elif self.config["dim"] == 3:
return input[0:1, 1:9:3, 1:-1:2]
elif self.config["dim"] == 2:
return input[:, 10:-10]
elif self.config["dim"] == 1:
return input[0:2]
array_idx = []
for i, dim in enumerate(input.shape):
if i in self.axes:
index = self.axes.index(i)
array_idx.append(
slice(self.starts[index], self.ends[index], self.steps[index])
)
else:
array_idx.append(slice(dim))
output = input[array_idx]
return output


class PAD_TEST(nn.Module):
Expand Down

0 comments on commit b02c1ae

Please sign in to comment.