Skip to content

Commit

Permalink
[res/tfl_recipes] Add REGRESS_Issue_13863 (#14064)
Browse files Browse the repository at this point in the history
This commit adds the REGRESS_Issue_13863 TFLite recipe.

ONE-DCO-1.0-Signed-off-by: HanJin Choi [email protected]
  • Loading branch information
Hanjin-Choi authored and seanshpark committed Sep 30, 2024
1 parent c59ddc6 commit 6e938e2
Show file tree
Hide file tree
Showing 5 changed files with 81 additions and 6 deletions.
1 change: 1 addition & 0 deletions compiler/common-artifacts/exclude.lst
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@ tcgenerate(ReLU6_dynamic_000) # TestDataGenerator does not support unknown dimen
tcgenerate(ReLUN1To1_000)
tcgenerate(ReLUN1To1_dynamic_000) # TestDataGenerator does not support unknown dimension
tcgenerate(Reshape_003) # luci-interpreter doesn't support reshape without built-in option
tcgenerate(Reshape_004) # has 0 in shape
tcgenerate(ReverseSequence_000)
tcgenerate(ReverseV2_000)
tcgenerate(Round_000)
Expand Down
19 changes: 15 additions & 4 deletions compiler/luci/import/src/ImporterEx.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,14 @@
namespace luci
{

namespace
{

// Upper bound (exclusive) on model file size that the flatbuffers verifier
// is asked to check; larger files skip verification (current flatbuffers
// limitation). Expressed as 2 GiB rather than a raw magic number.
// NOTE `inline` is unnecessary here: an anonymous namespace in a .cpp
// already gives the constant internal linkage.
constexpr uint64_t FLATBUFFERS_SIZE_MAX = 2048ULL * 1024 * 1024; // 2GB

} // namespace

std::unique_ptr<Module> ImporterEx::importVerifyModule(const std::string &input_path) const
{
foder::FileLoader file_loader{input_path};
Expand All @@ -43,11 +51,14 @@ std::unique_ptr<Module> ImporterEx::importVerifyModule(const std::string &input_
auto data_data = reinterpret_cast<uint8_t *>(model_data.data());
auto data_size = model_data.size();

flatbuffers::Verifier verifier{data_data, data_size};
if (!circle::VerifyModelBuffer(verifier))
if (data_size < FLATBUFFERS_SIZE_MAX)
{
std::cerr << "ERROR: Invalid input file '" << input_path << "'" << std::endl;
return nullptr;
flatbuffers::Verifier verifier{data_data, data_size};
if (!circle::VerifyModelBuffer(verifier))
{
std::cerr << "ERROR: Invalid input file '" << input_path << "'" << std::endl;
return nullptr;
}
}

Importer importer(_source);
Expand Down
34 changes: 32 additions & 2 deletions compiler/luci/service/src/Nodes/CircleReshape.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -82,15 +82,32 @@ loco::TensorShape Algorithm::visit(const luci::CircleReshape *node)
{
LUCI_ASSERT(const_shape_node->dtype() == S32, "Only support int32 CircleConst");

// NOTE for rank(shape_by_input before) < rank(shape_by_input after),
// shape_by_input after will be filled with unknown dims
shape_by_input.rank(const_shape_node->size<S32>());

for (uint32_t axis = 0; axis < shape_by_input.rank(); ++axis)
{
shape_by_input.dim(axis) = const_shape_node->at<S32>(axis);
if (const_shape_node->at<S32>(axis) < 0)
{
shape_by_input.dim(axis).unset();
}
else if (const_shape_node->at<S32>(axis) == 0)
{
const auto node_tensor = loco::must_cast<luci::CircleNode *>(node->tensor());
// set dim value to input
if (node_tensor->shape_status() == luci::ShapeStatus::VALID && axis < node_tensor->rank())
shape_by_input.dim(axis) = node_tensor->dim(axis);
else
shape_by_input.dim(axis).set(0);
// TODO allow 0 from ONNX
}
else
{
shape_by_input.dim(axis).set(const_shape_node->at<S32>(axis));
}
// check valid or stop for debugging
assert(shape_by_input.dim(axis).value() > 0 || !shape_by_input.dim(axis).known());
}
}
else
Expand Down Expand Up @@ -148,14 +165,27 @@ loco::TensorShape Algorithm::visit(const luci::CircleReshape *node)
}
for (uint32_t dim_index = 0; dim_index < output_shape.rank(); ++dim_index)
{
const uint32_t dim_value = output_shape.dim(dim_index).value();
uint32_t dim_value = output_shape.dim(dim_index).value();
if (not output_shape.dim(dim_index).known())
{
LUCI_ASSERT(unknown_dim_index == UINT32_MAX, "More than one unknown dimension");
unknown_dim_index = dim_index;
}
else
{
if (!dim_value)
{
// refer https://github.com/Samsung/ONE/issues/14074#issuecomment-2370795003
// set dim value to follow input
if (dim_index < input_shape.rank())
dim_value = input_shape.dim(dim_index).value();
else
{
// stop to check if this case exist for debugging
assert(dim_index < input_shape.rank());
dim_value = 1;
}
}
output_element_count *= dim_value;
}
}
Expand Down
2 changes: 2 additions & 0 deletions compiler/luci/tests/test.lst
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@ addread(Reshape_000)
addread(Reshape_001)
addread(Reshape_002)
#addread(Reshape_003) # no input, no option is not supported
addread(Reshape_004)
addread(Reshape_U8_000)
addread(ResizeBilinear_000)
addread(ResizeBilinear_U8_000)
Expand Down Expand Up @@ -374,6 +375,7 @@ addwrite(Reshape_000)
addwrite(Reshape_001)
addwrite(Reshape_002)
#addwrite(Reshape_003) # no input, no option is not supported
addwrite(Reshape_004)
addwrite(Reshape_U8_000)
addwrite(ResizeBilinear_000)
addwrite(ResizeBilinear_U8_000)
Expand Down
31 changes: 31 additions & 0 deletions res/TensorFlowLiteRecipes/Reshape_004/test.recipe
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# NOTE test model for 0 in shape.
# May not work in interpreter.
operand {
name: "ifm"
type: FLOAT32
shape { dim: 1 dim: 3 dim: 2 dim: 3 }
}
operand {
name: "shape"
type: INT32
shape { dim: 3 }
# shape entries: 0 = keep the corresponding input dim (here dim 0 -> 1),
# -1 = infer this dim from the remaining element count (here 2*3 = 6)
filler { tag: "explicit" arg: "0" arg: "3" arg: "-1" }
}
operand {
name: "ofm"
type: FLOAT32
# expected result of reshaping 1x3x2x3 (18 elements) with shape [0, 3, -1]
shape { dim: 1 dim: 3 dim: 6 }
}
operation {
type: "Reshape"
# new_shape mirrors the "shape" input operand above
reshape_options {
new_shape: 0
new_shape: 3
new_shape: -1
}
input: "ifm"
input: "shape"
output: "ofm"
}
input: "ifm"
output: "ofm"

0 comments on commit 6e938e2

Please sign in to comment.