[onert] Simplify OperationLowerInfo usage (#13572)
This commit removes OperationLowerInfo and uses Backend directly.

ONE-DCO-1.0-Signed-off-by: Hyeongseok Oh <[email protected]>
hseok-oh authored Aug 1, 2024
1 parent 28de5e3 commit 2a2dbc6
Showing 20 changed files with 66 additions and 159 deletions.
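
For readers skimming the diff below, the core of the change fits in a small stand-alone sketch. The Backend, OperationIndex, and OperationLowerInfo types here are simplified stand-ins rather than the actual onert classes; the point is only that the per-operation OperationLowerInfo wrapper, which essentially held a Backend pointer (plus a layout), gives way to a plain map from operation index to Backend, so lookups lose one level of indirection.

#include <cstdint>
#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>

// Simplified stand-ins for illustration only; not the real onert classes.
struct Backend
{
  std::string id;
};

using OperationIndex = uint32_t;

// Before: every operation owned an OperationLowerInfo that wrapped the backend.
struct OperationLowerInfo
{
  const Backend *backend_;
  const Backend *backend() const { return backend_; }
};

int main()
{
  Backend cpu{"cpu"};

  // Old shape: a container of owned OperationLowerInfo objects; two-step lookup.
  std::unordered_map<OperationIndex, std::unique_ptr<OperationLowerInfo>> old_info;
  old_info.emplace(0, std::make_unique<OperationLowerInfo>(OperationLowerInfo{&cpu}));
  const Backend *old_backend = old_info.at(0)->backend();

  // New shape: map straight to the (non-owning) Backend pointer; one-step lookup.
  std::unordered_map<OperationIndex, const Backend *> new_info;
  new_info.emplace(0, &cpu);
  const Backend *new_backend = new_info.at(0);

  std::cout << old_backend->id << " == " << new_backend->id << std::endl;
  return 0;
}

The map stores non-owning pointers, which matches how the diff itself treats backends: raw const Backend * values that are assumed to outlive the lowered graph.
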
8 changes: 5 additions & 3 deletions runtime/onert/backend/ruy/BackendContext.h
@@ -17,10 +17,12 @@
#ifndef __ONERT_BACKEND_RUY_BACKEND_CONTEXT_H__
#define __ONERT_BACKEND_RUY_BACKEND_CONTEXT_H__

#include <backend/BackendContext.h>
#include "TensorBuilder.h"
#include "KernelGenerator.h"
#include "ExternalContext.h"
#include "KernelGenerator.h"
#include "TensorBuilder.h"

#include <backend/BackendContext.h>
#include <compiler/GraphLowerInfo.h>

namespace onert
{
2 changes: 1 addition & 1 deletion runtime/onert/core/include/backend/BackendContext.h
@@ -21,8 +21,8 @@
#include "ir/Graph.h"
#include "ir/OperationIndexMap.h"
#include "ir/OperandIndexMap.h"
#include "compiler/GraphLowerInfo.h"
#include "exec/FunctionSequence.h"
#include "util/Set.h"

namespace onert
{
14 changes: 7 additions & 7 deletions runtime/onert/core/include/compiler/CodeMap.h
@@ -17,11 +17,12 @@
#ifndef __ONERT_COMPILER_CODE_MAP_H__
#define __ONERT_COMPILER_CODE_MAP_H__

#include <unordered_map>
#include "backend/Backend.h"
#include "exec/FunctionSequence.h"
#include "ir/Index.h"
#include "ir/IOperation.h"
#include "exec/FunctionSequence.h"
#include "OperationLowerInfo.h"

#include <unordered_map>

namespace onert
{
@@ -32,13 +33,12 @@ struct CodeAndInfo
{
ir::OperationIndex op_ind;
const ir::IOperation *op;
const OperationLowerInfo *lower_info;
const backend::Backend *op_backend;
std::unique_ptr<exec::FunctionSequence> fn_seq;

CodeAndInfo(const ir::OperationIndex op_ind, const ir::IOperation *op,
const OperationLowerInfo *lower_info,
std::unique_ptr<exec::FunctionSequence> &&fn_seq)
: op_ind{op_ind}, op{op}, lower_info{lower_info}, fn_seq{std::move(fn_seq)}
const backend::Backend *op_backend, std::unique_ptr<exec::FunctionSequence> &&fn_seq)
: op_ind{op_ind}, op{op}, op_backend{op_backend}, fn_seq{std::move(fn_seq)}
{
}
};
12 changes: 6 additions & 6 deletions runtime/onert/core/include/compiler/GraphLowerInfo.h
@@ -17,13 +17,13 @@
#ifndef __ONERT_COMPILER_GRAPH_LOWER_INFO_H__
#define __ONERT_COMPILER_GRAPH_LOWER_INFO_H__

#include <memory>
#include <unordered_map>

#include "backend/Backend.h"
#include "compiler/OperandLowerInfo.h"
#include "compiler/OperationLowerInfo.h"
#include "util/ObjectManager.h"
#include "ir/Index.h"
#include "util/ObjectManager.h"

#include <memory>
#include <unordered_map>

namespace onert
{
@@ -32,7 +32,7 @@ namespace compiler

struct GraphLowerInfo
{
util::ObjectManager<ir::OperationIndex, OperationLowerInfo> operation;
std::unordered_map<ir::OperationIndex, const backend::Backend *> operation;
util::ObjectManager<ir::OperandIndex, OperandLowerInfo> operand;
};

52 changes: 0 additions & 52 deletions runtime/onert/core/include/compiler/OperationLowerInfo.h

This file was deleted.

11 changes: 6 additions & 5 deletions runtime/onert/core/include/compiler/train/TrainableCodeMap.h
@@ -17,11 +17,12 @@
#ifndef __ONERT_COMPILER_TRAIN_TRAINABLE_CODE_MAP_H__
#define __ONERT_COMPILER_TRAIN_TRAINABLE_CODE_MAP_H__

#include <unordered_map>
#include "compiler/OperationLowerInfo.h"
#include "backend/Backend.h"
#include "exec/train/TrainableFnSequence.h"
#include "ir/train/ITrainableOperation.h"

#include <unordered_map>

namespace onert
{
namespace compiler
@@ -33,14 +34,14 @@ struct TrainableCodeAndInfo
{
ir::OperationIndex op_ind;
const ir::train::ITrainableOperation *op;
const OperationLowerInfo *lower_info;
const backend::Backend *op_backend;
// TODO Change to TrainableFnSequence
std::unique_ptr<exec::train::TrainableFnSequence> tn_seq;

TrainableCodeAndInfo(const ir::OperationIndex op_ind, const ir::train::ITrainableOperation *op,
const OperationLowerInfo *lower_info,
const backend::Backend *op_backend,
std::unique_ptr<exec::train::TrainableFnSequence> &&tn_seq)
: op_ind{op_ind}, op{op}, lower_info{lower_info}, tn_seq{std::move(tn_seq)}
: op_ind{op_ind}, op{op}, op_backend{op_backend}, tn_seq{std::move(tn_seq)}
{
}
};
7 changes: 4 additions & 3 deletions runtime/onert/core/src/backend/builtin/BackendContext.h
@@ -17,10 +17,11 @@
#ifndef __ONERT_BACKEND_BUILTIN_BACKEND_CONTEXT_H__
#define __ONERT_BACKEND_BUILTIN_BACKEND_CONTEXT_H__

#include <backend/BackendContext.h>
#include "TensorBuilder.h"
#include "KernelGenerator.h"
#include "ExternalContext.h"
#include "KernelGenerator.h"
#include "TensorBuilder.h"
#include "backend/BackendContext.h"
#include "compiler/GraphLowerInfo.h"

namespace onert
{
26 changes: 13 additions & 13 deletions runtime/onert/core/src/compiler/ExecutorFactory.cc
@@ -186,7 +186,7 @@ createBackendContexts(compiler::ILoweredGraph &lgraph, bool linear_executor,
whole_graph.operations().iterate(
[&](const ir::OperationIndex &op_ind, const ir::IOperation &operation) {
auto &op_li = lgraph.lower_info().operation;
auto backend = op_li.at(op_ind).backend();
const auto backend = op_li.at(op_ind);
if (context_data_map.find(backend) == context_data_map.end())
init_context_data(backend);

@@ -315,8 +315,8 @@ void ExecutorFactory::prepareMigrantTensors(compiler::ILoweredGraph &lowered_gra

lowered_graph.graph().operations().iterate(
[&](const ir::OperationIndex &op_ind, const ir::IOperation &op) {
auto lower_info = lowered_graph.lower_info().operation.getRawPtr(op_ind);
auto &backend_ctx = backend_contexts.at(lower_info->backend());
const auto backend = lowered_graph.lower_info().operation.at(op_ind);
auto &backend_ctx = backend_contexts.at(backend);
for (auto &&ind :
(op.getInputs() + op.getOutputs()) | ir::Remove::DUPLICATED | ir::Remove::UNDEFINED)
{
@@ -471,12 +471,12 @@ ExecutorFactory::createLinearExecutor(std::unique_ptr<compiler::LoweredGraph> lo
for (auto &&[op_ind, fn_seq] : codes)
{
auto &op = lowered_graph->graph().operations().at(op_ind);
auto lower_info = lowered_graph->lower_info().operation.getRawPtr(op_ind);
const auto backend = lowered_graph->lower_info().operation.at(op_ind);
if (options->he_profiling_mode)
fn_seq->wrap<SyncFunction>(lower_info->backend()->config());
fn_seq->wrap<SyncFunction>(backend->config());
if (!dealloc_list_map[op_ind].empty())
fn_seq->append(std::make_unique<DeallocFunction>(dealloc_list_map[op_ind]));
builder.append(op_ind, {op_ind, &op, lower_info, std::move(fn_seq)});
builder.append(op_ind, {op_ind, &op, backend, std::move(fn_seq)});
}
}

@@ -542,10 +542,10 @@ ExecutorFactory::createDataflowExecutor(std::unique_ptr<compiler::LoweredGraph>
for (auto &&[op_ind, fn_seq] : codes)
{
auto &op = lowered_graph->graph().operations().at(op_ind);
auto lower_info = lowered_graph->lower_info().operation.getRawPtr(op_ind);
const auto backend = lowered_graph->lower_info().operation.at(op_ind);
if (options->he_profiling_mode)
fn_seq->wrap<SyncFunction>(lower_info->backend()->config());
builder.append(op_ind, {op_ind, &op, lower_info, std::move(fn_seq)});
fn_seq->wrap<SyncFunction>(backend->config());
builder.append(op_ind, {op_ind, &op, backend, std::move(fn_seq)});
}
}

@@ -608,8 +608,8 @@ void ExecutorFactory::prepareMigrantTensors(

lowered_graph.graph().operations().iterate(
[&](const ir::OperationIndex &op_ind, const ir::IOperation &op) {
auto lower_info = lowered_graph.lower_info().operation.getRawPtr(op_ind);
auto &backend_ctx = backend_contexts.at(lower_info->backend());
const auto backend = lowered_graph.lower_info().operation.at(op_ind);
auto &backend_ctx = backend_contexts.at(backend);
for (auto &&ind :
(op.getInputs() + op.getOutputs()) | ir::Remove::DUPLICATED | ir::Remove::UNDEFINED)
{
@@ -856,11 +856,11 @@ exec::IExecutor *ExecutorFactory::createTrainableExecutor(
for (auto &&[op_ind, tn_seq] : codes)
{
auto &op = lowered_graph->trainable_graph().operation(op_ind);
auto lower_info = lowered_graph->lower_info().operation.getRawPtr(op_ind);
const auto backend = lowered_graph->lower_info().operation.at(op_ind);

assert(code_map.find(op_ind) == code_map.end());
code_map.insert(
{op_ind, train::TrainableCodeAndInfo{op_ind, &op, lower_info, std::move(tn_seq)}});
{op_ind, train::TrainableCodeAndInfo{op_ind, &op, backend, std::move(tn_seq)}});
}
}

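
The executor-building loops above now hand the resolved Backend pointer straight to each code entry. The following stand-alone sketch compresses that flow; CodeAndInfo, FunctionSequence, and the index/backend types are toy stand-ins for the real onert classes, so read it as an illustration of the pattern rather than the actual ExecutorFactory code.

#include <cstdint>
#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

// Toy stand-ins for onert types; for illustration only.
struct Backend
{
  std::string id;
};
struct FunctionSequence
{
  std::string name;
};
using OperationIndex = uint32_t;

// Mirrors the reworked CodeAndInfo: it keeps the Backend pointer itself
// instead of an OperationLowerInfo.
struct CodeAndInfo
{
  OperationIndex op_ind;
  const Backend *op_backend;
  std::unique_ptr<FunctionSequence> fn_seq;
};

int main()
{
  Backend cpu{"cpu"};
  Backend acl{"acl_neon"};

  // Per-operation backend assignment (the new lower_info().operation shape).
  std::unordered_map<OperationIndex, const Backend *> op_backend{{0, &cpu}, {1, &acl}};

  // Generated function sequences keyed by operation index.
  std::vector<std::pair<OperationIndex, std::unique_ptr<FunctionSequence>>> codes;
  codes.emplace_back(0, std::make_unique<FunctionSequence>(FunctionSequence{"conv"}));
  codes.emplace_back(1, std::make_unique<FunctionSequence>(FunctionSequence{"relu"}));

  std::unordered_map<OperationIndex, CodeAndInfo> code_map;
  for (auto &&[op_ind, fn_seq] : codes)
  {
    const auto backend = op_backend.at(op_ind); // single-step lookup, no ->backend()
    code_map.emplace(op_ind, CodeAndInfo{op_ind, backend, std::move(fn_seq)});
  }

  std::cout << code_map.at(1).op_backend->id << std::endl;
  return 0;
}
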
5 changes: 2 additions & 3 deletions runtime/onert/core/src/compiler/LoweredGraph.cc
@@ -121,7 +121,7 @@ void LoweredGraph::makeLowerInfo(const compiler::BackendResolv
// Set operand lower info using assigned backends to operations
_graph.operations().iterate([&](const ir::OperationIndex &op_ind, const ir::IOperation &) {
const ir::IOperation &op = _graph.operations().at(op_ind);
auto backend = backend_resolver.getBackend(op_ind);
const auto backend = backend_resolver.getBackend(op_ind);
if (!backend)
{
throw std::runtime_error{"Fail to find backend for " + op.name() + " operation"};
@@ -140,8 +140,7 @@ void LoweredGraph::makeLowerInfo(const compiler::BackendResolv
auto &operand_li = lower_info().operand.at(ind);
operand_li.addDefPermuteFactor(PermuteFactor{backend, backend_layout});
}
lower_info().operation.set(
op_ind, std::make_unique<compiler::OperationLowerInfo>(backend, backend_layout));
lower_info().operation.emplace(op_ind, backend);
});

// Handle graph inputs and outputs
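
On the producing side, makeLowerInfo no longer wraps the resolved backend in a heap-allocated OperationLowerInfo through ObjectManager::set; it records the Backend pointer with emplace and keeps the existing missing-backend check. A minimal stand-alone sketch of that pattern, with a hypothetical resolver result and simplified types:

#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>
#include <unordered_map>

// Simplified stand-ins; the real resolver is onert::compiler::BackendResolver.
struct Backend
{
  std::string id;
};
using OperationIndex = uint32_t;

int main()
{
  Backend cpu{"cpu"};
  Backend ruy{"ruy"};

  // Hypothetical resolver output: operation index -> assigned backend.
  std::unordered_map<OperationIndex, const Backend *> resolved{{0, &cpu}, {1, &ruy}};

  // New lower-info shape, populated with emplace instead of
  // ObjectManager::set + make_unique<OperationLowerInfo>.
  std::unordered_map<OperationIndex, const Backend *> operation_lower_info;

  for (const auto &[op_ind, backend] : resolved)
  {
    if (!backend)
      throw std::runtime_error{"Fail to find backend for operation"};
    operation_lower_info.emplace(op_ind, backend);
  }

  std::cout << operation_lower_info.at(0)->id << std::endl;
  return 0;
}
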
31 changes: 0 additions & 31 deletions runtime/onert/core/src/compiler/OperationLowerInfo.cc

This file was deleted.

6 changes: 2 additions & 4 deletions runtime/onert/core/src/compiler/pass/ConstantInsertionPass.cc
@@ -30,10 +30,8 @@ namespace pass

void ConstantInsertionPass::callback(const ir::OperationIndex &node_index, ir::IOperation &node)
{
const auto op_lower_info = _lowered_graph.lower_info().operation.getRawPtr(node_index);
const auto backend = op_lower_info->backend();
const auto layout = op_lower_info->layout();
const auto factor = PermuteFactor{backend, layout};
const auto backend = _lowered_graph.lower_info().operation.at(node_index);
const auto factor = PermuteFactor{backend, ir::Layout::NHWC};

for (const auto &input : node.getInputs() | ir::Remove::DUPLICATED | ir::Remove::UNDEFINED)
{
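
One behavioral detail worth noting: OperationLowerInfo also carried the per-operation layout, so this pass and the ones below now build their PermuteFactor with a fixed ir::Layout::NHWC. A minimal sketch of that construction, using simplified stand-ins for Backend, Layout, and PermuteFactor:

#include <iostream>
#include <string>

// Simplified stand-ins; the real PermuteFactor and Layout live under onert.
struct Backend
{
  std::string id;
};
enum class Layout
{
  NHWC,
  NCHW
};
struct PermuteFactor
{
  const Backend *backend;
  Layout layout;
};

int main()
{
  Backend cpu{"cpu"};

  // Old: layout came from OperationLowerInfo::layout().
  // New: the passes assume NHWC when building the factor.
  const auto factor = PermuteFactor{&cpu, Layout::NHWC};

  std::cout << factor.backend->id << (factor.layout == Layout::NHWC ? " / NHWC" : " / NCHW")
            << std::endl;
  return 0;
}
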
6 changes: 2 additions & 4 deletions runtime/onert/core/src/compiler/pass/ConstantLoweringPass.cc
@@ -31,10 +31,8 @@ namespace pass

void ConstantLoweringPass::callback(const ir::OperationIndex &node_index, ir::IOperation &node)
{
const auto op_lower_info = _lowered_graph.lower_info().operation.getRawPtr(node_index);
const auto backend = op_lower_info->backend();
const auto layout = op_lower_info->layout();
const auto factor = PermuteFactor{backend, layout};
const auto backend = _lowered_graph.lower_info().operation.at(node_index);
const auto factor = PermuteFactor{backend, ir::Layout::NHWC};

// Now this runtime does not support the node making output of operation as constant
for (const auto &input : node.getInputs() | ir::Remove::DUPLICATED | ir::Remove::UNDEFINED)
11 changes: 3 additions & 8 deletions runtime/onert/core/src/compiler/pass/PermutationInsertionPass.cc
@@ -19,7 +19,6 @@

#include "../../backend/builtin/Config.h"

#include "compiler/OperationLowerInfo.h"
#include "ir/operation/Permute.h"
#include "util/logging.h"

@@ -82,14 +81,11 @@ void PermutationInsertionPass::callback(const ir::OperandIndex &index, ir::Opera
continue;

auto &operation = _graph.operations().at(use);
auto op_li = _lowered_graph.lower_info().operation.getRawPtr(use);
assert(op_li);
const auto op_layout = op_li->layout();
const backend::Backend *backend = op_li->backend();
const auto backend = _lowered_graph.lower_info().operation.at(use);
assert(backend);
assert(operation.getInputs().contains(index));

auto new_index = factor_to_index.at({backend, op_layout});
auto new_index = factor_to_index.at({backend, ir::Layout::NHWC});
if (index != new_index)
{
// Update from operation
@@ -194,8 +190,7 @@ ir::OperationIndex PermutationInsertionPass::insertPermute(const ir::OperandInde
// Operation LowerInfo
{
auto &operation_li_map = _lowered_graph.lower_info().operation;
operation_li_map.set(node_index, std::make_unique<compiler::OperationLowerInfo>(
permute_node_backend, permute_node_layout));
operation_li_map.emplace(node_index, permute_node_backend);
}

// Update Use/Def info
@@ -170,8 +170,7 @@ void LoweredTrainableGraph::makeLowerInfo(const compiler::BackendResolver &backe
auto &operand_li = lower_info().operand.at(ind);
operand_li.addDefPermuteFactor(PermuteFactor{backend, backend_layout});
}
lower_info().operation.set(
op_ind, std::make_unique<compiler::OperationLowerInfo>(backend, backend_layout));
lower_info().operation.emplace(op_ind, backend);
});

// Handle graph inputs and outputs
@@ -69,10 +69,8 @@ void TrainableConstantInsertionPass::updateUseDef(const ir::OperandIndex &old_in
const ir::OperandIndex &new_index,
const ir::OperationIndex &node_index)
{
const auto op_lower_info = _lowered_graph.lower_info().operation.getRawPtr(node_index);
const auto backend = op_lower_info->backend();
const auto layout = op_lower_info->layout();
const auto factor = PermuteFactor{backend, layout};
const auto backend = _lowered_graph.lower_info().operation.at(node_index);
const auto factor = PermuteFactor{backend, ir::Layout::NHWC};

// Update the same inputs of a node at once because inputs of an operation have the same
// PermuteFactor