From c7bd3d2369866bf4c1edc061072ed2552543dac5 Mon Sep 17 00:00:00 2001
From: SeungHui Youn <61981457+zetwhite@users.noreply.github.com>
Date: Mon, 7 Oct 2024 10:13:42 +0900
Subject: [PATCH] [onert/train] Add LayerScopeManager into TensorManager
 (#14047)

This PR adds a LayerScopeMemoryManager member to TensorManager, along with
member functions to allocate layer-scope tensors and to claim/release their
memory plans.

ONE-DCO-1.0-Signed-off-by: seunghui youn
---
 runtime/onert/backend/train/TensorManager.cc | 23 +++++++++++++++++++-
 runtime/onert/backend/train/TensorManager.h  |  7 +++---
 2 files changed, 26 insertions(+), 4 deletions(-)

diff --git a/runtime/onert/backend/train/TensorManager.cc b/runtime/onert/backend/train/TensorManager.cc
index d8404fcc9ed..22ed48cc02f 100644
--- a/runtime/onert/backend/train/TensorManager.cc
+++ b/runtime/onert/backend/train/TensorManager.cc
@@ -58,7 +58,8 @@ TensorManager::TensorManager(const std::shared_ptr<TensorRegistry> &reg, uint32_
     _trainable_mgr{new TrainableMemoryManager(optim_vars_count)},
     _back_prop_mgr{new MemoryManager()}, _gradient_mgr{new MemoryManager()},
     // TODO Find a suitable planner of disposable tensors to reduce peak memory usage
-    _disposable_back_prop_mgr{new DisposableMemoryManager()}, _tensors{reg}
+    _disposable_back_prop_mgr{new DisposableMemoryManager()},
+    _layer_scope_mgr{new LayerScopeMemoryManager()}, _tensors{reg}
 {
   // DO NOTHING
 }
@@ -106,6 +107,12 @@ void TensorManager::allocateDisposableBackPropTensors()
                  std::string{"DISPOSABLE BACK_PROP TENSOR "});
 }
 
+void TensorManager::allocateLayerScopeTensors()
+{
+  allocateMemory(_layer_scope_mgr.get(), _tensors->layerscope_tensors(),
+                 std::string{" LAYERSCOPE TENSOR "});
+}
+
 void TensorManager::claimNonConstPlan(const ir::OperandIndex &index)
 {
   auto tensor = _tensors->getNonConstTensor(index);
@@ -187,6 +194,20 @@ void TensorManager::releaseDisposableBackPropPlan(const DisposableTensorIndex &i
   _disposable_back_prop_mgr->releasePlan(index);
 }
 
+void TensorManager::claimLayerScopePlan(const LayerScopeTensorIndex &index)
+{
+  const auto tensor = _tensors->getLayerScopeTensor(index);
+
+  auto size = alignedSize(tensor->total_size(), _align);
+  _layer_scope_mgr->claimPlan(index, size);
+}
+
+void TensorManager::releaseLayerScopePlan(const LayerScopeTensorIndex &index)
+{
+  assert(_tensors->getLayerScopeTensor(index));
+  _layer_scope_mgr->releasePlan(index);
+}
+
 } // namespace train
 } // namespace backend
 } // namespace onert
diff --git a/runtime/onert/backend/train/TensorManager.h b/runtime/onert/backend/train/TensorManager.h
index c9553c3913e..faec36b8016 100644
--- a/runtime/onert/backend/train/TensorManager.h
+++ b/runtime/onert/backend/train/TensorManager.h
@@ -49,6 +49,7 @@ class TensorManager
   void allocateBackPropTensors();
   void allocateGradientTensors();
   void allocateDisposableBackPropTensors();
+  void allocateLayerScopeTensors();
   // TODO Add member functions to deallocate tensors
 
   void claimNonConstPlan(const ir::OperandIndex &ind);
@@ -61,7 +62,8 @@ class TensorManager
   void releaseGradientPlan(const ir::OperandIndex &ind);
   void claimDisposableBackPropPlan(const DisposableTensorIndex &ind);
   void releaseDisposableBackPropPlan(const DisposableTensorIndex &ind);
-  // TODO Add member functions related to LayerScopeMemoryManager
+  void claimLayerScopePlan(const LayerScopeTensorIndex &ind);
+  void releaseLayerScopePlan(const LayerScopeTensorIndex &ind);
 
 private:
   std::unique_ptr<MemoryManager> _nonconst_mgr;
@@ -69,8 +71,7 @@ class TensorManager
   std::unique_ptr<MemoryManager> _back_prop_mgr;
   std::unique_ptr<MemoryManager> _gradient_mgr;
   std::unique_ptr<DisposableMemoryManager> _disposable_back_prop_mgr;
-  // TODO: enable _layer_scope_mgr
-  // std::unique_ptr<LayerScopeMemoryManager> _layer_scope_mgr;
+  std::unique_ptr<LayerScopeMemoryManager> _layer_scope_mgr;
   const std::shared_ptr<TensorRegistry> _tensors;
 };
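
A minimal usage sketch of the new API, for review purposes only. The free
function name planAndAllocateLayerScope and the single flat index list are
hypothetical; in onert the claim/release ordering would be decided by the
tensor planner according to tensor lifetimes, not by back-to-back loops like
these.

  #include <vector>
  #include "TensorManager.h" // runtime/onert/backend/train/TensorManager.h

  using onert::backend::train::LayerScopeTensorIndex;
  using onert::backend::train::TensorManager;

  // Hypothetical helper: claim a memory plan for every registered
  // layer-scope tensor, release each plan when its lifetime ends, then
  // materialize all planned regions in a single allocation pass.
  void planAndAllocateLayerScope(TensorManager &mgr,
                                 const std::vector<LayerScopeTensorIndex> &indices)
  {
    for (const auto &idx : indices)
      mgr.claimLayerScopePlan(idx); // plans an aligned region for the tensor

    for (const auto &idx : indices)
      mgr.releaseLayerScopePlan(idx); // lets the planner reuse the region

    mgr.allocateLayerScopeTensors(); // allocates all planned regions at once
  }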