author    Tae-Young Chung <ty83.chung@samsung.com>   2021-03-26 10:23:23 +0900
committer Tae-Young Chung <ty83.chung@samsung.com>   2021-03-26 10:23:26 +0900
commit    8a9f3702a23dea5b2daeada684cc48f8792a4b3a
tree      d3e6c4988373ef64e15a898078e6a30a2541481d
parent    43d75de671b4b96f3fa0c33fb7af911a498b5067
Remove swap and clear codes in destructor
The std::map destructor already deallocates all of the map's storage, so
we don't need to clear and swap member variables of std::map type in our destructor.
Change-Id: Ide07a16e6b723495428dbaab89884989dc0cb2f6
Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
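
For reference, a minimal sketch of why the removed code is redundant (the class and member names below are illustrative only, not the actual armnn backend types): a member std::map is destroyed together with its owning object, so an explicit clear()/swap() in the destructor frees nothing extra.

    #include <map>
    #include <string>

    class Holder {
    public:
    	~Holder() {
    		// Redundant: mTable's own destructor runs right after this body
    		// and releases all of its storage anyway.
    		// mTable.clear();
    		// std::map<std::string, int>().swap(mTable);
    	}

    private:
    	std::map<std::string, int> mTable; // deallocated automatically on destruction
    };

The clear()-then-swap idiom is only useful for shrinking a container that keeps living afterwards; inside a destructor the member is about to be destroyed anyway.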
 packaging/inference-engine-armnn.spec |  2 +-
 src/inference_engine_armnn.cpp        | 18 ------------------
 2 files changed, 1 insertion(+), 19 deletions(-)
diff --git a/packaging/inference-engine-armnn.spec b/packaging/inference-engine-armnn.spec
index d529f4d..83e7a44 100644
--- a/packaging/inference-engine-armnn.spec
+++ b/packaging/inference-engine-armnn.spec
@@ -1,7 +1,7 @@
 Name: inference-engine-armnn
 Summary: ARM Neural Network Runtime based implementation of inference-engine-interface
 Version: 0.0.1
-Release: 2
+Release: 3
 Group: Multimedia/Libraries
 License: Apache-2.0
 ExclusiveArch: %{arm} aarch64
diff --git a/src/inference_engine_armnn.cpp b/src/inference_engine_armnn.cpp
index baf96ad..29ba8c4 100644
--- a/src/inference_engine_armnn.cpp
+++ b/src/inference_engine_armnn.cpp
@@ -51,24 +51,6 @@ namespace ARMNNImpl
 
 	InferenceARMNN::~InferenceARMNN()
 	{
-		mDesignated_inputs.clear();
-		std::map<std::string, int>().swap(mDesignated_inputs);
-
-		mDesignated_outputs.clear();
-		std::map<std::string, int>().swap(mDesignated_outputs);
-
-		mInputBindingInfo.clear();
-		std::map<std::string, armnn::BindingPointInfo>().swap(mInputBindingInfo);
-
-		mOutputBindingInfo.clear();
-		std::map<std::string, armnn::BindingPointInfo>().swap(mOutputBindingInfo);
-
-		mInputProperty.layers.clear();
-		std::map<std::string, inference_engine_tensor_info>().swap(mInputProperty.layers);
-
-		mOutputProperty.layers.clear();
-		std::map<std::string, inference_engine_tensor_info>().swap(mOutputProperty.layers);
-
 		armnn::IRuntime::Destroy(sRuntime);
 		sRuntime = nullptr;
 	}