author    Inki Dae <inki.dae@samsung.com>  2020-06-02 18:12:42 +0900
committer Inki Dae <inki.dae@samsung.com>  2020-06-02 18:12:42 +0900
commit    b45f4147999f55ee8260b7cb31ec812b8bbfed42 (patch)
tree      62f2c4432d34510fa72c28403764f2df13ec4154
parent    04a0566ec72d7536c9bc1e74475876614aaa3ab9 (diff)
Change a backend type from VIVANTE to MLAPI
Change-Id: Ia4210279f8efc4875f0c0db813c9f03d8411f7ff
Signed-off-by: Inki Dae <inki.dae@samsung.com>
-rw-r--r--  include/inference_engine_type.h       2
-rwxr-xr-x  src/inference_engine_common_impl.cpp  4
2 files changed, 3 insertions, 3 deletions
diff --git a/include/inference_engine_type.h b/include/inference_engine_type.h
index 33c656e..a8f70d2 100644
--- a/include/inference_engine_type.h
+++ b/include/inference_engine_type.h
@@ -38,7 +38,7 @@ typedef enum {
INFERENCE_BACKEND_OPENCV, /**< OpenCV */
INFERENCE_BACKEND_TFLITE, /**< TensorFlow-Lite */
INFERENCE_BACKEND_ARMNN, /**< ARMNN */
- INFERENCE_BACKEND_VIVANTE, /** < Vivante */
+ INFERENCE_BACKEND_MLAPI, /** < ML Single API of NNStreamer.*/
INFERENCE_BACKEND_NNFW, /** < NNFW */
INFERENCE_BACKEND_MAX /**< Backend MAX */
} inference_backend_type_e;
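
For callers, the practical effect of this enum change is that code which previously bound the Vivante backend now passes INFERENCE_BACKEND_MLAPI. A minimal caller-side sketch follows; the header paths and the unqualified use of InferenceEngineCommon (namespace handling omitted) are assumptions for illustration, not confirmed by this commit.

// Hypothetical caller-side sketch; header paths and the missing namespace
// qualifier are assumptions made for illustration only.
#include "inference_engine_common_impl.h"
#include "inference_engine_type.h"

int BindMlapiBackend(InferenceEngineCommon &engine)
{
	// Before this change, the equivalent call used INFERENCE_BACKEND_VIVANTE.
	int ret = engine.BindBackend(INFERENCE_BACKEND_MLAPI);
	if (ret != INFERENCE_ENGINE_ERROR_NONE)
		return ret;

	return INFERENCE_ENGINE_ERROR_NONE;
}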
diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp
index ada716f..0befd03 100755
--- a/src/inference_engine_common_impl.cpp
+++ b/src/inference_engine_common_impl.cpp
@@ -197,7 +197,7 @@ int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path, in
}
// If a backend is nnstreamer then set a tensor filter plugin type.
- if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_VIVANTE) {
+ if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_MLAPI) {
int ret = mBackendHandle->SetPluginType(backend_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Failed to set a tensor filter plugin.");
@@ -266,7 +266,7 @@ int InferenceEngineCommon::BindBackend(int backend_type)
[INFERENCE_BACKEND_OPENCV] = "opencv",
[INFERENCE_BACKEND_TFLITE] = "tflite",
[INFERENCE_BACKEND_ARMNN] = "armnn",
- [INFERENCE_BACKEND_VIVANTE] = "nnstreamer",
+ [INFERENCE_BACKEND_MLAPI] = "nnstreamer",
[INFERENCE_BACKEND_NNFW] = "nnstreamer"
};
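
Taken together, the two hunks mean that both INFERENCE_BACKEND_MLAPI and INFERENCE_BACKEND_NNFW resolve to the same "nnstreamer" backend library and are then distinguished by the SetPluginType() call in InitBackendEngine(). The standalone sketch below mirrors that logic; the enum is truncated to the values visible in this diff, and ResolveBackendLibrary() / NeedsPluginType() are illustrative helpers, not part of the actual API.

// Standalone sketch of the selection logic after this change. The helper
// names are hypothetical; only the mapping and the NNFW/MLAPI check mirror
// the real code in inference_engine_common_impl.cpp.
#include <map>
#include <string>

enum inference_backend_type_e {
	// Enumerators preceding INFERENCE_BACKEND_OPENCV (if any) are omitted here.
	INFERENCE_BACKEND_OPENCV,  /**< OpenCV */
	INFERENCE_BACKEND_TFLITE,  /**< TensorFlow-Lite */
	INFERENCE_BACKEND_ARMNN,   /**< ARMNN */
	INFERENCE_BACKEND_MLAPI,   /**< ML Single API of NNStreamer */
	INFERENCE_BACKEND_NNFW,    /**< NNFW */
	INFERENCE_BACKEND_MAX      /**< Backend MAX */
};

// Both MLAPI and NNFW map to the "nnstreamer" backend library.
std::string ResolveBackendLibrary(int backend_type)
{
	static const std::map<int, std::string> backend_table = {
		{ INFERENCE_BACKEND_OPENCV, "opencv" },
		{ INFERENCE_BACKEND_TFLITE, "tflite" },
		{ INFERENCE_BACKEND_ARMNN,  "armnn" },
		{ INFERENCE_BACKEND_MLAPI,  "nnstreamer" },
		{ INFERENCE_BACKEND_NNFW,   "nnstreamer" },
	};

	auto it = backend_table.find(backend_type);
	return it != backend_table.end() ? it->second : std::string();
}

// nnstreamer-based backends additionally need a tensor filter plugin type,
// which is why InitBackendEngine() calls SetPluginType() only for them.
bool NeedsPluginType(int backend_type)
{
	return backend_type == INFERENCE_BACKEND_NNFW ||
	       backend_type == INFERENCE_BACKEND_MLAPI;
}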