Diffstat (limited to 'inference-engine/tests/unit/inference_engine_tests')
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/alocator_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/blob_proxy_test.cpp | 23
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/blob_test.cpp | 4
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/caslesseq_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cnn_network_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_base_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_default_tests.cpp | 5
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_internal.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/callback_manager_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_base_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_async_only_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executor_manager_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/iinference_plugin_internal_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/memory_state_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/plugin_base_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_common_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_executor_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_synchronizer_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests_utils.hpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_with_stages_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/data_test.cpp | 19
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/debug_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/device_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/exception_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/inference_engine_plugin_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/inference_engine_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/layer_transform_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/layers_test.cpp | 169
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/locked_memory_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/plugin_dispatcher_tests.cpp | 18
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/pointer_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/pre_allocator_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/precision_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/preprocess_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/range_iterator_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/response_buffer_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/shared_object_loader_test.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/so_pointer_tests.cpp | 1
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/tensor_desc_test.cpp | 28
-rw-r--r--  inference-engine/tests/unit/inference_engine_tests/util_test.cpp | 1
44 files changed, 203 insertions(+), 100 deletions(-)
diff --git a/inference-engine/tests/unit/inference_engine_tests/alocator_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/alocator_tests.cpp
index 6fc2b8eb1..178f11676 100644
--- a/inference-engine/tests/unit/inference_engine_tests/alocator_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/alocator_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/blob_proxy_test.cpp b/inference-engine/tests/unit/inference_engine_tests/blob_proxy_test.cpp
index 427098bd8..9de222c50 100644
--- a/inference-engine/tests/unit/inference_engine_tests/blob_proxy_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/blob_proxy_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -47,23 +46,14 @@ TEST_F(BlobProxyTests, convertByteBlobToFloat) {
}
}
-TEST_F(BlobProxyTests, shouldThrowOnAllocate) {
- SizeVector v = {1, 2, 3};
- auto allocator = createMockAllocator();
-
- TBlobProxy<float> proxy(Precision::FP32, C, TBlob<float>(Precision::FP32, CHW, v, dynamic_pointer_cast<IAllocator>(allocator)), 2, {2});
-
- EXPECT_THROW(((Blob&)proxy).allocate(), InferenceEngineException);
-}
-
-TEST_F(BlobProxyTests, shouldThrowOnDeAllocate)
+TEST_F(BlobProxyTests, shouldNotDeAllocate)
{
SizeVector v = {1, 2, 3};
auto allocator = createMockAllocator();
TBlobProxy<float> proxy(Precision::FP32, C, TBlob<float>(Precision::FP32, CHW, v, dynamic_pointer_cast<IAllocator>(allocator)), 2, {2});
- EXPECT_THROW(((Blob&)proxy).deallocate(), InferenceEngineException);
+ EXPECT_EQ(((Blob&)proxy).deallocate(), false);
}
@@ -236,15 +226,6 @@ TEST_F(BlobProxyTests, canReturnConstantData) {
ASSERT_NE(proxy.cbuffer().as<const void*>(), nullptr);
}
-TEST_F(BlobProxyTests, noAllocDeallocLogic) {
- TBlob<float>::Ptr b(new TBlob<float>(Precision::FP32, C));
- b->set({ 1.0f, 2.0f, 3.0f });
- TBlobProxy<uint8_t> proxy(Precision::U8, C, b, 0, { b->byteSize() });
- ASSERT_ANY_THROW(((Blob*) &proxy)->allocate());
- ASSERT_ANY_THROW(((Blob*) &proxy)->deallocate());
-}
-
-
TEST_F(BlobProxyTests, canIterateOverData) {
TBlob<uint8_t>::Ptr b(new TBlob<uint8_t >(Precision::FP32, C));
b->set({ 1, 2, 3 });
diff --git a/inference-engine/tests/unit/inference_engine_tests/blob_test.cpp b/inference-engine/tests/unit/inference_engine_tests/blob_test.cpp
index 59015c265..e104c4cd1 100644
--- a/inference-engine/tests/unit/inference_engine_tests/blob_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/blob_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -35,7 +34,6 @@ protected:
return shared_ptr<MockAllocator>(new MockAllocator());
}
-
public:
};
@@ -489,4 +487,4 @@ TEST_F(BlobTests, makeRoiBlobWrongSize) {
// try to create ROI blob with wrong size
ROI roi = {0, 1, 1, 4, 4}; // cropped picture with: id = 0, (x,y) = (1,1), sizeX (W) = 4, sizeY (H) = 4
ASSERT_THROW(make_shared_blob(blob, roi), InferenceEngine::details::InferenceEngineException);
-}
\ No newline at end of file
+}
diff --git a/inference-engine/tests/unit/inference_engine_tests/caslesseq_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/caslesseq_tests.cpp
index 98037672a..b0c23e583 100644
--- a/inference-engine/tests/unit/inference_engine_tests/caslesseq_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/caslesseq_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cnn_network_test.cpp b/inference-engine/tests/unit/inference_engine_tests/cnn_network_test.cpp
index 69bfdb194..4a4b3d4c6 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cnn_network_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cnn_network_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_base_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_base_tests.cpp
index beed3e5c4..425b06276 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_base_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_base_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_tests.cpp
index 00fe58f38..b1c93683e 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_default_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_default_tests.cpp
index 5ac02ae19..594ee1912 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_default_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_default_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -170,9 +169,9 @@ TEST_F(InferRequestThreadSafeDefaultTests, callbackTakesOKIfAsyncRequestWasOK) {
testRequest), [](IInferRequest *p) { p->Release(); });
testRequest->SetPointerToPublicInterface(asyncRequest);
- testRequest->SetCompletionCallback({[](InferenceEngine::IInferRequest::Ptr request, StatusCode status) {
+ testRequest->SetCompletionCallback([](InferenceEngine::IInferRequest::Ptr request, StatusCode status) {
ASSERT_EQ((int) StatusCode::OK, status);
- }});
+ });
EXPECT_CALL(*mockInferRequestInternal.get(), InferImpl()).Times(1);
testRequest->StartAsync();
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_internal.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_internal.cpp
index 285938e72..49cdadc5c 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_internal.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/async_infer_request_thread_safe_internal.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/callback_manager_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/callback_manager_tests.cpp
index 0d20efd84..7d1013793 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/callback_manager_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/callback_manager_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_base_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_base_tests.cpp
index 320d3df37..f4c472e72 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_base_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_base_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_tests.cpp
index d31ca1792..399ec7a52 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_async_only_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_async_only_tests.cpp
index 76c44fe0b..2542017f5 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_async_only_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_async_only_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_tests.cpp
index 23544e611..3c09801ec 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executable_network_thread_safe_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executor_manager_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executor_manager_tests.cpp
index 909380aa4..450bcd3ec 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executor_manager_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/executor_manager_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/iinference_plugin_internal_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/iinference_plugin_internal_tests.cpp
index 701085c8b..a76857b46 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/iinference_plugin_internal_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/iinference_plugin_internal_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/memory_state_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/memory_state_tests.cpp
index 33a431f6a..799f0bd76 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/memory_state_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/memory_state_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/plugin_base_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/plugin_base_tests.cpp
index 6779fea56..3df6a6021 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/plugin_base_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/plugin_base_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_common_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_common_tests.cpp
index b535cc97e..e0918ab8b 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_common_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_common_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_executor_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_executor_tests.cpp
index b9d4e2602..0cbc51634 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_executor_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_executor_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_synchronizer_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_synchronizer_tests.cpp
index f94b7b890..47b1ef213 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_synchronizer_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_synchronizer_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests.cpp
index a2836e1b0..792e134a8 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests_utils.hpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests_utils.hpp
index 6523c2631..5f4238f6d 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests_utils.hpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_tests_utils.hpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_with_stages_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_with_stages_tests.cpp
index 38e4be252..6f665e651 100644
--- a/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_with_stages_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/cpp_interfaces/task_with_stages_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/data_test.cpp b/inference-engine/tests/unit/inference_engine_tests/data_test.cpp
index 263859b0f..883986141 100644
--- a/inference-engine/tests/unit/inference_engine_tests/data_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/data_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -93,3 +92,21 @@ TEST_F(DataTests, canSetNotEmptyDimsForBlockingDescBlocked) {
TEST_F(DataTests, canSetNotEmptyDimsForBlockingDescNCHW) {
ASSERT_NO_THROW(BlockingDesc(notEmptyDims, NCHW));
}
+
+TEST_F(DataTests, setPrecision) {
+ Data data(data_name, emptyDims, Precision::FP32, Layout::NCHW);
+
+ EXPECT_EQ(Precision::FP32, data.precision);
+ EXPECT_EQ(Precision::FP32, data.getPrecision());
+ EXPECT_EQ(Precision::FP32, data.getTensorDesc().getPrecision());
+
+ data.setPrecision(Precision::FP16);
+ EXPECT_EQ(Precision::FP16, data.precision);
+ EXPECT_EQ(Precision::FP16, data.getPrecision());
+ EXPECT_EQ(Precision::FP16, data.getTensorDesc().getPrecision());
+
+ data.precision = Precision::Q78;
+ EXPECT_EQ(Precision::Q78, data.precision);
+ EXPECT_EQ(Precision::Q78, data.getPrecision());
+ EXPECT_EQ(Precision::Q78, data.getTensorDesc().getPrecision());
+}
diff --git a/inference-engine/tests/unit/inference_engine_tests/debug_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/debug_tests.cpp
index 7833b9025..5acaeabe3 100644
--- a/inference-engine/tests/unit/inference_engine_tests/debug_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/debug_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/device_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/device_tests.cpp
index 13f4a8a6d..c83d89aae 100644
--- a/inference-engine/tests/unit/inference_engine_tests/device_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/device_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/exception_test.cpp b/inference-engine/tests/unit/inference_engine_tests/exception_test.cpp
index 8dff71f14..fc93d4881 100644
--- a/inference-engine/tests/unit/inference_engine_tests/exception_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/exception_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/inference_engine_plugin_test.cpp b/inference-engine/tests/unit/inference_engine_tests/inference_engine_plugin_test.cpp
index baa5e8cff..a23b74c55 100644
--- a/inference-engine/tests/unit/inference_engine_tests/inference_engine_plugin_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/inference_engine_plugin_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/inference_engine_test.cpp b/inference-engine/tests/unit/inference_engine_tests/inference_engine_test.cpp
index a0ea44f03..ab307cf40 100644
--- a/inference-engine/tests/unit/inference_engine_tests/inference_engine_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/inference_engine_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/layer_transform_test.cpp b/inference-engine/tests/unit/inference_engine_tests/layer_transform_test.cpp
index a62750224..fcb5875da 100644
--- a/inference-engine/tests/unit/inference_engine_tests/layer_transform_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/layer_transform_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/layers_test.cpp b/inference-engine/tests/unit/inference_engine_tests/layers_test.cpp
index 9a39c1ebf..6d18b6422 100644
--- a/inference-engine/tests/unit/inference_engine_tests/layers_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/layers_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -353,33 +352,56 @@ TEST_F(LayersTests, poolIRv2BackwardCompatibility) {
pool._stride[Y_AXIS] = 3u;
ASSERT_EQ(pool._stride_y, 3u);
}
+
TEST_F(LayersTests, canGetPadBeginForConvolution) {
ConvolutionLayer layer(getDefaultParamsForLayer());
+ PropertyVector<unsigned> ref{{1, 2}};
+ layer._padding = ref;
+
+ auto allPads = getPaddings(layer);
+
+ ASSERT_EQ(allPads.begin, ref);
+}
+
+TEST_F(LayersTests, canGetPadEndForConvolution) {
+ ConvolutionLayer layer(getDefaultParamsForLayer());
+ PropertyVector<unsigned> ref{{1, 2}};
+ layer._pads_end = ref;
+
+ auto allPads = getPaddings(layer);
+
+ ASSERT_EQ(allPads.end, ref);
+}
+
+TEST_F(LayersTests, canGetPad3DBeginForConvolution) {
+ ConvolutionLayer layer(getDefaultParamsForLayer());
PropertyVector<unsigned> ref;
ref.insert(X_AXIS, 1);
ref.insert(Y_AXIS, 2);
+ ref.insert(Z_AXIS, 3);
layer._padding = ref;
- auto allPads = getConvPaddings(layer);
+ auto allPads = getPaddings(layer);
ASSERT_EQ(allPads.begin, ref);
}
-TEST_F(LayersTests, canGetPadEndForConvolution) {
+TEST_F(LayersTests, canGetPad3DEndForConvolution) {
ConvolutionLayer layer(getDefaultParamsForLayer());
PropertyVector<unsigned> ref;
ref.insert(X_AXIS, 1);
ref.insert(Y_AXIS, 2);
+ ref.insert(Z_AXIS, 3);
layer._pads_end = ref;
- auto allPads = getConvPaddings(layer);
+ auto allPads = getPaddings(layer);
ASSERT_EQ(allPads.end, ref);
}
TEST_F(LayersTests, returnDefaultPadForEmptyConvolution) {
ConvolutionLayer layer(getDefaultParamsForLayer());
- auto allPads = getConvPaddings(layer);
+ auto allPads = getPaddings(layer);
PropertyVector<unsigned> ref_begin(2, 0u);
PropertyVector<unsigned> ref_end;
ASSERT_EQ(allPads.begin, ref_begin);
@@ -389,16 +411,21 @@ TEST_F(LayersTests, returnDefaultPadForEmptyConvolution) {
TEST_F(LayersTests, returnEmptyPadForValidPadConvolution) {
ConvolutionLayer layer(getDefaultParamsForLayer());
layer.params["auto_pad"] = "valid";
- auto allPads = getConvPaddings(layer);
- PropertyVector<unsigned> ref(2,0);
+ auto allPads = getPaddings(layer);
+ PropertyVector<unsigned> ref(2,0u);
ASSERT_EQ(allPads.begin, ref);
ASSERT_EQ(allPads.end, ref);
+
+ PropertyVector<unsigned> ref3D(2,0u);
+ layer._kernel.insert(Z_AXIS, 0u);
+ ASSERT_EQ(allPads.begin, ref3D);
+ ASSERT_EQ(allPads.end, ref3D);
}
TEST_F(LayersTests, throwOnSamePadForEmptyConvolution) {
ConvolutionLayer layer(getDefaultParamsForLayer());
layer.params["auto_pad"] = "same_upper";
- ASSERT_THROW(getConvPaddings(layer), details::InferenceEngineException);
+ ASSERT_THROW(getPaddings(layer), details::InferenceEngineException);
}
TEST_F(LayersTests, throwOnInvalidDimsSamePadForConvolution) {
@@ -406,7 +433,7 @@ TEST_F(LayersTests, throwOnInvalidDimsSamePadForConvolution) {
layer.params["auto_pad"] = "same_upper";
auto emptyData = std::make_shared<InferenceEngine::Data>("", Precision::UNSPECIFIED);
layer.insData.push_back(emptyData);
- ASSERT_THROW(getConvPaddings(layer), details::InferenceEngineException);
+ ASSERT_THROW(getPaddings(layer), details::InferenceEngineException);
}
TEST_F(LayersTests, throwOn2DSamePadForConvolution) {
@@ -415,7 +442,7 @@ TEST_F(LayersTests, throwOn2DSamePadForConvolution) {
auto notEmptyData = std::make_shared<InferenceEngine::Data>("", SizeVector{1, 1}, Precision::UNSPECIFIED,
Layout::NC);
layer.insData.push_back(notEmptyData);
- ASSERT_THROW(getConvPaddings(layer), details::InferenceEngineException);
+ ASSERT_THROW(getPaddings(layer), details::InferenceEngineException);
}
TEST_F(LayersTests, throwWithNotEnoughParamsSamePadForConvolution) {
@@ -423,7 +450,12 @@ TEST_F(LayersTests, throwWithNotEnoughParamsSamePadForConvolution) {
layer.params["auto_pad"] = "same_upper";
auto notEmptyData = std::make_shared<InferenceEngine::Data>("", SizeVector{1, 2, 3, 4}, Precision::UNSPECIFIED);
layer.insData.push_back(notEmptyData);
- ASSERT_NO_THROW(getConvPaddings(layer));
+ ASSERT_NO_THROW(getPaddings(layer));
+
+ auto notEmptyData3D = std::make_shared<InferenceEngine::Data>("", SizeVector{1, 2, 3, 4, 5}, Precision::UNSPECIFIED, Layout::NCDHW);
+ layer._kernel.insert(Z_AXIS, 0u);
+ layer.insData[0] = notEmptyData3D;
+ ASSERT_NO_THROW(getPaddings(layer));
}
// parameters are from real model, like Mobilenet-SSD
@@ -433,19 +465,39 @@ TEST_F(LayersTests, canGetSamePadForConvolutionEvenInput) {
TensorDesc tensorDesc(Precision::UNSPECIFIED, SizeVector{1, 144, 160, 160}, Layout::NCHW);
auto notEmptyData = std::make_shared<InferenceEngine::Data>("", tensorDesc);
layer.insData.push_back(notEmptyData);
- layer._dilation.insert(X_AXIS, 1);
- layer._dilation.insert(Y_AXIS, 1);
- layer._kernel.insert(X_AXIS, 3);
- layer._kernel.insert(Y_AXIS, 3);
- layer._stride.insert(X_AXIS, 2);
- layer._stride.insert(Y_AXIS, 2);
+ layer._dilation = PropertyVector<unsigned>{{1, 1}};
+ layer._kernel = PropertyVector<unsigned>{{3, 3}};
+ layer._stride = PropertyVector<unsigned>{{2, 2}};
- auto pad = getConvPaddings(layer);
+ auto pad = getPaddings(layer);
ASSERT_EQ(pad.begin, PropertyVector<unsigned>(2, 0));
ASSERT_EQ(pad.end, PropertyVector<unsigned>(2, 1));
}
+// parameters are from real model, like V-Net
+TEST_F(LayersTests, canGetSamePadForConvolutionEvenInput3D) {
+ ConvolutionLayer layer(getDefaultParamsForLayer());
+ layer.params["auto_pad"] = "same_upper";
+ TensorDesc tensorDesc(Precision::UNSPECIFIED, SizeVector{1, 6, 190, 190, 20}, Layout::NCDHW);
+ auto notEmptyData = std::make_shared<InferenceEngine::Data>("", tensorDesc);
+ layer.insData.push_back(notEmptyData);
+ layer._dilation.insert(X_AXIS, 1u);
+ layer._dilation.insert(Y_AXIS, 1u);
+ layer._dilation.insert(Z_AXIS, 1u);
+ layer._kernel.insert(X_AXIS, 5u);
+ layer._kernel.insert(Y_AXIS, 5u);
+ layer._kernel.insert(Z_AXIS, 5u);
+ layer._stride.insert(X_AXIS, 1u);
+ layer._stride.insert(Y_AXIS, 1u);
+ layer._stride.insert(Z_AXIS, 1u);
+
+ auto pad = getPaddings(layer);
+
+ ASSERT_EQ(pad.begin, PropertyVector<unsigned>(3, 2u));
+ ASSERT_EQ(pad.end, PropertyVector<unsigned>(3, 2u));
+}
+
// parameters are from real model, like Mobilenet-SSD
TEST_F(LayersTests, canGetSamePadForConvolutionOddInput) {
ConvolutionLayer layer(getDefaultParamsForLayer());
@@ -453,16 +505,83 @@ TEST_F(LayersTests, canGetSamePadForConvolutionOddInput) {
TensorDesc tensorDesc(Precision::UNSPECIFIED, SizeVector{1, 144, 75, 75}, Layout::NCHW);
auto notEmptyData = std::make_shared<InferenceEngine::Data>("", tensorDesc);
layer.insData.push_back(notEmptyData);
- layer._dilation.insert(X_AXIS, 1);
- layer._dilation.insert(Y_AXIS, 1);
- layer._kernel.insert(X_AXIS, 3);
- layer._kernel.insert(Y_AXIS, 3);
- layer._stride.insert(X_AXIS, 2);
- layer._stride.insert(Y_AXIS, 2);
+ layer._dilation = PropertyVector<unsigned>{{1, 1}};
+ layer._kernel = PropertyVector<unsigned>{{3, 3}};
+ layer._stride = PropertyVector<unsigned>{{2, 2}};
+ PropertyVector<unsigned> ref(2, 1);
+
+ auto pad = getPaddings(layer);
+
+ ASSERT_EQ(pad.begin, ref);
+ ASSERT_EQ(pad.end, ref);
+}
+
+TEST_F(LayersTests, canGetSamePadForDeConvolutionEvenInput) {
+ DeconvolutionLayer layer(getDefaultParamsForLayer());
+ layer.params["auto_pad"] = "same_upper";
+ TensorDesc tensorDesc(Precision::UNSPECIFIED, SizeVector{1, 144, 160, 160}, Layout::NCHW);
+ auto notEmptyData = std::make_shared<InferenceEngine::Data>("", tensorDesc);
+ layer.insData.push_back(notEmptyData);
+ layer._dilation = PropertyVector<unsigned>{{1, 1}};
+ layer._kernel = PropertyVector<unsigned>{{3, 3}};
+ layer._stride = PropertyVector<unsigned>{{2, 2}};
+
+ auto pad = getPaddings(layer);
+
+ ASSERT_EQ(pad.begin, PropertyVector<unsigned>(2, 0));
+ ASSERT_EQ(pad.end, PropertyVector<unsigned>(2, 1));
+}
+
+TEST_F(LayersTests, canGetSamePadForDeConvolutionOddInput) {
+ DeconvolutionLayer layer(getDefaultParamsForLayer());
+ layer.params["auto_pad"] = "same_upper";
+ TensorDesc tensorDesc(Precision::UNSPECIFIED, SizeVector{1, 144, 75, 75}, Layout::NCHW);
+ auto notEmptyData = std::make_shared<InferenceEngine::Data>("", tensorDesc);
+ layer.insData.push_back(notEmptyData);
+ layer._dilation = PropertyVector<unsigned>{{1, 1}};
+ layer._kernel = PropertyVector<unsigned>{{3, 3}};
+ layer._stride = PropertyVector<unsigned>{{2, 2}};
PropertyVector<unsigned> ref(2, 1);
- auto pad = getConvPaddings(layer);
+ auto pad = getPaddings(layer);
ASSERT_EQ(pad.begin, ref);
ASSERT_EQ(pad.end, ref);
}
+
+TEST_F(LayersTests, canGetSamePadForPoolingEvenInput) {
+ PoolingLayer layer(getDefaultParamsForLayer());
+ layer.params["auto_pad"] = "same_upper";
+ TensorDesc tensorDesc(Precision::UNSPECIFIED, SizeVector{1, 144, 160, 160}, Layout::NCHW);
+ auto notEmptyData = std::make_shared<InferenceEngine::Data>("", tensorDesc);
+ layer.insData.push_back(notEmptyData);
+ layer._kernel = PropertyVector<unsigned>{{3, 3}};
+ layer._stride = PropertyVector<unsigned>{{2, 2}};
+
+ auto pad = getPaddings(layer);
+
+ ASSERT_EQ(pad.begin, PropertyVector<unsigned>(2, 0));
+ ASSERT_EQ(pad.end, PropertyVector<unsigned>(2, 1));
+}
+
+TEST_F(LayersTests, canGetSamePadForPoolingOddInput) {
+ PoolingLayer layer(getDefaultParamsForLayer());
+ layer.params["auto_pad"] = "same_upper";
+ TensorDesc tensorDesc(Precision::UNSPECIFIED, SizeVector{1, 144, 75, 75}, Layout::NCHW);
+ auto notEmptyData = std::make_shared<InferenceEngine::Data>("", tensorDesc);
+ layer.insData.push_back(notEmptyData);
+ layer._kernel = PropertyVector<unsigned>{{3, 3}};
+ layer._stride = PropertyVector<unsigned>{{2, 2}};
+ PropertyVector<unsigned> ref(2, 1);
+
+ auto pad = getPaddings(layer);
+
+ ASSERT_EQ(pad.begin, ref);
+ ASSERT_EQ(pad.end, ref);
+}
+
+
+TEST_F(LayersTests, cannotGetPadForUnsupportedLayer) {
+ FullyConnectedLayer layer(getDefaultParamsForLayer());
+ ASSERT_ANY_THROW(getPaddingsImpl(layer));
+}
\ No newline at end of file
diff --git a/inference-engine/tests/unit/inference_engine_tests/locked_memory_test.cpp b/inference-engine/tests/unit/inference_engine_tests/locked_memory_test.cpp
index 4ba4688a2..7a7ee5e7b 100644
--- a/inference-engine/tests/unit/inference_engine_tests/locked_memory_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/locked_memory_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/plugin_dispatcher_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/plugin_dispatcher_tests.cpp
index e60a80595..b54aa383e 100644
--- a/inference-engine/tests/unit/inference_engine_tests/plugin_dispatcher_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/plugin_dispatcher_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -14,16 +13,19 @@
using namespace InferenceEngine;
using namespace ::testing;
-class PluginDispatcherTests : public ::testing::Test {};
+class PluginDispatcherTests : public ::testing::Test {
+public:
+ const std::string nameExt(const std::string& name) { return name + IE_BUILD_POSTFIX;}
+};
TEST_F(PluginDispatcherTests, canLoadMockPlugin) {
PluginDispatcher dispatcher({ "", "./", "./lib" });
- ASSERT_NO_THROW(dispatcher.getPluginByName("mock_engine"));
+ ASSERT_NO_THROW(dispatcher.getPluginByName(nameExt("mock_engine")));
}
TEST_F(PluginDispatcherTests, throwsOnUnknownPlugin) {
PluginDispatcher dispatcher({ "./", "./lib" });
- ASSERT_THROW(dispatcher.getPluginByName("unknown_plugin"), InferenceEngine::details::InferenceEngineException);
+ ASSERT_THROW(dispatcher.getPluginByName(nameExt("unknown_plugin")), InferenceEngine::details::InferenceEngineException);
}
TEST_F(PluginDispatcherTests, throwsOnDeviceWithoutPlugins) {
@@ -42,12 +44,12 @@ TEST_F(PluginDispatcherTests, triesToLoadEveryPluginSuitableForDevice) {
ON_CALL(disp, getPluginByName(_)).WillByDefault(ThrowException());
#ifdef ENABLE_MKL_DNN
- EXPECT_CALL(disp, getPluginByName("MKLDNNPlugin")).Times(1);
+ EXPECT_CALL(disp, getPluginByName(nameExt("MKLDNNPlugin"))).Times(1);
#endif
#ifdef ENABLE_OPENVX_CVE
- EXPECT_CALL(disp, getPluginByName("OpenVXPluginCVE")).Times(1);
+ EXPECT_CALL(disp, getPluginByName(nameExt("OpenVXPluginCVE"))).Times(1);
#elif defined ENABLE_OPENVX
- EXPECT_CALL(disp, getPluginByName("OpenVXPlugin")).Times(1);
+ EXPECT_CALL(disp, getPluginByName(nameExt("OpenVXPlugin"))).Times(1);
#endif
ASSERT_THROW(disp.getSuitablePlugin(TargetDevice::eCPU), InferenceEngine::details::InferenceEngineException);
}
@@ -56,7 +58,7 @@ TEST_F(PluginDispatcherTests, triesToLoadEveryPluginSuitableForDevice) {
TEST_F(PluginDispatcherTests, returnsIfLoadSuccessfull) {
MockDispatcher disp({ "./", "./lib" });
PluginDispatcher dispatcher({ "", "./", "./lib" });
- auto ptr = dispatcher.getPluginByName("mock_engine");
+ auto ptr = dispatcher.getPluginByName(nameExt("mock_engine"));
EXPECT_CALL(disp, getPluginByName(_)).WillOnce(Return(ptr));
ASSERT_NO_THROW(disp.getSuitablePlugin(TargetDevice::eCPU));
diff --git a/inference-engine/tests/unit/inference_engine_tests/pointer_test.cpp b/inference-engine/tests/unit/inference_engine_tests/pointer_test.cpp
index c317c9cb7..78985fe6a 100644
--- a/inference-engine/tests/unit/inference_engine_tests/pointer_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/pointer_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/pre_allocator_test.cpp b/inference-engine/tests/unit/inference_engine_tests/pre_allocator_test.cpp
index ed03e138a..42e06a04d 100644
--- a/inference-engine/tests/unit/inference_engine_tests/pre_allocator_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/pre_allocator_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/precision_test.cpp b/inference-engine/tests/unit/inference_engine_tests/precision_test.cpp
index 84d69c0b7..a044b95ee 100644
--- a/inference-engine/tests/unit/inference_engine_tests/precision_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/precision_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/preprocess_test.cpp b/inference-engine/tests/unit/inference_engine_tests/preprocess_test.cpp
index d39aab4c4..70ef0dccc 100644
--- a/inference-engine/tests/unit/inference_engine_tests/preprocess_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/preprocess_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/range_iterator_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/range_iterator_tests.cpp
index 0368df7af..0a10c8cdf 100644
--- a/inference-engine/tests/unit/inference_engine_tests/range_iterator_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/range_iterator_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/response_buffer_test.cpp b/inference-engine/tests/unit/inference_engine_tests/response_buffer_test.cpp
index 6e25efe0d..4087637d8 100644
--- a/inference-engine/tests/unit/inference_engine_tests/response_buffer_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/response_buffer_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/shared_object_loader_test.cpp b/inference-engine/tests/unit/inference_engine_tests/shared_object_loader_test.cpp
index a70d3e315..cdea8de81 100644
--- a/inference-engine/tests/unit/inference_engine_tests/shared_object_loader_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/shared_object_loader_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/so_pointer_tests.cpp b/inference-engine/tests/unit/inference_engine_tests/so_pointer_tests.cpp
index 374f82784..ed0e35249 100644
--- a/inference-engine/tests/unit/inference_engine_tests/so_pointer_tests.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/so_pointer_tests.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
diff --git a/inference-engine/tests/unit/inference_engine_tests/tensor_desc_test.cpp b/inference-engine/tests/unit/inference_engine_tests/tensor_desc_test.cpp
index 04f30a815..16bd43ba3 100644
--- a/inference-engine/tests/unit/inference_engine_tests/tensor_desc_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/tensor_desc_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -46,7 +45,7 @@ TEST_F(TensorDescTests, CreateEmptyBlob) {
ASSERT_EQ(blob->getTensorDesc().getLayout(), Layout::NCHW);
}
-TEST_F(TensorDescTests, CreateBlockedBlob) {
+TEST_F(TensorDescTests, CreateBlockedBlobNCHW) {
TensorDesc desc(Precision::FP32, {1, 4, 2, 1}, {{1, 2, 2, 1, 2}, {0, 1, 2, 3, 1}});
float data[8] = {1, 2, 3, 4, 5, 6, 7, 8};
Blob::Ptr blockedBlob = make_shared_blob<float>(desc, data);
@@ -58,6 +57,18 @@ TEST_F(TensorDescTests, CreateBlockedBlob) {
ASSERT_EQ(Layout::BLOCKED, blockedBlob->layout());
}
+TEST_F(TensorDescTests, CreateBlockedBlobNCDHW) {
+ TensorDesc desc(Precision::FP32, {1, 4, 2, 2, 1}, {{1, 2, 2, 2, 1, 2}, {0, 1, 2, 3, 4, 1}});
+ float data[8] = {1, 2, 3, 4, 5, 6, 7, 8};
+ Blob::Ptr blockedBlob = make_shared_blob<float>(desc, data);
+ Blob::Ptr ncdhwBlob = make_shared_blob<float>({Precision::FP32, {1, 4, 2, 2, 1}, Layout::NCDHW}, data);
+ ASSERT_NE(blockedBlob->getTensorDesc().offset(6), ncdhwBlob->getTensorDesc().offset(6));
+ ASSERT_EQ(5, blockedBlob->getTensorDesc().offset(6));
+ ASSERT_EQ(6, ncdhwBlob->getTensorDesc().offset(6));
+ ASSERT_EQ(Layout::NCDHW, ncdhwBlob->layout());
+ ASSERT_EQ(Layout::BLOCKED, blockedBlob->layout());
+}
+
TEST_F(TensorDescTests, CompareNHWCandNCHWLayouts) {
TensorDesc descNCHW(Precision::FP32, {1, 3, 4, 2}, Layout::NCHW);
TensorDesc descNHWC(Precision::FP32, {1, 3, 4, 2}, Layout::NHWC);
@@ -70,3 +81,16 @@ TEST_F(TensorDescTests, CompareNHWCandNCHWLayouts) {
ASSERT_EQ(descNCHW.getBlockingDesc().getOrder(), nchw);
ASSERT_EQ(descNHWC.getBlockingDesc().getOrder(), nhwc);
}
+
+TEST_F(TensorDescTests, CompareNDHWCandNCDHWLayouts) {
+ TensorDesc descNCDHW(Precision::FP32, {1, 3, 4, 4, 2}, Layout::NCDHW);
+ TensorDesc descNDHWC(Precision::FP32, {1, 3, 4, 4, 2}, Layout::NDHWC);
+ SizeVector ncdhw = {0, 1, 2, 3, 4};
+ SizeVector ndhwc = {0, 2, 3, 4, 1};
+
+ ASSERT_NE(descNCDHW, descNDHWC);
+ ASSERT_NE(descNCDHW.getBlockingDesc(), descNDHWC.getBlockingDesc());
+ ASSERT_NE(descNCDHW.getBlockingDesc().getOrder(), descNDHWC.getBlockingDesc().getOrder());
+ ASSERT_EQ(descNCDHW.getBlockingDesc().getOrder(), ncdhw);
+ ASSERT_EQ(descNDHWC.getBlockingDesc().getOrder(), ndhwc);
+}
diff --git a/inference-engine/tests/unit/inference_engine_tests/util_test.cpp b/inference-engine/tests/unit/inference_engine_tests/util_test.cpp
index 8bd11a311..d62e0a1af 100644
--- a/inference-engine/tests/unit/inference_engine_tests/util_test.cpp
+++ b/inference-engine/tests/unit/inference_engine_tests/util_test.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//