summaryrefslogtreecommitdiff
path: root/tests/CaffeYolo-Armnn
diff options
context:
space:
mode:
authortelsoa01 <telmo.soares@arm.com>2018-03-09 14:13:49 +0000
committertelsoa01 <telmo.soares@arm.com>2018-03-09 14:13:49 +0000
commit4fcda0101ec3d110c1d6d7bee5c83416b645528a (patch)
treec9a70aeb2887006160c1b3d265c27efadb7bdbae /tests/CaffeYolo-Armnn
downloadarmnn-4fcda0101ec3d110c1d6d7bee5c83416b645528a.tar.gz
armnn-4fcda0101ec3d110c1d6d7bee5c83416b645528a.tar.bz2
armnn-4fcda0101ec3d110c1d6d7bee5c83416b645528a.zip
Release 18.02
Change-Id: Id3c11dc5ee94ef664374a988fcc6901e9a232fa6
Diffstat (limited to 'tests/CaffeYolo-Armnn')
-rw-r--r--tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp39
1 file changed, 39 insertions, 0 deletions
diff --git a/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
new file mode 100644
index 000000000..af60be95e
--- /dev/null
+++ b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
@@ -0,0 +1,39 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// See LICENSE file in the project root for full license information.
+//
+#include "../YoloInferenceTest.hpp"
+#include "armnnCaffeParser/ICaffeParser.hpp"
+#include "armnn/TypesUtils.hpp"
+
+int main(int argc, char* argv[]) // Entry point: runs the shared YOLO inference test against a Caffe model.
+{
+    armnn::TensorShape inputTensorShape{ { 1, 3, YoloImageHeight, YoloImageWidth } }; // NCHW: 1 batch, 3 channels; height/width constants come from YoloInferenceTest.hpp.
+
+    using YoloInferenceModel = InferenceModel<armnnCaffeParser::ICaffeParser, // Caffe-parser-backed model producing float outputs.
+        float>;
+
+    return InferenceTestMain(argc, argv, { 0 }, // { 0 } presumably selects default test case ids — confirm against InferenceTestMain.
+        [&inputTensorShape]() // Factory invoked by the harness to create the test-case provider.
+        {
+            return make_unique<YoloTestCaseProvider<YoloInferenceModel>>( // NOTE(review): unqualified make_unique (vs std:: on the last line) — likely a project helper from YoloInferenceTest.hpp.
+                [&]
+                (typename YoloInferenceModel::CommandLineOptions modelOptions)
+                {
+                    if (!ValidateDirectory(modelOptions.m_ModelDir))
+                    {
+                        return std::unique_ptr<YoloInferenceModel>(); // Null model signals failure to the provider.
+                    }
+
+                    typename YoloInferenceModel::Params modelParams;
+                    modelParams.m_ModelPath = modelOptions.m_ModelDir + "yolov1_tiny_voc2007_model.caffemodel"; // m_ModelDir is assumed to end with a path separator — TODO confirm ValidateDirectory guarantees this.
+                    modelParams.m_InputBinding = "data"; // Caffe network input blob name.
+                    modelParams.m_OutputBinding = "fc12"; // Final fully-connected layer holding YOLO detections.
+                    modelParams.m_InputTensorShape = &inputTensorShape; // Points at main()'s local; alive for the duration of InferenceTestMain.
+                    modelParams.m_IsModelBinary = true; // .caffemodel files are binary protobuf.
+                    modelParams.m_ComputeDevice = modelOptions.m_ComputeDevice;
+
+                    return std::make_unique<YoloInferenceModel>(modelParams);
+                });
+        });
+}