summary refs log tree commit diff
path: root/inference-engine/samples/hello_classification/main.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'inference-engine/samples/hello_classification/main.cpp')
-rw-r--r-- inference-engine/samples/hello_classification/main.cpp | 30
1 file changed, 22 insertions, 8 deletions
diff --git a/inference-engine/samples/hello_classification/main.cpp b/inference-engine/samples/hello_classification/main.cpp
index abc010854..d9482e19b 100644
--- a/inference-engine/samples/hello_classification/main.cpp
+++ b/inference-engine/samples/hello_classification/main.cpp
@@ -1,5 +1,4 @@
// Copyright (C) 2018 Intel Corporation
-//
// SPDX-License-Identifier: Apache-2.0
//
@@ -9,32 +8,47 @@
#include <string>
#include <cstdlib>
+#ifdef UNICODE
+#include <tchar.h>
+#endif
+
#include <opencv2/opencv.hpp>
#include <inference_engine.hpp>
using namespace InferenceEngine;
+#ifndef UNICODE
+#define tcout std::cout
+#define _T(STR) STR
+#else
+#define tcout std::wcout
+#endif
+
+#ifndef UNICODE
int main(int argc, char *argv[]) {
+#else
+int wmain(int argc, wchar_t *argv[]) {
+#endif
try {
// ------------------------------ Parsing and validation of input args ---------------------------------
if (argc != 3) {
- std::cout << "Usage : ./hello_classification <path_to_model> <path_to_image>" << std::endl;
+ tcout << _T("Usage : ./hello_classification <path_to_model> <path_to_image>") << std::endl;
return EXIT_FAILURE;
}
- const std::string input_model{argv[1]};
- const std::string input_image_path{argv[2]};
+ const file_name_t input_model{argv[1]};
+ const file_name_t input_image_path{argv[2]};
// -----------------------------------------------------------------------------------------------------
// --------------------------- 1. Load Plugin for inference engine -------------------------------------
- PluginDispatcher dispatcher({"../../../lib/intel64", ""});
+ PluginDispatcher dispatcher({_T("../../../lib/intel64"), _T("")});
InferencePlugin plugin(dispatcher.getSuitablePlugin(TargetDevice::eCPU));
// -----------------------------------------------------------------------------------------------------
// --------------------------- 2. Read IR Generated by ModelOptimizer (.xml and .bin files) ------------
CNNNetReader network_reader;
- network_reader.ReadNetwork(input_model);
- network_reader.ReadWeights(input_model.substr(0, input_model.size() - 4) + ".bin");
+ network_reader.ReadNetwork(fileNameToString(input_model));
+ network_reader.ReadWeights(fileNameToString(input_model).substr(0, input_model.size() - 4) + ".bin");
network_reader.getNetwork().setBatchSize(1);
CNNNetwork network = network_reader.getNetwork();
// -----------------------------------------------------------------------------------------------------
@@ -64,7 +78,7 @@ int main(int argc, char *argv[]) {
// --------------------------- 6. Prepare input --------------------------------------------------------
- cv::Mat image = cv::imread(input_image_path);
+ cv::Mat image = cv::imread(fileNameToString(input_image_path));
/* Resize manually and copy data from the image to the input blob */
Blob::Ptr input = infer_request.GetBlob(input_name);