/**
 * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __INFERENCE_ENGINE_IMPL_TFLite_H__
#define __INFERENCE_ENGINE_IMPL_TFLite_H__

#include <inference_engine_common.h>

#include "tensorflow/contrib/lite/string.h"
#include "tensorflow/contrib/lite/kernels/register.h"
#include "tensorflow/contrib/lite/model.h"
#include "tensorflow/contrib/lite/context.h"


#include <memory>
#include <dlog.h>
/**
 * @file inference_engine_tflite_private.h
 * @brief This file contains the InferenceTFLite class, which
 *        provides TensorFlow Lite based inference functionality.
 */

/* dlog tag for this backend; undefine any previously inherited tag first. */
#ifdef LOG_TAG
#undef LOG_TAG
#endif

#define LOG_TAG "INFERENCE_ENGINE_TFLITE"

using namespace InferenceEngineInterface::Common;

namespace InferenceEngineImpl {
namespace TFLiteImpl {
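
/**
 * @brief TensorFlow Lite based implementation of IInferenceEngineCommon.
 *
 * A minimal usage sketch (illustrative only; the model path, the node
 * names, and the INFERENCE_TARGET_CPU enumerator are assumptions, not
 * defined in this header):
 * @code
 *   InferenceTFLite engine("", "/path/to/model.tflite");
 *   engine.SetTargetDevice(INFERENCE_TARGET_CPU);
 *   engine.SetInputTensorParamNode("input");
 *   engine.SetOutputTensorParamNodes({ "output" });
 *   engine.Load();
 *   engine.Run();
 *
 *   tensor_t results;
 *   engine.GetInferenceResult(results);
 * @endcode
 */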

class InferenceTFLite : public IInferenceEngineCommon {
public:
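    /**
     * @brief Stores the given model file paths; actual loading is
     *        deferred to Load(). For TFLite, weightFile is expected to
     *        be the .tflite flatbuffer.
     */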
    InferenceTFLite(std::string protoFile,
                    std::string weightFile);

    ~InferenceTFLite();

    // InputTensor
    int SetInputTensorParam() override;

    int SetInputTensorParamNode(std::string node = "input") override;
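    // Note: a default argument on a virtual function is bound statically,
    // so callers going through a base-class pointer get the base
    // declaration's default (if any), not necessarily "input".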

    // Output Tensor Params
    int SetOutputTensorParam() override;

    int SetOutputTensorParamNodes(std::vector<std::string> nodes) override;

    int SetTargetDevice(inference_target_type_e type) override;

    // Load and Run
    int Load() override;

    // Judging by the name, binds the interpreter's input layer after Load()
    // so its buffer can be accessed via the accessors below.
    int CreateInputLayerPassage() override;

    int GetInputLayerAttrType() override;

    void * GetInputDataPtr() override;

    int SetInputDataBuffer(tensor_t data) override;

    int Run() override;

    int Run(std::vector<float> tensor) override;

    int GetInferenceResult(tensor_t& results);

private:
    std::unique_ptr<tflite::Interpreter> mInterpreter;
    std::unique_ptr<tflite::FlatBufferModel> mFlatBuffModel;

    std::string mInputLayer;               /**< Input layer (node) name */
    std::vector<std::string> mOutputLayer; /**< Output layer (node) names */

    int mInputLayerId;
    std::vector<int> mOutputLayerId;

    TfLiteType mInputAttrType;

    void *mInputData;

    std::string mConfigFile; /**< protoFile path from the constructor */
    std::string mWeightFile; /**< weightFile (.tflite model) path from the constructor */
};

} /* TFLiteImpl */
} /* InferenceEngineImpl */

#endif /* __INFERENCE_ENGINE_IMPL_TFLite_H__ */