path: root/runtimes/libs/tflite/include/tflite/ext/kernels/TensorFlowMax.h
/*
 * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file     TensorFlowMax.h
 * @brief    This file contains the TensorFlowMax namespace and declarations of the TensorFlowMax
 *           custom operator functions
 * @ingroup  COM_AI_RUNTIME
 */

#ifndef __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__
#define __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__

#include "tensorflow/lite/context.h"

namespace nnfw
{
namespace tflite
{
namespace custom
{
namespace TensorFlowMax
{

/**
 * @brief Initialize the TensorFlowMax custom operator from the contents of its options buffer
 * @param[in] context The TfLite context
 * @param[in] buffer The buffer holding the custom options
 * @param[in] length The buffer length in bytes
 * @return A void pointer to the user data for this operator
 */
void *InitTensorFlowMax(TfLiteContext *context, const char *buffer, size_t length);

/**
 * @brief Release any memory allocated via 'InitTensorFlowMax'
 * @param[in] context The TfLite context
 * @param[in] buffer The user data returned by 'InitTensorFlowMax'
 * @return N/A
 */
void FreeTensorFlowMax(TfLiteContext *context, void *buffer);

/**
 * @brief Prepare the TensorFlowMax operator for execution
 * @param[in] context The TfLite context
 * @param[in] node The operator node
 * @return The TfLite status
 */
TfLiteStatus PrepareTensorFlowMax(TfLiteContext *context, TfLiteNode *node);

/**
 * @brief Evaluate the TensorFlowMax operator
 * @param[in] context The TfLite context
 * @param[in] node The operator node
 * @return The TfLite status
 */
TfLiteStatus EvalTensorFlowMax(TfLiteContext *context, TfLiteNode *node);
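
/**
 * @brief Usage sketch: wiring these callbacks into a TfLiteRegistration
 *
 * The block below is an illustrative sketch only, not part of this header. It assumes the
 * standard TensorFlow Lite custom-op registration API (TfLiteRegistration and
 * MutableOpResolver::AddCustom); the resolver type and the operator name string
 * "TensorFlowMax" used here are assumptions for the example.
 *
 * @code
 *   #include "tensorflow/lite/kernels/register.h"
 *   #include "tflite/ext/kernels/TensorFlowMax.h"
 *
 *   // Fill a TfLiteRegistration with the four lifecycle callbacks declared above.
 *   TfLiteRegistration registration{};
 *   registration.init = nnfw::tflite::custom::TensorFlowMax::InitTensorFlowMax;
 *   registration.free = nnfw::tflite::custom::TensorFlowMax::FreeTensorFlowMax;
 *   registration.prepare = nnfw::tflite::custom::TensorFlowMax::PrepareTensorFlowMax;
 *   registration.invoke = nnfw::tflite::custom::TensorFlowMax::EvalTensorFlowMax;
 *
 *   // Make the custom operator visible to the interpreter under its custom-op name.
 *   ::tflite::ops::builtin::BuiltinOpResolver resolver;
 *   resolver.AddCustom("TensorFlowMax", &registration);
 * @endcode
 */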

} // namespace TensorFlowMax
} // namespace custom
} // namespace tflite
} // namespace nnfw

#endif // __NNFW_TFLITE_EXT_KERNELS_TENSORFLOW_MAX_H__