Diffstat (limited to 'runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc')
-rw-r--r--  runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc | 257
1 file changed, 257 insertions(+), 0 deletions(-)
diff --git a/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc
new file mode 100644
index 000000000..e3bb29161
--- /dev/null
+++ b/runtimes/neurun/frontend/nnapi/wrapper/ANeuralNetworksModel.cc
@@ -0,0 +1,257 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ANeuralNetworksModel.h"
+#include "OperationFactory.h"
+#include "NNAPIConvert.h"
+
+#include "model/Operations.Include.h"
+#include "util/logging.h"
+
+#include "cpp14/memory.h"
+
+//
+// ANeuralNetworksModel
+//
+ANeuralNetworksModel::ANeuralNetworksModel() noexcept
+ : _model{new neurun::model::Model}, _graph{nullptr}, _optional_operands{}, _operand_usages{}
+{
+ // DO NOTHING
+}
+
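+// Add a new operand from the NNAPI operand type; its usage starts as
+// NOT_DEFINED until it is later marked as a constant, model input, or
+// operation output.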
+bool ANeuralNetworksModel::addOperand(const ANeuralNetworksOperandType *type) noexcept
+{
+ try
+ {
+ const auto shape = NNAPIConvert::getShape(type);
+ const auto typeInfo = NNAPIConvert::getTypeInfo(type);
+ _model->operands.emplace(shape, typeInfo);
+ _operand_usages.emplace_back(OperandUsage::NOT_DEFINED);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
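+// Set the value of an operand. With copy == true the buffer is copied into
+// the model (CachedData); otherwise it is referenced in place (ExternalData).
+// Optional operands are only recorded here and resolved when the model is
+// finished.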
+bool ANeuralNetworksModel::setOperandValue(uint32_t index, const void *buffer, size_t length,
+ bool optional, bool copy) noexcept
+{
+ const neurun::model::OperandIndex ind{index};
+
+ try
+ {
+ _operand_usages[index] = OperandUsage::CONSTANT;
+
+    // Leave operands.at(ind).data()->base() as nullptr for an optional operand;
+    // it will be filled in when the model is finished
+ if (optional)
+ {
+ setOptionalOperand(ind);
+ }
+
+ using ::neurun::model::CachedData;
+ using ::neurun::model::ExternalData;
+ if (copy)
+ {
+ _model->operands.at(ind).data(
+ nnfw::cpp14::make_unique<CachedData>(reinterpret_cast<const uint8_t *>(buffer), length));
+ }
+ else
+ {
+ _model->operands.at(ind).data(nnfw::cpp14::make_unique<ExternalData>(
+ reinterpret_cast<const uint8_t *>(buffer), length));
+ }
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
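+// Create an operation node through OperationFactory and add it to the model;
+// each output operand is marked as OPERATION_OUTPUT.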
+bool ANeuralNetworksModel::addOperation(ANeuralNetworksOperationType type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) noexcept
+{
+ try
+ {
+ for (uint32_t i = 0; i < outputCount; i++)
+ {
+ _operand_usages[outputs[i]] = OperandUsage::OPERATION_OUTPUT;
+ }
+
+ auto &factory = OperationFactory::instance();
+ OperationFactory::Param param{inputCount, inputs, outputCount, outputs};
+
+ auto node = factory.create(type, param, _model->operands);
+ _model->operations.push(std::unique_ptr<neurun::model::Operation>{node});
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
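+// Same as addOperation(), but for extension operation types
+// (ANeuralNetworksOperationTypeEx).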
+bool ANeuralNetworksModel::addOperationEx(ANeuralNetworksOperationTypeEx type, uint32_t inputCount,
+ const uint32_t *inputs, uint32_t outputCount,
+ const uint32_t *outputs) noexcept
+{
+ try
+ {
+ for (uint32_t i = 0; i < outputCount; i++)
+ {
+ _operand_usages[outputs[i]] = OperandUsage::OPERATION_OUTPUT;
+ }
+
+ auto &factory = OperationFactory::instance();
+ OperationFactory::Param param{inputCount, inputs, outputCount, outputs};
+
+ auto node = factory.create(type, param, _model->operands);
+ _model->operations.push(std::unique_ptr<neurun::model::Operation>{node});
+ }
+  catch (const std::exception &e)
+  {
+    VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+    return false;
+  }
+
+  return true;
+}
+
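+// Mark the operand at `index` as a model input; model outputs are registered
+// by addModelOutput() below, which additionally rejects duplicate outputs.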
+bool ANeuralNetworksModel::addModelInput(uint32_t index) noexcept
+{
+ try
+ {
+ _operand_usages[index] = OperandUsage::MODEL_INPUT;
+
+ const neurun::model::OperandIndex ind{index};
+ _model->inputs.append(ind);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::addModelOutput(uint32_t index) noexcept
+{
+ try
+ {
+ const neurun::model::OperandIndex ind{index};
+
+    // Duplicate output indices are not allowed
+ if (_model->outputs.contains(ind))
+ {
+ return false;
+ }
+
+ _model->outputs.append(ind);
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << std::endl;
+
+ return false;
+ }
+
+ return true;
+}
+
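+// Finish building the model: handle optional operands, move the model into a
+// graph and finalize it, then drop the usage bookkeeping.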
+bool ANeuralNetworksModel::finish() noexcept
+{
+ try
+ {
+ fillOptionalOperand();
+
+ _graph = std::make_shared<neurun::graph::Graph>(std::move(_model));
+
+ _graph->finishBuilding();
+
+ _operand_usages.clear();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << '\n';
+
+ return false;
+ }
+
+ return true;
+}
+
+bool ANeuralNetworksModel::isFinished() noexcept { return _graph != nullptr; }
+
+bool ANeuralNetworksModel::isExistOperand(uint32_t index) noexcept
+{
+ return _model->operands.exist(neurun::model::OperandIndex{index});
+}
+
+size_t ANeuralNetworksModel::operandSize(uint32_t index) noexcept
+{
+ try
+ {
+ return _model->operands.at(neurun::model::OperandIndex{index}).operandSize();
+ }
+ catch (const std::exception &e)
+ {
+ VERBOSE(EXCEPTION) << e.what() << '\n';
+
+ return 0;
+ }
+}
+
+bool ANeuralNetworksModel::isUsageSet(uint32_t index) noexcept
+{
+ return (_operand_usages[index] != OperandUsage::NOT_DEFINED);
+}
+
+bool ANeuralNetworksModel::isOperationOutput(uint32_t index) noexcept
+{
+ return (_operand_usages[index] == OperandUsage::OPERATION_OUTPUT);
+}
+
+void ANeuralNetworksModel::setOptionalOperand(const neurun::model::OperandIndex idx)
+{
+ _optional_operands.insert(idx);
+}
+
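+// Iterate over every operation input; optional operands would receive their
+// default values here, but this is not supported yet (see TODO below), so an
+// exception is thrown instead.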
+void ANeuralNetworksModel::fillOptionalOperand(void)
+{
+ _model->operations.iterate(
+ [&](const ::neurun::model::OperationIndex &, ::neurun::model::Operation &node) {
+ for (auto input : node.getInputs())
+ {
+ // TODO fill default value for optional operands
+ if (_optional_operands.find(input) != _optional_operands.end())
+ {
+ throw std::runtime_error{"Optional operand is not supported yet"};
+ }
+ }
+ });
+}