summaryrefslogtreecommitdiff
path: root/runtime/neurun/backend/srcn/kernel/InstanceNormLayer.cc
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/neurun/backend/srcn/kernel/InstanceNormLayer.cc')
-rw-r--r--runtime/neurun/backend/srcn/kernel/InstanceNormLayer.cc155
1 file changed, 0 insertions(+), 155 deletions(-)
diff --git a/runtime/neurun/backend/srcn/kernel/InstanceNormLayer.cc b/runtime/neurun/backend/srcn/kernel/InstanceNormLayer.cc
deleted file mode 100644
index c83fe6d67..000000000
--- a/runtime/neurun/backend/srcn/kernel/InstanceNormLayer.cc
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "InstanceNormLayer.h"
-
-#include "OperationUtils.h"
-#include "ncnn/layer/instance_norm.h"
-
-namespace neurun
-{
-namespace backend
-{
-namespace srcn
-{
-namespace kernel
-{
-
-InstanceNormLayer::InstanceNormLayer()
- : _inputData(), _gammaData(), _betaData(), _outputData(), _inputDescr(), _gammaDescr(),
- _betaDescr(), _outputDescr(), _epsilon(1e-5), _activation(ir::Activation::NONE),
- _inputType(OperandType::FLOAT32), _backendLayout(ir::Layout::UNKNOWN)
-{
- // DO NOTHING
-}
-
-void InstanceNormLayer::instanceNormFloat32()
-{
- // Call kernel for NCHW data layout
- if (_backendLayout == ir::Layout::NCHW)
- {
- // Supports single batch only
- assert(_inputDescr.dimensions[0] == 1);
- const int input_channels = _inputDescr.dimensions[1];
- const int input_height = _inputDescr.dimensions[2];
- const int input_width = _inputDescr.dimensions[3];
- nnfw::ncnn::Mat in_mat(input_width, input_height, input_channels, _inputData.f);
-
- const int gamma_channels = _gammaDescr.dimensions[0];
- nnfw::ncnn::Mat gamma_mat(gamma_channels, _gammaData.f);
-
- const int beta_channels = _betaDescr.dimensions[0];
- nnfw::ncnn::Mat beta_mat(beta_channels, _betaData.f);
-
- assert(_outputDescr.dimensions[0] == 1);
- const int output_channels = _outputDescr.dimensions[1];
- const int output_height = _outputDescr.dimensions[2];
- const int output_width = _outputDescr.dimensions[3];
- nnfw::ncnn::Mat out_mat(output_width, output_height, output_channels, _outputData.f);
-
- if (_activation == ir::Activation::NONE)
- {
- nnfw::ncnn::ncnn_instance_norm_rowmajor(in_mat, out_mat, gamma_mat, beta_mat, input_channels,
- _epsilon);
- }
- else if (_activation == ir::Activation::RELU)
- {
- nnfw::ncnn::ncnn_instance_norm_with_relu_rowmajor(in_mat, out_mat, gamma_mat, beta_mat,
- input_channels, _epsilon, 0.f);
- }
- else
- {
- std::runtime_error("Unsupported activation type");
- }
- }
- // Call kernel for NHWC data layout
- else if (_backendLayout == ir::Layout::NHWC)
- {
- // Supports single batch only
- assert(_inputDescr.dimensions[0] == 1);
- const int input_height = _inputDescr.dimensions[1];
- const int input_width = _inputDescr.dimensions[2];
- const int input_channels = _inputDescr.dimensions[3];
- nnfw::ncnn::Mat in_mat(input_channels, input_width, input_height, _inputData.f);
-
- const int gamma_channels = _gammaDescr.dimensions[0];
- nnfw::ncnn::Mat gamma_mat(gamma_channels, _gammaData.f);
-
- const int beta_channels = _betaDescr.dimensions[0];
- nnfw::ncnn::Mat beta_mat(beta_channels, _betaData.f);
-
- assert(_outputDescr.dimensions[0] == 1);
- const int output_height = _outputDescr.dimensions[1];
- const int output_width = _outputDescr.dimensions[2];
- const int output_channels = _outputDescr.dimensions[3];
- nnfw::ncnn::Mat out_mat(output_channels, output_width, output_height, _outputData.f);
-
- if (_activation == ir::Activation::NONE)
- {
- nnfw::ncnn::ncnn_instance_norm_colmajor(in_mat, out_mat, gamma_mat, beta_mat, input_channels,
- _epsilon);
- }
- else if (_activation == ir::Activation::RELU)
- {
- nnfw::ncnn::ncnn_instance_norm_with_relu_colmajor(in_mat, out_mat, gamma_mat, beta_mat,
- input_channels, _epsilon, 0.f);
- }
- {
- std::runtime_error("Unsupported activation type");
- }
- }
- else
- {
- std::runtime_error("Unsupported backend layout");
- }
-}
-
-void InstanceNormLayer::configure(uint8_t *inputData, const TensorDescriptor inputDescr,
- uint8_t *gammaData, const TensorDescriptor gammaDescr,
- uint8_t *betaData, const TensorDescriptor betaDescr,
- uint8_t *outputData, const TensorDescriptor outputDescr,
- float epsilon, ir::Activation activation,
- ir::Layout backendLayout)
-{
- _inputData.u8 = inputData;
- _inputDescr = inputDescr;
- _gammaData.u8 = gammaData;
- _gammaDescr = gammaDescr;
- _betaData.u8 = betaData;
- _betaDescr = betaDescr;
- _outputData.u8 = outputData;
- _outputDescr = outputDescr;
- _epsilon = epsilon;
- _activation = activation;
- _backendLayout = backendLayout;
-}
-
-void InstanceNormLayer::run()
-{
- if (_inputType == OperandType::FLOAT32)
- {
- instanceNormFloat32();
- }
- else if (_inputType == OperandType::QUANT8_ASYMM)
- {
- throw std::runtime_error("NYI");
- }
-}
-
-} // namespace kernel
-} // namespace srcn
-} // namespace backend
-} // namespace neurun