summaryrefslogtreecommitdiff
path: root/infra
diff options
context:
space:
mode:
authorChunseok Lee <chunseok.lee@samsung.com>2020-12-14 14:43:04 +0900
committerChunseok Lee <chunseok.lee@samsung.com>2020-12-14 14:43:04 +0900
commit12d88feea8573f8490629cf62fc342b152e57d65 (patch)
tree3c734cc4d629834d2d523f4575ef84cd64684e57 /infra
parentd6b371e095d737922187a518b8faba1ef6f3a2b1 (diff)
downloadnnfw-12d88feea8573f8490629cf62fc342b152e57d65.tar.gz
nnfw-12d88feea8573f8490629cf62fc342b152e57d65.tar.bz2
nnfw-12d88feea8573f8490629cf62fc342b152e57d65.zip
Imported Upstream version 1.11.0upstream/1.11.0
Diffstat (limited to 'infra')
-rw-r--r--infra/3rdparty/.gitignore (renamed from infra/nncc/3rdparty/.gitignore)0
-rw-r--r--infra/3rdparty/Eigen/fd6845384b86/URL.default1
-rw-r--r--infra/3rdparty/Eigen/fd6845384b86/URL.info (renamed from infra/nncc/3rdparty/Eigen/fd6845384b86/URL.info)0
-rw-r--r--infra/cmake/modules/AddSubdirectories.cmake (renamed from infra/nncc/cmake/modules/add_subdirectories.cmake)0
-rw-r--r--infra/cmake/modules/Asserts.cmake (renamed from infra/nncc/cmake/modules/Asserts.cmake)0
-rw-r--r--infra/cmake/modules/ExternalBuildTools.cmake84
-rw-r--r--infra/cmake/modules/ExternalProjectTools.cmake (renamed from infra/nnfw/cmake/modules/ExternalProjectTools.cmake)2
-rw-r--r--infra/cmake/modules/ExternalSourceTools.cmake (renamed from infra/nncc/cmake/modules/ExternalSourceTools.cmake)32
-rw-r--r--infra/cmake/modules/IdentifyPlatform.cmake (renamed from infra/nnfw/cmake/modules/IdentifyPlatform.cmake)27
-rw-r--r--infra/cmake/modules/ListFile.cmake (renamed from infra/nncc/cmake/modules/ListFile.cmake)0
-rw-r--r--infra/cmake/modules/OptionTools.cmake (renamed from infra/nncc/cmake/modules/OptionTools.cmake)0
-rw-r--r--infra/cmake/modules/OptionalTargetTools.cmake (renamed from infra/nncc/cmake/modules/OptionalTargetTools.cmake)0
-rw-r--r--infra/cmake/modules/StampTools.cmake (renamed from infra/nncc/cmake/modules/StampTools.cmake)0
-rw-r--r--infra/cmake/modules/TargetRequire.cmake (renamed from infra/nncc/cmake/modules/TargetRequire.cmake)0
-rw-r--r--infra/cmake/modules/ThirdPartyTools.cmake (renamed from infra/nncc/cmake/modules/ThirdPartyTools.cmake)2
-rw-r--r--infra/cmake/packages/ARMComputeSourceConfig.cmake18
-rw-r--r--infra/cmake/packages/AbseilConfig.cmake (renamed from infra/nncc/cmake/packages/AbseilConfig.cmake)10
-rw-r--r--infra/cmake/packages/AbseilSourceConfig.cmake (renamed from infra/nncc/cmake/packages/AbseilSourceConfig.cmake)14
-rw-r--r--infra/cmake/packages/BoostConfig.cmake88
-rw-r--r--infra/cmake/packages/BoostSourceConfig.cmake19
-rw-r--r--infra/cmake/packages/Caffe/CMakeLists.txt (renamed from infra/nncc/cmake/packages/Caffe/CMakeLists.txt)0
-rw-r--r--infra/cmake/packages/CaffeConfig.cmake (renamed from infra/nncc/cmake/packages/CaffeConfig.cmake)12
-rw-r--r--infra/cmake/packages/CaffeProto/CMakeLists.txt (renamed from infra/nncc/cmake/packages/CaffeProto/CMakeLists.txt)0
-rw-r--r--infra/cmake/packages/CaffeProtoConfig.cmake (renamed from infra/nncc/cmake/packages/CaffeProtoConfig.cmake)6
-rw-r--r--infra/cmake/packages/CaffeSourceConfig.cmake (renamed from infra/nncc/cmake/packages/CaffeSourceConfig.cmake)4
-rw-r--r--infra/cmake/packages/CpuInfoSourceConfig.cmake21
-rw-r--r--infra/cmake/packages/Eigen-fd6845384b86Config.cmake25
-rw-r--r--infra/cmake/packages/EigenConfig.cmake (renamed from infra/nncc/cmake/packages/EigenConfig.cmake)4
-rw-r--r--infra/cmake/packages/EigenSource-fd6845384b86Config.cmake (renamed from infra/nncc/cmake/packages/EigenSource-fd6845384b86Config.cmake)5
-rw-r--r--infra/cmake/packages/EigenSourceConfig.cmake24
-rw-r--r--infra/cmake/packages/Farmhash/CMakeLists.txt (renamed from infra/nncc/cmake/packages/Farmhash/CMakeLists.txt)0
-rw-r--r--infra/cmake/packages/FarmhashConfig.cmake (renamed from infra/nncc/cmake/packages/FarmhashConfig.cmake)4
-rw-r--r--infra/cmake/packages/FarmhashSourceConfig.cmake21
-rw-r--r--infra/cmake/packages/FlatBuffersConfig.cmake (renamed from infra/nncc/cmake/packages/FlatBuffersConfig.cmake)61
-rw-r--r--infra/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake (renamed from infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake)11
-rw-r--r--infra/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake (renamed from infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake)0
-rw-r--r--infra/cmake/packages/FlatBuffersSource-1.12/FlatBuffersSourceConfig.cmake (renamed from infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfig.cmake)13
-rw-r--r--infra/cmake/packages/FlatBuffersSource-1.12/FlatBuffersSourceConfigVersion.cmake (renamed from infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfigVersion.cmake)0
-rw-r--r--infra/cmake/packages/FlatBuffersSourceConfig.cmake (renamed from infra/nncc/cmake/packages/FlatBuffersSourceConfig.cmake)11
-rw-r--r--infra/cmake/packages/FlatBuffersSourceConfigVersion.cmake (renamed from infra/nncc/cmake/packages/FlatBuffersSourceConfigVersion.cmake)0
-rw-r--r--infra/cmake/packages/GEMMLowpConfig.cmake (renamed from infra/nncc/cmake/packages/GEMMLowpConfig.cmake)2
-rw-r--r--infra/cmake/packages/GEMMLowpSourceConfig.cmake (renamed from infra/nncc/cmake/packages/GEMMLowpSourceConfig.cmake)9
-rw-r--r--infra/cmake/packages/GFlagsConfig.cmake (renamed from infra/nncc/cmake/packages/GFlagsConfig.cmake)4
-rw-r--r--infra/cmake/packages/GFlagsSourceConfig.cmake (renamed from infra/nncc/cmake/packages/GFlagsSourceConfig.cmake)4
-rw-r--r--infra/cmake/packages/GLogConfig.cmake (renamed from infra/nncc/cmake/packages/GLogConfig.cmake)0
-rw-r--r--infra/cmake/packages/GTestConfig.cmake52
-rw-r--r--infra/cmake/packages/GTestSourceConfig.cmake (renamed from infra/nncc/cmake/packages/GTestSourceConfig.cmake)4
-rw-r--r--infra/cmake/packages/HDF5Config.cmake71
-rw-r--r--infra/cmake/packages/HDF5SourceConfig.cmake18
-rw-r--r--infra/cmake/packages/LLVMConfig.cmake (renamed from infra/nncc/cmake/packages/LLVMConfig.cmake)0
-rw-r--r--infra/cmake/packages/NEON2SSEConfig.cmake (renamed from infra/nncc/cmake/packages/NEON2SSEConfig.cmake)2
-rw-r--r--infra/cmake/packages/NEON2SSESourceConfig.cmake (renamed from infra/nncc/cmake/packages/NEON2SSESourceConfig.cmake)11
-rw-r--r--infra/cmake/packages/Nonius/html_report_template.g.h++ (renamed from infra/nnfw/cmake/packages/Nonius/html_report_template.g.h++)0
-rw-r--r--infra/cmake/packages/NoniusSourceConfig.cmake26
-rw-r--r--infra/cmake/packages/ONNXRuntimeConfig.cmake (renamed from infra/nncc/cmake/packages/ONNXRuntimeConfig.cmake)0
-rw-r--r--infra/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake (renamed from infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake)4
-rw-r--r--infra/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake (renamed from infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake)0
-rw-r--r--infra/cmake/packages/ONNXSource-1.6.0/ONNXSourceConfig.cmake20
-rw-r--r--infra/cmake/packages/ONNXSource-1.6.0/ONNXSourceConfigVersion.cmake (renamed from infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfigVersion.cmake)2
-rw-r--r--infra/cmake/packages/ProtobufConfig.cmake (renamed from infra/nncc/cmake/packages/ProtobufConfig.cmake)63
-rw-r--r--infra/cmake/packages/ProtobufSourceConfig.cmake (renamed from infra/nncc/cmake/packages/ProtobufSourceConfig.cmake)4
-rw-r--r--infra/cmake/packages/Pybind11Config.cmake22
-rw-r--r--infra/cmake/packages/Pybind11SourceConfig.cmake18
-rw-r--r--infra/cmake/packages/PytorchSourceConfig.cmake (renamed from infra/nncc/cmake/packages/PytorchSourceConfig.cmake)6
-rw-r--r--infra/cmake/packages/RuySourceConfig.cmake21
-rw-r--r--infra/cmake/packages/TensorFlow-1.13/TensorFlowConfig.cmake56
-rw-r--r--infra/cmake/packages/TensorFlow-1.13/TensorFlowConfigVersion.cmake (renamed from infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfigVersion.cmake)2
-rw-r--r--infra/cmake/packages/TensorFlow-1.13/TensorFlowVersionChecker.c25
-rw-r--r--infra/cmake/packages/TensorFlowConfig.cmake (renamed from infra/nncc/cmake/packages/TensorFlowConfig.cmake)0
-rw-r--r--infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfig.cmake20
-rw-r--r--infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfig.cmake21
-rw-r--r--infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfig.cmake20
-rw-r--r--infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfig.cmake20
-rw-r--r--infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowLite-1.13.1/Lite/CMakeLists.txt (renamed from infra/nncc/cmake/packages/TensorFlowLite-1.12/Lite/CMakeLists.txt)25
-rw-r--r--infra/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfig.cmake (renamed from infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfig.cmake)25
-rw-r--r--infra/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfigVersion.cmake (renamed from infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfigVersion.cmake)2
-rw-r--r--infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfig.cmake20
-rw-r--r--infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfig.cmake19
-rw-r--r--infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake (renamed from infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake)4
-rw-r--r--infra/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake (renamed from infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake)0
-rw-r--r--infra/cmake/packages/TensorFlowSource-2.1.0/TensorFlowSourceConfig.cmake (renamed from infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfig.cmake)8
-rw-r--r--infra/cmake/packages/TensorFlowSource-2.1.0/TensorFlowSourceConfigVersion.cmake (renamed from infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfigVersion.cmake)2
-rw-r--r--infra/cmake/packages/TensorFlowSource-2.2.0/TensorFlowSourceConfig.cmake (renamed from infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfig.cmake)8
-rw-r--r--infra/cmake/packages/TensorFlowSource-2.2.0/TensorFlowSourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowSource-2.3.0-rc0Config.cmake21
-rw-r--r--infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfig.cmake18
-rw-r--r--infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfigVersion.cmake10
-rw-r--r--infra/cmake/packages/TensorFlowVersionChecker.c25
-rw-r--r--infra/command/build-docker-image43
-rw-r--r--infra/command/create-package3
-rw-r--r--infra/command/docker-run19
-rw-r--r--infra/command/doxygen18
-rw-r--r--infra/command/format120
-rw-r--r--infra/command/gen-coverage-report7
-rw-r--r--infra/command/install-githooks68
-rw-r--r--infra/command/pylint9
-rw-r--r--infra/command/verify-package3
-rw-r--r--infra/config/docker.configuration52
-rw-r--r--infra/docker/bionic/Dockerfile115
-rw-r--r--infra/docker/focal/Dockerfile47
-rw-r--r--infra/docker/xenial/Dockerfile (renamed from infra/docker/Dockerfile)9
-rw-r--r--infra/doxygen/Doxyfile (renamed from infra/nnfw/doxygen/Doxyfile)17
-rwxr-xr-xinfra/git-hooks/pre-commit.sh32
-rwxr-xr-xinfra/git-hooks/pre-push.sh2
-rw-r--r--infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default1
-rw-r--r--infra/nncc/CMakeLists.txt58
-rw-r--r--infra/nncc/cmake/modules/ExternalProjectTools.cmake3
-rw-r--r--infra/nncc/cmake/packages/EigenSourceConfig.cmake19
-rw-r--r--infra/nncc/cmake/packages/FarmhashSourceConfig.cmake19
-rw-r--r--infra/nncc/cmake/packages/GTestConfig.cmake86
-rw-r--r--infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake52
-rw-r--r--infra/nncc/cmake/packages/GoogleNSyncConfig.cmake62
-rw-r--r--infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt6
-rw-r--r--infra/nncc/cmake/packages/ONNXProtoConfig.cmake25
-rw-r--r--infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake20
-rw-r--r--infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake20
-rw-r--r--infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake10
-rw-r--r--infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt38
-rw-r--r--infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake55
-rw-r--r--infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake104
-rw-r--r--infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt78
-rwxr-xr-xinfra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh6
-rw-r--r--infra/nncc/cmake/packages/TensorFlowVersionChecker.c9
-rw-r--r--infra/nncc/command/utcount45
-rw-r--r--infra/nncc/config/docker.configuration2
-rw-r--r--infra/nnfw/CMakeLists.txt92
-rw-r--r--infra/nnfw/cmake/ApplyCompileFlags.cmake6
-rw-r--r--infra/nnfw/cmake/CfgOptionFlags.cmake60
-rw-r--r--infra/nnfw/cmake/buildtool/config/config_aarch64-android.cmake (renamed from infra/nnfw/cmake/buildtool/config/config_arm64-android.cmake)2
-rw-r--r--infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake4
-rw-r--r--infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake3
-rw-r--r--infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake6
-rw-r--r--infra/nnfw/cmake/buildtool/config/config_linux.cmake19
-rw-r--r--infra/nnfw/cmake/buildtool/config/config_x86_64-darwin.cmake12
-rw-r--r--infra/nnfw/cmake/buildtool/config/config_x86_64-tizen.cmake17
-rw-r--r--infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-android.cmake38
-rw-r--r--infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake6
-rw-r--r--infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake10
-rw-r--r--infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake44
-rw-r--r--infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake7
-rw-r--r--infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake6
-rw-r--r--infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake27
-rw-r--r--infra/nnfw/cmake/modules/ExternalSourceTools.cmake102
-rw-r--r--infra/nnfw/cmake/modules/OptionTools.cmake11
-rw-r--r--infra/nnfw/cmake/options/options_aarch64-android.cmake17
-rw-r--r--infra/nnfw/cmake/options/options_aarch64-linux.cmake4
-rw-r--r--infra/nnfw/cmake/options/options_aarch64-tizen.cmake12
-rw-r--r--infra/nnfw/cmake/options/options_arm64-android.cmake7
-rw-r--r--infra/nnfw/cmake/options/options_armv7l-linux.cmake5
-rw-r--r--infra/nnfw/cmake/options/options_armv7l-tizen.cmake14
-rw-r--r--infra/nnfw/cmake/options/options_x86_64-darwin.cmake5
-rw-r--r--infra/nnfw/cmake/options/options_x86_64-linux.cmake2
-rw-r--r--infra/nnfw/cmake/options/options_x86_64-tizen.cmake10
-rw-r--r--infra/nnfw/cmake/packages/ARMCompute/SConstruct309
-rw-r--r--infra/nnfw/cmake/packages/ARMComputeConfig.cmake142
-rw-r--r--infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake13
-rw-r--r--infra/nnfw/cmake/packages/AbslSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/BoostConfig.cmake71
-rw-r--r--infra/nnfw/cmake/packages/BoostSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/CpuInfoConfig.cmake31
-rw-r--r--infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/EigenConfig.cmake10
-rw-r--r--infra/nnfw/cmake/packages/EigenSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/Enum34SourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/FP16SourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/FXdivSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/FlatBuffersConfig.cmake68
-rw-r--r--infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/GEMMLowpConfig.cmake20
-rw-r--r--infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/GTestConfig.cmake78
-rw-r--r--infra/nnfw/cmake/packages/HDF5Config.cmake57
-rw-r--r--infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake20
-rw-r--r--infra/nnfw/cmake/packages/NNPACKConfig.cmake51
-rw-r--r--infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake20
-rw-r--r--infra/nnfw/cmake/packages/NoniusConfig.cmake10
-rw-r--r--infra/nnfw/cmake/packages/NoniusSourceConfig.cmake13
-rw-r--r--infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake13
-rw-r--r--infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/PeachpySourceConfig.cmake31
-rw-r--r--infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/Ruy/CMakeLists.txt51
-rw-r--r--infra/nnfw/cmake/packages/RuyConfig.cmake31
-rw-r--r--infra/nnfw/cmake/packages/SixSourceConfig.cmake14
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowEigen-1.13.1/TensorFlowEigenConfig.cmake19
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowEigen-1.13.1/TensorFlowEigenConfigVersion.cmake (renamed from infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfigVersion.cmake)2
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLite/CMakeLists.txt (renamed from infra/nnfw/cmake/packages/TensorFlowLite/CMakeLists.txt)16
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfig.cmake (renamed from infra/nnfw/cmake/packages/TensorFlowLiteConfig.cmake)34
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfigVersion.cmake (renamed from infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfigVersion.cmake)2
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowLite-2.3.0/CMakeLists.txt124
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowLite-2.3.0Config.cmake107
-rw-r--r--infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake18
-rw-r--r--infra/nnfw/cmake/packages/TensorflowConfig.cmake2
-rw-r--r--infra/nnfw/command/build21
-rw-r--r--infra/nnfw/command/configure18
-rw-r--r--infra/nnfw/command/copyright-check72
-rw-r--r--infra/nnfw/command/count-unittest74
-rw-r--r--infra/nnfw/command/doxygen7
-rw-r--r--infra/nnfw/command/gen-coverage-report59
-rw-r--r--infra/nnfw/command/install11
-rw-r--r--infra/nnfw/config/build.configuration11
-rw-r--r--infra/nnfw/config/docker.configuration6
-rw-r--r--infra/nnfw/config/gbs.conf6
-rw-r--r--infra/packaging/build96
-rw-r--r--infra/packaging/chklist/LAYOUT_19111547
-rw-r--r--infra/packaging/chklist/LAYOUT_19121539
-rw-r--r--infra/packaging/chklist/TF2CIRCLE_EXIST13
-rw-r--r--infra/packaging/chklist/TF2CIRCLE_RUNNABLE16
-rw-r--r--infra/packaging/chklist/TF2NNPKG_EXIST13
-rw-r--r--infra/packaging/chklist/TF2TFLITE_EXIST13
-rw-r--r--infra/packaging/chklist/TF2TFLITE_RUNNABLE13
-rw-r--r--infra/packaging/preset/2019111549
-rw-r--r--infra/packaging/preset/2019121542
-rw-r--r--infra/packaging/preset/20191231_windows54
-rw-r--r--infra/packaging/preset/20200115_windows53
-rw-r--r--infra/packaging/preset/2020022044
-rw-r--r--infra/packaging/preset/2020050854
-rw-r--r--infra/packaging/preset/20200616_windows60
-rw-r--r--infra/packaging/preset/2020063052
-rw-r--r--infra/packaging/preset/20200731_windows65
-rw-r--r--infra/packaging/res/tf2nnpkg87
-rw-r--r--infra/packaging/res/tf2nnpkg.2019121568
-rw-r--r--infra/packaging/res/tf2nnpkg.2020022089
-rw-r--r--infra/packaging/res/tf2nnpkg.2020050897
-rw-r--r--infra/packaging/res/tf2nnpkg.2020061689
-rw-r--r--infra/packaging/res/tf2nnpkg.20200630130
-rw-r--r--infra/packaging/res/tflite_schema.fbs698
-rw-r--r--infra/packaging/verify82
-rwxr-xr-xinfra/scripts/build-tcm.sh24
-rwxr-xr-xinfra/scripts/build_android_runtime_release.sh21
-rwxr-xr-xinfra/scripts/build_nnpkg.sh19
-rwxr-xr-xinfra/scripts/common.sh155
-rw-r--r--infra/scripts/compiler_modules.sh23
-rwxr-xr-xinfra/scripts/configure_compiler_coverage.sh22
-rwxr-xr-xinfra/scripts/docker_build_cross_aarch64_runtime.sh (renamed from infra/scripts/docker_build_cross_arm_pacl.sh)11
-rwxr-xr-xinfra/scripts/docker_build_cross_arm_benchmark_model.sh49
-rwxr-xr-xinfra/scripts/docker_build_cross_arm_pacl_release.sh48
-rwxr-xr-xinfra/scripts/docker_build_cross_arm_runtime.sh (renamed from infra/scripts/docker_build_cross_arm_neurun.sh)4
-rwxr-xr-xinfra/scripts/docker_build_cross_arm_runtime_release.sh (renamed from infra/scripts/docker_build_cross_arm_neurun_release.sh)4
-rwxr-xr-xinfra/scripts/docker_build_cross_coverage.sh13
-rwxr-xr-xinfra/scripts/docker_build_nncc.sh61
-rwxr-xr-xinfra/scripts/docker_build_test_x64.sh14
-rwxr-xr-xinfra/scripts/docker_build_tizen_cross.sh9
-rwxr-xr-xinfra/scripts/docker_build_tizen_gbs.sh2
-rwxr-xr-xinfra/scripts/docker_collect_nnpkg_resources.sh100
-rwxr-xr-xinfra/scripts/docker_coverage_report.sh13
-rwxr-xr-xinfra/scripts/test_arm_neurun_acl_cl.sh36
-rwxr-xr-xinfra/scripts/test_arm_neurun_acl_neon.sh40
-rwxr-xr-xinfra/scripts/test_arm_neurun_cpu.sh47
-rwxr-xr-xinfra/scripts/test_arm_neurun_mixed.sh44
-rwxr-xr-xinfra/scripts/test_arm_nnpkg.sh26
-rwxr-xr-xinfra/scripts/test_arm_pacl.sh24
-rwxr-xr-xinfra/scripts/test_coverage.sh50
-rwxr-xr-xinfra/scripts/test_make_nnpkg.sh35
-rwxr-xr-xinfra/scripts/test_neurun_interp.sh18
-rwxr-xr-xinfra/scripts/test_tizen_neurun_acl_cl.sh30
-rwxr-xr-xinfra/scripts/test_tizen_neurun_mixed.sh38
-rwxr-xr-xinfra/scripts/test_ubuntu_runtime.sh109
-rwxr-xr-xinfra/scripts/test_ubuntu_runtime_mixed.sh62
-rwxr-xr-xinfra/scripts/test_x64_neurun_cpu.sh20
-rwxr-xr-xinfra/scripts/tizen_xu4_test.sh155
-rwxr-xr-xinfra/scripts/unittest_compiler_xml.sh29
269 files changed, 5605 insertions, 2776 deletions
diff --git a/infra/nncc/3rdparty/.gitignore b/infra/3rdparty/.gitignore
index c3d773e35..c3d773e35 100644
--- a/infra/nncc/3rdparty/.gitignore
+++ b/infra/3rdparty/.gitignore
diff --git a/infra/3rdparty/Eigen/fd6845384b86/URL.default b/infra/3rdparty/Eigen/fd6845384b86/URL.default
new file mode 100644
index 000000000..76b000a52
--- /dev/null
+++ b/infra/3rdparty/Eigen/fd6845384b86/URL.default
@@ -0,0 +1 @@
+https://mirror.bazel.build/bitbucket.org/eigen/eigen/get/fd6845384b86.tar.gz
diff --git a/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.info b/infra/3rdparty/Eigen/fd6845384b86/URL.info
index 8e7a3c2f0..8e7a3c2f0 100644
--- a/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.info
+++ b/infra/3rdparty/Eigen/fd6845384b86/URL.info
diff --git a/infra/nncc/cmake/modules/add_subdirectories.cmake b/infra/cmake/modules/AddSubdirectories.cmake
index 06b7c768d..06b7c768d 100644
--- a/infra/nncc/cmake/modules/add_subdirectories.cmake
+++ b/infra/cmake/modules/AddSubdirectories.cmake
diff --git a/infra/nncc/cmake/modules/Asserts.cmake b/infra/cmake/modules/Asserts.cmake
index e40097e31..e40097e31 100644
--- a/infra/nncc/cmake/modules/Asserts.cmake
+++ b/infra/cmake/modules/Asserts.cmake
diff --git a/infra/cmake/modules/ExternalBuildTools.cmake b/infra/cmake/modules/ExternalBuildTools.cmake
new file mode 100644
index 000000000..4f2027b4b
--- /dev/null
+++ b/infra/cmake/modules/ExternalBuildTools.cmake
@@ -0,0 +1,84 @@
+function(ExternalBuild_CMake)
+ # CMAKE_DIR Path to cmake root script to build (required)
+ # BUILD_DIR Path to build workspace (required)
+ # INSTALL_DIR Path to install (required)
+ # PKG_NAME External package name word for logging and stamp file name (required)
+ # IDENTIFIER String to identify package version (optional)
+ # BUILD_FLAGS Multiple argument to set compiler flag
+ # EXTRA_OPTS Multiple argument to pass options, etc for cmake configuration
+ include(CMakeParseArguments)
+ cmake_parse_arguments(ARG
+ ""
+ "CMAKE_DIR;BUILD_DIR;INSTALL_DIR;PKG_NAME;IDENTIFIER"
+ "BUILD_FLAGS;EXTRA_OPTS"
+ ${ARGN}
+ )
+
+ set(BUILD_STAMP_PATH "${ARG_BUILD_DIR}/${ARG_PKG_NAME}.stamp")
+ set(BUILD_LOG_PATH "${ARG_BUILD_DIR}/${ARG_PKG_NAME}.log")
+ set(INSTALL_STAMP_PATH "${ARG_INSTALL_DIR}/${ARG_PKG_NAME}.stamp")
+ set(INSTALL_LOG_PATH "${ARG_INSTALL_DIR}/${ARG_PKG_NAME}.log")
+
+ set(PKG_IDENTIFIER "")
+ if(DEFINED ARG_IDENTIFIER)
+ set(PKG_IDENTIFIER "${ARG_IDENTIFIER}")
+ endif(DEFINED ARG_IDENTIFIER)
+
+ # NOTE Do NOT retry build once it fails
+ if(EXISTS ${BUILD_STAMP_PATH})
+ file(READ ${BUILD_STAMP_PATH} READ_IDENTIFIER)
+ if("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ return()
+ endif("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ endif(EXISTS ${BUILD_STAMP_PATH})
+
+ # NOTE Do NOT build pre-installed exists
+ if(EXISTS ${INSTALL_STAMP_PATH})
+ file(READ ${INSTALL_STAMP_PATH} READ_IDENTIFIER)
+ if("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ return()
+ endif("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ endif(EXISTS ${INSTALL_STAMP_PATH})
+
+ message(STATUS "Build ${ARG_PKG_NAME} from ${ARG_CMAKE_DIR}")
+
+ file(MAKE_DIRECTORY ${ARG_BUILD_DIR})
+ file(MAKE_DIRECTORY ${ARG_INSTALL_DIR})
+
+ file(WRITE "${BUILD_STAMP_PATH}" "${PKG_IDENTIFIER}")
+
+ execute_process(COMMAND ${CMAKE_COMMAND}
+ -G "${CMAKE_GENERATOR}"
+ -DCMAKE_INSTALL_PREFIX=${ARG_INSTALL_DIR}
+ -DCMAKE_BUILD_TYPE=Release
+ -DCMAKE_CXX_FLAGS=${ARG_BUILD_FLAGS}
+ ${ARG_EXTRA_OPTS}
+ ${ARG_CMAKE_DIR}
+ OUTPUT_FILE ${BUILD_LOG_PATH}
+ ERROR_FILE ${BUILD_LOG_PATH}
+ WORKING_DIRECTORY ${ARG_BUILD_DIR}
+ RESULT_VARIABLE BUILD_EXITCODE)
+
+ if(NOT BUILD_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "${ARG_PKG_NAME} Package: Build failed (check '${BUILD_LOG_PATH}' for details)")
+ endif(NOT BUILD_EXITCODE EQUAL 0)
+
+ set(NUM_BUILD_THREADS 1)
+ if(DEFINED EXTERNALS_BUILD_THREADS)
+ set(NUM_BUILD_THREADS ${EXTERNALS_BUILD_THREADS})
+ endif(DEFINED EXTERNALS_BUILD_THREADS)
+
+ execute_process(COMMAND ${CMAKE_COMMAND} --build . -- -j${NUM_BUILD_THREADS} install
+ OUTPUT_FILE ${INSTALL_LOG_PATH}
+ ERROR_FILE ${INSTALL_LOG_PATH}
+ WORKING_DIRECTORY ${ARG_BUILD_DIR}
+ RESULT_VARIABLE INSTALL_EXITCODE)
+
+ if(NOT INSTALL_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "${ARG_PKG_NAME} Package: Installation failed (check '${INSTALL_LOG_PATH}' for details)")
+ endif(NOT INSTALL_EXITCODE EQUAL 0)
+
+ file(WRITE "${INSTALL_STAMP_PATH}" "${PKG_IDENTIFIER}")
+
+ message(STATUS "${ARG_PKG_NAME} Package: Done")
+endfunction(ExternalBuild_CMake)
diff --git a/infra/nnfw/cmake/modules/ExternalProjectTools.cmake b/infra/cmake/modules/ExternalProjectTools.cmake
index 71d74e5a6..afa290c3e 100644
--- a/infra/nnfw/cmake/modules/ExternalProjectTools.cmake
+++ b/infra/cmake/modules/ExternalProjectTools.cmake
@@ -6,5 +6,3 @@ macro(add_extdirectory DIR TAG)
add_subdirectory(${DIR} "${CMAKE_BINARY_DIR}/externals/${TAG}")
endif(ARG_EXCLUDE_FROM_ALL)
endmacro(add_extdirectory)
-
-set(ExternalProjectTools_FOUND TRUE)
diff --git a/infra/nncc/cmake/modules/ExternalSourceTools.cmake b/infra/cmake/modules/ExternalSourceTools.cmake
index 3baaeba8e..87cb15270 100644
--- a/infra/nncc/cmake/modules/ExternalSourceTools.cmake
+++ b/infra/cmake/modules/ExternalSourceTools.cmake
@@ -3,7 +3,7 @@
#
function(ExternalSource_Download PREFIX)
include(CMakeParseArguments)
- nncc_include(StampTools)
+ nnas_include(StampTools)
cmake_parse_arguments(ARG "" "DIRNAME;URL;CHECKSUM" "" ${ARGN})
@@ -25,7 +25,7 @@ function(ExternalSource_Download PREFIX)
get_filename_component(FILENAME ${URL} NAME)
- set(CACHE_DIR "${NNCC_EXTERNALS_DIR}")
+ set(CACHE_DIR "${NNAS_EXTERNALS_DIR}")
set(OUT_DIR "${CACHE_DIR}/${DIRNAME}")
set(TMP_DIR "${CACHE_DIR}/${DIRNAME}-tmp")
@@ -39,16 +39,28 @@ function(ExternalSource_Download PREFIX)
# Compare URL in STAMP file and the given URL
Stamp_Check(URL_CHECK "${STAMP_PATH}" "${URL}")
- if(NOT URL_CHECK)
+ if(NOT EXISTS "${OUT_DIR}" OR NOT URL_CHECK)
file(REMOVE "${STAMP_PATH}")
file(REMOVE_RECURSE "${OUT_DIR}")
file(REMOVE_RECURSE "${TMP_DIR}")
file(MAKE_DIRECTORY "${TMP_DIR}")
- message("-- Download ${PREFIX} from ${URL}")
- file(DOWNLOAD ${URL} "${DOWNLOAD_PATH}" SHOW_PROGRESS)
- message("-- Download ${PREFIX} from ${URL} - done")
+ message(STATUS "Download ${PREFIX} from ${URL}")
+ file(DOWNLOAD ${URL} "${DOWNLOAD_PATH}"
+ STATUS status
+ LOG log)
+
+ list(GET status 0 status_code)
+ list(GET status 1 status_string)
+
+ if(NOT status_code EQUAL 0)
+ message(FATAL_ERROR "error: downloading '${URL}' failed
+ status_code: ${status_code}
+ status_string: ${status_string}
+ log: ${log}")
+ endif()
+ message(STATUS "Download ${PREFIX} from ${URL} - done")
# Verify checksum
if(ARG_CHECKSUM)
@@ -72,13 +84,13 @@ function(ExternalSource_Download PREFIX)
message(STATUS "Verify ${PREFIX} archive - done")
endif(ARG_CHECKSUM)
- message("-- Extract ${PREFIX}")
+ message(STATUS "Extract ${PREFIX}")
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz "${DOWNLOAD_PATH}"
WORKING_DIRECTORY "${TMP_DIR}")
file(REMOVE "${DOWNLOAD_PATH}")
- message("-- Extract ${PREFIX} - done")
+ message(STATUS "Extract ${PREFIX} - done")
- message("-- Cleanup ${PREFIX}")
+ message(STATUS "Cleanup ${PREFIX}")
file(GLOB contents "${TMP_DIR}/*")
list(LENGTH contents n)
if(NOT n EQUAL 1 OR NOT IS_DIRECTORY "${contents}")
@@ -90,7 +102,7 @@ function(ExternalSource_Download PREFIX)
file(RENAME ${contents} "${OUT_DIR}")
file(REMOVE_RECURSE "${TMP_DIR}")
file(WRITE "${STAMP_PATH}" "${URL}")
- message("-- Cleanup ${PREFIX} - done")
+ message(STATUS "Cleanup ${PREFIX} - done")
endif()
set(${PREFIX}_SOURCE_DIR "${OUT_DIR}" PARENT_SCOPE)
diff --git a/infra/nnfw/cmake/modules/IdentifyPlatform.cmake b/infra/cmake/modules/IdentifyPlatform.cmake
index 9313eefcf..69fe48cad 100644
--- a/infra/nnfw/cmake/modules/IdentifyPlatform.cmake
+++ b/infra/cmake/modules/IdentifyPlatform.cmake
@@ -1,27 +1,42 @@
# set host platform to build
if(NOT HOST_ARCH OR "${HOST_ARCH}" STREQUAL "")
- set(HOST_ARCH ${CMAKE_HOST_SYSTEM_PROCESSOR})
+ string(TOLOWER ${CMAKE_HOST_SYSTEM_PROCESSOR} HOST_ARCH)
+else()
+ string(TOLOWER ${HOST_ARCH} HOST_ARCH)
endif()
# set target platform to run
if(NOT TARGET_ARCH OR "${TARGET_ARCH}" STREQUAL "")
- set(TARGET_ARCH "${HOST_ARCH}")
+ string(TOLOWER ${CMAKE_SYSTEM_PROCESSOR} TARGET_ARCH)
+else()
+ string(TOLOWER ${TARGET_ARCH} TARGET_ARCH)
endif()
if(NOT DEFINED HOST_OS)
string(TOLOWER ${CMAKE_HOST_SYSTEM_NAME} HOST_OS)
+else()
+ string(TOLOWER ${HOST_OS} HOST_OS)
endif()
if(NOT DEFINED TARGET_OS)
- set(TARGET_OS "${HOST_OS}")
+ string(TOLOWER ${CMAKE_SYSTEM_NAME} TARGET_OS)
+else()
+ string(TOLOWER ${TARGET_OS} TARGET_OS)
+endif()
+
+# If HOST_ARCH, TARGET_ARCH from CMAKE_HOST_SYSTEM_PROCESSOR, CMAKE_SYSTEM_NAME is arm64
+# Change ARCH name to aarch64
+if("${HOST_ARCH}" STREQUAL "arm64")
+ set(HOST_ARCH "aarch64")
+endif()
+if("${TARGET_ARCH}" STREQUAL "arm64")
+ set(TARGET_ARCH "aarch64")
endif()
if("${HOST_ARCH}" STREQUAL "x86_64")
set(HOST_ARCH_BASE ${HOST_ARCH})
elseif("${HOST_ARCH}" STREQUAL "armv7l")
set(HOST_ARCH_BASE "arm")
-elseif("${HOST_ARCH}" STREQUAL "arm64")
- set(HOST_ARCH_BASE "arm64")
elseif("${HOST_ARCH}" STREQUAL "aarch64")
set(HOST_ARCH_BASE "aarch64")
else()
@@ -32,8 +47,6 @@ if("${TARGET_ARCH}" STREQUAL "x86_64")
set(TARGET_ARCH_BASE ${TARGET_ARCH})
elseif("${TARGET_ARCH}" STREQUAL "armv7l")
set(TARGET_ARCH_BASE "arm")
-elseif("${TARGET_ARCH}" STREQUAL "arm64")
- set(TARGET_ARCH_BASE "arm64")
elseif("${TARGET_ARCH}" STREQUAL "aarch64")
set(TARGET_ARCH_BASE "aarch64")
else()
diff --git a/infra/nncc/cmake/modules/ListFile.cmake b/infra/cmake/modules/ListFile.cmake
index aee0d162a..aee0d162a 100644
--- a/infra/nncc/cmake/modules/ListFile.cmake
+++ b/infra/cmake/modules/ListFile.cmake
diff --git a/infra/nncc/cmake/modules/OptionTools.cmake b/infra/cmake/modules/OptionTools.cmake
index 0ca50f7c3..0ca50f7c3 100644
--- a/infra/nncc/cmake/modules/OptionTools.cmake
+++ b/infra/cmake/modules/OptionTools.cmake
diff --git a/infra/nncc/cmake/modules/OptionalTargetTools.cmake b/infra/cmake/modules/OptionalTargetTools.cmake
index 8bf2c37ef..8bf2c37ef 100644
--- a/infra/nncc/cmake/modules/OptionalTargetTools.cmake
+++ b/infra/cmake/modules/OptionalTargetTools.cmake
diff --git a/infra/nncc/cmake/modules/StampTools.cmake b/infra/cmake/modules/StampTools.cmake
index d38e033ff..d38e033ff 100644
--- a/infra/nncc/cmake/modules/StampTools.cmake
+++ b/infra/cmake/modules/StampTools.cmake
diff --git a/infra/nncc/cmake/modules/TargetRequire.cmake b/infra/cmake/modules/TargetRequire.cmake
index 801600dd9..801600dd9 100644
--- a/infra/nncc/cmake/modules/TargetRequire.cmake
+++ b/infra/cmake/modules/TargetRequire.cmake
diff --git a/infra/nncc/cmake/modules/ThirdPartyTools.cmake b/infra/cmake/modules/ThirdPartyTools.cmake
index 8fbeacf6e..895fc8b91 100644
--- a/infra/nncc/cmake/modules/ThirdPartyTools.cmake
+++ b/infra/cmake/modules/ThirdPartyTools.cmake
@@ -15,7 +15,7 @@ function(ThirdParty_URL VARNAME)
message(FATAL_ERROR "VERSION is missing")
endif(NOT ARG_VERSION)
- set(PACKAGE_INFO_DIR "${NNCC_PROJECT_SOURCE_DIR}/infra/nncc/3rdparty/${ARG_PACKAGE}/${ARG_VERSION}")
+ set(PACKAGE_INFO_DIR "${NNAS_PROJECT_SOURCE_DIR}/infra/3rdparty/${ARG_PACKAGE}/${ARG_VERSION}")
set(PACKAGE_URL_FILE "${PACKAGE_INFO_DIR}/URL.default")
set(PACKAGE_URL_LOCAL_FILE "${PACKAGE_INFO_DIR}/URL.local")
diff --git a/infra/cmake/packages/ARMComputeSourceConfig.cmake b/infra/cmake/packages/ARMComputeSourceConfig.cmake
new file mode 100644
index 000000000..0ffa0cd35
--- /dev/null
+++ b/infra/cmake/packages/ARMComputeSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_ARMComputeSource_import)
+ if(NOT ${DOWNLOAD_ARMCOMPUTE})
+ set(ARMComputeSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ${DOWNLOAD_ARMCOMPUTE})
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(ARMCOMPUTE_URL ${EXTERNAL_DOWNLOAD_SERVER}/ARM-software/ComputeLibrary/archive/v20.05.tar.gz)
+ ExternalSource_Download(ARMCOMPUTE ${ARMCOMPUTE_URL})
+
+ set(ARMComputeSource_DIR ${ARMCOMPUTE_SOURCE_DIR} PARENT_SCOPE)
+ set(ARMComputeSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_ARMComputeSource_import)
+
+_ARMComputeSource_import()
diff --git a/infra/nncc/cmake/packages/AbseilConfig.cmake b/infra/cmake/packages/AbseilConfig.cmake
index 4c731008a..e16dd94d7 100644
--- a/infra/nncc/cmake/packages/AbseilConfig.cmake
+++ b/infra/cmake/packages/AbseilConfig.cmake
@@ -1,5 +1,5 @@
function(_Abseil_import)
- nncc_find_package(AbseilSource QUIET)
+ nnas_find_package(AbseilSource QUIET)
if(NOT AbseilSource_FOUND)
message("Abseil: NOT FOUND (Cannot access source)")
@@ -8,7 +8,7 @@ function(_Abseil_import)
endif(NOT AbseilSource_FOUND)
if(NOT TARGET abseil)
- nncc_include(ExternalProjectTools)
+ nnas_include(ExternalProjectTools)
# NOTE Turn off abseil testing
set(BUILD_TESTING OFF)
@@ -17,13 +17,15 @@ function(_Abseil_import)
add_library(abseil INTERFACE)
target_link_libraries(abseil INTERFACE
# From "Available Abseil CMake Public Targets" in CMake/README.md
- absl::base
absl::algorithm
- absl::container
+ absl::base
absl::debugging
+ absl::flat_hash_map
+ absl::flags
absl::memory
absl::meta
absl::numeric
+ absl::random_random
absl::strings
absl::synchronization
absl::time
diff --git a/infra/nncc/cmake/packages/AbseilSourceConfig.cmake b/infra/cmake/packages/AbseilSourceConfig.cmake
index d980ac653..8be732660 100644
--- a/infra/nncc/cmake/packages/AbseilSourceConfig.cmake
+++ b/infra/cmake/packages/AbseilSourceConfig.cmake
@@ -4,18 +4,22 @@ function(_AbseilSource_import)
return()
endif(NOT DOWNLOAD_ABSEIL)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
# NOTE TensorFlow 1.12 downloads abseil from the following URL
# - https://github.com/abseil/abseil-cpp/archive/48cd2c3f351ff188bc85684b84a91b6e6d17d896.tar.gz
#
# The last change of "48cd2c3f351" was commited on 2018.09.27
#
- # Let's use the latest released version (2018-12 release)
- envoption(ABSEIL_URL https://github.com/abseil/abseil-cpp/archive/20181200.tar.gz)
+ # Let's use the latest released version (2020-02 release patch 2)
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ envoption(ABSEIL_URL ${EXTERNAL_DOWNLOAD_SERVER}/abseil/abseil-cpp/archive/20200225.2.tar.gz)
- ExternalSource_Download(ABSEIL ${ABSEIL_URL})
+ ExternalSource_Download(ABSEIL
+ DIRNAME ABSEIL
+ URL ${ABSEIL_URL}
+ CHECKSUM MD5=73f2b6e72f1599a9139170c29482ddc4)
set(AbseilSource_DIR ${ABSEIL_SOURCE_DIR} PARENT_SCOPE)
set(AbseilSource_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/cmake/packages/BoostConfig.cmake b/infra/cmake/packages/BoostConfig.cmake
new file mode 100644
index 000000000..e72f742f3
--- /dev/null
+++ b/infra/cmake/packages/BoostConfig.cmake
@@ -0,0 +1,88 @@
+# Let's build and install Boost libraries
+function(_Boost_Build Boost_PREFIX)
+ nnas_find_package(BoostSource QUIET)
+
+ if(NOT BoostSource_FOUND)
+ return()
+ endif(NOT BoostSource_FOUND)
+
+ #### Generic configurations
+ if(NOT EXISTS ${BoostSource_DIR}/b2)
+ execute_process(COMMAND "${BoostSource_DIR}/bootstrap.sh"
+ WORKING_DIRECTORY ${BoostSource_DIR}
+ RESULT_VARIABLE Boost_BUILD)
+ endif()
+
+ set(BoostBuild_DIR ${BoostSource_DIR})
+ set(BoostInstall_DIR ${Boost_PREFIX})
+
+ unset(Boost_Options)
+
+ list(APPEND Boost_Options --build-dir=${BoostBuild_DIR})
+ list(APPEND Boost_Options --prefix=${BoostInstall_DIR})
+ list(APPEND Boost_Options --with-log)
+ list(APPEND Boost_Options --with-program_options)
+ list(APPEND Boost_Options --with-system)
+ list(APPEND Boost_Options --with-filesystem)
+
+ if(DEFINED EXTERNALS_BUILD_THREADS)
+ set(N ${EXTERNALS_BUILD_THREADS})
+ else(DEFINED EXTERNALS_BUILD_THREADS)
+ include(ProcessorCount)
+ ProcessorCount(N)
+ endif(DEFINED EXTERNALS_BUILD_THREADS)
+
+ if((NOT N EQUAL 0) AND BUILD_EXT_MULTITHREAD)
+ list(APPEND Boost_Options -j${N})
+ endif()
+
+ set(JAM_FILENAME ${BoostBuild_DIR}/user-config.jam)
+
+ if(ANDROID)
+ set(NDK_CXX ${NDK_DIR}/toolchains/llvm/prebuilt/linux-x86_64/bin/${TARGET_ARCH}-linux-android${ANDROID_API_LEVEL}-clang++)
+ file(WRITE ${JAM_FILENAME} "using clang : arm64v8a : ${NDK_CXX} ;")
+ list(APPEND Boost_Options toolset=clang-arm64v8a)
+ # without target-os=android, it complains it cannot find -lrt.
+ list(APPEND Boost_Options target-os=android)
+ else()
+ file(WRITE ${JAM_FILENAME} "using gcc : local : ${CMAKE_CXX_COMPILER} ;\n")
+ list(APPEND Boost_Options toolset=gcc-local)
+ endif(ANDROID)
+
+ # Install Boost libraries
+ execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${BoostInstall_DIR}")
+ execute_process(COMMAND /usr/bin/env BOOST_BUILD_PATH="${BoostBuild_DIR}" ${BoostSource_DIR}/b2 install ${Boost_Options}
+ WORKING_DIRECTORY ${BoostSource_DIR})
+
+endfunction(_Boost_Build)
+
+# Find pre-installed boost library and update Boost variables.
+if (NOT BUILD_BOOST)
+ # BoostConfig.cmake does not honor QUIET argument at least till cmake 1.70.0.
+ # Thus, don't try to find_package if you're not entirely sure you have boost.
+ find_package(Boost 1.58.0 QUIET COMPONENTS log program_options filesystem system)
+ if(Boost_FOUND)
+ return()
+ endif()
+endif()
+
+set(Boost_PREFIX ${CMAKE_INSTALL_PREFIX})
+
+if(BUILD_BOOST)
+ _Boost_Build("${Boost_PREFIX}")
+
+ # Let's use locally built boost to system-wide one so sub modules
+ # needing Boost library and header files can search for them
+ # in ${Boost_PREFIX} directory
+ list(APPEND CMAKE_PREFIX_PATH "${Boost_PREFIX}")
+
+ # Without Boost_INCLUDE_DIR, it complains the variable is missing during find_package.
+ set(Boost_INCLUDE_DIR ${CMAKE_INSTALL_PREFIX}/include)
+
+ # 1) without static build, it will complain it cannot find libc++_shared.so.
+ # 2) We uses static libraries for other libraries.
+ set(Boost_USE_STATIC_LIBS ON)
+
+ # We built boost library so update Boost variables.
+ find_package(Boost 1.58.0 QUIET COMPONENTS log program_options filesystem system)
+endif(BUILD_BOOST)
diff --git a/infra/cmake/packages/BoostSourceConfig.cmake b/infra/cmake/packages/BoostSourceConfig.cmake
new file mode 100644
index 000000000..2477a4857
--- /dev/null
+++ b/infra/cmake/packages/BoostSourceConfig.cmake
@@ -0,0 +1,19 @@
+function(_BoostSource_import)
+ if(NOT ${DOWNLOAD_BOOST})
+ set(BoostSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ${DOWNLOAD_BOOST})
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # EXTERNAL_DOWNLOAD_SERVER will be overwritten by CI server to use mirror server.
+ envoption(EXTERNAL_DOWNLOAD_SERVER "http://sourceforge.net")
+ envoption(BOOST_URL ${EXTERNAL_DOWNLOAD_SERVER}/projects/boost/files/boost/1.58.0/boost_1_58_0.tar.gz)
+ ExternalSource_Download(BOOST ${BOOST_URL})
+
+ set(BoostSource_DIR ${BOOST_SOURCE_DIR} PARENT_SCOPE)
+ set(BoostSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_BoostSource_import)
+
+_BoostSource_import()
diff --git a/infra/nncc/cmake/packages/Caffe/CMakeLists.txt b/infra/cmake/packages/Caffe/CMakeLists.txt
index 51b723edd..51b723edd 100644
--- a/infra/nncc/cmake/packages/Caffe/CMakeLists.txt
+++ b/infra/cmake/packages/Caffe/CMakeLists.txt
diff --git a/infra/nncc/cmake/packages/CaffeConfig.cmake b/infra/cmake/packages/CaffeConfig.cmake
index 7b5eb2f2e..3ae5440e6 100644
--- a/infra/nncc/cmake/packages/CaffeConfig.cmake
+++ b/infra/cmake/packages/CaffeConfig.cmake
@@ -1,12 +1,12 @@
function(_Caffe_import)
- nncc_find_package(CaffeSource QUIET)
+ nnas_find_package(CaffeSource QUIET)
if(NOT CaffeSource_FOUND)
set(Caffe_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT CaffeSource_FOUND)
- nncc_find_package(CaffeProto QUIET)
+ nnas_find_package(CaffeProto QUIET)
if(NOT CaffeProto_FOUND)
set(Caffe_FOUND FALSE PARENT_SCOPE)
@@ -20,7 +20,7 @@ function(_Caffe_import)
return()
endif()
- find_package(HDF5 COMPONENTS HL QUIET)
+ nnas_find_package(HDF5 QUIET)
if(NOT HDF5_FOUND)
set(Caffe_FOUND FALSE PARENT_SCOPE)
@@ -36,14 +36,14 @@ function(_Caffe_import)
return()
endif()
- nncc_find_package(GLog QUIET)
+ nnas_find_package(GLog QUIET)
if(NOT GLog_FOUND)
set(Caffe_FOUND FALSE PARENT_SCOPE)
return()
endif()
- nncc_find_package(GFlags QUIET)
+ nnas_find_package(GFlags QUIET)
if(NOT GFlags_FOUND)
set(Caffe_FOUND FALSE PARENT_SCOPE)
@@ -51,7 +51,7 @@ function(_Caffe_import)
endif()
if(NOT TARGET caffe)
- nncc_include(ExternalProjectTools)
+ nnas_include(ExternalProjectTools)
add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Caffe" caffe)
message(STATUS "Found Caffe: TRUE")
endif(NOT TARGET caffe)
diff --git a/infra/nncc/cmake/packages/CaffeProto/CMakeLists.txt b/infra/cmake/packages/CaffeProto/CMakeLists.txt
index f9f8724a0..f9f8724a0 100644
--- a/infra/nncc/cmake/packages/CaffeProto/CMakeLists.txt
+++ b/infra/cmake/packages/CaffeProto/CMakeLists.txt
diff --git a/infra/nncc/cmake/packages/CaffeProtoConfig.cmake b/infra/cmake/packages/CaffeProtoConfig.cmake
index 33c239509..40ea91319 100644
--- a/infra/nncc/cmake/packages/CaffeProtoConfig.cmake
+++ b/infra/cmake/packages/CaffeProtoConfig.cmake
@@ -1,12 +1,12 @@
function(_CaffeProto_import)
- nncc_find_package(CaffeSource QUIET)
+ nnas_find_package(CaffeSource QUIET)
if(NOT CaffeSource_FOUND)
set(CaffeProto_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT CaffeSource_FOUND)
- nncc_find_package(Protobuf QUIET)
+ nnas_find_package(Protobuf QUIET)
if(NOT Protobuf_FOUND)
set(CaffeProto_FOUND FALSE PARENT_SCOPE)
@@ -14,7 +14,7 @@ function(_CaffeProto_import)
endif(NOT Protobuf_FOUND)
if(NOT TARGET caffeproto)
- nncc_include(ExternalProjectTools)
+ nnas_include(ExternalProjectTools)
add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/CaffeProto" caffeproto)
endif(NOT TARGET caffeproto)
diff --git a/infra/nncc/cmake/packages/CaffeSourceConfig.cmake b/infra/cmake/packages/CaffeSourceConfig.cmake
index 91d334235..41cc2c9f7 100644
--- a/infra/nncc/cmake/packages/CaffeSourceConfig.cmake
+++ b/infra/cmake/packages/CaffeSourceConfig.cmake
@@ -4,8 +4,8 @@ function(_CaffeSource_import)
return()
endif(NOT DOWNLOAD_CAFFE)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
envoption(CAFFE_URL https://github.com/BVLC/caffe/archive/1.0.tar.gz)
diff --git a/infra/cmake/packages/CpuInfoSourceConfig.cmake b/infra/cmake/packages/CpuInfoSourceConfig.cmake
new file mode 100644
index 000000000..60419ad9f
--- /dev/null
+++ b/infra/cmake/packages/CpuInfoSourceConfig.cmake
@@ -0,0 +1,21 @@
+function(_CpuInfoSource_import)
+ if(NOT ${DOWNLOAD_CPUINFO})
+ set(CpuInfoSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ${DOWNLOAD_CPUINFO})
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ # CPUINFO commit including patch from tflite v2.3
+ envoption(CPUINFO_URL ${EXTERNAL_DOWNLOAD_SERVER}/pytorch/cpuinfo/archive/63b254577ed77a8004a9be6ac707f3dccc4e1fd9.tar.gz)
+ ExternalSource_Download(CPUINFO
+ DIRNAME CPUINFO
+ URL ${CPUINFO_URL})
+
+ set(CpuInfoSource_DIR ${CPUINFO_SOURCE_DIR} PARENT_SCOPE)
+ set(CpuInfoSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_CpuInfoSource_import)
+
+_CpuInfoSource_import()
diff --git a/infra/cmake/packages/Eigen-fd6845384b86Config.cmake b/infra/cmake/packages/Eigen-fd6845384b86Config.cmake
new file mode 100644
index 000000000..e98856af5
--- /dev/null
+++ b/infra/cmake/packages/Eigen-fd6845384b86Config.cmake
@@ -0,0 +1,25 @@
+# NOTE TensorFlow 1.12 uses eigen commit ID fd6845384b86
+
+# find_package rejects version with commit number. Commit ID is appended to the package name
+# as a workaround.
+#
+# TODO Find a better way
+function(_Eigen_import)
+ nnas_find_package(EigenSource-fd6845384b86 QUIET)
+
+ if(NOT EigenSource-fd6845384b86_FOUND)
+ set(Eigen-fd6845384b86_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT EigenSource-fd6845384b86_FOUND)
+
+ if(NOT TARGET eigen-fd6845384b86)
+ add_library(eigen-fd6845384b86 INTERFACE)
+ target_include_directories(eigen-fd6845384b86 INTERFACE "${EigenSource_DIR}")
+ # Add EIGEN_MPL2_ONLY to remove license issue posibility
+ target_compile_definitions(eigen-fd6845384b86 INTERFACE EIGEN_MPL2_ONLY)
+ endif(NOT TARGET eigen-fd6845384b86)
+
+ set(Eigen-fd6845384b86_FOUND TRUE PARENT_SCOPE)
+endfunction(_Eigen_import)
+
+_Eigen_import()
diff --git a/infra/nncc/cmake/packages/EigenConfig.cmake b/infra/cmake/packages/EigenConfig.cmake
index ac5164f68..18aee13d7 100644
--- a/infra/nncc/cmake/packages/EigenConfig.cmake
+++ b/infra/cmake/packages/EigenConfig.cmake
@@ -1,5 +1,5 @@
function(_Eigen_import)
- nncc_find_package(EigenSource QUIET)
+ nnas_find_package(EigenSource QUIET)
if(NOT EigenSource_FOUND)
set(Eigen_FOUND FALSE PARENT_SCOPE)
@@ -9,6 +9,8 @@ function(_Eigen_import)
if(NOT TARGET eigen)
add_library(eigen INTERFACE)
target_include_directories(eigen INTERFACE "${EigenSource_DIR}")
+ # Add EIGEN_MPL2_ONLY to remove license issue posibility
+ target_compile_definitions(eigen INTERFACE EIGEN_MPL2_ONLY)
endif(NOT TARGET eigen)
set(EigenSource_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/nncc/cmake/packages/EigenSource-fd6845384b86Config.cmake b/infra/cmake/packages/EigenSource-fd6845384b86Config.cmake
index bf0f94d29..4a854e77b 100644
--- a/infra/nncc/cmake/packages/EigenSource-fd6845384b86Config.cmake
+++ b/infra/cmake/packages/EigenSource-fd6845384b86Config.cmake
@@ -8,9 +8,10 @@ function(_import)
return()
endif(NOT DOWNLOAD_EIGEN)
- nncc_include(ExternalSourceTools)
- nncc_include(ThirdPartyTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(ThirdPartyTools)
+ # NOTE TensorFlow 1.12 downloads eigen from the following URL
ThirdParty_URL(EIGEN_URL PACKAGE Eigen VERSION fd6845384b86)
ExternalSource_Download(EIGEN
diff --git a/infra/cmake/packages/EigenSourceConfig.cmake b/infra/cmake/packages/EigenSourceConfig.cmake
new file mode 100644
index 000000000..4aaeb3d00
--- /dev/null
+++ b/infra/cmake/packages/EigenSourceConfig.cmake
@@ -0,0 +1,24 @@
+function(_EigenSource_import)
+ if(NOT DOWNLOAD_EIGEN)
+ set(EigenSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_EIGEN)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # NOTE TensorFlow 1.13.1 uses https://bitbucket.org/eigen/eigen/get/9f48e814419e.tar.gz
+ # but it has a issue https://eigen.tuxfamily.org/bz/show_bug.cgi?id=1643
+ # The following URL resolves above issue
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://mirror.bazel.build/bitbucket.org")
+ envoption(EIGEN_1_13_1_URL ${EXTERNAL_DOWNLOAD_SERVER}/eigen/eigen/get/88fc23324517.tar.gz)
+
+ ExternalSource_Download(EIGEN
+ DIRNAME EIGEN
+ URL ${EIGEN_1_13_1_URL})
+
+ set(EigenSource_DIR ${EIGEN_SOURCE_DIR} PARENT_SCOPE)
+ set(EigenSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_EigenSource_import)
+
+_EigenSource_import()
diff --git a/infra/nncc/cmake/packages/Farmhash/CMakeLists.txt b/infra/cmake/packages/Farmhash/CMakeLists.txt
index 3da57a498..3da57a498 100644
--- a/infra/nncc/cmake/packages/Farmhash/CMakeLists.txt
+++ b/infra/cmake/packages/Farmhash/CMakeLists.txt
diff --git a/infra/nncc/cmake/packages/FarmhashConfig.cmake b/infra/cmake/packages/FarmhashConfig.cmake
index 68f3d7c49..1ab18e9b0 100644
--- a/infra/nncc/cmake/packages/FarmhashConfig.cmake
+++ b/infra/cmake/packages/FarmhashConfig.cmake
@@ -1,5 +1,5 @@
function(_Farmhash_import)
- nncc_find_package(FarmhashSource QUIET)
+ nnas_find_package(FarmhashSource QUIET)
if(NOT FarmhashSource_FOUND)
set(Farmhash_FOUND FALSE PARENT_SCOPE)
@@ -7,7 +7,7 @@ function(_Farmhash_import)
endif(NOT FarmhashSource_FOUND)
if(NOT TARGET farmhash)
- nncc_include(ExternalProjectTools)
+ nnas_include(ExternalProjectTools)
add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Farmhash" farmhash)
endif(NOT TARGET farmhash)
diff --git a/infra/cmake/packages/FarmhashSourceConfig.cmake b/infra/cmake/packages/FarmhashSourceConfig.cmake
new file mode 100644
index 000000000..a19c8b992
--- /dev/null
+++ b/infra/cmake/packages/FarmhashSourceConfig.cmake
@@ -0,0 +1,21 @@
+function(_FarmhashSource_import)
+ if(NOT DOWNLOAD_FARMHASH)
+ set(FarmhashSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_FARMHASH)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # NOTE TensorFlow 1.12 downloads farmhash from the following URL
+ # TensorFlow 1.13.1 downloads farmhash from the following URL
+ # TensorFlow 2.3.0 downloads farmhash from the following URL
+ envoption(FARMHASH_1_12_URL https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz)
+
+ ExternalSource_Download(FARMHASH ${FARMHASH_1_12_URL})
+
+ set(FarmhashSource_DIR ${FARMHASH_SOURCE_DIR} PARENT_SCOPE)
+ set(FarmhashSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_FarmhashSource_import)
+
+_FarmhashSource_import()
diff --git a/infra/nncc/cmake/packages/FlatBuffersConfig.cmake b/infra/cmake/packages/FlatBuffersConfig.cmake
index 45511ca5e..da084e7d3 100644
--- a/infra/nncc/cmake/packages/FlatBuffersConfig.cmake
+++ b/infra/cmake/packages/FlatBuffersConfig.cmake
@@ -8,52 +8,27 @@ function(_FlatBuffers_build)
return()
endif(NOT BUILD_FLATBUFFERS)
- nncc_find_package(FlatBuffersSource EXACT 1.10 QUIET)
+ nnas_find_package(FlatBuffersSource EXACT 1.10 QUIET)
if(NOT FlatBuffersSource_FOUND)
# Source is not available
return()
endif(NOT FlatBuffersSource_FOUND)
- # TODO Introduce helper functions
- set(FLATBUFFERS_BUILD "${CMAKE_BINARY_DIR}/externals/FLATBUFFERS/build")
- set(FLATBUFFERS_INSTALL "${NNCC_OVERLAY_DIR}")
+ set(ADDITIONAL_CXX_FLAGS "")
+ if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 8.0)
+ set(ADDITIONAL_CXX_FLAGS "-Wno-error=class-memaccess")
+ endif()
- set(STAMP_PATH "${FLATBUFFERS_INSTALL}/FLATBUFFERS.stamp")
- set(LOG_PATH "${FLATBUFFERS_INSTALL}/FLATBUFFERS.log")
+ nnas_include(ExternalBuildTools)
+ ExternalBuild_CMake(CMAKE_DIR ${FlatBuffersSource_DIR}
+ BUILD_DIR ${CMAKE_BINARY_DIR}/externals/FLATBUFFERS/build
+ INSTALL_DIR ${EXT_OVERLAY_DIR}
+ BUILD_FLAGS ${ADDITIONAL_CXX_FLAGS}
+ IDENTIFIER "1.10-fix2"
+ EXTRA_OPTS "-DFLATBUFFERS_BUILD_TESTS:BOOL=OFF"
+ PKG_NAME "FLATBUFFERS")
- if(EXISTS ${STAMP_PATH})
- return()
- endif(EXISTS ${STAMP_PATH})
-
- message(STATUS "Build Flatbuffers from ${FlatBuffersSource_DIR}")
-
- file(MAKE_DIRECTORY ${FLATBUFFERS_BUILD})
- file(MAKE_DIRECTORY ${FLATBUFFERS_INSTALL})
-
- # NOTE Do NOT retry Flatbuffers build once it fails
- file(WRITE "${STAMP_PATH}")
-
- execute_process(COMMAND ${CMAKE_COMMAND}
- -DCMAKE_INSTALL_PREFIX=${FLATBUFFERS_INSTALL}
- -DCMAKE_BUILD_TYPE=Release
- ${FlatBuffersSource_DIR}
- OUTPUT_FILE ${LOG_PATH}
- ERROR_FILE ${LOG_PATH}
- WORKING_DIRECTORY ${FLATBUFFERS_BUILD}
- RESULT_VARIABLE BUILD_EXITCODE)
-
- execute_process(COMMAND ${CMAKE_COMMAND} --build . -- install
- OUTPUT_FILE ${LOG_PATH}
- ERROR_FILE ${LOG_PATH}
- WORKING_DIRECTORY ${FLATBUFFERS_BUILD}
- RESULT_VARIABLE INSTALL_EXITCODE)
-
- if(BUILD_EXITCODE EQUAL 0 AND INSTALL_EXITCODE EQUAL 0)
- message(STATUS "Succeeded in building Flatbuffers")
- else()
- message(FATAL_ERROR "Fail to build Flatbuffers (check '${LOG_PATH}' for details)")
- endif(BUILD_EXITCODE EQUAL 0 AND INSTALL_EXITCODE EQUAL 0)
endfunction(_FlatBuffers_build)
_FlatBuffers_build()
@@ -93,11 +68,17 @@ if(FlatBuffers_FOUND)
endfunction(FlatBuffers_Generate)
function(FlatBuffers_Target TGT)
- set(oneValueArgs OUTPUT_DIR SCHEMA_DIR)
+ set(oneValueArgs OUTPUT_DIR SCHEMA_DIR INCLUDE_DIR)
set(multiValueArgs SCHEMA_FILES)
cmake_parse_arguments(ARG "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
+ # Use OUTPUT_DIR as INCLUDE_DIR if INCLUDE_DIR is not specified
+ if(NOT ARG_INCLUDE_DIR)
+ set(ARG_INCLUDE_DIR ${ARG_OUTPUT_DIR})
+ endif(NOT ARG_INCLUDE_DIR)
+
get_filename_component(abs_output_dir ${ARG_OUTPUT_DIR} ABSOLUTE)
+ get_filename_component(abs_include_dir ${ARG_INCLUDE_DIR} ABSOLUTE)
get_filename_component(abs_schema_dir ${ARG_SCHEMA_DIR} ABSOLUTE)
# Let's reset list variables before using them
@@ -129,7 +110,7 @@ if(FlatBuffers_FOUND)
# to avoid possible scope issues related with generated files
add_library(${TGT} STATIC ${OUTPUT_FILES})
set_target_properties(${TGT} PROPERTIES LINKER_LANGUAGE CXX)
- target_include_directories(${TGT} PUBLIC "${ARG_OUTPUT_DIR}")
+ target_include_directories(${TGT} PUBLIC "${ARG_INCLUDE_DIR}")
target_link_libraries(${TGT} PUBLIC flatbuffers)
endfunction(FlatBuffers_Target)
endif(FlatBuffers_FOUND)
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake b/infra/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake
index c5f4dc9b7..09a922b67 100644
--- a/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake
+++ b/infra/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfig.cmake
@@ -4,12 +4,15 @@ function(_FlatBuffersSource_import)
return()
endif(NOT DOWNLOAD_FLATBUFFERS)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
envoption(FLATBUFFERS_1_10_URL https://github.com/google/flatbuffers/archive/v1.10.0.tar.gz)
-
- ExternalSource_Download(FLATBUFFERS DIRNAME FLATBUFFERS-1.10 ${FLATBUFFERS_1_10_URL})
+ ExternalSource_Download(FLATBUFFERS
+ DIRNAME FLATBUFFERS-1.10
+ CHECKSUM MD5=f7d19a3f021d93422b0bc287d7148cd2
+ URL ${FLATBUFFERS_1_10_URL}
+ )
set(FlatBuffersSource_DIR ${FLATBUFFERS_SOURCE_DIR} PARENT_SCOPE)
set(FlatBuffersSource_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake b/infra/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake
index 6585f21d5..6585f21d5 100644
--- a/infra/nncc/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake
+++ b/infra/cmake/packages/FlatBuffersSource-1.10/FlatBuffersSourceConfigVersion.cmake
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfig.cmake b/infra/cmake/packages/FlatBuffersSource-1.12/FlatBuffersSourceConfig.cmake
index 46935b9f7..9ee2c49f7 100644
--- a/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfig.cmake
+++ b/infra/cmake/packages/FlatBuffersSource-1.12/FlatBuffersSourceConfig.cmake
@@ -4,12 +4,15 @@ function(_FlatBuffersSource_import)
return()
endif(NOT DOWNLOAD_FLATBUFFERS)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
- envoption(FLATBUFFERS_1_8_URL https://github.com/google/flatbuffers/archive/v1.8.0.tar.gz)
-
- ExternalSource_Download(FLATBUFFERS DIRNAME FLATBUFFERS-1.8 ${FLATBUFFERS_1_8_URL})
+ envoption(FLATBUFFERS_1_12_URL https://github.com/google/flatbuffers/archive/v1.12.0.tar.gz)
+ ExternalSource_Download(FLATBUFFERS
+ DIRNAME FLATBUFFERS-1.12
+ CHECKSUM MD5=c62ffefb3d4548b127cca14ce047f16c
+ URL ${FLATBUFFERS_1_12_URL}
+ )
set(FlatBuffersSource_DIR ${FLATBUFFERS_SOURCE_DIR} PARENT_SCOPE)
set(FlatBuffersSource_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfigVersion.cmake b/infra/cmake/packages/FlatBuffersSource-1.12/FlatBuffersSourceConfigVersion.cmake
index 8cfdbf8e5..8cfdbf8e5 100644
--- a/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfigVersion.cmake
+++ b/infra/cmake/packages/FlatBuffersSource-1.12/FlatBuffersSourceConfigVersion.cmake
diff --git a/infra/nncc/cmake/packages/FlatBuffersSourceConfig.cmake b/infra/cmake/packages/FlatBuffersSourceConfig.cmake
index 63a9ccdd0..52bce6de0 100644
--- a/infra/nncc/cmake/packages/FlatBuffersSourceConfig.cmake
+++ b/infra/cmake/packages/FlatBuffersSourceConfig.cmake
@@ -4,8 +4,8 @@ function(_FlatBuffersSource_import)
return()
endif(NOT DOWNLOAD_FLATBUFFERS)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
# Each TensorFlow needs a specific version of Flatbuffers
# - TensorFlow 1.7 downloads it from https://github.com/google/flatbuffers/archive/971a68110e4.tar.gz
@@ -15,8 +15,11 @@ function(_FlatBuffersSource_import)
#
# TODO Manage multiple versions
envoption(FLATBUFFERS_URL https://github.com/google/flatbuffers/archive/v1.10.0.tar.gz)
-
- ExternalSource_Download(FLATBUFFERS ${FLATBUFFERS_URL})
+ ExternalSource_Download(FLATBUFFERS
+ DIRNAME FLATBUFFERS
+ CHECKSUM MD5=f7d19a3f021d93422b0bc287d7148cd2
+ URL ${FLATBUFFERS_URL}
+ )
set(FlatBuffersSource_DIR ${FLATBUFFERS_SOURCE_DIR} PARENT_SCOPE)
set(FlatBuffersSource_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/nncc/cmake/packages/FlatBuffersSourceConfigVersion.cmake b/infra/cmake/packages/FlatBuffersSourceConfigVersion.cmake
index ac9e22e51..ac9e22e51 100644
--- a/infra/nncc/cmake/packages/FlatBuffersSourceConfigVersion.cmake
+++ b/infra/cmake/packages/FlatBuffersSourceConfigVersion.cmake
diff --git a/infra/nncc/cmake/packages/GEMMLowpConfig.cmake b/infra/cmake/packages/GEMMLowpConfig.cmake
index f469a0a08..7cd4d05ac 100644
--- a/infra/nncc/cmake/packages/GEMMLowpConfig.cmake
+++ b/infra/cmake/packages/GEMMLowpConfig.cmake
@@ -1,5 +1,5 @@
function(_GEMMLowp_import)
- nncc_find_package(GEMMLowpSource QUIET)
+ nnas_find_package(GEMMLowpSource QUIET)
if(NOT GEMMLowpSource_FOUND)
set(GEMMLowp_FOUND FALSE PARENT_SCOPE)
diff --git a/infra/nncc/cmake/packages/GEMMLowpSourceConfig.cmake b/infra/cmake/packages/GEMMLowpSourceConfig.cmake
index a18a4cdc0..6e1cfa9c9 100644
--- a/infra/nncc/cmake/packages/GEMMLowpSourceConfig.cmake
+++ b/infra/cmake/packages/GEMMLowpSourceConfig.cmake
@@ -4,11 +4,12 @@ function(_GEMMLowpSource_import)
return()
endif(NOT DOWNLOAD_GEMMLOWP)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
- # NOTE TensorFlow 1.7 uses the following URL
- envoption(GEMMLOWP_URL https://github.com/google/gemmlowp/archive/7c7c744640ddc3d0af18fb245b4d23228813a71b.zip)
+ # NOTE TensorFlow 1.12 uses the following URL
+ # TensorFlow 1.13.1 uses the following URL
+ envoption(GEMMLOWP_URL https://github.com/google/gemmlowp/archive/38ebac7b059e84692f53e5938f97a9943c120d98.tar.gz)
ExternalSource_Download(GEMMLOWP ${GEMMLOWP_URL})
diff --git a/infra/nncc/cmake/packages/GFlagsConfig.cmake b/infra/cmake/packages/GFlagsConfig.cmake
index 891320a95..1ab445a4d 100644
--- a/infra/nncc/cmake/packages/GFlagsConfig.cmake
+++ b/infra/cmake/packages/GFlagsConfig.cmake
@@ -4,10 +4,10 @@ function(_GFlags_import)
return()
endif()
- nncc_find_package(GFlagsSource QUIET)
+ nnas_find_package(GFlagsSource QUIET)
if(GFlagsSource_FOUND)
- nncc_include(ExternalProjectTools)
+ nnas_include(ExternalProjectTools)
# build shared multi-threading gflag library
set(BUILD_SHARED_LIBS On)
set(BUILD_STATIC_LIBS Off)
diff --git a/infra/nncc/cmake/packages/GFlagsSourceConfig.cmake b/infra/cmake/packages/GFlagsSourceConfig.cmake
index 17970b0ec..3e70d89fc 100644
--- a/infra/nncc/cmake/packages/GFlagsSourceConfig.cmake
+++ b/infra/cmake/packages/GFlagsSourceConfig.cmake
@@ -4,8 +4,8 @@ function(_GFlagsSource_import)
return()
endif(NOT DOWNLOAD_GFLAGS)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
envoption(GFLAGS_URL https://github.com/gflags/gflags/archive/v2.2.1.tar.gz)
diff --git a/infra/nncc/cmake/packages/GLogConfig.cmake b/infra/cmake/packages/GLogConfig.cmake
index e5ed02cd3..e5ed02cd3 100644
--- a/infra/nncc/cmake/packages/GLogConfig.cmake
+++ b/infra/cmake/packages/GLogConfig.cmake
diff --git a/infra/cmake/packages/GTestConfig.cmake b/infra/cmake/packages/GTestConfig.cmake
new file mode 100644
index 000000000..62a15e0cc
--- /dev/null
+++ b/infra/cmake/packages/GTestConfig.cmake
@@ -0,0 +1,52 @@
+function(_GTest_build)
+ if(NOT BUILD_GTEST)
+ return()
+ endif(NOT BUILD_GTEST)
+
+ nnas_find_package(GTestSource QUIET)
+
+ if(NOT GTestSource_FOUND)
+ return()
+ endif(NOT GTestSource_FOUND)
+
+ nnas_include(ExternalBuildTools)
+ ExternalBuild_CMake(CMAKE_DIR ${GTestSource_DIR}
+ BUILD_DIR ${CMAKE_BINARY_DIR}/externals/GTEST/build
+ INSTALL_DIR ${EXT_OVERLAY_DIR}
+ IDENTIFIER "1.8.0-fix1"
+ PKG_NAME "GTEST")
+
+endfunction(_GTest_build)
+
+_GTest_build()
+
+### Find and use pre-installed Google Test
+# Note: cmake supports GTest and does not find GTestConfig.cmake or GTest-config.cmake.
+# Refer to "https://cmake.org/cmake/help/v3.5/module/FindGTest.html"
+# find_package(GTest) creates options like GTEST_FOUND, not GTest_FOUND.
+find_package(GTest)
+find_package(Threads)
+
+if(${GTEST_FOUND} AND TARGET Threads::Threads)
+ if(NOT TARGET gtest)
+ add_library(gtest INTERFACE)
+ target_include_directories(gtest INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest INTERFACE ${GTEST_LIBRARIES} Threads::Threads)
+ endif(NOT TARGET gtest)
+
+ if(NOT TARGET gtest_main)
+ add_library(gtest_main INTERFACE)
+ target_include_directories(gtest_main INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest_main INTERFACE gtest)
+ target_link_libraries(gtest_main INTERFACE ${GTEST_MAIN_LIBRARIES})
+
+ # GTest_AddTest(TGT ...) creates an executable target and registers that executable as a CMake test
+ function(GTest_AddTest TGT)
+ add_executable(${TGT} ${ARGN})
+ target_link_libraries(${TGT} gtest_main)
+ add_test(${TGT} ${TGT})
+ endfunction(GTest_AddTest)
+ endif(NOT TARGET gtest_main)
+
+ set(GTest_FOUND TRUE)
+endif(${GTEST_FOUND} AND TARGET Threads::Threads)
diff --git a/infra/nncc/cmake/packages/GTestSourceConfig.cmake b/infra/cmake/packages/GTestSourceConfig.cmake
index d7c9d53c6..8b7495fbc 100644
--- a/infra/nncc/cmake/packages/GTestSourceConfig.cmake
+++ b/infra/cmake/packages/GTestSourceConfig.cmake
@@ -4,8 +4,8 @@ function(_GTestSource_import)
return()
endif(NOT DOWNLOAD_GTEST)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
envoption(GTEST_URL https://github.com/google/googletest/archive/release-1.8.0.tar.gz)
diff --git a/infra/cmake/packages/HDF5Config.cmake b/infra/cmake/packages/HDF5Config.cmake
new file mode 100644
index 000000000..19803f1ea
--- /dev/null
+++ b/infra/cmake/packages/HDF5Config.cmake
@@ -0,0 +1,71 @@
+function(_HDF5_build)
+ if(NOT BUILD_HDF5)
+ return()
+ endif(NOT BUILD_HDF5)
+
+ nnas_find_package(HDF5Source QUIET)
+
+ if(NOT HDF5Source_FOUND)
+ return()
+ endif(NOT HDF5Source_FOUND)
+
+ nnas_include(ExternalBuildTools)
+ ExternalBuild_CMake(CMAKE_DIR ${HDF5Source_DIR}
+ BUILD_DIR ${CMAKE_BINARY_DIR}/externals/HDF5/build
+ INSTALL_DIR ${EXT_OVERLAY_DIR}
+ IDENTIFIER "1.8.16"
+ PKG_NAME "HDF5"
+ EXTRA_OPTS "-DBUILD_SHARED_LIBS:BOOL=ON"
+ "-DHDF5_BUILD_TOOLS:BOOL=ON"
+ "-DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF"
+ "-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF")
+
+endfunction(_HDF5_build)
+
+_HDF5_build()
+
+find_path(HDF5_CONFIG_DIR "hdf5-config.cmake"
+ PATHS ${EXT_OVERLAY_DIR}
+ PATH_SUFFIXES
+ cmake
+ share/cmake
+ share/cmake/hdf5
+ cmake/hdf5
+ lib/cmake/hdf5)
+
+include(${HDF5_CONFIG_DIR}/hdf5-config.cmake)
+
+unset(HDF5_INCLUDE_DIRS)
+unset(HDF5_C_INCLUDE_DIRS)
+unset(HDF5_CXX_INCLUDE_DIRS)
+unset(HDF5_HL_INCLUDE_DIRS)
+
+unset(HDF5_LIBRARIES)
+unset(HDF5_HL_LIBRARIES)
+unset(HDF5_C_LIBRARIES)
+unset(HDF5_CXX_LIBRARIES)
+unset(HDF5_C_HL_LIBRARIES)
+unset(HDF5_CXX_HL_LIBRARIES)
+
+# If user doesn't specify static or shared, set it to shared by default
+list(FIND HDF5_FIND_COMPONENTS "STATIC" _index)
+if(${_index} GREATER -1)
+ # static
+ set(_SUFFIX "-static")
+else()
+ # shared
+ set(_SUFFIX "-shared")
+endif()
+
+list(REMOVE_ITEM HDF5_FIND_COMPONENTS "static;shared")
+set(HDF5_INCLUDE_DIRS ${HDF5_INCLUDE_DIR})
+foreach(COMP HDF5_FIND_COMPONENTS)
+ set(HDF5_${COMP}_INCLUDE_DIRS ${HDF5_INCLUDE_DIR})
+endforeach()
+
+set(HDF5_LIBRARIES "hdf5${_SUFFIX}")
+set(HDF5_C_LIBRARIES "hdf5${_SUFFIX}")
+set(HDF5_CXX_LIBRARIES "hdf5_cpp${_SUFFIX}")
+set(HDF5_HL_LIBRARIES "hdf5_hl${_SUFFIX}")
+set(HDF5_C_HL_LIBRARIES "hdf5_hl${_SUFFIX}")
+set(HDF5_CXX_HL_LIBRARIES "hdf5_hl_cpp${_SUFFIX}")
diff --git a/infra/cmake/packages/HDF5SourceConfig.cmake b/infra/cmake/packages/HDF5SourceConfig.cmake
new file mode 100644
index 000000000..134efa6f4
--- /dev/null
+++ b/infra/cmake/packages/HDF5SourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_HDF5Source_import)
+ if(NOT DOWNLOAD_HDF5)
+ set(HDF5Source_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_HDF5)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(HDF5_URL https://github.com/HDFGroup/hdf5/archive/hdf5-1_8_16.tar.gz)
+
+ ExternalSource_Download(HDF5 ${HDF5_URL})
+
+ set(HDF5Source_DIR ${HDF5_SOURCE_DIR} PARENT_SCOPE)
+ set(HDF5Source_FOUND TRUE PARENT_SCOPE)
+endfunction(_HDF5Source_import)
+
+_HDF5Source_import()
diff --git a/infra/nncc/cmake/packages/LLVMConfig.cmake b/infra/cmake/packages/LLVMConfig.cmake
index 0f8faec7f..0f8faec7f 100644
--- a/infra/nncc/cmake/packages/LLVMConfig.cmake
+++ b/infra/cmake/packages/LLVMConfig.cmake
diff --git a/infra/nncc/cmake/packages/NEON2SSEConfig.cmake b/infra/cmake/packages/NEON2SSEConfig.cmake
index c7f0c294e..8add6d381 100644
--- a/infra/nncc/cmake/packages/NEON2SSEConfig.cmake
+++ b/infra/cmake/packages/NEON2SSEConfig.cmake
@@ -1,5 +1,5 @@
function(_NEON2SSE_import)
- nncc_find_package(NEON2SSESource QUIET)
+ nnas_find_package(NEON2SSESource QUIET)
if(NOT NEON2SSESource_FOUND)
set(NEON2SSE_FOUND FALSE PARENT_SCOPE)
diff --git a/infra/nncc/cmake/packages/NEON2SSESourceConfig.cmake b/infra/cmake/packages/NEON2SSESourceConfig.cmake
index f66c5cf41..bd40267a5 100644
--- a/infra/nncc/cmake/packages/NEON2SSESourceConfig.cmake
+++ b/infra/cmake/packages/NEON2SSESourceConfig.cmake
@@ -4,11 +4,14 @@ function(_NEON2SSESource_import)
return()
endif(NOT DOWNLOAD_NEON2SSE)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
- # NOTE TensorFlow 1.7 downloads NEON2SSE from the following URL
- envoption(NEON2SSE_URL https://github.com/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz)
+ # NOTE TensorFlow 1.13.1 downloads NEON2SSE from the following URL
+ # NOTE TensorFlow 2.1 downloads NEON2SSE from the following URL
+ # NOTE TensorFlow 2.2 downloads NEON2SSE from the following URL
+ # NOTE TensorFlow 2.3 downloads NEON2SSE from the following URL
+ envoption(NEON2SSE_URL https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz)
ExternalSource_Download(NEON2SSE ${NEON2SSE_URL})
diff --git a/infra/nnfw/cmake/packages/Nonius/html_report_template.g.h++ b/infra/cmake/packages/Nonius/html_report_template.g.h++
index fa159c6a4..fa159c6a4 100644
--- a/infra/nnfw/cmake/packages/Nonius/html_report_template.g.h++
+++ b/infra/cmake/packages/Nonius/html_report_template.g.h++
diff --git a/infra/cmake/packages/NoniusSourceConfig.cmake b/infra/cmake/packages/NoniusSourceConfig.cmake
new file mode 100644
index 000000000..17965f1eb
--- /dev/null
+++ b/infra/cmake/packages/NoniusSourceConfig.cmake
@@ -0,0 +1,26 @@
+function(_NoniusSource_import)
+ if(NOT ${DOWNLOAD_NONIUS})
+ set(NoniusSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ${DOWNLOAD_NONIUS})
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ set(NONIUS_URL ${EXTERNAL_DOWNLOAD_SERVER}/libnonius/nonius/archive/v1.2.0-beta.1.tar.gz)
+ ExternalSource_Download("NONIUS" ${NONIUS_URL})
+
+ if(BUILD_KBENCHMARK)
+ # Copy html_report_template.g.h++ file to externals/nonius.
+ # This header file is modified to show the html summary view according to the layer in kbenchmark.
+ execute_process(COMMAND ${CMAKE_COMMAND} -E copy
+ "${CMAKE_CURRENT_LIST_DIR}/Nonius/html_report_template.g.h++"
+ "${NoniusSource_DIR}/include/nonius/detail")
+ endif(BUILD_KBENCHMARK)
+
+ set(NoniusSource_DIR ${NONIUS_SOURCE_DIR} PARENT_SCOPE)
+ set(NoniusSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_NoniusSource_import)
+
+_NoniusSource_import()
diff --git a/infra/nncc/cmake/packages/ONNXRuntimeConfig.cmake b/infra/cmake/packages/ONNXRuntimeConfig.cmake
index cfccfff88..cfccfff88 100644
--- a/infra/nncc/cmake/packages/ONNXRuntimeConfig.cmake
+++ b/infra/cmake/packages/ONNXRuntimeConfig.cmake
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake b/infra/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake
index b60e9446e..c9fb5e490 100644
--- a/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake
+++ b/infra/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfig.cmake
@@ -4,8 +4,8 @@ function(_ONNXSource_import)
return()
endif(NOT DOWNLOAD_ONNX)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
envoption(ONNX_1_4_1_URL https://github.com/onnx/onnx/archive/v1.4.1.zip)
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake b/infra/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake
index 802b464da..802b464da 100644
--- a/infra/nncc/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake
+++ b/infra/cmake/packages/ONNXSource-1.4.1/ONNXSourceConfigVersion.cmake
diff --git a/infra/cmake/packages/ONNXSource-1.6.0/ONNXSourceConfig.cmake b/infra/cmake/packages/ONNXSource-1.6.0/ONNXSourceConfig.cmake
new file mode 100644
index 000000000..ef903f834
--- /dev/null
+++ b/infra/cmake/packages/ONNXSource-1.6.0/ONNXSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_ONNXSource_import)
+ if(NOT DOWNLOAD_ONNX)
+ set(ONNXSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_ONNX)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(ONNX_1_6_0_URL https://github.com/onnx/onnx/archive/v1.6.0.zip)
+
+ ExternalSource_Download(ONNX DIRNAME ONNX-1.6.0
+ CHECKSUM MD5=cbdc547a527f1b59c7f066c8d258b966
+ URL ${ONNX_1_6_0_URL})
+
+ set(ONNXSource_DIR ${ONNX_SOURCE_DIR} PARENT_SCOPE)
+ set(ONNXSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_ONNXSource_import)
+
+_ONNXSource_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfigVersion.cmake b/infra/cmake/packages/ONNXSource-1.6.0/ONNXSourceConfigVersion.cmake
index 0ecf9d222..5612fc61d 100644
--- a/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfigVersion.cmake
+++ b/infra/cmake/packages/ONNXSource-1.6.0/ONNXSourceConfigVersion.cmake
@@ -1,4 +1,4 @@
-set(PACKAGE_VERSION "1.3.0")
+set(PACKAGE_VERSION "1.6.0")
set(PACKAGE_VERSION_EXACT FALSE)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
set(PACKAGE_VERSION_UNSUITABLE TRUE)
diff --git a/infra/nncc/cmake/packages/ProtobufConfig.cmake b/infra/cmake/packages/ProtobufConfig.cmake
index 9064d1140..3c8d2320f 100644
--- a/infra/nncc/cmake/packages/ProtobufConfig.cmake
+++ b/infra/cmake/packages/ProtobufConfig.cmake
@@ -1,4 +1,3 @@
-# NOTE This function is unused, but remains for future reference
function(_Protobuf_module_import)
# Let's use find_package here not to export unnecessary definitions
find_package(Protobuf MODULE QUIET)
@@ -45,63 +44,31 @@ function(_Protobuf_build)
return()
endif(NOT BUILD_PROTOBUF)
- nncc_find_package(ProtobufSource QUIET)
+ nnas_find_package(ProtobufSource QUIET)
if(NOT ProtobufSource_FOUND)
# Source is not available
return()
endif(NOT ProtobufSource_FOUND)
- # TODO Introduce helper functions
- set(PROTOBUF_BUILD_DIR "${CMAKE_BINARY_DIR}/externals/PROTOBUF/build")
- set(PROTOBUF_INSTALL_DIR "${NNCC_OVERLAY_DIR}")
+ nnas_include(ExternalBuildTools)
+ ExternalBuild_CMake(CMAKE_DIR ${ProtobufSource_DIR}/cmake
+ BUILD_DIR ${CMAKE_BINARY_DIR}/externals/PROTOBUF/build
+ INSTALL_DIR ${EXT_OVERLAY_DIR}
+ BUILD_FLAGS -fPIC
+ EXTRA_OPTS -Dprotobuf_BUILD_TESTS=OFF -Dprotobuf_WITH_ZLIB=OFF
+ IDENTIFIER "3.5.2-fix1"
+ PKG_NAME "PROTOBUF")
- set(STAMP_PATH "${PROTOBUF_INSTALL_DIR}/PROTOBUF.stamp")
- set(LOG_PATH "${PROTOBUF_INSTALL_DIR}/PROTOBUF.log")
-
- if(EXISTS ${STAMP_PATH})
- return()
- endif(EXISTS ${STAMP_PATH})
-
- message(STATUS "Build Protocol Buffer from ${ProtobufSource_DIR}")
-
- file(MAKE_DIRECTORY ${PROTOBUF_BUILD_DIR})
- file(MAKE_DIRECTORY ${PROTOBUF_INSTALL_DIR})
-
- # NOTE Do NOT retry Protocol Buffer build
- file(WRITE "${STAMP_PATH}")
-
- execute_process(COMMAND ${CMAKE_COMMAND}
- -DCMAKE_INSTALL_PREFIX=${PROTOBUF_INSTALL_DIR}
- -DCMAKE_BUILD_TYPE=Release
- -DCMAKE_CXX_FLAGS="-fPIC"
- -Dprotobuf_BUILD_TESTS=OFF
- -Dprotobuf_WITH_ZLIB=OFF
- "${ProtobufSource_DIR}/cmake"
- OUTPUT_FILE ${LOG_PATH}
- ERROR_FILE ${LOG_PATH}
- WORKING_DIRECTORY ${PROTOBUF_BUILD_DIR}
- RESULT_VARIABLE CONFIGURE_EXITCODE)
-
- if(NOT CONFIGURE_EXITCODE EQUAL 0)
- message(FATAL_ERROR "Fail to configure Protocol Buffer (check '${LOG_PATH}' for details)")
- endif(NOT CONFIGURE_EXITCODE EQUAL 0)
-
- execute_process(COMMAND ${CMAKE_COMMAND} --build . -- install
- OUTPUT_FILE ${LOG_PATH}
- ERROR_FILE ${LOG_PATH}
- WORKING_DIRECTORY ${PROTOBUF_BUILD_DIR}
- RESULT_VARIABLE BUILD_AND_INSTALL_EXITCODE)
-
- if(NOT BUILD_AND_INSTALL_EXITCODE EQUAL 0)
- message(FATAL_ERROR "Fail to build/install Protocol Buffer (check '${LOG_PATH}' for details)")
- endif(NOT BUILD_AND_INSTALL_EXITCODE EQUAL 0)
-
- message(STATUS "Succeeded in building Protocol Buffer")
endfunction(_Protobuf_build)
_Protobuf_build()
-_Protobuf_import()
+
+if(USE_PROTOBUF_LEGACY_IMPORT)
+ _Protobuf_module_import()
+else(USE_PROTOBUF_LEGACY_IMPORT)
+ _Protobuf_import()
+endif(USE_PROTOBUF_LEGACY_IMPORT)
if(Protobuf_FOUND)
function(Protobuf_Generate PREFIX OUTPUT_DIR PROTO_DIR)
diff --git a/infra/nncc/cmake/packages/ProtobufSourceConfig.cmake b/infra/cmake/packages/ProtobufSourceConfig.cmake
index 89176eb61..6b35ae7dc 100644
--- a/infra/nncc/cmake/packages/ProtobufSourceConfig.cmake
+++ b/infra/cmake/packages/ProtobufSourceConfig.cmake
@@ -4,8 +4,8 @@ function(_ProtobufSource_import)
return()
endif(NOT DOWNLOAD_PROTOBUF)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
envoption(PROTOBUF_URL https://github.com/protocolbuffers/protobuf/archive/v3.5.2.tar.gz)
diff --git a/infra/cmake/packages/Pybind11Config.cmake b/infra/cmake/packages/Pybind11Config.cmake
new file mode 100644
index 000000000..b6d500496
--- /dev/null
+++ b/infra/cmake/packages/Pybind11Config.cmake
@@ -0,0 +1,22 @@
+function(_Pybind11_import)
+ nnas_find_package(Pybind11Source QUIET)
+
+ if(NOT Pybind11Source_FOUND)
+ set(Pybind11_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT Pybind11Source_FOUND)
+
+ nnas_include(ExternalBuildTools)
+ ExternalBuild_CMake(CMAKE_DIR ${Pybind11Source_DIR}
+ BUILD_DIR ${CMAKE_BINARY_DIR}/externals/PYBIND11/build
+ INSTALL_DIR ${EXT_OVERLAY_DIR}
+ IDENTIFIER "2.5.0"
+ PKG_NAME "PYBIND11"
+ EXTRA_OPTS "-DPYBIND11_TEST:BOOL=OFF")
+
+ find_path(Pybind11_INCLUDE_DIRS NAMES pybind11.h PATHS ${EXT_OVERLAY_DIR} PATH_SUFFIXES include/pybind11)
+
+ set(Pybind11_FOUND TRUE PARENT_SCOPE)
+endfunction(_Pybind11_import)
+
+_Pybind11_import()
diff --git a/infra/cmake/packages/Pybind11SourceConfig.cmake b/infra/cmake/packages/Pybind11SourceConfig.cmake
new file mode 100644
index 000000000..76f51e4d3
--- /dev/null
+++ b/infra/cmake/packages/Pybind11SourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_Pybind11Source_import)
+ if(NOT DOWNLOAD_PYBIND11)
+ set(Pybind11Source_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_PYBIND11)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(PYBIND11_URL https://github.com/pybind/pybind11/archive/v2.5.0.tar.gz)
+
+ ExternalSource_Download(PYBIND11 ${PYBIND11_URL})
+
+ set(Pybind11Source_DIR ${PYBIND11_SOURCE_DIR} PARENT_SCOPE)
+ set(Pybind11Source_FOUND TRUE PARENT_SCOPE)
+endfunction(_Pybind11Source_import)
+
+_Pybind11Source_import()
diff --git a/infra/nncc/cmake/packages/PytorchSourceConfig.cmake b/infra/cmake/packages/PytorchSourceConfig.cmake
index c28bc7c00..0212f2f4b 100644
--- a/infra/nncc/cmake/packages/PytorchSourceConfig.cmake
+++ b/infra/cmake/packages/PytorchSourceConfig.cmake
@@ -4,10 +4,10 @@ function(_PytorchSource_import)
return()
endif(NOT DOWNLOAD_PYTORCH)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
- envoption(PYTORCH_URL https://github.com/pytorch/pytorch/archive/v0.4.1.tar.gz)
+ envoption(PYTORCH_URL https://github.com/pytorch/pytorch/archive/v0.4.1.tar.gz)
ExternalSource_Download(PYTORCH ${PYTORCH_URL})
diff --git a/infra/cmake/packages/RuySourceConfig.cmake b/infra/cmake/packages/RuySourceConfig.cmake
new file mode 100644
index 000000000..4faf0bb9f
--- /dev/null
+++ b/infra/cmake/packages/RuySourceConfig.cmake
@@ -0,0 +1,21 @@
+function(_RuySource_import)
+ if(NOT ${DOWNLOAD_RUY})
+ set(RuySource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT ${DOWNLOAD_RUY})
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # NOTE Downloads ruy source used by tensorflow v2.3.0
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ envoption(RUY_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/ruy/archive/34ea9f4993955fa1ff4eb58e504421806b7f2e8f.tar.gz)
+ ExternalSource_Download(RUY
+ DIRNAME RUY
+ URL ${RUY_URL})
+
+ set(RuySource_DIR ${RUY_SOURCE_DIR} PARENT_SCOPE)
+ set(RuySource_FOUND TRUE PARENT_SCOPE)
+endfunction(_RuySource_import)
+
+_RuySource_import()
diff --git a/infra/cmake/packages/TensorFlow-1.13/TensorFlowConfig.cmake b/infra/cmake/packages/TensorFlow-1.13/TensorFlowConfig.cmake
new file mode 100644
index 000000000..8fedc9537
--- /dev/null
+++ b/infra/cmake/packages/TensorFlow-1.13/TensorFlowConfig.cmake
@@ -0,0 +1,56 @@
+set(TENSORFLOW_PREFIX "/usr" CACHE PATH "The location of pre-installed TensorFlow 1.13 library")
+set(TENSORFLOW_VERSION_REQUIRED "1.13")
+
+# TODO Build TensorFlow from the (downloaded) source
+
+function(_TensorFlow_import)
+ # Clean cache
+ unset(TensorFlow_LIB CACHE)
+ unset(TensorFlow_INCLUDE_DIR CACHE)
+ # Find the header & lib
+ find_library(TensorFlow_LIB NAMES tensorflow PATHS "${TENSORFLOW_PREFIX}/lib")
+ find_path(TensorFlow_INCLUDE_DIR NAMES tensorflow/c/c_api.h PATHS "${TENSORFLOW_PREFIX}/include")
+
+ if(NOT TensorFlow_LIB OR NOT TensorFlow_INCLUDE_DIR)
+ message(STATUS "Found TensorFlow: FALSE")
+
+ set(TensorFlow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT TensorFlow_LIB OR NOT TensorFlow_INCLUDE_DIR)
+
+ # Check TensorFlow version
+ try_run(RUN_RESULT_VAR COMPILE_RESULT_VAR
+ ${CMAKE_BINARY_DIR}
+ ${CMAKE_CURRENT_LIST_DIR}/TensorFlowVersionChecker.c
+ COMPILE_DEFINITIONS -I${TensorFlow_INCLUDE_DIR}
+ LINK_LIBRARIES ${TensorFlow_LIB}
+ ARGS ${TENSORFLOW_VERSION_REQUIRED})
+
+ if(NOT COMPILE_RESULT_VAR)
+ message(STATUS "Failed to build TensorFlowVersionChecker. Your libtensorflow may be built on different version of Ubuntu.")
+ message(STATUS "Found TensorFlow: FALSE")
+ set(TensorFlow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT COMPILE_RESULT_VAR)
+
+ if(NOT RUN_RESULT_VAR EQUAL 0)
+ message(STATUS "you need tensorflow version ${TENSORFLOW_VERSION_REQUIRED}")
+ message(STATUS "Found TensorFlow: FALSE")
+ set(TensorFlow_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT RUN_RESULT_VAR EQUAL 0)
+
+ # Add tensorflow target (if necessary)
+ if(NOT TARGET tensorflow-1.13)
+ message(STATUS "Found TensorFlow (include: ${TensorFlow_INCLUDE_DIR}, library: ${TensorFlow_LIB})")
+
+ # NOTE IMPORTED target may be more appropriate for this case
+ add_library(tensorflow-1.13 INTERFACE)
+ target_link_libraries(tensorflow-1.13 INTERFACE ${TensorFlow_LIB})
+ target_include_directories(tensorflow-1.13 INTERFACE ${TensorFlow_INCLUDE_DIR})
+ endif(NOT TARGET tensorflow-1.13)
+
+ set(TensorFlow_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlow_import)
+
+_TensorFlow_import()
diff --git a/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlow-1.13/TensorFlowConfigVersion.cmake
index ac5e9b2b9..b5a37ddba 100644
--- a/infra/nncc/cmake/packages/FlatBuffersSource-1.8/FlatBuffersSourceConfigVersion.cmake
+++ b/infra/cmake/packages/TensorFlow-1.13/TensorFlowConfigVersion.cmake
@@ -1,4 +1,4 @@
-set(PACKAGE_VERSION "1.8")
+set(PACKAGE_VERSION "1.13")
set(PACKAGE_VERSION_EXACT FALSE)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
set(PACKAGE_VERSION_UNSUITABLE TRUE)
diff --git a/infra/cmake/packages/TensorFlow-1.13/TensorFlowVersionChecker.c b/infra/cmake/packages/TensorFlow-1.13/TensorFlowVersionChecker.c
new file mode 100644
index 000000000..fcd6be122
--- /dev/null
+++ b/infra/cmake/packages/TensorFlow-1.13/TensorFlowVersionChecker.c
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <tensorflow/c/c_api.h>
+
+int main(int argc, char **argv)
+{
+ if (argc >= 2 && !strncmp(argv[1], TF_Version(), 4))
+ return 0;
+ return 255;
+}
diff --git a/infra/nncc/cmake/packages/TensorFlowConfig.cmake b/infra/cmake/packages/TensorFlowConfig.cmake
index 14d2fdf26..14d2fdf26 100644
--- a/infra/nncc/cmake/packages/TensorFlowConfig.cmake
+++ b/infra/cmake/packages/TensorFlowConfig.cmake
diff --git a/infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfig.cmake b/infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfig.cmake
new file mode 100644
index 000000000..f84675596
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_TensorFlowEigenSource_import)
+ if(NOT DOWNLOAD_EIGEN)
+ set(TensorFlowEigenSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_EIGEN)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # Exact version used by TensorFlow v2.1.0.
+ # See tensorflow/tensorflow/workspace.bzl.
+ envoption(TENSORFLOW_2_1_0_EIGEN_URL https://gitlab.com/libeigen/eigen/-/archive/4e696901f873a2347f76d931cf2f701e31e15d05/eigen-4e696901f873a2347f76d931cf2f701e31e15d05.tar.gz)
+
+ ExternalSource_Download(EIGEN DIRNAME TENSORFLOW-2.1.0-EIGEN ${TENSORFLOW_2_1_0_EIGEN_URL})
+
+ set(TensorFlowEigenSource_DIR ${EIGEN_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowEigenSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowEigenSource_import)
+
+_TensorFlowEigenSource_import()
diff --git a/infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfigVersion.cmake
new file mode 100644
index 000000000..80f43dde8
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowEigenSource-2.1.0/TensorFlowEigenSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "2.1.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfig.cmake b/infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfig.cmake
new file mode 100644
index 000000000..d50d04508
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfig.cmake
@@ -0,0 +1,21 @@
+function(_TensorFlowEigenSource_import)
+ if(NOT DOWNLOAD_EIGEN)
+ set(TensorFlowEigenSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_EIGEN)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # Exact version used by TensorFlow v2.3.0.
+ # See tensorflow/tensorflow/workspace.bzl.
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://gitlab.com")
+ envoption(TENSORFLOW_2_3_0_EIGEN_URL ${EXTERNAL_DOWNLOAD_SERVER}/libeigen/eigen/-/archive/386d809bde475c65b7940f290efe80e6a05878c4/eigen-386d809bde475c65b7940f290efe80e6a05878c4.tar.gz)
+
+ ExternalSource_Download(EIGEN DIRNAME TENSORFLOW-2.3.0-EIGEN ${TENSORFLOW_2_3_0_EIGEN_URL})
+
+ set(TensorFlowEigenSource_DIR ${EIGEN_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowEigenSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowEigenSource_import)
+
+_TensorFlowEigenSource_import()
diff --git a/infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfigVersion.cmake
new file mode 100644
index 000000000..04df5eb6d
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowEigenSource-2.3.0/TensorFlowEigenSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "2.3.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfig.cmake b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfig.cmake
new file mode 100644
index 000000000..035264fa9
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_TensorFlowGEMMLowpSource_import)
+ if(NOT DOWNLOAD_GEMMLOWP)
+ set(TensorFlowGEMMLowpSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_GEMMLOWP)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # Exact version used by TensorFlow v2.1.0.
+ # See tensorflow/tensorflow/workspace.bzl.
+ envoption(TENSORFLOW_2_1_0_GEMMLOWP_URL https://github.com/google/gemmlowp/archive/12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip)
+
+ ExternalSource_Download(GEMMLOWP DIRNAME TENSORFLOW-2.1.0-GEMMLOWP ${TENSORFLOW_2_1_0_GEMMLOWP_URL})
+
+ set(TensorFlowGEMMLowpSource_DIR ${GEMMLOWP_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowGEMMLowpSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowGEMMLowpSource_import)
+
+_TensorFlowGEMMLowpSource_import()
diff --git a/infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfigVersion.cmake
new file mode 100644
index 000000000..80f43dde8
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.1.0/TensorFlowGEMMLowpSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "2.1.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfig.cmake b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfig.cmake
new file mode 100644
index 000000000..bc13d6227
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_TensorFlowGEMMLowpSource_import)
+ if(NOT DOWNLOAD_GEMMLOWP)
+ set(TensorFlowGEMMLowpSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_GEMMLOWP)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # Exact version used by TensorFlow v2.3.0.
+ # See tensorflow/tensorflow/workspace.bzl.
+ envoption(TENSORFLOW_2_3_0_GEMMLOWP_URL https://github.com/google/gemmlowp/archive/fda83bdc38b118cc6b56753bd540caa49e570745.zip)
+
+ ExternalSource_Download(GEMMLOWP DIRNAME TENSORFLOW-2.3.0-GEMMLOWP ${TENSORFLOW_2_3_0_GEMMLOWP_URL})
+
+ set(TensorFlowGEMMLowpSource_DIR ${GEMMLOWP_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowGEMMLowpSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowGEMMLowpSource_import)
+
+_TensorFlowGEMMLowpSource_import()
diff --git a/infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfigVersion.cmake
new file mode 100644
index 000000000..04df5eb6d
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowGEMMLowpSource-2.3.0/TensorFlowGEMMLowpSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "2.3.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.12/Lite/CMakeLists.txt b/infra/cmake/packages/TensorFlowLite-1.13.1/Lite/CMakeLists.txt
index 068022fcf..c35617497 100644
--- a/infra/nncc/cmake/packages/TensorFlowLite-1.12/Lite/CMakeLists.txt
+++ b/infra/cmake/packages/TensorFlowLite-1.13.1/Lite/CMakeLists.txt
@@ -10,9 +10,14 @@
#
message(STATUS "Build TensorFlow Lite from ${TensorFlowSource_DIR}")
-set(TensorFlowLiteSource_DIR ${TensorFlowSource_DIR}/tensorflow/contrib/lite)
+set(TensorFlowLiteSource_DIR ${TensorFlowSource_DIR}/tensorflow/lite)
-file(GLOB CORE_SRCS "${TensorFlowLiteSource_DIR}/*.c" "${TensorFlowLiteSource_DIR}/*.cc" "${TensorFlowLiteSource_DIR}/c/*.c" "${TensorFlowLiteSource_DIR}/core/api/*.cc")
+file(GLOB CORE_SRCS
+ "${TensorFlowLiteSource_DIR}/*.c"
+ "${TensorFlowLiteSource_DIR}/*.cc"
+ "${TensorFlowLiteSource_DIR}/c/*.c"
+ "${TensorFlowLiteSource_DIR}/core/*.cc"
+ "${TensorFlowLiteSource_DIR}/core/api/*.cc")
file(GLOB_RECURSE CORE_TESTS "${TensorFlowLiteSource_DIR}/*test*.cc")
list(REMOVE_ITEM CORE_SRCS ${CORE_TESTS})
@@ -29,13 +34,13 @@ include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG(-Wno-extern-c-compat COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
-add_library(tensorflowlite-1.12 ${SRCS})
-set_target_properties(tensorflowlite-1.12 PROPERTIES POSITION_INDEPENDENT_CODE ON)
-target_include_directories(tensorflowlite-1.12 PUBLIC ${TensorFlowSource_DIR})
-target_include_directories(tensorflowlite-1.12 PUBLIC ${FlatBuffersSource_DIR}/include)
-target_compile_options(tensorflowlite-1.12 PUBLIC -Wno-ignored-attributes)
+add_library(tensorflowlite-1.13.1 ${SRCS})
+set_target_properties(tensorflowlite-1.13.1 PROPERTIES POSITION_INDEPENDENT_CODE ON)
+target_include_directories(tensorflowlite-1.13.1 PUBLIC ${TensorFlowSource_DIR})
+target_include_directories(tensorflowlite-1.13.1 PUBLIC ${FlatBuffersSource_DIR}/include)
+target_compile_options(tensorflowlite-1.13.1 PUBLIC -Wno-ignored-attributes)
if(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
- target_compile_options(tensorflowlite-1.12 PUBLIC -Wno-extern-c-compat)
+ target_compile_options(tensorflowlite-1.13.1 PUBLIC -Wno-extern-c-compat)
endif(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
-target_compile_definitions(tensorflowlite-1.12 PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
-target_link_libraries(tensorflowlite-1.12 eigen gemmlowp neon2sse farmhash abseil dl)
+target_compile_definitions(tensorflowlite-1.13.1 PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
+target_link_libraries(tensorflowlite-1.13.1 eigen gemmlowp neon2sse farmhash abseil dl)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfig.cmake b/infra/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfig.cmake
index 2f4ff0a46..2c6bd9f7a 100644
--- a/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfig.cmake
+++ b/infra/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfig.cmake
@@ -1,60 +1,57 @@
function(_TensorFlowLite_import)
- nncc_find_package(TensorFlowSource EXACT 1.12 QUIET)
+ nnas_find_package(TensorFlowSource EXACT 1.13.1 QUIET)
if(NOT TensorFlowSource_FOUND)
set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT TensorFlowSource_FOUND)
- # TensorFlow 1.12 downloads FlatBuffers from https://github.com/google/flatbuffers/archive/1f5eae5d6a1.tar.gz
- #
- # Let's use 1.10 released in 2018.10 (compatible with 1f5eae5d6a1).
- nncc_find_package(FlatBuffersSource EXACT 1.10 QUIET)
+ nnas_find_package(FlatBuffersSource EXACT 1.10 QUIET)
if(NOT FlatBuffersSource_FOUND)
set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT FlatBuffersSource_FOUND)
- nncc_find_package(Farmhash QUIET)
+ nnas_find_package(Farmhash QUIET)
if(NOT Farmhash_FOUND)
set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT Farmhash_FOUND)
- nncc_find_package(Eigen QUIET)
+ nnas_find_package(Eigen QUIET)
if(NOT Eigen_FOUND)
set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT Eigen_FOUND)
- nncc_find_package(GEMMLowp QUIET)
+ nnas_find_package(GEMMLowp QUIET)
if(NOT GEMMLowp_FOUND)
set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT GEMMLowp_FOUND)
- nncc_find_package(NEON2SSE QUIET)
+ nnas_find_package(NEON2SSE QUIET)
if(NOT NEON2SSE_FOUND)
set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT NEON2SSE_FOUND)
- nncc_find_package(Abseil QUIET)
+ nnas_find_package(Abseil QUIET)
if(NOT Abseil_FOUND)
set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
return()
endif(NOT Abseil_FOUND)
- if(NOT TARGET tensorflowlite-1.12)
- nncc_include(ExternalProjectTools)
- add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Lite" tflite-1.12)
- endif(NOT TARGET tensorflowlite-1.12)
+ if(NOT TARGET tensorflowlite-1.13.1)
+ nnas_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Lite" tflite-1.13.1)
+ endif(NOT TARGET tensorflowlite-1.13.1)
set(TensorFlowLite_FOUND TRUE PARENT_SCOPE)
endfunction(_TensorFlowLite_import)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfigVersion.cmake b/infra/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfigVersion.cmake
index 4a57b655b..ed79ecd91 100644
--- a/infra/nncc/cmake/packages/TensorFlowLite-1.12/TensorFlowLiteConfigVersion.cmake
+++ b/infra/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfigVersion.cmake
@@ -1,4 +1,4 @@
-set(PACKAGE_VERSION "1.12")
+set(PACKAGE_VERSION "1.13.1")
set(PACKAGE_VERSION_EXACT FALSE)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
set(PACKAGE_VERSION_UNSUITABLE TRUE)
diff --git a/infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfig.cmake b/infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfig.cmake
new file mode 100644
index 000000000..3dbf05ece
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfig.cmake
@@ -0,0 +1,20 @@
+function(_TensorFlowRuySource_import)
+ if(NOT DOWNLOAD_RUY)
+ set(TensorFlowRuySource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_RUY)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # Exact version used by TensorFlow v2.3.0.
+ # See tensorflow/third_party/ruy/workspace.bzl
+ envoption(TENSORFLOW_2_3_0_RUY_URL https://github.com/google/ruy/archive/34ea9f4993955fa1ff4eb58e504421806b7f2e8f.zip)
+
+ ExternalSource_Download(RUY DIRNAME TENSORFLOW-2.3.0-RUY ${TENSORFLOW_2_3_0_RUY_URL})
+
+ set(TensorFlowRuySource_DIR ${RUY_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowRuySource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowRuySource_import)
+
+_TensorFlowRuySource_import()
diff --git a/infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfigVersion.cmake
new file mode 100644
index 000000000..04df5eb6d
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowRuySource-2.3.0/TensorFlowRuySourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "2.3.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfig.cmake b/infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfig.cmake
new file mode 100644
index 000000000..d837af731
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfig.cmake
@@ -0,0 +1,19 @@
+function(_TensorFlowSource_import)
+ if(NOT DOWNLOAD_TENSORFLOW)
+ set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_TENSORFLOW)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
+ envoption(TENSORFLOW_1_13_1_URL ${EXTERNAL_DOWNLOAD_SERVER}/tensorflow/tensorflow/archive/v1.13.1.tar.gz)
+
+ ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-1.13.1 ${TENSORFLOW_1_13_1_URL})
+
+ set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowSource_import)
+
+_TensorFlowSource_import()
diff --git a/infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfigVersion.cmake
new file mode 100644
index 000000000..1a17dc8a9
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowSource-1.13.1/TensorFlowSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "1.13.1")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake b/infra/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake
index f9df7dc18..bcdf9f28c 100644
--- a/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake
+++ b/infra/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfig.cmake
@@ -4,8 +4,8 @@ function(_TensorFlowSource_import)
return()
endif(NOT DOWNLOAD_TENSORFLOW)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
envoption(TENSORFLOW_1_14_URL https://github.com/tensorflow/tensorflow/archive/v1.14.0.tar.gz)
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake
index e9d7db2be..e9d7db2be 100644
--- a/infra/nncc/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake
+++ b/infra/cmake/packages/TensorFlowSource-1.14/TensorFlowSourceConfigVersion.cmake
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfig.cmake b/infra/cmake/packages/TensorFlowSource-2.1.0/TensorFlowSourceConfig.cmake
index b3adef052..0d2a95056 100644
--- a/infra/nncc/cmake/packages/TensorFlowSource-1.12/TensorFlowSourceConfig.cmake
+++ b/infra/cmake/packages/TensorFlowSource-2.1.0/TensorFlowSourceConfig.cmake
@@ -4,12 +4,12 @@ function(_TensorFlowSource_import)
return()
endif(NOT DOWNLOAD_TENSORFLOW)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
- envoption(TENSORFLOW_1_12_URL https://github.com/tensorflow/tensorflow/archive/v1.12.0.tar.gz)
+ envoption(TENSORFLOW_2_1_0_URL https://github.com/tensorflow/tensorflow/archive/v2.1.0.tar.gz)
- ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-1.12 ${TENSORFLOW_1_12_URL})
+ ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-2.1.0 ${TENSORFLOW_2_1_0_URL})
set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowSource-2.1.0/TensorFlowSourceConfigVersion.cmake
index b1d5282b2..80f43dde8 100644
--- a/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfigVersion.cmake
+++ b/infra/cmake/packages/TensorFlowSource-2.1.0/TensorFlowSourceConfigVersion.cmake
@@ -1,4 +1,4 @@
-set(PACKAGE_VERSION "1.7")
+set(PACKAGE_VERSION "2.1.0")
set(PACKAGE_VERSION_EXACT FALSE)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
set(PACKAGE_VERSION_UNSUITABLE TRUE)
diff --git a/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfig.cmake b/infra/cmake/packages/TensorFlowSource-2.2.0/TensorFlowSourceConfig.cmake
index 11dbf01c5..71220d743 100644
--- a/infra/nncc/cmake/packages/TensorFlowSource-1.7/TensorFlowSourceConfig.cmake
+++ b/infra/cmake/packages/TensorFlowSource-2.2.0/TensorFlowSourceConfig.cmake
@@ -4,12 +4,12 @@ function(_TensorFlowSource_import)
return()
endif(NOT DOWNLOAD_TENSORFLOW)
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
- envoption(TENSORFLOW_URL https://github.com/tensorflow/tensorflow/archive/v1.7.0.tar.gz)
+ envoption(TENSORFLOW_2_2_0_URL https://github.com/tensorflow/tensorflow/archive/v2.2.0.tar.gz)
- ExternalSource_Download(TENSORFLOW ${TENSORFLOW_URL})
+ ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-2.2.0 ${TENSORFLOW_2_2_0_URL})
set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/cmake/packages/TensorFlowSource-2.2.0/TensorFlowSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowSource-2.2.0/TensorFlowSourceConfigVersion.cmake
new file mode 100644
index 000000000..8269dc5d5
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowSource-2.2.0/TensorFlowSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "2.2.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/cmake/packages/TensorFlowSource-2.3.0-rc0Config.cmake b/infra/cmake/packages/TensorFlowSource-2.3.0-rc0Config.cmake
new file mode 100644
index 000000000..82df579a1
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowSource-2.3.0-rc0Config.cmake
@@ -0,0 +1,21 @@
+# find_package rejects version with extra string like "2.3.0-rc0"
+#
+# TODO Find a better way
+function(_import)
+ if(NOT DOWNLOAD_TENSORFLOW)
+ set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_TENSORFLOW)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(TENSORFLOW_2_3_0_RC0_URL https://github.com/tensorflow/tensorflow/archive/v2.3.0-rc0.tar.gz)
+
+ ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-2.3.0-RC0 ${TENSORFLOW_2_3_0_RC0_URL})
+
+ set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_import)
+
+_import()
diff --git a/infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfig.cmake b/infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfig.cmake
new file mode 100644
index 000000000..5c3a0f8cc
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfig.cmake
@@ -0,0 +1,18 @@
+function(_TensorFlowSource_import)
+ if(NOT DOWNLOAD_TENSORFLOW)
+ set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT DOWNLOAD_TENSORFLOW)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ envoption(TENSORFLOW_2_3_0_URL https://github.com/tensorflow/tensorflow/archive/v2.3.0.tar.gz)
+
+ ExternalSource_Download(TENSORFLOW DIRNAME TENSORFLOW-2.3.0 ${TENSORFLOW_2_3_0_URL})
+
+ set(TensorFlowSource_DIR ${TENSORFLOW_SOURCE_DIR} PARENT_SCOPE)
+ set(TensorFlowSource_FOUND TRUE PARENT_SCOPE)
+endfunction(_TensorFlowSource_import)
+
+_TensorFlowSource_import()
diff --git a/infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfigVersion.cmake b/infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfigVersion.cmake
new file mode 100644
index 000000000..04df5eb6d
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowSource-2.3.0/TensorFlowSourceConfigVersion.cmake
@@ -0,0 +1,10 @@
+set(PACKAGE_VERSION "2.3.0")
+set(PACKAGE_VERSION_EXACT FALSE)
+set(PACKAGE_VERSION_COMPATIBLE FALSE)
+set(PACKAGE_VERSION_UNSUITABLE TRUE)
+
+if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ set(PACKAGE_VERSION_UNSUITABLE FALSE)
+endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/cmake/packages/TensorFlowVersionChecker.c b/infra/cmake/packages/TensorFlowVersionChecker.c
new file mode 100644
index 000000000..3759c2d56
--- /dev/null
+++ b/infra/cmake/packages/TensorFlowVersionChecker.c
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <tensorflow/c/c_api.h>
+
+int main(int argc, char **argv)
+{
+ if (argc >= 2 && !strcmp(argv[1], TF_Version()))
+ return 0;
+ return 255;
+}
diff --git a/infra/command/build-docker-image b/infra/command/build-docker-image
index 5b1dbb5e1..f05266b58 100644
--- a/infra/command/build-docker-image
+++ b/infra/command/build-docker-image
@@ -5,41 +5,48 @@ function Usage()
echo "Usage: $0 $(basename ${BASH_SOURCE[0]}) [OPTIONS]"
echo ""
echo "Options:"
- echo " --tizen Build docker image for tizen build"
+ echo " --codename ubuntu codename, default image name is nnfw/one-devtools:[codename]"
echo "Options can use as docker build option:"
docker build --help
}
-DOCKER_FILE_RPATH="infra/docker/Dockerfile"
+DOCKER_FILE_RPATH_BASE="infra/docker"
DOCKER_BUILD_ARGS=()
-# Handle argument for this script
-# Set default docker image name, tag
-for i in "$@"
+# Default setting
+UBUNTU_CODENAME="xenial"
+DOCKER_TAG="latest"
+
+while [[ $# -gt 0 ]]
do
- case ${i} in
+ arg="$1"
+ # Handle argument for this script
+ # Set default docker image name, tag
+ case $arg in
-h|--help|help)
Usage
exit 1
;;
- esac
-done
-
-DOCKER_BUILD_ARGS+="-t ${DOCKER_IMAGE_NAME:-nnas}"
-
-# Argument for docker build commands
-for i in "$@"
-do
- case ${i} in
- -h|--help|help)
- # Already handled argument
+ --codename)
+ UBUNTU_CODENAME=$2
+ DOCKER_TAG=$2
+ shift 2
+ ;;
+ -t|--tag)
+ DOCKER_IMAGE_NAME="$2"
+ shift 2
;;
*)
- DOCKER_BUILD_ARGS+=(${i})
+ DOCKER_BUILD_ARGS+=(${1})
+ shift
;;
esac
done
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw/one-devtools:$DOCKER_TAG}
+DOCKER_FILE_RPATH=$DOCKER_FILE_RPATH_BASE/$UBUNTU_CODENAME/Dockerfile
+DOCKER_BUILD_ARGS+=("-t ${DOCKER_IMAGE_NAME}")
+
docker build --build-arg http_proxy="${http_proxy}" \
--build-arg https_proxy="${https_proxy}" \
${DOCKER_BUILD_ARGS[@]} \
diff --git a/infra/command/create-package b/infra/command/create-package
new file mode 100644
index 000000000..c42667b6b
--- /dev/null
+++ b/infra/command/create-package
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+source "${NNAS_PROJECT_PATH}/infra/packaging/build"
diff --git a/infra/command/docker-run b/infra/command/docker-run
new file mode 100644
index 000000000..9a186b3d2
--- /dev/null
+++ b/infra/command/docker-run
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+import "docker.configuration"
+USER_MODE=0
+
+if [[ $1 == '--user' ]]; then
+ DOCKER_RUN_OPTS+=" -u $(stat -c "%u" $NNAS_PROJECT_PATH):$(stat -c "%g" $NNAS_PROJECT_PATH)"
+ USER_MODE=1
+ shift
+fi
+
+docker run ${DOCKER_RUN_OPTS} ${DOCKER_ENV_VARS} ${DOCKER_VOLUMES} ${DOCKER_IMAGE_NAME} "$@"
+EXITCODE=$?
+
+if [ $USER_MODE -eq 0 ]; then
+ docker_cleanup
+fi
+
+exit ${EXITCODE}
diff --git a/infra/command/doxygen b/infra/command/doxygen
new file mode 100644
index 000000000..e02d3b0da
--- /dev/null
+++ b/infra/command/doxygen
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+import "build.configuration"
+
+pushd ${NNAS_PROJECT_PATH} > /dev/null
+
+# Modify Doxyfile for custom config
+# OUTPUT_DIRECTORY: Use workspace for doxygen html storage
+# Generated html is in ${NNAS_PROJECT_PATH}/${BUILD_WORKSPACE_RPATH}/doxygen/html/
+# EXCLUDE: Exclude custom workspace
+mkdir -p ${BUILD_WORKSPACE_RPATH}/doxygen
+cp infra/doxygen/Doxyfile ${BUILD_WORKSPACE_RPATH}/doxygen/Doxyfile
+echo "OUTPUT_DIRECTORY = ${BUILD_WORKSPACE_RPATH}/doxygen" >> ${BUILD_WORKSPACE_RPATH}/doxygen/Doxyfile
+echo "EXCLUDE += ${BUILD_WORKSPACE_RPATH}" >> ${BUILD_WORKSPACE_RPATH}/doxygen/Doxyfile
+
+doxygen ${BUILD_WORKSPACE_RPATH}/doxygen/Doxyfile
+
+popd > /dev/null
diff --git a/infra/command/format b/infra/command/format
index 1015c4473..7f37e069d 100644
--- a/infra/command/format
+++ b/infra/command/format
@@ -2,7 +2,56 @@
INVALID_EXIT=0
FILES_TO_CHECK=()
+DIRECTORIES_TO_BE_TESTED=()
DIRECTORIES_NOT_TO_BE_TESTED=()
+CLANG_FORMAT_CANDIDATES=()
+PATCH_FILE=format.patch
+CHECK_DIFF_ONLY="0"
+CHECK_STAGED_ONLY="0"
+
+function Usage()
+{
+ echo "Usage: $0 $(basename ${BASH_SOURCE[0]}) [OPTIONS] [<file|dir> ...]"
+ echo "If no arguments are specified, it formats all nnas codes"
+ echo "If <file>s are given, it reformats the files"
+ echo ""
+ echo "Options:"
+ echo " --clang-format <TOOL> clang format bin (default: clang-format-3.9, clang-format)"
+ echo " --diff-only check diff files with master"
+ echo " --staged-only check git staged files"
+}
+
+while [[ $# -gt 0 ]]
+do
+ arg="$1"
+ case $arg in
+ -h|--help|help)
+ Usage
+ exit 0
+ ;;
+ --clang-format)
+ CLANG_FORMAT_CANDIDATES=($2)
+ shift 2
+ ;;
+ --clang-format=*)
+ CLANG_FORMAT_CANDIDATES=(${1#*=})
+ shift
+ ;;
+ --staged-only)
+ CHECK_STAGED_ONLY="1"
+ CHECK_DIFF_ONLY="1"
+ shift
+ ;;
+ --diff-only)
+ CHECK_DIFF_ONLY="1"
+ shift
+ ;;
+ *)
+ DIRECTORIES_TO_BE_TESTED+=($1)
+ shift
+ ;;
+ esac
+done
function pushd () {
command pushd "$@" > /dev/null
@@ -16,10 +65,28 @@ function command_exists() {
command -v $1 > /dev/null 2>&1
}
-function check_newline() {
+function exclude_symbolic_links() {
# Check all files (CMakeLists.txt, *.cl, ... not only for C++, Python)
if [[ ${#FILES_TO_CHECK} -ne 0 ]]; then
- CRCHECK=$(file ${FILES_TO_CHECK} | grep 'with CR')
+ FILES_EXCLUDE_SYMLINKS=$(file ${FILES_TO_CHECK} | grep -v "symbolic link" | cut -d':' -f1)
+ FILES_TO_CHECK=${FILES_EXCLUDE_SYMLINKS}
+ fi
+}
+
+function check_newline() {
+ FILES_TO_CHECK_CR=()
+ for f in ${FILES_TO_CHECK[@]}; do
+ # Manually ignore style checking
+ if [[ ${f} == !(*.svg|*.pdf|*.png) ]]; then
+ FILES_TO_CHECK_CR+=("${f}")
+ fi
+ done
+
+ # Check all files (CMakeLists.txt, *.cl, ... not only for C++, Python)
+ if [[ ${#FILES_TO_CHECK_CR} -ne 0 ]]; then
+ CRCHECK=$(file ${FILES_TO_CHECK_CR} | grep 'with CR')
+ else
+ return
fi
FILES_TO_FIX=($(echo "$CRCHECK" | grep "with CRLF line" | cut -d':' -f1))
for f in ${FILES_TO_FIX[@]}; do
@@ -29,6 +96,12 @@ function check_newline() {
for f in ${FILES_TO_FIX[@]}; do
tr '\r' '\n' < $f > $f.fixed && cat $f.fixed > $f && rm $f.fixed
done
+ # Check no new line at end of file
+ for f in ${FILES_TO_CHECK_CR[@]}; do
+ if diff /dev/null "$f" | tail -1 | grep '^\\ No newline' > /dev/null; then
+ echo >> "$f"
+ fi
+ done
}
function check_permission() {
@@ -36,7 +109,7 @@ function check_permission() {
FILES_TO_CHECK_PERMISSION=()
for f in ${FILES_TO_CHECK[@]}; do
# Manually ignore permission checking
- if [[ ${f} == !(nnas|nnfw|nncc|*.sh|*.py) ]]; then
+ if [[ ${f} == !(nnas|nnfw|nncc|*.sh|*.py|*/gradlew) ]] || [[ ${f} == tests/nnapi/specs/**/*.py ]]; then
FILES_TO_CHECK_PERMISSION+=("${f}")
fi
done
@@ -58,13 +131,13 @@ function check_cpp_files() {
return
fi
- CLANG_FORMAT_CANDIDATES=()
- CLANG_FORMAT_CANDIDATES+=("clang-format")
CLANG_FORMAT_CANDIDATES+=("clang-format-3.9")
+ CLANG_FORMAT_CANDIDATES+=("clang-format")
for CLANG_FORMAT_CANDIDATE in ${CLANG_FORMAT_CANDIDATES[@]}; do
if command_exists ${CLANG_FORMAT_CANDIDATE} ; then
CLANG_FORMAT="${CLANG_FORMAT_CANDIDATE}"
+ break
fi
done
@@ -95,7 +168,7 @@ function check_cpp_files() {
done
if [[ ${#FILES_TO_CHECK_CPP} -ne 0 ]]; then
- clang-format-3.9 -i ${FILES_TO_CHECK_CPP[@]}
+ ${CLANG_FORMAT} -i ${FILES_TO_CHECK_CPP[@]}
EXIT_CODE=$?
if [[ ${EXIT_CODE} -ne 0 ]]; then
INVALID_EXIT=${EXIT_CODE}
@@ -122,6 +195,14 @@ function check_python_files() {
if [[ ${f} == *.py ]]; then
FILES_TO_CHECK_PYTHON+=("${f}")
fi
+ # Exceptional case: one-cmds don't have '.py' extension
+ if [[ ${f} == compiler/one-cmds/* ]]; then
+ # Ignore non-python source (cmake, etc)
+ # Ignore shell script: one-prepare-venv
+ if [[ ${f} != compiler/one-cmds/*.* ]] && [[ ${f} != compiler/one-cmds/one-prepare-venv ]]; then
+ FILES_TO_CHECK_PYTHON+=("${f}")
+ fi
+ fi
done
for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
skip=${s#'.'/}/
@@ -129,7 +210,7 @@ function check_python_files() {
done
if [[ ${#FILES_TO_CHECK_PYTHON} -ne 0 ]]; then
- yapf -i --style='{based_on_style: pep8, column_limit: 90}' ${FILES_TO_CHECK_PYTHON[@]}
+ yapf -i ${FILES_TO_CHECK_PYTHON[@]}
EXIT_CODE=$?
if [[ ${EXIT_CODE} -ne 0 ]]; then
INVALID_EXIT=${EXIT_CODE}
@@ -139,15 +220,15 @@ function check_python_files() {
pushd ${NNAS_PROJECT_PATH}
-if [ -n "$(git diff)" ]; then
+if [[ -n "$(git diff)" ]] && { [[ "${CHECK_DIFF_ONLY}" != "1" ]] || [[ "${CHECK_STAGED_ONLY}" != "1" ]]; }; then
echo "[WARNING] Commit all the changes before running format check"
- echo " format.patch file will contain unstaged files"
+ echo " ${PATCH_FILE} file will contain unstaged files"
fi
__Check_CPP=${CHECK_CPP:-"1"}
__Check_PYTHON=${CHECK_PYTHON:-"1"}
-FILES_TO_CHECK=$(git ls-files -co --exclude-standard)
+FILES_TO_CHECK=$(git ls-files -c --exclude-standard ${DIRECTORIES_TO_BE_TESTED[@]})
if [[ "${CHECK_DIFF_ONLY}" = "1" ]]; then
MASTER_EXIST=$(git rev-parse --verify master)
CURRENT_BRANCH=$(git branch | grep \* | cut -d ' ' -f2-)
@@ -157,7 +238,11 @@ if [[ "${CHECK_DIFF_ONLY}" = "1" ]]; then
elif [[ "${CURRENT_BRANCH}" = "master" ]]; then
echo "Current branch is master"
else
- FILES_TO_CHECK=$(git diff --name-only --diff-filter=d HEAD~${DIFF_COMMITS}..HEAD)
+ if [[ "${CHECK_STAGED_ONLY}" = "1" ]]; then
+ FILES_TO_CHECK=$(git diff --staged --name-only --diff-filter=d)
+ else
+ FILES_TO_CHECK=$(git diff --name-only --diff-filter=d HEAD~${DIFF_COMMITS})
+ fi
fi
fi
@@ -165,12 +250,19 @@ for DIR_NOT_TO_BE_TESTED in $(git ls-files -co --exclude-standard '*/.FORMATDENY
DIRECTORIES_NOT_TO_BE_TESTED+=($(dirname "${DIR_NOT_TO_BE_TESTED}"))
done
+exclude_symbolic_links
check_newline
check_permission
check_cpp_files
check_python_files
-DIFF=$(git diff | tee format.patch)
+if [[ "${CHECK_DIFF_ONLY}" = "1" ]] && [[ "${CHECK_STAGED_ONLY}" = "1" ]]; then
+ if [[ ! -z "${FILES_TO_CHECK}" ]]; then
+ DIFF=$(git diff ${FILES_TO_CHECK} | tee ${PATCH_FILE})
+ fi
+else
+ DIFF=$(git diff | tee ${PATCH_FILE})
+fi
popd
@@ -186,9 +278,9 @@ if [[ ! -z "${CRCHECK}" ]]; then
echo "${CRCHECK}"
fi
-if [[ ${PATCHFILE_SIZE} -ne 0 ]]; then
+if [[ -s ${PATCH_FILE} ]]; then
echo "[FAILED] Format checker failed and update code to follow convention."
- echo " You can find changes in format.patch"
+ echo " You can find changes in ${PATCH_FILE}"
fi
if [[ ${INVALID_EXIT} -ne 0 ]]; then
diff --git a/infra/command/gen-coverage-report b/infra/command/gen-coverage-report
index 5f928eca5..c3a8202e7 100644
--- a/infra/command/gen-coverage-report
+++ b/infra/command/gen-coverage-report
@@ -44,10 +44,11 @@ COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.info"
HTML_PATH="${OUTPUT_PATH}/html"
CANDIDATES=()
+SRC_PREFIX=${SRC_PREFIX:-${NNAS_PROJECT_PATH}}
for CANDIDATE in "$@";
do
- CANDIDATES+=("${NNAS_PROJECT_PATH}/${CANDIDATE}/*")
+ CANDIDATES+=("${SRC_PREFIX}/${CANDIDATE}/*")
done
# Capture initial zero coverage data
@@ -69,6 +70,10 @@ done
"${LCOV_PATH}" -r "${EXTRACTED_COVERAGE_INFO_PATH}" -o "${EXCLUDED_COVERAGE_INFO_PATH}" \
'*.test.cpp'
+# Exclude flatbuffer generated files from coverage report
+"${LCOV_PATH}" -r "${EXTRACTED_COVERAGE_INFO_PATH}" -o "${EXCLUDED_COVERAGE_INFO_PATH}" \
+ '*_schema_generated.h'
+
# Final coverage data
cp -v ${EXCLUDED_COVERAGE_INFO_PATH} ${COVERAGE_INFO_PATH}
diff --git a/infra/command/install-githooks b/infra/command/install-githooks
index e624aa6d7..909a16542 100644
--- a/infra/command/install-githooks
+++ b/infra/command/install-githooks
@@ -1,15 +1,65 @@
#!/usr/bin/env bash
+function Usage()
+{
+ echo "Usage: $0 $(basename ${BASH_SOURCE[0]}) [<OPTIONS>]"
+ echo ""
+ echo "Options:"
+ echo " --no-pre-push don't install pre-push hook"
+ echo " --no-pre-commit don't install pre-commit hook"
+}
+
+SKIP_PREPUSH_INSTALL="0"
+SKIP_PRECOMMIT_INSTALL="0"
+
+while [[ $# -gt 0 ]]
+do
+ arg="$1"
+ case $arg in
+ -h|--help|help)
+ Usage
+ exit 1
+ ;;
+ --no-pre-push)
+ SKIP_PREPUSH_INSTALL="1"
+ shift
+ ;;
+ --no-pre-commit)
+ SKIP_PRECOMMIT_INSTALL="1"
+ shift
+ ;;
+ *)
+ echo "ERROR: invalid option"
+ exit 255
+ ;;
+ esac
+done
+
REPO_HOOKS_PATH=$NNAS_PROJECT_PATH/infra/git-hooks
GIT_HOOKS_PATH=$NNAS_PROJECT_PATH/.git/hooks
-# Create symbolic links to hooks dir
-if [ -e $GIT_HOOKS_PATH/pre-push ]; then
- echo "Backup old $GIT_HOOKS_PATH/pre-push to $GIT_HOOKS_PATH/pre-push~"
- mv -v $GIT_HOOKS_PATH/pre-push $GIT_HOOKS_PATH/pre-push~
-elif [ -h $GIT_HOOKS_PATH/pre-push ]; then
- ls -l $GIT_HOOKS_PATH/pre-push
- echo "Remove broken symlink $GIT_HOOKS_PATH/pre-push"
- rm -v $GIT_HOOKS_PATH/pre-push
+if [ $SKIP_PREPUSH_INSTALL == "0" ]; then
+ # Create symbolic links to hooks dir
+ if [ -e $GIT_HOOKS_PATH/pre-push ]; then
+ echo "Backup old $GIT_HOOKS_PATH/pre-push to $GIT_HOOKS_PATH/pre-push~"
+ mv -v $GIT_HOOKS_PATH/pre-push $GIT_HOOKS_PATH/pre-push~
+ elif [ -h $GIT_HOOKS_PATH/pre-push ]; then
+ ls -l $GIT_HOOKS_PATH/pre-push
+ echo "Remove broken symlink $GIT_HOOKS_PATH/pre-push"
+ rm -v $GIT_HOOKS_PATH/pre-push
+ fi
+ ln -sv $REPO_HOOKS_PATH/pre-push.sh $GIT_HOOKS_PATH/pre-push
+fi
+
+if [ $SKIP_PRECOMMIT_INSTALL == "0" ]; then
+ # Create symbolic links to hooks dir
+ if [ -e $GIT_HOOKS_PATH/pre-commit ]; then
+ echo "Backup old $GIT_HOOKS_PATH/pre-commit to $GIT_HOOKS_PATH/pre-commit~"
+ mv -v $GIT_HOOKS_PATH/pre-commit $GIT_HOOKS_PATH/pre-commit~
+ elif [ -h $GIT_HOOKS_PATH/pre-commit ]; then
+ ls -l $GIT_HOOKS_PATH/pre-commit
+ echo "Remove broken symlink $GIT_HOOKS_PATH/pre-commit"
+ rm -v $GIT_HOOKS_PATH/pre-commit
+ fi
+ ln -sv $REPO_HOOKS_PATH/pre-commit.sh $GIT_HOOKS_PATH/pre-commit
fi
-ln -sv $REPO_HOOKS_PATH/pre-push.sh $GIT_HOOKS_PATH/pre-push
diff --git a/infra/command/pylint b/infra/command/pylint
index d20f89169..900156086 100644
--- a/infra/command/pylint
+++ b/infra/command/pylint
@@ -6,8 +6,10 @@ __Check_PYLINT=${CHECK_PYLINT:-"1"}
DIRECTORIES_NOT_TO_BE_TESTED=()
-for DIR_NOT_TO_BE_TESTED in $(find -name '.FORMATDENY' -exec dirname {} \;); do
- DIRECTORIES_NOT_TO_BE_TESTED+=("$DIR_NOT_TO_BE_TESTED")
+pushd ${NNAS_PROJECT_PATH} > /dev/null
+
+for DIR_NOT_TO_BE_TESTED in $(git ls-files -co --exclude-standard '*/.FORMATDENY'); do
+ DIRECTORIES_NOT_TO_BE_TESTED+=($(dirname "${DIR_NOT_TO_BE_TESTED}"))
done
PYTHON_FILES_TO_CHECK=$(git ls-files '*.py')
@@ -25,8 +27,11 @@ if [[ ${#PYTHON_FILES_TO_CHECK} -ne 0 ]]; then
fi
fi
+popd > /dev/null
+
if [[ $INVALID_EXIT -eq 0 ]]; then
echo "[PASSED] Format checker succeed."
return
fi
+
exit 1
diff --git a/infra/command/verify-package b/infra/command/verify-package
new file mode 100644
index 000000000..739680070
--- /dev/null
+++ b/infra/command/verify-package
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+source "${NNAS_PROJECT_PATH}/infra/packaging/verify"
diff --git a/infra/config/docker.configuration b/infra/config/docker.configuration
new file mode 100644
index 000000000..2e001373b
--- /dev/null
+++ b/infra/config/docker.configuration
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+# Don't run this script
+[[ "${BASH_SOURCE[0]}" == "${0}" ]] && echo "Please don't execute ${BASH_SOURCE[0]}" && exit 1
+
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw/one-devtools}
+echo "Using docker image ${DOCKER_IMAGE_NAME}"
+
+if [ -z "`docker images ${DOCKER_IMAGE_NAME}`" ]; then
+ echo "Need docker image!"
+ exit 1
+fi
+
+HOST_PATH="${NNAS_PROJECT_PATH}"
+DOCKER_PATH="${NNAS_PROJECT_PATH}"
+
+export GIT_SSL_NO_VERIFY=1
+
+DOCKER_VOLUMES+=" -v ${HOST_PATH}:${DOCKER_PATH}"
+
+if [[ ! -z "${ENV_FILE}" ]]; then
+ if [[ -e ${ENV_FILE} ]]; then
+ DOCKER_ENV_VARS+=" --env-file ${ENV_FILE} "
+ else
+ echo "[WARNING] Cannot find docker environment variable list file: ${ENV_FILE}"
+ fi
+fi
+
+DOCKER_ENV_VARS+=" -e http_proxy"
+DOCKER_ENV_VARS+=" -e no_proxy"
+DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
+DOCKER_ENV_VARS+=" -e NNAS_WORKSPACE"
+DOCKER_ENV_VARS+=" -e NNCC_WORKSPACE"
+DOCKER_ENV_VARS+=" -e NNFW_WORKSPACE"
+
+DOCKER_RUN_OPTS="${DOCKER_OPTS}"
+DOCKER_RUN_OPTS+=" --rm"
+DOCKER_RUN_OPTS+=" -w ${DOCKER_PATH}"
+
+function docker_cleanup()
+{
+ # Newly created files during during docker run can have different ownership.
+ # This may cause some problems, for example, some jenkins slaves or developers
+ # can't remove built files due to lack of permission.
+ # To address this issue, let's change owner of all files
+ # in nncc to owner of nncc.
+ NNFW_OWNER_UID=$(stat -c "%u" ${HOST_PATH})
+ NNFW_OWNER_GID=$(stat -c "%g" ${HOST_PATH})
+
+ CMD="chown -R ${NNFW_OWNER_UID}:${NNFW_OWNER_GID} ${DOCKER_PATH}"
+ docker run ${DOCKER_RUN_OPTS} ${DOCKER_ENV_VARS} ${DOCKER_VOLUMES} ${DOCKER_IMAGE_NAME} ${CMD}
+}
diff --git a/infra/docker/bionic/Dockerfile b/infra/docker/bionic/Dockerfile
new file mode 100644
index 000000000..6a5f64ace
--- /dev/null
+++ b/infra/docker/bionic/Dockerfile
@@ -0,0 +1,115 @@
+# Copyright 2016-2020 Jing Li
+# Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM ubuntu:18.04
+
+ARG UBUNTU_MIRROR
+
+# Install 'add-apt-repository'
+RUN apt-get update && apt-get -qqy install software-properties-common
+
+# Build tool
+RUN apt-get update && apt-get -qqy install build-essential cmake scons git g++-arm-linux-gnueabihf g++-aarch64-linux-gnu
+
+# Install extra dependencies (Caffe, nnkit)
+RUN apt-get update && apt-get -qqy install libboost-all-dev libgflags-dev libgoogle-glog-dev libatlas-base-dev libhdf5-dev
+
+# Install protocol buffer
+RUN apt-get update && apt-get -qqy install libprotobuf-dev protobuf-compiler
+
+# Additonal tools
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get -qqy install doxygen graphviz wget zip unzip clang-format-3.9 python3 python3-pip python3-venv hdf5-tools pylint
+RUN pip3 install --upgrade pip
+RUN pip3 install yapf==0.22.0 numpy
+
+# Install google test (source)
+RUN apt-get update && apt-get -qqy install libgtest-dev
+
+# Install build tool gcc version 8.x and set alternative link (c++17 support)
+RUN apt-get update && apt-get -qqy install g++-8 g++-8-arm-linux-gnueabihf g++-8-aarch64-linux-gnu
+RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 80 \
+ --slave /usr/bin/g++ g++ /usr/bin/g++-8 \
+ --slave /usr/bin/gcov gcov /usr/bin/gcov-8
+RUN update-alternatives --install /usr/bin/arm-linux-gnueabihf-gcc arm-linux-gnueabihf-gcc /usr/bin/arm-linux-gnueabihf-gcc-8 80 \
+ --slave /usr/bin/arm-linux-gnueabihf-g++ arm-linux-gnueabihf-g++ /usr/bin/arm-linux-gnueabihf-g++-8 \
+ --slave /usr/bin/arm-linux-gnueabihf-gcov arm-linux-gnueabihf-gcov /usr/bin/arm-linux-gnueabihf-gcov-8
+RUN update-alternatives --install /usr/bin/aarch64-linux-gnu-gcc aarch64-linux-gnu-gcc /usr/bin/aarch64-linux-gnu-gcc-8 80 \
+ --slave /usr/bin/aarch64-linux-gnu-g++ aarch64-linux-gnu-g++ /usr/bin/aarch64-linux-gnu-g++-8 \
+ --slave /usr/bin/aarch64-linux-gnu-gcov aarch64-linux-gnu-gcov /usr/bin/aarch64-linux-gnu-gcov-8
+
+# Install lcov 1.13-4 for gcc-8 support (installed lcov 1.13-3 can't support gcc-8)
+RUN wget http://launchpadlibrarian.net/370213541/lcov_1.13-4_all.deb
+RUN dpkg -i lcov_1.13-4_all.deb
+
+# Build and install google test static libraries
+WORKDIR /root/gtest
+RUN cmake /usr/src/gtest
+RUN make
+RUN mv *.a /usr/lib
+WORKDIR /root
+RUN rm -rf gtest
+
+# Install gbs & sdb
+RUN echo 'deb [trusted=yes] http://download.tizen.org/tools/latest-release/Ubuntu_18.04/ /' | cat >> /etc/apt/sources.list
+RUN apt-get update && apt-get -qqy install gbs
+RUN wget http://download.tizen.org/sdk/tizenstudio/official/binary/sdb_3.1.4_ubuntu-64.zip -O sdb.zip
+RUN unzip -d tmp sdb.zip && rm sdb.zip
+RUN cp tmp/data/tools/sdb /usr/bin/. && rm -rf tmp
+
+# Install java
+RUN apt-get install -y --no-install-recommends openjdk-8-jdk
+
+# download and install Gradle
+# https://services.gradle.org/distributions/
+ARG GRADLE_VERSION=6.4.1
+ARG GRADLE_DIST=bin
+RUN cd /opt && \
+ wget -q https://services.gradle.org/distributions/gradle-${GRADLE_VERSION}-${GRADLE_DIST}.zip && \
+ unzip gradle*.zip && \
+ ls -d */ | sed 's/\/*$//g' | xargs -I{} mv {} gradle && \
+ rm gradle*.zip
+
+# download and install Android SDK
+# https://developer.android.com/studio#command-tools
+ARG ANDROID_SDK_VERSION=6514223
+ENV ANDROID_SDK_ROOT /opt/android-sdk
+RUN mkdir -p ${ANDROID_SDK_ROOT}/cmdline-tools && \
+ wget -q https://dl.google.com/android/repository/commandlinetools-linux-${ANDROID_SDK_VERSION}_latest.zip && \
+ unzip *tools*linux*.zip -d ${ANDROID_SDK_ROOT}/cmdline-tools && \
+ rm *tools*linux*.zip
+
+# accept the license agreements of the SDK components
+RUN mkdir -p ${ANDROID_SDK_ROOT}/licenses
+RUN echo 24333f8a63b6825ea9c5514f83c2829b004d1fee > ${ANDROID_SDK_ROOT}/licenses/android-sdk-license
+RUN echo d56f5187479451eabf01fb78af6dfcb131a6481e >> ${ANDROID_SDK_ROOT}/licenses/android-sdk-license
+
+# Env variable for gradle build
+ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
+ENV GRADLE_HOME /opt/gradle
+ENV PATH ${PATH}:${GRADLE_HOME}/bin:${ANDROID_SDK_ROOT}/cmdline-tools/tools/bin:${ANDROID_SDK_ROOT}/platform-tools
+ENV ANDROID_HOME ${ANDROID_SDK_ROOT}
+
+# Install NDK
+RUN sdkmanager --install "ndk;20.0.5594570"
+RUN sdkmanager "platform-tools"
+
+# Env for ko encoding build
+ENV LC_ALL "C.UTF-8"
+
+# setup adb server
+EXPOSE 5037
+
+# Clean archives (to reduce image size)
+RUN apt-get clean -y
diff --git a/infra/docker/focal/Dockerfile b/infra/docker/focal/Dockerfile
new file mode 100644
index 000000000..7f5a1b9e3
--- /dev/null
+++ b/infra/docker/focal/Dockerfile
@@ -0,0 +1,47 @@
+# Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM ubuntu:20.04
+
+ARG UBUNTU_MIRROR
+
+# Install 'add-apt-repository'
+RUN apt-get update && apt-get -qqy install software-properties-common
+
+# Build tool
+RUN apt-get update && apt-get -qqy install build-essential cmake scons git lcov g++-arm-linux-gnueabihf g++-aarch64-linux-gnu
+
+# Install extra dependencies (Caffe, nnkit)
+RUN apt-get update && apt-get -qqy install libboost-all-dev libgflags-dev libgoogle-glog-dev libatlas-base-dev libhdf5-dev
+
+# Install protocol buffer
+RUN apt-get update && apt-get -qqy install libprotobuf-dev protobuf-compiler
+
+# Additonal tools (except clang-format-3.9)
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get -qqy install doxygen graphviz wget zip unzip python3 python3-pip python3-venv hdf5-tools pylint
+RUN pip3 install --upgrade pip
+RUN pip3 install yapf==0.22.0 numpy
+
+# Install google test (source)
+RUN apt-get update && apt-get -qqy install libgtest-dev
+
+# Install gbs & sdb
+RUN echo 'deb [trusted=yes] http://download.tizen.org/tools/latest-release/Ubuntu_20.04/ /' | cat >> /etc/apt/sources.list
+RUN apt-get update && apt-get -qqy install gbs
+RUN wget http://download.tizen.org/sdk/tizenstudio/official/binary/sdb_4.2.19_ubuntu-64.zip -O sdb.zip
+RUN unzip -d tmp sdb.zip && rm sdb.zip
+RUN cp tmp/data/tools/sdb /usr/bin/. && rm -rf tmp
+
+# Clean archives (to reduce image size)
+RUN apt-get clean -y
diff --git a/infra/docker/Dockerfile b/infra/docker/xenial/Dockerfile
index 2ca3b44c2..052cc4fb6 100644
--- a/infra/docker/Dockerfile
+++ b/infra/docker/xenial/Dockerfile
@@ -1,8 +1,6 @@
FROM ubuntu:16.04
ARG UBUNTU_MIRROR
-ENV http_proxy $http_proxy
-ENV https_proxy $https_proxy
RUN if [ -n "$http_proxy" ] ; then echo "Acquire::http::proxy \"${http_proxy}\";" >> /etc/apt/apt.conf ; fi
RUN if [ -n "$https_proxy" ] ; then echo "Acquire::https::proxy \"${https_proxy}\";" >> /etc/apt/apt.conf ; fi
@@ -21,7 +19,8 @@ RUN apt-get update && apt-get -qqy install libboost-all-dev libgflags-dev libgoo
RUN apt-get update && apt-get -qqy install libprotobuf-dev protobuf-compiler
# Additonal tools
-RUN apt-get update && apt-get -qqy install doxygen graphviz wget unzip clang-format-3.9 python3 python3-pip hdf5-tools
+RUN apt-get update && apt-get -qqy install doxygen graphviz wget unzip clang-format-3.9 python3 python3-pip python3-venv hdf5-tools pylint
+RUN pip3 install --upgrade pip
RUN pip3 install yapf==0.22.0 numpy
# Install google test (source)
@@ -38,9 +37,11 @@ RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-6 60 --slave /us
# Install cross build tool gcc version 6.x
RUN wget https://releases.linaro.org/components/toolchain/binaries/6.3-2017.02/arm-linux-gnueabihf/gcc-linaro-6.3.1-2017.02-x86_64_arm-linux-gnueabihf.tar.xz -O gcc-hardfp.tar.xz -nv
RUN wget https://releases.linaro.org/components/toolchain/binaries/6.2-2016.11/arm-linux-gnueabi/gcc-linaro-6.2.1-2016.11-x86_64_arm-linux-gnueabi.tar.xz -O gcc-softfp.tar.xz -nv
+RUN wget https://releases.linaro.org/components/toolchain/binaries/6.2-2016.11/aarch64-linux-gnu/gcc-linaro-6.2.1-2016.11-x86_64_aarch64-linux-gnu.tar.xz -O gcc-aarch64.tar.xz -nv
RUN tar -xf gcc-hardfp.tar.xz -C /opt/ && rm -rf gcc-hardfp.tar.xz
RUN tar -xf gcc-softfp.tar.xz -C /opt/ && rm -rf gcc-softfp.tar.xz
-ENV PATH "/opt/gcc-linaro-6.2.1-2016.11-x86_64_arm-linux-gnueabi/bin:/opt/gcc-linaro-6.3.1-2017.02-x86_64_arm-linux-gnueabihf/bin:$PATH"
+RUN tar -xf gcc-aarch64.tar.xz -C /opt/ && rm -rf gcc-aarch64.tar.xz
+ENV PATH "/opt/gcc-linaro-6.2.1-2016.11-x86_64_arm-linux-gnueabi/bin:/opt/gcc-linaro-6.3.1-2017.02-x86_64_arm-linux-gnueabihf/bin:/opt/gcc-linaro-6.2.1-2016.11-x86_64_aarch64-linux-gnu/bin:$PATH"
###
### NOTE: Don't add build & install process using installed buildtool above this line
diff --git a/infra/nnfw/doxygen/Doxyfile b/infra/doxygen/Doxyfile
index c04a7bb0f..0dc6fdfff 100644
--- a/infra/nnfw/doxygen/Doxyfile
+++ b/infra/doxygen/Doxyfile
@@ -32,7 +32,7 @@ DOXYFILE_ENCODING = UTF-8
# title of most generated pages and in a few other places.
# The default value is: My Project.
-PROJECT_NAME = nnfw
+PROJECT_NAME = nnas
# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
# could be handy for archiving the generated documentation or if some version
@@ -58,7 +58,7 @@ PROJECT_LOGO =
# entered, it will be relative to the location where doxygen was started. If
# left blank the current directory will be used.
-OUTPUT_DIRECTORY = docs/doxygen
+OUTPUT_DIRECTORY = doxygen
# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
# directories (in 2 levels) under the output directory of each output format and
@@ -318,7 +318,7 @@ TOC_INCLUDE_HEADINGS = 0
# globally by setting AUTOLINK_SUPPORT to NO.
# The default value is: YES.
-AUTOLINK_SUPPORT = YES
+AUTOLINK_SUPPORT = NO
# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
# to include (a tag file for) the STL sources as input, then you should set this
@@ -874,15 +874,14 @@ RECURSIVE = YES
# run.
EXCLUDE = Product/ \
+ build/ \
+ doxygen/ \
report/ \
- runtimes/contrib/ \
- docs/doxygen/html/ \
externals/ \
packaging/ \
+ runtimes/contrib/ \
runtimes/pure_arm_compute/ \
- tests/framework/ \
- tests/nnapi/src/generated/ \
- tests/nnapi/specs/ \
+ tests/ \
tools/
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
@@ -1488,7 +1487,7 @@ DISABLE_INDEX = NO
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
-GENERATE_TREEVIEW = NO
+GENERATE_TREEVIEW = YES
# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
# doxygen will group on one line in the generated HTML documentation.
diff --git a/infra/git-hooks/pre-commit.sh b/infra/git-hooks/pre-commit.sh
new file mode 100755
index 000000000..127245da1
--- /dev/null
+++ b/infra/git-hooks/pre-commit.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+# An example hook script to verify what is about to be pushed. Called by "git
+# push" after it has checked the remote status, but before anything has been
+# pushed. If this script exits with a non-zero status nothing will be pushed.
+#
+# This hook is called with the following parameters:
+#
+# $1 -- Name of the remote to which the push is being done
+# $2 -- URL to which the push is being done
+#
+# If pushing without using a named remote those arguments will be equal.
+#
+# Information about the commits which are being pushed is supplied as lines to
+# the standard input in the form:
+#
+# <local ref> <local sha1> <remote ref> <remote sha1>
+#
+# This sample shows how to prevent push of commits where the log message starts
+# with "WIP" (work in progress).
+
+remote="$1"
+url="$2"
+
+# RUN FORMAT CHECKER
+
+REPO_PATH=$(git rev-parse --show-toplevel)
+cd $REPO_PATH
+
+./nnas format --staged-only
+
+exit $?
diff --git a/infra/git-hooks/pre-push.sh b/infra/git-hooks/pre-push.sh
index ce751333a..d64c72317 100755
--- a/infra/git-hooks/pre-push.sh
+++ b/infra/git-hooks/pre-push.sh
@@ -27,6 +27,6 @@ url="$2"
REPO_PATH=$(git rev-parse --show-toplevel)
cd $REPO_PATH
-CHECK_DIFF_ONLY=1 ./nnas format
+./nnas format --diff-only
exit $?
diff --git a/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default b/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default
deleted file mode 100644
index 1fff1b4f3..000000000
--- a/infra/nncc/3rdparty/Eigen/fd6845384b86/URL.default
+++ /dev/null
@@ -1 +0,0 @@
-https://bitbucket.org/eigen/eigen/get/fd6845384b86.tar.gz
diff --git a/infra/nncc/CMakeLists.txt b/infra/nncc/CMakeLists.txt
index 12b840636..d416db2fd 100644
--- a/infra/nncc/CMakeLists.txt
+++ b/infra/nncc/CMakeLists.txt
@@ -4,7 +4,12 @@ project(nncc)
enable_testing()
-set(CMAKE_CXX_STANDARD 11)
+set(CMAKE_CXX_STANDARD 14)
+
+set(CMAKE_SKIP_BUILD_RPATH FALSE)
+set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
+set(CMAKE_INSTALL_RPATH "$ORIGIN/../lib:$ORIGIN/")
+set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
# This feature works with CMake 3.5.2 or later. However, using previous versions does not produce
# an error. We are still officially using CMake 3.1.0, but put this code for the sake of semantic
@@ -13,30 +18,33 @@ set(CMAKE_CXX_STANDARD 11)
# such as `cmake-server`.
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
-set(NNCC_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../.." CACHE
- INTERNAL "Where to find nncc top-level source directory"
+set(NNAS_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../.." CACHE
+ INTERNAL "Where to find nnas top-level source directory"
)
-set(NNCC_EXTERNALS_DIR
- "${NNCC_PROJECT_SOURCE_DIR}/externals" CACHE
+set(NNAS_EXTERNALS_DIR
+ "${NNAS_PROJECT_SOURCE_DIR}/externals" CACHE
INTERNAL "Where to download external dependencies"
)
set(NNCC_OVERLAY_DIR "${CMAKE_BINARY_DIR}/overlay" CACHE
INTERNAL "Where locally built external dependencies are installed")
+# Share package build script with runtime
+set(EXT_OVERLAY_DIR ${NNCC_OVERLAY_DIR})
+
# This allows find_package to access configurations installed inside overlay
-list(APPEND CMAKE_PREFIX_PATH "${NNCC_OVERLAY_DIR}")
+list(APPEND CMAKE_PREFIX_PATH "${EXT_OVERLAY_DIR}")
-macro(nncc_include PREFIX)
- include("${NNCC_PROJECT_SOURCE_DIR}/infra/nncc/cmake/modules/${PREFIX}.cmake")
-endmacro(nncc_include)
+macro(nnas_include PREFIX)
+ include("${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/modules/${PREFIX}.cmake")
+endmacro(nnas_include)
-macro(nncc_find_package PREFIX)
+macro(nnas_find_package PREFIX)
find_package(${PREFIX} CONFIG NO_DEFAULT_PATH
- PATHS ${NNCC_PROJECT_SOURCE_DIR}/infra/nncc/cmake/packages
+ PATHS ${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/packages
${ARGN}
)
-endmacro(nncc_find_package)
+endmacro(nnas_find_package)
# nncc_find_resource(NAME) will update the following variables
#
@@ -45,7 +53,7 @@ endmacro(nncc_find_package)
#
# TODO Explain how to add a resource in README.md
function(nncc_find_resource NAME)
- set(RESOURCE_DIR "${NNCC_PROJECT_SOURCE_DIR}/res/${NAME}")
+ set(RESOURCE_DIR "${NNAS_PROJECT_SOURCE_DIR}/res/${NAME}")
if(NOT IS_DIRECTORY ${RESOURCE_DIR})
set(${NAME}_FOUND FALSE PARENT_SCOPE)
@@ -81,6 +89,7 @@ option(BUILD_PROTOBUF "Locally build Protocol Buffer from the downloaded source"
option(DOWNLOAD_EIGEN "Download Eigen source" ON)
option(DOWNLOAD_FARMHASH "Download farmhash source" ON)
option(DOWNLOAD_GEMMLOWP "Download GEMM low precesion library source" ON)
+option(DOWNLOAD_RUY "Download ruy source" ON)
option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" ON)
option(DOWNLOAD_GFLAGS "Download GFlags source" OFF)
option(DOWNLOAD_FLATBUFFERS "Download FlatBuffers source" ON)
@@ -90,11 +99,14 @@ option(DOWNLOAD_CAFFE "Download Caffe source" ON)
option(DOWNLOAD_PYTORCH "Download Pytorch source" ON)
option(DOWNLOAD_ONNX "Download ONNX source" ON)
option(DOWNLOAD_ABSEIL "Download Abseil-cpp source" ON)
+option(DOWNLOAD_PYBIND11 "Download Pybind11 source" ON)
option(DOWNLOAD_GTEST "Download Google Test source" ON)
option(BUILD_GTEST "Build Google Test from the downloaded source" ON)
+option(DOWNLOAD_HDF5 "Download HDF5 source" ON)
+option(BUILD_HDF5 "Build HDF5 from the downloaded source" ON)
-nncc_find_package(GTest QUIET)
+nnas_find_package(GTest QUIET)
option(ENABLE_TEST "Build Tests using Google Test" ${GTest_FOUND})
@@ -111,9 +123,12 @@ if(${ENABLE_TEST})
include(CTest)
endif(${ENABLE_TEST})
-option(ENABLE_CONTRIB_BUILD "Build incubating projects under contrib/" ON)
option(ENABLE_STRICT_BUILD "Treat warning as error" OFF)
+# This option might be turned ON for Windows native build.
+# Check our ProtobufConfig.cmake for its usage.
+option(USE_PROTOBUF_LEGACY_IMPORT "Use legacy MODULE mode import rather than CONFIG mode" OFF)
+
###
### Target
###
@@ -132,13 +147,8 @@ endif(ENABLE_COVERAGE)
###
### Function
###
-# TODO Remove this nncc_include
-nncc_include(OptionalTargetTools)
-nncc_include(add_subdirectories)
+# TODO Remove this nnas_include
+nnas_include(OptionalTargetTools)
+nnas_include(AddSubdirectories)
-###
-### Components
-###
-if(ENABLE_CONTRIB_BUILD)
- add_subdirectory("${NNCC_PROJECT_SOURCE_DIR}/compiler" "${CMAKE_BINARY_DIR}/compiler")
-endif(ENABLE_CONTRIB_BUILD)
+add_subdirectory("${NNAS_PROJECT_SOURCE_DIR}/compiler" "${CMAKE_BINARY_DIR}/compiler")
diff --git a/infra/nncc/cmake/modules/ExternalProjectTools.cmake b/infra/nncc/cmake/modules/ExternalProjectTools.cmake
deleted file mode 100644
index 11446c051..000000000
--- a/infra/nncc/cmake/modules/ExternalProjectTools.cmake
+++ /dev/null
@@ -1,3 +0,0 @@
-macro(add_extdirectory DIR TAG)
- add_subdirectory(${DIR} "${CMAKE_BINARY_DIR}/externals/${TAG}")
-endmacro(add_extdirectory)
diff --git a/infra/nncc/cmake/packages/EigenSourceConfig.cmake b/infra/nncc/cmake/packages/EigenSourceConfig.cmake
deleted file mode 100644
index f87f53304..000000000
--- a/infra/nncc/cmake/packages/EigenSourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_EigenSource_import)
- if(NOT DOWNLOAD_EIGEN)
- set(EigenSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_EIGEN)
-
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
-
- # NOTE The following URL comes from TensorFlow 1.7
- envoption(EIGEN_URL https://bitbucket.org/eigen/eigen/get/2355b229ea4c.tar.gz)
-
- ExternalSource_Download(EIGEN ${EIGEN_URL})
-
- set(EigenSource_DIR ${EIGEN_SOURCE_DIR} PARENT_SCOPE)
- set(EigenSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_EigenSource_import)
-
-_EigenSource_import()
diff --git a/infra/nncc/cmake/packages/FarmhashSourceConfig.cmake b/infra/nncc/cmake/packages/FarmhashSourceConfig.cmake
deleted file mode 100644
index 207909fab..000000000
--- a/infra/nncc/cmake/packages/FarmhashSourceConfig.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-function(_FarmhashSource_import)
- if(NOT DOWNLOAD_FARMHASH)
- set(FarmhashSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_FARMHASH)
-
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
-
- # NOTE TensorFlow 1.7 downloads farmhash from the following URL
- envoption(FARMHASH_URL https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz)
-
- ExternalSource_Download(FARMHASH ${FARMHASH_URL})
-
- set(FarmhashSource_DIR ${FARMHASH_SOURCE_DIR} PARENT_SCOPE)
- set(FarmhashSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_FarmhashSource_import)
-
-_FarmhashSource_import()
diff --git a/infra/nncc/cmake/packages/GTestConfig.cmake b/infra/nncc/cmake/packages/GTestConfig.cmake
deleted file mode 100644
index 27f96b27d..000000000
--- a/infra/nncc/cmake/packages/GTestConfig.cmake
+++ /dev/null
@@ -1,86 +0,0 @@
-function(_GTest_build)
- if(NOT BUILD_GTEST)
- return()
- endif(NOT BUILD_GTEST)
-
- nncc_find_package(GTestSource QUIET)
-
- if(NOT GTestSource_FOUND)
- return()
- endif(NOT GTestSource_FOUND)
-
- # TODO Introduce helper functions
- set(GTEST_SOURCE_DIR "${GTestSource_DIR}")
- set(GTEST_BUILD_DIR "${CMAKE_BINARY_DIR}/externals/GTEST/build")
- set(GTEST_INSTALL_DIR "${NNCC_OVERLAY_DIR}")
-
- set(STAMP_PATH "${GTEST_INSTALL_DIR}/GTEST.stamp")
- set(LOG_PATH "${GTEST_INSTALL_DIR}/GTEST.log")
-
- if(EXISTS ${STAMP_PATH})
- return()
- endif(EXISTS ${STAMP_PATH})
-
- message(STATUS "Google Test Package: Source found (path: ${GTEST_SOURCE_DIR})")
-
- file(MAKE_DIRECTORY ${GTEST_BUILD_DIR})
- file(MAKE_DIRECTORY ${GTEST_INSTALL_DIR})
-
- # NOTE Do NOT retry build once it failed
- file(WRITE "${STAMP_PATH}")
-
- execute_process(COMMAND ${CMAKE_COMMAND}
- -DCMAKE_INSTALL_PREFIX=${GTEST_INSTALL_DIR}
- -DCMAKE_BUILD_TYPE=Release
- ${GTestSource_DIR}
- OUTPUT_FILE ${LOG_PATH}
- ERROR_FILE ${LOG_PATH}
- WORKING_DIRECTORY ${GTEST_BUILD_DIR}
- RESULT_VARIABLE BUILD_EXITCODE)
-
- if(NOT BUILD_EXITCODE EQUAL 0)
- message(FATAL_ERROR "Google Test Package: Build failed (check '${LOG_PATH}' for details)")
- endif(NOT BUILD_EXITCODE EQUAL 0)
-
- execute_process(COMMAND ${CMAKE_COMMAND} --build . -- install
- OUTPUT_FILE ${LOG_PATH}
- ERROR_FILE ${LOG_PATH}
- WORKING_DIRECTORY ${GTEST_BUILD_DIR}
- RESULT_VARIABLE INSTALL_EXITCODE)
-
- if(NOT INSTALL_EXITCODE EQUAL 0)
- message(FATAL_ERROR "Google Test Package: Installation failed (check '${LOG_PATH}' for details)")
- endif(NOT INSTALL_EXITCODE EQUAL 0)
-
- message(STATUS "Google Test Package: Done")
-endfunction(_GTest_build)
-
-_GTest_build()
-
-### Find and use pre-installed Google Test
-find_package(GTest)
-find_package(Threads)
-
-if(${GTEST_FOUND} AND TARGET Threads::Threads)
- if(NOT TARGET gtest)
- add_library(gtest INTERFACE)
- target_include_directories(gtest INTERFACE ${GTEST_INCLUDE_DIRS})
- target_link_libraries(gtest INTERFACE ${GTEST_LIBRARIES} Threads::Threads)
- endif(NOT TARGET gtest)
-
- if(NOT TARGET gtest_main)
- add_library(gtest_main INTERFACE)
- target_include_directories(gtest_main INTERFACE ${GTEST_INCLUDE_DIRS})
- target_link_libraries(gtest_main INTERFACE gtest)
- target_link_libraries(gtest_main INTERFACE ${GTEST_MAIN_LIBRARIES})
-
- # GTest_AddTest(TGT ...) creates an executable target and registers that executable as a CMake test
- function(GTest_AddTest TGT)
- add_executable(${TGT} ${ARGN})
- target_link_libraries(${TGT} gtest_main)
- add_test(${TGT} ${TGT})
- endfunction(GTest_AddTest)
- endif(NOT TARGET gtest_main)
-
- set(GTest_FOUND TRUE)
-endif(${GTEST_FOUND} AND TARGET Threads::Threads)
diff --git a/infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake b/infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake
deleted file mode 100644
index 3fdc86102..000000000
--- a/infra/nncc/cmake/packages/GoogleDoubleConversionConfig.cmake
+++ /dev/null
@@ -1,52 +0,0 @@
-# https://github.com/google/double-conversion
-set(GOOGLE_DOUBLE_CONVERSION_PREFIX "/usr" CACHE PATH "Google DoubleConversion install prefix")
-
-function(_GoogleDoubleConversion_import)
- # Find the header & lib
- find_library(GoogleDoubleConversion_LIB
- NAMES double-conversion
- PATHS "${GOOGLE_DOUBLE_CONVERSION_PREFIX}/lib"
- )
-
- find_path(GoogleDoubleConversion_INCLUDE_DIR
- NAMES double-conversion/double-conversion.h
- PATHS "${GOOGLE_DOUBLE_CONVERSION_PREFIX}/include"
- )
-
- # TODO Version check
- set(GoogleDoubleConversion_FOUND TRUE)
-
- if(NOT GoogleDoubleConversion_LIB)
- set(GoogleDoubleConversion_FOUND FALSE)
- endif(NOT GoogleDoubleConversion_LIB)
-
- if(NOT GoogleDoubleConversion_INCLUDE_DIR)
- set(GoogleDoubleConversion_FOUND FALSE)
- endif(NOT GoogleDoubleConversion_INCLUDE_DIR)
-
- set(GoogleDoubleConversion_FOUND ${GoogleDoubleConversion_FOUND} PARENT_SCOPE)
-
- unset(MESSAGE)
- list(APPEND MESSAGE "Found Google Double Conversion")
-
- if(NOT GoogleDoubleConversion_FOUND)
- list(APPEND MESSAGE ": FALSE")
- else(NOT GoogleDoubleConversion_FOUND)
- list(APPEND MESSAGE " (include: ${GoogleDoubleConversion_INCLUDE_DIR} library: ${GoogleDoubleConversion_LIB})")
-
- # Add target
- if(NOT TARGET google_double_conversion)
- # NOTE IMPORTED target may be more appropriate for this case
- add_library(google_double_conversion INTERFACE)
- target_link_libraries(google_double_conversion INTERFACE ${GoogleDoubleConversion_LIB})
- target_include_directories(google_double_conversion INTERFACE ${GoogleDoubleConversion_INCLUDE_DIR})
-
- add_library(Google::DoubleConversion ALIAS google_double_conversion)
- endif(NOT TARGET google_double_conversion)
- endif(NOT GoogleDoubleConversion_FOUND)
-
- message(STATUS ${MESSAGE})
- set(GoogleDoubleConversion_FOUND ${GoogleDoubleConversion_FOUND} PARENT_SCOPE)
-endfunction(_GoogleDoubleConversion_import)
-
-_GoogleDoubleConversion_import()
diff --git a/infra/nncc/cmake/packages/GoogleNSyncConfig.cmake b/infra/nncc/cmake/packages/GoogleNSyncConfig.cmake
deleted file mode 100644
index 1fdf8cc20..000000000
--- a/infra/nncc/cmake/packages/GoogleNSyncConfig.cmake
+++ /dev/null
@@ -1,62 +0,0 @@
-# https://github.com/google/nsync
-set(GOOGLE_NSYNC_PREFIX "/usr" CACHE PATH "Where to find Google NSync library")
-
-function(_GoogleNSync_import)
- # Find the header & lib
- find_library(GoogleNSync_C_LIB
- NAMES nsync
- PATHS "${GOOGLE_NSYNC_PREFIX}/lib"
- )
-
- find_library(GoogleNSync_CPP_LIB
- NAMES nsync_cpp
- PATHS "${GOOGLE_NSYNC_PREFIX}/lib"
- )
-
- find_path(GoogleNSync_INCLUDE_DIR
- NAMES nsync.h
- PATHS "${GOOGLE_NSYNC_PREFIX}/include"
- )
-
- message(STATUS "GoogleNSync_C_LIB: ${GoogleNSync_C_LIB}")
- message(STATUS "GoogleNSync_CPP_LIB: ${GoogleNSync_CPP_LIB}")
- message(STATUS "GoogleNSync_INCLUDE_DIR: ${GoogleNSync_INCLUDE_DIR}")
-
- set(GoogleNSync_FOUND TRUE)
-
- if(NOT GoogleNSync_C_LIB)
- set(GoogleNSync_FOUND FALSE)
- endif(NOT GoogleNSync_C_LIB)
-
- if(NOT GoogleNSync_CPP_LIB)
- set(GoogleNSync_FOUND FALSE)
- endif(NOT GoogleNSync_CPP_LIB)
-
- if(NOT GoogleNSync_INCLUDE_DIR)
- set(GoogleNSync_FOUND FALSE)
- endif(NOT GoogleNSync_INCLUDE_DIR)
-
- unset(MESSAGE)
- list(APPEND MESSAGE "Found Google NSync")
-
- if(NOT GoogleNSync_FOUND)
- list(APPEND MESSAGE ": FALSE")
- else(NOT GoogleNSync_FOUND)
- list(APPEND MESSAGE " (include: ${GoogleNSync_INCLUDE_DIR} library: ${GoogleNSync_C_LIB} ${GoogleNSync_CPP_LIB})")
-
- # Add target
- if(NOT TARGET google_nsync)
- # NOTE IMPORTED target may be more appropriate for this case
- add_library(google_nsync INTERFACE)
- target_link_libraries(google_nsync INTERFACE ${GoogleNSync_C_LIB} ${GoogleNSync_CPP_LIB})
- target_include_directories(google_nsync INTERFACE ${GoogleNSync_INCLUDE_DIR})
-
- add_library(Google::NSync ALIAS google_nsync)
- endif(NOT TARGET google_nsync)
- endif(NOT GoogleNSync_FOUND)
-
- message(STATUS ${MESSAGE})
- set(GoogleNSync_FOUND ${GoogleNSync_FOUND} PARENT_SCOPE)
-endfunction(_GoogleNSync_import)
-
-_GoogleNSync_import()
diff --git a/infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt b/infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt
deleted file mode 100644
index 8291958a4..000000000
--- a/infra/nncc/cmake/packages/ONNXProto/CMakeLists.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Protobuf_Generate(ONNX_PROTO "${CMAKE_CURRENT_BINARY_DIR}/generated" "${ONNXSource_DIR}" "onnx/onnx.proto")
-
-add_library(onnxproto STATIC ${ONNX_PROTO_SOURCES})
-set_target_properties(onnxproto PROPERTIES POSITION_INDEPENDENT_CODE ON)
-target_include_directories(onnxproto PUBLIC ${ONNX_PROTO_INCLUDE_DIRS})
-target_link_libraries(onnxproto libprotobuf)
diff --git a/infra/nncc/cmake/packages/ONNXProtoConfig.cmake b/infra/nncc/cmake/packages/ONNXProtoConfig.cmake
deleted file mode 100644
index a6dc7a01d..000000000
--- a/infra/nncc/cmake/packages/ONNXProtoConfig.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-function(_ONNXProto_import)
- nncc_find_package(ONNXSource EXACT 1.3.0 QUIET)
-
- if(NOT ONNXSource_FOUND)
- set(ONNXProto_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT ONNXSource_FOUND)
-
- nncc_find_package(Protobuf QUIET)
-
- if(NOT Protobuf_FOUND)
- set(ONNXProto_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT Protobuf_FOUND)
-
- if(NOT TARGET onnxproto)
- nncc_include(ExternalProjectTools)
- add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/ONNXProto" onnxproto)
- endif(NOT TARGET onnxproto)
-
- message(STATUS "Found ONNX: TRUE")
- set(ONNXProto_FOUND TRUE PARENT_SCOPE)
-endfunction(_ONNXProto_import)
-
-_ONNXProto_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake b/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake
deleted file mode 100644
index 0ff33232d..000000000
--- a/infra/nncc/cmake/packages/ONNXSource-1.3.0/ONNXSourceConfig.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-function(_ONNXSource_import)
- if(NOT DOWNLOAD_ONNX)
- set(ONNXSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_ONNX)
-
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
-
- envoption(ONNX_1_3_0_URL https://github.com/onnx/onnx/archive/v1.3.0.zip)
-
- ExternalSource_Download(ONNX DIRNAME ONNX-1.3.0
- CHECKSUM MD5=5d588ffcf43bb18f99a67c015c97f92e
- URL ${ONNX_1_3_0_URL})
-
- set(ONNXSource_DIR ${ONNX_SOURCE_DIR} PARENT_SCOPE)
- set(ONNXSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_ONNXSource_import)
-
-_ONNXSource_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake b/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake
deleted file mode 100644
index 7f890e911..000000000
--- a/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfig.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-function(_ONNXSource_import)
- if(NOT DOWNLOAD_ONNX)
- set(ONNXSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_ONNX)
-
- nncc_include(ExternalSourceTools)
- nncc_include(OptionTools)
-
- envoption(ONNX_1_5_0_URL https://github.com/onnx/onnx/archive/v1.5.0.zip)
-
- ExternalSource_Download(ONNX DIRNAME ONNX-1.5.0
- CHECKSUM MD5=1a5fe554569a3819705b26de33d8fe02
- URL ${ONNX_1_5_0_URL})
-
- set(ONNXSource_DIR ${ONNX_SOURCE_DIR} PARENT_SCOPE)
- set(ONNXSource_FOUND TRUE PARENT_SCOPE)
-endfunction(_ONNXSource_import)
-
-_ONNXSource_import()
diff --git a/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake b/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake
deleted file mode 100644
index 70b2804b0..000000000
--- a/infra/nncc/cmake/packages/ONNXSource-1.5.0/ONNXSourceConfigVersion.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PACKAGE_VERSION "1.5.0")
-set(PACKAGE_VERSION_EXACT FALSE)
-set(PACKAGE_VERSION_COMPATIBLE FALSE)
-set(PACKAGE_VERSION_UNSUITABLE TRUE)
-
-if(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
- set(PACKAGE_VERSION_EXACT TRUE)
- set(PACKAGE_VERSION_COMPATIBLE TRUE)
- set(PACKAGE_VERSION_UNSUITABLE FALSE)
-endif(PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt b/infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt
deleted file mode 100644
index c5e89eebe..000000000
--- a/infra/nncc/cmake/packages/TensorFlowLite-1.7/Lite/CMakeLists.txt
+++ /dev/null
@@ -1,38 +0,0 @@
-# NOTE The followings SHOULD be defined before using this CMakeLists.txt
-#
-# 'TensorFlowSource_DIR' variable
-# 'FlatBuffersSource_DIR' variable
-# 'eigen' target
-# 'gemmlowp' target
-# 'neon2sse' target
-# 'farmhash' target
-#
-set(TensorFlowLiteSource_DIR ${TensorFlowSource_DIR}/tensorflow/contrib/lite)
-
-file(GLOB CORE_SRCS "${TensorFlowLiteSource_DIR}/*.c" "${TensorFlowLiteSource_DIR}/*.cc")
-file(GLOB CORE_TESTS "${TensorFlowLiteSource_DIR}/*test*.cc")
-list(REMOVE_ITEM CORE_SRCS ${CORE_TESTS})
-
-file(GLOB_RECURSE KERNEL_SRCS "${TensorFlowLiteSource_DIR}/kernels/*.cc")
-file(GLOB_RECURSE KERNEL_TESTS "${TensorFlowLiteSource_DIR}/kernels/*test*.cc")
-list(REMOVE_ITEM KERNEL_SRCS ${KERNEL_TESTS})
-# Exclude buggy kernel(s) from the build
-list(REMOVE_ITEM KERNEL_SRCS "${TensorFlowLiteSource_DIR}/kernels/internal/spectrogram.cc")
-
-list(APPEND SRCS ${CORE_SRCS})
-list(APPEND SRCS ${KERNEL_SRCS})
-
-include(CheckCXXCompilerFlag)
-
-CHECK_CXX_COMPILER_FLAG(-Wno-extern-c-compat COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
-
-add_library(tensorflowlite-1.7 ${SRCS})
-set_target_properties(tensorflowlite-1.7 PROPERTIES POSITION_INDEPENDENT_CODE ON)
-target_include_directories(tensorflowlite-1.7 PUBLIC ${TensorFlowSource_DIR})
-target_include_directories(tensorflowlite-1.7 PUBLIC ${FlatBuffersSource_DIR}/include)
-target_compile_options(tensorflowlite-1.7 PUBLIC -Wno-ignored-attributes)
-if(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
- target_compile_options(tensorflowlite-1.7 PUBLIC -Wno-extern-c-compat)
-endif(COMPILER_SUPPORT_EXTERN_C_COMPAT_WARNING)
-target_compile_definitions(tensorflowlite-1.7 PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
-target_link_libraries(tensorflowlite-1.7 eigen gemmlowp neon2sse farmhash dl)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake b/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake
deleted file mode 100644
index 44bc817e1..000000000
--- a/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfig.cmake
+++ /dev/null
@@ -1,55 +0,0 @@
-function(_TensorFlowLite_import)
- nncc_find_package(TensorFlowSource EXACT 1.7 QUIET)
-
- if(NOT TensorFlowSource_FOUND)
- set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT TensorFlowSource_FOUND)
-
- # TensorFlow 1.7 downloads FlatBuffers from https://github.com/google/flatbuffers/archive/971a68110e4.tar.gz
- #
- # FlatBuffers 1.8 is compatible with 971a68110e4.
- nncc_find_package(FlatBuffersSource EXACT 1.8 QUIET)
-
- if(NOT FlatBuffersSource_FOUND)
- set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT FlatBuffersSource_FOUND)
-
- nncc_find_package(Farmhash QUIET)
-
- if(NOT Farmhash_FOUND)
- set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT Farmhash_FOUND)
-
- nncc_find_package(Eigen QUIET)
-
- if(NOT Eigen_FOUND)
- set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT Eigen_FOUND)
-
- nncc_find_package(GEMMLowp QUIET)
-
- if(NOT GEMMLowp_FOUND)
- set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT GEMMLowp_FOUND)
-
- nncc_find_package(NEON2SSE QUIET)
-
- if(NOT NEON2SSE_FOUND)
- set(TensorFlowLite_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT NEON2SSE_FOUND)
-
- if(NOT TARGET tensorflowlite-1.7)
- nncc_include(ExternalProjectTools)
- add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Lite" tflite-1.7)
- endif(NOT TARGET tensorflowlite-1.7)
-
- set(TensorFlowLite_FOUND TRUE PARENT_SCOPE)
-endfunction(_TensorFlowLite_import)
-
-_TensorFlowLite_import()
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake
deleted file mode 100644
index 5963ce418..000000000
--- a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfig.cmake
+++ /dev/null
@@ -1,104 +0,0 @@
-function(_TensorFlowProtoText_import)
- macro(require_package PKGNAME)
- nncc_find_package(${PKGNAME} ${ARGN} QUIET)
- if(NOT ${PKGNAME}_FOUND)
- message(STATUS "Found TensorFlowProtoText: FALSE (${PKGNAME} is missing)")
- set(TensorFlowProtoText_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT ${PKGNAME}_FOUND)
- endmacro(require_package)
-
- require_package(TensorFlowSource EXACT 1.12)
- require_package(Abseil)
- require_package(Eigen)
- require_package(Protobuf)
- require_package(GoogleDoubleConversion)
- require_package(GoogleNSync)
-
- if(NOT TARGET tensorflow-prototext-1.12)
- nncc_include(ExternalProjectTools)
- add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/build" TensorFlowProtoText-1.12)
- endif(NOT TARGET tensorflow-prototext-1.12)
-
- set(TensorFlowProtoText_FOUND TRUE PARENT_SCOPE)
-endfunction(_TensorFlowProtoText_import)
-
-_TensorFlowProtoText_import()
-
-if(TensorFlowProtoText_FOUND)
- # CMAKE_CURRENT_LIST_DIR
- #
- # ... The value has dynamic scope. ... Therefore the value of the variable inside a macro
- # or function is the directory of the file invoking the bottom-most entry on the call stack,
- # not the directory of the file containing the macro or function definition.
- #
- # Reference: https://cmake.org/cmake/help/v3.1/variable/CMAKE_CURRENT_LIST_DIR.html
- set(TENSORLFLOW_PROTO_TEXT_1_12_CMAKE_DIR
- "${CMAKE_CURRENT_LIST_DIR}" CACHE INTERNAL
- "Where to find make_directories"
- )
-
- # Comments from "gen_proto_text_functions.cc"
- # >
- # > Main program to take input protos and write output pb_text source files that
- # > contain generated proto text input and output functions.
- # >
- # > Main expects:
- # > - First argument is output path
- # > - Second argument is the relative path of the protos to the root. E.g.,
- # > for protos built by a rule in tensorflow/core, this will be
- # > tensorflow/core.
- # > - Then any number of source proto file names, plus one source name must be
- # > placeholder.txt from this gen tool's package. placeholder.txt is
- # > ignored for proto resolution, but is used to determine the root at which
- # > the build tool has placed the source proto files.
- # >
- function(ProtoText_Generate PREFIX OUTPUT_DIR)
- # THIS SHOULD SUCCEED!
- nncc_find_package(TensorFlowSource EXACT 1.12 REQUIRED)
-
- set(OUTPUT_REL "tensorflow")
- set(PROTO_DIR "${TensorFlowSource_DIR}")
-
- set(PROTO_INPUTS ${ARGN})
- list(APPEND PROTO_INPUTS "tensorflow/tools/proto_text/placeholder.txt")
-
- get_filename_component(abs_output_dir ${OUTPUT_DIR} ABSOLUTE)
- get_filename_component(abs_proto_dir ${TensorFlowSource_DIR} ABSOLUTE)
-
- # Let's reset variables before using them
- # NOTE This DOES NOT AFFECT variables in the parent scope
- unset(PROTO_FILES)
- unset(OUTPUT_DIRS)
- unset(OUTPUT_FILES)
-
- foreach(proto ${PROTO_INPUTS})
- get_filename_component(fil "${proto}" NAME)
- get_filename_component(dir "${proto}" DIRECTORY)
-
- get_filename_component(fil_we "${fil}" NAME_WE)
-
- get_filename_component(abs_fil "${abs_proto_base}/${proto}" ABSOLUTE)
- get_filename_component(abs_dir "${abs_fil}" DIRECTORY)
-
- list(APPEND PROTO_FILES "${abs_proto_dir}/${proto}")
-
- if(NOT ${fil} STREQUAL "placeholder.txt")
- list(APPEND OUTPUT_DIRS "${abs_output_dir}/${dir}")
- list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb_text.h")
- list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb_text-impl.h")
- list(APPEND OUTPUT_FILES "${abs_output_dir}/${dir}/${fil_we}.pb_text.cc")
- endif(NOT ${fil} STREQUAL "placeholder.txt")
- endforeach()
-
- add_custom_command(OUTPUT ${OUTPUT_FILES}
- # "make_directory" in CMake 3.1 cannot create multiple directories at once.
- # COMMAND ${CMAKE_COMMAND} -E make_directory ${OUTPUT_DIRS}
- COMMAND "${TENSORLFLOW_PROTO_TEXT_1_12_CMAKE_DIR}/make_directories.sh" ${OUTPUT_DIRS}
- COMMAND "$<TARGET_FILE:tensorflow-prototext-1.12>" "${abs_output_dir}/${OUTPUT_REL}" "${OUTPUT_REL}" ${PROTO_FILES}
- DEPENDS ${PROTO_FILES})
-
- set(${PREFIX}_SOURCES ${OUTPUT_FILES} PARENT_SCOPE)
- set(${PREFIX}_INCLUDE_DIRS ${abs_output_dir} PARENT_SCOPE)
- endfunction(ProtoText_Generate)
-endif(TensorFlowProtoText_FOUND)
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt
deleted file mode 100644
index 86d6e6fe5..000000000
--- a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/build/CMakeLists.txt
+++ /dev/null
@@ -1,78 +0,0 @@
-message(STATUS "Build TensorFlowProtoText from '${TensorFlowSource_DIR}'")
-
-#
-# Build "proto_text" tool
-#
-unset(SOURCE_FILES)
-
-macro(Source_Add RPATH)
- list(APPEND SOURCE_FILES "${TensorFlowSource_DIR}/${RPATH}")
-endmacro(Source_Add)
-
-# This list comes from "tensorflow/contrib/makefile/proto_text_cc_files.txt"
-Source_Add(tensorflow/core/lib/core/status.cc)
-Source_Add(tensorflow/core/lib/core/threadpool.cc)
-Source_Add(tensorflow/core/lib/hash/hash.cc)
-Source_Add(tensorflow/core/lib/io/inputstream_interface.cc)
-Source_Add(tensorflow/core/lib/io/random_inputstream.cc)
-Source_Add(tensorflow/core/lib/io/buffered_inputstream.cc)
-Source_Add(tensorflow/core/lib/io/inputbuffer.cc)
-Source_Add(tensorflow/core/lib/io/iterator.cc)
-Source_Add(tensorflow/core/lib/io/path.cc)
-Source_Add(tensorflow/core/lib/strings/numbers.cc)
-Source_Add(tensorflow/core/lib/strings/scanner.cc)
-Source_Add(tensorflow/core/lib/strings/str_util.cc)
-Source_Add(tensorflow/core/lib/strings/strcat.cc)
-Source_Add(tensorflow/core/lib/strings/stringprintf.cc)
-Source_Add(tensorflow/core/lib/strings/proto_text_util.cc)
-Source_Add(tensorflow/core/platform/cpu_info.cc)
-Source_Add(tensorflow/core/platform/denormal.cc)
-Source_Add(tensorflow/core/platform/env.cc)
-Source_Add(tensorflow/core/platform/env_time.cc)
-Source_Add(tensorflow/core/platform/file_system.cc)
-Source_Add(tensorflow/core/platform/file_system_helper.cc)
-Source_Add(tensorflow/core/platform/protobuf_util.cc)
-Source_Add(tensorflow/core/platform/setround.cc)
-Source_Add(tensorflow/core/platform/tracing.cc)
-Source_Add(tensorflow/core/platform/posix/env.cc)
-Source_Add(tensorflow/core/platform/posix/env_time.cc)
-Source_Add(tensorflow/core/platform/posix/error.cc)
-Source_Add(tensorflow/core/platform/posix/load_library.cc)
-Source_Add(tensorflow/core/platform/posix/port.cc)
-Source_Add(tensorflow/core/platform/posix/posix_file_system.cc)
-Source_Add(tensorflow/core/platform/default/logging.cc)
-Source_Add(tensorflow/core/platform/default/mutex.cc)
-Source_Add(tensorflow/core/platform/default/protobuf.cc)
-
-Source_Add(tensorflow/tools/proto_text/gen_proto_text_functions_lib.cc)
-Source_Add(tensorflow/tools/proto_text/gen_proto_text_functions.cc)
-
-unset(PROTO_FILES)
-
-macro(Proto_Add RPATH)
- list(APPEND PROTO_FILES "${RPATH}")
-endmacro(Proto_Add)
-
-Proto_Add(tensorflow/core/lib/core/error_codes.proto)
-Proto_Add(tensorflow/core/framework/types.proto)
-Proto_Add(tensorflow/core/framework/tensor.proto)
-Proto_Add(tensorflow/core/framework/tensor_shape.proto)
-Proto_Add(tensorflow/core/framework/summary.proto)
-Proto_Add(tensorflow/core/framework/resource_handle.proto)
-
-Protobuf_Generate(PROTO_TEXT_PROTO
- "${CMAKE_CURRENT_BINARY_DIR}/generated/proto_text"
- "${TensorFlowSource_DIR}"
- ${PROTO_FILES}
-)
-
-add_executable(tensorflow-prototext-1.12 ${SOURCE_FILES} ${PROTO_TEXT_PROTO_SOURCES})
-target_include_directories(tensorflow-prototext-1.12 PRIVATE ${TensorFlowSource_DIR})
-target_include_directories(tensorflow-prototext-1.12 PRIVATE ${PROTO_TEXT_PROTO_INCLUDE_DIRS})
-
-target_link_libraries(tensorflow-prototext-1.12 PRIVATE abseil)
-target_link_libraries(tensorflow-prototext-1.12 PRIVATE eigen)
-target_link_libraries(tensorflow-prototext-1.12 PRIVATE ${PROTO_TEXT_PROTO_LIBRARIES})
-target_link_libraries(tensorflow-prototext-1.12 PRIVATE Google::DoubleConversion)
-target_link_libraries(tensorflow-prototext-1.12 PRIVATE Google::NSync)
-target_link_libraries(tensorflow-prototext-1.12 PRIVATE dl)
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh b/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh
deleted file mode 100755
index 1fb2ab683..000000000
--- a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/make_directories.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-while [[ $# -ne 0 ]]; do
- DIR=$1; shift
- mkdir -p "${DIR}"
-done
diff --git a/infra/nncc/cmake/packages/TensorFlowVersionChecker.c b/infra/nncc/cmake/packages/TensorFlowVersionChecker.c
deleted file mode 100644
index 6161ef74a..000000000
--- a/infra/nncc/cmake/packages/TensorFlowVersionChecker.c
+++ /dev/null
@@ -1,9 +0,0 @@
-#include <string.h>
-#include <tensorflow/c/c_api.h>
-
-int main(int argc, char **argv)
-{
- if (argc >= 2 && !strcmp(argv[1], TF_Version()))
- return 0;
- return 255;
-}
diff --git a/infra/nncc/command/utcount b/infra/nncc/command/utcount
new file mode 100644
index 000000000..d06c5c9de
--- /dev/null
+++ b/infra/nncc/command/utcount
@@ -0,0 +1,45 @@
+#!/bin/bash
+
+import "build.configuration"
+
+BUILD_WORKSPACE_PATH="${NNCC_PROJECT_PATH}/${BUILD_WORKSPACE_RPATH}"
+
+if [[ ! -d "${BUILD_WORKSPACE_PATH}" ]]; then
+ echo "'${BUILD_WORKSPACE_RPATH}' does not exist. Please run 'configure' first"
+ exit 255
+fi
+
+BUILD_ITEMS="angkor cwrap pepper-str pepper-strcast pp stdex \
+oops pepper-assert \
+hermes hermes-std \
+loco locop locomotiv logo-core logo \
+foder souschef arser vconone \
+safemain mio-circle mio-tflite \
+tflite2circle \
+luci \
+luci-interpreter \
+luci-value-test \
+record-minmax \
+circle2circle circle-quantizer"
+
+function count_all()
+{
+ local result=`(ctest --verbose | grep -c '\[ RUN \]') 2>/dev/null`
+ echo $result
+}
+
+function count_neg()
+{
+ local result=`(ctest --verbose | grep '\[ RUN \]' | grep -c '_NEG') 2>/dev/null`
+ echo $result
+}
+
+export CTEST_OUTPUT_ON_FAILURE=0
+
+for item in $BUILD_ITEMS
+do
+ cd ${BUILD_WORKSPACE_PATH}/compiler/$item &&
+ printf "$item = " &&
+ res="$(count_all)" && printf "$res " &&
+ res="$(count_neg)" && echo "$res"
+done
diff --git a/infra/nncc/config/docker.configuration b/infra/nncc/config/docker.configuration
index 7078585a2..fb3295771 100644
--- a/infra/nncc/config/docker.configuration
+++ b/infra/nncc/config/docker.configuration
@@ -1,4 +1,4 @@
-DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnas}
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw/one-devtools}
echo "Using docker image ${DOCKER_IMAGE_NAME}"
if [ -z "`docker images ${DOCKER_IMAGE_NAME}`" ]; then
diff --git a/infra/nnfw/CMakeLists.txt b/infra/nnfw/CMakeLists.txt
index f2eb2f3ba..897a16fbf 100644
--- a/infra/nnfw/CMakeLists.txt
+++ b/infra/nnfw/CMakeLists.txt
@@ -4,21 +4,36 @@ project(nnfw)
enable_testing()
-set(NNFW_EXTERNALS_DIR
- "${CMAKE_CURRENT_LIST_DIR}/../../externals" CACHE
+set(CMAKE_SKIP_BUILD_RPATH FALSE)
+set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
+set(CMAKE_INSTALL_RPATH "$ORIGIN/../lib:$ORIGIN/")
+set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
+
+### CMAKE_BUILD_TYPE_LC: Build type lower case
+string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LC)
+
+set(NNAS_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../.." CACHE
+ INTERNAL "Where to find nnas top-level source directory"
+)
+
+set(NNAS_EXTERNALS_DIR
+ "${NNAS_PROJECT_SOURCE_DIR}/externals" CACHE
INTERNAL "Where to download external dependencies"
)
set(NNFW_OVERLAY_DIR "${CMAKE_BINARY_DIR}/overlay" CACHE
INTERNAL "Where locally built external dependencies are installed")
+# Share package build script with compiler
+set(EXT_OVERLAY_DIR ${NNFW_OVERLAY_DIR})
+
# This allows find_package to access configurations installed inside overlay
-list(APPEND CMAKE_PREFIX_PATH "${NNFW_OVERLAY_DIR}")
+list(APPEND CMAKE_PREFIX_PATH "${EXT_OVERLAY_DIR}")
-macro(nnfw_include PREFIX)
- include("${CMAKE_SOURCE_DIR}/cmake/modules/${PREFIX}.cmake")
-endmacro(nnfw_include)
+macro(nnas_include PREFIX)
+ include("${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/modules/${PREFIX}.cmake")
+endmacro(nnas_include)
-# 'find_package()' wrapper to find in cmake/packages folder
+# Runtime 'find_package()' wrapper to find in cmake/packages folder
#
# Example:
# nnfw_find_package(Boost): Load settings from 'BoostConfig.cmake' file
@@ -26,10 +41,21 @@ endmacro(nnfw_include)
# nnfw_find_package(Boost QUIET): Load settings silently, without warnings
# nnfw_find_package(Boost REQUIRED): Load settings but stop with error when failed
macro(nnfw_find_package PREFIX)
- find_package(${PREFIX} CONFIG NO_DEFAULT_PATH PATHS ${CMAKE_SOURCE_DIR}/cmake/packages ${ARGN})
+ find_package(${PREFIX} CONFIG NO_DEFAULT_PATH
+ PATHS ${CMAKE_SOURCE_DIR}/cmake/packages
+ ${ARGN}
+ )
endmacro(nnfw_find_package)
-set(CMAKE_CXX_STANDARD 11)
+# Common 'find_package()' wrapper to find in infra/cmake/packages folder
+macro(nnas_find_package PREFIX)
+ find_package(${PREFIX} CONFIG NO_DEFAULT_PATH
+ PATHS ${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/packages
+ ${ARGN}
+ )
+endmacro(nnas_find_package)
+
+set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_EXTENSIONS OFF)
# This feature works with CMake 3.5.2 or later. However, using previous versions does not produce
@@ -41,11 +67,7 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
# identify platform: HOST_PLATFORM, TARGET_PLATFORM and related
# note: this should be placed before flags and options setting
-nnfw_include(IdentifyPlatform)
-
-# apply compilation flags
-# note: this should be placed after cmake/option/option_xxx.cmake files
-include("cmake/ApplyCompileFlags.cmake")
+nnas_include(IdentifyPlatform)
# Configuration flags
include("cmake/CfgOptionFlags.cmake")
@@ -55,6 +77,11 @@ include("cmake/CfgOptionFlags.cmake")
# TARGET_ARCH: target architecture string for cross building
# TARGET_OS: target os string for cross building
+# apply compilation flags
+# NOTE This should be placed after cmake/CfgOptionFlags.cmake files include
+# because compile flag setting can be decided using option (ex. ENABLE_COVERAGE)
+include("cmake/ApplyCompileFlags.cmake")
+
nnfw_find_package(GTest QUIET)
option(ENABLE_TEST "Build Tests using Google Test" ${GTest_FOUND})
@@ -72,36 +99,23 @@ if(ENABLE_STRICT_BUILD)
target_compile_options(nnfw_common INTERFACE -Werror -Wall -Wextra)
endif(ENABLE_STRICT_BUILD)
+macro(nnfw_strict_build TARGET)
+ if(ENABLE_STRICT_BUILD)
+ target_compile_options(${TARGET} PRIVATE -Werror -Wall -Wextra)
+ endif(ENABLE_STRICT_BUILD)
+endmacro(nnfw_strict_build)
+
# TODO Replace using default build option setting in cmake/buildtool/config/config_linux.cmake
# to link nnfw_coverage on each module which want to check coverage
add_library(nnfw_coverage INTERFACE)
if(ENABLE_COVERAGE)
- target_compile_options(nnfw_coverage INTERFACE -g -O -DDEBUG -fprofile-arcs -ftest-coverage)
+ target_compile_options(nnfw_coverage INTERFACE -fprofile-arcs -ftest-coverage)
target_link_libraries(nnfw_coverage INTERFACE gcov)
endif(ENABLE_COVERAGE)
-nnfw_include(ExtendCMakeFunction)
-
-set(NNFW_SOURCE_ROOT "${CMAKE_SOURCE_DIR}/../..")
-
-add_library(nnfw-header INTERFACE)
-target_include_directories(nnfw-header INTERFACE ${NNFW_SOURCE_ROOT}/runtimes/include)
-
-# TODO Support android build via fine-control for each component
-# - Introduce BUILD_CONTRIB option
-# - Set "BUILD_TFLITE_BENCHMARK_MODEL" as OFF for android build
-#
-# The original android build script (for future reference)
-#
-# add_subdirectory(libs)
-# add_subdirectory(tests/tools/nnapi_test)
-# add_subdirectory(tests/tools/tflite_benchmark)
-# add_subdirectory(tests/nnapi)
-#
-# add_subdirectory(runtimes)
+nnas_include(AddSubdirectories)
-add_subdirectory(${NNFW_SOURCE_ROOT}/runtimes/contrib contrib)
-add_subdirectory(${NNFW_SOURCE_ROOT}/runtimes/libs libs)
-add_subdirectory(${NNFW_SOURCE_ROOT}/runtimes runtimes)
-add_subdirectory(${NNFW_SOURCE_ROOT}/tests tests)
-add_subdirectory(${NNFW_SOURCE_ROOT}/tools tools)
+add_subdirectory(${NNAS_PROJECT_SOURCE_DIR}/compute compute)
+add_subdirectory(${NNAS_PROJECT_SOURCE_DIR}/runtime runtime)
+add_subdirectory(${NNAS_PROJECT_SOURCE_DIR}/tests tests)
+add_subdirectory(${NNAS_PROJECT_SOURCE_DIR}/tools tools)
diff --git a/infra/nnfw/cmake/ApplyCompileFlags.cmake b/infra/nnfw/cmake/ApplyCompileFlags.cmake
index 5098fd0f3..b042b0c42 100644
--- a/infra/nnfw/cmake/ApplyCompileFlags.cmake
+++ b/infra/nnfw/cmake/ApplyCompileFlags.cmake
@@ -4,13 +4,13 @@
# flags for build type: debug, release
set(CMAKE_C_FLAGS_DEBUG "-O0 -g -DDEBUG")
set(CMAKE_CXX_FLAGS_DEBUG "-O0 -g -DDEBUG")
-set(CMAKE_C_FLAGS_RELEASE "-O2 -DNDEBUG")
-set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG")
+set(CMAKE_C_FLAGS_RELEASE "-O3 -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELEASE "-O3 -DNDEBUG")
#
# Platform specific compile flag setting
#
-include("cmake/buildtool/config/config_${TARGET_PLATFORM}.cmake")
+include("${CMAKE_CURRENT_LIST_DIR}/buildtool/config/config_${TARGET_PLATFORM}.cmake")
#
# Apply compile flags
diff --git a/infra/nnfw/cmake/CfgOptionFlags.cmake b/infra/nnfw/cmake/CfgOptionFlags.cmake
index d431f30a7..450aa21ab 100644
--- a/infra/nnfw/cmake/CfgOptionFlags.cmake
+++ b/infra/nnfw/cmake/CfgOptionFlags.cmake
@@ -2,64 +2,84 @@ include(CMakeDependentOption)
#
# Platfor specific configuration
-# note: this should be placed before platform common setting for option setting priority
+# note: this should be placed before default setting for option setting priority
# (platform specific setting have higher priority)
#
include("cmake/options/options_${TARGET_PLATFORM}.cmake")
#
-# Build configuration for project
+# Default build configuration for project
#
+option(ENABLE_STRICT_BUILD "Treat warning as error" ON)
+option(ENABLE_COVERAGE "Build for coverage test" OFF)
option(BUILD_EXT_MULTITHREAD "Build external build using multi thread" ON)
-option(BUILD_NEURUN "Build neurun" ON)
+option(BUILD_ONERT "Build onert" ON)
option(BUILD_LOGGING "Build logging runtime" ON)
-option(BUILD_PURE_ARM_COMPUTE "Build pure_arm_compute runtime" OFF)
CMAKE_DEPENDENT_OPTION(BUILD_RUNTIME_NNAPI_TEST "Build Runtime NN API Generated Test"
# Set BUILD_RUNTIME_NNAPI_TEST as ON
# if CMAKE_COMPILER_IS_GNUCC AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 6.2
ON "CMAKE_COMPILER_IS_GNUCC;NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 6.2"
# Otherwise set BUILD_RUNTIME_NNAPI_TEST as OFF
OFF)
+option(BUILD_RUNTIME_NNFW_API_TEST "Build Runtime NNFW API Tests" ON)
option(BUILD_TFLITE_RUN "Build tflite-run" ON)
+option(BUILD_TFLITE_VANILLA_RUN "Build tflite-vanilla-run" OFF)
option(BUILD_TFLITE_BENCHMARK_MODEL "Build tflite benchmark model" OFF)
-option(BUILD_NNAPI_QUICKCHECK "Build NN API Quickcheck tools" OFF)
-option(BUILD_TOOLS "Bulid nnfw projects under tools/" ON)
-option(BUILD_TFLITE_ACCURACY "Build tflite accuracy tool" OFF)
+option(BUILD_NNAPI_TEST "Build nnapi_test" ON)
option(BUILD_NNPACKAGE_RUN "Build nnpackge_run" ON)
-option(BUILD_SRCN_KERNEL "Build srcn kernel" OFF)
-option(ENVVAR_NEURUN_CONFIG "Use environment variable for neurun configuration" ON)
+option(BUILD_TFLITE_LOADER "Build TensorFlow Lite loader" ON)
+option(BUILD_CIRCLE_LOADER "Build circle loader" ON)
+option(BUILD_TFLITE_LOADER_TEST_TOOL "Build tflite loader testing tool" ON)
+option(BUILD_WITH_HDF5 "Build test tool with HDF5 library" ON)
+option(GENERATE_RUNTIME_NNAPI_TESTS "Generate NNAPI operation gtest" ON)
+option(ENVVAR_ONERT_CONFIG "Use environment variable for onert configuration" ON)
+option(INSTALL_TEST_SCRIPTS "Install test scripts" ON)
#
-# Build configuration for contrib
+# Default build configuration for contrib
#
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
+option(BUILD_ANDROID_BENCHMARK_APP "Enable Android Benchmark App" OFF)
option(BUILD_BENCHMARK_ACL "Build ARM Compute Library Benchmarks" OFF)
option(BUILD_DETECTION_APP "Build detection example app" OFF)
+option(BUILD_HEAP_TRACE "Build heap trace tool" OFF)
option(BUILD_LABS "Build lab projects" OFF)
+option(BUILD_STYLE_TRANSFER_APP "Build style transfer app" OFF)
option(BUILD_TFLITE_TEST "Build tensorflow lite test" OFF)
option(BUILD_TFLITE_CLASSIFY_APP "Build tflite_classify app" OFF)
option(BUILD_UBEN "Build micro-benchmark (uben) suite" OFF)
option(BUILD_MLAPSE "Build mlapse benchmark toolkit" OFF)
-option(BUILD_TFLITE_LOADER "Build tensorflow lite file loader" ON)
-option(BUILD_TFLITE_LOADER_TEST_TOOL "Build tflite loader testing tool" ON)
+#
+# Default build configuration for tools
+#
option(BUILD_KBENCHMARK "Build kernel benchmark tool" OFF)
+option(BUILD_OPENCL_TOOL "Build OpenCL tool" OFF)
+option(BUILD_NNAPI_QUICKCHECK "Build NN API Quickcheck tools" OFF)
+option(BUILD_TFLITE_ACCURACY "Build tflite accuracy tool" OFF)
#
-# Download configuration
+# Default external libraries source download and build configuration
#
option(DOWNLOAD_TENSORFLOW "Download Tensorflow source" ON)
-option(DOWNLOAD_ABSL "Download Absl source" ON)
+option(DOWNLOAD_ABSEIL "Download Abseil source" ON)
option(DOWNLOAD_EIGEN "Download Eigen source" ON)
option(DOWNLOAD_FARMHASH "Download farmhash source" ON)
option(DOWNLOAD_GEMMLOWP "Download GEMM low precesion library source" ON)
option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" ON)
-option(DOWNLOAD_NNPACK "Download NNPACK source" ON)
option(DOWNLOAD_FLATBUFFERS "Download FlatBuffers source" ON)
-option(BUILD_TENSORFLOW_LITE "Build TensorFlow Lite from the downloaded source" ON)
option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
option(DOWNLOAD_NONIUS "Download nonius source" ON)
option(DOWNLOAD_BOOST "Download boost source" OFF)
+option(DOWNLOAD_RUY "Download ruy source" ON)
+option(DOWNLOAD_CPUINFO "Download cpuinfo source" ON)
+option(DOWNLOAD_GTEST "Download Google Test source and build Google Test" ON)
option(BUILD_BOOST "Build boost source" OFF)
+option(BUILD_TENSORFLOW_LITE "Build TensorFlow Lite from the downloaded source" ON)
+option(BUILD_TENSORFLOW_LITE_2_3_0 "Build TensorFlow Lite 2.3.0 from the downloaded source" OFF)
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
+option(BUILD_RUY "Build ruy library from the downloaded source" ON)
+option(BUILD_CPUINFO "Build cpuinfo library from the downloaded source" ON)
+option(PROFILE_RUY "Enable ruy library profiling" OFF)
+
#
-# GTest support
+## Default sample build configuration
#
-option(BUILD_GTEST "Download and build Google Test" ON)
-option(ENABLE_STRICT_BUILD "Treat warning as error" ON)
-option(ENABLE_COVERAGE "Build for coverage test" OFF)
+option(BUILD_MINIMAL_SAMPLE "Build minimal app" OFF)
diff --git a/infra/nnfw/cmake/buildtool/config/config_arm64-android.cmake b/infra/nnfw/cmake/buildtool/config/config_aarch64-android.cmake
index 037541c58..e0c81dee7 100644
--- a/infra/nnfw/cmake/buildtool/config/config_arm64-android.cmake
+++ b/infra/nnfw/cmake/buildtool/config/config_aarch64-android.cmake
@@ -3,7 +3,7 @@ include("cmake/buildtool/config/config_linux.cmake")
# On Android, pthread is contained in bionic(libc)
set(LIB_PTHREAD "")
-# SIMD for arm64
+# SIMD for aarch64
set(FLAGS_COMMON ${FLAGS_COMMON}
"-ftree-vectorize"
)
diff --git a/infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake b/infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake
index fa1e8b078..0f304ecf3 100644
--- a/infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake
+++ b/infra/nnfw/cmake/buildtool/config/config_aarch64-tizen.cmake
@@ -4,6 +4,10 @@
message(STATUS "Building for AARCH64 Tizen")
+# Build flag for tizen
+set(CMAKE_C_FLAGS_DEBUG "-O -g -DDEBUG")
+set(CMAKE_CXX_FLAGS_DEBUG "-O -g -DDEBUG")
+
# TODO : add and use option_tizen if something uncommon comes up
# include linux common
include("cmake/buildtool/config/config_linux.cmake")
diff --git a/infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake b/infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake
index 6d6459f0f..8963f3008 100644
--- a/infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake
+++ b/infra/nnfw/cmake/buildtool/config/config_armv7l-linux.cmake
@@ -5,14 +5,13 @@
message(STATUS "Building for ARMv7l Linux")
# include linux common
-include("cmake/buildtool/config/config_linux.cmake")
+include("${CMAKE_CURRENT_LIST_DIR}/config_linux.cmake")
# addition for arm-linux
set(FLAGS_COMMON ${FLAGS_COMMON}
"-mcpu=cortex-a7"
"-mfloat-abi=hard"
"-mfpu=neon-vfpv4"
- "-funsafe-math-optimizations"
"-ftree-vectorize"
"-mfp16-format=ieee"
)
diff --git a/infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake b/infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake
index 0bbe0ddca..fa12ecb55 100644
--- a/infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake
+++ b/infra/nnfw/cmake/buildtool/config/config_armv7l-tizen.cmake
@@ -4,13 +4,17 @@
message(STATUS "Building for ARMv7l(softfp) Tizen")
+# Build flag for tizen
+set(CMAKE_C_FLAGS_DEBUG "-O -g -DDEBUG")
+set(CMAKE_CXX_FLAGS_DEBUG "-O -g -DDEBUG")
+
# TODO : add and use option_tizen if something uncommon comes up
# include linux common
include("cmake/buildtool/config/config_linux.cmake")
# addition for arm-linux
set(FLAGS_COMMON ${FLAGS_COMMON}
- "-mcpu=cortex-a8"
+ "-mtune=cortex-a8"
"-mfloat-abi=softfp"
"-mfpu=neon-vfpv4"
"-funsafe-math-optimizations"
diff --git a/infra/nnfw/cmake/buildtool/config/config_linux.cmake b/infra/nnfw/cmake/buildtool/config/config_linux.cmake
index a17bbb2bd..86dd0f217 100644
--- a/infra/nnfw/cmake/buildtool/config/config_linux.cmake
+++ b/infra/nnfw/cmake/buildtool/config/config_linux.cmake
@@ -2,20 +2,6 @@
# linux common compile options
#
-# test-coverage build flag
-if("${COVERAGE_BUILD}" STREQUAL "1")
- set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE ON)
- set(FLAGS_COMMON "${FLAGS_COMMON} -fprofile-arcs -ftest-coverage")
- set(CMAKE_EXE_LINKER_FLAGS
- "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage")
- set(CMAKE_C_FLAGS_DEBUG "-O -g -DDEBUG")
- set(CMAKE_CXX_FLAGS_DEBUG "-O -g -DDEBUG")
-endif()
-
-#
-# linux common variable and settings
-#
-
# remove warning from arm cl
# https://github.com/ARM-software/ComputeLibrary/issues/330
set(GCC_VERSION_DISABLE_WARNING 6.0)
@@ -26,5 +12,10 @@ if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER GCC_VERSION_DISABLE_WARNING)
)
endif()
+# Disable annoying ABI compatibility warning.
+if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 7.0)
+ list(APPEND FLAGS_CXXONLY "-Wno-psabi")
+endif()
+
# lib pthread as a variable (pthread must be disabled on android)
set(LIB_PTHREAD pthread)
diff --git a/infra/nnfw/cmake/buildtool/config/config_x86_64-darwin.cmake b/infra/nnfw/cmake/buildtool/config/config_x86_64-darwin.cmake
new file mode 100644
index 000000000..dbd45fc03
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_x86_64-darwin.cmake
@@ -0,0 +1,12 @@
+#
+# x86_64 darwin(macOS) compile options
+#
+message(STATUS "Building for x86-64 Darwin")
+
+# SIMD for x86
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ "-msse4"
+ )
+
+# lib pthread as a variable (pthread must be disabled on android)
+set(LIB_PTHREAD pthread)
diff --git a/infra/nnfw/cmake/buildtool/config/config_x86_64-tizen.cmake b/infra/nnfw/cmake/buildtool/config/config_x86_64-tizen.cmake
new file mode 100644
index 000000000..0f304ecf3
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/config/config_x86_64-tizen.cmake
@@ -0,0 +1,17 @@
+#
+# aarch64 tizen compile options
+#
+
+message(STATUS "Building for AARCH64 Tizen")
+
+# Build flag for tizen
+set(CMAKE_C_FLAGS_DEBUG "-O -g -DDEBUG")
+set(CMAKE_CXX_FLAGS_DEBUG "-O -g -DDEBUG")
+
+# TODO : add and use option_tizen if something uncommon comes up
+# include linux common
+include("cmake/buildtool/config/config_linux.cmake")
+
+# addition for aarch64-tizen
+set(FLAGS_COMMON ${FLAGS_COMMON}
+ )
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-android.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-android.cmake
new file mode 100644
index 000000000..99cc32a4a
--- /dev/null
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-android.cmake
@@ -0,0 +1,38 @@
+# A workaround for accessing to NDK_DIR. This works since Env Vars are always accessible
+# while cache variables are not
+if (NDK_DIR)
+ set(ENV{_NDK_DIR} "${NDK_DIR}")
+else (NDK_DIR)
+ set(NDK_DIR "$ENV{_NDK_DIR}")
+endif (NDK_DIR)
+
+if(NOT DEFINED NDK_DIR)
+ message(FATAL_ERROR "NDK_DIR should be specified via cmake argument")
+endif(NOT DEFINED NDK_DIR)
+
+set(ANDROID_ABI arm64-v8a)
+set(ANDROID_API_LEVEL 29)
+set(ANDROID_PLATFORM android-${ANDROID_API_LEVEL})
+set(ANDROID_STL c++_shared)
+set(ANDROID_STL_LIB "${NDK_DIR}/sources/cxx-stl/llvm-libc++/libs/arm64-v8a/libc++_shared.so")
+
+# Find package in the host. `nnfw_find_package` won't work without this
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE NEVER)
+# Find library in the host. Necessary for `add_library` searching in `out/lib` dir.
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY NEVER)
+
+# Use the toolchain file that NDK provides
+include(${NDK_DIR}/build/cmake/android.toolchain.cmake)
+
+# Install libc++_shared.so to lib folder
+install(FILES ${ANDROID_STL_LIB} DESTINATION lib)
+
+# ndk always enable debug flag -g, but we don't want debug info in release build
+# https://github.com/android/ndk/issues/243
+string(REPLACE "-g" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
+string(REPLACE "-g" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+set(CMAKE_C_FLAGS_DEBUG "-g ${CMAKE_C_FLAGS_DEBUG}")
+set(CMAKE_CXX_FLAGS_DEBUG "-g ${CMAKE_CXX_FLAGS_DEBUG}")
+
+set(TARGET_OS "android")
+set(TARGET_ARCH "aarch64")
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake
index 2d1a08d2b..3356aa72d 100644
--- a/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-linux.cmake
@@ -10,9 +10,9 @@ set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)
set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)
# where is the target environment
-set(NNFW_SOURCE_ROOT "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
-set(ROOTFS_AARCH64 "${NNFW_SOURCE_ROOT}/tools/cross/rootfs/arm64")
-include("${NNFW_SOURCE_ROOT}/infra/nnfw/cmake/modules/OptionTools.cmake")
+set(NNAS_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
+set(ROOTFS_AARCH64 "${NNAS_PROJECT_SOURCE_DIR}/tools/cross/rootfs/aarch64")
+include("${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/modules/OptionTools.cmake")
envoption(ROOTFS_DIR ${ROOTFS_AARCH64})
if(NOT EXISTS "${ROOTFS_DIR}/lib/aarch64-linux-gnu")
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake
index 9e1cf3b11..4d5d7ac56 100644
--- a/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_aarch64-tizen.cmake
@@ -6,15 +6,15 @@ include(CMakeForceCompiler)
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR aarch64)
-set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc-5)
-set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++-5)
+set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)
+set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)
set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/6.2.1")
# where is the target environment
-set(NNFW_SOURCE_ROOT "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
-set(ROOTFS_AARCH64 "${NNFW_SOURCE_ROOT}/tools/cross/rootfs/arm64")
-include("${NNFW_SOURCE_ROOT}/infra/nnfw/cmake/modules/OptionTools.cmake")
+set(NNAS_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
+set(ROOTFS_AARCH64 "${NNAS_PROJECT_SOURCE_DIR}/tools/cross/rootfs/aarch64")
+include("${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/modules/OptionTools.cmake")
envoption(ROOTFS_DIR ${ROOTFS_AARCH64})
if(NOT EXISTS "${ROOTFS_DIR}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake
deleted file mode 100644
index c0c707dd3..000000000
--- a/infra/nnfw/cmake/buildtool/cross/toolchain_arm64-android.cmake
+++ /dev/null
@@ -1,44 +0,0 @@
-set(ANDROID_STANDALONE $ENV{ROOTFS_DIR})
-set(CROSS_NDK_TOOLCHAIN ${ANDROID_STANDALONE}/bin)
-set(CROSS_ROOTFS ${ANDROID_STANDALONE}/sysroot)
-
-set(CMAKE_SYSTEM_NAME Linux)
-set(CMAKE_SYSTEM_VERSION 1)
-set(CMAKE_SYSTEM_PROCESSOR aarch64)
-
-## Specify the toolchain
-set(TOOLCHAIN "aarch64-linux-android")
-set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
-set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
-
-find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
-find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
-find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
-find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
-find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
-
-add_compile_options(--sysroot=${CROSS_ROOTFS})
-add_compile_options(-fPIE)
-
-## Needed for Android or bionic specific conditionals
-#add_compile_options(-D__ANDROID__)
-#add_compile_options(-D__BIONIC__)
-
-## NOTE Not sure this is safe. This may cause side effects.
-## Without this, it cannot find `std::stol`, `std::stoi` and so on, with android toolchain
-add_compile_options(-D_GLIBCXX_USE_C99=1)
-
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
-set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
-
-set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
-
-set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake
index 8e3619879..8f2cb6735 100644
--- a/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-linux.cmake
@@ -2,7 +2,6 @@
# config for arm-linux
#
include(CMakeForceCompiler)
-include("${CMAKE_CURRENT_LIST_DIR}/../../modules/OptionTools.cmake")
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR armv7l)
@@ -11,9 +10,11 @@ set(CMAKE_C_COMPILER arm-linux-gnueabihf-gcc)
set(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)
# where is the target environment
-set(ROOTFS_ARM "${CMAKE_CURRENT_LIST_DIR}/../../../../../tools/cross/rootfs/arm")
-envoption(ROOTFS_DIR ${ROOTFS_ARM})
+set(NNAS_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
+set(ROOTFS_ARM "${NNAS_PROJECT_SOURCE_DIR}/tools/cross/rootfs/arm")
+include("${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/modules/OptionTools.cmake")
+envoption(ROOTFS_DIR ${ROOTFS_ARM})
if(NOT EXISTS "${ROOTFS_DIR}/lib/arm-linux-gnueabihf")
message(FATAL_ERROR "Please prepare RootFS for ARM")
endif()
diff --git a/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake
index eb8e63832..72513cdc1 100644
--- a/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake
+++ b/infra/nnfw/cmake/buildtool/cross/toolchain_armv7l-tizen.cmake
@@ -12,9 +12,9 @@ set(CMAKE_CXX_COMPILER arm-linux-gnueabi-g++)
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
# where is the target environment
-set(NNFW_SOURCE_ROOT "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
-set(ROOTFS_ARM "${NNFW_SOURCE_ROOT}/tools/cross/rootfs/armel")
-include("${NNFW_SOURCE_ROOT}/infra/nnfw/cmake/modules/OptionTools.cmake")
+set(NNAS_PROJECT_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../../..")
+set(ROOTFS_ARM "${NNAS_PROJECT_SOURCE_DIR}/tools/cross/rootfs/armel")
+include("${NNAS_PROJECT_SOURCE_DIR}/infra/cmake/modules/OptionTools.cmake")
envoption(ROOTFS_DIR ${ROOTFS_ARM})
if(NOT EXISTS "${ROOTFS_DIR}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
diff --git a/infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake b/infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake
deleted file mode 100644
index 06b7c768d..000000000
--- a/infra/nnfw/cmake/modules/ExtendCMakeFunction.cmake
+++ /dev/null
@@ -1,27 +0,0 @@
-function(list_subdirectories OUTPUT_VARIABLE)
- cmake_parse_arguments(ARG "" "" "EXCLUDES" ${ARGN})
-
- file(GLOB PROJECT_FILES
- RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
- "*/CMakeLists.txt")
-
- foreach(PROJECT_FILE IN ITEMS ${PROJECT_FILES})
- get_filename_component(PROJECT_DIR ${PROJECT_FILE} DIRECTORY)
- list(FIND ARG_EXCLUDES ${PROJECT_DIR} PROJECT_INDEX)
- if(${PROJECT_INDEX} EQUAL -1)
- list(APPEND PROJECT_LIST ${PROJECT_DIR})
- endif(${PROJECT_INDEX} EQUAL -1)
- endforeach(PROJECT_FILE)
-
- set(${OUTPUT_VARIABLE} ${PROJECT_LIST} PARENT_SCOPE)
-endfunction(list_subdirectories)
-
-function(add_subdirectories)
- cmake_parse_arguments(ARG "" "" "EXCLUDES" ${ARGN})
-
- list_subdirectories(PROJECT_DIRS EXCLUDES ${ARG_EXCLUDES})
-
- foreach(PROJECT_DIR IN ITEMS ${PROJECT_DIRS})
- add_subdirectory(${PROJECT_DIR})
- endforeach(PROJECT_DIR)
-endfunction(add_subdirectories)
diff --git a/infra/nnfw/cmake/modules/ExternalSourceTools.cmake b/infra/nnfw/cmake/modules/ExternalSourceTools.cmake
deleted file mode 100644
index 01a0c4d00..000000000
--- a/infra/nnfw/cmake/modules/ExternalSourceTools.cmake
+++ /dev/null
@@ -1,102 +0,0 @@
-function(ExternalSource_Download PREFIX URL)
- get_filename_component(FILENAME ${URL} NAME)
-
- set(CACHE_DIR "${NNFW_EXTERNALS_DIR}")
- set(OUT_DIR "${CACHE_DIR}/${PREFIX}")
- set(TMP_DIR "${CACHE_DIR}/${PREFIX}-tmp")
-
- set(DOWNLOAD_PATH "${CACHE_DIR}/${PREFIX}-${FILENAME}")
- set(STAMP_PATH "${CACHE_DIR}/${PREFIX}.stamp")
-
- if(NOT EXISTS "${CACHE_DIR}")
- file(MAKE_DIRECTORY "${CACHE_DIR}")
- endif(NOT EXISTS "${CACHE_DIR}")
-
- # TODO Check MD5 for correctness
- set(MATCH_URL FALSE)
- if(EXISTS "${STAMP_PATH}")
- file(READ "${STAMP_PATH}" SAVED_URL)
- if("${SAVED_URL}" STREQUAL "${URL}")
- set(MATCH_URL TRUE)
- endif("${SAVED_URL}" STREQUAL "${URL}")
- endif(EXISTS "${STAMP_PATH}")
-
- if(NOT EXISTS "${STAMP_PATH}" OR NOT EXISTS "${OUT_DIR}" OR NOT MATCH_URL)
- file(REMOVE_RECURSE "${OUT_DIR}")
- file(REMOVE_RECURSE "${TMP_DIR}")
-
- file(MAKE_DIRECTORY "${TMP_DIR}")
-
- message(STATUS "Download ${PREFIX} from ${URL}")
- file(DOWNLOAD ${URL} "${DOWNLOAD_PATH}"
- STATUS status
- LOG log)
-
- list(GET status 0 status_code)
- list(GET status 1 status_string)
-
- if(NOT status_code EQUAL 0)
- message(FATAL_ERROR "error: downloading '${URL}' failed
- status_code: ${status_code}
- status_string: ${status_string}
- log: ${log}")
- endif()
- message(STATUS "Download ${PREFIX} from ${URL} - done")
-
- message(STATUS "Extract ${PREFIX}")
- execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz "${DOWNLOAD_PATH}"
- WORKING_DIRECTORY "${TMP_DIR}")
- file(REMOVE "${DOWNLOAD_PATH}")
- message(STATUS "Extract ${PREFIX} - done")
-
- message(STATUS "Cleanup ${PREFIX}")
- file(GLOB contents "${TMP_DIR}/*")
- list(LENGTH contents n)
- if(NOT n EQUAL 1 OR NOT IS_DIRECTORY "${contents}")
- set(contents "${TMP_DIR}")
- endif()
-
- get_filename_component(contents ${contents} ABSOLUTE)
-
- file(RENAME ${contents} "${OUT_DIR}")
- file(REMOVE_RECURSE "${TMP_DIR}")
- file(WRITE "${STAMP_PATH}" "${URL}")
- message(STATUS "Cleanup ${PREFIX} - done")
- endif()
-
- set(${PREFIX}_SOURCE_DIR "${OUT_DIR}" PARENT_SCOPE)
-endfunction(ExternalSource_Download)
-
-function(ExternalSource_Get PREFIX DOWNLOAD_FLAG URL)
- set(CACHE_DIR "${NNFW_EXTERNALS_DIR}")
- set(OUT_DIR "${CACHE_DIR}/${PREFIX}")
- set(STAMP_PATH "${CACHE_DIR}/${PREFIX}.stamp")
-
- if(NOT EXISTS "${CACHE_DIR}")
- file(MAKE_DIRECTORY "${CACHE_DIR}")
- endif(NOT EXISTS "${CACHE_DIR}")
-
- # TODO Check MD5 for correctness
- set(MATCH_URL FALSE)
- if(EXISTS "${STAMP_PATH}")
- file(READ "${STAMP_PATH}" SAVED_URL)
- if("${SAVED_URL}" STREQUAL "${URL}")
- set(MATCH_URL TRUE)
- endif("${SAVED_URL}" STREQUAL "${URL}")
- endif(EXISTS "${STAMP_PATH}")
-
- set(SOURCE_GET TRUE)
-
- if(NOT EXISTS "${STAMP_PATH}" OR NOT EXISTS "${OUT_DIR}" OR NOT MATCH_URL)
- if(NOT DOWNLOAD_FLAG)
- set(SOURCE_GET FALSE)
- else(NOT DOWNLOAD_FLAG)
- ExternalSource_Download(${PREFIX} ${URL})
- endif(NOT DOWNLOAD_FLAG)
- endif()
-
- set(${PREFIX}_SOURCE_GET ${SOURCE_GET} PARENT_SCOPE)
- set(${PREFIX}_SOURCE_DIR "${OUT_DIR}" PARENT_SCOPE)
-endfunction(ExternalSource_Get)
-
-set(ExternalSourceTools_FOUND TRUE)
diff --git a/infra/nnfw/cmake/modules/OptionTools.cmake b/infra/nnfw/cmake/modules/OptionTools.cmake
deleted file mode 100644
index 066d53078..000000000
--- a/infra/nnfw/cmake/modules/OptionTools.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-function(envoption PREFIX DEFAULT_VALUE)
- set(VALUE ${DEFAULT_VALUE})
-
- if(DEFINED ENV{${PREFIX}})
- set(VALUE $ENV{${PREFIX}})
- endif()
-
- set(${PREFIX} ${VALUE} PARENT_SCOPE)
-endfunction(envoption)
-
-set(OptionTools_FOUND TRUE)
diff --git a/infra/nnfw/cmake/options/options_aarch64-android.cmake b/infra/nnfw/cmake/options/options_aarch64-android.cmake
new file mode 100644
index 000000000..d8eceef35
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_aarch64-android.cmake
@@ -0,0 +1,17 @@
+# aarch64 android cmake options
+#
+# NOTE BUILD_ANDROID_TFLITE(JNI lib) is disabled due to BuiltinOpResolver issue.
+# tensorflow-lite does not build BuiltinOpResolver but JNI lib need it
+# Related Issue : #1403
+option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" ON)
+option(BUILD_ANDROID_BENCHMARK_APP "Enable Android Benchmark App" ON)
+option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
+# Need boost library
+option(DOWNLOAD_BOOST "Download boost source" ON)
+option(BUILD_BOOST "Build boost source" ON)
+option(BUILD_RUNTIME_NNAPI_TEST "Build Runtime NN API Generated Test" OFF)
+option(BUILD_NNAPI_TEST "Build nnapi_test" OFF)
+option(BUILD_NNPACKAGE_RUN "Build nnpackge_run" ON)
+option(BUILD_TFLITE_RUN "Build tflite-run" ON)
+option(BUILD_TFLITE_LOADER_TEST_TOOL "Build tflite loader testing tool" OFF)
+option(BUILD_LOGGING "Build logging runtime" OFF)
diff --git a/infra/nnfw/cmake/options/options_aarch64-linux.cmake b/infra/nnfw/cmake/options/options_aarch64-linux.cmake
index fbe49d421..829711ff8 100644
--- a/infra/nnfw/cmake/options/options_aarch64-linux.cmake
+++ b/infra/nnfw/cmake/options/options_aarch64-linux.cmake
@@ -1,8 +1,4 @@
#
# aarch64 linux cmake options
#
-option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
-option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
-option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
-option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
diff --git a/infra/nnfw/cmake/options/options_aarch64-tizen.cmake b/infra/nnfw/cmake/options/options_aarch64-tizen.cmake
index 66ee34284..57d4c1061 100644
--- a/infra/nnfw/cmake/options/options_aarch64-tizen.cmake
+++ b/infra/nnfw/cmake/options/options_aarch64-tizen.cmake
@@ -1,10 +1,10 @@
#
# aarch64 tizen cmake options
#
-option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
-option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
-option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" OFF)
+option(BUILD_TENSORFLOW_LITE "Build TensorFlow Lite from the downloaded source" OFF)
option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
-option(DOWNLOAD_NNPACK "Download NNPACK library source" OFF)
-option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
-option(ENVVAR_NEURUN_CONFIG "Use environment variable for neurun configuration" OFF)
+
+option(BUILD_LOGGING "Build logging runtime" OFF)
+option(GENERATE_RUNTIME_NNAPI_TESTS "Generate NNAPI operation gtest" OFF)
+option(ENVVAR_ONERT_CONFIG "Use environment variable for onert configuration" OFF)
diff --git a/infra/nnfw/cmake/options/options_arm64-android.cmake b/infra/nnfw/cmake/options/options_arm64-android.cmake
deleted file mode 100644
index 486b3bb96..000000000
--- a/infra/nnfw/cmake/options/options_arm64-android.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-# arm64 android cmake options
-#
-option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
-option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" ON)
-option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
-option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
-
diff --git a/infra/nnfw/cmake/options/options_armv7l-linux.cmake b/infra/nnfw/cmake/options/options_armv7l-linux.cmake
index f06f998d9..e10e573c4 100644
--- a/infra/nnfw/cmake/options/options_armv7l-linux.cmake
+++ b/infra/nnfw/cmake/options/options_armv7l-linux.cmake
@@ -1,8 +1,5 @@
#
# armv7l linux cmake options
#
-option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
-option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
-option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
-option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
+option(BUILD_OPENCL_TOOL "Build OpenCL tool" ON)
diff --git a/infra/nnfw/cmake/options/options_armv7l-tizen.cmake b/infra/nnfw/cmake/options/options_armv7l-tizen.cmake
index 3ba48f332..c27a7ad01 100644
--- a/infra/nnfw/cmake/options/options_armv7l-tizen.cmake
+++ b/infra/nnfw/cmake/options/options_armv7l-tizen.cmake
@@ -1,12 +1,10 @@
#
# armv7l tizen cmake options
#
-option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" ON)
-option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
-option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" ON)
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" OFF)
+option(BUILD_TENSORFLOW_LITE "Build TensorFlow Lite from the downloaded source" OFF)
option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" OFF)
-option(DOWNLOAD_NNPACK "Download NNPACK library source" OFF)
-option(BUILD_TFLITE_LOADER "Build tensorflow lite file loader" ON)
-option(BUILD_TFLITE_LOADER_TEST_TOOL "Build tflite loader testing tool" OFF)
-option(BUILD_SRCN_KERNEL "Build srcn kernel" ON)
-option(ENVVAR_NEURUN_CONFIG "Use environment variable for neurun configuration" OFF)
+
+option(BUILD_LOGGING "Build logging runtime" OFF)
+option(GENERATE_RUNTIME_NNAPI_TESTS "Generate NNAPI operation gtest" OFF)
+option(ENVVAR_ONERT_CONFIG "Use environment variable for onert configuration" OFF)
diff --git a/infra/nnfw/cmake/options/options_x86_64-darwin.cmake b/infra/nnfw/cmake/options/options_x86_64-darwin.cmake
new file mode 100644
index 000000000..97642e6ce
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_x86_64-darwin.cmake
@@ -0,0 +1,5 @@
+#
+# x86_64 linux cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" OFF)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" OFF)
diff --git a/infra/nnfw/cmake/options/options_x86_64-linux.cmake b/infra/nnfw/cmake/options/options_x86_64-linux.cmake
index 179e5bdb2..97642e6ce 100644
--- a/infra/nnfw/cmake/options/options_x86_64-linux.cmake
+++ b/infra/nnfw/cmake/options/options_x86_64-linux.cmake
@@ -2,6 +2,4 @@
# x86_64 linux cmake options
#
option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" OFF)
-option(BUILD_ANDROID_TFLITE "Enable android support for TensorFlow Lite" OFF)
option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" OFF)
-option(DOWNLOAD_NEON2SSE "Download NEON2SSE library source" ON)
diff --git a/infra/nnfw/cmake/options/options_x86_64-tizen.cmake b/infra/nnfw/cmake/options/options_x86_64-tizen.cmake
new file mode 100644
index 000000000..bf8b2809e
--- /dev/null
+++ b/infra/nnfw/cmake/options/options_x86_64-tizen.cmake
@@ -0,0 +1,10 @@
+#
+# x86_64 linux cmake options
+#
+option(BUILD_ARMCOMPUTE "Build ARM Compute from the downloaded source" OFF)
+option(BUILD_TENSORFLOW_LITE "Build TensorFlow Lite from the downloaded source" OFF)
+option(DOWNLOAD_ARMCOMPUTE "Download ARM Compute source" OFF)
+
+option(BUILD_LOGGING "Build logging runtime" OFF)
+option(GENERATE_RUNTIME_NNAPI_TESTS "Generate NNAPI operation gtest" OFF)
+option(ENVVAR_ONERT_CONFIG "Use environment variable for onert configuration" OFF)
diff --git a/infra/nnfw/cmake/packages/ARMCompute/SConstruct b/infra/nnfw/cmake/packages/ARMCompute/SConstruct
deleted file mode 100644
index 9c0e4a84e..000000000
--- a/infra/nnfw/cmake/packages/ARMCompute/SConstruct
+++ /dev/null
@@ -1,309 +0,0 @@
-# Copyright (c) 2016, 2017 ARM Limited.
-#
-# SPDX-License-Identifier: MIT
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to
-# deal in the Software without restriction, including without limitation the
-# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-# sell copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-import SCons
-import os
-import subprocess
-
-def version_at_least(version, required):
- end = min(len(version), len(required))
-
- for i in range(0, end, 2):
- if int(version[i]) < int(required[i]):
- return False
- elif int(version[i]) > int(required[i]):
- return True
-
- return True
-
-vars = Variables("scons")
-vars.AddVariables(
- BoolVariable("debug", "Debug", False),
- BoolVariable("asserts", "Enable asserts (this flag is forced to 1 for debug=1)", False),
- BoolVariable("logging", "Logging (this flag is forced to 1 for debug=1)", False),
- EnumVariable("arch", "Target Architecture", "armv7a", allowed_values=("armv7a", "arm64-v8a", "arm64-v8.2-a", "arm64-v8.2-a-sve", "x86_32", "x86_64")),
- EnumVariable("os", "Target OS", "linux", allowed_values=("linux", "android", "tizen", "bare_metal")),
- EnumVariable("build", "Build type", "cross_compile", allowed_values=("native", "cross_compile", "embed_only")),
- BoolVariable("examples", "Build example programs", True),
- BoolVariable("Werror", "Enable/disable the -Werror compilation flag", True),
- BoolVariable("standalone", "Builds the tests as standalone executables, links statically with libgcc, libstdc++ and libarm_compute", False),
- BoolVariable("opencl", "Enable OpenCL support", True),
- BoolVariable("neon", "Enable Neon support", False),
- BoolVariable("gles_compute", "Enable OpenGL ES Compute Shader support", False),
- BoolVariable("embed_kernels", "Embed OpenCL kernels and OpenGL ES compute shaders in library binary", True),
- BoolVariable("set_soname", "Set the library's soname and shlibversion (requires SCons 2.4 or above)", False),
- BoolVariable("openmp", "Enable OpenMP backend", False),
- BoolVariable("cppthreads", "Enable C++11 threads backend", True),
- PathVariable("build_dir", "Specify sub-folder for the build", ".", PathVariable.PathAccept),
- PathVariable("install_dir", "Specify sub-folder for the install", "", PathVariable.PathAccept),
- ("extra_cxx_flags", "Extra CXX flags to be appended to the build command", ""),
- ("extra_link_flags", "Extra LD flags to be appended to the build command", ""),
- ("compiler_cache", "Command to prefix to the C and C++ compiler (e.g ccache)", "")
-)
-
-env = Environment(platform="posix", variables=vars, ENV = os.environ)
-build_path = env['build_dir']
-# If build_dir is a relative path then add a #build/ prefix:
-if not env['build_dir'].startswith('/'):
- SConsignFile('build/%s/.scons' % build_path)
- build_path = "#build/%s" % build_path
-else:
- SConsignFile('%s/.scons' % build_path)
-
-install_path = env['install_dir']
-#If the install_dir is a relative path then assume it's from inside build_dir
-if not env['install_dir'].startswith('/') and install_path != "":
- install_path = "%s/%s" % (build_path, install_path)
-
-env.Append(LIBPATH = [build_path])
-Export('env')
-Export('vars')
-
-def install_lib( lib ):
- # If there is no install folder, then there is nothing to do:
- if install_path == "":
- return lib
- return env.Install( "%s/lib/" % install_path, lib)
-def install_bin( bin ):
- # If there is no install folder, then there is nothing to do:
- if install_path == "":
- return bin
- return env.Install( "%s/bin/" % install_path, bin)
-def install_include( inc ):
- if install_path == "":
- return inc
- return env.Install( "%s/include/" % install_path, inc)
-
-Export('install_lib')
-Export('install_bin')
-
-Help(vars.GenerateHelpText(env))
-
-if env['build'] == "embed_only":
- SConscript('./SConscript', variant_dir=build_path, duplicate=0)
- Return()
-
-if env['neon'] and 'x86' in env['arch']:
- print("Cannot compile NEON for x86")
- Exit(1)
-
-if env['set_soname'] and not version_at_least(SCons.__version__, "2.4"):
- print("Setting the library's SONAME / SHLIBVERSION requires SCons 2.4 or above")
- print("Update your version of SCons or use set_soname=0")
- Exit(1)
-
-if env['os'] == 'bare_metal':
- if env['cppthreads'] or env['openmp']:
- print("ERROR: OpenMP and C++11 threads not supported in bare_metal. Use cppthreads=0 openmp=0")
- Exit(1)
-
-env.Append(CXXFLAGS = ['-Wno-deprecated-declarations','-Wall','-DARCH_ARM',
- '-Wextra','-Wno-unused-parameter','-pedantic','-Wdisabled-optimization','-Wformat=2',
- '-Winit-self','-Wstrict-overflow=2','-Wswitch-default',
- '-fpermissive','-std=gnu++11','-Wno-vla','-Woverloaded-virtual',
- '-Wctor-dtor-privacy','-Wsign-promo','-Weffc++','-Wno-format-nonliteral','-Wno-overlength-strings','-Wno-strict-overflow'])
-
-env.Append(CPPDEFINES = ['_GLIBCXX_USE_NANOSLEEP'])
-
-default_cpp_compiler = 'g++' if env['os'] != 'android' else 'clang++'
-default_c_compiler = 'gcc' if env['os'] != 'android' else 'clang'
-cpp_compiler = os.environ.get('CXX', default_cpp_compiler)
-c_compiler = os.environ.get('CC', default_c_compiler)
-
-if env['os'] == 'android' and ( 'clang++' not in cpp_compiler or 'clang' not in c_compiler ):
- print( "WARNING: Only clang is officially supported to build the Compute Library for Android")
-
-if 'clang++' in cpp_compiler:
- env.Append(CXXFLAGS = ['-Wno-format-nonliteral','-Wno-deprecated-increment-bool','-Wno-vla-extension','-Wno-mismatched-tags'])
-else:
- env.Append(CXXFLAGS = ['-Wlogical-op','-Wnoexcept','-Wstrict-null-sentinel','-Wno-implicit-fallthrough'])
-
-if env['cppthreads']:
- env.Append(CPPDEFINES = [('ARM_COMPUTE_CPP_SCHEDULER', 1)])
-
-if env['openmp']:
- if 'clang++' in cpp_compiler:
- print( "Clang does not support OpenMP. Use scheduler=cpp.")
- Exit(1)
-
- env.Append(CPPDEFINES = [('ARM_COMPUTE_OPENMP_SCHEDULER', 1)])
- env.Append(CXXFLAGS = ['-fopenmp'])
- env.Append(LINKFLAGS = ['-fopenmp'])
-
-prefix = ""
-if env['arch'] == 'armv7a':
- env.Append(CXXFLAGS = ['-march=armv7-a', '-mthumb', '-mfpu=neon'])
-
- if env['os'] == 'linux':
- prefix = "arm-linux-gnueabihf-"
- env.Append(CXXFLAGS = ['-mfloat-abi=hard'])
- elif env['os'] == 'bare_metal':
- prefix = "arm-eabi-"
- env.Append(CXXFLAGS = ['-mfloat-abi=hard'])
- elif env['os'] == 'android':
- prefix = "arm-linux-androideabi-"
- env.Append(CXXFLAGS = ['-mfloat-abi=softfp'])
- elif env['os'] == 'tizen':
- prefix = "arm-linux-gnueabi-"
- env.Append(CXXFLAGS = ['-mfloat-abi=softfp'])
-elif env['arch'] == 'arm64-v8a':
- env.Append(CXXFLAGS = ['-march=armv8-a'])
- env.Append(CPPDEFINES = ['ARM_COMPUTE_AARCH64_V8A','NO_DOT_IN_TOOLCHAIN'])
- if env['os'] == 'linux':
- prefix = "aarch64-linux-gnu-"
- elif env['os'] == 'bare_metal':
- prefix = "aarch64-elf-"
- elif env['os'] == 'android':
- prefix = "aarch64-linux-android-"
- elif env['os'] == 'tizen':
- prefix = "aarch64-linux-gnu-"
- if 'clang++' in cpp_compiler:
- env.Append(CXXFLAGS = ['-no-integrated-as'])
-elif 'arm64-v8.2-a' in env['arch']:
- if env['arch'] == 'arm64-v8.2-a-sve':
- if env['os'] != 'bare_metal':
- print("Only bare metal SVE is supported at the moment")
- Exit(1)
- env.Append(CXXFLAGS = ['-march=armv8.2-a+sve+fp16+dotprod'])
- else:
- env.Append(CXXFLAGS = ['-march=armv8.2-a+fp16']) # explicitly enable fp16 extension otherwise __ARM_FEATURE_FP16_VECTOR_ARITHMETIC is undefined
- if env['os'] == 'linux':
- prefix = "aarch64-linux-gnu-"
- elif env['os'] == 'bare_metal':
- prefix = "aarch64-elf-"
- elif env['os'] == 'android':
- prefix = "aarch64-linux-android-"
- elif env['os'] == 'tizen':
- prefix = "aarch64-linux-gnu-"
- env.Append(CPPDEFINES = ['ARM_COMPUTE_AARCH64_V8_2','NO_DOT_IN_TOOLCHAIN'])
- if 'clang++' in cpp_compiler:
- env.Append(CXXFLAGS = ['-no-integrated-as'])
-elif env['arch'] == 'x86_32':
- env.Append(CCFLAGS = ['-m32'])
- env.Append(LINKFLAGS = ['-m32'])
-elif env['arch'] == 'x86_64':
- env.Append(CCFLAGS = ['-m64'])
- env.Append(LINKFLAGS = ['-m64'])
-
-if env['build'] == 'native':
- prefix = ""
-
-env['CC'] = env['compiler_cache']+" "+prefix + c_compiler
-env['CXX'] = env['compiler_cache']+" "+prefix + cpp_compiler
-env['LD'] = prefix + "ld"
-env['AS'] = prefix + "as"
-env['AR'] = prefix + "ar"
-env['RANLIB'] = prefix + "ranlib"
-
-if not GetOption("help"):
- try:
- compiler_ver = subprocess.check_output(env['CXX'].split() + ["-dumpversion"]).strip()
- except OSError:
- print("ERROR: Compiler '%s' not found" % env['CXX'])
- Exit(1)
-
- if 'clang++' not in cpp_compiler:
- if env['arch'] == 'arm64-v8.2-a' and not version_at_least(compiler_ver, '6.2.1'):
- print("GCC 6.2.1 or newer is required to compile armv8.2-a code")
- Exit(1)
- elif env['arch'] == 'arm64-v8a' and not version_at_least(compiler_ver, '4.9'):
- print("GCC 4.9 or newer is required to compile NEON code for AArch64")
- Exit(1)
-
- if version_at_least(compiler_ver, '6.1'):
- env.Append(CXXFLAGS = ['-Wno-ignored-attributes'])
-
- if compiler_ver == '4.8.3':
- env.Append(CXXFLAGS = ['-Wno-array-bounds'])
-
-if env['standalone']:
- env.Append(CXXFLAGS = ['-fPIC'])
- env.Append(LINKFLAGS = ['-static-libgcc','-static-libstdc++'])
-
-if env['Werror']:
- env.Append(CXXFLAGS = ['-Werror'])
-
-if env['os'] == 'android':
- env.Append(CPPDEFINES = ['ANDROID'])
- env.Append(LINKFLAGS = ['-pie', '-static-libstdc++'])
-elif env['os'] == 'bare_metal':
- env.Append(LINKFLAGS = ['-static'])
- env.Append(LINKFLAGS = ['-specs=rdimon.specs'])
- env.Append(CXXFLAGS = ['-fPIC'])
- env.Append(CPPDEFINES = ['NO_MULTI_THREADING'])
- env.Append(CPPDEFINES = ['BARE_METAL'])
-
-if env['opencl']:
- if env['os'] in ['bare_metal'] or env['standalone']:
- print("Cannot link OpenCL statically, which is required for bare metal / standalone builds")
- Exit(1)
-
-if env['gles_compute']:
- if env['os'] in ['bare_metal'] or env['standalone']:
- print("Cannot link OpenGLES statically, which is required for bare metal / standalone builds")
- Exit(1)
-
-if env["os"] not in ["android", "bare_metal"] and (env['opencl'] or env['cppthreads']):
- env.Append(LIBS = ['pthread'])
-
-if env['opencl'] or env['gles_compute']:
- if env['embed_kernels']:
- env.Append(CPPDEFINES = ['EMBEDDED_KERNELS'])
-
-if env['debug']:
- env['asserts'] = True
- env['logging'] = True
- env.Append(CXXFLAGS = ['-O0','-g','-gdwarf-2'])
- env.Append(CPPDEFINES = ['ARM_COMPUTE_DEBUG_ENABLED'])
-else:
- env.Append(CXXFLAGS = ['-O3','-ftree-vectorize'])
-
-if env['asserts']:
- env.Append(CPPDEFINES = ['ARM_COMPUTE_ASSERTS_ENABLED'])
- env.Append(CXXFLAGS = ['-fstack-protector-strong'])
-
-if env['logging']:
- env.Append(CPPDEFINES = ['ARM_COMPUTE_LOGGING_ENABLED'])
-
-env.Append(CPPPATH = ['#/include', "#"])
-env.Append(CXXFLAGS = env['extra_cxx_flags'])
-env.Append(LINKFLAGS = env['extra_link_flags'])
-
-Default( install_include("arm_compute"))
-Default( install_include("support"))
-
-Export('version_at_least')
-
-if env['opencl']:
- SConscript("./opencl-1.2-stubs/SConscript", variant_dir="%s/opencl-1.2-stubs" % build_path, duplicate=0)
-
-if env['gles_compute'] and env['os'] != 'android':
- env.Append(CPPPATH = ['#/include/linux'])
- SConscript("./opengles-3.1-stubs/SConscript", variant_dir="%s/opengles-3.1-stubs" % build_path, duplicate=0)
-
-SConscript('./SConscript', variant_dir=build_path, duplicate=0)
-
-if env['examples'] and env['os'] != 'bare_metal':
- SConscript('./examples/SConscript', variant_dir='%s/examples' % build_path, duplicate=0)
-
-if env['os'] != 'bare_metal':
- SConscript('./tests/SConscript', variant_dir='%s/tests' % build_path, duplicate=0)
diff --git a/infra/nnfw/cmake/packages/ARMComputeConfig.cmake b/infra/nnfw/cmake/packages/ARMComputeConfig.cmake
index ccf96692e..1b5a32ef6 100644
--- a/infra/nnfw/cmake/packages/ARMComputeConfig.cmake
+++ b/infra/nnfw/cmake/packages/ARMComputeConfig.cmake
@@ -1,48 +1,39 @@
function(_ARMCompute_Import)
include(FindPackageHandleStandardArgs)
- list(APPEND ARMCompute_LIB_SEARCH_PATHS ${ARMCompute_PREFIX})
+ list(APPEND ARMCompute_LIB_SEARCH_PATHS ${ARMCompute_PREFIX}/lib)
find_path(INCLUDE_DIR NAMES arm_compute/core/ITensor.h PATHS ${ARMCompute_INCLUDE_SEARCH_PATHS})
- find_library(CORE_LIBRARY NAMES arm_compute_core PATHS ${ARMCompute_LIB_SEARCH_PATHS})
- find_library(RUNTIME_LIBRARY NAMES arm_compute PATHS ${ARMCompute_LIB_SEARCH_PATHS})
- find_library(GRAPH_LIBRARY NAMES arm_compute_graph PATHS ${ARMCompute_LIB_SEARCH_PATHS})
+ find_library(CORE_LIBRARY NAMES arm_compute_core PATHS ${ARMCompute_LIB_SEARCH_PATHS} CMAKE_FIND_ROOT_PATH_BOTH)
+ find_library(RUNTIME_LIBRARY NAMES arm_compute PATHS ${ARMCompute_LIB_SEARCH_PATHS} CMAKE_FIND_ROOT_PATH_BOTH)
+ find_library(GRAPH_LIBRARY NAMES arm_compute_graph PATHS ${ARMCompute_LIB_SEARCH_PATHS} CMAKE_FIND_ROOT_PATH_BOTH)
+
+ message(STATUS "Search acl in ${ARMCompute_LIB_SEARCH_PATHS}")
if(NOT INCLUDE_DIR)
- nnfw_find_package(ARMComputeSource QUIET)
+ nnas_find_package(ARMComputeSource QUIET)
if (NOT ARMComputeSource_FOUND)
set(ARMCompute_FOUND FALSE PARENT_SCOPE)
return()
endif()
- set(INCLUDE_DIR ${NNFW_EXTERNALS_DIR}/acl ${NNFW_EXTERNALS_DIR}/acl/include)
+ set(INCLUDE_DIR ${ARMComputeSource_DIR} ${ARMComputeSource_DIR}/include)
endif(NOT INCLUDE_DIR)
- # NOTE '${CMAKE_INSTALL_PREFIX}/lib' should be searched as CI server places
- # pre-built ARM compute libraries on this directory
- if(NOT CORE_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_core.so)
- set(CORE_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_core.so)
- endif()
-
if(NOT CORE_LIBRARY)
set(ARMCompute_FOUND FALSE PARENT_SCOPE)
+ message(STATUS "Cannot find libarm_compute_core.so")
return()
endif()
- if(NOT RUNTIME_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute.so)
- set(RUNTIME_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute.so)
- endif()
-
if(NOT RUNTIME_LIBRARY)
+ message(STATUS "Cannot find libarm_compute.so")
set(ARMCompute_FOUND FALSE PARENT_SCOPE)
return()
endif()
- if(NOT GRAPH_LIBRARY AND EXISTS ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_graph.so)
- set(GRAPH_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libarm_compute_graph.so)
- endif()
-
if(NOT GRAPH_LIBRARY)
+ message(STATUS "Cannot find libarm_compute_graph.so")
set(ARMCompute_FOUND FALSE PARENT_SCOPE)
return()
endif()
@@ -71,37 +62,22 @@ function(_ARMCompute_Import)
set(ARMCompute_FOUND TRUE PARENT_SCOPE)
endfunction(_ARMCompute_Import)
-### Check whether library exists
-function(_ARMCompute_Check VAR LIBDIR)
- set(FOUND TRUE)
-
- if(NOT EXISTS "${LIBDIR}/libarm_compute_core.so")
- set(FOUND FALSE)
- endif()
-
- if(NOT EXISTS "${LIBDIR}/libarm_compute.so")
- set(FOUND FALSE)
- endif()
-
- if(NOT EXISTS "${LIBDIR}/libarm_compute_graph.so")
- set(FOUND FALSE)
- endif()
-
- set(${VAR} ${FOUND} PARENT_SCOPE)
-endfunction(_ARMCompute_Check)
-
# Let's build and install ARMCompute libraries
-# NOTE This function silently returns on error
-function(_ARMCompute_Build ARMCompute_PREFIX)
- ### Check whether library exists
- _ARMCompute_Check(ARMCompute_FOUND ${ARMCompute_PREFIX})
-
- if(ARMCompute_FOUND)
- return()
- endif(ARMCompute_FOUND)
+function(_ARMCompute_Build ARMComputeInstall_DIR)
+ set(PKG_NAME "ARMCOMPUTE")
+ set(PKG_IDENTIFIER "20.05")
+ set(INSTALL_STAMP_PATH "${ARMComputeInstall_DIR}/${PKG_NAME}.stamp")
+ set(ARMComputeBuild_DIR "${CMAKE_BINARY_DIR}/externals/armcompute")
+
+ if(EXISTS ${INSTALL_STAMP_PATH})
+ file(READ ${INSTALL_STAMP_PATH} READ_IDENTIFIER)
+ if("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ return()
+ endif("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ endif(EXISTS ${INSTALL_STAMP_PATH})
### Let's build with SCONS
- nnfw_find_package(ARMComputeSource QUIET)
+ nnas_find_package(ARMComputeSource QUIET)
if(NOT ARMComputeSource_FOUND)
return()
@@ -110,6 +86,7 @@ function(_ARMCompute_Build ARMCompute_PREFIX)
find_program(SCONS_PATH scons)
if(NOT SCONS_PATH)
+ message(WARNING "SCONS NOT FOUND. Please install SCONS to build ARMCompute.")
return()
endif(NOT SCONS_PATH)
@@ -120,6 +97,9 @@ function(_ARMCompute_Build ARMCompute_PREFIX)
endif(CMAKE_BUILD_TYPE)
#### Architecture-specific configurations
+
+ #### BUILD_DIR is in source tree to reduce CI build overhead
+ #### TODO Change BUILD_DIR to ${ARMComputeBuild_DIR}
if(TARGET_ARCH STREQUAL "armv7l")
set(BUILD_ARCH "armv7a")
set(BUILD_DIR "${BUILD_ARCH}-${TARGET_OS}.${SCON_BUILD_TYPE}")
@@ -130,11 +110,6 @@ function(_ARMCompute_Build ARMCompute_PREFIX)
set(BUILD_DIR "${BUILD_ARCH}-${TARGET_OS}.${SCON_BUILD_TYPE}")
endif()
- if(TARGET_ARCH STREQUAL "arm64")
- set(BUILD_ARCH "arm64-v8a")
- set(BUILD_DIR "${BUILD_ARCH}-${TARGET_OS}.${SCON_BUILD_TYPE}")
- endif()
-
#### Platform-specific configurations
#### TODO Support android
@@ -149,8 +124,14 @@ function(_ARMCompute_Build ARMCompute_PREFIX)
list(APPEND SCONS_OPTIONS "examples=0")
list(APPEND SCONS_OPTIONS "Werror=0")
list(APPEND SCONS_OPTIONS "os=${TARGET_OS}")
- include(ProcessorCount)
- ProcessorCount(N)
+
+ if(DEFINED EXTERNALS_BUILD_THREADS)
+ set(N ${EXTERNALS_BUILD_THREADS})
+ else(DEFINED EXTERNALS_BUILD_THREADS)
+ include(ProcessorCount)
+ ProcessorCount(N)
+ endif(DEFINED EXTERNALS_BUILD_THREADS)
+
if((NOT N EQUAL 0) AND BUILD_EXT_MULTITHREAD)
list(APPEND SCONS_OPTIONS -j${N})
endif()
@@ -162,43 +143,34 @@ function(_ARMCompute_Build ARMCompute_PREFIX)
list(APPEND SCONS_OPTIONS "build_dir=${BUILD_DIR}")
endif(DEFINED BUILD_DIR)
- message(STATUS "Build ARMCompute with ${SCONS_PATH} ('${SCONS_OPTIONS}'")
+ list(APPEND SCONS_OPTIONS "install_dir=${ARMComputeInstall_DIR}")
- # Copy externals/SConstruct to externals/acl/ for Tizen build support.
- # TODO The change of externals/SConstruct should be upstreamed to ARM Compute Library community layer.
- execute_process(COMMAND ${CMAKE_COMMAND} -E copy "${CMAKE_CURRENT_LIST_DIR}/ARMCompute/SConstruct" "${ARMComputeSource_DIR}")
+ set(SCONS_CC "gcc")
+ set(SCONS_CXX "g++")
+ if(ANDROID)
+ list(APPEND SCONS_OPTIONS "toolchain_prefix=${ANDROID_TOOLCHAIN_PREFIX}")
+ list(APPEND SCONS_OPTIONS "compiler_prefix=${ANDROID_TOOLCHAIN_ROOT}/bin/aarch64-linux-android${ANDROID_API_LEVEL}-")
+ set(SCONS_CC "clang")
+ set(SCONS_CXX "clang++")
+ endif(ANDROID)
- # Build ARMCompute libraries with SCONS
- # NOTE ARMCompute SConstruct unconditioanlly appends "arm-linux-gnueabihf-" prefix for linux
- execute_process(COMMAND /usr/bin/env CC=gcc CXX=g++ "${SCONS_PATH}" ${SCONS_OPTIONS}
- WORKING_DIRECTORY ${ARMComputeSource_DIR}
- RESULT_VARIABLE ARMCompute_BUILD)
+ message(STATUS "Build ARMCompute with ${SCONS_PATH} ('${SCONS_OPTIONS}'")
- # Install ARMCompute libraries
- # Ps. CI server will copy below installed libraries to target device to test.
- execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${ARMCompute_PREFIX}"
- WORKING_DIRECTORY ${ARMComputeSource_DIR}
- RESULT_VARIABLE ARMCompute_BUILD)
- execute_process(COMMAND ${CMAKE_COMMAND} -E copy "build/${BUILD_DIR}/libarm_compute_core.so" "${ARMCompute_PREFIX}"
- COMMAND ${CMAKE_COMMAND} -E copy "build/${BUILD_DIR}/libarm_compute.so" "${ARMCompute_PREFIX}"
- COMMAND ${CMAKE_COMMAND} -E copy "build/${BUILD_DIR}/libarm_compute_graph.so" "${ARMCompute_PREFIX}"
+ # Build ARMCompute libraries with SCONS
+ # NOTE ARMCompute build process don't allow logging by using OUTPUT_FILE and ERROR_FILE option
+ execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${ARMComputeInstall_DIR}")
+ execute_process(COMMAND /usr/bin/env CC=${SCONS_CC} CXX=${SCONS_CXX} "${SCONS_PATH}" ${SCONS_OPTIONS}
WORKING_DIRECTORY ${ARMComputeSource_DIR}
- RESULT_VARIABLE ARMCompute_BUILD)
-endfunction(_ARMCompute_Build)
+ RESULT_VARIABLE BUILD_EXITCODE)
+ if(NOT BUILD_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "${PKG_NAME} Package: Build and install failed (check '${BUILD_LOG_PATH}' for details)")
+ endif(NOT BUILD_EXITCODE EQUAL 0)
-set(ARMCompute_PREFIX ${CMAKE_INSTALL_PREFIX}/lib)
-
-# This is a workaround for CI issues
-# Ps. CI server will copy below installed libraries to target device to test.
-# TODO Remove this workaround
-if(DEFINED ARMCompute_EXTDIR)
- execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${ARMCompute_PREFIX}")
- execute_process(COMMAND ${CMAKE_COMMAND} -E copy "${ARMCompute_EXTDIR}/libarm_compute_core.so" "${ARMCompute_PREFIX}"
- COMMAND ${CMAKE_COMMAND} -E copy "${ARMCompute_EXTDIR}/libarm_compute.so" "${ARMCompute_PREFIX}"
- COMMAND ${CMAKE_COMMAND} -E copy "${ARMCompute_EXTDIR}/libarm_compute_graph.so" "${ARMCompute_PREFIX}")
-endif(DEFINED ARMCompute_EXTDIR)
+ file(WRITE "${INSTALL_STAMP_PATH}" "${PKG_IDENTIFIER}")
+endfunction(_ARMCompute_Build)
+set(ARMCompute_PREFIX ${EXT_OVERLAY_DIR})
if(BUILD_ARMCOMPUTE)
_ARMCompute_Build("${ARMCompute_PREFIX}")
endif(BUILD_ARMCOMPUTE)
diff --git a/infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake b/infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake
deleted file mode 100644
index ef7384d7c..000000000
--- a/infra/nnfw/cmake/packages/ARMComputeSourceConfig.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-function(_ARMComputeSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(ARMCOMPUTE_URL ${EXTERNAL_DOWNLOAD_SERVER}/ARM-software/ComputeLibrary/archive/v19.05.tar.gz)
- ExternalSource_Get("acl" ${DOWNLOAD_ARMCOMPUTE} ${ARMCOMPUTE_URL})
-
- set(ARMComputeSource_DIR ${acl_SOURCE_DIR} PARENT_SCOPE)
- set(ARMComputeSource_FOUND ${acl_SOURCE_GET} PARENT_SCOPE)
-endfunction(_ARMComputeSource_import)
-
-_ARMComputeSource_import()
diff --git a/infra/nnfw/cmake/packages/AbslSourceConfig.cmake b/infra/nnfw/cmake/packages/AbslSourceConfig.cmake
deleted file mode 100644
index 685550d52..000000000
--- a/infra/nnfw/cmake/packages/AbslSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_AbslSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE The following URL comes from TensorFlow 1.12
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(ABSL_URL ${EXTERNAL_DOWNLOAD_SERVER}/abseil/abseil-cpp/archive/389ec3f906f018661a5308458d623d01f96d7b23.tar.gz)
- ExternalSource_Get("absl" ${DOWNLOAD_ABSL} ${ABSL_URL})
-
- set(AbslSource_DIR ${absl_SOURCE_DIR} PARENT_SCOPE)
- set(AbslSource_FOUND ${absl_SOURCE_GET} PARENT_SCOPE)
-endfunction(_AbslSource_import)
-
-_AbslSource_import()
diff --git a/infra/nnfw/cmake/packages/BoostConfig.cmake b/infra/nnfw/cmake/packages/BoostConfig.cmake
index 26ad78922..f2759f8e1 100644
--- a/infra/nnfw/cmake/packages/BoostConfig.cmake
+++ b/infra/nnfw/cmake/packages/BoostConfig.cmake
@@ -1,6 +1,6 @@
# Let's build and install Boost libraries
function(_Boost_Build Boost_PREFIX)
- nnfw_find_package(BoostSource QUIET)
+ nnas_find_package(BoostSource QUIET)
if(NOT BoostSource_FOUND)
return()
@@ -13,9 +13,21 @@ function(_Boost_Build Boost_PREFIX)
RESULT_VARIABLE Boost_BUILD)
endif()
- set(BoostBuild_DIR ${BoostSource_DIR})
+ set(BoostBuild_DIR ${CMAKE_BINARY_DIR}/externals/boost)
set(BoostInstall_DIR ${Boost_PREFIX})
+ set(INSTALL_STAMP_PATH "${BoostInstall_DIR}/BOOST.stamp")
+ set(BUILD_LOG_PATH "${BoostBuild_DIR}/BOOST.log")
+ set(PKG_NAME "BOOST")
+ set(PKG_IDENTIFIER "1.58.0")
+
+ if(EXISTS ${INSTALL_STAMP_PATH})
+ file(READ ${INSTALL_STAMP_PATH} READ_IDENTIFIER)
+ if("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ return()
+ endif("${READ_IDENTIFIER}" STREQUAL "${PKG_IDENTIFIER}")
+ endif(EXISTS ${INSTALL_STAMP_PATH})
+
unset(Boost_Options)
list(APPEND Boost_Options --build-dir=${BoostBuild_DIR})
@@ -25,33 +37,66 @@ function(_Boost_Build Boost_PREFIX)
list(APPEND Boost_Options --with-system)
list(APPEND Boost_Options --with-filesystem)
+ if(DEFINED EXTERNALS_BUILD_THREADS)
+ set(N ${EXTERNALS_BUILD_THREADS})
+ else(DEFINED EXTERNALS_BUILD_THREADS)
+ include(ProcessorCount)
+ ProcessorCount(N)
+ endif(DEFINED EXTERNALS_BUILD_THREADS)
+
+ if((NOT N EQUAL 0) AND BUILD_EXT_MULTITHREAD)
+ list(APPEND Boost_Options -j${N})
+ endif()
+
set(JAM_FILENAME ${BoostBuild_DIR}/user-config.jam)
- file(WRITE ${JAM_FILENAME} "using gcc : local : ${CMAKE_CXX_COMPILER} ;\n")
- list(APPEND Boost_Options toolset=gcc-local)
+ if(ANDROID)
+ set(NDK_CXX ${NDK_DIR}/toolchains/llvm/prebuilt/linux-x86_64/bin/${TARGET_ARCH}-linux-android${ANDROID_API_LEVEL}-clang++)
+ file(WRITE ${JAM_FILENAME} "using clang : arm64v8a : ${NDK_CXX} ;")
+ list(APPEND Boost_Options toolset=clang-arm64v8a)
+ # without target-os=android, it complains it cannot find -lrt.
+ list(APPEND Boost_Options target-os=android)
+ else()
+ file(WRITE ${JAM_FILENAME} "using gcc : local : ${CMAKE_CXX_COMPILER} ;\n")
+ list(APPEND Boost_Options toolset=gcc-local)
+ endif(ANDROID)
# Install Boost libraries
execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${BoostInstall_DIR}")
execute_process(COMMAND /usr/bin/env BOOST_BUILD_PATH="${BoostBuild_DIR}" ${BoostSource_DIR}/b2 install ${Boost_Options}
- WORKING_DIRECTORY ${BoostSource_DIR})
+ WORKING_DIRECTORY ${BoostSource_DIR}
+ OUTPUT_FILE ${BUILD_LOG_PATH}
+ RESULT_VARIABLE BUILD_EXITCODE)
+
+ if(NOT BUILD_EXITCODE EQUAL 0)
+ message(FATAL_ERROR "${PKG_NAME} Package: Build and install failed (check '${BUILD_LOG_PATH}' for details)")
+ endif(NOT BUILD_EXITCODE EQUAL 0)
+
+ file(WRITE "${INSTALL_STAMP_PATH}" "${PKG_IDENTIFIER}")
endfunction(_Boost_Build)
# Find pre-installed boost library and update Boost variables.
-find_package(Boost 1.58.0 QUIET COMPONENTS log program_options filesystem system)
-if(Boost_FOUND)
- return()
+if (NOT BUILD_BOOST)
+ # BoostConfig.cmake does not honor QUIET argument at least till cmake 1.70.0.
+ # Thus, don't try to find_package if you're not entirely sure you have boost.
+ find_package(Boost 1.58.0 QUIET COMPONENTS log program_options filesystem system)
+ if(Boost_FOUND)
+ return()
+ endif()
endif()
-set(Boost_PREFIX ${CMAKE_INSTALL_PREFIX})
+set(Boost_PREFIX ${EXT_OVERLAY_DIR})
if(BUILD_BOOST)
_Boost_Build("${Boost_PREFIX}")
- # Let's use locally built boost to system-wide one so sub modules
- # needing Boost library and header files can search for them
- # in ${Boost_PREFIX} directory
- list(APPEND CMAKE_PREFIX_PATH "${Boost_PREFIX}")
+ # Without Boost_INCLUDE_DIR, it complains the variable is missing during find_package.
+ set(Boost_INCLUDE_DIR ${Boost_PREFIX}/include)
+
+ # 1) without static build, it will complain it cannot find libc++_shared.so.
+ # 2) We uses static libraries for other libraries.
+ set(Boost_USE_STATIC_LIBS ON)
# We built boost library so update Boost variables.
find_package(Boost 1.58.0 QUIET COMPONENTS log program_options filesystem system)
diff --git a/infra/nnfw/cmake/packages/BoostSourceConfig.cmake b/infra/nnfw/cmake/packages/BoostSourceConfig.cmake
deleted file mode 100644
index 1b81316fd..000000000
--- a/infra/nnfw/cmake/packages/BoostSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_BoostSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # EXTERNAL_DOWNLOAD_SERVER will be overwritten by CI server to use mirror server.
- envoption(EXTERNAL_DOWNLOAD_SERVER "http://sourceforge.net")
- set(BOOST_URL ${EXTERNAL_DOWNLOAD_SERVER}/projects/boost/files/boost/1.58.0/boost_1_58_0.tar.gz)
- ExternalSource_Get("boost" ${DOWNLOAD_BOOST} ${BOOST_URL})
-
- set(BoostSource_DIR ${boost_SOURCE_DIR} PARENT_SCOPE)
- set(BoostSource_FOUND ${boost_SOURCE_GET} PARENT_SCOPE)
-endfunction(_BoostSource_import)
-
-_BoostSource_import()
diff --git a/infra/nnfw/cmake/packages/CpuInfoConfig.cmake b/infra/nnfw/cmake/packages/CpuInfoConfig.cmake
new file mode 100644
index 000000000..408cf8510
--- /dev/null
+++ b/infra/nnfw/cmake/packages/CpuInfoConfig.cmake
@@ -0,0 +1,31 @@
+function(_CpuInfo_Build)
+ nnas_find_package(CpuInfoSource QUIET)
+
+ # NOTE This line prevents multiple definitions of cpuinfo target
+ if(TARGET cpuinfo)
+ set(CpuInfoSource_DIR ${CpuInfoSource_DIR} PARENT_SCOPE)
+ set(CpuInfo_FOUND TRUE PARENT_SCOPE)
+ return()
+ endif(TARGET cpuinfo)
+
+ if(NOT CpuInfoSource_FOUND)
+ message(STATUS "CPUINFO: Source not found")
+ set(CpuInfo_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT CpuInfoSource_FOUND)
+
+ set(CPUINFO_BUILD_TOOLS OFF CACHE BOOL "Build command-line tools")
+ set(CPUINFO_BUILD_BENCHMARKS OFF CACHE BOOL "Build cpuinfo unit tests")
+ set(CPUINFO_BUILD_UNIT_TESTS OFF CACHE BOOL "Build cpuinfo mock tests")
+ set(CPUINFO_BUILD_MOCK_TESTS OFF CACHE BOOL "Build cpuinfo micro-benchmarks")
+ add_extdirectory("${CpuInfoSource_DIR}" cpuinfo EXCLUDE_FROM_ALL)
+ set_target_properties(cpuinfo PROPERTIES POSITION_INDEPENDENT_CODE ON)
+ set(CpuInfoSource_DIR ${CpuInfoSource_DIR} PARENT_SCOPE)
+ set(CpuInfo_FOUND TRUE PARENT_SCOPE)
+endfunction(_CpuInfo_Build)
+
+if(BUILD_CPUINFO)
+ _CpuInfo_Build()
+else(BUILD_CPUINFO)
+ set(CpuInfo_FOUND FALSE)
+endif(BUILD_CPUINFO)
diff --git a/infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake b/infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake
deleted file mode 100644
index 0939ba3b3..000000000
--- a/infra/nnfw/cmake/packages/CpuinfoSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_cpuinfoSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(CPUINFO_URL ${EXTERNAL_DOWNLOAD_SERVER}/pytorch/cpuinfo/archive/d5e37adf1406cf899d7d9ec1d317c47506ccb970.tar.gz)
-
- ExternalSource_Get("cpuinfo" ${DOWNLOAD_NNPACK} ${CPUINFO_URL})
-
- set(CPUINFO_SOURCE_DIR ${cpuinfo_SOURCE_DIR} PARENT_SCOPE)
- set(CPUINFO_SOURCE_FOUND ${cpuinfo_SOURCE_GET} PARENT_SCOPE)
-endfunction(_cpuinfoSource_import)
-
-_cpuinfoSource_import()
diff --git a/infra/nnfw/cmake/packages/EigenConfig.cmake b/infra/nnfw/cmake/packages/EigenConfig.cmake
index 0feb0890a..e71830a16 100644
--- a/infra/nnfw/cmake/packages/EigenConfig.cmake
+++ b/infra/nnfw/cmake/packages/EigenConfig.cmake
@@ -1,14 +1,16 @@
function(_Eigen_import)
- nnfw_find_package(EigenSource QUIET)
+ nnas_find_package(TensorFlowEigenSource EXACT 2.3.0 QUIET)
- if(NOT EigenSource_FOUND)
+ if(NOT TensorFlowEigenSource_FOUND)
set(Eigen_FOUND FALSE PARENT_SCOPE)
return()
- endif(NOT EigenSource_FOUND)
+ endif(NOT TensorFlowEigenSource_FOUND)
if(NOT TARGET eigen)
add_library(eigen INTERFACE)
- target_include_directories(eigen INTERFACE "${EigenSource_DIR}")
+ target_include_directories(eigen SYSTEM INTERFACE "${TensorFlowEigenSource_DIR}")
+ # Add EIGEN_MPL2_ONLY to remove license issue posibility
+ target_compile_definitions(eigen INTERFACE EIGEN_MPL2_ONLY)
endif(NOT TARGET eigen)
set(Eigen_FOUND TRUE PARENT_SCOPE)
diff --git a/infra/nnfw/cmake/packages/EigenSourceConfig.cmake b/infra/nnfw/cmake/packages/EigenSourceConfig.cmake
deleted file mode 100644
index e9b0ba8f1..000000000
--- a/infra/nnfw/cmake/packages/EigenSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_EigenSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads Eign from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://bitbucket.org")
- set(EIGEN_URL ${EXTERNAL_DOWNLOAD_SERVER}/eigen/eigen/get/88fc23324517.tar.gz)
- ExternalSource_Get("eigen" ${DOWNLOAD_EIGEN} ${EIGEN_URL})
-
- set(EigenSource_DIR ${eigen_SOURCE_DIR} PARENT_SCOPE)
- set(EigenSource_FOUND ${eigen_SOURCE_GET} PARENT_SCOPE)
-endfunction(_EigenSource_import)
-
-_EigenSource_import()
diff --git a/infra/nnfw/cmake/packages/Enum34SourceConfig.cmake b/infra/nnfw/cmake/packages/Enum34SourceConfig.cmake
deleted file mode 100644
index 96b6ca85a..000000000
--- a/infra/nnfw/cmake/packages/Enum34SourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_enum34Source_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://bitbucket.org")
- set(ENUM34_URL ${EXTERNAL_DOWNLOAD_SERVER}/stoneleaf/enum34/get/1.1.6.tar.gz)
-
- ExternalSource_Get("python_enum" ${DOWNLOAD_NNPACK} ${ENUM34_URL})
-
- set(PYTHON_ENUM_SOURCE_DIR ${python_enum_SOURCE_DIR} PARENT_SCOPE)
- set(PYTHON_ENUM_SOURCE_FOUND ${python_enum_SOURCE_GET} PARENT_SCOPE)
-endfunction(_enum34Source_import)
-
-_enum34Source_import()
diff --git a/infra/nnfw/cmake/packages/FP16SourceConfig.cmake b/infra/nnfw/cmake/packages/FP16SourceConfig.cmake
deleted file mode 100644
index 7df52948e..000000000
--- a/infra/nnfw/cmake/packages/FP16SourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_FP16Source_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(FP16_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/FP16/archive/febbb1c163726b5db24bed55cc9dc42529068997.tar.gz)
-
- ExternalSource_Get("FP16" ${DOWNLOAD_NNPACK} ${FP16_URL})
-
- set(FP16_SOURCE_DIR ${FP16_SOURCE_DIR} PARENT_SCOPE)
- set(FP16_SOURCE_FOUND ${FP16_SOURCE_GET} PARENT_SCOPE)
-endfunction(_FP16Source_import)
-
-_FP16Source_import()
diff --git a/infra/nnfw/cmake/packages/FXdivSourceConfig.cmake b/infra/nnfw/cmake/packages/FXdivSourceConfig.cmake
deleted file mode 100644
index 2ea574ab8..000000000
--- a/infra/nnfw/cmake/packages/FXdivSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_FXdivSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(FXDIV_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/FXdiv/archive/f8c5354679ec2597792bc70a9e06eff50c508b9a.tar.gz)
-
- ExternalSource_Get("FXdiv" ${DOWNLOAD_NNPACK} ${FXDIV_URL})
-
- set(FXDIV_SOURCE_DIR ${FXdiv_SOURCE_DIR} PARENT_SCOPE)
- set(FXDIV_SOURCE_FOUND ${FXdiv_SOURCE_GET} PARENT_SCOPE)
-endfunction(_FXdivSource_import)
-
-_FXdivSource_import()
diff --git a/infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake b/infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake
deleted file mode 100644
index b2cb9886d..000000000
--- a/infra/nnfw/cmake/packages/FarmhashSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_FarmhashSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads farmhash from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(FARMHASH_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz)
- ExternalSource_Get("farmhash" ${DOWNLOAD_FARMHASH} ${FARMHASH_URL})
-
- set(FarmhashSource_DIR ${farmhash_SOURCE_DIR} PARENT_SCOPE)
- set(FarmhashSource_FOUND ${farmhash_SOURCE_GET} PARENT_SCOPE)
-endfunction(_FarmhashSource_import)
-
-_FarmhashSource_import()
diff --git a/infra/nnfw/cmake/packages/FlatBuffersConfig.cmake b/infra/nnfw/cmake/packages/FlatBuffersConfig.cmake
index 064673158..d27ac1435 100644
--- a/infra/nnfw/cmake/packages/FlatBuffersConfig.cmake
+++ b/infra/nnfw/cmake/packages/FlatBuffersConfig.cmake
@@ -1,5 +1,13 @@
function(_FlatBuffers_import)
- nnfw_find_package(FlatBuffersSource QUIET)
+
+ find_package(Flatbuffers QUIET)
+ if(Flatbuffers_FOUND)
+ set(FlatBuffers_FOUND TRUE PARENT_SCOPE)
+ return()
+ endif(Flatbuffers_FOUND)
+
+ # NOTE Tizen uses 1.12
+ nnas_find_package(FlatBuffersSource EXACT 1.12 QUIET)
if(NOT FlatBuffersSource_FOUND)
set(FlatBuffers_FOUND FALSE PARENT_SCOPE)
@@ -13,65 +21,15 @@ function(_FlatBuffers_import)
list(APPEND FlatBuffers_Library_SRCS "${FlatBuffersSource_DIR}/src/reflection.cpp")
list(APPEND FlatBuffers_Library_SRCS "${FlatBuffersSource_DIR}/src/util.cpp")
- # From FlatBuffers's CMakeLists.txt
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_cpp.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_dart.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_fbs.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_general.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_go.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_grpc.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_js.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_json_schema.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_lobster.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_lua.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_php.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/idl_gen_python.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/flatc.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/src/flatc_main.cpp")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/grpc/src/compiler/cpp_generator.cc")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/grpc/src/compiler/go_generator.cc")
- list(APPEND FlatBuffers_Compiler_SRCS "${FlatBuffersSource_DIR}/grpc/src/compiler/java_generator.cc")
-
- if(NOT TARGET flatbuffers)
+ if(NOT TARGET flatbuffers::flatbuffers)
add_library(flatbuffers ${FlatBuffers_Library_SRCS})
target_include_directories(flatbuffers PUBLIC "${FlatBuffersSource_DIR}/include")
- endif(NOT TARGET flatbuffers)
+ set_property(TARGET flatbuffers PROPERTY POSITION_INDEPENDENT_CODE ON)
- if(NOT TARGET flatc)
- add_executable(flatc ${FlatBuffers_Compiler_SRCS})
- target_include_directories(flatc PRIVATE "${FlatBuffersSource_DIR}/grpc")
- target_link_libraries(flatc flatbuffers)
- endif(NOT TARGET flatc)
+ add_library(flatbuffers::flatbuffers ALIAS flatbuffers)
+ endif(NOT TARGET flatbuffers::flatbuffers)
set(FlatBuffers_FOUND TRUE PARENT_SCOPE)
endfunction(_FlatBuffers_import)
_FlatBuffers_import()
-
-if(FlatBuffers_FOUND)
- function(FlatBuffers_Generate PREFIX OUTPUT_DIR SCHEMA_DIR)
- get_filename_component(abs_output_dir ${OUTPUT_DIR} ABSOLUTE)
- get_filename_component(abs_schema_dir ${SCHEMA_DIR} ABSOLUTE)
-
- foreach(schema ${ARGN})
- get_filename_component(schema_fn "${schema}" NAME)
- get_filename_component(dir "${schema}" DIRECTORY)
-
- get_filename_component(schema_fn_we "${schema_fn}" NAME_WE)
-
- list(APPEND SCHEMA_FILES "${abs_schema_dir}/${schema}")
- list(APPEND OUTPUT_FILES "${abs_output_dir}/${schema_fn_we}_generated.h")
- endforeach()
-
- add_custom_command(OUTPUT ${OUTPUT_FILES}
- COMMAND ${CMAKE_COMMAND} -E make_directory "${abs_output_dir}"
- COMMAND "$<TARGET_FILE:flatc>" -c --no-includes
- --no-union-value-namespacing
- --gen-object-api -o "${abs_output_dir}"
- ${SCHEMA_FILES}
- DEPENDS flatc)
-
- set(${PREFIX}_SOURCES ${OUTPUT_FILES} PARENT_SCOPE)
- set(${PREFIX}_INCLUDE_DIRS ${abs_output_dir} PARENT_SCOPE)
- endfunction(FlatBuffers_Generate)
-endif(FlatBuffers_FOUND)
diff --git a/infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake b/infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake
deleted file mode 100644
index f8a85effc..000000000
--- a/infra/nnfw/cmake/packages/FlatBuffersSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_FlatBuffersSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads FlatBuffers from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(FLATBUFFERS_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/flatbuffers/archive/1f5eae5d6a135ff6811724f6c57f911d1f46bb15.tar.gz)
- ExternalSource_Get("flatbuffers" ${DOWNLOAD_FLATBUFFERS} ${FLATBUFFERS_URL})
-
- set(FlatBuffersSource_DIR ${flatbuffers_SOURCE_DIR} PARENT_SCOPE)
- set(FlatBuffersSource_FOUND ${flatbuffers_SOURCE_GET} PARENT_SCOPE)
-endfunction(_FlatBuffersSource_import)
-
-_FlatBuffersSource_import()
diff --git a/infra/nnfw/cmake/packages/GEMMLowpConfig.cmake b/infra/nnfw/cmake/packages/GEMMLowpConfig.cmake
new file mode 100644
index 000000000..b321961ca
--- /dev/null
+++ b/infra/nnfw/cmake/packages/GEMMLowpConfig.cmake
@@ -0,0 +1,20 @@
+function(_GEMMLowp_import)
+ nnas_find_package(GEMMLowpSource QUIET)
+
+ if(NOT GEMMLowpSource_FOUND)
+ set(GEMMLowp_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT GEMMLowpSource_FOUND)
+
+ if(NOT TARGET gemmlowp)
+ find_package(Threads REQUIRED)
+
+ add_library(gemmlowp INTERFACE)
+ target_include_directories(gemmlowp SYSTEM INTERFACE ${GEMMLowpSource_DIR})
+ target_link_libraries(gemmlowp INTERFACE ${LIB_PTHREAD})
+ endif(NOT TARGET gemmlowp)
+
+ set(GEMMLowp_FOUND TRUE PARENT_SCOPE)
+endfunction(_GEMMLowp_import)
+
+_GEMMLowp_import()
diff --git a/infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake b/infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake
deleted file mode 100644
index 51b8ff993..000000000
--- a/infra/nnfw/cmake/packages/GEMMLowpSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_GEMMLowpSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 uses the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(GEMMLOWP_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/gemmlowp/archive/38ebac7b059e84692f53e5938f97a9943c120d98.tar.gz)
- ExternalSource_Get("gemmlowp" ${DOWNLOAD_GEMMLOWP} ${GEMMLOWP_URL})
-
- set(GEMMLowpSource_DIR ${gemmlowp_SOURCE_DIR} PARENT_SCOPE)
- set(GEMMLowpSource_FOUND ${gemmlowp_SOURCE_GET} PARENT_SCOPE)
-endfunction(_GEMMLowpSource_import)
-
-_GEMMLowpSource_import()
diff --git a/infra/nnfw/cmake/packages/GTestConfig.cmake b/infra/nnfw/cmake/packages/GTestConfig.cmake
index 990a3d52e..54695531e 100644
--- a/infra/nnfw/cmake/packages/GTestConfig.cmake
+++ b/infra/nnfw/cmake/packages/GTestConfig.cmake
@@ -1,23 +1,19 @@
-if(${BUILD_GTEST})
- nnfw_include(ExternalSourceTools)
- nnfw_include(ExternalProjectTools)
- nnfw_include(OptionTools)
+if(${DOWNLOAD_GTEST})
+ nnas_find_package(GTestSource QUIET)
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(GTEST_URL ${EXTERNAL_DOWNLOAD_SERVER}/google/googletest/archive/release-1.8.0.tar.gz)
- ExternalSource_Get("gtest" TRUE ${GTEST_URL})
-
- # gtest_SOURCE_DIR is used in gtest subdirectorty's cmake
- set(sourcedir_gtest ${gtest_SOURCE_DIR})
- unset(gtest_SOURCE_DIR)
+ if(NOT GTestSource_FOUND)
+ set(GTest_FOUND FALSE)
+ return()
+ endif(NOT GTestSource_FOUND)
if(NOT TARGET gtest_main)
- add_extdirectory(${sourcedir_gtest} gtest EXCLUDE_FROM_ALL)
+ nnas_include(ExternalProjectTools)
+ add_extdirectory(${GTestSource_DIR} gtest EXCLUDE_FROM_ALL)
endif(NOT TARGET gtest_main)
set(GTest_FOUND TRUE)
return()
-endif(${BUILD_GTEST})
+endif(${DOWNLOAD_GTEST})
### Find and use pre-installed Google Test
find_package(GTest)
@@ -37,7 +33,63 @@ if(${GTEST_FOUND} AND TARGET Threads::Threads)
target_link_libraries(gtest_main INTERFACE ${GTEST_MAIN_LIBRARIES})
endif(NOT TARGET gtest_main)
+ if(NOT TARGET gmock)
+ find_library(GMOCK_LIBRARIES gmock)
+ find_path(GMOCK_INCLUDE_DIR gmock/gmock.h)
+ if(GMOCK_LIBRARIES AND GMOCK_INCLUDE_DIR)
+ add_library(gmock INTERFACE)
+ target_include_directories(gmock INTERFACE ${GMOCK_INCLUDE_DIR})
+ target_link_libraries(gmock INTERFACE ${GMOCK_LIBRARIES} Threads::Threads)
+ endif(GMOCK_LIBRARIES)
+ endif(NOT TARGET gmock)
+
+ if(NOT TARGET gmock_main)
+ find_library(GMOCK_MAIN_LIBRARIES gmock_main)
+ find_path(GMOCK_INCLUDE_DIR gmock/gmock.h)
+ if(GMOCK_MAIN_LIBRARIES AND GMOCK_INCLUDE_DIR)
+ add_library(gmock_main INTERFACE)
+ target_include_directories(gmock_main INTERFACE ${GMOCK_INCLUDE_DIR})
+ target_link_libraries(gmock_main INTERFACE gmock)
+ target_link_libraries(gmock_main INTERFACE ${GMOCK_MAIN_LIBRARIES})
+ endif(GMOCK_MAIN_LIBRARIES AND GMOCK_INCLUDE_DIR)
+ endif(NOT TARGET gmock_main)
+
# TODO Check whether this command is necessary or not
include_directories(${GTEST_INCLUDE_DIR})
set(GTest_FOUND TRUE)
+else(${GTEST_FOUND} AND TARGET Threads::Threads)
+ find_path(GTEST_INCLUDE_DIR gtest/gtest.h)
+ find_path(GMOCK_INCLUDE_DIR gmock/gmock.h)
+ find_library(GMOCK_LIBRARIES libgmock.so)
+ find_library(GMOCK_MAIN_LIBRARIES libgmock_main.so)
+
+ if(GTEST_INCLUDE_DIR AND GMOCK_INCLUDE_DIR AND GMOCK_LIBRARIES AND GMOCK_MAIN_LIBRARIES AND TARGET Threads::Threads)
+ if(NOT TARGET gmock)
+ add_library(gmock INTERFACE)
+ target_include_directories(gmock INTERFACE ${GMOCK_INCLUDE_DIRS})
+ target_link_libraries(gmock INTERFACE ${GMOCK_LIBRARIES} Threads::Threads)
+ endif(NOT TARGET gmock)
+
+ if(NOT TARGET gmock_main)
+ add_library(gmock_main INTERFACE)
+ target_include_directories(gmock_main INTERFACE ${GMOCK_INCLUDE_DIRS})
+ target_link_libraries(gmock_main INTERFACE gmock)
+ target_link_libraries(gmock_main INTERFACE ${GMOCK_MAIN_LIBRARIES})
+ endif(NOT TARGET gmock_main)
+
+ if(NOT TARGET gtest)
+ add_library(gtest INTERFACE)
+ target_include_directories(gtest INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest INTERFACE ${GMOCK_LIBRARIES} Threads::Threads)
+ endif(NOT TARGET gtest)
+
+ if(NOT TARGET gtest_main)
+ add_library(gtest_main INTERFACE)
+ target_include_directories(gtest_main INTERFACE ${GTEST_INCLUDE_DIRS})
+ target_link_libraries(gtest_main INTERFACE gtest)
+ target_link_libraries(gtest_main INTERFACE ${GMOCK_MAIN_LIBRARIES})
+ endif(NOT TARGET gtest_main)
+
+ set(GTest_FOUND TRUE)
+ endif(GTEST_INCLUDE_DIR AND GMOCK_INCLUDE_DIR AND GMOCK_LIBRARIES AND GMOCK_MAIN_LIBRARIES AND TARGET Threads::Threads)
endif(${GTEST_FOUND} AND TARGET Threads::Threads)
diff --git a/infra/nnfw/cmake/packages/HDF5Config.cmake b/infra/nnfw/cmake/packages/HDF5Config.cmake
index 1f90deaf9..8c2badf54 100644
--- a/infra/nnfw/cmake/packages/HDF5Config.cmake
+++ b/infra/nnfw/cmake/packages/HDF5Config.cmake
@@ -1,31 +1,60 @@
+# Don't cache HDF5_*. Otherwise it will use the cached value without searching.
unset(HDF5_DIR CACHE)
-find_package(HDF5 QUIET)
+unset(HDF5_INCLUDE_DIRS CACHE)
+unset(HDF5_CXX_LIBRARY_hdf5 CACHE)
+unset(HDF5_CXX_LIBRARY_hdf5_cpp CACHE)
-if (NOT HDF5_FOUND)
- # Give second chance for some systems where sytem find_package config mode fails
- unset(HDF5_FOUND)
+if(NOT BUILD_WITH_HDF5)
+ set(HDF5_FOUND FALSE)
+ return()
+endif(NOT BUILD_WITH_HDF5)
- find_path(HDF5_INCLUDE_DIRS NAMES hdf5.h PATH_SUFFIXES include/hdf5/serial)
+# Case 1. external hdf5
+if(DEFINED EXT_HDF5_DIR)
+ find_path(HDF5_INCLUDE_DIRS NAMES H5Cpp.h NO_CMAKE_FIND_ROOT_PATH PATHS "${EXT_HDF5_DIR}/include")
+ find_library(HDF5_CXX_LIBRARY_hdf5 NAMES libhdf5.a PATHS "${EXT_HDF5_DIR}/lib")
+ find_library(HDF5_CXX_LIBRARY_hdf5_cpp NAMES libhdf5_cpp.a PATHS "${EXT_HDF5_DIR}/lib")
+ if (NOT (HDF5_INCLUDE_DIRS AND HDF5_CXX_LIBRARY_hdf5 AND HDF5_CXX_LIBRARY_hdf5_cpp))
+ message(WARNING "Failed to find H5Cpp.h or libhdf5.a or libhdf5_cpp.a")
+ set(HDF5_FOUND FALSE)
+ return()
+ else()
+ # message(FATAL_ERROR "0=${HDF5_INCLUDE_DIRS},1=${HDF5_CXX_LIBRARIES}")
+ set(HDF5_FOUND TRUE)
+ list(APPEND HDF5_CXX_LIBRARIES ${HDF5_CXX_LIBRARY_hdf5_cpp} ${HDF5_CXX_LIBRARY_hdf5})
+ return()
+ endif()
+endif()
+
+# Case 2. search pre-installed locations (by apt, brew, ...)
+if(NOT CMAKE_CROSSCOMPILING)
+ find_package(HDF5 COMPONENTS CXX QUIET)
+else()
+ find_path(HDF5_INCLUDE_DIRS NAMES hdf5.h ONLY_CMAKE_FIND_ROOT_PATH PATH_SUFFIXES include/hdf5/serial)
if (NOT HDF5_INCLUDE_DIRS)
set(HDF5_FOUND FALSE)
return()
endif()
- if (HDF5_USE_STATIC_LIBRARIES)
- find_library(HDF5_LIBRARIES libhdf5.a)
- else (HDF5_USE_STATIC_LIBRARIES)
- find_library(HDF5_LIBRARIES libhdf5.so)
+ if(HDF5_USE_STATIC_LIBRARIES)
+ find_library(HDF5_CXX_LIBRARY_hdf5 libhdf5.a)
+ find_library(HDF5_CXX_LIBRARY_hdf5_cpp libhdf5_cpp.a)
+ else(HDF5_USE_STATIC_LIBRARIES)
+ find_library(HDF5_CXX_LIBRARY_hdf5 libhdf5.so)
+ find_library(HDF5_CXX_LIBRARY_hdf5_cpp libhdf5_cpp.so)
endif(HDF5_USE_STATIC_LIBRARIES)
- if (NOT HDF5_LIBRARIES)
+ if (NOT (HDF5_CXX_LIBRARY_hdf5 AND HDF5_CXX_LIBRARY_hdf5_cpp))
set(HDF5_FOUND FALSE)
return()
endif()
- list(APPEND HDF5_LIBRARIES "sz" "z" "dl" "m")
+
+  # We can use "hdf5" and "hdf5_cpp" to use the same files found above.
+ list(APPEND HDF5_CXX_LIBRARIES "hdf5" "hdf5_cpp" "sz" "z" "dl" "m")
+
+ # Append missing libaec which is required by libsz, which is required by libhdf5
+ list(APPEND HDF5_CXX_LIBRARIES "aec")
set(HDF5_FOUND TRUE)
endif()
-
-# Append missing libaec which is required by libsz, which is required by libhdf5
-list(APPEND HDF5_LIBRARIES "aec")
diff --git a/infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake b/infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake
deleted file mode 100644
index 114a51245..000000000
--- a/infra/nnfw/cmake/packages/NEON2SSESourceConfig.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-function(_NEON2SSESource_import)
- # TODO Remove this workaround once target preset is ready
- if(NOT (TARGET_ARCH_BASE STREQUAL "x86_64"))
- set(NEON2SSESource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT (TARGET_ARCH_BASE STREQUAL "x86_64"))
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- # NOTE TensorFlow 1.12 downloads NEON2SSE from the following URL
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(NEON2SSE_URL ${EXTERNAL_DOWNLOAD_SERVER}/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz)
- ExternalSource_Get("neon_2_sse" ${DOWNLOAD_NEON2SSE} ${NEON2SSE_URL})
-
- set(NEON2SSESource_DIR ${neon_2_sse_SOURCE_DIR} PARENT_SCOPE)
- set(NEON2SSESource_FOUND ${neon_2_sse_SOURCE_GET} PARENT_SCOPE)
-endfunction(_NEON2SSESource_import)
-
-_NEON2SSESource_import()
diff --git a/infra/nnfw/cmake/packages/NNPACKConfig.cmake b/infra/nnfw/cmake/packages/NNPACKConfig.cmake
deleted file mode 100644
index 97382b71e..000000000
--- a/infra/nnfw/cmake/packages/NNPACKConfig.cmake
+++ /dev/null
@@ -1,51 +0,0 @@
-function(_NNPACK_Import)
- nnfw_find_package(NNPACKSource QUIET)
-
- if(NOT NNPACK_SOURCE_FOUND)
- set(NNPACK_FOUND FALSE PARENT_SCOPE)
- message(STATUS "NNPACK not found")
- return()
- endif(NOT NNPACK_SOURCE_FOUND)
-
- nnfw_find_package(CpuinfoSource REQUIRED)
- nnfw_find_package(FP16Source REQUIRED)
- nnfw_find_package(FXdivSource REQUIRED)
- nnfw_find_package(PSIMDSource REQUIRED)
- nnfw_find_package(PthreadpoolSource REQUIRED)
- nnfw_find_package(SixSource REQUIRED)
- nnfw_find_package(Enum34Source REQUIRED)
- nnfw_find_package(OpcodesSource REQUIRED)
- nnfw_find_package(PeachpySource QUIET)
-
- if(NOT PYTHON_PEACHPY_SOURCE_FOUND)
- set(NNPACK_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT PYTHON_PEACHPY_SOURCE_FOUND)
-
- # workaround for CI
- set(THREADS_PTHREAD_ARG "2" CACHE STRING "Forcibly set by CMakeLists.txt." FORCE)
- if(NOT TARGET nnpack)
- # Allows us to build nnpack at build time
- set(NNPACK_BUILD_TESTS OFF CACHE BOOL "")
- set(NNPACK_BUILD_BENCHMARKS OFF CACHE BOOL "")
- set(NNPACK_LIBRARY_TYPE "static" CACHE STRING "")
- set(PTHREADPOOL_LIBRARY_TYPE "static" CACHE STRING "")
- set(CPUINFO_LIBRARY_TYPE "static" CACHE STRING "")
- nnfw_include(ExternalProjectTools)
- add_extdirectory("${NNPACK_SOURCE_DIR}" nnpack EXCLUDE_FROM_ALL)
- # We build static versions of nnpack and pthreadpool but link
- # them into a shared library (high-perf-backend), so they need PIC.
- set_property(TARGET nnpack PROPERTY POSITION_INDEPENDENT_CODE ON)
- set_property(TARGET pthreadpool PROPERTY POSITION_INDEPENDENT_CODE ON)
- set_property(TARGET cpuinfo PROPERTY POSITION_INDEPENDENT_CODE ON)
- endif()
-
- set(NNPACK_FOUND TRUE PARENT_SCOPE)
- set(NNPACK_INCLUDE_DIRS
- $<TARGET_PROPERTY:nnpack,INCLUDE_DIRECTORIES>
- $<TARGET_PROPERTY:pthreadpool,INCLUDE_DIRECTORIES> PARENT_SCOPE)
- set(NNPACK_LIBRARIES $<TARGET_FILE:nnpack> $<TARGET_FILE:cpuinfo> PARENT_SCOPE)
-
-endfunction(_NNPACK_Import)
-
-_NNPACK_Import()
diff --git a/infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake b/infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake
deleted file mode 100644
index b6b5b01bd..000000000
--- a/infra/nnfw/cmake/packages/NNPACKSourceConfig.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-function(_NNPACKSource_import)
- if(NOT DOWNLOAD_NNPACK)
- set(NNPACKSource_FOUND FALSE PARENT_SCOPE)
- message(WARN "NNPACK not downloaded")
- return()
- endif(NOT DOWNLOAD_NNPACK)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(NNPACK_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/NNPACK/archive/c039579abe21f5756e0f0e45e8e767adccc11852.tar.gz)
- ExternalSource_Get("NNPACK" ${DOWNLOAD_NNPACK} ${NNPACK_URL})
-
- set(NNPACK_SOURCE_DIR ${NNPACK_SOURCE_DIR} PARENT_SCOPE)
- set(NNPACK_INCLUDE_DIR ${NNPACK_SOURCE_DIR}/include PARENT_SCOPE)
- set(NNPACK_SOURCE_FOUND ${NNPACK_SOURCE_GET} PARENT_SCOPE)
-endfunction(_NNPACKSource_import)
-
-_NNPACKSource_import()
diff --git a/infra/nnfw/cmake/packages/NoniusConfig.cmake b/infra/nnfw/cmake/packages/NoniusConfig.cmake
index 2b5778ebb..049d655fa 100644
--- a/infra/nnfw/cmake/packages/NoniusConfig.cmake
+++ b/infra/nnfw/cmake/packages/NoniusConfig.cmake
@@ -1,5 +1,5 @@
function(_Nonius_import)
- nnfw_find_package(NoniusSource QUIET)
+ nnas_find_package(NoniusSource QUIET)
if(NOT NoniusSource_FOUND)
set(Nonius_FOUND FALSE PARENT_SCOPE)
@@ -12,14 +12,6 @@ function(_Nonius_import)
target_include_directories(nonius INTERFACE "${NoniusSource_DIR}/include")
endif(NOT TARGET nonius)
- if(BUILD_KBENCHMARK)
- # Copy html_report_template.g.h++ file to externals/nonius.
- # This header file is modified to show the html summary view according to the layer in kbenchmark.
- execute_process(COMMAND ${CMAKE_COMMAND} -E copy
- "${CMAKE_CURRENT_LIST_DIR}/Nonius/html_report_template.g.h++"
- "${NoniusSource_DIR}/include/nonius/detail")
- endif(BUILD_KBENCHMARK)
-
set(Nonius_FOUND TRUE PARENT_SCOPE)
endfunction(_Nonius_import)
diff --git a/infra/nnfw/cmake/packages/NoniusSourceConfig.cmake b/infra/nnfw/cmake/packages/NoniusSourceConfig.cmake
deleted file mode 100644
index 5dde6b476..000000000
--- a/infra/nnfw/cmake/packages/NoniusSourceConfig.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-function(_NoniusSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(NONIUS_URL ${EXTERNAL_DOWNLOAD_SERVER}/libnonius/nonius/archive/v1.2.0-beta.1.tar.gz)
- ExternalSource_Get("nonius" ${DOWNLOAD_NONIUS} ${NONIUS_URL})
-
- set(NoniusSource_DIR ${nonius_SOURCE_DIR} PARENT_SCOPE)
- set(NoniusSource_FOUND ${nonius_SOURCE_GET} PARENT_SCOPE)
-endfunction(_NoniusSource_import)
-
-_NoniusSource_import()
diff --git a/infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake b/infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake
deleted file mode 100644
index 635249f64..000000000
--- a/infra/nnfw/cmake/packages/OpcodesSourceConfig.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-function(_PeachpySource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(PEACHPY_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/Opcodes/archive/6e2b0cd9f1403ecaf164dea7019dd54db5aea252.tar.gz)
- ExternalSource_Get("python_opcodes" ${DOWNLOAD_NNPACK} ${PEACHPY_URL})
-
- set(PYTHON_OPCODES_SOURCE_DIR ${python_opcodes_SOURCE_DIR} PARENT_SCOPE)
- set(PYTHON_OPCODES_SOURCE_FOUND ${python_opcodes_SOURCE_GET} PARENT_SCOPE)
-endfunction(_PeachpySource_import)
-
-_PeachpySource_import()
diff --git a/infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake b/infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake
deleted file mode 100644
index 0f208cd55..000000000
--- a/infra/nnfw/cmake/packages/PSIMDSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_PSIMDSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(PSIMD_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/psimd/archive/90a938f30ba414ada2f4b00674ee9631d7d85e19.tar.gz)
-
- ExternalSource_Get("PSIMD" ${DOWNLOAD_NNPACK} ${PSIMD_URL})
-
- set(PSIMD_SOURCE_DIR ${PSIMD_SOURCE_DIR} PARENT_SCOPE)
- set(PSIMD_SOURCE_FOUND ${PSIMD_SOURCE_GET} PARENT_SCOPE)
-endfunction(_PSIMDSource_import)
-
-_PSIMDSource_import()
diff --git a/infra/nnfw/cmake/packages/PeachpySourceConfig.cmake b/infra/nnfw/cmake/packages/PeachpySourceConfig.cmake
deleted file mode 100644
index 4cfd682c7..000000000
--- a/infra/nnfw/cmake/packages/PeachpySourceConfig.cmake
+++ /dev/null
@@ -1,31 +0,0 @@
-function(_PeachpySource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(PEACHPY_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/PeachPy/archive/01d15157a973a4ae16b8046313ddab371ea582db.tar.gz)
-
- ExternalSource_Get("python_peachpy" ${DOWNLOAD_NNPACK} ${PEACHPY_URL})
- FIND_PACKAGE(PythonInterp)
-
- nnfw_find_package(SixSource REQUIRED)
- nnfw_find_package(Enum34Source REQUIRED)
- nnfw_find_package(OpcodesSource REQUIRED)
-
- # Generate opcodes:
- SET(ENV{PYTHONPATH} ${python_peachpy_SOURCE_DIR}:${PYTHON_SIX_SOURCE_DIR}:${PYTHON_ENUM_SOURCE_DIR}:${PYTHON_OPCODES_SOURCE_DIR})
- EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} ./codegen/x86_64.py
- WORKING_DIRECTORY ${python_peachpy_SOURCE_DIR}
- RESULT_VARIABLE BUILT_PP)
-
- if(NOT BUILT_PP EQUAL 0)
- # Mark PYTHON_PEACHPY_SOURCE_FOUND as FALSE if source generation fails
- set(PYTHON_PEACHPY_SOURCE_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT BUILT_PP EQUAL 0)
-
- set(PYTHON_PEACHPY_SOURCE_DIR ${python_peachpy_SOURCE_DIR} PARENT_SCOPE)
- set(PYTHON_PEACHPY_SOURCE_FOUND ${python_peachpy_SOURCE_GET} PARENT_SCOPE)
-endfunction(_PeachpySource_import)
-
-_PeachpySource_import()
diff --git a/infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake b/infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake
deleted file mode 100644
index 0c3b61ac4..000000000
--- a/infra/nnfw/cmake/packages/PthreadpoolSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_pthreadpoolSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(PTHREADPOOL_URL ${EXTERNAL_DOWNLOAD_SERVER}/Maratyszcza/pthreadpool/archive/6673a4c71fe35e077c6843a74017d9c25610c537.tar.gz)
-
- ExternalSource_Get("pthreadpool" ${DOWNLOAD_NNPACK} ${PTHREADPOOL_URL})
-
- set(PTHREADPOOL_SOURCE_DIR ${pthreadpool_SOURCE_DIR} PARENT_SCOPE)
- set(PTHREADPOOL_SOURCE_FOUND ${pthreadpool_SOURCE_GET} PARENT_SCOPE)
-endfunction(_pthreadpoolSource_import)
-
-_pthreadpoolSource_import()
diff --git a/infra/nnfw/cmake/packages/Ruy/CMakeLists.txt b/infra/nnfw/cmake/packages/Ruy/CMakeLists.txt
new file mode 100644
index 000000000..9140a17a7
--- /dev/null
+++ b/infra/nnfw/cmake/packages/Ruy/CMakeLists.txt
@@ -0,0 +1,51 @@
+set(RUY_BASE ${RuySource_DIR}/ruy)
+
+#
+# Ruy library
+#
+file(GLOB RUY_CORE_SRCS "${RUY_BASE}/*.cc")
+file(GLOB RUY_CORE_TESTS "${RUY_BASE}/*test*.cc")
+list(REMOVE_ITEM RUY_CORE_SRCS ${RUY_CORE_TESTS})
+
+list(APPEND RUY_SRCS ${RUY_CORE_SRCS})
+list(REMOVE_ITEM RUY_SRCS "${RUY_BASE}/benchmark.cc")
+list(REMOVE_ITEM RUY_SRCS "${RUY_BASE}/example.cc")
+list(REMOVE_ITEM RUY_SRCS "${RUY_BASE}/example_advanced.cc")
+list(REMOVE_ITEM RUY_SRCS "${RUY_BASE}/tune_tool.cc")
+list(REMOVE_ITEM RUY_SRCS "${RUY_BASE}/pmu.cc")
+list(REMOVE_ITEM RUY_SRCS "${RUY_BASE}/create_trmul_params.cc")
+list(REMOVE_ITEM RUY_SRCS "${RUY_BASE}/prepare_packed_matrices.cc")
+
+list(APPEND RUY_INSTRUMENTATION_SRCS "${RUY_BASE}/profiler/instrumentation.cc")
+
+if(PROFILE_RUY)
+ list(APPEND RUY_PROFILER_SRCS "${RUY_BASE}/profiler/profiler.cc")
+ list(APPEND RUY_PROFILER_SRCS "${RUY_BASE}/profiler/treeview.cc")
+endif(PROFILE_RUY)
+
+list(APPEND RUY_INCLUDES "${RuySource_DIR}")
+
+add_library(ruy STATIC ${RUY_SRCS})
+target_include_directories(ruy SYSTEM PUBLIC ${RUY_INCLUDES})
+target_compile_options(ruy PRIVATE -O3)
+
+target_include_directories(ruy PRIVATE ${CpuInfoSource_DIR})
+target_link_libraries(ruy PRIVATE cpuinfo)
+target_compile_definitions(ruy PRIVATE RUY_HAVE_CPUINFO)
+
+add_library(ruy_instrumentation ${RUY_INSTRUMENTATION_SRCS})
+target_include_directories(ruy_instrumentation SYSTEM PUBLIC ${RUY_INCLUDES})
+target_compile_options(ruy_instrumentation PRIVATE -O3)
+
+set_target_properties(ruy ruy_instrumentation PROPERTIES POSITION_INDEPENDENT_CODE ON)
+
+if(PROFILE_RUY)
+ add_library(ruy_profiler STATIC ${RUY_PROFILER_SRCS})
+ target_include_directories(ruy_profiler SYSTEM PUBLIC ${RUY_INCLUDES})
+ target_compile_options(ruy_profiler PRIVATE -O3)
+ set_target_properties(ruy_profiler PROPERTIES POSITION_INDEPENDENT_CODE ON)
+
+ target_compile_definitions(ruy PUBLIC RUY_PROFILER)
+ target_compile_definitions(ruy_instrumentation PUBLIC RUY_PROFILER)
+ target_compile_definitions(ruy_profiler PUBLIC RUY_PROFILER)
+endif(PROFILE_RUY)
diff --git a/infra/nnfw/cmake/packages/RuyConfig.cmake b/infra/nnfw/cmake/packages/RuyConfig.cmake
new file mode 100644
index 000000000..4e7cc24ac
--- /dev/null
+++ b/infra/nnfw/cmake/packages/RuyConfig.cmake
@@ -0,0 +1,31 @@
+function(_Ruy_Build)
+ # NOTE This line prevents multiple definitions of ruy target
+ if(TARGET ruy)
+ set(Ruy_FOUND TRUE PARENT_SCOPE)
+ return()
+ endif(TARGET ruy)
+
+ nnas_find_package(RuySource QUIET)
+ nnfw_find_package(CpuInfo QUIET)
+
+ if(NOT RuySource_FOUND)
+ message(STATUS "RUY: Source not found")
+ set(Ruy_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT RuySource_FOUND)
+
+ if (NOT CpuInfo_FOUND)
+ message(STATUS "RUY: CPUINFO not found")
+ set(Ruy_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT CpuInfo_FOUND)
+
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/Ruy" ruy)
+ set(Ruy_FOUND TRUE PARENT_SCOPE)
+endfunction(_Ruy_Build)
+
+if(BUILD_RUY)
+ _Ruy_Build()
+else(BUILD_RUY)
+  set(Ruy_FOUND FALSE)
+endif(BUILD_RUY)
diff --git a/infra/nnfw/cmake/packages/SixSourceConfig.cmake b/infra/nnfw/cmake/packages/SixSourceConfig.cmake
deleted file mode 100644
index 309ead302..000000000
--- a/infra/nnfw/cmake/packages/SixSourceConfig.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-function(_SIXSource_import)
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(six_URL ${EXTERNAL_DOWNLOAD_SERVER}/benjaminp/six/archive/1.11.0.tar.gz)
-
- ExternalSource_Get("python_six" ${DOWNLOAD_NNPACK} ${six_URL})
-
- set(PYTHON_SIX_SOURCE_DIR ${python_six_SOURCE_DIR} PARENT_SCOPE)
- set(PYTHON_SIX_SOURCE_FOUND ${python_six_SOURCE_GET} PARENT_SCOPE)
-endfunction(_SIXSource_import)
-
-_SIXSource_import()
diff --git a/infra/nnfw/cmake/packages/TensorFlowEigen-1.13.1/TensorFlowEigenConfig.cmake b/infra/nnfw/cmake/packages/TensorFlowEigen-1.13.1/TensorFlowEigenConfig.cmake
new file mode 100644
index 000000000..253b290bd
--- /dev/null
+++ b/infra/nnfw/cmake/packages/TensorFlowEigen-1.13.1/TensorFlowEigenConfig.cmake
@@ -0,0 +1,19 @@
+function(_Eigen_import)
+ nnas_find_package(EigenSource QUIET)
+
+ if(NOT EigenSource_FOUND)
+ set(TensorFlowEigen_1_13_1_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT EigenSource_FOUND)
+
+ if(NOT TARGET eigen-tf-1.13.1)
+ add_library(eigen-tf-1.13.1 INTERFACE)
+ target_include_directories(eigen-tf-1.13.1 SYSTEM INTERFACE "${EigenSource_DIR}")
+    # Add EIGEN_MPL2_ONLY to remove license issue possibility
+ target_compile_definitions(eigen-tf-1.13.1 INTERFACE EIGEN_MPL2_ONLY)
+ endif(NOT TARGET eigen-tf-1.13.1)
+
+ set(TensorFlowEigen_1_13_1_FOUND TRUE PARENT_SCOPE)
+endfunction(_Eigen_import)
+
+_Eigen_import()
diff --git a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfigVersion.cmake b/infra/nnfw/cmake/packages/TensorFlowEigen-1.13.1/TensorFlowEigenConfigVersion.cmake
index 4a57b655b..ed79ecd91 100644
--- a/infra/nncc/cmake/packages/TensorFlowProtoText-1.12/TensorFlowProtoTextConfigVersion.cmake
+++ b/infra/nnfw/cmake/packages/TensorFlowEigen-1.13.1/TensorFlowEigenConfigVersion.cmake
@@ -1,4 +1,4 @@
-set(PACKAGE_VERSION "1.12")
+set(PACKAGE_VERSION "1.13.1")
set(PACKAGE_VERSION_EXACT FALSE)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
set(PACKAGE_VERSION_UNSUITABLE TRUE)
diff --git a/infra/nnfw/cmake/packages/TensorFlowLite/CMakeLists.txt b/infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLite/CMakeLists.txt
index 93676525a..2c9618d68 100644
--- a/infra/nnfw/cmake/packages/TensorFlowLite/CMakeLists.txt
+++ b/infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLite/CMakeLists.txt
@@ -36,10 +36,9 @@ list(APPEND TFLITE_SRCS ${TFLITE_PROFILING_SRCS})
list(APPEND TFLITE_SRCS "${FarmhashSource_DIR}/src/farmhash.cc")
list(APPEND TFLITE_INCLUDES "${TensorFlowSource_DIR}")
-list(APPEND TFLITE_INCLUDES "${AbslSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${AbseilSource_DIR}")
list(APPEND TFLITE_INCLUDES "${GEMMLowpSource_DIR}")
list(APPEND TFLITE_INCLUDES "${FarmhashSource_DIR}/src")
-list(APPEND TFLITE_INCLUDES "${FlatBuffersSource_DIR}/include")
if(NEON2SSESource_FOUND)
list(APPEND TFLITE_INCLUDES "${NEON2SSESource_DIR}")
@@ -50,15 +49,14 @@ endif(NEON2SSESource_FOUND)
list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/audio_spectrogram.cc")
list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/audio_spectrogram_test.cc")
list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/spectrogram.cc")
-## mfcc
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc_dct.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/internal/mfcc_mel_filterbank.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/mfcc.cc")
-list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/kernels/mfcc_test.cc")
add_library(tensorflow-lite STATIC ${TFLITE_SRCS})
target_include_directories(tensorflow-lite SYSTEM PUBLIC ${TFLITE_INCLUDES})
target_compile_definitions(tensorflow-lite PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK")
set_property(TARGET tensorflow-lite PROPERTY POSITION_INDEPENDENT_CODE ON)
-target_link_libraries(tensorflow-lite eigen ${LIB_PTHREAD} dl)
+target_link_libraries(tensorflow-lite eigen-tf-1.13.1 flatbuffers::flatbuffers ${LIB_PTHREAD} dl)
+
+if(ANDROID)
+ target_link_libraries(tensorflow-lite log)
+ target_include_directories(tensorflow-lite PUBLIC "${NDK_DIR}/..")
+endif()
diff --git a/infra/nnfw/cmake/packages/TensorFlowLiteConfig.cmake b/infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfig.cmake
index 4dbc1d14c..e15239805 100644
--- a/infra/nnfw/cmake/packages/TensorFlowLiteConfig.cmake
+++ b/infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfig.cmake
@@ -13,23 +13,23 @@ if(BUILD_TENSORFLOW_LITE)
endmacro(return_unless)
# Required packages
- nnfw_find_package(AbslSource QUIET)
- return_unless(AbslSource_FOUND)
- nnfw_find_package(Eigen QUIET)
- return_unless(Eigen_FOUND)
- nnfw_find_package(FarmhashSource QUIET)
+ nnas_find_package(AbseilSource QUIET)
+ return_unless(AbseilSource_FOUND)
+ nnfw_find_package(TensorFlowEigen EXACT 1.13.1 QUIET)
+ return_unless(TensorFlowEigen_1_13_1_FOUND)
+ nnas_find_package(FarmhashSource QUIET)
return_unless(FarmhashSource_FOUND)
- nnfw_find_package(FlatBuffersSource QUIET)
- return_unless(FlatBuffersSource_FOUND)
- nnfw_find_package(GEMMLowpSource QUIET)
+ nnfw_find_package(FlatBuffers QUIET)
+ return_unless(FlatBuffers_FOUND)
+ nnas_find_package(GEMMLowpSource QUIET)
return_unless(GEMMLowpSource_FOUND)
- nnfw_find_package(TensorFlowSource QUIET)
+ nnas_find_package(TensorFlowSource EXACT 1.13.1 QUIET)
return_unless(TensorFlowSource_FOUND)
# Optional packages
- nnfw_find_package(NEON2SSESource QUIET)
+ nnas_find_package(NEON2SSESource QUIET)
- nnfw_include(ExternalProjectTools)
+ nnas_include(ExternalProjectTools)
add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/TensorFlowLite" tflite)
set(TensorFlowLite_FOUND TRUE)
@@ -41,8 +41,12 @@ find_path(TFLITE_INCLUDE_DIR NAMES tensorflow/lite/interpreter.h)
find_library(TFLITE_LIB NAMES tensorflow-lite)
if(NOT TFLITE_INCLUDE_DIR)
- set(TensorFlowLite_FOUND FALSE)
- return()
+ # Tizen install TensorFlow Lite 1.13.1 headers in /usr/include/tensorflow1
+ find_path(TFLITE_INCLUDE_DIR NAMES tensorflow/lite/interpreter.h PATHS "/usr/include/tensorflow1")
+ if(NOT TFLITE_INCLUDE_DIR)
+ set(TensorFlowLite_FOUND FALSE)
+ return()
+ endif(NOT TFLITE_INCLUDE_DIR)
endif(NOT TFLITE_INCLUDE_DIR)
if(NOT TFLITE_LIB)
@@ -56,6 +60,10 @@ message(STATUS "Found TensorFlow Lite: TRUE (include: ${TFLITE_INCLUDE_DIR}, lib
add_library(tensorflow-lite INTERFACE)
target_include_directories(tensorflow-lite SYSTEM INTERFACE ${TFLITE_INCLUDE_DIR})
target_link_libraries(tensorflow-lite INTERFACE ${TFLITE_LIB})
+find_package(Flatbuffers)
+if(Flatbuffers_FOUND)
+ target_link_libraries(tensorflow-lite INTERFACE flatbuffers::flatbuffers)
+endif(Flatbuffers_FOUND)
# Prefer -pthread to -lpthread
set(THREADS_PREFER_PTHREAD_FLAG TRUE)
diff --git a/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfigVersion.cmake b/infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfigVersion.cmake
index 46609dc10..ed79ecd91 100644
--- a/infra/nncc/cmake/packages/TensorFlowLite-1.7/TensorFlowLiteConfigVersion.cmake
+++ b/infra/nnfw/cmake/packages/TensorFlowLite-1.13.1/TensorFlowLiteConfigVersion.cmake
@@ -1,4 +1,4 @@
-set(PACKAGE_VERSION "1.7")
+set(PACKAGE_VERSION "1.13.1")
set(PACKAGE_VERSION_EXACT FALSE)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
set(PACKAGE_VERSION_UNSUITABLE TRUE)
diff --git a/infra/nnfw/cmake/packages/TensorFlowLite-2.3.0/CMakeLists.txt b/infra/nnfw/cmake/packages/TensorFlowLite-2.3.0/CMakeLists.txt
new file mode 100644
index 000000000..616f8ff8e
--- /dev/null
+++ b/infra/nnfw/cmake/packages/TensorFlowLite-2.3.0/CMakeLists.txt
@@ -0,0 +1,124 @@
+# Reference: https://github.com/tensorflow/tensorflow/blob/v2.3.0/tensorflow/lite/tools/make/Makefile
+#
+# Tensorflow Lite library 2.3.0
+#
+set(TENSORFLOW_LITE_BASE ${TFLiteVanillaTensorFlowSource_DIR}/tensorflow/lite)
+
+file(GLOB TFLITE_CORE_SRCS "${TENSORFLOW_LITE_BASE}/*.c"
+ "${TENSORFLOW_LITE_BASE}/*.cc"
+ "${TENSORFLOW_LITE_BASE}/core/*.cc")
+
+file(GLOB_RECURSE TFLITE_KERNEL_SRCS "${TENSORFLOW_LITE_BASE}/kernels/*.cc")
+
+file(GLOB TFLITE_LIB_SRCS "${TENSORFLOW_LITE_BASE}/c/*.c" "${TENSORFLOW_LITE_BASE}/c/*.cc")
+
+file(GLOB TFLITE_API_SRCS "${TENSORFLOW_LITE_BASE}/core/api/*.c"
+ "${TENSORFLOW_LITE_BASE}/core/api/*.cc")
+
+list(APPEND TFLITE_PROFILING_SRCS "${TENSORFLOW_LITE_BASE}/profiling/memory_info.cc")
+list(APPEND TFLITE_PROFILING_SRCS "${TENSORFLOW_LITE_BASE}/profiling/time.cc")
+
+file(GLOB TFLITE_EXPERIMENTAL_SRCS "${TENSORFLOW_LITE_BASE}/experimental/resource/*.cc")
+
+file(GLOB TFLITE_SPARSITY_SRCS "${TENSORFLOW_LITE_BASE}/tools/optimize/sparsity/*.cc")
+
+list(APPEND TFLITE_SRCS ${TFLITE_CORE_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_KERNEL_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_LIB_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_API_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_PROFILING_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_EXPERIMENTAL_SRCS})
+list(APPEND TFLITE_SRCS ${TFLITE_SPARSITY_SRCS})
+
+# externals
+list(APPEND TFLITE_SRCS "${TFLiteVanillaFarmhashSource_DIR}/src/farmhash.cc")
+list(APPEND TFLITE_SRCS "${TFLiteVanillaFFT2DSource_DIR}/fftsg.c")
+list(APPEND TFLITE_SRCS "${TFLiteVanillaFFT2DSource_DIR}/fftsg2d.c")
+list(APPEND TFLITE_SRCS "${TFLiteVanillaFlatBuffersSource_DIR}/src/util.cpp")
+
+# externals - absl
+file(GLOB_RECURSE ABSL_SRCS "${TFLiteVanillaAbslSource_DIR}/absl/*.cc")
+file(GLOB_RECURSE ABSL_EXCLS "${TFLiteVanillaAbslSource_DIR}/absl/*test*.cc"
+ "${TFLiteVanillaAbslSource_DIR}/absl/*benchmark*.cc"
+ "${TFLiteVanillaAbslSource_DIR}/absl/synchronization/*.cc"
+ "${TFLiteVanillaAbslSource_DIR}/absl/debugging/*.cc"
+ "${TFLiteVanillaAbslSource_DIR}/absl/hash/*.cc"
+ "${TFLiteVanillaAbslSource_DIR}/absl/flags/*.cc"
+ "${TFLiteVanillaAbslSource_DIR}/absl/random/*.cc")
+list(REMOVE_ITEM ABSL_SRCS ${ABSL_EXCLS})
+list(APPEND TFLITE_SRCS ${ABSL_SRCS})
+
+# externals - ruy
+file(GLOB RUY_SRCS "${TFLiteVanillaRuySource_DIR}/ruy/*.cc")
+file(GLOB_RECURSE RUY_EXCLS "${TFLiteVanillaRuySource_DIR}/ruy/*test*.cc"
+ "${TFLiteVanillaRuySource_DIR}/ruy/*benchmark*.cc"
+ "${TFLiteVanillaRuySource_DIR}/ruy/*example*.cc")
+list(REMOVE_ITEM RUY_SRCS ${RUY_EXCLS})
+# Temporary fix for ruy compilation error.
+# TODO(b/158800055): Remove this hack once the ruy version is correctly bumped.
+list(REMOVE_ITEM RUY_SRCS "${TFLiteVanillaRuySource_DIR}/ruy/prepare_packed_matrices.cc")
+list(APPEND TFLITE_SRCS ${RUY_SRCS})
+
+
+# Build with mmap? true
+# caution: v2.3.0's Makefile has wrong code on this part. This is fixed on master branch.
+set(BUILD_WITH_MMAP TRUE)
+if(${BUILD_WITH_MMAP})
+ list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/mmap_allocation_disabled.cc")
+else()
+ list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/mmap_allocation.cc")
+endif()
+
+# Build with nnapi? true
+# caution: this nnapi delegate comes from tflite, not ours.
+set(BUILD_WITH_NNAPI TRUE)
+if(${BUILD_WITH_NNAPI})
+ list(APPEND TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/delegates/nnapi/nnapi_delegate.cc")
+ list(APPEND TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/delegates/nnapi/quant_lstm_sup.cc")
+ list(APPEND TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/nnapi/nnapi_implementation.cc")
+ list(APPEND TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/nnapi/nnapi_util.cc")
+else()
+ list(APPEND TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/delegates/nnapi/nnapi_delegate_disabled.cc")
+ list(APPEND TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/nnapi/nnapi_implementation_disabled.cc")
+endif()
+
+# ios: we don't support ios
+list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/minimal_logging_ios.cc")
+
+# android
+if(NOT ANDROID)
+ list(REMOVE_ITEM TFLITE_SRCS "${TENSORFLOW_LITE_BASE}/minimal_logging_android.cc")
+endif()
+
+# exclude some source files
+file(GLOB_RECURSE TFLITE_EXCLS "${TENSORFLOW_LITE_BASE}/*test*.cc"
+ "${TENSORFLOW_LITE_BASE}/*benchmark*.cc"
+ "${TENSORFLOW_LITE_BASE}/*example*.cc"
+ "${TENSORFLOW_LITE_BASE}/*tool*.cc")
+list(REMOVE_ITEM TFLITE_SRCS ${TFLITE_EXCLS})
+
+# include headers
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaTensorFlowSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaEigenSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaAbslSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaGEMMLowpSource_DIR}")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaNEON2SSESource_DIR}")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaFarmhashSource_DIR}/src")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaFlatBuffersSource_DIR}/include")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaFP16Source_DIR}/include")
+list(APPEND TFLITE_INCLUDES "${TFLiteVanillaRuySource_DIR}")
+
+add_library(tensorflow-lite-2.3.0 STATIC ${TFLITE_SRCS})
+target_include_directories(tensorflow-lite-2.3.0 SYSTEM PUBLIC ${TFLITE_INCLUDES})
+target_include_directories(tensorflow-lite-2.3.0 PRIVATE ${CpuInfoSource_DIR})
+target_compile_definitions(tensorflow-lite-2.3.0 PUBLIC "GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK -DTFLITE_WITH_RUY -DTFLITE_WITH_RUY_GEMV -DRUY_HAVE_CPUINFO")
+set_property(TARGET tensorflow-lite-2.3.0 PROPERTY POSITION_INDEPENDENT_CODE ON)
+target_link_libraries(tensorflow-lite-2.3.0 eigen ${LIB_PTHREAD} dl cpuinfo)
+if(NOT ANDROID AND ${BUILD_WITH_NNAPI})
+ target_link_libraries(tensorflow-lite-2.3.0 rt)
+endif()
+
+if(ANDROID)
+ target_link_libraries(tensorflow-lite-2.3.0 log)
+ target_include_directories(tensorflow-lite-2.3.0 PUBLIC "${NDK_DIR}/..")
+endif()
diff --git a/infra/nnfw/cmake/packages/TensorFlowLite-2.3.0Config.cmake b/infra/nnfw/cmake/packages/TensorFlowLite-2.3.0Config.cmake
new file mode 100644
index 000000000..9671dc4af
--- /dev/null
+++ b/infra/nnfw/cmake/packages/TensorFlowLite-2.3.0Config.cmake
@@ -0,0 +1,107 @@
+if(BUILD_TENSORFLOW_LITE_2_3_0)
+ macro(return_unless VAR)
+ if(NOT ${VAR})
+ message("${VAR} NOT TRUE")
+ set(TensorFlowLite_2_3_0_FOUND PARENT_SCOPE)
+ return()
+ endif(NOT ${VAR})
+ endmacro(return_unless)
+
+ nnas_include(ExternalSourceTools)
+ nnas_include(OptionTools)
+
+ # Below urls come from https://github.com/tensorflow/tensorflow/blob/v2.3.0/tensorflow/lite/tools/make/Makefile
+
+ set(absl_url "https://github.com/abseil/abseil-cpp/archive/df3ea785d8c30a9503321a3d35ee7d35808f190d.tar.gz")
+ ExternalSource_Download("TFLiteVanilla_Absl" ${absl_url})
+ set(TFLiteVanillaAbslSource_DIR "${TFLiteVanilla_Absl_SOURCE_DIR}")
+ if (NOT TFLiteVanillaAbslSource_DIR STREQUAL "")
+ set(TFLiteVanillaAbslSource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaAbslSource_FOUND)
+
+ set(eigen_url "https://gitlab.com/libeigen/eigen/-/archive/386d809bde475c65b7940f290efe80e6a05878c4/eigen-386d809bde475c65b7940f290efe80e6a05878c4.tar.gz")
+ ExternalSource_Download("TFLiteVanilla_Eigen" ${eigen_url})
+ set(TFLiteVanillaEigenSource_DIR "${TFLiteVanilla_Eigen_SOURCE_DIR}")
+ if (NOT TFLiteVanillaEigenSource_DIR STREQUAL "")
+ set(TFLiteVanillaEigenSource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaEigenSource_FOUND)
+
+ set(farmhash_url "https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz")
+ ExternalSource_Download("TFLiteVanilla_Farmhash" ${farmhash_url})
+ set(TFLiteVanillaFarmhashSource_DIR "${TFLiteVanilla_Farmhash_SOURCE_DIR}")
+ if (NOT TFLiteVanillaFarmhashSource_DIR STREQUAL "")
+ set(TFLiteVanillaFarmhashSource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaFarmhashSource_FOUND)
+
+ set(fft2d_url "https://storage.googleapis.com/mirror.tensorflow.org/github.com/petewarden/OouraFFT/archive/v1.0.tar.gz")
+ ExternalSource_Download("TFLiteVanilla_FFT2D" ${fft2d_url})
+ set(TFLiteVanillaFFT2DSource_DIR "${TFLiteVanilla_FFT2D_SOURCE_DIR}")
+ if (NOT TFLiteVanillaFFT2DSource_DIR STREQUAL "")
+ set(TFLiteVanillaFFT2DSource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaFFT2DSource_FOUND)
+
+ set(flatbuffers_url "https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/flatbuffers/archive/v1.12.0.tar.gz")
+ ExternalSource_Download("TFLiteVanilla_FlatBuffers" ${flatbuffers_url})
+ set(TFLiteVanillaFlatBuffersSource_DIR "${TFLiteVanilla_FlatBuffers_SOURCE_DIR}")
+ if (NOT TFLiteVanillaFlatBuffersSource_DIR STREQUAL "")
+ set(TFLiteVanillaFlatBuffersSource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaFlatBuffersSource_FOUND)
+
+ set(fp16_url "https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip")
+ ExternalSource_Download("TFLiteVanilla_FP16" ${fp16_url})
+ set(TFLiteVanillaFP16Source_DIR "${TFLiteVanilla_FP16_SOURCE_DIR}")
+ if (NOT TFLiteVanillaFP16Source_DIR STREQUAL "")
+ set(TFLiteVanillaFP16Source_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaFP16Source_FOUND)
+
+ set(gemmlowp_url "https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/gemmlowp/archive/fda83bdc38b118cc6b56753bd540caa49e570745.zip")
+ ExternalSource_Download("TFLiteVanilla_GEMMLowp" ${gemmlowp_url})
+ set(TFLiteVanillaGEMMLowpSource_DIR "${TFLiteVanilla_GEMMLowp_SOURCE_DIR}")
+ if (NOT TFLiteVanillaGEMMLowpSource_DIR STREQUAL "")
+ set(TFLiteVanillaGEMMLowpSource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaGEMMLowpSource_FOUND)
+
+ set(neon2sse_url "https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz")
+ ExternalSource_Download("TFLiteVanilla_NEON2SSE" ${neon2sse_url})
+ set(TFLiteVanillaNEON2SSESource_DIR "${TFLiteVanilla_NEON2SSE_SOURCE_DIR}")
+ if (NOT TFLiteVanillaNEON2SSESource_DIR STREQUAL "")
+ set(TFLiteVanillaNEON2SSESource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaNEON2SSESource_FOUND)
+
+ set(tensorflow_url "https://github.com/tensorflow/tensorflow/archive/v2.3.0.tar.gz")
+ ExternalSource_Download("TFLiteVanilla_TensorFlow" ${tensorflow_url})
+ set(TFLiteVanillaTensorFlowSource_DIR "${TFLiteVanilla_TensorFlow_SOURCE_DIR}")
+ if (NOT TFLiteVanillaTensorFlowSource_DIR STREQUAL "")
+ set(TFLiteVanillaTensorFlowSource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaTensorFlowSource_FOUND)
+
+ set(ruy_url "https://github.com/google/ruy/archive/34ea9f4993955fa1ff4eb58e504421806b7f2e8f.zip")
+ ExternalSource_Download("TFLiteVanilla_Ruy" ${ruy_url})
+ set(TFLiteVanillaRuySource_DIR "${TFLiteVanilla_Ruy_SOURCE_DIR}")
+ if (NOT TFLiteVanillaRuySource_DIR STREQUAL "")
+ set(TFLiteVanillaRuySource_FOUND TRUE)
+ endif()
+ return_unless(TFLiteVanillaRuySource_FOUND)
+
+ nnfw_find_package(CpuInfo QUIET)
+ if (NOT CpuInfo_FOUND)
+ message(STATUS "TFLiteVanillaRun: CPUINFO not found")
+ set(TensorFlowLite_2_3_0_FOUND FALSE PARENT_SCOPE)
+ return()
+ endif(NOT CpuInfo_FOUND)
+
+ nnas_include(ExternalProjectTools)
+ add_extdirectory("${CMAKE_CURRENT_LIST_DIR}/TensorFlowLite-2.3.0" tflite-2.3.0)
+
+ set(TensorFlowLite_2_3_0_FOUND TRUE)
+ return()
+endif()
diff --git a/infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake b/infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake
deleted file mode 100644
index f9fd3af13..000000000
--- a/infra/nnfw/cmake/packages/TensorFlowSourceConfig.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-function(_TensorFlowSource_import)
- if(NOT DOWNLOAD_TENSORFLOW)
- set(TensorFlowSource_FOUND FALSE PARENT_SCOPE)
- return()
- endif(NOT DOWNLOAD_TENSORFLOW)
-
- nnfw_include(ExternalSourceTools)
- nnfw_include(OptionTools)
-
- envoption(EXTERNAL_DOWNLOAD_SERVER "https://github.com")
- set(TENSORFLOW_URL ${EXTERNAL_DOWNLOAD_SERVER}/tensorflow/tensorflow/archive/v1.13.1.tar.gz)
- ExternalSource_Get("tensorflow" ${DOWNLOAD_TENSORFLOW} ${TENSORFLOW_URL})
-
- set(TensorFlowSource_DIR ${tensorflow_SOURCE_DIR} PARENT_SCOPE)
- set(TensorFlowSource_FOUND ${tensorflow_SOURCE_GET} PARENT_SCOPE)
-endfunction(_TensorFlowSource_import)
-
-_TensorFlowSource_import()
diff --git a/infra/nnfw/cmake/packages/TensorflowConfig.cmake b/infra/nnfw/cmake/packages/TensorflowConfig.cmake
index 5df000819..b9c947c5b 100644
--- a/infra/nnfw/cmake/packages/TensorflowConfig.cmake
+++ b/infra/nnfw/cmake/packages/TensorflowConfig.cmake
@@ -1,6 +1,6 @@
function(_Tensorflow_Import)
if(NOT DEFINED TENSORFLOW_DIR)
- set(TENSORFLOW_DIR ${NNFW_EXTERNALS_DIR}/tensorflow)
+ set(TENSORFLOW_DIR ${NNAS_EXTERNALS_DIR}/tensorflow)
endif(NOT DEFINED TENSORFLOW_DIR)
if(NOT DEFINED NSYNC_ARCH)
diff --git a/infra/nnfw/command/build b/infra/nnfw/command/build
index c9fe814c8..b0301d2f4 100644
--- a/infra/nnfw/command/build
+++ b/infra/nnfw/command/build
@@ -2,23 +2,10 @@
import "build.configuration"
-if [[ ! -d "${BUILD_ALIAS}" ]]; then
- echo "'${BUILD_ALIAS}' does not exist. Please run 'configure' first"
+if [[ ! -d "${BUILD_PATH}" ]]; then
+ echo "[ERROR] '${BUILD_PATH}' does not exist. Please run 'configure' first"
exit 255
fi
-# Set parallel build
-# TODO Use argument instead of environment variable
-HOST_OS=${HOST_OS:-linux}
-NPROCS=${NPROCS:-1}
-PARALLEL_BUILD=${PARALLEL_BUILD:-1}
-
-if [ "${PARALLEL_BUILD}" == "1" ]; then
- # Get number of processors (linux only for now)
- if [ "${HOST_OS}" == "linux" ]; then
- NPROCS="$(grep -c ^processor /proc/cpuinfo)"
- fi
-fi
-
-cd ${BUILD_ALIAS}
-make -j ${NPROCS} "$@"
+cd ${BUILD_PATH}
+make "$@"
diff --git a/infra/nnfw/command/configure b/infra/nnfw/command/configure
index 2f47dfedc..98cad7671 100644
--- a/infra/nnfw/command/configure
+++ b/infra/nnfw/command/configure
@@ -2,11 +2,25 @@
import "build.configuration"
-BUILD_PATH="${NNFW_BUILD_DIR:-${BUILD_ALIAS}}"
-INSTALL_PATH="${NNFW_INSTALL_PREFIX:-${INSTALL_ALIAS}}"
+INSTALL_PATH="${NNFW_INSTALL_PREFIX:-${WORKSPACE_PATH}/out}"
+# Normalize to absolute path
+if [[ "${INSTALL_PATH}" != /* ]]; then
+ INSTALL_PATH=${NNFW_PROJECT_PATH}/${INSTALL_PATH}
+fi
# Create "BUILD_PATH"
mkdir -p "${BUILD_PATH}"
+if [ ! -d "${INSTALL_PATH}" ]; then
+ echo "[WARNING] Cannot find install directory '${INSTALL_PATH}'"
+ echo " Try to make install directory"
+ mkdir -p "${INSTALL_PATH}"
+ if [ ! -d "${INSTALL_PATH}" ]; then
+ echo "[ERROR] Fail to make install directory '${INSTALL_PATH}'"
+ echo " Please make '${INSTALL_PATH}' directory first"
+ exit 255
+ fi
+fi
+
cd "${BUILD_PATH}"
cmake "${NNFW_PROJECT_PATH}"/infra/nnfw -DCMAKE_INSTALL_PREFIX="${INSTALL_PATH}" "$@"
diff --git a/infra/nnfw/command/copyright-check b/infra/nnfw/command/copyright-check
index 9401e69d3..b5e133f82 100644
--- a/infra/nnfw/command/copyright-check
+++ b/infra/nnfw/command/copyright-check
@@ -2,52 +2,48 @@
INVALID_EXIT=0
-check_copyright_year() {
- DIRECTORIES_NOT_TO_BE_TESTED=$2
- YEAR=`date +"%Y"`
- CORRECT_COPYRIGHT="Copyright (c) $YEAR Samsung Electronics Co"
- FILE_EXT_TO_SEARCH="\.h$\|\.hpp$\|\.cc$\|\.cpp$\|\.cl$"
-
- # Check newly added files
- #this also includes files, that were moved here from another dir
- NEW_FILES_OF_SUBDIR_TO_CHECK=$(git whatchanged --diff-filter=A --since "01/01/2019"\
- --oneline --name-only --pretty=format: . | sort | uniq\
- | grep $FILE_EXT_TO_SEARCH)
- ARR=($NEW_FILES_OF_SUBDIR_TO_CHECK)
- for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
- if [[ $s = $TEST_DIR* ]]; then
- skip=${s#$TEST_DIR/}/
- ARR=(${ARR[*]//$skip*})
- fi
- done
- NEW_FILES_OF_SUBDIR_TO_CHECK=${ARR[*]}
- if [[ ${#NEW_FILES_OF_SUBDIR_TO_CHECK} -ne 0 ]]; then
- for f in $NEW_FILES_OF_SUBDIR_TO_CHECK; do
- [[ -f "$f" ]] || continue
-
- CREATED_YEAR=$(git log --follow --format=%aD $f | tail -1)
- [[ $CREATED_YEAR != *"$YEAR"* ]] && continue
-
- COPYRIGHT_YEAR=$(sed -rn '0,/.*Copyright \(c\) ([^ ]+).*/ s//\1/p' $f)
- if [[ $COPYRIGHT_YEAR != $YEAR ]]; then
- [[ -z "$COPYRIGHT_YEAR" ]] && COPYRIGHT_YEAR="None"
- echo "Copyright year of $f is incorrect: expected $YEAR, found $COPYRIGHT_YEAR"
- INVALID_EXIT=1
- elif ! grep -q "$CORRECT_COPYRIGHT" $f; then
- echo "Copyright format of $f is incorrect: expected $CORRECT_COPYRIGHT"
- INVALID_EXIT=1
- fi
- done
+check_copyright() {
+ DIRECTORIES_NOT_TO_BE_TESTED=$1
+ CORRECT_COPYRIGHT="Copyright \(c\) [0-9\-]+ Samsung Electronics Co\., Ltd\. All Rights Reserved"
+
+ FILES_TO_CHECK=$(git ls-files -c --exclude-standard)
+ FILES_TO_CHECK_COPYRIGHTS=()
+ for f in ${FILES_TO_CHECK[@]}; do
+ # Manually ignore checking
+ if [[ ${f} == +(*/NeuralNetworks.h|*/NeuralNetworksExtensions.h) ]]; then
+ continue
+ fi
+
+ # File extension to check
+ if [[ ${f} == +(*.h|*.hpp|*.cpp|*.cc|*.c|*.cl) ]]; then
+ FILES_TO_CHECK_COPYRIGHTS+=("${f}")
fi
+ done
+
+ for s in ${DIRECTORIES_NOT_TO_BE_TESTED[@]}; do
+ FILES_TO_CHECK_COPYRIGHTS=(${FILES_TO_CHECK_COPYRIGHTS[*]/$s*/})
+ done
+
+ if [[ ${#FILES_TO_CHECK_COPYRIGHTS} -ne 0 ]]; then
+ for f in ${FILES_TO_CHECK_COPYRIGHTS[@]}; do
+ if ! grep -qE "$CORRECT_COPYRIGHT" $f; then
+ CREATED_YEAR=$(git log --follow --format=%aD $f | tail -1 | awk '{print $4}')
+ EXAMPLE_COPYRIGHT="Copyright (c) $CREATED_YEAR Samsung Electronics Co., Ltd. All Rights Reserved"
+ echo "Copyright format of $f is incorrect: recommend \"$EXAMPLE_COPYRIGHT\""
+ INVALID_EXIT=1
+ fi
+ done
+ fi
}
DIRECTORIES_NOT_TO_BE_TESTED=()
-for DIR_NOT_TO_BE_TESTED in $(find -name '.FORMATDENY' -exec dirname {} \;); do
+for DIR_NOT_TO_BE_TESTED in $(git ls-files -co --exclude-standard '*/.FORMATDENY'); do
DIRECTORIES_NOT_TO_BE_TESTED+=("$DIR_NOT_TO_BE_TESTED")
+ DIRECTORIES_NOT_TO_BE_TESTED+=($(dirname "${DIR_NOT_TO_BE_TESTED}"))
done
-check_copyright_year $DIRECTORIES_NOT_TO_BE_TESTED
+check_copyright $DIRECTORIES_NOT_TO_BE_TESTED
if [[ $INVALID_EXIT -ne 0 ]]; then
echo "[FAILED] Invalid copyright check exit."
diff --git a/infra/nnfw/command/count-unittest b/infra/nnfw/command/count-unittest
new file mode 100644
index 000000000..3ce7bbac3
--- /dev/null
+++ b/infra/nnfw/command/count-unittest
@@ -0,0 +1,74 @@
+#!/bin/bash
+#
+# Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Usage()
+{
+ echo "Usage: ./$0 [--install-path=Product/out]"
+}
+
+INSTALL_PATH=Product/out
+
+for i in "$@"
+do
+ case $i in
+ -h|--help|help)
+ Usage
+ exit 1
+ ;;
+ --install-path=*)
+ INSTALL_PATH=${i#*=}
+ ;;
+ *)
+ Usage
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+# List of gtest binaries
+GTEST_BINS=(
+ unittest_standalone/nnfw_api_gtest
+ unittest_standalone/test_compute
+ unittest_standalone/test_onert
+ unittest_standalone/test_onert_backend_cpu_common
+ unittest_standalone/test_onert_frontend_nnapi
+ unittest_standalone/tflite_test
+)
+
+# Collect test cases
+echo "Install Path : '$INSTALL_PATH'"
+TEST_LIST=
+for GTEST_BIN in ${GTEST_BINS[@]}; do
+ GTEST_PATH=$INSTALL_PATH/$GTEST_BIN
+ echo "Collecting test cases from '$GTEST_PATH'"
+ TESTS=$($GTEST_PATH --gtest_list_tests | grep '^ ')
+ if [ "$?" -ne 0 ]; then
+ echo "Error collecting test cases from '$GTEST_PATH'"
+ exit 1;
+ fi
+ TEST_LIST=$TEST_LIST$TESTS
+done
+
+# Count stats
+TOTAL_TCS=$(echo "$TEST_LIST" | wc -l)
+TOTAL_NEG_TCS=$(echo "$TEST_LIST" | grep '^ neg_' | wc -l)
+TOTAL_POS_TCS=$(echo "$TEST_LIST" | grep '^ neg_' -v | wc -l)
+
+# Report stats
+printf "TOTAL NUMBER OF TEST CASES : %5d\n" $TOTAL_TCS
+printf "TOTAL NUMBER OF POSITIVE TEST CASES : %5d\n" $TOTAL_POS_TCS
+printf "TOTAL NUMBER OF NEGATIVE TEST CASES : %5d\n" $TOTAL_NEG_TCS
diff --git a/infra/nnfw/command/doxygen b/infra/nnfw/command/doxygen
deleted file mode 100644
index f455934e4..000000000
--- a/infra/nnfw/command/doxygen
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-pushd ${NNFW_PROJECT_PATH} > /dev/null
-
-doxygen infra/nnfw/doxygen/Doxyfile
-
-popd > /dev/null
diff --git a/infra/nnfw/command/gen-coverage-report b/infra/nnfw/command/gen-coverage-report
deleted file mode 100644
index 8fd398db3..000000000
--- a/infra/nnfw/command/gen-coverage-report
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/bash
-
-# This file is based on https://github.sec.samsung.net/STAR/nncc/pull/80
-
-LCOV_PATH=$(command -v lcov)
-GENHTML_PATH=$(command -v genhtml)
-
-SRC_PREFIX=${SRC_PREFIX:-${NNFW_PROJECT_PATH}}
-
-if [[ -z "${LCOV_PATH}" ]]; then
- echo "ERROR: 'lcov' is not found"
- exit 255
-fi
-
-if [[ -z "${GENHTML_PATH}" ]]; then
- echo "ERROR: 'genhtml' is not found"
- exit 255
-fi
-
-if [[ -z "${GCOV_PATH}" ]]; then
- GCOV_PATH=$(command -v gcov)
- if [[ -z "${GCOV_PATH}" ]]; then
- echo "ERROR: 'gcov' is not found"
- exit 255
- fi
-fi
-
-OUTPUT_PATH="$1"
-
-if [[ -z "${OUTPUT_PATH}" ]]; then
- OUTPUT_PATH="$NNFW_PROJECT_PATH/coverage"
-fi
-
-if [[ -e "${OUTPUT_PATH}" ]]; then
- echo "ERROR: '${OUTPUT_PATH}' already exists"
- exit 255
-fi
-
-mkdir -p "${OUTPUT_PATH}"
-
-RAW_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.raw.info"
-LIBS_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.libs.info"
-INCLUDE_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.include.info"
-RUNTIMES_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.runtimes.info"
-TOOLS_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.tools.info"
-FINAL_COVERAGE_INFO_PATH="${OUTPUT_PATH}/coverage.info"
-HTML_PATH="${OUTPUT_PATH}/html"
-COVERTURA_PATH="${OUTPUT_PATH}/nnfw_coverage.xml"
-
-"${LCOV_PATH}" -c -d "${NNFW_PROJECT_PATH}" --gcov-tool ${GCOV_PATH} -o "${RAW_COVERAGE_INFO_PATH}"
-#"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${LIBS_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/libs/*"
-#"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${INCLUDE_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/include/*"
-"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${RUNTIMES_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/runtimes/*"
-"${LCOV_PATH}" -e "${RAW_COVERAGE_INFO_PATH}" -o "${TOOLS_COVERAGE_INFO_PATH}" "${SRC_PREFIX}/tests/tools/*"
-#"${LCOV_PATH}" -a "${LIBS_COVERAGE_INFO_PATH}" -a "${INCLUDE_COVERAGE_INFO_PATH}" \
-# -a "${RUNTIMES_COVERAGE_INFO_PATH}" -a "${TOOLS_COVERAGE_INFO_PATH}" \
-# -o "${FINAL_COVERAGE_INFO_PATH}"
-"${LCOV_PATH}" -a "${RUNTIMES_COVERAGE_INFO_PATH}" -a "${TOOLS_COVERAGE_INFO_PATH}" -o "${FINAL_COVERAGE_INFO_PATH}"
-"${GENHTML_PATH}" "${FINAL_COVERAGE_INFO_PATH}" --output-directory "${HTML_PATH}" ${GENHTML_FLAG:-}
diff --git a/infra/nnfw/command/install b/infra/nnfw/command/install
index 2bacb876b..3bbb92e0f 100644
--- a/infra/nnfw/command/install
+++ b/infra/nnfw/command/install
@@ -2,15 +2,10 @@
import "build.configuration"
-if [[ ! -d "${BUILD_ALIAS}" ]]; then
- echo "'${BUILD_ALIAS}' does not exist. Please run 'configure' first"
+if [[ ! -d "${BUILD_PATH}" ]]; then
+ echo "'${BUILD_PATH}' does not exist. Please run 'configure' first"
exit 255
fi
-if [[ ! -d "${INSTALL_ALIAS}" ]]; then
- echo "'${INSTALL_ALIAS}' does not exist. Please run 'configure' first"
- exit 255
-fi
-
-cd ${BUILD_ALIAS}
+cd ${BUILD_PATH}
make install
diff --git a/infra/nnfw/config/build.configuration b/infra/nnfw/config/build.configuration
index cdbf3bf4b..5e1a8deb1 100644
--- a/infra/nnfw/config/build.configuration
+++ b/infra/nnfw/config/build.configuration
@@ -1,5 +1,6 @@
-WORKSPACE_RPATH=${NNFW_WORKSPACE:-Product}
-
-# Soft link path to build and install directory
-BUILD_ALIAS=${NNFW_PROJECT_PATH}/${WORKSPACE_RPATH}/obj
-INSTALL_ALIAS=${NNFW_PROJECT_PATH}/${WORKSPACE_RPATH}/out
+WORKSPACE_PATH=${NNFW_WORKSPACE:-${NNFW_PROJECT_PATH}/Product}
+# Normalize to absolute path
+if [[ "${WORKSPACE_PATH}" != /* ]]; then
+ WORKSPACE_PATH=${NNFW_PROJECT_PATH}/${WORKSPACE_PATH}
+fi
+BUILD_PATH=${WORKSPACE_PATH}/${NNFW_BUILD_RPATH:-obj}
diff --git a/infra/nnfw/config/docker.configuration b/infra/nnfw/config/docker.configuration
index b7e9ad30f..c61ab0ff2 100644
--- a/infra/nnfw/config/docker.configuration
+++ b/infra/nnfw/config/docker.configuration
@@ -1,6 +1,6 @@
#!/bin/bash
-DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnas}
+DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME:-nnfw/one-devtools}
echo "Using docker image ${DOCKER_IMAGE_NAME}"
if [ -z "`docker images ${DOCKER_IMAGE_NAME}`" ]; then
@@ -23,7 +23,9 @@ DOCKER_ENV_VARS+=" -e http_proxy"
DOCKER_ENV_VARS+=" -e no_proxy"
DOCKER_ENV_VARS+=" -e GIT_SSL_NO_VERIFY"
DOCKER_ENV_VARS+=" -e EXTERNAL_DOWNLOAD_SERVER"
-DOCKER_ENV_VARS+=" -e GENERATE_NNAPI_TESTS"
+DOCKER_ENV_VARS+=" -e NNFW_WORKSPACE"
+DOCKER_ENV_VARS+=" -e EXT_ACL_FOLDER"
+DOCKER_ENV_VARS+=" -e NDK_DIR"
DOCKER_RUN_OPTS="${DOCKER_OPTS}"
DOCKER_RUN_OPTS+=" --rm"
diff --git a/infra/nnfw/config/gbs.conf b/infra/nnfw/config/gbs.conf
index 515cadaba..bad9eb204 100644
--- a/infra/nnfw/config/gbs.conf
+++ b/infra/nnfw/config/gbs.conf
@@ -5,7 +5,7 @@ profile = profile.tizen
[profile.tizen]
user=obs_viewer
obs = obs.tizen
-repos = repo.tizen_base,repo.tizen_mobile
+repos = repo.tizen_one,repo.tizen_base,repo.tizen_mobile
buildroot = /home/GBS-ROOT/
[obs.tizen]
@@ -15,6 +15,8 @@ url = http://api.tizen.org
url = http://download.tizen.org/snapshots/tizen/unified/latest/repos/standard/packages/
[repo.tizen_base]
-url = http://download.tizen.org/snapshots/tizen/base/latest/repos/standard/packages/
+url = http://download.tizen.org/snapshots/tizen/base/latest/repos/standard/packages/
+[repo.tizen_one]
+url = http://nnfw.mooo.com/archive/tizen/
diff --git a/infra/packaging/build b/infra/packaging/build
new file mode 100644
index 000000000..e941a724b
--- /dev/null
+++ b/infra/packaging/build
@@ -0,0 +1,96 @@
+#!/bin/bash
+
+SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+if [[ -z "${NNAS_PROJECT_PATH}" ]]; then
+ echo "ERROR: NNAS_PROJECT_PATH is not specified"
+ exit 255
+fi
+
+# The default preset
+PRESET="20200630"
+
+EXTRA_OPTIONS=()
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--prefix')
+ NNAS_INSTALL_PREFIX="$2"
+ shift 2
+ ;;
+ '--preset')
+ PRESET="$2"
+ shift 2
+ ;;
+ '--')
+ shift
+ while [ "$#" -ne 0 ]; do
+ EXTRA_OPTIONS+=("$1")
+ shift
+ done
+ ;;
+ *)
+ echo "ERROR: '${CUR}' is invalid"
+ exit 255
+ ;;
+ esac
+done
+
+# Q. Is it better to have the default value for NNAS_INSTALL_PREFIX?
+# TODO Show USAGE
+if [[ -z "${NNAS_INSTALL_PREFIX}" ]]; then
+ echo "ERROR: --prefix is not specified"
+ exit 255
+fi
+
+PRESET_PATH="${SCRIPT_PATH}/preset/${PRESET}"
+
+if [[ ! -f "${PRESET_PATH}" ]]; then
+ echo "ERROR: ${PRESET} is unavailable"
+ # TODO Show available presets
+ exit 255
+fi
+
+echo "-- Use '${PRESET}' SDK preset"
+
+source "${PRESET_PATH}"
+
+# Normalize to absolute path
+if [[ "${NNAS_INSTALL_PREFIX}" != /* ]]; then
+ NNAS_INSTALL_PREFIX=${PWD}/${NNAS_INSTALL_PREFIX}
+fi
+
+if [[ -z "${NNAS_BUILD_PREFIX}" ]]; then
+ # Create a temporary directory and use it!
+ NNAS_BUILD_PREFIX=$(mktemp -d)
+ trap "{ rm -rf $NNAS_BUILD_PREFIX; }" EXIT
+fi
+
+# Create a release directory
+mkdir -p "${NNAS_INSTALL_PREFIX}"
+
+# Build and Install NNCC
+NNCC_BUILD_PREFIX="${NNAS_BUILD_PREFIX}/nncc"
+NNCC_INSTALL_PREFIX="${NNAS_INSTALL_PREFIX}"
+
+mkdir -p "${NNCC_BUILD_PREFIX}"
+cd "${NNCC_BUILD_PREFIX}"
+
+function join_by
+{
+ local IFS="$1"; shift; echo "$*"
+}
+
+# Invoke "preset_configure" function that the preset provides
+preset_configure
+
+NPROC=${NPROC:-$(cat /proc/cpuinfo | grep -c processor)}
+echo "[BUILD] \"make\" with -j${NPROC} option. You can specify the number of jobs by defining NPROC"
+cmake --build . -- -j$((NPROC/2)) all
+cmake --build . -- install
+# Install NN Package tools
+NNPKG_INSTALL_PREFIX="${NNAS_INSTALL_PREFIX}"
+
+# Invoke "preset_install" function that the preset provides
+preset_install
diff --git a/infra/packaging/chklist/LAYOUT_191115 b/infra/packaging/chklist/LAYOUT_191115
new file mode 100644
index 000000000..e041a2c80
--- /dev/null
+++ b/infra/packaging/chklist/LAYOUT_191115
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+# Check whether the package has the following layout:
+#
+# bin/
+# model2nnpkg.sh
+# tf2circle
+# tf2nnpkg
+# tf2tflite
+# tflite2circle.sh
+# tflitejson2circlejson.py
+# lib/
+# libexo.so
+# liblocoex_customop.so
+# libloco.so
+# libmoco_import.so
+# libmoco_lang.so
+# libmoco_log.so
+# libmoco_pass.so
+# libmoco_service.so
+# libmoco_support.so
+# libmoco_tf_frontend.so
+# res/
+# circle_schema.fbs
+# tflite_schema.fbs
+
+function prepare()
+{
+ export QUESTION="Is compatible with the 2019/11/15 layout?"
+}
+
+function run()
+{
+ # The result of running "find . -print | sort | tr -d '\n\0'" from the expected package
+ EXPECTED="."
+ EXPECTED+="./bin./bin/model2nnpkg.sh./bin/tf2circle./bin/tf2nnpkg./bin/tf2tflite./bin/tflite2circle.sh"
+ EXPECTED+="./bin/tflitejson2circlejson.py./lib./lib/libexo.so./lib/liblocoex_customop.so./lib/libloco.so"
+ EXPECTED+="./lib/libmoco_import.so./lib/libmoco_lang.so./lib/libmoco_log.so./lib/libmoco_pass.so"
+ EXPECTED+="./lib/libmoco_service.so./lib/libmoco_support.so./lib/libmoco_tf_frontend.so./res"
+ EXPECTED+="./res/circle_schema.fbs./res/tflite_schema.fbs"
+
+ OBTAINED=$(cd "${NNAS_INSTALL_PREFIX}" && find . -print | sort | tr -d '\n\0')
+
+ if [[ "${OBTAINED}" = "${EXPECTED}" ]]; then
+ export PASSED=1
+ fi
+}
diff --git a/infra/packaging/chklist/LAYOUT_191215 b/infra/packaging/chklist/LAYOUT_191215
new file mode 100644
index 000000000..8095197f6
--- /dev/null
+++ b/infra/packaging/chklist/LAYOUT_191215
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+# Check whether the package has the following layout:
+#
+# bin/
+# model2nnpkg.sh
+# tf2circle
+# tf2nnpkg
+# lib/
+# libexo.so
+# liblocoex_customop.so
+# libloco.so
+# libmoco_import.so
+# libmoco_lang.so
+# libmoco_log.so
+# libmoco_pass.so
+# libmoco_service.so
+# libmoco_support.so
+# libmoco_tf_frontend.so
+
+function prepare()
+{
+ export QUESTION="Is compatible with the 2019/12/15 layout?"
+}
+
+function run()
+{
+ # The result of running "find . -print | sort | tr -d '\n\0'" from the expected package
+ EXPECTED="."
+ EXPECTED+="./bin./bin/model2nnpkg.sh./bin/tf2circle./bin/tf2nnpkg"
+ EXPECTED+="./lib./lib/libexo.so./lib/liblocoex_customop.so"
+ EXPECTED+="./lib/libloco.so./lib/libmoco_import.so./lib/libmoco_lang.so./lib/libmoco_log.so./lib/libmoco_pass.so./lib/libmoco_service.so./lib/libmoco_support.so./lib/libmoco_tf_frontend.so"
+
+ OBTAINED=$(cd "${NNAS_INSTALL_PREFIX}" && find . -print | sort | tr -d '\n\0')
+
+ if [[ "${OBTAINED}" = "${EXPECTED}" ]]; then
+ export PASSED=1
+ fi
+}
diff --git a/infra/packaging/chklist/TF2CIRCLE_EXIST b/infra/packaging/chklist/TF2CIRCLE_EXIST
new file mode 100644
index 000000000..b0834fc27
--- /dev/null
+++ b/infra/packaging/chklist/TF2CIRCLE_EXIST
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+function prepare()
+{
+ export QUESTION="Is there tf2circle?"
+}
+
+function run()
+{
+ if [[ -f "${NNAS_INSTALL_PREFIX}/bin/tf2circle" ]]; then
+ export PASSED=1
+ fi
+}
diff --git a/infra/packaging/chklist/TF2CIRCLE_RUNNABLE b/infra/packaging/chklist/TF2CIRCLE_RUNNABLE
new file mode 100644
index 000000000..597778ff4
--- /dev/null
+++ b/infra/packaging/chklist/TF2CIRCLE_RUNNABLE
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+function prepare()
+{
+ export QUESTION="Is it possible to run tf2circle?"
+}
+
+function run()
+{
+ BIN="${NNAS_INSTALL_PREFIX}/bin/tf2circle"
+ if [[ -f "${BIN}" ]]; then
+ if [[ $(ldd -r "${BIN}" | grep '^undefined' | wc -l) -eq 0 ]]; then
+ export PASSED=1
+ fi
+ fi
+}
diff --git a/infra/packaging/chklist/TF2NNPKG_EXIST b/infra/packaging/chklist/TF2NNPKG_EXIST
new file mode 100644
index 000000000..bbe9a3157
--- /dev/null
+++ b/infra/packaging/chklist/TF2NNPKG_EXIST
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+function prepare()
+{
+ export QUESTION="Is there tf2nnpkg?"
+}
+
+function run()
+{
+ if [[ -f "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg" ]]; then
+ export PASSED=1
+ fi
+}
diff --git a/infra/packaging/chklist/TF2TFLITE_EXIST b/infra/packaging/chklist/TF2TFLITE_EXIST
new file mode 100644
index 000000000..1a1c65cbc
--- /dev/null
+++ b/infra/packaging/chklist/TF2TFLITE_EXIST
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+function prepare()
+{
+ export QUESTION="Is there tf2tflite?"
+}
+
+function run()
+{
+ if [[ -f "${NNAS_INSTALL_PREFIX}/bin/tf2tflite" ]]; then
+ export PASSED=1
+ fi
+}
diff --git a/infra/packaging/chklist/TF2TFLITE_RUNNABLE b/infra/packaging/chklist/TF2TFLITE_RUNNABLE
new file mode 100644
index 000000000..4c1239c33
--- /dev/null
+++ b/infra/packaging/chklist/TF2TFLITE_RUNNABLE
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+function prepare()
+{
+ export QUESTION="Is it possible to run tf2tflite?"
+}
+
+function run()
+{
+ if [[ $(ldd -r "${NNAS_INSTALL_PREFIX}/bin/tf2tflite" | grep '^undefined' | wc -l) -eq 0 ]]; then
+ export PASSED=1
+ fi
+}
diff --git a/infra/packaging/preset/20191115 b/infra/packaging/preset/20191115
new file mode 100644
index 000000000..e2f6f8c73
--- /dev/null
+++ b/infra/packaging/preset/20191115
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "bino" "cwrap" "fipe" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # loco IR extension: Custom Op Support
+ REQUIRED_UNITS+=("locoex-customop")
+ # TensorFlow Libraries
+ REQUIRED_UNITS+=("tfinfo" "plier-tf")
+ # TensorFlow GraphDef I/O
+ REQUIRED_UNITS+=("mio-tf")
+ # TensorFlow Frontend (.pb/.pbtxt -> loco.canonical)
+ REQUIRED_UNITS+=("moco-log" "moco" "moco-tf")
+ # TensorFlow Lite/Circle Backend (loco.canonical -> .tflite, loco.circle -> .circle)
+ REQUIRED_UNITS+=("exo")
+ # Tools
+ REQUIRED_UNITS+=("tf2tflite" "tf2circle")
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh" \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/tflite2circle/tflite2circle.sh" \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/tflite2circle/tflitejson2circlejson.py"
+
+ install -T -m 644 -D \
+ "${SCRIPT_PATH}/res/tflite_schema.fbs" "${NNPKG_INSTALL_PREFIX}/res/tflite_schema.fbs"
+ install -T -m 644 -D \
+ "${NNAS_PROJECT_PATH}/nnpackage/schema/circle_schema.fbs" "${NNPKG_INSTALL_PREFIX}/res/circle_schema.fbs"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+}
diff --git a/infra/packaging/preset/20191215 b/infra/packaging/preset/20191215
new file mode 100644
index 000000000..980d729fc
--- /dev/null
+++ b/infra/packaging/preset/20191215
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "bino" "cwrap" "fipe" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # loco IR extension: Custom Op Support
+ REQUIRED_UNITS+=("locoex-customop")
+ # TensorFlow Libraries
+ REQUIRED_UNITS+=("tfinfo" "plier-tf")
+ # TensorFlow GraphDef I/O
+ REQUIRED_UNITS+=("mio-tf")
+ # TensorFlow Frontend (.pb/.pbtxt -> loco.canonical)
+ REQUIRED_UNITS+=("moco-log" "moco" "moco-tf")
+ # TensorFlow Lite/Circle Backend (loco.canonical -> .tflite, loco.circle -> .circle)
+ REQUIRED_UNITS+=("exo")
+ # Tools
+ REQUIRED_UNITS+=("tf2circle")
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg.20191215" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+}
diff --git a/infra/packaging/preset/20191231_windows b/infra/packaging/preset/20191231_windows
new file mode 100644
index 000000000..aad64ea72
--- /dev/null
+++ b/infra/packaging/preset/20191231_windows
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "bino" "cwrap" "fipe" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # loco IR extension: Custom Op Support
+ REQUIRED_UNITS+=("locoex-customop")
+ # TensorFlow Libraries
+ REQUIRED_UNITS+=("tfinfo" "plier-tf")
+ # TensorFlow GraphDef I/O
+ REQUIRED_UNITS+=("mio-tf")
+ # TensorFlow Frontend (.pb/.pbtxt -> loco.canonical)
+ REQUIRED_UNITS+=("moco-log" "moco" "moco-tf")
+ # TensorFlow Lite/Circle Backend (loco.canonical -> .tflite, loco.circle -> .circle)
+ REQUIRED_UNITS+=("exo")
+ # Tools
+ REQUIRED_UNITS+=("tf2circle")
+
+ NPROC=$(cat /proc/cpuinfo | grep -c processor)
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -G "MSYS Makefiles" \
+ -DUSE_PROTOBUF_LEGACY_IMPORT=ON \
+ -DCMAKE_EXE_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DCMAKE_SHARED_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DENABLE_TEST=OFF \
+ -DDOWNLOAD_GTEST=OFF \
+ -DBUILD_GTEST=OFF \
+ -DCMAKE_C_COMPILER=gcc \
+ -DCMAKE_CXX_COMPILER=g++ \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ -DEXTERNALS_BUILD_THREADS=$((NPROC/2)) \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg.20191215" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+}
diff --git a/infra/packaging/preset/20200115_windows b/infra/packaging/preset/20200115_windows
new file mode 100644
index 000000000..f71b7643d
--- /dev/null
+++ b/infra/packaging/preset/20200115_windows
@@ -0,0 +1,53 @@
+#!/bin/bash
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "bino" "cwrap" "fipe" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # loco IR extension: Custom Op Support
+ REQUIRED_UNITS+=("locoex-customop")
+ # TensorFlow Libraries
+ REQUIRED_UNITS+=("tfinfo" "plier-tf")
+ # TensorFlow GraphDef I/O
+ REQUIRED_UNITS+=("mio-tf")
+ # TensorFlow Frontend (.pb/.pbtxt -> loco.canonical)
+ REQUIRED_UNITS+=("moco-log" "moco" "moco-tf")
+ # TensorFlow Lite/Circle Backend (loco.canonical -> .tflite, loco.circle -> .circle)
+ REQUIRED_UNITS+=("exo")
+ # Tools
+ REQUIRED_UNITS+=("tf2nnpkg")
+
+ NPROC=$(cat /proc/cpuinfo | grep -c processor)
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -G "MSYS Makefiles" \
+ -DTF2NNPKG_FOR_WINDOWS=ON \
+ -DUSE_PROTOBUF_LEGACY_IMPORT=ON \
+ -DCMAKE_EXE_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DCMAKE_SHARED_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DENABLE_TEST=OFF \
+ -DDOWNLOAD_GTEST=OFF \
+ -DBUILD_GTEST=OFF \
+ -DCMAKE_C_COMPILER=gcc \
+ -DCMAKE_CXX_COMPILER=g++ \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ -DEXTERNALS_BUILD_THREADS=$((NPROC/2)) \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ # Install libraries to bin/ for Windows release
+ mv ${NNCC_INSTALL_PREFIX}/lib/*.dll ${NNCC_INSTALL_PREFIX}/bin
+ rm -rf ${NNCC_INSTALL_PREFIX}/lib
+}
diff --git a/infra/packaging/preset/20200220 b/infra/packaging/preset/20200220
new file mode 100644
index 000000000..411f3771c
--- /dev/null
+++ b/infra/packaging/preset/20200220
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+PRESET="20200220"
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "bino" "cwrap" "fipe" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # loco IR extension: Custom Op Support
+ REQUIRED_UNITS+=("locoex-customop")
+ # TensorFlow Libraries
+ REQUIRED_UNITS+=("tfinfo" "plier-tf")
+ # TensorFlow GraphDef I/O
+ REQUIRED_UNITS+=("mio-tf")
+ # TensorFlow Frontend (.pb/.pbtxt -> loco.canonical)
+ REQUIRED_UNITS+=("moco-log" "moco" "moco-tf")
+ # TensorFlow Lite/Circle Backend (loco.canonical -> .tflite, loco.circle -> .circle)
+ REQUIRED_UNITS+=("exo")
+ # Tools
+ REQUIRED_UNITS+=("tf2circle")
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg.${PRESET}" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+}
diff --git a/infra/packaging/preset/20200508 b/infra/packaging/preset/20200508
new file mode 100644
index 000000000..15ec3453c
--- /dev/null
+++ b/infra/packaging/preset/20200508
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+# NOTE purpose of this file is static analysis only
+# new official preset will be added when new programs are ready
+
+PRESET="20200508"
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "cwrap" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert" "foder")
+ REQUIRED_UNITS+=("safemain")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # Flatbuffer I/O
+ REQUIRED_UNITS+=("mio-tflite" "mio-circle")
+ # Circle compiler library (.circle -> .circle)
+ REQUIRED_UNITS+=("luci")
+ # Tools
+ REQUIRED_UNITS+=("tflite2circle" "circle2circle" "tflchef" "circlechef")
+ REQUIRED_UNITS+=("tf2tfliteV2" "luci-interpreter" "circle-verify")
+ REQUIRED_UNITS+=("record-minmax")
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg.${PRESET}" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+
+ # Create python virtual environment
+ python3 -m venv "${NNAS_INSTALL_PREFIX}/bin/venv"
+
+ # Install tensorflow
+ source "${NNAS_INSTALL_PREFIX}/bin/venv/bin/activate"
+ python -m pip --default-timeout=1000 --trusted-host pypi.org --trusted-host files.pythonhost.org \
+ install -U pip setuptools
+ python -m pip --default-timeout=1000 --trusted-host pypi.org --trusted-host files.pythonhost.org \
+ install tensorflow==2.2.0
+}
diff --git a/infra/packaging/preset/20200616_windows b/infra/packaging/preset/20200616_windows
new file mode 100644
index 000000000..9bf533e96
--- /dev/null
+++ b/infra/packaging/preset/20200616_windows
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "cwrap" "foder" "oops" "pepper-assert")
+ REQUIRED_UNITS+=("pepper-str" "pepper-strcast" "pp" "safemain" "stdex")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # TensorFlow GraphDef I/O
+ REQUIRED_UNITS+=("mio-tflite" "mio-circle")
+ # Circle compiler library (.circle -> .circle)
+ REQUIRED_UNITS+=("luci")
+ # Tools
+ REQUIRED_UNITS+=("circlechef" "circle2circle" "circle-verify")
+ REQUIRED_UNITS+=("luci-interpreter" "record-minmax" "tflchef")
+ REQUIRED_UNITS+=("tflite2circle" "tf2tfliteV2")
+
+ NPROC=$(cat /proc/cpuinfo | grep -c processor)
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -G "MSYS Makefiles" \
+ -DTF2NNPKG_FOR_WINDOWS=ON \
+ -DUSE_PROTOBUF_LEGACY_IMPORT=ON \
+ -DCMAKE_EXE_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DCMAKE_SHARED_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DENABLE_TEST=OFF \
+ -DDOWNLOAD_GTEST=OFF \
+ -DBUILD_GTEST=OFF \
+ -DCMAKE_C_COMPILER=gcc \
+ -DCMAKE_CXX_COMPILER=g++ \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ -DEXTERNALS_BUILD_THREADS=$((NPROC/2)) \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ # Install libraries to bin/ for Windows release
+ mv ${NNCC_INSTALL_PREFIX}/lib/*.dll ${NNCC_INSTALL_PREFIX}/bin
+ rm -rf ${NNCC_INSTALL_PREFIX}/lib
+
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg.${PRESET}" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+
+ # Though you have to install tensorflow to run 'tf2tfliteV2',
+ # tensorflow can't be installed in MinGW. First, you can install tensorflow
+ # from a Windows native CMD (run as administrator) with a python virtual environment.
+ # Then, you must copy it to "${NNAS_INSTALL_PREFIX}/bin/venv"
+}
diff --git a/infra/packaging/preset/20200630 b/infra/packaging/preset/20200630
new file mode 100644
index 000000000..506b9f8db
--- /dev/null
+++ b/infra/packaging/preset/20200630
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+# NOTE purpose of this file is static analysis only
+# new official preset will be added when new programs are ready
+
+PRESET="20200630"
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "cwrap" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert" "foder")
+ REQUIRED_UNITS+=("souschef")
+ REQUIRED_UNITS+=("safemain")
+ REQUIRED_UNITS+=("arser")
+ REQUIRED_UNITS+=("vconone")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # Flatbuffer I/O
+ REQUIRED_UNITS+=("mio-tflite" "mio-circle")
+ # Circle compiler library (.circle -> .circle)
+ REQUIRED_UNITS+=("luci")
+ # Tools
+ REQUIRED_UNITS+=("tflite2circle" "circle2circle" "tflchef" "circlechef")
+ REQUIRED_UNITS+=("tf2tfliteV2" "luci-interpreter" "circle-verify")
+ REQUIRED_UNITS+=("record-minmax" "circle-quantizer" "rawdata2hdf5")
+ REQUIRED_UNITS+=("one-cmds")
+ REQUIRED_UNITS+=("bcq-tools")
+
+ NPROC=${NPROC:-$(cat /proc/cpuinfo | grep -c processor)}
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ -DEXTERNALS_BUILD_THREADS=$((NPROC/2)) \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg.${PRESET}" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+}
diff --git a/infra/packaging/preset/20200731_windows b/infra/packaging/preset/20200731_windows
new file mode 100644
index 000000000..763487a47
--- /dev/null
+++ b/infra/packaging/preset/20200731_windows
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+function preset_configure()
+{
+ REQUIRED_UNITS=()
+ # Common Libraries
+ REQUIRED_UNITS+=("angkor" "cwrap" "pepper-str" "pepper-strcast" "pp" "stdex")
+ REQUIRED_UNITS+=("oops" "pepper-assert" "foder")
+ REQUIRED_UNITS+=("souschef")
+ REQUIRED_UNITS+=("safemain")
+ REQUIRED_UNITS+=("arser")
+ REQUIRED_UNITS+=("vconone")
+ # Hermes Logging Framework
+ REQUIRED_UNITS+=("hermes" "hermes-std")
+ # loco IR and related utilities
+ REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+ # Flatbuffer I/O
+ REQUIRED_UNITS+=("mio-tflite" "mio-circle")
+ # Circle compiler library (.circle -> .circle)
+ REQUIRED_UNITS+=("luci")
+ # Tools
+ REQUIRED_UNITS+=("tflite2circle" "circle2circle" "tflchef" "circlechef")
+ REQUIRED_UNITS+=("tf2tfliteV2" "luci-interpreter" "circle-verify")
+ REQUIRED_UNITS+=("record-minmax" "circle-quantizer" "rawdata2hdf5")
+ REQUIRED_UNITS+=("one-cmds")
+ REQUIRED_UNITS+=("bcq-tools")
+
+ NPROC=$(cat /proc/cpuinfo | grep -c processor)
+
+ # TODO Use "nncc configure" and "nncc build"
+ cmake \
+ -G "MSYS Makefiles" \
+ -DUSE_PROTOBUF_LEGACY_IMPORT=ON \
+ -DCMAKE_EXE_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DCMAKE_SHARED_LINKER_FLAGS="-Wl,--allow-multiple-definition" \
+ -DENABLE_TEST=OFF \
+ -DDOWNLOAD_GTEST=OFF \
+ -DBUILD_GTEST=OFF \
+ -DCMAKE_C_COMPILER=gcc \
+ -DCMAKE_CXX_COMPILER=g++ \
+ -DCMAKE_INSTALL_PREFIX="${NNCC_INSTALL_PREFIX}" \
+ -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ -DEXTERNALS_BUILD_THREADS=$((NPROC/2)) \
+ ${EXTRA_OPTIONS[@]} \
+ "${NNAS_PROJECT_PATH}/infra/nncc"
+}
+
+function preset_install()
+{
+ # Install libraries to bin/ for Windows release
+ mv ${NNCC_INSTALL_PREFIX}/lib/*.dll ${NNCC_INSTALL_PREFIX}/bin
+ rm -rf ${NNCC_INSTALL_PREFIX}/lib
+
+ install -t "${NNPKG_INSTALL_PREFIX}/bin" -D \
+ "${NNAS_PROJECT_PATH}/tools/nnpackage_tool/model2nnpkg/model2nnpkg.sh"
+
+ # Install tf2nnpkg
+ install -T -m 755 -D "${SCRIPT_PATH}/res/tf2nnpkg.20200630" "${NNAS_INSTALL_PREFIX}/bin/tf2nnpkg"
+
+ # Though you have to install tensorflow to run 'tf2tfliteV2',
+ # tensorflow can't be installed in MinGW. First, you can install tensorflow
+ # from a Windows native CMD (run as administrator) with a python virtual environment.
+ # Then, you must copy it to "${NNAS_INSTALL_PREFIX}/bin/venv"
+}
diff --git a/infra/packaging/res/tf2nnpkg b/infra/packaging/res/tf2nnpkg
new file mode 100644
index 000000000..c300eb856
--- /dev/null
+++ b/infra/packaging/res/tf2nnpkg
@@ -0,0 +1,87 @@
+#!/bin/bash
+
+set -e
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+
+command_exists() {
+ if [ "$#" -le 0 ]; then
+ return 1
+ fi
+ command -v "$@" > /dev/null 2>&1
+}
+
+usage()
+{
+ echo "Usage: tf2nnpkg --info <path/to/info> --graphdef <path/to/pb> -o <path/to/nnpkg/directory>"
+ exit 0
+}
+
+USE_TF2CIRCLE=0
+
+# Parse command-line arguments
+#
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--help')
+ usage
+ ;;
+ '--info')
+ export INFO_FILE="$2"
+ shift 2
+ ;;
+ '--graphdef')
+ export GRAPHDEF_FILE="$2"
+ shift 2
+ ;;
+ '-o')
+ export OUTPUT_DIR="$2"
+ shift 2
+ ;;
+ '--use-tf2circle')
+ USE_TF2CIRCLE=1
+ shift 1
+ ;;
+ *)
+ echo "${CUR}"
+ shift
+ ;;
+ esac
+done
+
+if [ -z ${GRAPHDEF_FILE} ] || [ ! -e ${GRAPHDEF_FILE} ]; then
+ echo "pb is not found. Please check --graphdef is correct."
+ exit 2
+fi
+
+if [ -z ${INFO_FILE} ] || [ ! -e ${INFO_FILE} ]; then
+ echo "info is not found. Please check --info is correct."
+ exit 2
+fi
+
+FILE_BASE=$(basename ${GRAPHDEF_FILE})
+MODEL_NAME="${FILE_BASE%.*}"
+
+if [[ ${USE_TF2CIRCLE} -eq 0 ]]; then
+ export flatc=$(which flatc)
+ export tflite_schema="${ROOT}/res/tflite_schema.fbs"
+ export circle_schema="${ROOT}/res/circle_schema.fbs"
+
+ if ! command_exists $flatc; then
+ echo "Please make sure flatc is in path"
+ exit 2
+ fi
+fi
+
+TMPDIR=$(mktemp -d)
+trap "{ rm -rf $TMPDIR; }" EXIT
+
+if [[ ${USE_TF2CIRCLE} -eq 0 ]]; then
+ "${ROOT}/bin/tf2tflite" "${INFO_FILE}" "${GRAPHDEF_FILE}" "${TMPDIR}/${MODEL_NAME}.tflite"
+ "${ROOT}/bin/tflite2circle.sh" -o "${TMPDIR}" "${TMPDIR}/${MODEL_NAME}.tflite"
+else
+ "${ROOT}/bin/tf2circle" "${INFO_FILE}" "${GRAPHDEF_FILE}" "${TMPDIR}/${MODEL_NAME}.circle"
+fi
+"${ROOT}/bin/model2nnpkg.sh" -o "${OUTPUT_DIR}" "${TMPDIR}/${MODEL_NAME}.circle"
diff --git a/infra/packaging/res/tf2nnpkg.20191215 b/infra/packaging/res/tf2nnpkg.20191215
new file mode 100644
index 000000000..d334a908d
--- /dev/null
+++ b/infra/packaging/res/tf2nnpkg.20191215
@@ -0,0 +1,68 @@
+#!/bin/bash
+
+set -e
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+
+command_exists() {
+ if [ "$#" -le 0 ]; then
+ return 1
+ fi
+ command -v "$@" > /dev/null 2>&1
+}
+
+usage()
+{
+ echo "Usage: tf2nnpkg --info <path/to/info> --graphdef <path/to/pb> -o <path/to/nnpkg/directory>"
+ exit 0
+}
+
+# Parse command-line arguments
+#
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--help')
+ usage
+ ;;
+ '--info')
+ export INFO_FILE="$2"
+ shift 2
+ ;;
+ '--graphdef')
+ export GRAPHDEF_FILE="$2"
+ shift 2
+ ;;
+ '-o')
+ export OUTPUT_DIR="$2"
+ shift 2
+ ;;
+ '--use-tf2circle')
+ echo "WARNING! --use-tf2circle is deprecated"
+ shift 1
+ ;;
+ *)
+ echo "${CUR}"
+ shift
+ ;;
+ esac
+done
+
+if [ -z ${GRAPHDEF_FILE} ] || [ ! -e ${GRAPHDEF_FILE} ]; then
+ echo "pb is not found. Please check --graphdef is correct."
+ exit 2
+fi
+
+if [ -z ${INFO_FILE} ] || [ ! -e ${INFO_FILE} ]; then
+ echo "info is not found. Please check --info is correct."
+ exit 2
+fi
+
+FILE_BASE=$(basename ${GRAPHDEF_FILE})
+MODEL_NAME="${FILE_BASE%.*}"
+TMPDIR=$(mktemp -d)
+trap "{ rm -rf $TMPDIR; }" EXIT
+
+"${ROOT}/bin/tf2circle" "${INFO_FILE}" "${GRAPHDEF_FILE}" "${TMPDIR}/${MODEL_NAME}.circle"
+"${ROOT}/bin/model2nnpkg.sh" -o "${OUTPUT_DIR}" "${TMPDIR}/${MODEL_NAME}.circle"
diff --git a/infra/packaging/res/tf2nnpkg.20200220 b/infra/packaging/res/tf2nnpkg.20200220
new file mode 100644
index 000000000..0875bad2f
--- /dev/null
+++ b/infra/packaging/res/tf2nnpkg.20200220
@@ -0,0 +1,89 @@
+#!/bin/bash
+
+set -e
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+
+command_exists() {
+ if [ "$#" -le 0 ]; then
+ return 1
+ fi
+ command -v "$@" > /dev/null 2>&1
+}
+
+usage()
+{
+ echo "Convert TensorFlow model to nnpackage."
+ echo "Usage: tf2nnpkg --info <path/to/info> --graphdef <path/to/pb> [OPTION] -o <path/to/nnpkg/directory>"
+ echo "option:"
+ echo " --customop <path/to/customop.conf>"
+ exit 0
+}
+
+# Parse command-line arguments
+#
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--help')
+ usage
+ ;;
+ '--info')
+ export INFO_FILE="$2"
+ shift 2
+ ;;
+ '--graphdef')
+ export GRAPHDEF_FILE="$2"
+ shift 2
+ ;;
+ '--customop')
+ export CUSTOMOP_CONF_FILE="$2"
+ shift 2
+ ;;
+ '-o')
+ export OUTPUT_DIR="$2"
+ shift 2
+ ;;
+ '--use-tf2circle')
+ echo "WARNING! --use-tf2circle is deprecated"
+ shift 1
+ ;;
+ *)
+ echo "${CUR}"
+ shift
+ ;;
+ esac
+done
+
+if [ -z ${GRAPHDEF_FILE} ] || [ ! -e ${GRAPHDEF_FILE} ]; then
+ echo "pb is not found. Please check --graphdef is correct."
+ exit 2
+fi
+
+if [ -z ${INFO_FILE} ] || [ ! -e ${INFO_FILE} ]; then
+ echo "info is not found. Please check --info is correct."
+ exit 2
+fi
+
+# optional param
+if [ ${CUSTOMOP_CONF_FILE} ]; then
+ if [ ! -e ${CUSTOMOP_CONF_FILE} ]; then
+ echo "customop.conf is not found. Please check --customop is correct."
+ exit 2
+ fi
+fi
+
+FILE_BASE=$(basename ${GRAPHDEF_FILE})
+MODEL_NAME="${FILE_BASE%.*}"
+TMPDIR=$(mktemp -d)
+trap "{ rm -rf $TMPDIR; }" EXIT
+
+if [ ${CUSTOMOP_CONF_FILE} ]; then
+ "${ROOT}/bin/tf2circle" "${INFO_FILE}" "${GRAPHDEF_FILE}" "${TMPDIR}/${MODEL_NAME}.circle" \
+ "--customop" "${CUSTOMOP_CONF_FILE}"
+else
+ "${ROOT}/bin/tf2circle" "${INFO_FILE}" "${GRAPHDEF_FILE}" "${TMPDIR}/${MODEL_NAME}.circle"
+fi
+
+"${ROOT}/bin/model2nnpkg.sh" -o "${OUTPUT_DIR}" "${TMPDIR}/${MODEL_NAME}.circle"
diff --git a/infra/packaging/res/tf2nnpkg.20200508 b/infra/packaging/res/tf2nnpkg.20200508
new file mode 100644
index 000000000..a9923b4a4
--- /dev/null
+++ b/infra/packaging/res/tf2nnpkg.20200508
@@ -0,0 +1,97 @@
+#!/bin/bash
+
+set -e
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+
+command_exists() {
+ if [ "$#" -le 0 ]; then
+ return 1
+ fi
+ command -v "$@" > /dev/null 2>&1
+}
+
+usage()
+{
+ echo "Convert TensorFlow model to nnpackage."
+ echo "Usage: tf2nnpkg --info <path/to/info> --graphdef <path/to/pb> [OPTION] -o <path/to/nnpkg/directory>"
+ echo "option:"
+ echo " --customop <path/to/customop.conf>"
+ exit 0
+}
+
+# Parse command-line arguments
+#
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--help')
+ usage
+ ;;
+ '--info')
+ export INFO_FILE="$2"
+ shift 2
+ ;;
+ '--graphdef')
+ export GRAPHDEF_FILE="$2"
+ shift 2
+ ;;
+ '--customop')
+ export CUSTOMOP_CONF_FILE="$2"
+ shift 2
+ ;;
+ '-o')
+ export OUTPUT_DIR="$2"
+ shift 2
+ ;;
+ *)
+ echo "${CUR}"
+ shift
+ ;;
+ esac
+done
+
+if [ -z ${GRAPHDEF_FILE} ] || [ ! -e ${GRAPHDEF_FILE} ]; then
+ echo "pb is not found. Please check --graphdef is correct."
+ exit 2
+fi
+
+if [ -z ${INFO_FILE} ] || [ ! -e ${INFO_FILE} ]; then
+ echo "info is not found. Please check --info is correct."
+ exit 2
+fi
+
+# optional param
+if [ ${CUSTOMOP_CONF_FILE} ]; then
+ if [ ! -e ${CUSTOMOP_CONF_FILE} ]; then
+ echo "customop.conf is not found. Please check --customop is correct."
+ exit 2
+ fi
+fi
+
+FILE_BASE=$(basename ${GRAPHDEF_FILE})
+MODEL_NAME="${FILE_BASE%.*}"
+TMPDIR=$(mktemp -d)
+trap "{ rm -rf $TMPDIR; }" EXIT
+
+if [ ${CUSTOMOP_CONF_FILE} ]; then
+ echo "This option is supported by previous version."
+ exit 2
+fi
+
+# activate python virtual environment
+source "${ROOT}/bin/venv/bin/activate"
+
+# parse inputs, outputs from info file
+INPUT=$(awk -F, '/^input/ { print $2 }' ${INFO_FILE} | cut -d: -f1 | tr -d ' ' | paste -d, -s)
+OUTPUT=$(awk -F, '/^output/ { print $2 }' ${INFO_FILE} | cut -d: -f1 | tr -d ' ' | paste -d, -s)
+
+# generate tflite file
+python "${ROOT}/bin/tf2tfliteV2.py" --v1 --input_path ${GRAPHDEF_FILE} \
+--output_path "${TMPDIR}/${MODEL_NAME}.tflite" \
+--input_arrays ${INPUT} --output_arrays ${OUTPUT}
+
+"${ROOT}/bin/tflite2circle" "${TMPDIR}/${MODEL_NAME}.tflite" "${TMPDIR}/${MODEL_NAME}.circle"
+
+"${ROOT}/bin/model2nnpkg.sh" -o "${OUTPUT_DIR}" "${TMPDIR}/${MODEL_NAME}.circle"
diff --git a/infra/packaging/res/tf2nnpkg.20200616 b/infra/packaging/res/tf2nnpkg.20200616
new file mode 100644
index 000000000..9f0957115
--- /dev/null
+++ b/infra/packaging/res/tf2nnpkg.20200616
@@ -0,0 +1,89 @@
+#!/bin/bash
+
+set -e
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+
+command_exists() {
+ if [ "$#" -le 0 ]; then
+ return 1
+ fi
+ command -v "$@" > /dev/null 2>&1
+}
+
+usage()
+{
+ echo "Convert TensorFlow model to nnpackage."
+ echo "Usage: tf2nnpkg --info <path/to/info> --graphdef <path/to/pb> [OPTION] -o <path/to/nnpkg/directory>"
+ exit 0
+}
+
+# Parse command-line arguments
+#
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--help')
+ usage
+ ;;
+ '--info')
+ export INFO_FILE="$2"
+ shift 2
+ ;;
+ '--graphdef')
+ export GRAPHDEF_FILE="$2"
+ shift 2
+ ;;
+ '-o')
+ export OUTPUT_DIR="$2"
+ shift 2
+ ;;
+ *)
+ echo "${CUR}"
+ shift
+ ;;
+ esac
+done
+
+if [ -z ${GRAPHDEF_FILE} ] || [ ! -e ${GRAPHDEF_FILE} ]; then
+ echo "pb is not found. Please check --graphdef is correct."
+ exit 2
+fi
+
+if [ -z ${INFO_FILE} ] || [ ! -e ${INFO_FILE} ]; then
+ echo "info is not found. Please check --info is correct."
+ exit 2
+fi
+
+FILE_BASE=$(basename ${GRAPHDEF_FILE})
+MODEL_NAME="${FILE_BASE%.*}"
+TMPDIR=$(mktemp -d)
+trap "{ rm -rf $TMPDIR; }" EXIT
+
+# activate python virtual environment
+VIRTUALENV_LINUX="${ROOT}/bin/venv/bin/activate"
+VIRTUALENV_WINDOWS="${ROOT}/bin/venv/Scripts/activate"
+
+if [ -e ${VIRTUALENV_LINUX} ]; then
+ source ${VIRTUALENV_LINUX}
+elif [ -e ${VIRTUALENV_WINDOWS} ]; then
+ source ${VIRTUALENV_WINDOWS}
+fi
+
+# parse inputs, outputs from info file
+INPUT=$(awk -F, '/^input/ { print $2 }' ${INFO_FILE} | cut -d: -f1 | tr -d ' ' | paste -d, -s)
+OUTPUT=$(awk -F, '/^output/ { print $2 }' ${INFO_FILE} | cut -d: -f1 | tr -d ' ' | paste -d, -s)
+
+# generate tflite file
+python "${ROOT}/bin/tf2tfliteV2.py" --v1 --input_path ${GRAPHDEF_FILE} \
+--output_path "${TMPDIR}/${MODEL_NAME}.tflite" \
+--input_arrays ${INPUT} --output_arrays ${OUTPUT}
+
+# convert .tflite to .circle
+"${ROOT}/bin/tflite2circle" "${TMPDIR}/${MODEL_NAME}.tflite" "${TMPDIR}/${MODEL_NAME}.circle"
+
+# optimize
+"${ROOT}/bin/circle2circle" "${TMPDIR}/${MODEL_NAME}.circle" "${TMPDIR}/${MODEL_NAME}.circle"
+
+"${ROOT}/bin/model2nnpkg.sh" -o "${OUTPUT_DIR}" "${TMPDIR}/${MODEL_NAME}.circle"
diff --git a/infra/packaging/res/tf2nnpkg.20200630 b/infra/packaging/res/tf2nnpkg.20200630
new file mode 100644
index 000000000..db7053a7b
--- /dev/null
+++ b/infra/packaging/res/tf2nnpkg.20200630
@@ -0,0 +1,130 @@
+#!/bin/bash
+
+set -e
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+
+command_exists() {
+ if [ "$#" -le 0 ]; then
+ return 1
+ fi
+ command -v "$@" > /dev/null 2>&1
+}
+
+usage()
+{
+ echo "Convert TensorFlow model to nnpackage."
+ echo "Usage: tf2nnpkg"
+ echo " --info <path/to/info>"
+ echo " --graphdef <path/to/pb>"
+ echo " -o <path/to/nnpkg/directory>"
+ echo " --v2 (optional) Use TF 2.x interface"
+ exit 255
+}
+
+TF_INTERFACE="--v1"
+
+# Parse command-line arguments
+#
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--help')
+ usage
+ ;;
+ '--info')
+ export INFO_FILE="$2"
+ shift 2
+ ;;
+ '--graphdef')
+ export GRAPHDEF_FILE="$2"
+ shift 2
+ ;;
+ '-o')
+ export OUTPUT_DIR="$2"
+ shift 2
+ ;;
+ '--v2')
+ TF_INTERFACE="--v2"
+ shift
+ ;;
+ *)
+ echo "${CUR}"
+ shift
+ ;;
+ esac
+done
+
+if [ -z ${GRAPHDEF_FILE} ] || [ ! -e ${GRAPHDEF_FILE} ]; then
+ echo "pb is not found. Please check --graphdef is correct."
+ exit 2
+fi
+
+if [ -z ${INFO_FILE} ] || [ ! -e ${INFO_FILE} ]; then
+ echo "info is not found. Please check --info is correct."
+ exit 2
+fi
+
+if [ -z ${OUTPUT_DIR} ]; then
+ echo "output directory is not specifed. Please check -o is correct.."
+ exit 2
+fi
+
+FILE_BASE=$(basename ${GRAPHDEF_FILE})
+MODEL_NAME="${FILE_BASE%.*}"
+TMPDIR=$(mktemp -d)
+trap "{ rm -rf $TMPDIR; }" EXIT
+
+# activate python virtual environment
+VIRTUALENV_LINUX="${ROOT}/bin/venv/bin/activate"
+VIRTUALENV_WINDOWS="${ROOT}/bin/venv/Scripts/activate"
+
+if [ -e ${VIRTUALENV_LINUX} ]; then
+ source ${VIRTUALENV_LINUX}
+elif [ -e ${VIRTUALENV_WINDOWS} ]; then
+ source ${VIRTUALENV_WINDOWS}
+fi
+
+# parse inputs, outputs from info file
+INPUT=$(awk -F, '/^input/ { print $2 }' ${INFO_FILE} | cut -d: -f1 | tr -d ' ' | paste -d, -s)
+OUTPUT=$(awk -F, '/^output/ { print $2 }' ${INFO_FILE} | cut -d: -f1 | tr -d ' ' | paste -d, -s)
+
+INPUT_SHAPES=$(grep ^input ${INFO_FILE} | cut -d "[" -f2 | cut -d "]" -f1 | tr -d ' ' | xargs | tr ' ' ':')
+
+# Generate BCQ information metadata
+# If model has no BCQ information or invalid information, pb file is not changed.
+"${ROOT}/bin/generate_bcq_metadata" \
+--input_path "${GRAPHDEF_FILE}" \
+--output_path "${TMPDIR}/${MODEL_NAME}_withmeta.pb" \
+--output_arrays "${OUTPUT}"
+
+# Generate BCQ information nodes as output_arrays
+# If model has no BCQ information, output_arrays would be empty.
+"${ROOT}/bin/generate_bcq_output_arrays" \
+--input_path "${TMPDIR}/${MODEL_NAME}_withmeta.pb" \
+--metadata_path "${TMPDIR}/${MODEL_NAME}_metadata_arrays.txt" \
+--output_arrays_path "${TMPDIR}/${MODEL_NAME}_output_arrays.txt"
+
+# generate tflite file
+TF2TFLITE_CONVERT_SCRIPT="python ${ROOT}/bin/tf2tfliteV2.py ${TF_INTERFACE} "
+TF2TFLITE_CONVERT_SCRIPT+="--input_path ${TMPDIR}/${MODEL_NAME}_withmeta.pb "
+TF2TFLITE_CONVERT_SCRIPT+="--input_arrays ${INPUT} "
+TF2TFLITE_CONVERT_SCRIPT+="--output_path ${TMPDIR}/${MODEL_NAME}.tflite "
+TF2TFLITE_CONVERT_SCRIPT+="--output_arrays "
+TF2TFLITE_CONVERT_SCRIPT+="$(cat ${TMPDIR}/${MODEL_NAME}_metadata_arrays.txt)"
+TF2TFLITE_CONVERT_SCRIPT+="${OUTPUT}"
+TF2TFLITE_CONVERT_SCRIPT+="$(cat ${TMPDIR}/${MODEL_NAME}_output_arrays.txt) "
+if [ ! -z ${INPUT_SHAPES} ]; then
+ TF2TFLITE_CONVERT_SCRIPT+="--input_shapes ${INPUT_SHAPES} "
+fi
+
+${TF2TFLITE_CONVERT_SCRIPT}
+
+# convert .tflite to .circle
+"${ROOT}/bin/tflite2circle" "${TMPDIR}/${MODEL_NAME}.tflite" "${TMPDIR}/${MODEL_NAME}.tmp.circle"
+
+# optimize
+"${ROOT}/bin/circle2circle" --all "${TMPDIR}/${MODEL_NAME}.tmp.circle" "${TMPDIR}/${MODEL_NAME}.circle"
+
+"${ROOT}/bin/model2nnpkg.sh" -o "${OUTPUT_DIR}" "${TMPDIR}/${MODEL_NAME}.circle"
diff --git a/infra/packaging/res/tflite_schema.fbs b/infra/packaging/res/tflite_schema.fbs
new file mode 100644
index 000000000..3da3188c3
--- /dev/null
+++ b/infra/packaging/res/tflite_schema.fbs
@@ -0,0 +1,698 @@
+// Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Revision History
+// Version 0: Initial version.
+// Version 1: Add subgraphs to schema.
+// Version 2: Rename operators to conform to NN API.
+// Version 3: Move buffer data from Model.Subgraph.Tensors to Model.Buffers.
+
+namespace tflite;
+
+// This corresponds to the version.
+file_identifier "TFL3";
+// File extension of any written files.
+file_extension "tflite";
+
+// The type of data stored in a tensor.
+enum TensorType : byte {
+ FLOAT32 = 0,
+ FLOAT16 = 1,
+ INT32 = 2,
+ UINT8 = 3,
+ INT64 = 4,
+ STRING = 5,
+ BOOL = 6,
+ INT16 = 7,
+ COMPLEX64 = 8,
+}
+
+// Parameters for converting a quantized tensor back to float. Given a
+// quantized value q, the corresponding float value f should be:
+// f = scale * (q - zero_point)
+table QuantizationParameters {
+ min:[float]; // For importing back into tensorflow.
+ max:[float]; // For importing back into tensorflow.
+ scale:[float]; // For dequantizing the tensor's values.
+ zero_point:[long];
+}
+
+table Tensor {
+ // The tensor shape. The meaning of each entry is operator-specific but
+ // builtin ops use: [batch size, height, width, number of channels] (That's
+ // Tensorflow's NHWC).
+ shape:[int];
+ type:TensorType;
+ // An index that refers to the buffers table at the root of the model. Or,
+ // if there is no data buffer associated (i.e. intermediate results), then
+ // this is 0 (which refers to an always existent empty buffer).
+ //
+ // The data_buffer itself is an opaque container, with the assumption that the
+ // target device is little-endian. In addition, all builtin operators assume
+ // the memory is ordered such that if `shape` is [4, 3, 2], then index
+ // [i, j, k] maps to data_buffer[i*3*2 + j*2 + k].
+ buffer:uint;
+ name:string; // For debugging and importing back into tensorflow.
+ quantization:QuantizationParameters; // Optional.
+
+ is_variable:bool = false;
+}
+
+// A list of builtin operators. Builtin operators are slightly faster than custom
+// ones, but not by much. Moreover, while custom operators accept an opaque
+// object containing configuration parameters, builtins have a predetermined
+// set of acceptable options.
+enum BuiltinOperator : byte {
+ ADD = 0,
+ AVERAGE_POOL_2D = 1,
+ CONCATENATION = 2,
+ CONV_2D = 3,
+ DEPTHWISE_CONV_2D = 4,
+ // DEPTH_TO_SPACE = 5,
+ DEQUANTIZE = 6,
+ EMBEDDING_LOOKUP = 7,
+ FLOOR = 8,
+ FULLY_CONNECTED = 9,
+ HASHTABLE_LOOKUP = 10,
+ L2_NORMALIZATION = 11,
+ L2_POOL_2D = 12,
+ LOCAL_RESPONSE_NORMALIZATION = 13,
+ LOGISTIC = 14,
+ LSH_PROJECTION = 15,
+ LSTM = 16,
+ MAX_POOL_2D = 17,
+ MUL = 18,
+ RELU = 19,
+ // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
+ // since different model developers use RELU1 in different ways. Never
+ // create another op called RELU1.
+ RELU_N1_TO_1 = 20,
+ RELU6 = 21,
+ RESHAPE = 22,
+ RESIZE_BILINEAR = 23,
+ RNN = 24,
+ SOFTMAX = 25,
+ SPACE_TO_DEPTH = 26,
+ SVDF = 27,
+ TANH = 28,
+ // TODO(aselle): Consider rename to CONCATENATE_EMBEDDINGS
+ CONCAT_EMBEDDINGS = 29,
+ SKIP_GRAM = 30,
+ CALL = 31,
+ CUSTOM = 32,
+ EMBEDDING_LOOKUP_SPARSE = 33,
+ PAD = 34,
+ UNIDIRECTIONAL_SEQUENCE_RNN = 35,
+ GATHER = 36,
+ BATCH_TO_SPACE_ND = 37,
+ SPACE_TO_BATCH_ND = 38,
+ TRANSPOSE = 39,
+ MEAN = 40,
+ SUB = 41,
+ DIV = 42,
+ SQUEEZE = 43,
+ UNIDIRECTIONAL_SEQUENCE_LSTM = 44,
+ STRIDED_SLICE = 45,
+ BIDIRECTIONAL_SEQUENCE_RNN = 46,
+ EXP = 47,
+ TOPK_V2 = 48,
+ SPLIT = 49,
+ LOG_SOFTMAX = 50,
+ // DELEGATE is a special op type for the operations which are delegated to
+ // other backends.
+ // WARNING: Experimental interface, subject to change
+ DELEGATE = 51,
+ BIDIRECTIONAL_SEQUENCE_LSTM = 52,
+ CAST = 53,
+ PRELU = 54,
+ MAXIMUM = 55,
+ ARG_MAX = 56,
+ MINIMUM = 57,
+ LESS = 58,
+ NEG = 59,
+ PADV2 = 60,
+ GREATER = 61,
+ GREATER_EQUAL = 62,
+ LESS_EQUAL = 63,
+ SELECT = 64,
+ SLICE = 65,
+ SIN = 66,
+ TRANSPOSE_CONV = 67,
+ SPARSE_TO_DENSE = 68,
+ TILE = 69,
+ EXPAND_DIMS = 70,
+ EQUAL = 71,
+ NOT_EQUAL = 72,
+ LOG = 73,
+ SUM = 74,
+ SQRT = 75,
+ RSQRT = 76,
+ SHAPE = 77,
+ POW = 78,
+ ARG_MIN = 79,
+ FAKE_QUANT = 80,
+ REDUCE_PROD = 81,
+ REDUCE_MAX = 82,
+ PACK = 83,
+ LOGICAL_OR = 84,
+ ONE_HOT = 85,
+ LOGICAL_AND = 86,
+ LOGICAL_NOT = 87,
+ UNPACK = 88,
+ REDUCE_MIN = 89,
+ FLOOR_DIV = 90,
+ REDUCE_ANY = 91,
+ SQUARE = 92,
+ ZEROS_LIKE = 93,
+ FILL = 94,
+}
+
+// Options for the builtin operators.
+union BuiltinOptions {
+ Conv2DOptions,
+ DepthwiseConv2DOptions,
+ ConcatEmbeddingsOptions,
+ LSHProjectionOptions,
+ Pool2DOptions,
+ SVDFOptions,
+ RNNOptions,
+ FullyConnectedOptions,
+ SoftmaxOptions,
+ ConcatenationOptions,
+ AddOptions,
+ L2NormOptions,
+ LocalResponseNormalizationOptions,
+ LSTMOptions,
+ ResizeBilinearOptions,
+ CallOptions,
+ ReshapeOptions,
+ SkipGramOptions,
+ SpaceToDepthOptions,
+ EmbeddingLookupSparseOptions,
+ MulOptions,
+ PadOptions,
+ GatherOptions,
+ BatchToSpaceNDOptions,
+ SpaceToBatchNDOptions,
+ TransposeOptions,
+ ReducerOptions,
+ SubOptions,
+ DivOptions,
+ SqueezeOptions,
+ SequenceRNNOptions,
+ StridedSliceOptions,
+ ExpOptions,
+ TopKV2Options,
+ SplitOptions,
+ LogSoftmaxOptions,
+ CastOptions,
+ DequantizeOptions,
+ MaximumMinimumOptions,
+ ArgMaxOptions,
+ LessOptions,
+ NegOptions,
+ PadV2Options,
+ GreaterOptions,
+ GreaterEqualOptions,
+ LessEqualOptions,
+ SelectOptions,
+ SliceOptions,
+ TransposeConvOptions,
+ SparseToDenseOptions,
+ TileOptions,
+ ExpandDimsOptions,
+ EqualOptions,
+ NotEqualOptions,
+ ShapeOptions,
+ PowOptions,
+ ArgMinOptions,
+ FakeQuantOptions,
+ PackOptions,
+ LogicalOrOptions,
+ OneHotOptions,
+ LogicalAndOptions,
+ LogicalNotOptions,
+ UnpackOptions,
+ FloorDivOptions,
+ SquareOptions,
+ ZerosLikeOptions,
+ FillOptions,
+}
+
+enum Padding : byte { SAME, VALID }
+
+enum ActivationFunctionType : byte {
+ NONE = 0,
+ RELU = 1,
+ RELU_N1_TO_1 = 2,
+ RELU6 = 3,
+ TANH = 4,
+ SIGN_BIT = 5,
+}
+
+table Conv2DOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ fused_activation_function:ActivationFunctionType;
+ dilation_w_factor:int = 1;
+ dilation_h_factor:int = 1;
+}
+
+table Pool2DOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ filter_width:int;
+ filter_height:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+table DepthwiseConv2DOptions {
+ // Parameters for DepthwiseConv version 1 or above.
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+ depth_multiplier:int;
+ fused_activation_function:ActivationFunctionType;
+ // Parameters for DepthwiseConv version 2 or above.
+ dilation_w_factor:int = 1;
+ dilation_h_factor:int = 1;
+}
+
+table ConcatEmbeddingsOptions {
+ num_channels:int;
+ num_columns_per_channel:[int];
+ embedding_dim_per_channel:[int]; // This could be inferred from parameters.
+}
+
+enum LSHProjectionType: byte {
+ UNKNOWN = 0,
+ SPARSE = 1,
+ DENSE = 2,
+}
+
+table LSHProjectionOptions {
+ type: LSHProjectionType;
+}
+
+table SVDFOptions {
+ rank:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow RNNCell.
+table RNNOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow dynamic_rnn with RNNCell.
+table SequenceRNNOptions {
+ time_major:bool;
+ fused_activation_function:ActivationFunctionType;
+}
+
+// An implementation of TensorFlow bidrectional_dynamic_rnn with RNNCell.
+table BidirectionalSequenceRNNOptions {
+ time_major:bool;
+ fused_activation_function:ActivationFunctionType;
+}
+
+enum FullyConnectedOptionsWeightsFormat: byte {
+ DEFAULT = 0,
+ SHUFFLED4x16INT8 = 1,
+}
+
+// An implementation of TensorFlow fully_connected (a.k.a Dense) layer.
+table FullyConnectedOptions {
+ // Parameters for FullyConnected version 1 or above.
+ fused_activation_function:ActivationFunctionType;
+
+ // Parameters for FullyConnected version 2 or above.
+ weights_format:FullyConnectedOptionsWeightsFormat = DEFAULT;
+}
+
+table SoftmaxOptions {
+ beta: float;
+}
+
+// An implementation of TensorFlow concat.
+table ConcatenationOptions {
+ axis:int;
+ fused_activation_function:ActivationFunctionType;
+}
+
+table AddOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table MulOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table L2NormOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table LocalResponseNormalizationOptions {
+ radius:int;
+ bias:float;
+ alpha:float;
+ beta:float;
+}
+
+enum LSTMKernelType : byte {
+ // Full LSTM kernel which supports peephole and projection.
+ FULL = 0,
+ // Basic LSTM kernels. Equivalent to TensorFlow BasicLSTMCell.
+ BASIC = 1,
+}
+
+// An implementation of TensorFlow LSTMCell and CoupledInputForgetGateLSTMCell
+table LSTMOptions {
+ // Parameters for LSTM version 1 or above.
+ fused_activation_function:ActivationFunctionType;
+ cell_clip: float; // Optional, 0.0 means no clipping
+ proj_clip: float; // Optional, 0.0 means no clipping
+
+ // Parameters for LSTM version 2 or above.
+ // Basic kernel is only supported in version 2 or above.
+ kernel_type: LSTMKernelType = FULL;
+}
+
+table ResizeBilinearOptions {
+ new_height: int (deprecated);
+ new_width: int (deprecated);
+ align_corners: bool;
+}
+
+// A call operation options
+table CallOptions {
+ // The subgraph index that needs to be called.
+ subgraph:uint;
+}
+
+table PadOptions {
+}
+
+table PadV2Options {
+}
+
+table ReshapeOptions {
+ new_shape:[int];
+}
+
+table SpaceToBatchNDOptions {
+}
+
+table BatchToSpaceNDOptions {
+}
+
+table SkipGramOptions {
+ ngram_size: int;
+ max_skip_size: int;
+ include_all_ngrams: bool;
+}
+
+table SpaceToDepthOptions {
+ block_size: int;
+}
+
+table SubOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table DivOptions {
+ fused_activation_function:ActivationFunctionType;
+}
+
+table TopKV2Options {
+}
+
+enum CombinerType : byte {
+ SUM = 0,
+ MEAN = 1,
+ SQRTN = 2,
+}
+
+table EmbeddingLookupSparseOptions {
+ combiner:CombinerType;
+}
+
+table GatherOptions {
+ axis: int;
+}
+
+table TransposeOptions {
+}
+
+table ExpOptions {
+}
+
+table ReducerOptions {
+ keep_dims: bool;
+}
+
+table SqueezeOptions {
+ squeeze_dims:[int];
+}
+
+table SplitOptions {
+ num_splits: int;
+}
+
+table StridedSliceOptions {
+ begin_mask: int;
+ end_mask: int;
+ ellipsis_mask: int;
+ new_axis_mask: int;
+ shrink_axis_mask: int;
+}
+
+table LogSoftmaxOptions {
+}
+
+table CastOptions {
+ in_data_type: TensorType;
+ out_data_type: TensorType;
+}
+
+table DequantizeOptions {
+}
+
+table MaximumMinimumOptions {
+}
+
+table TileOptions {
+}
+
+table ArgMaxOptions {
+ output_type : TensorType;
+}
+
+table ArgMinOptions {
+ output_type : TensorType;
+}
+
+table GreaterOptions {
+}
+
+table GreaterEqualOptions {
+}
+
+table LessOptions {
+}
+
+table LessEqualOptions {
+}
+
+table NegOptions {
+}
+
+table SelectOptions {
+}
+
+table SliceOptions {
+}
+
+table TransposeConvOptions {
+ padding:Padding;
+ stride_w:int;
+ stride_h:int;
+}
+
+table ExpandDimsOptions {
+}
+
+table SparseToDenseOptions {
+ validate_indices:bool;
+}
+
+table EqualOptions {
+}
+
+table NotEqualOptions {
+}
+
+table ShapeOptions {
+ // Optional output type of the operation (int32 or int64). Defaults to int32.
+ out_type : TensorType;
+}
+
+table PowOptions {
+}
+
+table FakeQuantOptions {
+ // Parameters supported by version 1:
+ min:float;
+ max:float;
+ num_bits:int;
+
+ // Parameters supported by version 2:
+ narrow_range:bool;
+}
+
+table PackOptions {
+ values_count:int;
+ axis:int;
+}
+
+table LogicalOrOptions {
+}
+
+table OneHotOptions {
+ axis:int;
+}
+
+table LogicalAndOptions {
+}
+
+table LogicalNotOptions {
+}
+
+table UnpackOptions {
+ num:int;
+ axis:int;
+}
+
+table FloorDivOptions {
+}
+
+table SquareOptions {
+}
+
+table ZerosLikeOptions {
+}
+
+table FillOptions {
+}
+
+// An OperatorCode can be an enum value (BuiltinOperator) if the operator is a
+// builtin, or a string if the operator is custom.
+table OperatorCode {
+ builtin_code:BuiltinOperator;
+ custom_code:string;
+
+ // The version of the operator. The version need to be bumped whenever new
+ // parameters are introduced into an op.
+ version:int = 1;
+}
+
+enum CustomOptionsFormat : byte {
+ FLEXBUFFERS = 0,
+}
+
+// An operator takes tensors as inputs and outputs. The type of operation being
+// performed is determined by an index into the list of valid OperatorCodes,
+// while the specifics of each operations is configured using builtin_options
+// or custom_options.
+table Operator {
+ // Index into the operator_codes array. Using an integer here avoids
+ // complicate map lookups.
+ opcode_index:uint;
+
+ // Optional input and output tensors are indicated by -1.
+ inputs:[int];
+ outputs:[int];
+
+ builtin_options:BuiltinOptions;
+ custom_options:[ubyte];
+ custom_options_format:CustomOptionsFormat;
+
+ // A list of booleans indicating the input tensors which are being mutated by
+ // this operator.(e.g. used by RNN and LSTM).
+ // For example, if the "inputs" array refers to 5 tensors and the second and
+ // fifth are mutable variables, then this list will contain
+ // [false, true, false, false, true].
+ //
+ // If the list is empty, no variable is mutated in this operator.
+ // The list either has the same length as `inputs`, or is empty.
+ mutating_variable_inputs:[bool];
+}
+
+// The root type, defining a subgraph, which typically represents an entire
+// model.
+table SubGraph {
+ // A list of all tensors used in this subgraph.
+ tensors:[Tensor];
+
+ // Indices of the tensors that are inputs into this subgraph. Note this is
+ // the list of non-static tensors that feed into the subgraph for inference.
+ inputs:[int];
+
+ // Indices of the tensors that are outputs out of this subgraph. Note this is
+ // the list of output tensors that are considered the product of the
+ // subgraph's inference.
+ outputs:[int];
+
+ // All operators, in execution order.
+ operators:[Operator];
+
+ // Name of this subgraph (used for debugging).
+ name:string;
+}
+
+// Table of raw data buffers (used for constant tensors). Referenced by tensors
+// by index. The generous alignment accommodates mmap-friendly data structures.
+table Buffer {
+ data:[ubyte] (force_align: 16);
+}
+
+table Model {
+ // Version of the schema.
+ version:uint;
+
+ // A list of all operator codes used in this model. This is
+ // kept in order because operators carry an index into this
+ // vector.
+ operator_codes:[OperatorCode];
+
+ // All the subgraphs of the model. The 0th is assumed to be the main
+ // model.
+ subgraphs:[SubGraph];
+
+ // A description of the model.
+ description:string;
+
+ // Buffers of the model.
+ // Note the 0th entry of this array must be an empty buffer (sentinel).
+ // This is a convention so that tensors without a buffer can provide 0 as
+ // their buffer.
+ buffers:[Buffer];
+
+ // Metadata about the model. Indirects into the existings buffers list.
+ metadata_buffer:[int];
+}
+
+root_type Model;
diff --git a/infra/packaging/verify b/infra/packaging/verify
new file mode 100644
index 000000000..4d5e610af
--- /dev/null
+++ b/infra/packaging/verify
@@ -0,0 +1,82 @@
+#!/bin/bash
+
+#
+# HOW TO USE
+#
+# ./nnas verify-package [CHECK 1] [CHECK 2] ... [CHECK N]
+#
+# REQUIRE: N >= 1
+#
+SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+if [[ -z "${NNAS_PROJECT_PATH}" ]]; then
+ echo "ERROR: NNAS_PROJECT_PATH is not specified"
+ exit 255
+fi
+
+CHECKLIST=()
+
+while [ "$#" -ne 0 ]; do
+ CUR="$1"
+
+ case $CUR in
+ '--prefix')
+ NNAS_INSTALL_PREFIX="$2"
+ shift 2
+ ;;
+ *)
+ if [[ ! -f "${SCRIPT_PATH}/chklist/$CUR" ]]; then
+ echo "ERROR: '${CUR}' is invalid"
+ # TODO Show supported checks
+ exit 255
+ fi
+
+ CHECKLIST+=("${CUR}")
+ shift 1
+ ;;
+ esac
+done
+
+# Q. Is it better to have the default value for NNAS_INSTALL_PREFIX?
+# TODO Show USAGE
+# TODO Use a proper exitcode on error (http://tldp.org/LDP/abs/html/exitcodes.html)
+if [[ -z "${NNAS_INSTALL_PREFIX}" ]]; then
+ echo "ERROR: --prefix is not specified"
+ exit 255
+fi
+
+if [[ ${#CHECKLIST[@]} -eq 0 ]]; then
+ echo "ERROR: Check is not specified"
+ exit 255
+fi
+
+EXITCODE=0
+
+for CHECK_NAME in ${CHECKLIST[@]}; do
+ source "${SCRIPT_PATH}/chklist/${CHECK_NAME}"
+
+ prepare
+
+ echo -n "${QUESTION}"
+
+ PASSED=0
+
+ run
+
+ if [[ ${PASSED} -ne 0 ]]; then
+ ANSWER="Yes"
+ else
+ ANSWER="No"
+ # Reference: https://www.tldp.org/LDP/abs/html/exitcodes.html
+ EXITCODE=1
+ fi
+
+ echo " - ${ANSWER}"
+done
+
+if [[ ${EXITCODE} -ne 0 ]]; then
+ echo
+ echo "FAIL"
+fi
+
+exit ${EXITCODE}
diff --git a/infra/scripts/build-tcm.sh b/infra/scripts/build-tcm.sh
new file mode 100755
index 000000000..38533c1f9
--- /dev/null
+++ b/infra/scripts/build-tcm.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# STEP 1
+# Download latest TCM tool from
+# https://github.sec.samsung.net/RS-TCM/tca-standalone/releases/download/v0.0.8/tca-standalone-0.0.8.jar
+#
+# STEP 2
+# Create symbolic link `./src` for source directory to be analyzed which has `.ahub` configuration.
+#
+# STEP 3
+# run this `build-tcm.sh` script.
+#
+# See the following link for additional details.
+# https://github.sec.samsung.net/RS-TCM/tca-standalone/wiki/Tutorials-CPP-Gtest
+#
+
+echo ${PROJECT_DIR:=${PWD}}
+
+java -jar $PROJECT_DIR/tca-standalone-0.0.8.jar \
+ --outdir=$PROJECT_DIR/tcm-output \
+ --config=$PROJECT_DIR/src/.ahub/tcchecker-tca/config.yaml \
+ --local=$PROJECT_DIR/src \
+ --logfile=$PROJECT_DIR/tcm-output/tcm.log \
+ --debug
diff --git a/infra/scripts/build_android_runtime_release.sh b/infra/scripts/build_android_runtime_release.sh
new file mode 100755
index 000000000..fe933c648
--- /dev/null
+++ b/infra/scripts/build_android_runtime_release.sh
@@ -0,0 +1,21 @@
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+# prepare pre-built armcompute library
+# android build requires pre-built armcompute library
+if [ ! -n "$EXT_ACL_FOLDER" ]; then
+ echo "Please set EXT_ACL_FOLDER to use pre-built armcompute library"
+ exit 1
+fi
+
+# prepare ndk
+if [ ! -n "$NDK_DIR" ]; then
+ export NDK_DIR=$ROOT_PATH/tools/cross/ndk/r20/ndk
+ echo "It will use default external path"
+fi
+
+export TARGET_OS=android
+export CROSS_BUILD=1
+make -f Makefile.template
diff --git a/infra/scripts/build_nnpkg.sh b/infra/scripts/build_nnpkg.sh
deleted file mode 100755
index 221c7210f..000000000
--- a/infra/scripts/build_nnpkg.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-
-# Test suite: nnpkg-test-suite.tar.gz
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="${CURRENT_PATH}/../../"
-
-pushd ${ROOT_PATH} > /dev/null
-
-for f in `find build/compiler/tf2tflite -name "UNIT*" | cut -d'.' -f1 | sort | uniq`;
-do
- tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/nncc-tc-to-nnpkg-tc.sh -o nnpkg-tcs -i ${f%/*} $(basename $f);
-done
-
-tar -zcf nnpkg-test-suite.tar.gz tools/nnpackage_tool/nnpkg_test nnpkg-tcs
-
-popd > /dev/null
diff --git a/infra/scripts/common.sh b/infra/scripts/common.sh
new file mode 100755
index 000000000..818957a21
--- /dev/null
+++ b/infra/scripts/common.sh
@@ -0,0 +1,155 @@
+#!/bin/bash
+
+# Don't run this script
+[[ "${BASH_SOURCE[0]}" == "${0}" ]] && echo "Please don't execute ${BASH_SOURCE[0]}, source it" && return
+
+# Global variable
+# CURRENT_PATH: infra/scripts directory absolute path
+# ROOT_PATH: nnfw root directory absolute path
+
+# Functions
+#
+# CheckTestPrepared
+# Check environment variable setting to run test
+#
+# TFLiteModelVerification $1 $2 $3
+# Run ./tests/scripts/test-driver.sh script verification test
+#
+# NNAPIGTest $1 $2 $3
+# Run [INSTALL_PATH]/test/onert-test unittest command for nnapi gtest
+#
+# NNPackageTest $1 $2
+# Run [INSTALL_PATH]/test/onert-test nnpkg-test command
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$(cd ${CURRENT_PATH}/../../ && pwd)"
+
+# Install path on CI
+INSTALL_PATH=$ROOT_PATH/Product/out
+
+function CheckTestPrepared()
+{
+ # Model download server setting
+ if [[ -z "${MODELFILE_SERVER}" ]]; then
+ echo "Model file server is not set. Try to use default setting."
+ else
+ echo "Model Server: ${MODELFILE_SERVER}"
+ fi
+ $INSTALL_PATH/test/onert-test prepare-model
+}
+
+# $1: (required) backend
+# $2: (required) framework list file relative path from nnfw root directory
+# pass empty string if there is no skiplist
+# $3: (required) relative path to report from nnfw root directory
+function TFLiteModelVerification()
+{
+ [[ $# -ne 3 ]] && echo "Invalid function argument setting" && exit 1
+
+ pushd ${ROOT_PATH} > /dev/null
+
+ export BACKENDS=$1
+ if [[ "$2" == "" ]]; then
+ $INSTALL_PATH/test/onert-test verify-tflite --api=nnapi \
+ --reportdir=$ROOT_PATH/$3
+ else
+ $INSTALL_PATH/test/onert-test verify-tflite --api=nnapi \
+ --list=$2 \
+ --reportdir=$ROOT_PATH/$3
+ fi
+ unset BACKENDS
+
+ popd > /dev/null
+}
+
+# $1: (required) backend
+# $2: (required) nnapi gtest skiplist file relative path from nnfw root directory
+# pass empty string if there is no test list
+# $3: (required) relative path for report from nnfw root directory
+function NNAPIGTest()
+{
+ [[ $# -ne 3 ]] && echo "Invalid function argument setting" && exit 1
+
+ pushd ${ROOT_PATH} > /dev/null
+
+ # Backup original nnapi_gtest.skip
+ # TODO Pass skiplist to test-driver.sh
+ SKIPLIST_FILE="${INSTALL_PATH}/unittest/nnapi_gtest.skip"
+ BACKUP_FILE="${SKIPLIST_FILE}.backup"
+ if [[ "$2" != "" ]]; then
+ cp ${SKIPLIST_FILE} ${BACKUP_FILE}
+ cp ${ROOT_PATH}/$2 ${SKIPLIST_FILE}
+ fi
+
+ export BACKENDS=$1
+ $INSTALL_PATH/test/onert-test unittest \
+ --reportdir=$ROOT_PATH/$3 \
+ --unittestdir=$INSTALL_PATH/unittest
+ unset BACKENDS
+
+ # TODO Pass skiplist to test-driver.sh
+ # Restore original nnapi_gtest.skip
+ if [[ "$2" != "" ]]; then
+ cp ${BACKUP_FILE} ${SKIPLIST_FILE}
+ rm ${BACKUP_FILE}
+ fi
+
+ popd > /dev/null
+}
+
+# $1: (require) backend
+# $2: (require) list
+function NNPackageTest()
+{
+ [[ $# -ne 2 ]] && echo "Invalid function argument setting" && exit 1
+
+ pushd ${ROOT_PATH} > /dev/null
+
+ echo "[Package Test] Run $1 backend nnpackage test"
+
+ EXITCODE=0
+ PKG_LIST=$(cat $2)
+ for f in ${PKG_LIST}
+ do
+ for entry in "nnpkg-tcs"/$f; do
+ if [ -e $entry ]; then
+ BACKENDS="$1" $INSTALL_PATH/test/onert-test nnpkg-test -d -i nnpkg-tcs $(basename "$entry")
+ fi
+ done
+ EXITCODE_F=$?
+
+ if [ ${EXITCODE_F} -ne 0 ]; then
+ EXITCODE=${EXITCODE_F}
+ fi
+ done
+
+ if [ ${EXITCODE} -ne 0 ]; then
+ exit ${EXITCODE}
+ fi
+
+ popd > /dev/null
+}
+
+# $1: (required) backend
+# $2: (required) test list file relative path from nnfw root directory
+# pass empty string if there is no skiplist
+# $3: (required) relative path to report from nnfw root directory
+function TFLiteLoaderTest()
+{
+ [[ $# -ne 3 ]] && echo "TFLiteLoaderTest: Invalid function argument setting" && exit 1
+
+ pushd ${ROOT_PATH} > /dev/null
+
+ export BACKENDS=$1
+ if [[ "$2" == "" ]]; then
+ $INSTALL_PATH/test/onert-test verify-tflite --api=loader \
+ --reportdir=$ROOT_PATH/$3
+ else
+ $INSTALL_PATH/test/onert-test verify-tflite --api=loader \
+ --list=$2 \
+ --reportdir=$ROOT_PATH/$3
+ fi
+ unset BACKENDS
+
+ popd > /dev/null
+}
diff --git a/infra/scripts/compiler_modules.sh b/infra/scripts/compiler_modules.sh
new file mode 100644
index 000000000..a0323e0a0
--- /dev/null
+++ b/infra/scripts/compiler_modules.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+# Don't run this script
+[[ "${BASH_SOURCE[0]}" == "${0}" ]] && echo "Please don't execute ${BASH_SOURCE[0]}, source it" && return
+
+DEBUG_BUILD_ITEMS="angkor;cwrap;pepper-str;pepper-strcast;pp;stdex"
+DEBUG_BUILD_ITEMS+=";oops;pepper-assert"
+DEBUG_BUILD_ITEMS+=";hermes;hermes-std"
+DEBUG_BUILD_ITEMS+=";loco;locop;locomotiv;logo-core;logo"
+DEBUG_BUILD_ITEMS+=";foder;souschef;arser;vconone"
+DEBUG_BUILD_ITEMS+=";safemain;mio-circle;mio-tflite"
+DEBUG_BUILD_ITEMS+=";tflite2circle"
+DEBUG_BUILD_ITEMS+=";luci"
+DEBUG_BUILD_ITEMS+=";luci-interpreter"
+DEBUG_BUILD_ITEMS+=";luci-value-test"
+DEBUG_BUILD_ITEMS+=";circle2circle;record-minmax;circle-quantizer"
+DEBUG_BUILD_ITEMS+=";circle-verify"
+DEBUG_BUILD_ITEMS+=";tflchef;circlechef"
+DEBUG_BUILD_ITEMS+=";common-artifacts"
+DEBUG_BUILD_ITEMS+=";circle2circle-dredd-recipe-test"
+DEBUG_BUILD_ITEMS+=";record-minmax-conversion-test"
+DEBUG_BUILD_ITEMS+=";tf2tfliteV2;tf2tfliteV2-conversion-test"
+DEBUG_BUILD_ITEMS+=";tflite2circle-conversion-test"
diff --git a/infra/scripts/configure_compiler_coverage.sh b/infra/scripts/configure_compiler_coverage.sh
new file mode 100755
index 000000000..e80c3e9c0
--- /dev/null
+++ b/infra/scripts/configure_compiler_coverage.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+set -eo pipefail
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+source ${CURRENT_PATH}/compiler_modules.sh
+
+NNCC_CFG_OPTION=" -DCMAKE_BUILD_TYPE=Debug"
+NNCC_CFG_STRICT=" -DENABLE_STRICT_BUILD=ON"
+NNCC_COV_DEBUG=" -DBUILD_WHITELIST=$DEBUG_BUILD_ITEMS"
+
+if [ $# -ne 0 ]; then
+ echo "Additional cmake configuration: $@"
+fi
+
+./nncc configure \
+ $NNCC_CFG_OPTION $NNCC_COV_DEBUG $NNCC_CFG_STRICT \
+ -DENABLE_COVERAGE=ON "$@"
diff --git a/infra/scripts/docker_build_cross_arm_pacl.sh b/infra/scripts/docker_build_cross_aarch64_runtime.sh
index 2091287c0..011d14c18 100755
--- a/infra/scripts/docker_build_cross_arm_pacl.sh
+++ b/infra/scripts/docker_build_cross_aarch64_runtime.sh
@@ -6,7 +6,7 @@ CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_PATH="$CURRENT_PATH/../../"
# prepare rootfs
-if [ ! -d $ROOTFS_DIR ]; then
+if [ -z "$ROOTFS_DIR" ] || [ ! -d $ROOTFS_DIR ]; then
echo "It will use default rootfs path"
else
DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
@@ -31,17 +31,18 @@ if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
echo "It will not use mirror server"
fi
-DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
+DOCKER_ENV_VARS+=" -e TARGET_ARCH=aarch64"
DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+set -e
+
pushd $ROOT_PATH > /dev/null
# TODO use command instead of makefile
export DOCKER_ENV_VARS
export DOCKER_VOLUMES
-CMD="export OPTIONS='-DBUILD_NEURUN=OFF -DBUILD_PURE_ARM_COMPUTE=ON -DBUILD_TFLITE_LOADER=OFF' && \
- cp -nv Makefile.template Makefile && \
+CMD="cp -nv Makefile.template Makefile && \
make all install build_test_suite"
-./nnfw docker-run bash -c "${CMD}"
+./nnfw docker-run bash -c "$CMD"
popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_benchmark_model.sh b/infra/scripts/docker_build_cross_arm_benchmark_model.sh
deleted file mode 100755
index 0bc20b142..000000000
--- a/infra/scripts/docker_build_cross_arm_benchmark_model.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# prepare rootfs
-if [ ! -d $ROOTFS_DIR ]; then
- echo "It will use default rootfs path"
-else
- DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
- DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
-fi
-
-# mount volume (or directory) for externals
-if [ -n "$EXTERNAL_VOLUME" ]; then
- DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
- DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
-else
- echo "It will use default external path"
-fi
-
-# docker image name
-if [ -z $DOCKER_IMAGE_NAME ]; then
- echo "It will use default docker image name"
-fi
-
-# Mirror server setting
-if [ -z $EXTERNAL_DOWNLOAD_SERVER ]; then
- echo "It will not use mirror server"
-fi
-
-DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
-DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
-DOCKER_ENV_VARS+=" -e BENCHMARK_ACL_BUILD=1"
-DOCKER_ENV_VARS+=" -e BUILD_TYPE=Release"
-
-pushd $ROOT_PATH > /dev/null
-
-# TODO use command instead of makefile
-export DOCKER_ENV_VARS
-export DOCKER_VOLUMES
-CMD="export OPTIONS='-DBUILD_PURE_ARM_COMPUTE=ON -DBUILD_NEURUN=OFF -DBUILD_TFLITE_BENCHMARK_MODEL=ON -DBUILD_TFLITE_LOADER=OFF' && \
- cp -nv Makefile.template Makefile && \
- make all install build_test_suite"
-./nnfw docker-run bash -c "$CMD"
-
-popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_pacl_release.sh b/infra/scripts/docker_build_cross_arm_pacl_release.sh
deleted file mode 100755
index fdac80071..000000000
--- a/infra/scripts/docker_build_cross_arm_pacl_release.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# prepare rootfs
-if [ ! -d $ROOTFS_DIR ]; then
- echo "It will use default rootfs path"
-else
- DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
- DOCKER_ENV_VARS+=" -e ROOTFS_DIR=/opt/rootfs"
-fi
-
-# mount volume (or directory) for externals
-if [ -n "$EXTERNAL_VOLUME" ]; then
- DOCKER_VOLUMES+=" -v $EXTERNAL_VOLUME:/externals"
- DOCKER_ENV_VARS+=" -e EXTERNAL_VOLUME=/externals"
-else
- echo "It will use default external path"
-fi
-
-# docker image name
-if [[ -z $DOCKER_IMAGE_NAME ]]; then
- echo "It will use default docker image name"
-fi
-
-DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
-DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
-DOCKER_ENV_VARS+=" -e BUILD_TYPE=release"
-
-# Mirror server setting
-if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
- echo "It will not use mirror server"
-fi
-
-pushd $ROOT_PATH > /dev/null
-
-# TODO use command instead of makefile
-export DOCKER_ENV_VARS
-export DOCKER_VOLUMES
-CMD="export OPTIONS='-DBUILD_NEURUN=OFF -DBUILD_PURE_ARM_COMPUTE=ON -DBUILD_TFLITE_LOADER=OFF' && \
- cp -nv Makefile.template Makefile && \
- make all install build_test_suite"
-./nnfw docker-run bash -c "${CMD}"
-
-popd > /dev/null
diff --git a/infra/scripts/docker_build_cross_arm_neurun.sh b/infra/scripts/docker_build_cross_arm_runtime.sh
index c1014c57c..551fb5700 100755
--- a/infra/scripts/docker_build_cross_arm_neurun.sh
+++ b/infra/scripts/docker_build_cross_arm_runtime.sh
@@ -6,7 +6,7 @@ CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_PATH="$CURRENT_PATH/../../"
# prepare rootfs
-if [ ! -d $ROOTFS_DIR ]; then
+if [ -z "$ROOTFS_DIR" ] || [ ! -d $ROOTFS_DIR ]; then
echo "It will use default rootfs path"
else
DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
@@ -34,6 +34,8 @@ fi
DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
+set -e
+
pushd $ROOT_PATH > /dev/null
# TODO use command instead of makefile
diff --git a/infra/scripts/docker_build_cross_arm_neurun_release.sh b/infra/scripts/docker_build_cross_arm_runtime_release.sh
index c8c3e997e..876f318f4 100755
--- a/infra/scripts/docker_build_cross_arm_neurun_release.sh
+++ b/infra/scripts/docker_build_cross_arm_runtime_release.sh
@@ -6,7 +6,7 @@ CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_PATH="$CURRENT_PATH/../../"
# prepare rootfs
-if [ ! -d $ROOTFS_DIR ]; then
+if [ -z "$ROOTFS_DIR" ] || [ ! -d $ROOTFS_DIR ]; then
echo "It will use default rootfs path"
else
DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
@@ -35,6 +35,8 @@ DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
DOCKER_ENV_VARS+=" -e BUILD_TYPE=release"
+set -e
+
pushd $ROOT_PATH > /dev/null
# TODO use command instead of makefile
diff --git a/infra/scripts/docker_build_cross_coverage.sh b/infra/scripts/docker_build_cross_coverage.sh
index 661e85b00..f42251baa 100755
--- a/infra/scripts/docker_build_cross_coverage.sh
+++ b/infra/scripts/docker_build_cross_coverage.sh
@@ -6,7 +6,7 @@ CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_PATH="$CURRENT_PATH/../../"
# prepare rootfs
-if [ ! -d $ROOTFS_DIR ]; then
+if [ -z "$ROOTFS_DIR" ] || [ ! -d $ROOTFS_DIR ]; then
echo "It will use default rootfs path"
else
DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
@@ -31,10 +31,17 @@ if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
echo "It will not use mirror server"
fi
+NNAS_WORKSPACE=${NNAS_WORKSPACE:-build}
+if [[ -z "${ARCHIVE_PATH}" ]]; then
+ ARCHIVE_PATH=${NNAS_WORKSPACE}/archive
+fi
+
DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
DOCKER_ENV_VARS+=" -e COVERAGE_BUILD=1"
+set -e
+
pushd $ROOT_PATH > /dev/null
# TODO use command instead of makefile
@@ -44,4 +51,8 @@ CMD="cp -nv Makefile.template Makefile && \
make all install build_coverage_suite"
./nnfw docker-run bash -c "$CMD"
+mkdir -p ${ARCHIVE_PATH}
+# TODO change workspace usage in makefile
+mv Product/out/coverage-suite.tar.gz ${ARCHIVE_PATH}/
+
popd > /dev/null
diff --git a/infra/scripts/docker_build_nncc.sh b/infra/scripts/docker_build_nncc.sh
new file mode 100755
index 000000000..5fd49a46f
--- /dev/null
+++ b/infra/scripts/docker_build_nncc.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+CONFIG_OPTIONS=""
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_OPTS+=" -v $EXTERNAL_VOLUME:/externals"
+ CONFIG_OPTIONS+=" -DNNAS_EXTERNALS_DIR=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# mount volume (or directory) for overlay
+if [ -n "$OVERLAY_VOLUME" ]; then
+ DOCKER_OPTS+=" -v $OVERLAY_VOLUME:/overlay"
+ CONFIG_OPTIONS+=" -DNNCC_OVERLAY_DIR=/overlay"
+else
+ echo "It will use default overlay path"
+fi
+
+# prepare tensorflow
+if [ -d $TENSORFLOW_PREFIX ]; then
+ DOCKER_OPTS+=" -v $TENSORFLOW_PREFIX:/opt/tensorflow"
+ CONFIG_OPTIONS+=" -DTENSORFLOW_PREFIX=/opt/tensorflow"
+fi
+
+# prepare onnx
+if [ -d $ONNXRUNTIME_PREFIX ]; then
+ DOCKER_OPTS+=" -v $ONNXRUNTIME_PREFIX:/opt/onnxruntime"
+ CONFIG_OPTIONS+=" -DONNXRUNTIME_PREFIX=/opt/onnxruntime"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+NNAS_WORKSPACE=${NNAS_WORKSPACE:-build}
+NNCC_INSTALL_PREFIX=${NNAS_WORKSPACE}/out
+DOCKER_OPTS+=" -e NNAS_BUILD_PREFIX=${NNAS_WORKSPACE}"
+export DOCKER_OPTS
+if [[ -z "${ARCHIVE_PATH}" ]]; then
+ ARCHIVE_PATH=${NNAS_WORKSPACE}/archive
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+mkdir -p ${NNCC_INSTALL_PREFIX}
+./nncc docker-run ./nnas create-package --prefix "${PWD}/${NNCC_INSTALL_PREFIX}" -- "${CONFIG_OPTIONS}"
+
+mkdir -p ${ARCHIVE_PATH}
+tar -zcf ${ARCHIVE_PATH}/nncc-package.tar.gz -C ${NNCC_INSTALL_PREFIX} --exclude test ./
+tar -zcf ${ARCHIVE_PATH}/nncc-test-package.tar.gz -C ${NNCC_INSTALL_PREFIX} ./test
+
+popd > /dev/null
diff --git a/infra/scripts/docker_build_test_x64.sh b/infra/scripts/docker_build_test_x64.sh
index a6078b755..16fcf3fa7 100755
--- a/infra/scripts/docker_build_test_x64.sh
+++ b/infra/scripts/docker_build_test_x64.sh
@@ -23,20 +23,18 @@ if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
echo "It will not use mirror server"
fi
+set -e
+
pushd $ROOT_PATH > /dev/null
export DOCKER_ENV_VARS
export DOCKER_VOLUMES
# Disable nnpackage_run build: mismatch between buildtool for CI and installed hdf5
CMD="export OPTIONS='-DBUILD_NNPACKAGE_RUN=OFF' && \
+ export BUILD_TYPE=Release && \
cp -nv Makefile.template Makefile && \
make all install build_test_suite"
./nnfw docker-run bash -c "$CMD"
-EXIT_CODE=$?
-
-if [ ${EXIT_CODE} -ne 0 ]; then
- exit ${EXIT_CODE}
-fi
# Model download server setting
if [[ -z $MODELFILE_SERVER ]]; then
@@ -44,10 +42,8 @@ if [[ -z $MODELFILE_SERVER ]]; then
exit 1
fi
-set -e
-
export DOCKER_ENV_VARS=" -e MODELFILE_SERVER=$MODELFILE_SERVER"
-./nnfw docker-run-user bash -c "./infra/scripts/test_x64_neurun_cpu.sh"
-./nnfw docker-run-user bash -c "./infra/scripts/test_neurun_interp.sh"
+./nnfw docker-run-user ./infra/scripts/test_ubuntu_runtime.sh --backend cpu
+./nnfw docker-run-user ./infra/scripts/test_ubuntu_runtime.sh --interp
popd > /dev/null
diff --git a/infra/scripts/docker_build_tizen_cross.sh b/infra/scripts/docker_build_tizen_cross.sh
index bcd0378ac..ee0f183f1 100755
--- a/infra/scripts/docker_build_tizen_cross.sh
+++ b/infra/scripts/docker_build_tizen_cross.sh
@@ -6,7 +6,7 @@ CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_PATH="$CURRENT_PATH/../../"
# prepare rootfs
-if [ ! -d $ROOTFS_DIR ]; then
+if [ -z "$ROOTFS_DIR" ] || [ ! -d $ROOTFS_DIR ]; then
echo "It will use default rootfs path"
else
DOCKER_VOLUMES+=" -v $ROOTFS_DIR:/opt/rootfs"
@@ -30,19 +30,20 @@ DOCKER_ENV_VARS+=" -e TARGET_ARCH=armv7l"
DOCKER_ENV_VARS+=" -e CROSS_BUILD=1"
DOCKER_ENV_VARS+=" -e TARGET_OS=tizen"
DOCKER_ENV_VARS+=" -e BUILD_TYPE=release"
-# Disable arm compute build (use rootfs)
-DOCKER_ENV_VARS+=" -e OPTIONS=-DBUILD_ARMCOMPUTE=OFF"
# Mirror server setting
if [[ -z $EXTERNAL_DOWNLOAD_SERVER ]]; then
echo "It will not use mirror server"
fi
+set -e
+
pushd $ROOT_PATH > /dev/null
export DOCKER_ENV_VARS
export DOCKER_VOLUMES
-CMD="cp -nv Makefile.template Makefile && \
+CMD="export OPTIONS+=' -DGENERATE_RUNTIME_NNAPI_TESTS=ON' && \
+ cp -nv Makefile.template Makefile && \
make all install build_test_suite"
./nnfw docker-run bash -c "$CMD"
diff --git a/infra/scripts/docker_build_tizen_gbs.sh b/infra/scripts/docker_build_tizen_gbs.sh
index 501cd3fdd..2d508f4c7 100755
--- a/infra/scripts/docker_build_tizen_gbs.sh
+++ b/infra/scripts/docker_build_tizen_gbs.sh
@@ -16,6 +16,8 @@ fi
DOCKER_ENV_VARS=" --privileged"
+set -e
+
pushd $ROOT_PATH > /dev/null
CMD="gbs -c $ROOT_PATH/infra/nnfw/config/gbs.conf build \
diff --git a/infra/scripts/docker_collect_nnpkg_resources.sh b/infra/scripts/docker_collect_nnpkg_resources.sh
new file mode 100755
index 000000000..55adaa15d
--- /dev/null
+++ b/infra/scripts/docker_collect_nnpkg_resources.sh
@@ -0,0 +1,100 @@
+#!/bin/bash
+
+function join_by
+{
+ local IFS="$1"; shift; echo "$*"
+}
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+
+CONFIG_OPTIONS=""
+# mount volume (or directory) for externals
+if [ -n "$EXTERNAL_VOLUME" ]; then
+ DOCKER_OPTS+=" -v $EXTERNAL_VOLUME:/externals"
+ CONFIG_OPTIONS+=" -DNNAS_EXTERNALS_DIR=/externals"
+else
+ echo "It will use default external path"
+fi
+
+# mount volume (or directory) for overlay
+if [ -n "$OVERLAY_VOLUME" ]; then
+ DOCKER_OPTS+=" -v $OVERLAY_VOLUME:/overlay"
+ CONFIG_OPTIONS+=" -DNNCC_OVERLAY_DIR=/overlay"
+else
+ echo "It will use default overlay path"
+fi
+
+# prepare tensorflow
+if [ -d $TENSORFLOW_PREFIX ]; then
+ DOCKER_OPTS+=" -v $TENSORFLOW_PREFIX:/opt/tensorflow"
+ CONFIG_OPTIONS+=" -DTENSORFLOW_PREFIX=/opt/tensorflow"
+fi
+
+# prepare onnx
+if [ -d $ONNXRUNTIME_PREFIX ]; then
+ DOCKER_OPTS+=" -v $ONNXRUNTIME_PREFIX:/opt/onnxruntime"
+ CONFIG_OPTIONS+=" -DONNXRUNTIME_PREFIX=/opt/onnxruntime"
+fi
+
+# docker image name
+if [[ -z $DOCKER_IMAGE_NAME ]]; then
+ echo "It will use default docker image name"
+fi
+
+# Assume that build is already finished, and ready to test
+NNAS_WORKSPACE=${NNAS_WORKSPACE:-build}
+export NNCC_WORKSPACE=${NNAS_WORKSPACE}/nncc
+export DOCKER_OPTS
+
+if [[ -z "${ARCHIVE_PATH}" ]]; then
+ ARCHIVE_PATH=${NNAS_WORKSPACE}/archive
+fi
+
+set -e
+
+pushd $ROOT_PATH > /dev/null
+
+REQUIRED_UNITS=()
+# Common Libraries
+REQUIRED_UNITS+=("angkor" "cwrap" "pepper-str" "pepper-strcast" "pp" "stdex")
+REQUIRED_UNITS+=("oops" "safemain" "foder" "arser" "vconone")
+# Hermes Logging Framework
+REQUIRED_UNITS+=("hermes" "hermes-std")
+# loco IR and related utilities
+REQUIRED_UNITS+=("loco" "locop" "locomotiv" "logo-core" "logo")
+# Circle compiler library (.circle -> .circle)
+REQUIRED_UNITS+=("luci")
+# Flatbuffer I/O
+REQUIRED_UNITS+=("mio-tflite" "mio-circle")
+# Tools
+REQUIRED_UNITS+=("tflite2circle" "circle2circle" "luci-interpreter")
+REQUIRED_UNITS+=("souschef" "tflchef" "circlechef" "circle-verify")
+# common-artifacts
+REQUIRED_UNITS+=("common-artifacts")
+
+# Reset whitelist to build all
+./nncc docker-run ./nncc configure -DENABLE_STRICT_BUILD=ON -DCMAKE_BUILD_TYPE=release \
+ -DBUILD_WHITELIST=$(join_by ";" "${REQUIRED_UNITS[@]}") \
+ $CONFIG_OPTIONS
+./nncc docker-run ./nncc build -j4
+
+mkdir -p ${ARCHIVE_PATH}
+TEMP_DIR=$(mktemp -d -t resXXXX)
+rm -f ${TEMP_DIR}/*
+mkdir -p ${TEMP_DIR}/nnpkg-tcs
+
+# Copy nnpackage only if it has its test data
+for nnpkg in $NNCC_WORKSPACE/compiler/common-artifacts/*; do
+ if [ -d $nnpkg/metadata/tc ]; then
+ cp -r $nnpkg ${TEMP_DIR}/nnpkg-tcs
+ fi
+done
+
+tar -zcf ${ARCHIVE_PATH}/nnpkg-test-suite.tar.gz -C ${TEMP_DIR} ./
+rm -rf ${TEMP_DIR}
+
+echo "resouce generation end"
+popd > /dev/null
diff --git a/infra/scripts/docker_coverage_report.sh b/infra/scripts/docker_coverage_report.sh
index c9bd9f1f5..677462d63 100755
--- a/infra/scripts/docker_coverage_report.sh
+++ b/infra/scripts/docker_coverage_report.sh
@@ -1,5 +1,7 @@
#!/bin/bash
+# coverage test data: ${ARCHIVE_PATH}/coverage-data.tar.gz
+
[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
@@ -10,13 +12,20 @@ if [[ -z $DOCKER_IMAGE_NAME ]]; then
echo "It will use default docker image name"
fi
+NNAS_WORKSPACE=${NNAS_WORKSPACE:-build}
+if [[ -z "${ARCHIVE_PATH}" ]]; then
+ ARCHIVE_PATH=${NNAS_WORKSPACE}/archive
+fi
+
set -e
pushd $ROOT_PATH > /dev/null
-CMD="GCOV_PATH=arm-linux-gnueabihf-gcov NNAS_WORKSPACE=Product ./nnas gen-coverage-report runtimes &&
+tar -zxf ${ARCHIVE_PATH}/coverage-data.tar.gz
+
+CMD="GCOV_PATH=arm-linux-gnueabihf-gcov NNAS_WORKSPACE=Product ./nnas gen-coverage-report runtime compute &&
tar -zcf coverage/coverage_report.tar.gz coverage/html &&
- python tools/lcov-to-cobertura-xml/lcov_cobertura.py coverage/coverage.info -o coverage/nnfw_coverage.xml"
+ python runtime/3rdparty/lcov-to-cobertura-xml/lcov_cobertura.py coverage/coverage.info -o coverage/nnfw_coverage.xml"
./nnfw docker-run-user bash -c "$CMD"
diff --git a/infra/scripts/test_arm_neurun_acl_cl.sh b/infra/scripts/test_arm_neurun_acl_cl.sh
deleted file mode 100755
index c41862514..000000000
--- a/infra/scripts/test_arm_neurun_acl_cl.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
-fi
-
-set -e
-
-pushd $ROOT_PATH > /dev/null
-
-export OP_BACKEND_ALLOPS=acl_cl
-
-cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux ./Product/out/unittest/nnapi_gtest.skip
-export EXECUTOR=Linear
-source ./tests/scripts/test_driver.sh \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_cl" .
-
-export EXECUTOR=Dataflow
-source ./tests/scripts/test_driver.sh \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_cl" .
-
-export EXECUTOR=Parallel
-source ./tests/scripts/test_driver.sh \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_cl" .
-
-popd > /dev/null
diff --git a/infra/scripts/test_arm_neurun_acl_neon.sh b/infra/scripts/test_arm_neurun_acl_neon.sh
deleted file mode 100755
index 3c6e6ce02..000000000
--- a/infra/scripts/test_arm_neurun_acl_neon.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
-fi
-
-set -e
-
-pushd $ROOT_PATH > /dev/null
-
-export OP_BACKEND_ALLOPS=acl_neon
-
-cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux.acl_neon ./Product/out/unittest/nnapi_gtest.skip
-export EXECUTOR=Linear
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_neon" .
-
-export EXECUTOR=Dataflow
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_neon" .
-
-export EXECUTOR=Parallel
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.acl_neon.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_neon" .
-
-
-popd > /dev/null
diff --git a/infra/scripts/test_arm_neurun_cpu.sh b/infra/scripts/test_arm_neurun_cpu.sh
deleted file mode 100755
index 6bf48598c..000000000
--- a/infra/scripts/test_arm_neurun_cpu.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
-fi
-
-set -e
-
-pushd $ROOT_PATH > /dev/null
-
-export OP_BACKEND_ALLOPS=cpu
-
-cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux.cpu ./Product/out/unittest/nnapi_gtest.skip
-export EXECUTOR=Linear
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/cpu" .
-
-export EXECUTOR=Dataflow
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/cpu" .
-
-export EXECUTOR=Parallel
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/cpu" .
-
-# Test tflite_loader
-source ./tests/scripts/test_driver.sh \
- --frameworktest \
- --framework_driverbin="$ROOT_PATH/Product/out/bin/tflite_loader_test_tool" \
- --frameworktest_list_file=tests/scripts/neurun_frameworktest_list.armv7l.cpu.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/cpu" .
-
-popd > /dev/null
diff --git a/infra/scripts/test_arm_neurun_mixed.sh b/infra/scripts/test_arm_neurun_mixed.sh
deleted file mode 100755
index 7ba9d2098..000000000
--- a/infra/scripts/test_arm_neurun_mixed.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
-fi
-
-set -e
-
-pushd $ROOT_PATH > /dev/null
-
-export EXECUTOR=Linear
-
-# NOTE Fixed backend assignment by type of operation
-# TODO Enhance this with randomized test
-BACKENDS=(cpu acl_cl acl_neon)
-unset OP_BACKEND_ALLOPS
-export OP_BACKEND_Conv2DNode=cpu
-export OP_BACKEND_MaxPool2DNode=acl_cl
-export OP_BACKEND_AvgPool2DNode=acl_neon
-export ACL_LAYOUT=NCHW
-
-# Get the intersect of framework test list files(each backend has a lsit)
-TESTLIST_PREFIX="tests/scripts/neurun_frameworktest_list.armv7l"
-cat $TESTLIST_PREFIX.${BACKENDS[0]}.txt | sort > $TESTLIST_PREFIX.intersect.txt
-for BACKEND in $BACKENDS; do
- comm -12 <(sort $TESTLIST_PREFIX.intersect.txt) <(sort $TESTLIST_PREFIX.$BACKEND.txt) > $TESTLIST_PREFIX.intersect.next.txt
- mv $TESTLIST_PREFIX.intersect.next.txt $TESTLIST_PREFIX.intersect.txt
-done
-
-# Run the test
-cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux ./Product/out/unittest/nnapi_gtest.skip
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=$TESTLIST_PREFIX.intersect.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib/neurun:$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/mixed" .
-
-popd > /dev/null
diff --git a/infra/scripts/test_arm_nnpkg.sh b/infra/scripts/test_arm_nnpkg.sh
index 23759a319..d00eb730f 100755
--- a/infra/scripts/test_arm_nnpkg.sh
+++ b/infra/scripts/test_arm_nnpkg.sh
@@ -1,24 +1,16 @@
#!/bin/bash
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+set -eo pipefail
+source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="${CURRENT_PATH}/../../"
+BACKENDS=("acl_cl" "acl_neon" "cpu")
-pushd ${ROOT_PATH} > /dev/null
-
-EXITCODE=0
-PKG_LIST=$(cat tools/nnpackage_tool/nnpkg_test/list)
-for f in ${PKG_LIST}
+for BACKEND in "${BACKENDS[@]}";
do
- tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh -d -i nnpkg-tcs $f
- EXITCODE_F=$?
-
- if [ ${EXITCODE_F} -ne 0 ]; then
- EXITCODE=${EXITCODE_F}
- fi
+ NNPackageTest ${BACKEND} "Product/out/test/list/nnpkg_test_list.armv7l-linux.${BACKEND}"
done
-popd > /dev/null
-
-exit ${EXITCODE}
+# Interpreter test
+export DISABLE_COMPILE=1
+NNPackageTest "interp" "Product/out/test/list/nnpkg_test_list.noarch.interp"
+unset DISABLE_COMPILE
diff --git a/infra/scripts/test_arm_pacl.sh b/infra/scripts/test_arm_pacl.sh
deleted file mode 100755
index eb50e2610..000000000
--- a/infra/scripts/test_arm_pacl.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
-fi
-
-set -e
-
-pushd $ROOT_PATH > /dev/null
-
-cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-linux.pacl ./Product/out/unittest/nnapi_gtest.skip
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=tests/scripts/pacl_frameworktest_list.armv7l-linux.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib/pureacl:$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report" .
-
-popd > /dev/null
diff --git a/infra/scripts/test_coverage.sh b/infra/scripts/test_coverage.sh
index 7dd5ece28..369e53239 100755
--- a/infra/scripts/test_coverage.sh
+++ b/infra/scripts/test_coverage.sh
@@ -1,46 +1,52 @@
#!/bin/bash
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+# Test suite: ${ARCHIVE_PATH}/coverage-suite.tar.gz
+# NNPackage test suite: ${ARCHIVE_PATH}/nnpkg-test-suite.tar.gz (optional)
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
+set -eo pipefail
+source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
+pushd $ROOT_PATH > /dev/null
+
+NNAS_WORKSPACE=${NNAS_WORKSPACE:-build}
+if [[ -z "${ARCHIVE_PATH}" ]]; then
+ ARCHIVE_PATH=${NNAS_WORKSPACE}/archive
+ echo "Default archive directory including nncc package and resources: ${ARCHIVE_PATH}"
fi
-export MODELFILE_SERVER=$MODELFILE_SERVER
+
+tar -zxf ${ARCHIVE_PATH}/coverage-suite.tar.gz -C ./
+
+CheckTestPrepared
if [[ ! -e $ROOT_PATH/tests/scripts/build_path_depth.txt ]]; then
echo "Cannot find prefix strip file"
exit 1
fi
-
-set -e
-
export GCOV_PREFIX_STRIP=`cat $ROOT_PATH/tests/scripts/build_path_depth.txt`
-pushd $ROOT_PATH > /dev/null
-
-./infra/scripts/test_arm_neurun_acl_cl.sh
-./infra/scripts/test_arm_neurun_acl_neon.sh
-./infra/scripts/test_arm_neurun_cpu.sh
-./infra/scripts/test_arm_neurun_mixed.sh
+./infra/scripts/test_ubuntu_runtime.sh --backend acl_cl --tflite-loader
+./infra/scripts/test_ubuntu_runtime.sh --backend acl_neon
+./infra/scripts/test_ubuntu_runtime.sh --backend cpu
-# Enable all logs (acl_cl kernel)
-NEURUN_LOG_ENABLE=1 GRAPH_DOT_DUMP=1 ./infra/scripts/test_arm_neurun_acl_cl.sh
+# Enable all logs (mixed backend)
+TENSOR_LOGGING=trace_log.txt ONERT_LOG_ENABLE=1 GRAPH_DOT_DUMP=1 ./infra/scripts/test_ubuntu_runtime_mixed.sh
+# Enable trace event (acl_cl default backend)
+export TRACE_FILEPATH=trace.json
+TFLiteModelVerification "acl_cl" "Product/out/test/list/frameworktest_list.armv7l.acl_cl.txt" "report/acl_cl/trace"
+unset TRACE_FILEPATH
# Interpreter
-./infra/scripts/test_neurun_interp.sh
+./infra/scripts/test_ubuntu_runtime.sh --interp
-if [[ -e ${ROOT_PATH}/tools/nnpackage_tool/nnpkg_test/nnpkg_test.sh ]]; then
+# nnpackage test suite
+if [[ -e ${ARCHIVE_PATH}/nnpkg-test-suite.tar.gz ]]; then
+ tar -zxf ${ARCHIVE_PATH}/nnpkg-test-suite.tar.gz -C ./
./infra/scripts/test_arm_nnpkg.sh
fi
# Pack coverage test data: coverage-data.tar.gz
find Product -type f \( -iname *.gcda -or -iname *.gcno \) > include_lists.txt
-tar -zcf coverage-data.tar.gz nnas nnfw infra runtimes tools -T include_lists.txt
+tar -zcf ${ARCHIVE_PATH}/coverage-data.tar.gz -T include_lists.txt
rm -rf include_lists.txt
popd > /dev/null
diff --git a/infra/scripts/test_make_nnpkg.sh b/infra/scripts/test_make_nnpkg.sh
new file mode 100755
index 000000000..c94be3e24
--- /dev/null
+++ b/infra/scripts/test_make_nnpkg.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# Resource: test-resources.tar.gz
+# nncc package: nncc-package.tar.gz
+
+# Test suite: nnpkg-test-suite.tar.gz
+
+set -eo pipefail
+source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
+
+NNAS_WORKSPACE=${NNAS_WORKSPACE:-build}
+if [[ -z "${ARCHIVE_PATH}" ]]; then
+ ARCHIVE_PATH=${NNAS_WORKSPACE}/archive
+ echo "Default archive directory including nncc package and resources: ${ARCHIVE_PATH}"
+fi
+
+pushd ${ROOT_PATH} > /dev/null
+
+RESOURCE_PATH=${NNAS_WORKSPACE}/tfmodel
+BIN_PATH=${NNAS_WORKSPACE}/bin/nncc
+mkdir -p ${BIN_PATH}
+mkdir -p ${RESOURCE_PATH}
+tar -zxf ${ARCHIVE_PATH}/nncc-package.tar.gz -C ${BIN_PATH}
+tar -zxf ${ARCHIVE_PATH}/test-resources.tar.gz -C ${RESOURCE_PATH}
+
+export PATH=${PATH}:${PWD}/${BIN_PATH}/bin
+
+for f in `find ${RESOURCE_PATH} -name "*.pb" | cut -d'.' -f1 | sort | uniq`;
+do
+ tools/nnpackage_tool/nncc-tc-to-nnpkg-tc/nncc-tc-to-nnpkg-tc.sh -o nnpkg-tcs -i ${f%/*} $(basename $f);
+done
+
+tar -zcf ${ARCHIVE_PATH}/nnpkg-test-suite.tar.gz nnpkg-tcs
+
+popd > /dev/null
diff --git a/infra/scripts/test_neurun_interp.sh b/infra/scripts/test_neurun_interp.sh
deleted file mode 100755
index 6687e8089..000000000
--- a/infra/scripts/test_neurun_interp.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
-fi
-
-export DISABLE_COMPILE=1
-
-cp -v ./Product/out/unittest/nnapi_gtest.skip.noarch.interp ./Product/out/unittest/nnapi_gtest.skip
-./tests/scripts/test_driver.sh \
- --ldlibrarypath=$ROOT_PATH/Product/out/lib --unittest .
diff --git a/infra/scripts/test_tizen_neurun_acl_cl.sh b/infra/scripts/test_tizen_neurun_acl_cl.sh
deleted file mode 100755
index d09895463..000000000
--- a/infra/scripts/test_tizen_neurun_acl_cl.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-set -e
-
-pushd $ROOT_PATH > /dev/null
-
-export OP_BACKEND_ALLOPS=acl_cl
-
-cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-tizen ./Product/out/unittest/nnapi_gtest.skip
-export EXECUTOR=Linear
-source ./tests/scripts/test_driver.sh \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_cl" .
-
-export EXECUTOR=Dataflow
-source ./tests/scripts/test_driver.sh \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_cl" .
-
-export EXECUTOR=Parallel
-source ./tests/scripts/test_driver.sh \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/acl_cl" .
-
-popd > /dev/null
diff --git a/infra/scripts/test_tizen_neurun_mixed.sh b/infra/scripts/test_tizen_neurun_mixed.sh
deleted file mode 100755
index ef1781486..000000000
--- a/infra/scripts/test_tizen_neurun_mixed.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-set -e
-
-pushd $ROOT_PATH > /dev/null
-
-export EXECUTOR=Linear
-
-# NOTE Fixed backend assignment by type of operation
-# TODO Enhance this with randomized test
-BACKENDS=(cpu acl_cl acl_neon)
-unset OP_BACKEND_ALLOPS
-export OP_BACKEND_Conv2DNode=cpu
-export OP_BACKEND_MaxPool2DNode=acl_cl
-export OP_BACKEND_AvgPool2DNode=acl_neon
-export ACL_LAYOUT=NCHW
-
-# Get the intersect of framework test list files(each backend has a lsit)
-TESTLIST_PREFIX="tests/scripts/neurun_frameworktest_list.armv7l"
-cat $TESTLIST_PREFIX.${BACKENDS[0]}.txt | sort > $TESTLIST_PREFIX.intersect.txt
-for BACKEND in $BACKENDS; do
- comm -12 <(sort $TESTLIST_PREFIX.intersect.txt) <(sort $TESTLIST_PREFIX.$BACKEND.txt) > $TESTLIST_PREFIX.intersect.next.txt
- mv $TESTLIST_PREFIX.intersect.next.txt $TESTLIST_PREFIX.intersect.txt
-done
-
-# Run the test
-cp -v ./Product/out/unittest/nnapi_gtest.skip.armv7l-tizen ./Product/out/unittest/nnapi_gtest.skip
-source ./tests/scripts/test_driver.sh \
- --frameworktest_list_file=$TESTLIST_PREFIX.intersect.txt \
- --ldlibrarypath="$ROOT_PATH/Product/out/lib/neurun:$ROOT_PATH/Product/out/lib" \
- --reportdir="$ROOT_PATH/report/mixed" .
-
-popd > /dev/null
diff --git a/infra/scripts/test_ubuntu_runtime.sh b/infra/scripts/test_ubuntu_runtime.sh
new file mode 100755
index 000000000..db70580f8
--- /dev/null
+++ b/infra/scripts/test_ubuntu_runtime.sh
@@ -0,0 +1,109 @@
+#!/bin/bash
+
+set -eo pipefail
+source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
+
+: ${TEST_ARCH:=$(uname -m | tr '[:upper:]' '[:lower:]')}
+BACKEND="cpu"
+TEST_OS="linux"
+TEST_PLATFORM="$TEST_ARCH-$TEST_OS"
+TFLITE_LOADER="0"
+LINEAR_ONLY="0"
+RUN_INTERP="0"
+
+function Usage()
+{
+ echo "Usage: $0 $(basename ${BASH_SOURCE[0]}) [OPTIONS]"
+ echo ""
+ echo "Options:"
+ echo " --backend <BACKEND> Runtime backend to test (default: ${BACKEND})"
+ echo " --tflite-loader Enable TFLite Loader test"
+ echo " --linear-only Use Linear executor only"
+}
+
+while [[ $# -gt 0 ]]
+do
+ arg="$1"
+ case $arg in
+ -h|--help|help)
+ Usage
+ exit 0
+ ;;
+ --backend)
+ BACKEND=$(echo $2 | tr '[:upper:]' '[:lower:]')
+ shift 2
+ ;;
+ --backend=*)
+ BACKEND=$(echo ${1#*=} | tr '[:upper:]' '[:lower:]')
+ shift
+ ;;
+ --tflite-loader)
+ TFLITE_LOADER="1"
+ shift
+ ;;
+ --linear-only)
+ LINEAR_ONLY="1"
+ shift
+ ;;
+ --interp)
+ RUN_INTERP="1"
+ shift;
+ ;;
+ *)
+ # Ignore
+ shift
+ ;;
+ esac
+done
+
+CheckTestPrepared
+
+if [ $RUN_INTERP = "1" ]; then
+ TEST_PLATFORM="noarch"
+ TEST_ARCH="noarch"
+ BACKEND="interp"
+ echo "[[ Interpreter test ]]"
+else
+ echo "[[ ${TEST_PLATFORM}: ${BACKEND} backend test ]]"
+fi
+
+UNITTEST_SKIPLIST="Product/out/unittest/nnapi_gtest.skip.${TEST_PLATFORM}.${BACKEND}"
+FRAMEWORK_TESTLIST="Product/out/test/list/frameworktest_list.${TEST_ARCH}.${BACKEND}.txt"
+REPORT_BASE="report/${BACKEND}"
+EXECUTORS=("Linear" "Dataflow" "Parallel")
+
+if [ $LINEAR_ONLY = "1" ]; then
+ EXECUTORS=("Linear")
+fi
+if [ $RUN_INTERP = "1" ]; then
+ EXECUTORS=("Interpreter")
+fi
+
+for EXECUTOR in "${EXECUTORS[@]}";
+do
+ echo "[EXECUTOR]: ${EXECUTOR}"
+ REPORT_PATH="${REPORT_BASE}/${EXECUTOR}"
+
+ if [ $EXECUTOR = "Interpreter" ]; then
+ export DISABLE_COMPILE=1
+ BACKEND=""
+ else
+ export EXECUTOR="${EXECUTOR}"
+ fi
+
+ NNAPIGTest "${BACKEND}" "${UNITTEST_SKIPLIST}" "${REPORT_PATH}"
+ TFLiteModelVerification "${BACKEND}" "${FRAMEWORK_TESTLIST}" "${REPORT_PATH}"
+
+ if [ $EXECUTOR = "Interpreter" ]; then
+ unset DISABLE_COMPILE
+ else
+ unset EXECUTOR
+ fi
+done
+
+# Currently supports acl_cl backend testlist only
+# TODO Support more backends
+TFLITE_LOADER_TESTLIST="Product/out/test/list/tflite_loader_list.${TEST_ARCH}.txt"
+if [[ $TFLITE_LOADER = "1" ]]; then
+ TFLiteLoaderTest "${BACKEND}" "${TFLITE_LOADER_TESTLIST}" "${REPORT_BASE}/loader/${EXECUTOR}"
+fi
diff --git a/infra/scripts/test_ubuntu_runtime_mixed.sh b/infra/scripts/test_ubuntu_runtime_mixed.sh
new file mode 100755
index 000000000..40f59ebfd
--- /dev/null
+++ b/infra/scripts/test_ubuntu_runtime_mixed.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+set -eo pipefail
+source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
+
+CheckTestPrepared
+
+# TODO Get argument for mix configuration
+: ${TEST_ARCH:=$(uname -m | tr '[:upper:]' '[:lower:]')}
+TEST_OS="linux"
+
+# nnfw_api_gtest
+# NOTE: This test is run here as it does not depend on BACKEND or EXECUTOR
+
+# This test requires test model installation
+pushd ${ROOT_PATH} > /dev/null
+echo ""
+echo "==== Run standalone unittest begin ===="
+echo ""
+Product/out/test/onert-test unittest --unittestdir=Product/out/unittest_standalone
+echo ""
+echo "==== Run standalone unittest end ===="
+echo ""
+
+# Test custom op
+pushd ${ROOT_PATH} > /dev/null
+./Product/out/test/FillFrom_runner
+popd > /dev/null
+
+# NOTE Fixed backend assignment by type of operation
+# TODO Enhance this with randomized test
+BACKENDS=(acl_cl acl_neon cpu)
+
+# Get the intersect of framework test list files
+TESTLIST_PREFIX="Product/out/test/list/frameworktest_list.${TEST_ARCH}"
+SKIPLIST_PREFIX="Product/out/unittest/nnapi_gtest.skip.${TEST_ARCH}-${TEST_OS}"
+sort $TESTLIST_PREFIX.${BACKENDS[0]}.txt > $TESTLIST_PREFIX.intersect.txt
+sort $SKIPLIST_PREFIX.${BACKENDS[0]} > $SKIPLIST_PREFIX.union
+for BACKEND in "${BACKENDS[@]:1}"; do
+ comm -12 <(sort $TESTLIST_PREFIX.intersect.txt) <(sort $TESTLIST_PREFIX.$BACKEND.txt) > $TESTLIST_PREFIX.intersect.next.txt
+ comm <(sort $SKIPLIST_PREFIX.union) <(sort $SKIPLIST_PREFIX.$BACKEND) | tr -d "[:blank:]" > $SKIPLIST_PREFIX.union.next
+ mv $TESTLIST_PREFIX.intersect.next.txt $TESTLIST_PREFIX.intersect.txt
+ mv $SKIPLIST_PREFIX.union.next $SKIPLIST_PREFIX.union
+done
+popd > /dev/null
+
+# Fail on NCHW layout (acl_cl, acl_neon)
+# TODO Fix bug
+echo "GeneratedTests.*weights_as_inputs*" >> $SKIPLIST_PREFIX.union
+echo "GeneratedTests.logical_or_broadcast_4D_2D_nnfw" >> $SKIPLIST_PREFIX.union
+echo "GeneratedTests.mean" >> $SKIPLIST_PREFIX.union
+echo "GeneratedTests.add_broadcast_4D_2D_after_nops_float_nnfw" >> $SKIPLIST_PREFIX.union
+echo "GeneratedTests.argmax_*" >> $SKIPLIST_PREFIX.union
+echo "GeneratedTests.squeeze_relaxed" >> $SKIPLIST_PREFIX.union
+
+# Run the test
+export OP_BACKEND_Conv2D="cpu"
+export OP_BACKEND_MaxPool2D="acl_cl"
+export OP_BACKEND_AvgPool2D="acl_neon"
+export ACL_LAYOUT="NCHW"
+NNAPIGTest "acl_cl;acl_neon;cpu" "Product/out/unittest/nnapi_gtest.skip.${TEST_ARCH}-${TEST_OS}.union" "report/mixed"
+TFLiteModelVerification "acl_cl;acl_neon;cpu" "${TESTLIST_PREFIX}.intersect.txt" "report/mixed"
diff --git a/infra/scripts/test_x64_neurun_cpu.sh b/infra/scripts/test_x64_neurun_cpu.sh
deleted file mode 100755
index c522ea1fb..000000000
--- a/infra/scripts/test_x64_neurun_cpu.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-
-[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
-
-CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_PATH="$CURRENT_PATH/../../"
-
-# Model download server setting
-if [[ -z $MODELFILE_SERVER ]]; then
- echo "Need model file server setting"
- exit 1
-fi
-
-export BACKENDS=cpu
-export OP_BACKEND_ALLOPS=cpu
-
-./tests/scripts/test_driver.sh \
- --ldlibrarypath=$ROOT_PATH/Product/out/lib \
- --frameworktest_list_file=./tests/scripts/neurun_frameworktest_list.x86-64.cpu.txt \
- --reportdir=$ROOT_PATH/report/ .
diff --git a/infra/scripts/tizen_xu4_test.sh b/infra/scripts/tizen_xu4_test.sh
index 19aa0297e..f412e7f7a 100755
--- a/infra/scripts/tizen_xu4_test.sh
+++ b/infra/scripts/tizen_xu4_test.sh
@@ -8,14 +8,44 @@ fi
function Usage()
{
- echo "Usage: ./tizen_xu4_test.sh --rpm-dir=path/to/rpm-dir --unittest --verification"
- echo "Usage: ./tizen_xu4_test.sh --test-suite-path=path/to/test-suite.tar.gz --unittest --verification"
- echo "--rpm-dir : directory containing nnfw.rpm and nnfw-test.rpm"
- echo "--test-suite-path : filepath to test-suite.tar.gz"
- echo "--unittest : run unittest"
- echo "--verification : run verification"
- echo "--framework : run framework"
- echo "--gcov-dir : directory to save gcov files"
+ echo "Usage: ./tizen_xu4_test.sh"
+ echo "Usage: ./tizen_xu4_test.sh --rpm-dir=path/to/rpm-dir"
+ echo "Usage: ./tizen_xu4_test.sh --test-suite-path=path/to/test-suite.tar.gz"
+ echo "Usage: ./tizen_xu4_test.sh --skip-install-model"
+ echo "Usage: ./tizen_xu4_test.sh --rpm-dir=path/to/rpm-dir --skip-test"
+ echo ""
+ echo "--rpm-dir <dir> : directory containing nnfw.rpm and nnfw-test.rpm"
+ echo "--test-suite-path <dir> : filepath to test-suite.tar.gz"
+ echo "--skip-install-model : skip install downloaded model"
+ echo "--skip-test : skip running test"
+ echo "--gcov-dir <dir> : directory to save gcov files"
+}
+
+function install_model()
+{
+ # download tflite model files
+ pushd $HOST_HOME
+ tests/scripts/models/run_test.sh --download=on --run=off
+ # TODO Since this command removes model file(.zip),
+ # We must always download the file unlike model file(.tflite).
+ # Because caching applies only to tflite file.
+ find tests -name "*.zip" -exec rm {} \;
+ tar -zcf cache.tar.gz -C tests/scripts/models cache
+ $SDB_CMD push cache.tar.gz $TEST_ROOT/.
+ rm -rf cache.tar.gz
+ $SDB_CMD shell tar -zxf $TEST_ROOT/cache.tar.gz -C $TEST_ROOT/Product/out/test/models
+
+ # download api test model file for nnfw_api_gtest
+ MODEL_CACHE_DIR=$(mktemp -d)
+ tests/scripts/models/run_test.sh --download=on --run=off \
+ --configdir=tests/scripts/models/nnfw_api_gtest \
+ --cachedir=$MODEL_CACHE_DIR
+ tar -zcf $MODEL_CACHE_DIR/api_model_test.tar.gz -C $MODEL_CACHE_DIR .
+ $SDB_CMD push $MODEL_CACHE_DIR/api_model_test.tar.gz $TEST_ROOT/Product/out/unittest_standalone/nnfw_api_gtest_models/
+ $SDB_CMD shell tar -zxf $TEST_ROOT/Product/out/unittest_standalone/nnfw_api_gtest_models/api_model_test.tar.gz \
+ -C $TEST_ROOT/Product/out/unittest_standalone/nnfw_api_gtest_models/
+ rm -rf $MODEL_CACHE_DIR
+ popd
}
@@ -26,20 +56,11 @@ function prepare_rpm_test()
$SDB_CMD shell rm -rf $TEST_ROOT
$SDB_CMD shell mkdir -p $TEST_ROOT
# install nnfw nnfw-test rpms
- for file in $RPM_DIR/*
+ for file in $RPM_DIR/*.rpm
do
$SDB_CMD push $file $TEST_ROOT
$SDB_CMD shell rpm -Uvh $TEST_ROOT/$(basename $file) --force --nodeps
done
-
- # download tflite model files
- pushd $HOST_HOME
- tests/framework/run_test.sh --download=on
- find tests -name "*.zip" -exec rm {} \;
- tar -zcf cache.tar.gz tests/framework/cache
- $SDB_CMD push cache.tar.gz $TEST_ROOT/.
- rm -rf cache.tar.gz
- $SDB_CMD shell tar -zxf $TEST_ROOT/cache.tar.gz -C $TEST_ROOT
}
function prepare_suite_test()
@@ -52,18 +73,10 @@ function prepare_suite_test()
# install test-suite
$SDB_CMD push $TEST_SUITE_PATH $TEST_ROOT/$(basename $TEST_SUITE_PATH)
$SDB_CMD shell tar -zxf $TEST_ROOT/$(basename $TEST_SUITE_PATH) -C $TEST_ROOT
-
- # download tflite model files
- pushd $HOST_HOME
- tests/framework/run_test.sh --download=on
- find tests -name "*.zip" -exec rm {} \;
- tar -zcf cache.tar.gz tests/framework/cache
- $SDB_CMD push cache.tar.gz $TEST_ROOT/.
- rm -rf cache.tar.gz
- $SDB_CMD shell tar -zxf $TEST_ROOT/cache.tar.gz -C $TEST_ROOT
}
-
+INSTALL_MODEL="1"
+RUN_TEST="1"
# Parse command argv
for i in "$@"
do
@@ -75,21 +88,26 @@ do
--rpm-dir=*)
RPM_DIR=${i#*=}
;;
+ --rpm-dir)
+ RPM_DIR="$2"
+ shift
+ ;;
--test-suite-path=*)
TEST_SUITE_PATH=${i#*=}
;;
- --unittest)
- UNITTEST=on
- ;;
- --verification)
- VERIFICATION=on
+ --test-suite-path)
+ RPM_DIR="$2"
+ shift
;;
- --framework)
- FRAMEWORK=on
+ --skip-install-model)
+ INSTALL_MODEL="0"
;;
--gcov-dir=*)
GCOV_DIR=${i#*=}
;;
+ --skip-test)
+ RUN_TEST="0"
+ ;;
esac
shift
done
@@ -121,41 +139,56 @@ SCRIPT_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ROOT=$SCRIPT_ROOT/../
if [ -z "$RPM_DIR" ] && [ -z "$TEST_SUITE_PATH" ]; then
- echo "Please provide --rpm-dir or --test-suite-path"
- exit 255
+ echo "======= Skip install runtime ======="
fi
if [ ! -z "$RPM_DIR" ]; then
prepare_rpm_test
-else
+elif [ ! -z "$TEST_SUITE_PATH" ]; then
prepare_suite_test
fi
-# For tizen, we run acl_cl and mixed test
-$SDB_CMD shell /bin/bash -c "IGNORE_MD5=1 $TEST_ROOT/infra/scripts/test_tizen_neurun_acl_cl.sh"
-$SDB_CMD shell /bin/bash -c "IGNORE_MD5=1 $TEST_ROOT/infra/scripts/test_tizen_neurun_mixed.sh"
-
-# run unittest
-if [ "$UNITTEST" == "on" ]; then
- $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --unittest --artifactpath=$TEST_ROOT
-fi
-
-# run framework test
-if [ "$FRAMEWORK" == "on" ]; then
- $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --frameworktest --artifactpath=$TEST_ROOT
+if [ $INSTALL_MODEL = "1" ]; then
+ install_model
+else
+ echo "======= Skip install model ======="
fi
-# run verification
-if [ "$VERIFICATION" == "on" ]; then
- $SDB_CMD shell $TEST_ROOT/tests/scripts/test_driver.sh --verification --artifactpath=$TEST_ROOT
+if [ $RUN_TEST = "0" ]; then
+ echo "======= Skip test ======="
+ exit 0
fi
-# pull gcov files
-if [ -n "$GCOV_DIR" ]; then
- $SDB_CMD shell 'rm -rf /home/gcov && mkdir -p /home/gcov'
- $SDB_CMD shell 'find / -type f \( -iname "*.gcda" -or -iname "*.gcno" \) -exec cp {} /home/gcov/. \;'
- $SDB_CMD shell 'cd /home/ && tar -zcvf gcov.tar.gz ./gcov '
- cd $GCOV_DIR
- sdb pull /home/gcov.tar.gz
- tar -zxvf gcov.tar.gz
+if [ -z "${GCOV_DIR}" ]; then
+ ${SDB_CMD} shell /bin/bash -c "IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --backend acl_cl --tflite-loader"
+ ${SDB_CMD} shell /bin/bash -c "IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --backend acl_neon"
+ ${SDB_CMD} shell /bin/bash -c "IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --backend cpu"
+ ${SDB_CMD} shell /bin/bash -c "IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime_mixed.sh"
+ ${SDB_CMD} shell /bin/bash -c "IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --interp"
+else
+ mkdir -p ${GCOV_DIR}
+ rm -rf ${GCOV_DIR}/*
+ pushd ${GCOV_DIR}
+
+ sdb pull ${TEST_ROOT}/Product/out/test/build_path.txt
+ SRC_PREFIX=`cat build_path.txt`
+ GCOV_PREFIX_STRIP=`echo "${SRC_PREFIX}" | grep -o '/' | wc -l`
+ GCOV_DATA_PATH="/opt/usr/nnfw-gcov"
+
+ # TODO For coverage check, we run acl_cl and mixed test
+ ${SDB_CMD} shell /bin/bash -c "GCOV_PREFIX_STRIP=${GCOV_PREFIX_STRIP} IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --backend acl_cl --tflite-loader"
+ ${SDB_CMD} shell /bin/bash -c "GCOV_PREFIX_STRIP=${GCOV_PREFIX_STRIP} IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --backend acl_neon"
+ ${SDB_CMD} shell /bin/bash -c "GCOV_PREFIX_STRIP=${GCOV_PREFIX_STRIP} IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --backend cpu"
+ ${SDB_CMD} shell /bin/bash -c "GCOV_PREFIX_STRIP=${GCOV_PREFIX_STRIP} IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime_mixed.sh"
+ ${SDB_CMD} shell /bin/bash -c "GCOV_PREFIX_STRIP=${GCOV_PREFIX_STRIP} IGNORE_MD5=1 TEST_ARCH=armv7l ${TEST_ROOT}/infra/scripts/test_ubuntu_runtime.sh --interp"
+
+ # More test to check coverage
+ ${SDB_CMD} shell "rm -rf ${GCOV_DATA_PATH} && mkdir -p ${GCOV_DATA_PATH}"
+ ${SDB_CMD} shell "find ${TEST_ROOT} -type f \( -iname '*.gcda' -or -iname '*.gcno' \) -exec cp {} ${GCOV_DATA_PATH}/. \;"
+ ${SDB_CMD} shell "cd ${TEST_ROOT} && tar -zcvf coverage-data.tar.gz -C ${GCOV_DATA_PATH} ."
+
+ # pull gcov files
+ sdb pull ${TEST_ROOT}/coverage-data.tar.gz
+ tar -zxvf coverage-data.tar.gz
+ popd
fi
diff --git a/infra/scripts/unittest_compiler_xml.sh b/infra/scripts/unittest_compiler_xml.sh
new file mode 100755
index 000000000..46d3bc813
--- /dev/null
+++ b/infra/scripts/unittest_compiler_xml.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+[[ "${BASH_SOURCE[0]}" != "${0}" ]] && echo "Please don't source ${BASH_SOURCE[0]}, execute it" && return
+
+set -eo pipefail
+
+CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ROOT_PATH="$CURRENT_PATH/../../"
+NNCC_WORKSPACE=${NNCC_WORKSPACE:-${ROOT_PATH}build}
+UNITTEST_REPORT_DIR=${NNCC_WORKSPACE}/unittest_compiler_xml
+
+for i in "$@"
+do
+ case $i in
+ --reportdir=*)
+ UNITTEST_REPORT_DIR=${i#*=}
+ ;;
+ esac
+ shift
+done
+
+if [ ! -e "$UNITTEST_REPORT_DIR" ]; then
+ mkdir -p $UNITTEST_REPORT_DIR
+fi
+
+for TEST_BIN in `find ${NNCC_WORKSPACE}/compiler -type f -executable -name *_test`; do
+ TEST_NAME="$(basename -- $TEST_BIN)"
+ LUGI_LOG=999 $TEST_BIN --gtest_output="xml:$UNITTEST_REPORT_DIR/$TEST_NAME.xml"
+done